Mirror of https://github.com/sstent/containers.git (synced 2025-12-06 08:01:51 +00:00)

Commit: first

dante/Dockerfile.openvpn (new file, 13 lines)
@@ -0,0 +1,13 @@

FROM debian:stable
# RUN echo 'http://dl-cdn.alpinelinux.org/alpine/edge/community' >> '/etc/apk/repositories'
# RUN apk add --no-cache busybox musl dante-server

RUN apt-get update && apt-get install -y dante-server && rm -rf /var/lib/apt/lists/*
ADD sockd.conf_tun /etc/sockd.conf
ADD sockd.sh /usr/local/bin/
RUN chmod a+x /usr/local/bin/sockd.sh

EXPOSE 1080

ENTRYPOINT ["/bin/sh","/usr/local/bin/sockd.sh"]

dante/Dockerfile.wireguard (new file, 13 lines)
@@ -0,0 +1,13 @@

FROM debian:stable
# RUN echo 'http://dl-cdn.alpinelinux.org/alpine/edge/community' >> '/etc/apk/repositories'
# RUN apk add --no-cache busybox musl dante-server

RUN apt-get update && apt-get install -y dante-server && rm -rf /var/lib/apt/lists/*
ADD sockd.conf_wg /etc/sockd.conf
ADD sockd.sh /usr/local/bin/
RUN chmod a+x /usr/local/bin/sockd.sh

EXPOSE 1080

ENTRYPOINT ["/bin/sh","/usr/local/bin/sockd.sh"]

dante/sockd.conf_tun (new file, 28 lines)
@@ -0,0 +1,28 @@
debug: 0
logoutput: stderr
internal: 0.0.0.0 port = 1080
external: tun0
socksmethod: none
clientmethod: none
user.privileged: root
user.unprivileged: nobody

client pass {
    from: 192.168.1.0/24 port 1-65535 to: 0.0.0.0/0
    #clientmethod: rfc931 # match all idented users that also are in passwordfile
}

client pass {
    from: 172.0.0.0/8 port 1-65535 to: 0.0.0.0/0
    #clientmethod: rfc931 # match all idented users that also are in passwordfile
}

client pass {
    from: 127.0.0.0/8 port 1-65535 to: 0.0.0.0/0
}

socks pass {
    from: 0.0.0.0/0 to: 0.0.0.0/0
    protocol: tcp udp
}

dante/sockd.conf_wg (new file, 28 lines)
@@ -0,0 +1,28 @@
debug: 0
logoutput: stderr
internal: 0.0.0.0 port = 1080
external: wg0
socksmethod: none
clientmethod: none
user.privileged: root
user.unprivileged: nobody

client pass {
    from: 192.168.1.0/24 port 1-65535 to: 0.0.0.0/0
    #clientmethod: rfc931 # match all idented users that also are in passwordfile
}

client pass {
    from: 172.0.0.0/8 port 1-65535 to: 0.0.0.0/0
    #clientmethod: rfc931 # match all idented users that also are in passwordfile
}

client pass {
    from: 127.0.0.0/8 port 1-65535 to: 0.0.0.0/0
}

socks pass {
    from: 0.0.0.0/0 to: 0.0.0.0/0
    protocol: tcp udp
}

dante/sockd.sh (new file, 8 lines)
@@ -0,0 +1,8 @@
#!/bin/sh
CFGFILE=/etc/sockd.conf
PIDFILE=/tmp/sockd.pid
WORKERS=10
#echo "Sleeping"
#sleep 5
echo "/usr/sbin/danted -f $CFGFILE -p $PIDFILE -N $WORKERS"
/usr/sbin/danted -f $CFGFILE -p $PIDFILE -N $WORKERS
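
A quick end-to-end check of the proxy from the host can be done in Python (a sketch: it assumes the container publishes port 1080 on localhost and that requests is installed with SOCKS support, i.e. `pip install requests[socks]`; ifconfig.me is just an arbitrary IP-echo service):

    import requests

    proxies = {
        "http": "socks5://127.0.0.1:1080",   # dante listens on 1080 (EXPOSE above)
        "https": "socks5://127.0.0.1:1080",
    }
    # If traffic egresses via tun0/wg0, this prints the VPN's IP, not the host's.
    print(requests.get("https://ifconfig.me", proxies=proxies, timeout=10).text)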

fitbit-collect/Dockerfile (new file, 14 lines)
@@ -0,0 +1,14 @@
FROM alpine:latest

RUN echo "http://dl-cdn.alpinelinux.org/alpine/edge/testing/" >> /etc/apk/repositories

RUN apk add --no-cache python3 && \
    python3 -m ensurepip && \
    rm -r /usr/lib/python*/ensurepip && \
    pip3 install --upgrade pip setuptools && \
    if [ ! -e /usr/bin/pip ]; then ln -s pip3 /usr/bin/pip ; fi && \
    if [ ! -e /usr/bin/python ]; then ln -sf /usr/bin/python3 /usr/bin/python; fi
ADD requirements.txt .
RUN pip3 install -r requirements.txt
ADD * ./
ENTRYPOINT ["python","./run_collect.py"]

fitbit-collect/README.md (new file, 1 line)
@@ -0,0 +1 @@
[](http://droneci.service.dc1.consul/sstent/fitbit-collect)

fitbit-collect/client_details.json (new file, 1 line)
@@ -0,0 +1 @@
{"client_id": "22BQMP", "client_secret": "280a9e3702af04f687a84862c3f6f6ac"}

fitbit-collect/credentials.json (new file, 1 line)
@@ -0,0 +1 @@
{"installed":{"client_id":"182877671696-qj1oq6pi50s6v7nk16m59ulmg28klo0r.apps.googleusercontent.com","project_id":"quickstart-1588344492360","auth_uri":"https://accounts.google.com/o/oauth2/auth","token_uri":"https://oauth2.googleapis.com/token","auth_provider_x509_cert_url":"https://www.googleapis.com/oauth2/v1/certs","client_secret":"oSI3LMMY9caNiGgH0NKSO3oS","redirect_uris":["urn:ietf:wg:oauth:2.0:oob","http://localhost"]}}

fitbit-collect/fitbit_data.json (new file, 1 line)
@@ -0,0 +1 @@
{"weight": [{"bmi": 28.25, "date": "2020-04-02", "fat": 25.09000015258789, "logId": 1585811810000, "source": "Aria", "time": "07:16:50", "weight": 214}, {"bmi": 28.15, "date": "2020-04-03", "fat": 25.065000534057617, "logId": 1585899007000, "source": "Aria", "time": "07:30:07", "weight": 213.2}, {"bmi": 27.96, "date": "2020-04-04", "fat": 24.951000213623047, "logId": 1585987894000, "source": "Aria", "time": "08:11:34", "weight": 211.8}, {"bmi": 27.86, "date": "2020-04-05", "fat": 24.893999099731445, "logId": 1586075240000, "source": "Aria", "time": "08:27:20", "weight": 211}, {"bmi": 28.15, "date": "2020-04-07", "fat": 24.836999893188477, "logId": 1586243500000, "source": "Aria", "time": "07:11:40", "weight": 213.2}, {"bmi": 28.15, "date": "2020-04-08", "fat": 24.450000762939453, "logId": 1586330108000, "source": "Aria", "time": "07:15:08", "weight": 213.2}, {"bmi": 27.99, "date": "2020-04-09", "fat": 24.375999450683594, "logId": 1586420024000, "source": "Aria", "time": "08:13:44", "weight": 212}, {"bmi": 27.92, "date": "2020-04-10", "fat": 24.42300033569336, "logId": 1586502607000, "source": "Aria", "time": "07:10:07", "weight": 211.5}, {"bmi": 27.87, "date": "2020-04-11", "fat": 24.47100067138672, "logId": 1586589049000, "source": "Aria", "time": "07:10:49", "weight": 211.2}, {"bmi": 27.76, "date": "2020-04-12", "fat": 24.447999954223633, "logId": 1586676059000, "source": "Aria", "time": "07:20:59", "weight": 210.3}, {"bmi": 27.92, "date": "2020-04-13", "fat": 24.461000442504883, "logId": 1586764670000, "source": "Aria", "time": "07:57:50", "weight": 211.5}, {"bmi": 27.96, "date": "2020-04-14", "fat": 24.388999938964844, "logId": 1586849081000, "source": "Aria", "time": "07:24:41", "weight": 211.8}, {"bmi": 27.91, "date": "2020-04-15", "fat": 24.343000411987305, "logId": 1586935775000, "source": "Aria", "time": "07:29:35", "weight": 211.4}, {"bmi": 27.74, "date": "2020-04-16", "fat": 24.270999908447266, "logId": 1587021443000, "source": "Aria", "time": "07:17:23", "weight": 210.1}, {"bmi": 27.71, "date": "2020-04-17", "fat": 24.204999923706055, "logId": 1587108701000, "source": "Aria", "time": "07:31:41", "weight": 209.9}, {"bmi": 27.59, "date": "2020-04-18", "fat": 24.158000946044922, "logId": 1587194397000, "source": "Aria", "time": "07:19:57", "weight": 209}, {"bmi": 27.62, "date": "2020-04-19", "fat": 24.12299919128418, "logId": 1587284042000, "source": "Aria", "time": "08:14:02", "weight": 209.2}, {"bmi": 27.91, "date": "2020-04-20", "fat": 24.052000045776367, "logId": 1587367302000, "source": "Aria", "time": "07:21:42", "weight": 211.4}]}

fitbit-collect/gather_keys_oauth2.py (new file, 158 lines)
@@ -0,0 +1,158 @@
#!/usr/bin/env python

"""
Note: This file was adapted from the unofficial Python Fitbit client Git repo:
https://raw.githubusercontent.com/orcasgit/python-fitbit/master/gather_keys_oauth2.py
"""
import cherrypy
import os
import sys
import threading
import traceback
import webbrowser
import json

from urllib.parse import urlparse, parse_qs
from base64 import b64encode
from fitbit.api import Fitbit, FitbitOauth2Client
from oauthlib.oauth2.rfc6749.errors import MismatchingStateError, MissingTokenError

CLIENT_DETAILS_FILE = 'client_details.json'  # configuration for the client
USER_DETAILS_FILE = 'user_details.json'  # user details file


class OAuth2Server:
    def __init__(self, client_id, client_secret,
                 redirect_uri='http://127.0.0.1:8080/'):
        """ Initialize the FitbitOauth2Client """
        self.success_html = """
            <h1>You are now authorized to access the Fitbit API!</h1>
            <br/><h3>You can close this window</h3>"""
        self.failure_html = """
            <h1>ERROR: %s</h1><br/><h3>You can close this window</h3>%s"""

        self.fitbit = Fitbit(
            client_id,
            client_secret,
            redirect_uri=redirect_uri,
            timeout=10,
        )

        self.redirect_uri = redirect_uri
        self.oauth = FitbitOauth2Client(client_id, client_secret)

    def headless_authorize(self):
        """
        Authorize without a display using only a TTY.
        """
        url, _ = self.oauth.authorize_token_url(redirect_uri=self.redirect_uri)
        # Ask the user to open this url on a system with a browser
        print('\n-------------------------------------------------------------------------')
        print('\t\tOpen the below URL in your browser\n')
        print(url)
        print('\n-------------------------------------------------------------------------\n')
        print('NOTE: After authenticating on the Fitbit website, you will be redirected to a URL which ')
        print('throws an ERROR. This is expected! Just copy the full redirected URL here.\n')
        redirected_url = input('Full redirected URL: ')
        params = parse_qs(urlparse(redirected_url).query)
        print(params['code'][0])
        self.authenticate_code(code=params['code'][0])

    def browser_authorize(self):
        """
        Open a browser to the authorization url and spool up a CherryPy
        server to accept the response
        """
        url, _ = self.fitbit.client.authorize_token_url()
        # Open the web browser in a new thread for command-line browser support
        threading.Timer(1, webbrowser.open, args=(url,)).start()
        print()
        print('URL for authenticating is:')
        print(url)
        print()

        # Listen on the redirect_uri port (on all interfaces).
        urlparams = urlparse(self.redirect_uri)
        cherrypy.config.update({'server.socket_host': '0.0.0.0',
                                'server.socket_port': urlparams.port})

        cherrypy.quickstart(self)

    def authenticate_code(self, code=None):
        """
        Final stage of authentication using the code from Fitbit.
        """
        try:
            self.oauth.fetch_access_token(code, self.redirect_uri)
        except MissingTokenError:
            error = self._fmt_failure(
                'Missing access token parameter.</br>Please check that '
                'you are using the correct client_secret'
            )
        except MismatchingStateError:
            error = self._fmt_failure('CSRF Warning! Mismatching state')

    @cherrypy.expose
    def index(self, state, code=None, error=None):
        """
        Receive a Fitbit response containing a verification code. Use the code
        to fetch the access_token.
        """
        error = None
        if code:
            try:
                self.fitbit.client.fetch_access_token(code)
            except MissingTokenError:
                error = self._fmt_failure(
                    'Missing access token parameter.</br>Please check that '
                    'you are using the correct client_secret')
            except MismatchingStateError:
                error = self._fmt_failure('CSRF Warning! Mismatching state')
        else:
            error = self._fmt_failure('Unknown error while authenticating')
        # Use a thread to shutdown cherrypy so we can return HTML first
        self._shutdown_cherrypy()
        return error if error else self.success_html

    def _fmt_failure(self, message):
        tb = traceback.format_tb(sys.exc_info()[2])
        tb_html = '<pre>%s</pre>' % ('\n'.join(tb)) if tb else ''
        return self.failure_html % (message, tb_html)

    def _shutdown_cherrypy(self):
        """ Shutdown cherrypy in one second, if it's running """
        if cherrypy.engine.state == cherrypy.engine.states.STARTED:
            threading.Timer(1, cherrypy.engine.exit).start()


if __name__ == '__main__':
    if len(sys.argv) != 3:
        print("Arguments: client_id and client_secret")
        sys.exit(1)

    client_id = sys.argv[1]
    client_secret = sys.argv[2]
    server = OAuth2Server(client_id, client_secret)
    # server.headless_authorize()
    server.browser_authorize()

    profile = server.fitbit.user_profile_get()
    print('You are authorized to access data for the user: {}'.format(
        profile['user']['fullName']))

    print('TOKEN\n=====\n')
    for key, value in server.fitbit.client.session.token.items():
        print('{} = {}'.format(key, value))

    print("Writing client details to file for usage on next collection.")
    client_details = {'client_id': client_id, 'client_secret': client_secret}  # Details of application
    with open(CLIENT_DETAILS_FILE, 'w') as f:
        json.dump(client_details, f)

    print("Writing user details to file for usage on next collection.")
    with open(USER_DETAILS_FILE, 'w') as f:
        json.dump(server.fitbit.client.session.token, f)
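
Once this script has run, later jobs can rebuild an authenticated client from the saved files instead of repeating the browser flow; a minimal sketch, using the same Fitbit constructor arguments that run_collect.py uses below:

    import json
    from fitbit import Fitbit

    with open('client_details.json') as f:
        client = json.load(f)
    with open('user_details.json') as f:
        token = json.load(f)

    # oauth2=True with the stored tokens skips interactive authorization.
    fb = Fitbit(client['client_id'], client['client_secret'], oauth2=True,
                access_token=token['access_token'],
                refresh_token=token['refresh_token'],
                expires_at=token['expires_at'])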

fitbit-collect/googlesheet.py (new file, 71 lines)
@@ -0,0 +1,71 @@
from __future__ import print_function
import pickle
import os.path
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request

# If modifying these scopes, delete the file token.pickle.
SCOPES = ['https://www.googleapis.com/auth/spreadsheets']

# The ID and range of a sample spreadsheet.
SAMPLE_SPREADSHEET_ID = '1YkMf_3m2YroHhtyS2FrdLzm3HnJDk4-q8r4cSagYrrg'
SAMPLE_RANGE_NAME = 'fitbit_export!A2:E'


def UpdateSheet():
    """Shows basic usage of the Sheets API.
    Prints values from a sample spreadsheet, then appends a test row.
    """
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    service = build('sheets', 'v4', credentials=creds)

    # Call the Sheets API
    sheet = service.spreadsheets()
    result = sheet.values().get(spreadsheetId=SAMPLE_SPREADSHEET_ID,
                                range=SAMPLE_RANGE_NAME).execute()
    values = result.get('values', [])

    if not values:
        print('No data found.')
    else:
        print('Name, Major:')
        for row in values:
            # Print columns A and E, which correspond to indices 0 and 4.
            print('%s, %s' % (row[0], row[4]))

    values = [
        [
            "TEST", "TEST"
        ],
    ]
    body = {
        'values': values
    }
    result = service.spreadsheets().values().append(
        spreadsheetId=SAMPLE_SPREADSHEET_ID, range=SAMPLE_RANGE_NAME,
        valueInputOption='RAW', body=body).execute()
    print('{0} cells appended.'.format(result
                                       .get('updates')
                                       .get('updatedCells')))


if __name__ == '__main__':
    UpdateSheet()

fitbit-collect/jsonconsul.py (new file, 47 lines)
@@ -0,0 +1,47 @@
import consul
import json

USER_DETAILS_FILE = 'user_details.json'  # user details file


c = consul.Consul(host='192.168.1.237')

# def _get_user_details():
#     """
#     The specific user that you want to retrieve data for.
#     """
#     with open(USER_DETAILS_FILE) as f:
#         fitbit_user = json.load(f)
#     access_token = fitbit_user['access_token']
#     refresh_token = fitbit_user['refresh_token']
#     expires_at = fitbit_user['expires_at']
#
#     return access_token, refresh_token, expires_at


def _set_user_details(access_token, refresh_token, expires_at):
    c.kv.put('access_token', access_token)
    c.kv.put('refresh_token', refresh_token)
    c.kv.put('expires_at', str(expires_at))


def _get_user_details():
    access_token = c.kv.get('access_token')[1]["Value"].decode("utf-8")
    refresh_token = c.kv.get('refresh_token')[1]["Value"].decode("utf-8")
    expires_at = c.kv.get('expires_at')[1]["Value"].decode("utf-8")
    return access_token, refresh_token, expires_at


# access_token, refresh_token, expires_at = _get_user_details()
# store_user_details(access_token, refresh_token, expires_at)

access_token, refresh_token, expires_at = _get_user_details()

print(access_token)
print(expires_at)
print(refresh_token)

# print(access_token[1]["Value"].decode("utf-8"))
# print(refresh_token[1]["Value"].decode("utf-8"))
# print(expires_at[1]["Value"].decode("utf-8"))
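
For reference, python-consul's kv.get returns an (index, entry) tuple and stores values as raw bytes under "Value", which is why the getters above index [1]["Value"] and decode; a minimal round-trip sketch (assuming a reachable Consul agent):

    import consul

    c = consul.Consul(host='127.0.0.1')      # assumption: local agent
    c.kv.put('demo_key', 'demo_value')
    index, entry = c.kv.get('demo_key')      # entry is None if the key is absent
    print(entry['Value'].decode('utf-8'))    # -> 'demo_value'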

fitbit-collect/plot.py (new file, 58 lines)
@@ -0,0 +1,58 @@
import json
from datetime import datetime

import matplotlib.pyplot as plt
import numpy as np


def plot_steps(data):
    steps = data['activities-log-steps']

    # '%Y-%m-%d' (lowercase m: month); '%M' would parse minutes.
    x = [datetime.strptime(d['dateTime'], '%Y-%m-%d').strftime("%A") for d in steps]
    y = [float(d['value']) for d in steps]

    plt.bar(x, y)
    plt.title('Steps last 7 days')
    plt.show()


def plot_sleep(data):
    sleep = data['sleep']
    x = [datetime.strptime(d['dateOfSleep'], '%Y-%m-%d').strftime("%A") for d in sleep][::-1]

    deep = [float(d['levels']['summary']['deep']['minutes'])/60.0 for d in sleep][::-1]
    light = [float(d['levels']['summary']['light']['minutes'])/60.0 for d in sleep][::-1]
    rem = [float(d['levels']['summary']['rem']['minutes'])/60.0 for d in sleep][::-1]
    awake = [float(d['levels']['summary']['wake']['minutes'])/60.0 for d in sleep][::-1]

    barWidth = 0.15

    r1 = np.arange(len(deep))
    r2 = [x + barWidth for x in r1]
    r3 = [x + barWidth for x in r2]
    r4 = [x + barWidth for x in r3]

    # Make the plot
    plt.bar(r1, awake, color='#ffa600', width=barWidth, edgecolor='white', label='Awake')
    plt.bar(r2, rem, color='#ff6361', width=barWidth, edgecolor='white', label='REM')
    plt.bar(r3, light, color='#bc5090', width=barWidth, edgecolor='white', label='Light')
    plt.bar(r4, deep, color='#003f5c', width=barWidth, edgecolor='white', label='Deep')

    # Add xticks in the middle of the grouped bars
    plt.xlabel('Day', fontweight='bold')
    plt.ylabel('Hours', fontweight='bold')
    plt.xticks([r + barWidth for r in range(len(deep))], x)

    # Create legend & show graphic
    plt.legend()
    plt.show()


if __name__ == '__main__':
    with open('fitbit_data.json') as f:
        data = json.load(f)

    #plot_steps(data)
    plot_sleep(data)

fitbit-collect/requirements.txt (new file, 7 lines)
@@ -0,0 +1,7 @@
fitbit==0.3.1
google-api-python-client
google-auth-httplib2
google-auth-oauthlib
python-consul
# cherrypy==18.6.0
# matplotlib==3.2.1

fitbit-collect/run_collect.py (new file, 260 lines)
@@ -0,0 +1,260 @@
"""
Script to retrieve Fitbit data for the given user
"""
from __future__ import print_function

import json
from pprint import pprint
from fitbit import Fitbit
import pickle
import os.path
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
from datetime import date, timedelta

import consul

dt = date.today() - timedelta(7)


c = consul.Consul(host='consul.service.dc1.consul')

# If modifying these scopes, delete the file token.pickle.
SCOPES = ['https://www.googleapis.com/auth/spreadsheets']

# The ID and range of a sample spreadsheet.
SAMPLE_SPREADSHEET_ID = '1YkMf_3m2YroHhtyS2FrdLzm3HnJDk4-q8r4cSagYrrg'
SAMPLE_RANGE_NAME = 'fitbit_export!A2:E'


FITBIT_API = 'https://api.fitbit.com'

CLIENT_DETAILS_FILE = 'client_details.json'  # configuration for the client
USER_DETAILS_FILE = 'user_details.json'  # user details file

RESULT_FILE = 'fitbit_data.json'  # The place where we will place the results


# def refresh_callback(token):
#     """
#     This method is only called when the authentication token is out of date
#     and a new token has been issued which needs to be stored somewhere for
#     the next run
#     param (token): A dictionary with the new details
#     """
#     print('CALLBACK: The token has been updated since last run')
#     with open(USER_DETAILS_FILE, 'w') as f:
#         json.dump(token, f)
#     print('Successfully written updated refresh token')

def refresh_callback(token):
    c.kv.put('access_token', token["access_token"])
    c.kv.put('refresh_token', token["refresh_token"])
    c.kv.put('expires_at', str(token["expires_at"]))

# def _get_user_details():
#     """
#     The specific user that you want to retrieve data for.
#     """
#     with open(USER_DETAILS_FILE) as f:
#         fitbit_user = json.load(f)
#     access_token = fitbit_user['access_token']
#     refresh_token = fitbit_user['refresh_token']
#     expires_at = fitbit_user['expires_at']
#
#     print(type(expires_at))
#     return access_token, refresh_token, expires_at


def _get_user_details():
    access_token = c.kv.get('access_token')[1]["Value"].decode("utf-8")
    refresh_token = c.kv.get('refresh_token')[1]["Value"].decode("utf-8")
    expires_at = float(c.kv.get('expires_at')[1]["Value"].decode("utf-8"))
    return access_token, refresh_token, expires_at


def _get_client_details():
    """The client is the application which requires access"""
    with open(CLIENT_DETAILS_FILE) as f:
        client_details = json.load(f)
    client_id = client_details['client_id']
    client_secret = client_details['client_secret']

    return client_id, client_secret


def _write_results(json_response):
    with open(RESULT_FILE, 'w') as f:
        json.dump(json_response, f)
    print(f'Successfully written result data to file {RESULT_FILE}')


def _get_api_call():
    """
    Date API in this format:
    GET /<api-version>/user/<user-id>/<resource-path>/date/<base-date>/<end-date>.<response-format>
    <user-id> can be left as - to get the current user
    date format is '%Y-%m-%d', for example 2020-04-01
    Some examples below
    """
    # # Steps in the last 7 days
    # steps_last_seven_days = '/1/user/-/activities/log/steps/date/today/7d.json'
    # # Steps between two dates
    # steps_dates = '/1/user/-/activities/log/steps/date/2020-04-12/2020-04-18.json'
    # # calories last 7 days
    # calories_last_seven_days = '/1/user/-/activities/log/calories/date/today/7d.json'
    # # profile info
    # profile_info = '/1/user/-/profile.json'
    # # Sleep between two dates
    # sleep_dates = '/1.2/user/-/sleep/date/2020-04-13/2020-04-17.json'
    # uses the module-level date_start / date_end set in __main__
    weight = '/1.2/user/-/body/log/weight/date/' + str(date_start) + '/' + str(date_end) + '.json'

    return weight


def ReadSheet():
    """Shows basic usage of the Sheets API.
    Returns the current values from the sample spreadsheet.
    """
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    service = build('sheets', 'v4', credentials=creds)

    # Call the Sheets API
    sheet = service.spreadsheets()
    result = sheet.values().get(spreadsheetId=SAMPLE_SPREADSHEET_ID,
                                range=SAMPLE_RANGE_NAME).execute()
    curr_values = result.get('values', [])
    return curr_values


def UpdateSheet(values):
    """Appends the given rows to the sample spreadsheet."""
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    service = build('sheets', 'v4', credentials=creds)

    # Call the Sheets API
    # sheet = service.spreadsheets()
    # result = sheet.values().get(spreadsheetId=SAMPLE_SPREADSHEET_ID,
    #                             range=SAMPLE_RANGE_NAME).execute()
    # values = result.get('values', [])

    # if not values:
    #     print('No data found.')
    # else:
    #     print('Name, Major:')
    #     for row in values:
    #         # Print columns A and E, which correspond to indices 0 and 4.
    #         print('%s, %s' % (row[0], row[4]))

    # values = [
    #     [
    #         "TEST", "TEST"
    #     ],
    # ]
    body = {
        'values': values
    }
    result = service.spreadsheets().values().append(
        spreadsheetId=SAMPLE_SPREADSHEET_ID, range=SAMPLE_RANGE_NAME,
        valueInputOption='RAW', body=body).execute()
    print('{0} cells appended.'.format(result
                                       .get('updates')
                                       .get('updatedCells')))


def run():
    client_id, client_secret = _get_client_details()
    access_token, refresh_token, expires_at = _get_user_details()

    # print(f'Running Fitbit request with details: {client_id} {client_secret}'
    #       f' {access_token} {refresh_token} {expires_at}')
    auth2_client = Fitbit(client_id, client_secret, oauth2=True,
                          access_token=access_token,
                          refresh_token=refresh_token, expires_at=expires_at,
                          refresh_cb=refresh_callback)

    fitbit_url = FITBIT_API + _get_api_call()
    json_response = auth2_client.make_request(fitbit_url)
    # _write_results(json_response)
    # pprint(json_response["weight"])
    values = []
    sheet_values = ReadSheet()
    # pprint(sheet_values)

    # only append rows whose date is not already in the sheet
    for row in json_response["weight"]:
        row_exists = 0
        for sheetrow in sheet_values:
            if sheetrow[0] == row["date"]:
                row_exists = 1
        if row_exists == 0:
            if 'fat' in row.keys():
                row_list = [row["date"], row["weight"], row["fat"], row["bmi"]]
            else:
                row_list = [row["date"], row["weight"], "0", row["bmi"]]
            values.append(row_list)
    pprint(date_start)
    pprint(values)
    UpdateSheet(values)
    # sheet_values = ReadSheet()


if __name__ == '__main__':

    date_start = date.today() - timedelta(31)
    # date_end = date.today()
    # run()

    ### Bulk import
    # date_start = date(2015, 8, 1)
    end_date = date(2019, 10, 1)

    # Walk backwards in ~monthly windows until we pass end_date ('>' rather
    # than '!=', since the 29-day steps never land exactly on end_date).
    while date_start > end_date:
        # date_end = date.today() + timedelta(30)
        date_end = date_start + timedelta(30)
        # date_start = date.today() - timedelta(31)
        run()
        # time.sleep(60)
        date_start = date_start - timedelta(29)

fitbit-collect/token.pickle (new file, binary)
Binary file not shown.

fitbit-collect/user_details.json (new file, 1 line)
@@ -0,0 +1 @@
{"access_token": "eyJhbGciOiJIUzI1NiJ9.eyJhdWQiOiIyMkJRTVAiLCJzdWIiOiIyRk5WTkYiLCJpc3MiOiJGaXRiaXQiLCJ0eXAiOiJhY2Nlc3NfdG9rZW4iLCJzY29wZXMiOiJyc29jIHJhY3QgcnNldCBybG9jIHJ3ZWkgcmhyIHJudXQgcnBybyByc2xlIiwiZXhwIjoxNTg4MzcxNjE0LCJpYXQiOjE1ODgzNDI4MTR9.yi2T21Qt8L9kvY_9QLLh_iOaYuz1BP4ywuILmJVOQhk", "expires_in": 28800, "refresh_token": "c2e0e5c952e7063ab7bd17745b6df50793afb281f0b6172a0abc88d98e279eca", "scope": ["activity", "nutrition", "social", "weight", "heartrate", "profile", "settings", "sleep", "location"], "token_type": "Bearer", "user_id": "2FNVNF", "expires_at": 1588371583.8789062}

garminexport/Dockerfile (new file, 16 lines)
@@ -0,0 +1,16 @@
FROM alpine:latest

RUN echo "http://dl-cdn.alpinelinux.org/alpine/edge/testing/" >> /etc/apk/repositories

RUN apk add --no-cache python3 && \
    python3 -m ensurepip && \
    rm -r /usr/lib/python*/ensurepip && \
    pip3 install --upgrade pip setuptools && \
    if [ ! -e /usr/bin/pip ]; then ln -s pip3 /usr/bin/pip ; fi && \
    if [ ! -e /usr/bin/python ]; then ln -sf /usr/bin/python3 /usr/bin/python; fi
RUN pip3 install garminexport
RUN mkdir /activities



ENTRYPOINT ["/usr/bin/garmin-backup"]

garminexport/LICENSE (new file, 202 lines)
@@ -0,0 +1,202 @@
                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS

   APPENDIX: How to apply the Apache License to your work.

      To apply the Apache License to your work, attach the following
      boilerplate notice, with the fields enclosed by brackets "{}"
      replaced with your own identifying information. (Don't include
      the brackets!) The text should be enclosed in the appropriate
      comment syntax for the file format. We also recommend that a
      file or class name and description of purpose be included on the
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright {yyyy} {name of copyright owner}

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

garminexport/Makefile (new file, 10 lines)
@@ -0,0 +1,10 @@

venv:
	pipenv install

clean:
	find -name '*~' -exec rm {} \;
	find -name '*pyc' -exec rm {} \;

test:
	nosetests --verbose --with-coverage --cover-package=garminexport --cover-branches

garminexport/Pipfile (new file, 14 lines)
@@ -0,0 +1,14 @@
[[source]]
name = "pypi"
url = "https://pypi.org/simple"
verify_ssl = true

[packages]
"garminexport" = {path = ".", editable = true}
requests = ">=2.0,<3"
python-dateutil = ">=2.0,<3"

[dev-packages]
nose = "~=1.3"
coverage = "~=4.2"
mock = "~=2.0"

garminexport/Pipfile.lock (generated, new file, 206 lines)
@@ -0,0 +1,206 @@
{
    "_meta": {
        "hash": {
            "sha256": "59e49afac1bfe0c2329a345793c399f46a2c57f14bf3abb6efa419c736c4009c"
        },
        "pipfile-spec": 6,
        "requires": {},
        "sources": [
            {
                "name": "pypi",
                "url": "https://pypi.org/simple",
                "verify_ssl": true
            }
        ]
    },
    "default": {
        "certifi": {
            "hashes": [
                "sha256:1d987a998c75633c40847cc966fcf5904906c920a7f17ef374f5aa4282abd304",
                "sha256:51fcb31174be6e6664c5f69e3e1691a2d72a1a12e90f872cbdb1567eb47b6519"
            ],
            "version": "==2020.4.5.1"
        },
        "chardet": {
            "hashes": [
                "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
                "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
            ],
            "version": "==3.0.4"
        },
        "coverage": {
            "hashes": [
                "sha256:08907593569fe59baca0bf152c43f3863201efb6113ecb38ce7e97ce339805a6",
                "sha256:0be0f1ed45fc0c185cfd4ecc19a1d6532d72f86a2bac9de7e24541febad72650",
                "sha256:141f08ed3c4b1847015e2cd62ec06d35e67a3ac185c26f7635f4406b90afa9c5",
                "sha256:19e4df788a0581238e9390c85a7a09af39c7b539b29f25c89209e6c3e371270d",
                "sha256:23cc09ed395b03424d1ae30dcc292615c1372bfba7141eb85e11e50efaa6b351",
                "sha256:245388cda02af78276b479f299bbf3783ef0a6a6273037d7c60dc73b8d8d7755",
                "sha256:331cb5115673a20fb131dadd22f5bcaf7677ef758741312bee4937d71a14b2ef",
                "sha256:386e2e4090f0bc5df274e720105c342263423e77ee8826002dcffe0c9533dbca",
                "sha256:3a794ce50daee01c74a494919d5ebdc23d58873747fa0e288318728533a3e1ca",
                "sha256:60851187677b24c6085248f0a0b9b98d49cba7ecc7ec60ba6b9d2e5574ac1ee9",
                "sha256:63a9a5fc43b58735f65ed63d2cf43508f462dc49857da70b8980ad78d41d52fc",
                "sha256:6b62544bb68106e3f00b21c8930e83e584fdca005d4fffd29bb39fb3ffa03cb5",
                "sha256:6ba744056423ef8d450cf627289166da65903885272055fb4b5e113137cfa14f",
                "sha256:7494b0b0274c5072bddbfd5b4a6c6f18fbbe1ab1d22a41e99cd2d00c8f96ecfe",
                "sha256:826f32b9547c8091679ff292a82aca9c7b9650f9fda3e2ca6bf2ac905b7ce888",
                "sha256:93715dffbcd0678057f947f496484e906bf9509f5c1c38fc9ba3922893cda5f5",
                "sha256:9a334d6c83dfeadae576b4d633a71620d40d1c379129d587faa42ee3e2a85cce",
                "sha256:af7ed8a8aa6957aac47b4268631fa1df984643f07ef00acd374e456364b373f5",
                "sha256:bf0a7aed7f5521c7ca67febd57db473af4762b9622254291fbcbb8cd0ba5e33e",
                "sha256:bf1ef9eb901113a9805287e090452c05547578eaab1b62e4ad456fcc049a9b7e",
                "sha256:c0afd27bc0e307a1ffc04ca5ec010a290e49e3afbe841c5cafc5c5a80ecd81c9",
                "sha256:dd579709a87092c6dbee09d1b7cfa81831040705ffa12a1b248935274aee0437",
                "sha256:df6712284b2e44a065097846488f66840445eb987eb81b3cc6e4149e7b6982e1",
                "sha256:e07d9f1a23e9e93ab5c62902833bf3e4b1f65502927379148b6622686223125c",
                "sha256:e2ede7c1d45e65e209d6093b762e98e8318ddeff95317d07a27a2140b80cfd24",
                "sha256:e4ef9c164eb55123c62411f5936b5c2e521b12356037b6e1c2617cef45523d47",
                "sha256:eca2b7343524e7ba246cab8ff00cab47a2d6d54ada3b02772e908a45675722e2",
                "sha256:eee64c616adeff7db37cc37da4180a3a5b6177f5c46b187894e633f088fb5b28",
                "sha256:ef824cad1f980d27f26166f86856efe11eff9912c4fed97d3804820d43fa550c",
                "sha256:efc89291bd5a08855829a3c522df16d856455297cf35ae827a37edac45f466a7",
                "sha256:fa964bae817babece5aa2e8c1af841bebb6d0b9add8e637548809d040443fee0",
                "sha256:ff37757e068ae606659c28c3bd0d923f9d29a85de79bf25b2b34b148473b5025"
            ],
            "version": "==4.5.4"
        },
        "future": {
            "hashes": [
                "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"
            ],
            "version": "==0.18.2"
        },
        "garminexport": {
            "editable": true,
            "path": "."
        },
        "idna": {
            "hashes": [
                "sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb",
                "sha256:a068a21ceac8a4d63dbfd964670474107f541babbd2250d61922f029858365fa"
            ],
            "version": "==2.9"
        },
        "mock": {
            "hashes": [
                "sha256:5ce3c71c5545b472da17b72268978914d0252980348636840bd34a00b5cc96c1",
                "sha256:b158b6df76edd239b8208d481dc46b6afd45a846b7812ff0ce58971cf5bc8bba"
            ],
            "version": "==2.0.0"
        },
        "nose": {
            "hashes": [
                "sha256:9ff7c6cc443f8c51994b34a667bbcf45afd6d945be7477b52e97516fd17c53ac",
                "sha256:dadcddc0aefbf99eea214e0f1232b94f2fa9bd98fa8353711dacb112bfcbbb2a",
                "sha256:f1bffef9cbc82628f6e7d7b40d7e255aefaa1adb6a1b1d26c69a8b79e6208a98"
            ],
            "version": "==1.3.7"
        },
        "pbr": {
            "hashes": [
                "sha256:07f558fece33b05caf857474a366dfcc00562bca13dd8b47b2b3e22d9f9bf55c",
                "sha256:579170e23f8e0c2f24b0de612f71f648eccb79fb1322c814ae6b3c07b5ba23e8"
            ],
            "version": "==5.4.5"
        },
        "python-dateutil": {
            "hashes": [
                "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c",
                "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"
            ],
            "index": "pypi",
            "version": "==2.8.1"
        },
        "requests": {
            "hashes": [
                "sha256:43999036bfa82904b6af1d99e4882b560e5e2c68e5c4b0aa03b655f3d7d73fee",
                "sha256:b3f43d496c6daba4493e7c431722aeb7dbc6288f52a6e04e7b6023b0247817e6"
            ],
            "index": "pypi",
            "version": "==2.23.0"
        },
        "six": {
            "hashes": [
                "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a",
                "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c"
            ],
            "version": "==1.14.0"
        },
        "urllib3": {
            "hashes": [
                "sha256:2f3db8b19923a873b3e5256dc9c2dedfa883e33d87c690d9c7913e1f40673cdc",
                "sha256:87716c2d2a7121198ebcb7ce7cccf6ce5e9ba539041cfbaeecfb641dc0bf6acc"
            ],
            "version": "==1.25.8"
        }
    },
    "develop": {
        "coverage": {
            "hashes": [
                "sha256:08907593569fe59baca0bf152c43f3863201efb6113ecb38ce7e97ce339805a6",
                "sha256:0be0f1ed45fc0c185cfd4ecc19a1d6532d72f86a2bac9de7e24541febad72650",
                "sha256:141f08ed3c4b1847015e2cd62ec06d35e67a3ac185c26f7635f4406b90afa9c5",
                "sha256:19e4df788a0581238e9390c85a7a09af39c7b539b29f25c89209e6c3e371270d",
                "sha256:23cc09ed395b03424d1ae30dcc292615c1372bfba7141eb85e11e50efaa6b351",
                "sha256:245388cda02af78276b479f299bbf3783ef0a6a6273037d7c60dc73b8d8d7755",
                "sha256:331cb5115673a20fb131dadd22f5bcaf7677ef758741312bee4937d71a14b2ef",
                "sha256:386e2e4090f0bc5df274e720105c342263423e77ee8826002dcffe0c9533dbca",
                "sha256:3a794ce50daee01c74a494919d5ebdc23d58873747fa0e288318728533a3e1ca",
                "sha256:60851187677b24c6085248f0a0b9b98d49cba7ecc7ec60ba6b9d2e5574ac1ee9",
                "sha256:63a9a5fc43b58735f65ed63d2cf43508f462dc49857da70b8980ad78d41d52fc",
                "sha256:6b62544bb68106e3f00b21c8930e83e584fdca005d4fffd29bb39fb3ffa03cb5",
                "sha256:6ba744056423ef8d450cf627289166da65903885272055fb4b5e113137cfa14f",
                "sha256:7494b0b0274c5072bddbfd5b4a6c6f18fbbe1ab1d22a41e99cd2d00c8f96ecfe",
                "sha256:826f32b9547c8091679ff292a82aca9c7b9650f9fda3e2ca6bf2ac905b7ce888",
                "sha256:93715dffbcd0678057f947f496484e906bf9509f5c1c38fc9ba3922893cda5f5",
                "sha256:9a334d6c83dfeadae576b4d633a71620d40d1c379129d587faa42ee3e2a85cce",
                "sha256:af7ed8a8aa6957aac47b4268631fa1df984643f07ef00acd374e456364b373f5",
                "sha256:bf0a7aed7f5521c7ca67febd57db473af4762b9622254291fbcbb8cd0ba5e33e",
                "sha256:bf1ef9eb901113a9805287e090452c05547578eaab1b62e4ad456fcc049a9b7e",
                "sha256:c0afd27bc0e307a1ffc04ca5ec010a290e49e3afbe841c5cafc5c5a80ecd81c9",
                "sha256:dd579709a87092c6dbee09d1b7cfa81831040705ffa12a1b248935274aee0437",
                "sha256:df6712284b2e44a065097846488f66840445eb987eb81b3cc6e4149e7b6982e1",
                "sha256:e07d9f1a23e9e93ab5c62902833bf3e4b1f65502927379148b6622686223125c",
                "sha256:e2ede7c1d45e65e209d6093b762e98e8318ddeff95317d07a27a2140b80cfd24",
                "sha256:e4ef9c164eb55123c62411f5936b5c2e521b12356037b6e1c2617cef45523d47",
                "sha256:eca2b7343524e7ba246cab8ff00cab47a2d6d54ada3b02772e908a45675722e2",
                "sha256:eee64c616adeff7db37cc37da4180a3a5b6177f5c46b187894e633f088fb5b28",
                "sha256:ef824cad1f980d27f26166f86856efe11eff9912c4fed97d3804820d43fa550c",
                "sha256:efc89291bd5a08855829a3c522df16d856455297cf35ae827a37edac45f466a7",
                "sha256:fa964bae817babece5aa2e8c1af841bebb6d0b9add8e637548809d040443fee0",
                "sha256:ff37757e068ae606659c28c3bd0d923f9d29a85de79bf25b2b34b148473b5025"
            ],
            "version": "==4.5.4"
        },
        "mock": {
            "hashes": [
                "sha256:5ce3c71c5545b472da17b72268978914d0252980348636840bd34a00b5cc96c1",
                "sha256:b158b6df76edd239b8208d481dc46b6afd45a846b7812ff0ce58971cf5bc8bba"
            ],
            "version": "==2.0.0"
        },
        "nose": {
            "hashes": [
                "sha256:9ff7c6cc443f8c51994b34a667bbcf45afd6d945be7477b52e97516fd17c53ac",
                "sha256:dadcddc0aefbf99eea214e0f1232b94f2fa9bd98fa8353711dacb112bfcbbb2a",
                "sha256:f1bffef9cbc82628f6e7d7b40d7e255aefaa1adb6a1b1d26c69a8b79e6208a98"
            ],
            "version": "==1.3.7"
        },
        "pbr": {
            "hashes": [
                "sha256:07f558fece33b05caf857474a366dfcc00562bca13dd8b47b2b3e22d9f9bf55c",
                "sha256:579170e23f8e0c2f24b0de612f71f648eccb79fb1322c814ae6b3c07b5ba23e8"
            ],
            "version": "==5.4.5"
        },
        "six": {
            "hashes": [
                "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a",
                "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c"
            ],
            "version": "==1.14.0"
        }
    }
}

garminexport/README.md (new file, 1 line)
@@ -0,0 +1 @@
[](http://droneci.service.dc1.consul/sstent/garminexport)

garminexport/garminexport/__init__.py (new file, 0 lines)

garminexport/garminexport/backup.py (new file, 166 lines)
@@ -0,0 +1,166 @@
"""Module with methods useful when backing up activities.
"""
import codecs
import json
import logging
import os
from datetime import datetime

log = logging.getLogger(__name__)

supported_export_formats = ["json_summary", "json_details", "gpx", "tcx", "fit"]
"""The range of supported export formats for activities."""

format_suffix = {
    "json_summary": "_summary.json",
    "json_details": "_details.json",
    "gpx": ".gpx",
    "tcx": ".tcx",
    "fit": ".fit"
}
"""A table that maps export formats to their file format extensions."""

not_found_file = ".not_found"
"""A file that lists all tried but failed export attempts. The lines in
the file are the would-have-been file names, had the exports been successful.
An entry in the ``.not_found`` file is a strong indication of an
activity-format that simply doesn't exist and therefore should not be retried
on the next backup run. One such scenario is for manually created activities,
which cannot be exported to ``.fit`` format."""


def export_filename(activity, export_format):
    """Returns a destination file name to use for a given activity that is
    to be exported to a given format. Exported files follow this pattern:
    ``<timestamp>_<activity_id><suffix>``.
    For example: ``2015-02-17T05:45:00+00:00_123456789.tcx``

    :param activity: An activity tuple `(id, starttime)`
    :type activity: tuple of `(int, datetime)`
    :param export_format: The export format (see :attr:`export_formats`)
    :type export_format: str

    :return: The file name to use for the exported activity.
    :rtype: str
    """
    fn = "{time}_{id}{suffix}".format(
        id=activity[0],
        time=activity[1].isoformat(),
        suffix=format_suffix[export_format])
    return fn.replace(':', '_') if os.name == 'nt' else fn
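
# Illustrative only (a sketch): for activity
# (123456789, datetime(2015, 2, 17, 5, 45, tzinfo=timezone.utc)) and
# export_format "tcx", export_filename returns
# "2015-02-17T05:45:00+00:00_123456789.tcx" (":" becomes "_" on Windows,
# where colons are not valid in file names).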


def need_backup(activities, backup_dir, export_formats=None):
    """From a given set of activities, return all activities that haven't been
    backed up in a given set of export formats.

    Activities are considered already backed up if they, for each desired
    export format, have an activity file under the ``backup_dir`` *or*
    if the activity file is listed in the ``.not_found`` file in the backup
    directory.

    :param activities: A list of activity tuples `(id, starttime)`
    :type activities: list of tuples of `(int, datetime)`
    :param backup_dir: Destination directory for exported activities.
    :type backup_dir: str
    :keyword export_formats: Which format(s) to export to. Could be any
      of: 'json_summary', 'json_details', 'gpx', 'tcx', 'fit'.
    :type export_formats: list of str
    :return: All activities that need to be backed up.
    :rtype: set of tuples of `(int, datetime)`
    """
    need_backup = set()
    backed_up = os.listdir(backup_dir) + _not_found_activities(backup_dir)

    # get all activities missing at least one export format
    for activity in activities:
        activity_files = [export_filename(activity, f) for f in export_formats]
        if any(f not in backed_up for f in activity_files):
            need_backup.add(activity)
    return need_backup
|
||||
|
||||
|
||||
def _not_found_activities(backup_dir):
|
||||
# consider all entries in <backup_dir>/.not_found as backed up
|
||||
# (or rather, as tried but failed back ups)
|
||||
failed_activities = []
|
||||
_not_found = os.path.join(backup_dir, not_found_file)
|
||||
if os.path.isfile(_not_found):
|
||||
with open(_not_found, mode="r") as f:
|
||||
failed_activities = [line.strip() for line in f.readlines()]
|
||||
log.debug("%d tried but failed activities in %s", len(failed_activities), _not_found)
|
||||
return failed_activities
|
||||
|
||||
|
||||
def download(client, activity, retryer, backup_dir, export_formats=None):
|
||||
"""Exports a Garmin Connect activity to a given set of formats
|
||||
and saves the resulting file(s) to a given backup directory.
|
||||
In case a given format cannot be exported for the activity, the
|
||||
file name will be appended to the :attr:`not_found_file` in the
|
||||
backup directory (to prevent it from being retried on subsequent
|
||||
backup runs).
|
||||
|
||||
:param client: A :class:`garminexport.garminclient.GarminClient`
|
||||
instance that is assumed to be connected.
|
||||
:type client: :class:`garminexport.garminclient.GarminClient`
|
||||
:param activity: An activity tuple `(id, starttime)`
|
||||
:type activity: tuple of `(int, datetime)`
|
||||
:param retryer: A :class:`garminexport.retryer.Retryer` instance that
|
||||
will handle failed download attempts.
|
||||
:type retryer: :class:`garminexport.retryer.Retryer`
|
||||
:param backup_dir: Backup directory path (assumed to exist already).
|
||||
:type backup_dir: str
|
||||
:keyword export_formats: Which format(s) to export to. Could be any
|
||||
of: 'json_summary', 'json_details', 'gpx', 'tcx', 'fit'.
|
||||
:type export_formats: list of str
|
||||
"""
|
||||
id = activity[0]
|
||||
|
||||
if 'json_summary' in export_formats:
|
||||
log.debug("getting json summary for %s", id)
|
||||
|
||||
activity_summary = retryer.call(client.get_activity_summary, id)
|
||||
dest = os.path.join(
|
||||
backup_dir, export_filename(activity, 'json_summary'))
|
||||
with codecs.open(dest, encoding="utf-8", mode="w") as f:
|
||||
f.write(json.dumps(activity_summary, ensure_ascii=False, indent=4))
|
||||
|
||||
if 'json_details' in export_formats:
|
||||
log.debug("getting json details for %s", id)
|
||||
activity_details = retryer.call(client.get_activity_details, id)
|
||||
dest = os.path.join(backup_dir, export_filename(activity, 'json_details'))
|
||||
with codecs.open(dest, encoding="utf-8", mode="w") as f:
|
||||
f.write(json.dumps(activity_details, ensure_ascii=False, indent=4))
|
||||
|
||||
not_found_path = os.path.join(backup_dir, not_found_file)
|
||||
with open(not_found_path, mode="a") as not_found:
|
||||
if 'gpx' in export_formats:
|
||||
log.debug("getting gpx for %s", id)
|
||||
activity_gpx = retryer.call(client.get_activity_gpx, id)
|
||||
dest = os.path.join(backup_dir, export_filename(activity, 'gpx'))
|
||||
if activity_gpx is None:
|
||||
not_found.write(os.path.basename(dest) + "\n")
|
||||
else:
|
||||
with codecs.open(dest, encoding="utf-8", mode="w") as f:
|
||||
f.write(activity_gpx)
|
||||
|
||||
if 'tcx' in export_formats:
|
||||
log.debug("getting tcx for %s", id)
|
||||
activity_tcx = retryer.call(client.get_activity_tcx, id)
|
||||
dest = os.path.join(backup_dir, export_filename(activity, 'tcx'))
|
||||
if activity_tcx is None:
|
||||
not_found.write(os.path.basename(dest) + "\n")
|
||||
else:
|
||||
with codecs.open(dest, encoding="utf-8", mode="w") as f:
|
||||
f.write(activity_tcx)
|
||||
|
||||
if 'fit' in export_formats:
|
||||
log.debug("getting fit for %s", id)
|
||||
activity_fit = retryer.call(client.get_activity_fit, id)
|
||||
dest = os.path.join(
|
||||
backup_dir, export_filename(activity, 'fit'))
|
||||
if activity_fit is None:
|
||||
not_found.write(os.path.basename(dest) + "\n")
|
||||
else:
|
||||
with open(dest, mode="wb") as f:
|
||||
f.write(activity_fit)
|
||||
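The file-naming scheme above is what makes the backups incremental; a minimal sketch of what `export_filename` and `need_backup` produce (the activity id and timestamp are made up, and `./activities` is assumed to already exist):

from datetime import datetime, timezone

from garminexport.backup import export_filename, need_backup

# hypothetical activity tuple (id, starttime); both values are made up
activity = (123456789, datetime(2015, 2, 17, 5, 45, tzinfo=timezone.utc))

print(export_filename(activity, "tcx"))
# -> 2015-02-17T05:45:00+00:00_123456789.tcx (':' becomes '_' on Windows)

# the activity counts as missing until a file for every requested format
# exists under backup_dir (or is listed in backup_dir/.not_found)
missing = need_backup([activity], "./activities", export_formats=["tcx", "gpx"])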
0
garminexport/garminexport/cli/__init__.py
Normal file
79
garminexport/garminexport/cli/backup.py
Normal file
@@ -0,0 +1,79 @@
"""This script performs backups of activities for a Garmin Connect account. The
activities are stored in a local directory on the user's computer. The backups
are incremental, meaning that only activities that aren't already stored in the
backup directory will be downloaded.

"""
import argparse
import logging
import os

from garminexport.backup import supported_export_formats
from garminexport.incremental_backup import incremental_backup
from garminexport.logging_config import LOG_LEVELS

logging.basicConfig(level=logging.INFO, format="%(asctime)-15s [%(levelname)s] %(message)s")
log = logging.getLogger(__name__)

DEFAULT_MAX_RETRIES = 7
"""The default maximum number of retries to make when fetching a single activity."""


def parse_args() -> argparse.Namespace:
    """Parse CLI arguments.

    :return: Namespace object holding parsed arguments as attributes.
      This object may be directly used by garminexport/garminbackup.py.
    """
    parser = argparse.ArgumentParser(
        prog="garminbackup",
        description=(
            "Performs incremental backups of activities for a "
            "given Garmin Connect account. Only activities that "
            "aren't already stored in the backup directory will "
            "be downloaded."))
    # positional args
    parser.add_argument(
        "username", metavar="<username>", type=str, help="Account user name.")
    # optional args
    parser.add_argument(
        "--password", type=str, help="Account password.")
    parser.add_argument(
        "--backup-dir", metavar="DIR", type=str,
        help="Destination directory for downloaded activities. Default: ./activities/",
        default=os.path.join(".", "activities"))
    parser.add_argument(
        "--log-level", metavar="LEVEL", type=str,
        help="Desired log output level (DEBUG, INFO, WARNING, ERROR). Default: INFO.",
        default="INFO")
    parser.add_argument(
        "-f", "--format", choices=supported_export_formats,
        default=None, action='append',
        help="Desired output formats ({}). Default: ALL.".format(', '.join(supported_export_formats)))
    parser.add_argument(
        "-E", "--ignore-errors", action='store_true',
        help="Ignore errors and keep going. Default: FALSE")
    parser.add_argument(
        "--max-retries", metavar="NUM", default=DEFAULT_MAX_RETRIES,
        type=int,
        help=("The maximum number of retries to make on failed attempts to fetch an activity. "
              "Exponential backoff will be used, meaning that the delay between successive attempts "
              "will double with every retry, starting at one second. DEFAULT: {}").format(DEFAULT_MAX_RETRIES))

    return parser.parse_args()


def main():
    args = parse_args()
    logging.root.setLevel(LOG_LEVELS[args.log_level])

    try:
        incremental_backup(username=args.username,
                           password=args.password,
                           backup_dir=args.backup_dir,
                           export_formats=args.format,
                           ignore_errors=args.ignore_errors,
                           max_retries=args.max_retries)

    except Exception as e:
        log.error("failed with exception: {}".format(e))
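The same entry point can be exercised without installing the console script; a minimal sketch that fakes the command line exactly as the garmin-backup wrapper (defined in setup.py later in this commit) would receive it (account name made up):

import sys

from garminexport.cli.backup import main

# argparse reads sys.argv[1:], so fake it before calling main()
sys.argv = ["garminbackup", "user@example.com", "--backup-dir", "./activities", "-f", "fit"]
main()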
79
garminexport/garminexport/cli/get_activity.py
Executable file
@@ -0,0 +1,79 @@
#! /usr/bin/env python
"""A program that downloads one particular activity from a given Garmin
Connect account and stores it locally on the user's computer.
"""
import argparse
import getpass
import logging
import os
from datetime import timedelta

import dateutil.parser

import garminexport.backup
from garminexport.garminclient import GarminClient
from garminexport.logging_config import LOG_LEVELS
from garminexport.retryer import Retryer, ExponentialBackoffDelayStrategy, MaxRetriesStopStrategy

logging.basicConfig(level=logging.INFO, format="%(asctime)-15s [%(levelname)s] %(message)s")
log = logging.getLogger(__name__)


def main():
    parser = argparse.ArgumentParser(
        description="Downloads one particular activity for a given Garmin Connect account.")

    # positional args
    parser.add_argument(
        "username", metavar="<username>", type=str, help="Account user name.")
    parser.add_argument(
        "activity", metavar="<activity>", type=int, help="Activity ID.")
    parser.add_argument(
        "format", metavar="<format>", type=str,
        help="Export format (one of: {}).".format(garminexport.backup.supported_export_formats))

    # optional args
    parser.add_argument(
        "--password", type=str, help="Account password.")
    parser.add_argument(
        "--destination", metavar="DIR", type=str,
        help="Destination directory for downloaded activity. Default: ./activities/",
        default=os.path.join(".", "activities"))
    parser.add_argument(
        "--log-level", metavar="LEVEL", type=str,
        help="Desired log output level (DEBUG, INFO, WARNING, ERROR). Default: INFO.",
        default="INFO")

    args = parser.parse_args()

    if args.log_level not in LOG_LEVELS:
        raise ValueError("Illegal log-level argument: {}".format(args.log_level))

    if args.format not in garminexport.backup.supported_export_formats:
        raise ValueError(
            "Unrecognized export format: '{}'. Must be one of {}".format(
                args.format, garminexport.backup.supported_export_formats))

    logging.root.setLevel(LOG_LEVELS[args.log_level])

    try:
        if not os.path.isdir(args.destination):
            os.makedirs(args.destination)

        if not args.password:
            args.password = getpass.getpass("Enter password: ")

        with GarminClient(args.username, args.password) as client:
            log.info("fetching activity %s ...", args.activity)
            summary = client.get_activity_summary(args.activity)
            # set up a retryer that will handle retries of failed activity downloads
            retryer = Retryer(
                delay_strategy=ExponentialBackoffDelayStrategy(initial_delay=timedelta(seconds=1)),
                stop_strategy=MaxRetriesStopStrategy(5))

            start_time = dateutil.parser.parse(summary["summaryDTO"]["startTimeGMT"])
            garminexport.backup.download(
                client, (args.activity, start_time), retryer, args.destination, export_formats=[args.format])
    except Exception as e:
        log.error("failed with exception: %s", e)
        raise
69
garminexport/garminexport/cli/upload_activity.py
Executable file
@@ -0,0 +1,69 @@
#! /usr/bin/env python
"""A program that uploads an activity file to a Garmin Connect account.
"""
import argparse
import getpass
import logging

from garminexport.garminclient import GarminClient
from garminexport.logging_config import LOG_LEVELS

logging.basicConfig(level=logging.INFO, format="%(asctime)-15s [%(levelname)s] %(message)s")
log = logging.getLogger(__name__)


def main():
    parser = argparse.ArgumentParser(
        description="Uploads an activity file to a Garmin Connect account.")

    # positional args
    parser.add_argument(
        "username", metavar="<username>", type=str, help="Account user name.")
    parser.add_argument(
        "activity", nargs='+', metavar="<file>", type=argparse.FileType("rb"),
        help="Activity file (.gpx, .tcx, or .fit).")

    # optional args
    parser.add_argument(
        "--password", type=str, help="Account password.")
    parser.add_argument(
        '-N', '--name', help="Activity name on Garmin Connect.")
    parser.add_argument(
        '-D', '--description', help="Activity description on Garmin Connect.")
    parser.add_argument(
        '-P', '--private', action='store_true', help="Make activity private on Garmin Connect.")
    parser.add_argument(
        '-T', '--type', help="Override activity type (running, cycling, walking, hiking, strength_training, etc.)")
    parser.add_argument(
        "--log-level", metavar="LEVEL", type=str,
        help="Desired log output level (DEBUG, INFO, WARNING, ERROR). Default: INFO.",
        default="INFO")

    args = parser.parse_args()

    if len(args.activity) > 1 and (args.description is not None or args.name is not None):
        parser.error("When uploading multiple activities, --name or --description cannot be used.")

    if args.log_level not in LOG_LEVELS:
        raise ValueError("Illegal log-level argument: {}".format(args.log_level))

    logging.root.setLevel(LOG_LEVELS[args.log_level])

    try:
        if not args.password:
            args.password = getpass.getpass("Enter password: ")

        with GarminClient(args.username, args.password) as client:
            for activity in args.activity:
                log.info("uploading activity file %s ...", activity.name)
                try:
                    id = client.upload_activity(activity, name=args.name, description=args.description,
                                                private=args.private, activity_type=args.type)
                except Exception as e:
                    log.error("upload failed: {!r}".format(e))
                else:
                    log.info("upload successful: https://connect.garmin.com/modern/activity/%s", id)

    except Exception as e:
        log.error("failed with exception: %s", e)
        raise
425
garminexport/garminexport/garminclient.py
Executable file
@@ -0,0 +1,425 @@
#! /usr/bin/env python
"""A module for authenticating against and communicating with selected
parts of the Garmin Connect REST API.
"""

import json
import logging
import os
import os.path
import re
import sys
import zipfile
from builtins import range
from functools import wraps
from io import BytesIO

import dateutil
import dateutil.parser
import requests

#
# Note: For more detailed information about the API services
# used by this module, log in to your Garmin Connect account
# through the web browser and visit the API documentation page
# for the REST service of interest. For example:
#   https://connect.garmin.com/proxy/activity-service-1.3/index.html
#   https://connect.garmin.com/proxy/activity-search-service-1.2/index.html
#

#
# Other useful references:
#   https://github.com/cpfair/tapiriik/blob/master/tapiriik/services/GarminConnect/garminconnect.py
#   https://forums.garmin.com/showthread.php?72150-connect-garmin-com-signin-question/page2
#

log = logging.getLogger(__name__)

# reduce logging noise from requests library
logging.getLogger("requests").setLevel(logging.ERROR)

SSO_LOGIN_URL = "https://sso.garmin.com/sso/signin"
"""The Garmin Connect Single-Sign On login URL."""


def require_session(client_function):
    """Decorator that is used to annotate :class:`GarminClient`
    methods that need an authenticated session before being called.
    """

    @wraps(client_function)
    def check_session(*args, **kwargs):
        client_object = args[0]
        if not client_object.session:
            raise Exception("Attempt to use GarminClient without being connected. Call connect() before first use.")
        return client_function(*args, **kwargs)

    return check_session


class GarminClient(object):
    """A client class used to authenticate with Garmin Connect and
    extract data from the user account.

    Since this class implements the context manager protocol, this object
    is preferably used together with the with-statement. This will
    automatically take care of logging in to Garmin Connect before any
    further interactions and logging out after the block completes or
    a failure occurs.

    Example of use: ::

      with GarminClient("my.sample@sample.com", "secretpassword") as client:
          activities = client.list_activities()
          for activity_id, start_time in activities:
              gpx = client.get_activity_gpx(activity_id)

    """

    def __init__(self, username, password):
        """Initialize a :class:`GarminClient` instance.

        :param username: Garmin Connect user name or email address.
        :type username: str
        :param password: Garmin Connect account password.
        :type password: str
        """
        self.username = username
        self.password = password
        self.session = None

    def __enter__(self):
        self.connect()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.disconnect()

    def connect(self):
        self.session = requests.Session()
        self._authenticate()

    def disconnect(self):
        if self.session:
            self.session.close()
            self.session = None

    def _authenticate(self):
        log.info("authenticating user ...")
        form_data = {
            "username": self.username,
            "password": self.password,
            "embed": "false"
        }
        request_params = {
            "service": "https://connect.garmin.com/modern"
        }
        headers = {'origin': 'https://sso.garmin.com'}
        auth_response = self.session.post(
            SSO_LOGIN_URL, headers=headers, params=request_params, data=form_data)
        log.debug("got auth response: %s", auth_response.text)
        if auth_response.status_code != 200:
            raise ValueError("authentication failure: did you enter valid credentials?")
        auth_ticket_url = self._extract_auth_ticket_url(auth_response.text)
        log.debug("auth ticket url: '%s'", auth_ticket_url)

        log.info("claiming auth ticket ...")
        response = self.session.get(auth_ticket_url)
        if response.status_code != 200:
            raise RuntimeError(
                "auth failure: failed to claim auth ticket: {}: {}\n{}".format(
                    auth_ticket_url, response.status_code, response.text))

        # appears like we need to touch base with the old API to initiate
        # some form of legacy session. otherwise certain downloads will fail.
        self.session.get('https://connect.garmin.com/legacy/session')

    @staticmethod
    def _extract_auth_ticket_url(auth_response):
        """Extracts an authentication ticket URL from the response of an
        authentication form submission. The auth ticket URL is typically
        of form:

          https://connect.garmin.com/modern?ticket=ST-0123456-aBCDefgh1iJkLmN5opQ9R-cas

        :param auth_response: HTML response from an auth form submission.
        """
        match = re.search(r'response_url\s*=\s*"(https:[^"]+)"', auth_response)
        if not match:
            raise RuntimeError(
                "auth failure: unable to extract auth ticket URL. did you provide a correct username/password?")
        auth_ticket_url = match.group(1).replace("\\", "")
        return auth_ticket_url

    @require_session
    def list_activities(self):
        """Return all activity ids stored by the logged in user, along
        with their starting timestamps.

        :returns: The full list of activity identifiers (along with their starting timestamps).
        :rtype: tuples of (int, datetime)
        """
        ids = []
        batch_size = 100
        # fetch in batches since the API doesn't allow more than a certain
        # number of activities to be retrieved on every invocation
        for start_index in range(0, sys.maxsize, batch_size):
            next_batch = self._fetch_activity_ids_and_ts(start_index, batch_size)
            if not next_batch:
                break
            ids.extend(next_batch)
        return ids

    @require_session
    def _fetch_activity_ids_and_ts(self, start_index, max_limit=100):
        """Return a sequence of activity ids (along with their starting
        timestamps) starting at a given index, with index 0 being the user's
        most recently registered activity.

        Should the index be out of bounds or the account empty, an empty list is returned.

        :param start_index: The index of the first activity to retrieve.
        :type start_index: int
        :param max_limit: The (maximum) number of activities to retrieve.
        :type max_limit: int

        :returns: A list of activity identifiers (along with their starting timestamps).
        :rtype: tuples of (int, datetime)
        """
        log.debug("fetching activities %d through %d ...", start_index, start_index + max_limit - 1)
        response = self.session.get(
            "https://connect.garmin.com/modern/proxy/activitylist-service/activities/search/activities",
            params={"start": start_index, "limit": max_limit})
        if response.status_code != 200:
            raise Exception(
                u"failed to fetch activities {} to {}: {}\n{}".format(
                    start_index, (start_index + max_limit - 1), response.status_code, response.text))
        activities = json.loads(response.text)
        if not activities:
            # index out of bounds or empty account
            return []

        entries = []
        for activity in activities:
            id = int(activity["activityId"])
            timestamp_utc = dateutil.parser.parse(activity["startTimeGMT"])
            # make sure UTC timezone gets set
            timestamp_utc = timestamp_utc.replace(tzinfo=dateutil.tz.tzutc())
            entries.append((id, timestamp_utc))
        log.debug("got %d activities.", len(entries))
        return entries

    @require_session
    def get_activity_summary(self, activity_id):
        """Return a summary about a given activity.
        The summary contains several statistics (such as duration, GPS starting
        point, GPS end point, elevation gain, max heart rate, max pace, and max speed).

        :param activity_id: Activity identifier.
        :type activity_id: int
        :returns: The activity summary as a JSON dict.
        :rtype: dict
        """
        response = self.session.get(
            "https://connect.garmin.com/modern/proxy/activity-service/activity/{}".format(activity_id))
        if response.status_code != 200:
            log.error(u"failed to fetch json summary for activity %s: %d\n%s",
                      activity_id, response.status_code, response.text)
            raise Exception(u"failed to fetch json summary for activity {}: {}\n{}".format(
                activity_id, response.status_code, response.text))
        return json.loads(response.text)

    @require_session
    def get_activity_details(self, activity_id):
        """Return a JSON representation of a given activity including
        available measurements such as location (longitude, latitude),
        heart rate, distance, pace, speed, elevation.

        :param activity_id: Activity identifier.
        :type activity_id: int
        :returns: The activity details as a JSON dict.
        :rtype: dict
        """
        # mounted at xml or json depending on result encoding
        response = self.session.get(
            "https://connect.garmin.com/modern/proxy/activity-service/activity/{}/details".format(activity_id))
        if response.status_code != 200:
            raise Exception(u"failed to fetch json activityDetails for {}: {}\n{}".format(
                activity_id, response.status_code, response.text))
        return json.loads(response.text)

    @require_session
    def get_activity_gpx(self, activity_id):
        """Return a GPX (GPS Exchange Format) representation of a
        given activity. If the activity cannot be exported to GPX
        (not yet observed in practice, but that doesn't exclude the
        possibility), a :obj:`None` value is returned.

        :param activity_id: Activity identifier.
        :type activity_id: int
        :returns: The GPX representation of the activity as an XML string
          or ``None`` if the activity couldn't be exported to GPX.
        :rtype: str
        """
        response = self.session.get(
            "https://connect.garmin.com/modern/proxy/download-service/export/gpx/activity/{}".format(activity_id))
        # An alternate URL that seems to produce the same results
        # and is the one used when exporting through the Garmin
        # Connect web page.
        # response = self.session.get("https://connect.garmin.com/proxy/activity-service-1.1/gpx/activity/{}?full=true".format(activity_id))

        # A 404 (Not Found) or 204 (No Content) response are both indicators
        # of a gpx file not being available for the activity. It may, for
        # example, be a manually entered activity without any device data.
        if response.status_code in (404, 204):
            return None
        if response.status_code != 200:
            raise Exception(u"failed to fetch GPX for activity {}: {}\n{}".format(
                activity_id, response.status_code, response.text))
        return response.text

    @require_session
    def get_activity_tcx(self, activity_id):
        """Return a TCX (Training Center XML) representation of a
        given activity. If the activity doesn't have a TCX source (for
        example, if it was originally uploaded in GPX format, Garmin
        won't try to synthesize a TCX file) a :obj:`None` value is
        returned.

        :param activity_id: Activity identifier.
        :type activity_id: int
        :returns: The TCX representation of the activity as an XML string
          or ``None`` if the activity cannot be exported to TCX.
        :rtype: str
        """

        response = self.session.get(
            "https://connect.garmin.com/modern/proxy/download-service/export/tcx/activity/{}".format(activity_id))
        if response.status_code == 404:
            return None
        if response.status_code != 200:
            raise Exception(u"failed to fetch TCX for activity {}: {}\n{}".format(
                activity_id, response.status_code, response.text))
        return response.text

    def get_original_activity(self, activity_id):
        """Return the original file that was uploaded for an activity.
        If the activity doesn't have any file source (for example,
        if it was entered manually rather than imported from a Garmin
        device) then :obj:`(None, None)` is returned.

        :param activity_id: Activity identifier.
        :type activity_id: int
        :returns: A tuple of the file type (e.g. 'fit', 'tcx', 'gpx') and
          its contents, or :obj:`(None, None)` if no file is found.
        :rtype: (str, str)
        """
        response = self.session.get(
            "https://connect.garmin.com/modern/proxy/download-service/files/activity/{}".format(activity_id))
        # A 404 (Not Found) response is a clear indicator of a missing .fit
        # file. As of lately, the endpoint appears to have started to
        # respond with 500 "NullPointerException" on attempts to download a
        # .fit file for an activity without one.
        if response.status_code in [404, 500]:
            # Manually entered activity, no file source available
            return None, None
        if response.status_code != 200:
            raise Exception(
                u"failed to get original activity file for {}: {}\n{}".format(
                    activity_id, response.status_code, response.text))

        # return the first entry from the zip archive where the filename is
        # activity_id (should be the only entry!)
        zip_file = zipfile.ZipFile(BytesIO(response.content), mode="r")
        for path in zip_file.namelist():
            fn, ext = os.path.splitext(path)
            if fn == str(activity_id):
                return ext[1:], zip_file.open(path).read()
        return None, None

    def get_activity_fit(self, activity_id):
        """Return a FIT representation for a given activity. If the activity
        doesn't have a FIT source (for example, if it was entered manually
        rather than imported from a Garmin device) a :obj:`None` value is
        returned.

        :param activity_id: Activity identifier.
        :type activity_id: int
        :returns: A string with a FIT file for the activity or :obj:`None`
          if no FIT source exists for this activity (e.g., entered manually).
        :rtype: str
        """
        fmt, orig_file = self.get_original_activity(activity_id)
        # if the file extension of the original activity file isn't 'fit',
        # this activity was uploaded in a different format (e.g. gpx/tcx)
        # and cannot be exported to fit
        return orig_file if fmt == 'fit' else None

    @require_session
    def upload_activity(self, file, format=None, name=None, description=None, activity_type=None, private=None):
        """Upload a GPX, TCX, or FIT file for an activity.

        :param file: Path or open file
        :param format: File format (gpx, tcx, or fit); guessed from filename if None
        :param name: Optional name for the activity on Garmin Connect
        :param description: Optional description for the activity on Garmin Connect
        :param activity_type: Optional activityType key (lowercase: e.g. running, cycling)
        :param private: If true, the activity will be set as private.
        :returns: ID of the newly-uploaded activity
        :rtype: int
        """

        if isinstance(file, str):
            file = open(file, "rb")

        # guess file type if unspecified
        fn = os.path.basename(file.name)
        _, ext = os.path.splitext(fn)
        if format is None:
            if ext.lower() in ('.gpx', '.tcx', '.fit'):
                format = ext.lower()[1:]
            else:
                raise Exception(u"could not guess file type for {}".format(fn))

        # upload it
        files = dict(data=(fn, file))
        response = self.session.post("https://connect.garmin.com/modern/proxy/upload-service/upload/.{}".format(format),
                                     files=files, headers={"nk": "NT"})

        # check response and get activity ID
        try:
            j = response.json()["detailedImportResult"]
        except (json.JSONDecodeError, KeyError):
            raise Exception(u"failed to upload {} for activity: {}\n{}".format(
                format, response.status_code, response.text))

        if len(j["failures"]) or len(j["successes"]) < 1:
            raise Exception(u"failed to upload {} for activity: {}\n{}".format(
                format, response.status_code, j["failures"]))

        if len(j["successes"]) > 1:
            raise Exception(u"uploading {} resulted in multiple activities ({})".format(
                format, len(j["successes"])))

        activity_id = j["successes"][0]["internalId"]

        # add optional fields
        data = {}
        if name is not None:
            data['activityName'] = name
        if description is not None:
            data['description'] = description
        if activity_type is not None:
            data['activityTypeDTO'] = {"typeKey": activity_type}
        if private:
            data['privacy'] = {"typeKey": "private"}
        if data:
            data['activityId'] = activity_id
            encoding_headers = {"Content-Type": "application/json; charset=UTF-8"}  # see Tapiriik
            response = self.session.put(
                "https://connect.garmin.com/proxy/activity-service/activity/{}".format(activity_id),
                data=json.dumps(data), headers=encoding_headers)
            if response.status_code != 204:
                raise Exception(u"failed to set metadata for activity {}: {}\n{}".format(
                    activity_id, response.status_code, response.text))

        return activity_id
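A minimal sketch of the client above in action (credentials are made up), retrieving the original upload for the most recent activity:

from garminexport.garminclient import GarminClient

# made-up credentials, for illustration only
with GarminClient("user@example.com", "secret") as client:
    activities = client.list_activities()  # [(id, starttime), ...], newest first
    newest_id, newest_start = activities[0]
    fmt, content = client.get_original_activity(newest_id)
    if fmt is None:
        print("activity {} has no original file (entered manually?)".format(newest_id))
    else:
        with open("{}.{}".format(newest_id, fmt), "wb") as f:
            f.write(content)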
74
garminexport/garminexport/incremental_backup.py
Normal file
@@ -0,0 +1,74 @@
#! /usr/bin/env python
import getpass
import logging
import os
from datetime import timedelta
from typing import List

import garminexport.backup
from garminexport.backup import supported_export_formats
from garminexport.garminclient import GarminClient
from garminexport.retryer import Retryer, ExponentialBackoffDelayStrategy, MaxRetriesStopStrategy

log = logging.getLogger(__name__)


def incremental_backup(username: str,
                       password: str = None,
                       backup_dir: str = os.path.join(".", "activities"),
                       export_formats: List[str] = None,
                       ignore_errors: bool = False,
                       max_retries: int = 7):
    """Performs (incremental) backups of activities for a given Garmin Connect account.

    :param username: Garmin Connect user name
    :param password: Garmin Connect user password. Default: None. If not provided, the user is prompted interactively.
    :param backup_dir: Destination directory for downloaded activities. Default: ./activities/.
    :param export_formats: List of desired output formats (json_summary, json_details, gpx, tcx, fit).
      Default: `None`, which means all supported formats will be backed up.
    :param ignore_errors: Ignore errors and keep going. Default: False.
    :param max_retries: The maximum number of retries to make on failed attempts to fetch an activity.
      Exponential backoff will be used, meaning that the delay between successive attempts
      will double with every retry, starting at one second. Default: 7.

    The activities are stored in a local directory on the user's computer.
    The backups are incremental, meaning that only activities that aren't already
    stored in the backup directory will be downloaded.
    """
    # if no --format was specified, all formats are to be backed up
    export_formats = export_formats if export_formats else supported_export_formats
    log.info("backing up formats: %s", ", ".join(export_formats))

    if not os.path.isdir(backup_dir):
        os.makedirs(backup_dir)

    if not password:
        password = getpass.getpass("Enter password: ")

    # set up a retryer that will handle retries of failed activity downloads
    retryer = Retryer(
        delay_strategy=ExponentialBackoffDelayStrategy(initial_delay=timedelta(seconds=1)),
        stop_strategy=MaxRetriesStopStrategy(max_retries))

    with GarminClient(username, password) as client:
        # get all activity ids and timestamps from Garmin account
        log.info("scanning activities for %s ...", username)
        activities = set(retryer.call(client.list_activities))
        log.info("account has a total of %d activities", len(activities))

        missing_activities = garminexport.backup.need_backup(activities, backup_dir, export_formats)
        backed_up = activities - missing_activities
        log.info("%s contains %d backed up activities", backup_dir, len(backed_up))

        log.info("activities that aren't backed up: %d", len(missing_activities))

        for index, activity in enumerate(missing_activities):
            id, start = activity
            log.info("backing up activity %s from %s (%d out of %d) ...",
                     id, start, index + 1, len(missing_activities))
            try:
                garminexport.backup.download(client, activity, retryer, backup_dir, export_formats)
            except Exception as e:
                log.error("failed with exception: %s", e)
                if not ignore_errors:
                    raise
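For programmatic use (the garmin-backup console script is just a thin argparse wrapper around this function), a minimal sketch with a made-up account:

from garminexport.incremental_backup import incremental_backup

# account name is made up; the password is prompted for when omitted
incremental_backup("user@example.com",
                   backup_dir="./activities",
                   export_formats=["json_summary", "fit"],
                   ignore_errors=True,
                   max_retries=3)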
9
garminexport/garminexport/logging_config.py
Normal file
@@ -0,0 +1,9 @@
import logging

LOG_LEVELS = {
    "DEBUG": logging.DEBUG,
    "INFO": logging.INFO,
    "WARNING": logging.WARNING,
    "ERROR": logging.ERROR
}
"""Command-line (string-based) log-level mapping to logging module levels."""
215
garminexport/garminexport/retryer.py
Normal file
@@ -0,0 +1,215 @@
import abc
import logging
import time
from datetime import datetime
from datetime import timedelta

log = logging.getLogger(__name__)


class GaveUpError(Exception):
    """Raised by a :class:`Retryer` that has exceeded its maximum number of retries."""
    pass


class DelayStrategy(object):
    """Used by a :class:`Retryer` to determine how long to wait after an
    attempt before the next retry."""
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def next_delay(self, attempts):
        """Returns the time to wait before the next attempt.

        :param attempts: The total number of (failed) attempts performed thus far.
        :type attempts: int

        :return: The delay before the next attempt.
        :rtype: `timedelta`
        """
        pass


class FixedDelayStrategy(DelayStrategy):
    """A retry :class:`DelayStrategy` that produces a fixed delay between attempts."""

    def __init__(self, delay):
        """
        :param delay: Attempt delay.
        :type delay: `timedelta`
        """
        self.delay = delay

    def next_delay(self, attempts):
        return self.delay


class ExponentialBackoffDelayStrategy(DelayStrategy):
    """A retry :class:`DelayStrategy` that produces exponentially longer
    delays between attempts. The first attempt will be followed
    by a `<initial-delay> * 2**0` delay. The following delays will be
    `<initial-delay> * 2**1`, `<initial-delay> * 2**2`, and so on.
    """

    def __init__(self, initial_delay):
        """
        :param initial_delay: Initial delay.
        :type initial_delay: `timedelta`
        """
        self.initial_delay = initial_delay

    def next_delay(self, attempts):
        if attempts <= 0:
            return timedelta(seconds=0)
        delay_seconds = self.initial_delay.total_seconds() * 2 ** (attempts - 1)
        return timedelta(seconds=delay_seconds)


class NoDelayStrategy(FixedDelayStrategy):
    """A retry :class:`DelayStrategy` that doesn't introduce any delay between attempts."""

    def __init__(self):
        super(NoDelayStrategy, self).__init__(timedelta(seconds=0))


class ErrorStrategy(object):
    """Used by a :class:`Retryer` to determine which errors are to be
    suppressed and which errors are to be re-raised and thereby end the (re)trying."""
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def should_suppress(self, error):
        """Called after an attempt that raised an exception to determine if
        that error should be suppressed (continue retrying) or be re-raised (and end the retrying).

        :param error: Error that was raised from an attempt.
        """
        pass


class SuppressAllErrorStrategy(ErrorStrategy):
    """An :class:`ErrorStrategy` that suppresses all types of errors raised
    on attempts to perform the call."""

    def should_suppress(self, error):
        return True


class StopStrategy(object):
    """Determines for how long a :class:`Retryer` should keep (re)trying."""
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def should_continue(self, attempts, elapsed_time):
        """Called after a failed attempt to determine if we should keep trying.

        :param attempts: Total number of (failed) attempts thus far.
        :type attempts: int
        :param elapsed_time: Total elapsed time since first attempt.
        :type elapsed_time: timedelta

        :return: `True` if the `Retryer` should keep trying, `False` otherwise.
        :rtype: bool
        """
        pass


class NeverStopStrategy(StopStrategy):
    """A :class:`StopStrategy` that never gives up."""

    def should_continue(self, attempts, elapsed_time):
        return True


class MaxRetriesStopStrategy(StopStrategy):
    """A :class:`StopStrategy` that gives up after a certain number of retries."""

    def __init__(self, max_retries):
        self.max_retries = max_retries

    def should_continue(self, attempts, elapsed_time):
        return attempts <= self.max_retries


class Retryer(object):
    """A :class:`Retryer` makes repeated calls to a function until either
    the return value satisfies a certain condition (`returnval_predicate`)
    or until a stop strategy (`stop_strategy`) determines that enough
    attempts have been made (or too long a time has elapsed). Should the
    `stop_strategy` decide to abort, a :class:`GaveUpError` is raised.

    The delay between attempts is controlled by a `delay_strategy`.

    Should the attempted call raise an exception, an `error_strategy` gets
    to decide if the error should be suppressed or re-raised (in which case
    the retrying ends with that error).
    """

    def __init__(
            self,
            returnval_predicate=lambda returnval: True,
            delay_strategy=NoDelayStrategy(),
            stop_strategy=NeverStopStrategy(),
            error_strategy=SuppressAllErrorStrategy()):
        """Creates a new :class:`Retryer` set up to use a given set of
        strategies to control its behavior.

        With only default values, the retryer will keep retrying
        indefinitely until a value (any value) is returned by
        the called function. Any raised errors will be suppressed.

        :param returnval_predicate: predicate that determines if a return
          value is considered successful. When the predicate evaluates to
          `True`, the `call` function will return with that return value.
        :type returnval_predicate: `function(returnvalue) => bool`
        :param delay_strategy: determines the time delay to introduce between
          attempts.
        :type delay_strategy: :class:`DelayStrategy`
        :param stop_strategy: determines when we are to stop retrying.
        :type stop_strategy: :class:`StopStrategy`
        :param error_strategy: determines which errors (if any) to suppress
          when raised by the called function.
        :type error_strategy: :class:`ErrorStrategy`
        """
        self.returnval_predicate = returnval_predicate
        self.delay_strategy = delay_strategy
        self.stop_strategy = stop_strategy
        self.error_strategy = error_strategy

    def call(self, function, *args, **kw):
        """Calls the given `function`, with the given arguments, repeatedly
        until either (1) a satisfactory result is obtained (as indicated by
        the `returnval_predicate`), or (2) the `stop_strategy` determines
        that no more attempts are to be made (results in a
        :class:`GaveUpError`), or (3) the called function raises an error
        that is not suppressed by the `error_strategy` (the call will raise
        that error).

        :param function: A `callable`.
        :param args: Any positional arguments to call `function` with.
        :param kw: Any keyword arguments to call `function` with.
        """
        name = function.__name__
        start = datetime.now()
        attempts = 0
        while True:
            try:
                attempts += 1
                log.info('{%s}: attempt %d ...', name, attempts)
                returnval = function(*args, **kw)
                if self.returnval_predicate(returnval):
                    # return value satisfies predicate, we're done!
                    log.debug('{%s}: success: "%s"', name, returnval)
                    return returnval
                log.debug('{%s}: failed: return value: %s', name, returnval)
            except Exception as e:
                if not self.error_strategy.should_suppress(e):
                    raise e
                log.debug('{%s}: failed: error: %s', name, e)
            elapsed_time = datetime.now() - start
            # should we make another attempt?
            if not self.stop_strategy.should_continue(attempts, elapsed_time):
                raise GaveUpError('{{{}}}: gave up after {} failed attempt(s)'.format(name, attempts))
            delay = self.delay_strategy.next_delay(attempts)
            log.info('{%s}: waiting %d seconds for next attempt', name, delay.total_seconds())
            time.sleep(delay.total_seconds())
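A minimal sketch of composing these strategies; flaky_fetch is a made-up stand-in for an unreliable network call:

import random
from datetime import timedelta

from garminexport.retryer import (
    Retryer, ExponentialBackoffDelayStrategy, MaxRetriesStopStrategy)


def flaky_fetch():
    # made-up stand-in for a remote call that fails most of the time
    if random.random() < 0.8:
        raise RuntimeError("transient failure")
    return "payload"


# delays of 1s, 2s, 4s, ... between attempts; raises GaveUpError after the retries run out
retryer = Retryer(
    delay_strategy=ExponentialBackoffDelayStrategy(initial_delay=timedelta(seconds=1)),
    stop_strategy=MaxRetriesStopStrategy(5))
print(retryer.call(flaky_fetch))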
38
garminexport/samples/lab.py
Normal file
@@ -0,0 +1,38 @@
#! /usr/bin/env python
"""
Script intended for Garmin Connect API experimenting in ipython.
Run as:
  ipython -i samples/lab.py -- --password=<password> <username>

and use the client object (or client.session) to interact with
Garmin Connect.
"""

import argparse
import getpass
import logging

from garminexport.garminclient import GarminClient

logging.basicConfig(level=logging.INFO, format="%(asctime)-15s [%(levelname)s] %(message)s")
log = logging.getLogger(__name__)

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    # positional args
    parser.add_argument(
        "username", metavar="<username>", type=str, help="Account user name.")
    # optional args
    parser.add_argument(
        "--password", type=str, help="Account password.")

    args = parser.parse_args()
    print(args)

    if not args.password:
        args.password = getpass.getpass("Enter password: ")

    client = GarminClient(args.username, args.password)
    client.connect()

    print("client object ready for use.")
47
garminexport/samples/sample.py
Executable file
@@ -0,0 +1,47 @@
#! /usr/bin/env python

import argparse
import getpass
import json
import logging

from garminexport.garminclient import GarminClient

logging.basicConfig(level=logging.INFO, format="%(asctime)-15s [%(levelname)s] %(message)s")
log = logging.getLogger(__name__)

if __name__ == "__main__":

    parser = argparse.ArgumentParser(
        description="Export all Garmin Connect activities")
    # positional args
    parser.add_argument(
        "username", metavar="<username>", type=str, help="Account user name.")
    # optional args
    parser.add_argument(
        "--password", type=str, help="Account password.")

    args = parser.parse_args()
    print(args)

    if not args.password:
        args.password = getpass.getpass("Enter password: ")

    try:
        with GarminClient(args.username, args.password) as client:
            log.info("activities:")
            activity_ids = client.list_activities()
            log.info("num ids: %d", len(activity_ids))
            log.info(activity_ids)

            latest_activity, latest_activity_start = activity_ids[0]
            activity = client.get_activity_summary(latest_activity)
            log.info("activity id: %s", activity["activity"]["activityId"])
            log.info("activity name: '%s'", activity["activity"]["activityName"])
            log.info("activity description: '%s'", activity["activity"]["activityDescription"])
            log.info(json.dumps(client.get_activity_details(latest_activity), indent=4))
            log.info(client.get_activity_gpx(latest_activity))
    except Exception as e:
        log.error("failed with exception: %s", e)
    finally:
        log.info("done")
70
garminexport/setup.py
Normal file
@@ -0,0 +1,70 @@
"""Setup information for the Garmin Connect activity exporter."""

from setuptools import setup
from os import path
# needed for Python 2.7 (ensures open() defaults to text mode with universal
# newlines, and accepts an argument to specify the text encoding).
from io import open

here = path.abspath(path.dirname(__file__))

with open(path.join(here, 'README.md'), encoding='utf-8') as f:
    long_description = f.read()

requires = [
    'requests>=2.0,<3',
    'python-dateutil~=2.4',
]

test_requires = [
    'nose~=1.3',
    'coverage~=4.2',
    'mock~=2.0',
]

setup(name='garminexport',
      version='0.1.0',
      description='Garmin Connect activity exporter and backup tool',
      long_description=long_description,
      long_description_content_type='text/markdown',
      author='Peter Gardfjäll',
      author_email='peter.gardfjall.work@gmail.com',

      classifiers=[
          'Development Status :: 4 - Beta',
          'Intended Audience :: Developers',
          'Intended Audience :: End Users/Desktop',
          'Natural Language :: English',
          'License :: OSI Approved :: Apache Software License',
          'Programming Language :: Python :: 3',
          'Programming Language :: Python :: 3.5',
          'Programming Language :: Python :: 3.6',
          'Programming Language :: Python :: 3.7',
          'Programming Language :: Python :: 3.8',
      ],
      keywords='garmin export backup',
      url='https://github.com/petergardfjall/garminexport',
      license='Apache License 2.0',

      project_urls={
          'Source': 'https://github.com/petergardfjall/garminexport.git',
          'Tracker': 'https://github.com/petergardfjall/garminexport/issues',
      },

      packages=[
          'garminexport',
          'garminexport.cli',
      ],

      python_requires='>=3.5, <4',
      install_requires=requires,
      tests_require=test_requires,
      entry_points={
          'console_scripts': [
              'garmin-backup = garminexport.cli.backup:main',
              'garmin-get-activity = garminexport.cli.get_activity:main',
              'garmin-upload-activity = garminexport.cli.upload_activity:main',
          ],
      },
      )
0
garminexport/tests/__init__.py
Normal file
178
garminexport/tests/test_retryer.py
Normal file
@@ -0,0 +1,178 @@
|
||||
from datetime import datetime
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
import time
|
||||
import unittest
|
||||
|
||||
from garminexport.retryer import (
|
||||
Retryer,
|
||||
NoDelayStrategy, FixedDelayStrategy, ExponentialBackoffDelayStrategy,
|
||||
SuppressAllErrorStrategy,
|
||||
NeverStopStrategy
|
||||
)
|
||||
|
||||
class Counter(object):
|
||||
"""An object whose `next_value` method returns increasing values."""
|
||||
|
||||
def __init__(self, start_at=0):
|
||||
self.nextval = start_at
|
||||
|
||||
def next_value(self):
|
||||
current = self.nextval
|
||||
self.nextval += 1
|
||||
return current
|
||||
|
||||
|
||||
class FailNTimesThenReturn(object):
|
||||
"""An object whose `next_value` method fails N times and then, on the Nth
|
||||
attempt, returns a value."""
|
||||
|
||||
def __init__(self, calls_until_success, returnval):
|
||||
self.called = 0
|
||||
self.calls_until_success = calls_until_success
|
||||
self.returnval = returnval
|
||||
|
||||
def next_value(self):
|
||||
self.called += 1
|
||||
if self.called < self.calls_until_success:
|
||||
raise RuntimeError("boom!")
|
||||
return self.returnval
|
||||
|
||||
|
||||
|
||||
class TestRetryer(unittest.TestCase):
|
||||
"""Exercise `Retryer`."""
|
||||
|
||||
|
||||
def test_with_defaults(self):
|
||||
"""Default `Retryer` behavior is to keep trying until a(ny) value is
|
||||
returned."""
|
||||
failing_client = FailNTimesThenReturn(10, "success!")
|
||||
returnval = Retryer().call(failing_client.next_value)
|
||||
self.assertEqual(returnval, "success!")
|
||||
self.assertEqual(failing_client.called, 10)
|
||||
|
||||
|
||||
def test_with_returnval_predicate(self):
|
||||
"""`Retryer` should only return when the returnval_predicate says so."""
|
||||
retryer = Retryer(returnval_predicate=lambda r: r == 20)
|
||||
self.assertEqual(retryer.call(Counter().next_value), 20)
|
||||
|
||||
def test_function_with_positional_args(self):
|
||||
"""`Retryer` should be able to call a function with positional args."""
|
||||
# TODO
|
||||
pass
|
||||
|
||||
def test_function_with_positional_and_kw_args(self):
|
||||
"""`Retryer` should be able to call a function with keyword args."""
|
||||
# TODO
|
||||
pass
|
||||
|
||||
|
||||
def test_bla(self):
|
||||
retryer = Retryer()
|
||||
func = lambda : int(time.time())
|
||||
|
||||
returnval = retryer.call(func)
|
||||
print(returnval)
|
||||
|
||||
|
||||
class TestFixedDelayStrategy(unittest.TestCase):
|
||||
"""Exercise `FixedDelayStrategy`."""
|
||||
|
||||
def setUp(self):
|
||||
# object under test
|
||||
self.strategy = FixedDelayStrategy(timedelta(seconds=10))
|
||||
|
||||
def test_calculate_delay(self):
|
||||
"""`FixedDelayStrategy` should always return the same delay."""
|
||||
self.assertEqual(self.strategy.next_delay(0), timedelta(seconds=10))
|
||||
self.assertEqual(self.strategy.next_delay(1), timedelta(seconds=10))
|
||||
self.assertEqual(self.strategy.next_delay(2), timedelta(seconds=10))
|
||||
self.assertEqual(self.strategy.next_delay(3), timedelta(seconds=10))
|
||||
self.assertEqual(self.strategy.next_delay(10), timedelta(seconds=10))
|
||||
self.assertEqual(self.strategy.next_delay(100), timedelta(seconds=10))
|
||||
|
||||
|
||||
class TestNoDelayStrategy(unittest.TestCase):
|
||||
"""Exercise `NoDelayStrategy`."""
|
||||
|
||||
def setUp(self):
|
||||
# object under test
|
||||
self.strategy = NoDelayStrategy()
|
||||
|
||||
def test_calculate_delay(self):
|
||||
"""`NoDelayStrategy` should always return no delay."""
|
||||
self.assertEqual(self.strategy.next_delay(0), timedelta(seconds=0))
|
||||
self.assertEqual(self.strategy.next_delay(1), timedelta(seconds=0))
|
||||
self.assertEqual(self.strategy.next_delay(2), timedelta(seconds=0))
|
||||
self.assertEqual(self.strategy.next_delay(3), timedelta(seconds=0))
|
||||
self.assertEqual(self.strategy.next_delay(10), timedelta(seconds=0))
|
||||
self.assertEqual(self.strategy.next_delay(100), timedelta(seconds=0))
|
||||
|
||||
|
||||


class TestExponentialBackoffDelayStrategy(unittest.TestCase):
    """Exercise `ExponentialBackoffDelayStrategy`."""

    def setUp(self):
        # object under test
        self.strategy = ExponentialBackoffDelayStrategy(timedelta(seconds=1))

    def test_calculate_delay(self):
        """`ExponentialBackoffDelayStrategy` should return exponentially increasing delays."""
        self.assertEqual(self.strategy.next_delay(0), timedelta(seconds=0))
        self.assertEqual(self.strategy.next_delay(1), timedelta(seconds=1))
        self.assertEqual(self.strategy.next_delay(2), timedelta(seconds=2))
        self.assertEqual(self.strategy.next_delay(3), timedelta(seconds=4))
        self.assertEqual(self.strategy.next_delay(4), timedelta(seconds=8))
        self.assertEqual(self.strategy.next_delay(5), timedelta(seconds=16))
        self.assertEqual(self.strategy.next_delay(10), timedelta(seconds=512))

    def test_initial_delay(self):
        """The initial delay is used to scale the series of delays."""
        self.strategy = ExponentialBackoffDelayStrategy(timedelta(seconds=2))
        self.assertEqual(self.strategy.next_delay(0), timedelta(seconds=0))
        self.assertEqual(self.strategy.next_delay(1), timedelta(seconds=2*1))
        self.assertEqual(self.strategy.next_delay(2), timedelta(seconds=2*2))
        self.assertEqual(self.strategy.next_delay(3), timedelta(seconds=2*4))
        self.assertEqual(self.strategy.next_delay(4), timedelta(seconds=2*8))
        self.assertEqual(self.strategy.next_delay(5), timedelta(seconds=2*16))
        self.assertEqual(self.strategy.next_delay(10), timedelta(seconds=2*512))
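
# The two tests above pin down the implied formula: next_delay(n) is zero for
# n == 0 and initial * 2**(n - 1) otherwise.  A minimal sketch of a conforming
# strategy, assuming only the `next_delay(attempts)` interface exercised in
# this file (the real implementation may differ):
#
#   class ExponentialBackoffDelayStrategy:
#       def __init__(self, initial_delay):
#           self.initial_delay = initial_delay
#
#       def next_delay(self, attempts):
#           if attempts <= 0:
#               return timedelta(seconds=0)
#           return self.initial_delay * (2 ** (attempts - 1))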


class TestSuppressAllErrorStrategy(unittest.TestCase):
    """Exercise `SuppressAllErrorStrategy`."""

    def setUp(self):
        # object under test
        self.strategy = SuppressAllErrorStrategy()

    def test_suppress(self):
        """`SuppressAllErrorStrategy` should always suppress."""
        self.assertTrue(self.strategy.should_suppress(RuntimeError("boom!")))
        self.assertTrue(self.strategy.should_suppress(Exception("boom!")))
        # non-exception error
        self.assertTrue(self.strategy.should_suppress("boom!"))
        self.assertTrue(self.strategy.should_suppress(None))


class TestNeverStopStrategy(unittest.TestCase):
    """Exercise `NeverStopStrategy`."""

    def setUp(self):
        # object under test
        self.strategy = NeverStopStrategy()

    def test_should_continue(self):
        """`NeverStopStrategy` should always continue."""
        self.assertTrue(self.strategy.should_continue(1, timedelta(seconds=1)))
        self.assertTrue(self.strategy.should_continue(2, timedelta(seconds=4)))
        self.assertTrue(self.strategy.should_continue(3, timedelta(seconds=4)))
        self.assertTrue(self.strategy.should_continue(4, timedelta(seconds=5)))
        self.assertTrue(self.strategy.should_continue(400, timedelta(hours=1)))
        self.assertTrue(self.strategy.should_continue(4000, timedelta(hours=8)))


if __name__ == '__main__':
    logging.basicConfig(format="%(asctime)s %(message)s", level=logging.DEBUG)

    unittest.main()
26
gitea/Dockerfile
Normal file
@@ -0,0 +1,26 @@
FROM alpine:3.14
LABEL maintainer="maintainers@gitea.io"

RUN addgroup -S -g 1000 gitea && \
    adduser -S -H -D -h /data/git -s /bin/bash -u 1000 -G gitea gitea && \
    echo "gitea:*" | chpasswd -e

RUN apk add --no-cache \
        git-lfs \
        openssh-keygen \
        bash && \
    apk add gitea --repository=http://dl-cdn.alpinelinux.org/alpine/edge/community/ && \
    mkdir /var/cache/gitea && \
    mkdir /home/gitea && \
    chown gitea:gitea /var/cache/gitea

EXPOSE 22 3000

USER gitea:gitea

ENTRYPOINT ["/usr/bin/gitea"]

CMD ["web", "-c", "/data/app.ini"]
36
openpyn_alpine/Dockerfile
Normal file
@@ -0,0 +1,36 @@
FROM alpine:latest

ENV OPENVPN_CLIENT=1.0

RUN echo "http://dl-cdn.alpinelinux.org/alpine/edge/testing/" >> /etc/apk/repositories

RUN apk add openvpn unzip wget dante-server sudo expect iputils

RUN apk add --no-cache python3 && \
    python3 -m ensurepip && \
    rm -r /usr/lib/python*/ensurepip && \
    pip3 install --upgrade pip setuptools && \
    if [ ! -e /usr/bin/pip ]; then ln -s pip3 /usr/bin/pip ; fi && \
    if [ ! -e /usr/bin/python ]; then ln -sf /usr/bin/python3 /usr/bin/python; fi

ADD sockd.conf /etc/sockd.conf
ADD sockd.sh /usr/local/bin/
RUN chmod a+x /usr/local/bin/sockd.sh

RUN pip3 install verboselogs
RUN pip3 install --upgrade openpyn

WORKDIR /root
ADD openpyn_init.exp .
RUN expect openpyn_init.exp

ADD run.sh /usr/local/bin/startup.sh
RUN chmod a+x /usr/local/bin/startup.sh

EXPOSE 1080

ENTRYPOINT ["/bin/sh","/usr/local/bin/startup.sh"]
1
openpyn_alpine/README.md
Normal file
@@ -0,0 +1 @@
[](http://droneci.service.dc1.consul/sstent/openpyn_alpine)
2
openpyn_alpine/credentials
Normal file
@@ -0,0 +1,2 @@
shapechecker@protonmail.com
0okjU0CN1U4juKqs2OmQ
11
openpyn_alpine/openpyn_init.exp
Normal file
@@ -0,0 +1,11 @@
#!/usr/bin/expect -f

spawn /usr/bin/openpyn --init

expect "Enter your username for NordVPN"
send "stuart.stent@gmail.com\r"

expect "Enter the password for NordVPN"
send "drRp4mQBVU6awAFOk9lO\r"

expect "\[INFO\] To see usage options type"
7
openpyn_alpine/run.sh
Normal file
@@ -0,0 +1,7 @@
#!/bin/sh

# $VPNFLAGS is left unquoted on purpose so it word-splits into separate flags.
if [ -n "$VPNFLAGS" ]; then
    /usr/bin/openpyn ${VPNFLAGS} -o "--fast-io --cipher AES-128-GCM --mssfix 1431 --script-security 2 --up /usr/local/bin/sockd.sh"
else
    /usr/bin/openpyn nl --max-load 70 --top-servers 10 --tcp --pings 5 -o "--cipher AES-128-GCM --mssfix 1431 --script-security 2 --up /usr/local/bin/sockd.sh"
fi
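
# Example (assumed) usage: VPNFLAGS takes any openpyn server-selection flags,
# e.g. when run via docker:
#   docker run -e VPNFLAGS="us --tcp --top-servers 5" <this-image>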
28
openpyn_alpine/sockd.conf
Normal file
@@ -0,0 +1,28 @@
debug: 0
logoutput: stderr
internal: 0.0.0.0 port = 1080
external: tun0
socksmethod: none
clientmethod: none
user.privileged: root
user.unprivileged: nobody

client pass {
    from: 192.168.1.0/24 port 1-65535 to: 0.0.0.0/0
    #clientmethod: rfc931 # match all identd-verified users that are also in the password file
}

client pass {
    from: 172.0.0.0/8 port 1-65535 to: 0.0.0.0/0
    #clientmethod: rfc931 # match all identd-verified users that are also in the password file
}

client pass {
    from: 127.0.0.0/8 port 1-65535 to: 0.0.0.0/0
}

socks pass {
    from: 0.0.0.0/0 to: 0.0.0.0/0
    protocol: tcp udp
}
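
# Smoke test (illustrative): from a client on one of the allowed networks
# above, proxy a request through the container, e.g.
#   curl --socks5-hostname <container-ip>:1080 https://example.com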
9
openpyn_alpine/sockd.sh
Normal file
@@ -0,0 +1,9 @@
#!/bin/sh
CFGFILE=/etc/sockd.conf
PIDFILE=/tmp/sockd.pid
WORKERS=10

/etc/openvpn/up.sh
ip route add 192.168.1.0/24 via 172.17.0.1 dev eth0
echo -e "nameserver 192.168.1.1\n$(cat /etc/resolv.conf)" > /etc/resolv.conf
sockd -f $CFGFILE -p $PIDFILE -N $WORKERS &
58
openpyn_alpine/update-resolv-conf.sh
Normal file
@@ -0,0 +1,58 @@
#!/bin/bash

# Parses DHCP options from openvpn to update resolv.conf
# To use, set as 'up' and 'down' script in your openvpn *.conf:
# up /etc/openvpn/update-resolv-conf
# down /etc/openvpn/update-resolv-conf
#
# Uses snippets of the resolvconf script by Thomas Hood and Chris Hanson.
# Licensed under the GNU GPL. See /usr/share/common-licenses/GPL.
#
# NordVPN DNS IPs:
#
foreign_option_1='dhcp-option DNS 103.86.99.100'
foreign_option_2='dhcp-option DNS 103.86.96.100'
foreign_option_3='dhcp-option DNS 208.67.222.220' # OpenDNS
#

[ -x /sbin/resolvconf ] || exit 0
[ "$script_type" ] || exit 0
[ "$dev" ] || exit 0

split_into_parts()
{
    part1="$1"
    part2="$2"
    part3="$3"
}

case "$script_type" in
up)
    NMSRVRS=""
    SRCHS=""
    for optionvarname in ${!foreign_option_*} ; do
        option="${!optionvarname}"
        echo "$option"
        split_into_parts $option
        if [ "$part1" = "dhcp-option" ] ; then
            if [ "$part2" = "DNS" ] ; then
                NMSRVRS="${NMSRVRS:+$NMSRVRS }$part3"
            elif [ "$part2" = "DOMAIN" ] ; then
                SRCHS="${SRCHS:+$SRCHS }$part3"
            fi
        fi
    done
    R=""
    [ "$SRCHS" ] && R="search $SRCHS
"
    for NS in $NMSRVRS ; do
        R="${R}nameserver $NS
"
    done
    echo -n "$R" | /sbin/resolvconf -a "${dev}.openvpn"
    /usr/local/bin/sockd.sh
    ;;
down)
    /sbin/resolvconf -d "${dev}.openvpn"
    ;;
esac
12
rsync/Dockerfile
Normal file
@@ -0,0 +1,12 @@
FROM alpine:latest

RUN apk add --no-cache --virtual .run-deps rsync openssh tzdata curl ca-certificates flock bash && rm -rf /var/cache/apk/*
COPY docker-entrypoint.sh /
COPY litestream /usr/local/bin/litestream
RUN chmod +x /docker-entrypoint.sh
RUN chmod +x /usr/local/bin/litestream

ENTRYPOINT ["/docker-entrypoint.sh"]
CMD ["bash"]
2
rsync/README.md
Normal file
@@ -0,0 +1,2 @@
[](http://droneci.service.dc1.consul/sstent/rsync)
Tue Sep 7 13:57:20 EDT 2021
117
rsync/docker-entrypoint.sh
Executable file
@@ -0,0 +1,117 @@
#!/bin/bash

################################################################################
# INIT
################################################################################

# mkdir -p /root/.ssh
# > /root/.ssh/authorized_keys
# chmod go-rwx /root/.ssh/authorized_keys
# sed -i "s/#PasswordAuthentication yes/PasswordAuthentication no/g" /etc/ssh/sshd_config
# sed -i 's/root:!/root:*/' /etc/shadow

# Provide SSH_AUTH_KEY_* via environment variable
for item in `env`; do
    case "$item" in
        SSH_AUTH_KEY*)
            ENVVAR=`echo $item | cut -d \= -f 1`
            printenv $ENVVAR >> /root/.ssh/authorized_keys
            ;;
    esac
done

# Provide CRON_TASK_* via environment variable
> /etc/crontabs/root
for item in `env`; do
    case "$item" in
        CRON_TASK*)
            ENVVAR=`echo $item | cut -d \= -f 1`
            printenv $ENVVAR >> /etc/crontabs/root
            echo "root" > /etc/crontabs/cron.update
            ;;
    esac
done

# Generate host SSH keys
# if [ ! -e /etc/ssh/ssh_host_rsa_key.pub ]; then
#     ssh-keygen -A
# fi

# Generate root SSH key
# if [ ! -e /root/.ssh/id_rsa.pub ]; then
#     ssh-keygen -q -N "" -f /root/.ssh/id_rsa
# fi

################################################################################
# START as SERVER
################################################################################

# if [ "$1" == "server" ]; then
#     AUTH=`cat /root/.ssh/authorized_keys`
#     if [ -z "$AUTH" ]; then
#         echo "=================================================================================="
#         echo "ERROR: No SSH_AUTH_KEY provided, you'll not be able to connect to this container. "
#         echo "=================================================================================="
#         exit 1
#     fi

#     SSH_PARAMS="-D -e -p ${SSH_PORT:-22} $SSH_PARAMS"
#     echo "================================================================================"
#     echo "Running: /usr/sbin/sshd $SSH_PARAMS "
#     echo "================================================================================"

#     exec /usr/sbin/sshd -D $SSH_PARAMS
# fi

# echo "Please add this ssh key to your server /home/user/.ssh/authorized_keys "
# echo "================================================================================"
# echo "`cat /root/.ssh/id_rsa.pub`"
# echo "================================================================================"

################################################################################
# START as CLIENT via crontab
################################################################################

if [ "$1" == "client" ]; then
    exec /usr/sbin/crond -f
fi

if [ "$NOMAD_TASK_NAME" == "init" ]; then
    echo "Starting RSYNC"
    flock -x /locks/${NOMAD_GROUP_NAME}_rsync.lock rsync -avv --exclude=Backups --exclude='*.db*' --exclude='*.db' --exclude='*db-litestream' --exclude='generations' /configbackup/ /config/ --delete-before --delete-excluded
    echo "Ensure no DBs"
    rm -rf /config/*.db*
    rm -rf /config/database.sqlite

    echo "Starting DB Restore"
    /usr/local/bin/litestream restore -config /local/litestream.yml /config/${DB_NAME}
    chown ${PUID:-1000}:${PGID:-1000} /config/*.db*
    if [ -n "$DBCHMOD" ]; then
        # fall back to the group name when DB_NAME is unset
        chmod ${DBCHMOD} /config/${DB_NAME:-${NOMAD_GROUP_NAME}.db};
    fi
    exit 0
fi

if [ "$NOMAD_TASK_NAME" == "finalsync" ]; then
    echo "Starting RSYNC"
    flock -x /locks/${NOMAD_GROUP_NAME}_rsync.lock rsync -avv --exclude=Backups --exclude='*.db*' --exclude='*.db' --exclude='*db-litestream' --exclude='generations' /config/ /configbackup/
    exit 0
fi

if [ "$NOMAD_TASK_NAME" == "db-sync" ]; then
    echo "Starting DBsync: sleep"
    # give time for the app to start properly
    sleep 1m
    echo "Starting DBsync"
    exec flock -x /configbackup/${NOMAD_GROUP_NAME}_litesync.lock /usr/local/bin/litestream replicate -config /local/litestream.yml
fi
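
# /local/litestream.yml is rendered by the Nomad job, not this image; a
# minimal sketch of the assumed shape (path and replica URL are illustrative):
#   dbs:
#     - path: /config/app.db
#       replicas:
#         - url: s3://backups/app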

################################################################################
# Anything else
################################################################################
exec "$@"
BIN
rsync/litestream
Executable file
Binary file not shown.
BIN
rsync/litestream-v0.4.0-alpha.3-linux-arm7-static.tar.gz
Normal file
Binary file not shown.