Mirror of https://github.com/sstent/go-garth.git (synced 2025-12-06 08:01:42 +00:00)

Commit: sync
activities.go (new file, 306 lines)
@@ -0,0 +1,306 @@
package garth

import (
	"bytes"
	"context"
	"encoding/json"
	"io"
	"net/http"
	"net/url"
	"strconv"
	"time"
)

// Activity represents a summary of a Garmin activity
type Activity struct {
	ActivityID int64     `json:"activityId"`
	Name       string    `json:"activityName"`
	Type       string    `json:"activityType"`
	StartTime  time.Time `json:"startTime"`
	Distance   float64   `json:"distance"`
	Duration   float64   `json:"duration"`
	Calories   int       `json:"calories"`
}

// ActivityDetails contains detailed information about an activity
type ActivityDetails struct {
	ActivityID      int64           `json:"activityId"`
	Name            string          `json:"activityName"`
	Description     string          `json:"description"`
	Type            string          `json:"activityType"`
	StartTime       time.Time       `json:"startTime"`
	Distance        float64         `json:"distance"`
	Duration        float64         `json:"duration"`
	Calories        int             `json:"calories"`
	ElevationGain   float64         `json:"elevationGain"`
	ElevationLoss   float64         `json:"elevationLoss"`
	MaxHeartRate    int             `json:"maxHeartRate"`
	AvgHeartRate    int             `json:"avgHeartRate"`
	MaxSpeed        float64         `json:"maxSpeed"`
	AvgSpeed        float64         `json:"avgSpeed"`
	Steps           int             `json:"steps"`
	Stress          int             `json:"stress"`
	TotalSteps      int             `json:"totalSteps"`
	Device          json.RawMessage `json:"device"`
	Location        json.RawMessage `json:"location"`
	Weather         json.RawMessage `json:"weather"`
	HeartRateZones  json.RawMessage `json:"heartRateZones"`
	TrainingEffect  json.RawMessage `json:"trainingEffect"`
	ActivityMetrics json.RawMessage `json:"activityMetrics"`
}

// ActivityListOptions provides filtering options for listing activities
type ActivityListOptions struct {
	Limit        int
	StartDate    time.Time
	EndDate      time.Time
	ActivityType string
	NameContains string
}

// ActivityUpdate represents fields that can be updated on an activity
type ActivityUpdate struct {
	Name        string    `json:"activityName,omitempty"`
	Description string    `json:"description,omitempty"`
	Type        string    `json:"activityType,omitempty"`
	StartTime   time.Time `json:"startTime,omitempty"`
	Distance    float64   `json:"distance,omitempty"`
	Duration    float64   `json:"duration,omitempty"`
}

// ActivityService provides access to activity operations
type ActivityService struct {
	client *APIClient
}

// NewActivityService creates a new ActivityService instance
func NewActivityService(client *APIClient) *ActivityService {
	return &ActivityService{client: client}
}

// List retrieves a list of activities for the current user with optional filters
func (s *ActivityService) List(ctx context.Context, opts ActivityListOptions) ([]Activity, error) {
	params := url.Values{}
	if opts.Limit > 0 {
		params.Set("limit", strconv.Itoa(opts.Limit))
	}
	if !opts.StartDate.IsZero() {
		params.Set("startDate", opts.StartDate.Format(time.RFC3339))
	}
	if !opts.EndDate.IsZero() {
		params.Set("endDate", opts.EndDate.Format(time.RFC3339))
	}
	if opts.ActivityType != "" {
		params.Set("activityType", opts.ActivityType)
	}
	if opts.NameContains != "" {
		params.Set("nameContains", opts.NameContains)
	}

	path := "/activitylist-service/activities/search/activities"
	if len(params) > 0 {
		path += "?" + params.Encode()
	}

	resp, err := s.client.Get(ctx, path)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return nil, &APIError{
			StatusCode: resp.StatusCode,
			Message:    "Failed to get activities list",
		}
	}

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, &APIError{
			StatusCode: http.StatusInternalServerError,
			Message:    "Failed to read activities response",
			Cause:      err,
		}
	}

	var activities []Activity
	if err := json.Unmarshal(body, &activities); err != nil {
		return nil, &APIError{
			StatusCode: http.StatusInternalServerError,
			Message:    "Failed to parse activities data",
			Cause:      err,
		}
	}

	return activities, nil
}

// Create creates a new activity
func (s *ActivityService) Create(ctx context.Context, activity Activity) (*Activity, error) {
	jsonBody, err := json.Marshal(activity)
	if err != nil {
		return nil, &APIError{
			StatusCode: http.StatusInternalServerError,
			Message:    "Failed to marshal activity",
			Cause:      err,
		}
	}

	resp, err := s.client.Post(ctx, "/activity-service/activity", bytes.NewReader(jsonBody))
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusCreated {
		return nil, &APIError{
			StatusCode: resp.StatusCode,
			Message:    "Failed to create activity",
		}
	}

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, &APIError{
			StatusCode: http.StatusInternalServerError,
			Message:    "Failed to read activity response",
			Cause:      err,
		}
	}

	var createdActivity Activity
	if err := json.Unmarshal(body, &createdActivity); err != nil {
		return nil, &APIError{
			StatusCode: http.StatusInternalServerError,
			Message:    "Failed to parse activity data",
			Cause:      err,
		}
	}

	return &createdActivity, nil
}

// Update updates an existing activity
func (s *ActivityService) Update(ctx context.Context, activityID int64, update ActivityUpdate) (*Activity, error) {
	jsonBody, err := json.Marshal(update)
	if err != nil {
		return nil, &APIError{
			StatusCode: http.StatusInternalServerError,
			Message:    "Failed to marshal activity update",
			Cause:      err,
		}
	}

	path := "/activity-service/activity/" + strconv.FormatInt(activityID, 10)
	resp, err := s.client.Put(ctx, path, bytes.NewReader(jsonBody))
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return nil, &APIError{
			StatusCode: resp.StatusCode,
			Message:    "Failed to update activity",
		}
	}

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, &APIError{
			StatusCode: http.StatusInternalServerError,
			Message:    "Failed to read activity response",
			Cause:      err,
		}
	}

	var updatedActivity Activity
	if err := json.Unmarshal(body, &updatedActivity); err != nil {
		return nil, &APIError{
			StatusCode: http.StatusInternalServerError,
			Message:    "Failed to parse activity data",
			Cause:      err,
		}
	}

	return &updatedActivity, nil
}

// Delete deletes an existing activity
func (s *ActivityService) Delete(ctx context.Context, activityID int64) error {
	path := "/activity-service/activity/" + strconv.FormatInt(activityID, 10)
	resp, err := s.client.Delete(ctx, path, nil)
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusNoContent && resp.StatusCode != http.StatusOK {
		return &APIError{
			StatusCode: resp.StatusCode,
			Message:    "Failed to delete activity",
		}
	}

	return nil
}

// Get retrieves detailed information about a specific activity
func (s *ActivityService) Get(ctx context.Context, activityID int64) (*ActivityDetails, error) {
	path := "/activity-service/activity/" + strconv.FormatInt(activityID, 10)

	resp, err := s.client.Get(ctx, path)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return nil, &APIError{
			StatusCode: resp.StatusCode,
			Message:    "Failed to get activity details",
		}
	}

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, &APIError{
			StatusCode: http.StatusInternalServerError,
			Message:    "Failed to read activity response",
			Cause:      err,
		}
	}

	var details ActivityDetails
	if err := json.Unmarshal(body, &details); err != nil {
		return nil, &APIError{
			StatusCode: http.StatusInternalServerError,
			Message:    "Failed to parse activity data",
			Cause:      err,
		}
	}

	return &details, nil
}

// Export exports an activity in the specified format (gpx, tcx, original)
func (s *ActivityService) Export(ctx context.Context, activityID int64, format string) (io.ReadCloser, error) {
	path := "/download-service/export/" + format + "/activity/" + strconv.FormatInt(activityID, 10)

	resp, err := s.client.Get(ctx, path)
	if err != nil {
		return nil, err
	}

	if resp.StatusCode != http.StatusOK {
		defer resp.Body.Close()
		return nil, &APIError{
			StatusCode: resp.StatusCode,
			Message:    "Failed to export activity",
		}
	}

	return resp.Body, nil
}
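A quick usage sketch for the ActivityService added above (not part of the commit). The base URL and the pre-authenticated http.Client are assumptions; in this package they would normally come from the AuthTransport defined in client.go below.

```go
package main

import (
	"context"
	"fmt"
	"io"
	"log"
	"net/http"
	"os"
	"time"

	garth "github.com/sstent/go-garth"
)

func main() {
	// Assumed: an http.Client that already injects Garmin auth headers.
	api := garth.NewAPIClient("https://connectapi.garmin.com", http.DefaultClient)
	activities := garth.NewActivityService(api)

	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()

	// List up to ten runs from the last week.
	list, err := activities.List(ctx, garth.ActivityListOptions{
		Limit:        10,
		StartDate:    time.Now().AddDate(0, 0, -7),
		ActivityType: "running",
	})
	if err != nil {
		log.Fatalf("list failed: %v", err)
	}
	for _, a := range list {
		fmt.Printf("%d  %s  %.1f km\n", a.ActivityID, a.Name, a.Distance/1000)
	}

	// Export the first activity as GPX and write it to disk.
	if len(list) > 0 {
		body, err := activities.Export(ctx, list[0].ActivityID, "gpx")
		if err != nil {
			log.Fatalf("export failed: %v", err)
		}
		defer body.Close()
		f, err := os.Create("activity.gpx")
		if err != nil {
			log.Fatalf("create file: %v", err)
		}
		defer f.Close()
		if _, err := io.Copy(f, body); err != nil {
			log.Fatalf("write file: %v", err)
		}
	}
}
```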
activities_test.go (new file, 90 lines)
@@ -0,0 +1,90 @@
package garth

import (
	"context"
	"net/http"
	"net/http/httptest"
	"testing"
	"time"
)

func TestActivityService_List(t *testing.T) {
	// Create test server
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		w.Write([]byte(`[{
			"activityId": 123456789,
			"activityName": "Morning Run",
			"activityType": "running",
			"startTime": "2025-08-29T06:00:00Z",
			"distance": 5000,
			"duration": 1800,
			"calories": 350
		}]`))
	}))
	defer ts.Close()

	// Create client
	apiClient := NewAPIClient(ts.URL, http.DefaultClient)
	activityService := NewActivityService(apiClient)

	// Test List method with filters
	startDate := time.Date(2025, time.August, 1, 0, 0, 0, 0, time.UTC)
	endDate := time.Date(2025, time.August, 31, 0, 0, 0, 0, time.UTC)

	opts := ActivityListOptions{
		Limit:     10,
		StartDate: startDate,
		EndDate:   endDate,
	}
	activities, err := activityService.List(context.Background(), opts)
	if err != nil {
		t.Fatalf("Unexpected error: %v", err)
	}

	// Verify activity data
	if len(activities) != 1 {
		t.Fatalf("Expected 1 activity, got %d", len(activities))
	}
	if activities[0].Name != "Morning Run" {
		t.Errorf("Expected activity name 'Morning Run', got '%s'", activities[0].Name)
	}
	if activities[0].ActivityID != 123456789 {
		t.Errorf("Expected activity ID 123456789, got %d", activities[0].ActivityID)
	}
}

func TestActivityService_Get(t *testing.T) {
	// Create test server
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		w.Write([]byte(`{
			"activityId": 987654321,
			"activityName": "Evening Ride",
			"activityType": "cycling",
			"startTime": "2025-08-29T18:30:00Z",
			"distance": 25000,
			"duration": 3600,
			"calories": 650
		}`))
	}))
	defer ts.Close()

	// Create client
	apiClient := NewAPIClient(ts.URL, http.DefaultClient)
	activityService := NewActivityService(apiClient)

	// Test Get method
	activity, err := activityService.Get(context.Background(), 987654321)
	if err != nil {
		t.Fatalf("Unexpected error: %v", err)
	}

	// Verify activity details
	if activity.Name != "Evening Ride" {
		t.Errorf("Expected activity name 'Evening Ride', got '%s'", activity.Name)
	}
	if activity.ActivityID != 987654321 {
		t.Errorf("Expected activity ID 987654321, got %d", activity.ActivityID)
	}
}
auth.go (new file, 523 lines)
@@ -0,0 +1,523 @@
package garth

import (
	"context"
	"crypto/tls"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"net/http"
	"net/url"
	"os"
	"regexp"
	"strings"
	"time"
)

// GarthAuthenticator implements the Authenticator interface
type GarthAuthenticator struct {
	client    *http.Client
	tokenURL  string
	storage   TokenStorage
	userAgent string
	// Add CSRF token for session management
	csrfToken string
}

// NewAuthenticator creates a new Garth authentication client
func NewAuthenticator(opts ClientOptions) Authenticator {
	// Create HTTP client with browser-like settings
	transport := &http.Transport{
		TLSClientConfig: &tls.Config{
			MinVersion: tls.VersionTLS12,
		},
		Proxy: http.ProxyFromEnvironment,
	}
	client := &http.Client{
		Timeout:   opts.Timeout,
		Transport: transport,
		CheckRedirect: func(req *http.Request, via []*http.Request) error {
			// Allow up to 10 redirects
			if len(via) >= 10 {
				return errors.New("stopped after 10 redirects")
			}
			return nil
		},
	}

	auth := &GarthAuthenticator{
		client:    client,
		tokenURL:  opts.TokenURL,
		storage:   opts.Storage,
		userAgent: "GarthAuthenticator/1.0",
	}

	// Set authenticator reference in storage if needed
	if setter, ok := opts.Storage.(AuthenticatorSetter); ok {
		setter.SetAuthenticator(auth)
	}

	return auth
}

// Login authenticates with Garmin services
func (a *GarthAuthenticator) Login(ctx context.Context, username, password, mfaToken string) (*Token, error) {
	// Fetch OAuth1 token to initialize session
	if _, err := a.fetchOAuth1Token(ctx); err != nil {
		return nil, fmt.Errorf("failed to get OAuth1 token: %w", err)
	}

	// Get login parameters including CSRF token
	csrf, err := a.fetchLoginParams(ctx)
	if err != nil {
		return nil, fmt.Errorf("failed to get login params: %w", err)
	}

	a.csrfToken = csrf // Store CSRF for session

	// Call authenticate with only the needed parameters
	token, err := a.authenticate(ctx, username, password, mfaToken, csrf)
	if err != nil {
		return nil, err
	}

	// Save token to storage
	if err := a.storage.SaveToken(token); err != nil {
		return nil, fmt.Errorf("failed to save token: %w", err)
	}

	return token, nil
}

// RefreshToken refreshes an expired access token
func (a *GarthAuthenticator) RefreshToken(ctx context.Context, refreshToken string) (*Token, error) {
	if refreshToken == "" {
		return nil, &AuthError{
			StatusCode: http.StatusBadRequest,
			Message:    "Refresh token is required",
			Type:       "invalid_request",
		}
	}

	data := url.Values{}
	data.Set("grant_type", "refresh_token")
	data.Set("refresh_token", refreshToken)

	req, err := http.NewRequestWithContext(ctx, "POST", a.tokenURL, strings.NewReader(data.Encode()))
	if err != nil {
		return nil, &AuthError{
			StatusCode: http.StatusInternalServerError,
			Message:    "Failed to create refresh request",
			Cause:      err,
		}
	}
	req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
	req.Header.Set("User-Agent", a.userAgent)
	req.SetBasicAuth("garmin-connect", "garmin-connect-secret")

	resp, err := a.client.Do(req)
	if err != nil {
		return nil, &AuthError{
			StatusCode: http.StatusBadGateway,
			Message:    "Refresh request failed",
			Cause:      err,
		}
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		body, _ := io.ReadAll(resp.Body)
		return nil, &AuthError{
			StatusCode: resp.StatusCode,
			Message:    fmt.Sprintf("Token refresh failed: %s", body),
			Type:       "token_refresh_failure",
		}
	}

	var token Token
	if err := json.NewDecoder(resp.Body).Decode(&token); err != nil {
		return nil, &AuthError{
			StatusCode: http.StatusInternalServerError,
			Message:    "Failed to parse token response",
			Cause:      err,
		}
	}

	token.Expiry = time.Now().Add(time.Duration(token.ExpiresIn) * time.Second)

	// Persist the refreshed token to storage
	if err := a.storage.SaveToken(&token); err != nil {
		return nil, fmt.Errorf("failed to save refreshed token: %w", err)
	}

	return &token, nil
}

// GetClient returns an authenticated HTTP client
func (a *GarthAuthenticator) GetClient() *http.Client {
	// This would be a client with middleware that automatically
	// adds authentication headers and handles token refresh
	return a.client
}

// fetchLoginParams retrieves required tokens from Garmin login page
func (a *GarthAuthenticator) fetchLoginParams(ctx context.Context) (csrf string, err error) {
	// Step 1: Set cookies by accessing the embed endpoint
	embedURL := "https://sso.garmin.com/sso/embed?" + url.Values{
		"id":          []string{"gauth-widget"},
		"embedWidget": []string{"true"},
		"gauthHost":   []string{"https://sso.garmin.com/sso"},
	}.Encode()

	embedReq, err := http.NewRequestWithContext(ctx, "GET", embedURL, nil)
	if err != nil {
		return "", fmt.Errorf("failed to create embed request: %w", err)
	}
	embedReq.Header = a.getEnhancedBrowserHeaders(embedURL)

	_, err = a.client.Do(embedReq)
	if err != nil {
		return "", fmt.Errorf("embed request failed: %w", err)
	}

	// Step 2: Get login parameters including CSRF token
	loginURL := a.buildLoginURL()
	req, err := http.NewRequestWithContext(ctx, "GET", loginURL, nil)
	if err != nil {
		return "", fmt.Errorf("failed to create login page request: %w", err)
	}

	req.Header = a.getEnhancedBrowserHeaders(loginURL)

	resp, err := a.client.Do(req)
	if err != nil {
		return "", fmt.Errorf("login page request failed: %w", err)
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return "", fmt.Errorf("failed to read login page response: %w", err)
	}

	bodyStr := string(body)

	// Use our robust CSRF token extractor with multiple patterns
	csrf, err = getCSRFToken(bodyStr)
	if err != nil {
		// Write HTML to file for debugging
		filename := fmt.Sprintf("login_page_%d.html", time.Now().Unix())
		if writeErr := os.WriteFile(filename, body, 0644); writeErr == nil {
			return "", fmt.Errorf("csrf param not found: %w (HTML saved to %s)", err, filename)
		}
		return "", fmt.Errorf("csrf param not found: %w (failed to save HTML for debugging)", err)
	}

	return csrf, nil
}

// buildLoginURL constructs the complete login URL with parameters
func (a *GarthAuthenticator) buildLoginURL() string {
	// Match Python implementation exactly (order and values)
	params := url.Values{}
	params.Set("id", "gauth-widget")
	params.Set("embedWidget", "true")
	params.Set("gauthHost", "https://sso.garmin.com/sso/embed")
	params.Set("service", "https://sso.garmin.com/sso/embed")
	params.Set("source", "https://sso.garmin.com/sso/embed")
	params.Set("redirectAfterAccountLoginUrl", "https://sso.garmin.com/sso/embed")
	params.Set("redirectAfterAccountCreationUrl", "https://sso.garmin.com/sso/embed")
	params.Set("consumeServiceTicket", "false")      // Added from Python implementation
	params.Set("generateExtraServiceTicket", "true") // Added from Python implementation
	params.Set("clientId", "GarminConnect")
	params.Set("locale", "en_US")

	return "https://sso.garmin.com/sso/signin?" + params.Encode()
}

// fetchOAuth1Token retrieves initial OAuth1 token for session
func (a *GarthAuthenticator) fetchOAuth1Token(ctx context.Context) (string, error) {
	oauth1URL := "https://connect.garmin.com/oauthConfirm"

	req, err := http.NewRequestWithContext(ctx, "GET", oauth1URL, nil)
	if err != nil {
		return "", fmt.Errorf("failed to create OAuth1 request: %w", err)
	}

	req.Header.Set("User-Agent", a.userAgent)

	resp, err := a.client.Do(req)
	if err != nil {
		return "", fmt.Errorf("OAuth1 request failed: %w", err)
	}
	defer resp.Body.Close()

	// We don't actually need the token value since cookies are handled automatically
	// Just need to ensure the request succeeds to set session cookies
	return "", nil
}

// authenticate performs the authentication flow
func (a *GarthAuthenticator) authenticate(ctx context.Context, username, password, mfaToken, csrf string) (*Token, error) {
	data := url.Values{}
	data.Set("username", username)
	data.Set("password", password)
	data.Set("embed", "true")
	data.Set("rememberme", "on")
	data.Set("_csrf", csrf)
	data.Set("_eventId", "submit")
	data.Set("geolocation", "")
	data.Set("clientId", "GarminConnect")
	data.Set("service", "https://connect.garmin.com")
	data.Set("webhost", "https://connect.garmin.com")
	data.Set("fromPage", "oauth")
	data.Set("locale", "en_US")
	data.Set("id", "gauth-widget")
	data.Set("redirectAfterAccountLoginUrl", "https://connect.garmin.com/oauthConfirm")
	data.Set("redirectAfterAccountCreationUrl", "https://connect.garmin.com/oauthConfirm")

	loginURL := "https://sso.garmin.com/sso/signin"
	req, err := http.NewRequestWithContext(ctx, "POST", loginURL, strings.NewReader(data.Encode()))
	if err != nil {
		return nil, fmt.Errorf("failed to create SSO request: %w", err)
	}

	req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
	req.Header.Set("Accept", "application/json")
	req.Header.Set("User-Agent", a.userAgent)

	resp, err := a.client.Do(req)
	if err != nil {
		return nil, fmt.Errorf("SSO request failed: %w", err)
	}
	defer resp.Body.Close()

	if resp.StatusCode == http.StatusPreconditionFailed {
		return a.handleMFA(ctx, username, password, mfaToken, "")
	}

	if resp.StatusCode != http.StatusOK {
		body, _ := io.ReadAll(resp.Body)
		return nil, fmt.Errorf("authentication failed with status: %d, response: %s", resp.StatusCode, body)
	}

	var authResponse struct {
		Ticket string `json:"ticket"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&authResponse); err != nil {
		return nil, fmt.Errorf("failed to parse SSO response: %w", err)
	}

	if authResponse.Ticket == "" {
		return nil, errors.New("empty ticket in SSO response")
	}

	return a.exchangeTicketForToken(ctx, authResponse.Ticket)
}

// exchangeTicketForToken exchanges an SSO ticket for an access token
func (a *GarthAuthenticator) exchangeTicketForToken(ctx context.Context, ticket string) (*Token, error) {
	data := url.Values{}
	data.Set("grant_type", "authorization_code")
	data.Set("code", ticket)
	data.Set("redirect_uri", "https://connect.garmin.com")

	req, err := http.NewRequestWithContext(ctx, "POST", a.tokenURL, strings.NewReader(data.Encode()))
	if err != nil {
		return nil, fmt.Errorf("failed to create token request: %w", err)
	}
	req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
	req.Header.Set("User-Agent", a.userAgent)
	req.SetBasicAuth("garmin-connect", "garmin-connect-secret")

	resp, err := a.client.Do(req)
	if err != nil {
		return nil, fmt.Errorf("token exchange failed: %w", err)
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		body, _ := io.ReadAll(resp.Body)
		return nil, fmt.Errorf("token exchange failed: %d %s", resp.StatusCode, body)
	}

	var token Token
	if err := json.NewDecoder(resp.Body).Decode(&token); err != nil {
		return nil, fmt.Errorf("failed to parse token response: %w", err)
	}

	token.Expiry = time.Now().Add(time.Duration(token.ExpiresIn) * time.Second)
	return &token, nil
}

// handleMFA processes multi-factor authentication
func (a *GarthAuthenticator) handleMFA(ctx context.Context, username, password, mfaToken, responseBody string) (*Token, error) {
	// Extract CSRF token from response body
	csrfToken, err := extractParam(`name="_csrf"\s+value="([^"]+)"`, responseBody)
	if err != nil {
		return nil, &AuthError{
			StatusCode: http.StatusPreconditionFailed,
			Message:    "MFA CSRF token not found",
			Cause:      err,
		}
	}

	// Prepare MFA request
	data := url.Values{}
	data.Set("username", username)
	data.Set("password", password)
	data.Set("mfaToken", mfaToken)
	data.Set("embed", "true")
	data.Set("rememberme", "on")
	data.Set("_csrf", csrfToken)
	data.Set("_eventId", "submit")
	data.Set("geolocation", "")
	data.Set("clientId", "GarminConnect")
	data.Set("service", "https://connect.garmin.com")
	data.Set("webhost", "https://connect.garmin.com")
	data.Set("fromPage", "oauth")
	data.Set("locale", "en_US")
	data.Set("id", "gauth-widget")
	data.Set("redirectAfterAccountLoginUrl", "https://connect.garmin.com/oauthConfirm")
	data.Set("redirectAfterAccountCreationUrl", "https://connect.garmin.com/oauthConfirm")

	req, err := http.NewRequestWithContext(ctx, "POST", "https://sso.garmin.com/sso/signin", strings.NewReader(data.Encode()))
	if err != nil {
		return nil, &AuthError{
			StatusCode: http.StatusInternalServerError,
			Message:    "Failed to create MFA request",
			Cause:      err,
		}
	}

	req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
	req.Header.Set("Accept", "application/json")
	req.Header.Set("User-Agent", a.userAgent)

	resp, err := a.client.Do(req)
	if err != nil {
		return nil, &AuthError{
			StatusCode: http.StatusBadGateway,
			Message:    "MFA request failed",
			Cause:      err,
		}
	}
	defer resp.Body.Close()

	// Handle MFA response
	if resp.StatusCode != http.StatusOK {
		body, _ := io.ReadAll(resp.Body)
		return nil, &AuthError{
			StatusCode: resp.StatusCode,
			Message:    fmt.Sprintf("MFA failed: %s", body),
			Type:       "mfa_failure",
		}
	}

	// Parse MFA response
	var mfaResponse struct {
		Ticket string `json:"ticket"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&mfaResponse); err != nil {
		return nil, &AuthError{
			StatusCode: http.StatusInternalServerError,
			Message:    "Failed to parse MFA response",
			Cause:      err,
		}
	}

	if mfaResponse.Ticket == "" {
		return nil, &AuthError{
			StatusCode: http.StatusUnauthorized,
			Message:    "Invalid MFA response - ticket missing",
			Type:       "invalid_mfa_response",
		}
	}

	return a.exchangeTicketForToken(ctx, mfaResponse.Ticket)
}

// extractParam helper to extract regex pattern
func extractParam(pattern, body string) (string, error) {
	re := regexp.MustCompile(pattern)
	matches := re.FindStringSubmatch(body)
	if len(matches) < 2 {
		return "", fmt.Errorf("pattern not found: %s", pattern)
	}
	return matches[1], nil
}

// getCSRFToken extracts the CSRF token from HTML using multiple patterns
func getCSRFToken(html string) (string, error) {
	// Try different patterns in order of likelihood
	patterns := []string{
		`"csrfToken":"([^"]+)"`,                       // JSON embedded pattern
		`name=["']_csrf["']\s+value=["']([^"']+)["']`, // Flexible quotes
		`value=["']([^"']+)["']\s+name=["']_csrf["']`, // Reversed attributes
		`name="_csrf"\s+value="([^"]+)"`,              // Standard pattern
		`id="__csrf"\s+value="([^"]+)"`,               // Alternative ID pattern
	}

	for _, pattern := range patterns {
		token, err := extractParam(pattern, html)
		if err == nil {
			return token, nil
		}
	}

	// Try to extract from JSON structure
	token, err := extractFromJSON(html)
	if err == nil {
		return token, nil
	}

	return "", errors.New("all CSRF extraction methods failed")
}

// extractFromJSON tries to find the CSRF token in a JSON structure
func extractFromJSON(html string) (string, error) {
	// Pattern to find the JSON config in script tags
	re := regexp.MustCompile(`window\.__INITIAL_CONFIG__ = (\{.*?\});`)
	matches := re.FindStringSubmatch(html)
	if len(matches) < 2 {
		return "", errors.New("JSON config not found")
	}

	// Parse the JSON
	var config struct {
		CSRFToken string `json:"csrfToken"`
	}
	if err := json.Unmarshal([]byte(matches[1]), &config); err != nil {
		return "", fmt.Errorf("failed to parse JSON config: %w", err)
	}

	if config.CSRFToken == "" {
		return "", errors.New("csrfToken not found in JSON config")
	}

	return config.CSRFToken, nil
}

// getEnhancedBrowserHeaders returns browser-like headers including Referer and Origin
func (a *GarthAuthenticator) getEnhancedBrowserHeaders(referrer string) http.Header {
	u, _ := url.Parse(referrer)
	origin := fmt.Sprintf("%s://%s", u.Scheme, u.Host)

	return http.Header{
		"User-Agent":                {a.userAgent},
		"Accept":                    {"text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7"},
		"Accept-Language":           {"en-US,en;q=0.9"},
		"Accept-Encoding":           {"gzip, deflate, br"},
		"Connection":                {"keep-alive"},
		"Cache-Control":             {"max-age=0"},
		"Origin":                    {origin},
		"Referer":                   {referrer},
		"Sec-Fetch-Site":            {"same-origin"},
		"Sec-Fetch-Mode":            {"navigate"},
		"Sec-Fetch-User":            {"?1"},
		"Sec-Fetch-Dest":            {"document"},
		"DNT":                       {"1"},
		"Upgrade-Insecure-Requests": {"1"},
	}
}
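A hypothetical internal test (not part of the commit) illustrating the kinds of markup the multi-pattern getCSRFToken extractor above is written to handle; the HTML snippets are made up for illustration.

```go
package garth

import "testing"

func TestGetCSRFTokenPatterns(t *testing.T) {
	cases := []string{
		`<input type="hidden" name="_csrf" value="abc123"/>`, // standard form field
		`<script>var cfg = {"csrfToken":"abc123"};</script>`, // JSON embedded in a script tag
		`<input value='abc123' name='_csrf'/>`,               // reversed attributes, single quotes
	}
	for _, html := range cases {
		token, err := getCSRFToken(html)
		if err != nil {
			t.Fatalf("extraction failed for %q: %v", html, err)
		}
		if token != "abc123" {
			t.Errorf("expected abc123, got %q", token)
		}
	}
}
```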
auth_test.go (new file, 61 lines)
@@ -0,0 +1,61 @@
package garth

import (
	"context"
	"log"
	"os"
	"testing"
	"time"

	"github.com/joho/godotenv"
)

func TestRealAuthentication(t *testing.T) {
	// Load environment variables from .env file
	if err := godotenv.Load(); err != nil {
		t.Fatalf("Error loading .env file: %v", err)
	}

	// Get credentials from environment
	username := os.Getenv("GARMIN_USERNAME")
	password := os.Getenv("GARMIN_PASSWORD")
	if username == "" || password == "" {
		t.Fatal("GARMIN_USERNAME or GARMIN_PASSWORD not set in .env")
	}

	// Add timeout to prevent hanging
	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()

	// Create token storage (using memory storage for this test)
	storage := NewMemoryStorage()

	// Create authenticator
	auth := NewAuthenticator(ClientOptions{
		Storage:  storage,
		TokenURL: "https://connectapi.garmin.com/oauth-service/oauth/token",
		Timeout:  30 * time.Second,
	})

	// Perform authentication with timeout context
	token, err := auth.Login(ctx, username, password, "")
	if err != nil {
		t.Fatalf("Authentication failed: %v", err)
	}

	log.Printf("Authentication successful! Token details:")
	log.Printf("Access Token: %s", token.AccessToken)
	log.Printf("Expires: %s", token.Expiry.Format(time.RFC3339))
	log.Printf("Refresh Token: %s", token.RefreshToken)

	// Verify token storage
	storedToken, err := storage.GetToken()
	if err != nil {
		t.Fatalf("Token storage verification failed: %v", err)
	}
	if storedToken.AccessToken != token.AccessToken {
		t.Fatal("Stored token doesn't match authenticated token")
	}

	log.Println("Token storage verification successful")
}
client.go (new file, 144 lines)
@@ -0,0 +1,144 @@
package garth

import (
	"context"
	"net/http"
	"sync"
	"time"
)

// AuthTransport implements http.RoundTripper to inject authentication headers
type AuthTransport struct {
	base      http.RoundTripper
	auth      *GarthAuthenticator
	storage   TokenStorage
	userAgent string
	mutex     sync.Mutex // Protects refreshing token
}

// NewAuthTransport creates a new authenticated transport
func NewAuthTransport(auth *GarthAuthenticator, storage TokenStorage, base http.RoundTripper) *AuthTransport {
	if base == nil {
		base = http.DefaultTransport
	}

	return &AuthTransport{
		base:      base,
		auth:      auth,
		storage:   storage,
		userAgent: "GarthClient/1.0",
	}
}

// RoundTrip executes a single HTTP transaction with authentication
func (t *AuthTransport) RoundTrip(req *http.Request) (*http.Response, error) {
	// Clone request to avoid modifying the original
	req = cloneRequest(req)

	// Get current token
	token, err := t.storage.GetToken()
	if err != nil {
		return nil, &AuthError{
			StatusCode: http.StatusUnauthorized,
			Message:    "Token not available",
			Cause:      err,
		}
	}

	// Refresh token if expired
	if token.IsExpired() {
		newToken, err := t.refreshToken(req.Context(), token)
		if err != nil {
			return nil, err
		}
		token = newToken
	}

	// Add Authorization header
	req.Header.Set("Authorization", "Bearer "+token.AccessToken)
	req.Header.Set("User-Agent", t.userAgent)

	// Execute request with retry logic
	var resp *http.Response
	maxRetries := 3
	backoff := 200 * time.Millisecond // Initial backoff duration

	for attempt := 0; attempt < maxRetries; attempt++ {
		resp, err = t.base.RoundTrip(req)
		if err != nil {
			// Network error, retry with backoff
			time.Sleep(backoff)
			backoff *= 2 // Exponential backoff
			continue
		}

		// Handle token expiration during request (e.g. token revoked)
		if resp.StatusCode == http.StatusUnauthorized {
			resp.Body.Close()
			// Refresh token and update request
			token, err = t.refreshToken(req.Context(), token)
			if err != nil {
				return nil, err
			}
			req.Header.Set("Authorization", "Bearer "+token.AccessToken)
			continue
		}

		// Retry server errors (5xx) and rate limits (429)
		if resp.StatusCode >= 500 && resp.StatusCode < 600 || resp.StatusCode == http.StatusTooManyRequests {
			resp.Body.Close()
			time.Sleep(backoff)
			backoff *= 2
			continue
		}

		// Successful response
		return resp, nil
	}

	// Return last error or response if max retries exceeded
	if err != nil {
		return nil, err
	}
	return resp, nil
}

// refreshToken handles token refresh with mutex protection
func (t *AuthTransport) refreshToken(ctx context.Context, token *Token) (*Token, error) {
	t.mutex.Lock()
	defer t.mutex.Unlock()

	// Check again in case another goroutine refreshed while waiting
	currentToken, err := t.storage.GetToken()
	if err != nil {
		return nil, err
	}
	if !currentToken.IsExpired() {
		return currentToken, nil
	}

	// Perform refresh
	newToken, err := t.auth.RefreshToken(ctx, token.RefreshToken)
	if err != nil {
		return nil, err
	}

	// Save new token
	if err := t.storage.SaveToken(newToken); err != nil {
		return nil, err
	}

	return newToken, nil
}

// cloneRequest returns a clone of the provided HTTP request
func cloneRequest(r *http.Request) *http.Request {
	// Shallow copy of the struct
	clone := *r
	// Deep copy of the headers
	clone.Header = make(http.Header, len(r.Header))
	for k, v := range r.Header {
		clone.Header[k] = v
	}
	return &clone
}
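A hedged wiring sketch (not in the commit) showing how AuthTransport, the token storage, and APIClient could be composed. The base URL is illustrative, and the type assertion reflects an assumption about the intended wiring, since NewAuthenticator currently returns the Authenticator interface while NewAuthTransport wants the concrete *GarthAuthenticator.

```go
package main

import (
	"net/http"
	"time"

	garth "github.com/sstent/go-garth"
)

func main() {
	storage := garth.NewFileStorage("/tmp/garth-token.json")
	auth := garth.NewAuthenticator(garth.ClientOptions{
		Storage:  storage,
		TokenURL: "https://connectapi.garmin.com/oauth-service/oauth/token",
		Timeout:  30 * time.Second,
	})

	// Assumed wiring: assert back to the concrete authenticator type.
	ga, ok := auth.(*garth.GarthAuthenticator)
	if !ok {
		panic("unexpected authenticator type")
	}

	// Every request through this client gets a Bearer token plus the
	// refresh/retry handling implemented in AuthTransport.RoundTrip.
	httpClient := &http.Client{
		Transport: garth.NewAuthTransport(ga, storage, nil),
		Timeout:   30 * time.Second,
	}

	api := garth.NewAPIClient("https://connectapi.garmin.com", httpClient)
	_ = api // pass api into the service constructors (e.g. NewActivityService)
}
```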
cmd/debug_auth/main.go (new file, 57 lines)
@@ -0,0 +1,57 @@
package main

import (
	"context"
	"fmt"
	"log"
	"os"
	"time"

	"github.com/joho/godotenv"
	"github.com/sstent/go-garth"
)

func main() {
	// Load environment variables from project root
	projectRoot := "../.."
	if err := godotenv.Load(projectRoot + "/.env"); err != nil {
		log.Fatalf("Error loading .env file: %v", err)
	}

	// Get credentials
	username := os.Getenv("GARMIN_USERNAME")
	password := os.Getenv("GARMIN_PASSWORD")
	if username == "" || password == "" {
		log.Fatal("GARMIN_USERNAME or GARMIN_PASSWORD not set in .env")
	}

	// Create storage and authenticator
	storage := garth.NewMemoryStorage()
	auth := garth.NewAuthenticator(garth.ClientOptions{
		Storage:  storage,
		TokenURL: "https://connectapi.garmin.com/oauth-service/oauth/token",
		Timeout:  120 * time.Second,
	})

	// Perform authentication
	fmt.Println("Starting authentication...")
	token, err := auth.Login(context.Background(), username, password, "")
	if err != nil {
		log.Fatalf("Authentication failed: %v", err)
	}

	fmt.Println("\nAuthentication successful! Token details:")
	fmt.Printf("Access Token: %s\n", token.AccessToken)
	fmt.Printf("Expires: %s\n", token.Expiry.Format("2006-01-02 15:04:05"))
	fmt.Printf("Refresh Token: %s\n", token.RefreshToken)

	// Verify token storage
	storedToken, err := storage.GetToken()
	if err != nil {
		log.Fatalf("Token storage verification failed: %v", err)
	}
	if storedToken.AccessToken != token.AccessToken {
		log.Fatal("Stored token doesn't match authenticated token")
	}

	fmt.Println("Token storage verification successful")
}
connect.go (new file, 229 lines)
@@ -0,0 +1,229 @@
package garth

import (
	"bytes"
	"context"
	"encoding/json"
	"io"
	"net/http"
	"net/url"
	"time"
)

// APIClient manages API requests to Garmin Connect
type APIClient struct {
	baseURL    string
	httpClient *http.Client
	rateLimit  time.Duration
	logger     ErrorLogger // Optional error logger
}

// NewAPIClient creates a new API client instance
func NewAPIClient(baseURL string, httpClient *http.Client) *APIClient {
	return &APIClient{
		baseURL:    baseURL,
		httpClient: httpClient,
		rateLimit:  500 * time.Millisecond, // Default rate limit
	}
}

// SetRateLimit configures request rate limiting
func (c *APIClient) SetRateLimit(limit time.Duration) {
	c.rateLimit = limit
}

// SetRequestsPerSecond configures the maximum number of requests per second
func (c *APIClient) SetRequestsPerSecond(rate float64) {
	interval := time.Duration(float64(time.Second) / rate)
	c.SetRateLimit(interval)
}

// Get executes a GET request
func (c *APIClient) Get(ctx context.Context, path string) (*http.Response, error) {
	return c.request(ctx, http.MethodGet, path, nil)
}

// Post executes a POST request
func (c *APIClient) Post(ctx context.Context, path string, body io.Reader) (*http.Response, error) {
	return c.request(ctx, http.MethodPost, path, body)
}

// Put executes a PUT request
func (c *APIClient) Put(ctx context.Context, path string, body io.Reader) (*http.Response, error) {
	return c.request(ctx, http.MethodPut, path, body)
}

// Delete executes a DELETE request
func (c *APIClient) Delete(ctx context.Context, path string, body io.Reader) (*http.Response, error) {
	return c.request(ctx, http.MethodDelete, path, body)
}

// handleResponse handles API response and error decoding
func handleResponse(resp *http.Response, result interface{}) error {
	defer resp.Body.Close()

	if resp.StatusCode >= 400 {
		body, _ := io.ReadAll(resp.Body)
		return &APIError{
			StatusCode: resp.StatusCode,
			Message:    string(body),
		}
	}

	if result != nil {
		if err := json.NewDecoder(resp.Body).Decode(result); err != nil {
			return &APIError{
				StatusCode: http.StatusInternalServerError,
				Message:    "Failed to parse response",
				Cause:      err,
			}
		}
	}
	return nil
}

// GetJSON executes a GET request and decodes the JSON response
func (c *APIClient) GetJSON(ctx context.Context, path string, result interface{}) error {
	resp, err := c.Get(ctx, path)
	if err != nil {
		return err
	}
	return handleResponse(resp, result)
}

// PostJSON executes a POST request with JSON body and decodes the JSON response
func (c *APIClient) PostJSON(ctx context.Context, path string, body interface{}, result interface{}) error {
	jsonBody, err := json.Marshal(body)
	if err != nil {
		return &APIError{
			StatusCode: http.StatusInternalServerError,
			Message:    "Failed to marshal request body",
			Cause:      err,
		}
	}

	resp, err := c.Post(ctx, path, bytes.NewReader(jsonBody))
	if err != nil {
		return err
	}
	return handleResponse(resp, result)
}

// PutJSON executes a PUT request with JSON body and decodes the JSON response
func (c *APIClient) PutJSON(ctx context.Context, path string, body interface{}, result interface{}) error {
	jsonBody, err := json.Marshal(body)
	if err != nil {
		return &APIError{
			StatusCode: http.StatusInternalServerError,
			Message:    "Failed to marshal request body",
			Cause:      err,
		}
	}

	resp, err := c.Put(ctx, path, bytes.NewReader(jsonBody))
	if err != nil {
		return err
	}
	return handleResponse(resp, result)
}

// DeleteJSON executes a DELETE request and decodes the JSON response
func (c *APIClient) DeleteJSON(ctx context.Context, path string, result interface{}) error {
	resp, err := c.Delete(ctx, path, nil)
	if err != nil {
		return err
	}
	return handleResponse(resp, result)
}

// ErrorLogger defines an interface for logging errors
type ErrorLogger interface {
	Errorf(format string, args ...interface{})
}

// SetLogger sets the error logger for the API client
func (c *APIClient) SetLogger(logger ErrorLogger) {
	c.logger = logger
}

func (c *APIClient) request(ctx context.Context, method, path string, body io.Reader) (*http.Response, error) {
	// Simple fixed-delay rate limiting: sleep for the configured interval before each request
	if c.rateLimit > 0 {
		time.Sleep(c.rateLimit)
	}

	var resp *http.Response
	var err error
	var req *http.Request
	maxRetries := 3
	backoff := 500 * time.Millisecond

	for i := 0; i < maxRetries; i++ {
		var createErr error
		req, createErr = http.NewRequestWithContext(ctx, method, c.baseURL+path, body)
		if createErr != nil {
			return nil, &APIError{
				StatusCode: http.StatusInternalServerError,
				Message:    "Failed to create request",
				Cause:      createErr,
			}
		}

		// Set common headers
		req.Header.Set("Content-Type", "application/json")
		req.Header.Set("Accept", "application/json")

		resp, err = c.httpClient.Do(req)

		// Retry only on network errors or server-side issues
		if err != nil || (resp != nil && resp.StatusCode >= 500) {
			if i < maxRetries-1 {
				// Exponential backoff
				time.Sleep(backoff)
				backoff = time.Duration(float64(backoff) * 2.5)
				continue
			}
		}
		break
	}

	// Extract query parameters for error context
	var queryValues url.Values
	if req != nil {
		queryValues = req.URL.Query()
	}

	if err != nil {
		apiErr := &APIError{
			StatusCode: http.StatusBadGateway,
			Message:    "Request failed after retries",
			Cause:      err,
		}
		reqErr := NewRequestError(method, req.URL.String(), queryValues, http.StatusBadGateway, apiErr)

		// Log error if logger is configured
		if c.logger != nil {
			c.logger.Errorf("API request failed: %v, Method: %s, URL: %s", reqErr, method, req.URL.String())
		}

		return nil, reqErr
	}

	if resp.StatusCode >= 400 {
		apiErr := &APIError{
			StatusCode: resp.StatusCode,
			Message:    "API request failed",
		}
		reqErr := NewRequestError(method, req.URL.String(), queryValues, resp.StatusCode, apiErr)

		// Log error if logger is configured
		if c.logger != nil {
			c.logger.Errorf("API request failed with status %d: %s, Method: %s, URL: %s",
				resp.StatusCode, apiErr.Message, method, req.URL.String())
		}

		return nil, reqErr
	}

	return resp, nil
}
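A hedged sketch (not in the commit) of the JSON convenience helpers above. The endpoint path and the response shape are illustrative assumptions, not part of this repository.

```go
package main

import (
	"context"
	"fmt"
	"log"
	"net/http"

	garth "github.com/sstent/go-garth"
)

func main() {
	api := garth.NewAPIClient("https://connectapi.garmin.com", http.DefaultClient)
	api.SetRequestsPerSecond(1) // at most one request per second

	// GetJSON issues the GET, checks the status, and decodes into result.
	var profile struct {
		DisplayName string `json:"displayName"`
	}
	err := api.GetJSON(context.Background(), "/userprofile-service/socialProfile", &profile)
	if err != nil {
		if garth.IsAuthenticationError(err) {
			log.Fatal("token missing or expired")
		}
		log.Fatalf("request failed: %v", err)
	}
	fmt.Println("Hello,", profile.DisplayName)
}
```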
connect_test.go (new file, 92 lines)
@@ -0,0 +1,92 @@
package garth

import (
	"context"
	"net/http"
	"net/http/httptest"
	"testing"
	"time"
)

func TestAPIClient_Get(t *testing.T) {
	// Create a test server
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusOK)
		w.Write([]byte("OK"))
	}))
	defer ts.Close()

	// Create client
	client := NewAPIClient(ts.URL, http.DefaultClient)

	// Test successful request
	resp, err := client.Get(context.Background(), "/test")
	if err != nil {
		t.Fatalf("Unexpected error: %v", err)
	}
	if resp.StatusCode != http.StatusOK {
		t.Errorf("Expected status OK, got %d", resp.StatusCode)
	}
}

func TestAPIClient_Retry(t *testing.T) {
	retryCount := 0
	// Create a test server that fails first two requests
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		retryCount++
		if retryCount < 3 {
			w.WriteHeader(http.StatusInternalServerError)
			return
		}
		w.WriteHeader(http.StatusOK)
	}))
	defer ts.Close()

	// Create client with faster backoff for testing
	client := NewAPIClient(ts.URL, http.DefaultClient)
	client.SetRateLimit(10 * time.Millisecond)

	// Test retry logic
	resp, err := client.Get(context.Background(), "/retry-test")
	if err != nil {
		t.Fatalf("Unexpected error: %v", err)
	}
	if resp.StatusCode != http.StatusOK {
		t.Errorf("Expected status OK after retries, got %d", resp.StatusCode)
	}
	if retryCount != 3 {
		t.Errorf("Expected 3 attempts, got %d", retryCount)
	}
}

func TestAPIClient_ErrorHandling(t *testing.T) {
	// Create a test server that returns 404
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusNotFound)
	}))
	defer ts.Close()

	// Create client
	client := NewAPIClient(ts.URL, http.DefaultClient)

	// Test error handling
	_, err := client.Get(context.Background(), "/not-found")
	if err == nil {
		t.Fatal("Expected error but got none")
	}

	// Check for RequestError wrapper
	reqErr, ok := err.(*RequestError)
	if !ok {
		t.Fatalf("Expected RequestError, got %T", err)
	}

	// Check the wrapped APIError
	apiErr, ok := reqErr.GetCause().(*APIError)
	if !ok {
		t.Fatalf("Expected APIError inside RequestError, got %T", reqErr.GetCause())
	}
	if apiErr.StatusCode != http.StatusNotFound {
		t.Errorf("Expected 404 status, got %d", apiErr.StatusCode)
	}
}
errors.go (new file, 82 lines)
@@ -0,0 +1,82 @@
package garth

import (
	"net/http"
	"net/url"
)

// RequestError captures request context for enhanced error reporting
type RequestError struct {
	Method     string
	URL        string
	Query      url.Values
	StatusCode int
	Cause      error
}

// NewRequestError creates a new RequestError instance
func NewRequestError(method, url string, query url.Values, statusCode int, cause error) *RequestError {
	return &RequestError{
		Method:     method,
		URL:        url,
		Query:      query,
		StatusCode: statusCode,
		Cause:      cause,
	}
}

// Error implements the error interface for RequestError
func (e *RequestError) Error() string {
	return e.Cause.Error()
}

// GetStatusCode returns the HTTP status code
func (e *RequestError) GetStatusCode() int {
	return e.StatusCode
}

// GetType returns the error category
func (e *RequestError) GetType() string {
	return "request_error"
}

// GetCause returns the underlying error
func (e *RequestError) GetCause() error {
	return e.Cause
}

// Unwrap returns the underlying error
func (e *RequestError) Unwrap() error {
	return e.Cause
}

// RequestContext returns the request context details
func (e *RequestError) RequestContext() (method, url string, query url.Values) {
	return e.Method, e.URL, e.Query
}

// Helper functions for common error checks

// IsNotFoundError checks if an error is a not found error
func IsNotFoundError(err error) bool {
	if e, ok := err.(Error); ok {
		return e.GetStatusCode() == http.StatusNotFound
	}
	return false
}

// IsAuthenticationError checks if an error is an authentication error
func IsAuthenticationError(err error) bool {
	if e, ok := err.(Error); ok {
		return e.GetStatusCode() == http.StatusUnauthorized
	}
	return false
}

// IsRateLimitError checks if an error is a rate limit error
func IsRateLimitError(err error) bool {
	if e, ok := err.(Error); ok {
		return e.GetStatusCode() == http.StatusTooManyRequests
	}
	return false
}
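A hedged sketch (not in the commit) of how callers might inspect these errors; because RequestError implements Unwrap, the helpers above combine with the standard errors package. It assumes RequestError satisfies the package's Error interface (defined outside this diff) and uses an illustrative endpoint path.

```go
package main

import (
	"context"
	"errors"
	"fmt"
	"net/http"

	garth "github.com/sstent/go-garth"
)

func main() {
	api := garth.NewAPIClient("https://connectapi.garmin.com", http.DefaultClient)

	_, err := api.Get(context.Background(), "/activity-service/activity/0")
	if err == nil {
		return
	}

	switch {
	case garth.IsNotFoundError(err):
		fmt.Println("no such activity")
	case garth.IsRateLimitError(err):
		fmt.Println("slow down and retry later")
	default:
		// Fall back to the request context carried by RequestError.
		var reqErr *garth.RequestError
		if errors.As(err, &reqErr) {
			method, url, query := reqErr.RequestContext()
			fmt.Printf("%s %s?%s failed with %d\n", method, url, query.Encode(), reqErr.GetStatusCode())
		}
	}
}
```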
examples/workouts_example.go (new file, 99 lines)
@@ -0,0 +1,99 @@
package main

import (
	"fmt"
	"os"
	"time"

	"github.com/sstent/go-garth"
)

func main() {
	// Create a new client
	client := garth.New()
	_ = client // Prevent unused variable error until the API calls below are uncommented

	// For demonstration, we'll use a mock server or skip authentication
	// In real usage, you would authenticate first:
	// err := client.Authenticate("username", "password")
	// if err != nil {
	//     log.Fatalf("Authentication failed: %v", err)
	// }

	// Example usage of the workout service
	fmt.Println("Garmin Connect Workout Service Examples")
	fmt.Println("=======================================")

	// Create a new workout
	newWorkout := garth.Workout{
		Name:        "Morning Run",
		Description: "5K easy run",
		Type:        "running",
	}
	_ = newWorkout // Prevent unused variable error

	// In a real scenario, you would do:
	// createdWorkout, err := client.Workouts.Create(context.Background(), newWorkout)
	// if err != nil {
	//     log.Printf("Failed to create workout: %v", err)
	// } else {
	//     fmt.Printf("Created workout: %+v\n", createdWorkout)
	// }

	// List workouts with options
	opts := garth.WorkoutListOptions{
		Limit:     10,
		StartDate: time.Now().AddDate(0, -1, 0), // Last month
		EndDate:   time.Now(),
	}

	fmt.Printf("Workout list options: %+v\n", opts)

	// Get workout details
	workoutID := "12345"
	fmt.Printf("Would fetch workout details for ID: %s\n", workoutID)
	// workout, err := client.Workouts.Get(context.Background(), workoutID)

	// Export workout
	fmt.Printf("Would export workout %s in FIT format\n", workoutID)
	// reader, err := client.Workouts.Export(context.Background(), workoutID, "fit")

	// Search workouts
	searchOpts := garth.WorkoutListOptions{
		Limit: 5,
	}
	fmt.Printf("Would search workouts with: %+v\n", searchOpts)
	// results, err := client.Workouts.List(context.Background(), searchOpts)

	// Get workout templates
	fmt.Println("Would fetch workout templates")
	// templates, err := client.Workouts.GetWorkoutTemplates(context.Background())

	// Copy workout
	newName := "Copied Workout"
	fmt.Printf("Would copy workout %s as %s\n", workoutID, newName)
	// copied, err := client.Workouts.CopyWorkout(context.Background(), workoutID, newName)

	// Update workout
	update := garth.Workout{
		Name:        "Updated Morning Run",
		Description: "Updated description",
	}
	fmt.Printf("Would update workout %s with: %+v\n", workoutID, update)
	// updated, err := client.Workouts.Update(context.Background(), workoutID, update)

	// Delete workout
	fmt.Printf("Would delete workout: %s\n", workoutID)
	// err = client.Workouts.Delete(context.Background(), workoutID)

	fmt.Println("\nExample completed. To use with real data:")
	fmt.Println("1. Set GARMIN_USERNAME and GARMIN_PASSWORD environment variables")
	fmt.Println("2. Uncomment the authentication and API calls above")
	fmt.Println("3. Run: go run examples/workouts_example.go")
}

func init() {
	// Check if credentials are provided
	if os.Getenv("GARMIN_USERNAME") == "" || os.Getenv("GARMIN_PASSWORD") == "" {
		fmt.Println("Note: Set GARMIN_USERNAME and GARMIN_PASSWORD environment variables for real API usage")
	}
}
80	filestorage.go	Normal file
@@ -0,0 +1,80 @@
package garth

import (
	"context"
	"encoding/json"
	"os"
	"path/filepath"
)

// fileStorage implements TokenStorage using a file
type fileStorage struct {
	path string
	auth Authenticator // Reference to authenticator for token refreshes
}

// NewFileStorage creates a new file-based token storage
func NewFileStorage(path string) TokenStorage {
	return &fileStorage{path: path}
}

// SetAuthenticator sets the authenticator used for token refreshes
func (s *fileStorage) SetAuthenticator(a Authenticator) {
	s.auth = a
}

// GetToken retrieves the token from the file, refreshing it if expired
func (s *fileStorage) GetToken() (*Token, error) {
	token, err := s.loadToken()
	if err != nil {
		return nil, err
	}

	// Refresh the token if it has expired and an authenticator is available
	if token.IsExpired() {
		if s.auth == nil {
			// No authenticator configured: return the stored token and let the caller decide
			return token, nil
		}
		refreshed, err := s.auth.RefreshToken(context.Background(), token.RefreshToken)
		if err != nil {
			return nil, err
		}
		if err := s.SaveToken(refreshed); err != nil {
			return nil, err
		}
		return refreshed, nil
	}
	return token, nil
}

// loadToken loads the token from the file without refreshing
func (s *fileStorage) loadToken() (*Token, error) {
	data, err := os.ReadFile(s.path)
	if err != nil {
		return nil, err
	}

	var token Token
	if err := json.Unmarshal(data, &token); err != nil {
		return nil, err
	}

	if token.AccessToken == "" || token.RefreshToken == "" {
		return nil, os.ErrNotExist
	}

	return &token, nil
}

// SaveToken saves the token to the file
func (s *fileStorage) SaveToken(token *Token) error {
	// Create the directory if needed
	dir := filepath.Dir(s.path)
	if err := os.MkdirAll(dir, 0700); err != nil {
		return err
	}

	data, err := json.MarshalIndent(token, "", "  ")
	if err != nil {
		return err
	}

	return os.WriteFile(s.path, data, 0600)
}
62	fix.md	Normal file
@@ -0,0 +1,62 @@
High Priority (Fix These First):

Fix MFA Response Handling:

```go
func (a *GarthAuthenticator) authenticate(/* params */) (*Token, error) {
	// ... existing code ...

	if resp.StatusCode == http.StatusPreconditionFailed {
		body, err := io.ReadAll(resp.Body)
		if err != nil {
			return nil, fmt.Errorf("failed to read MFA challenge: %w", err)
		}
		return a.handleMFA(ctx, username, password, mfaToken, string(body))
	}

	// ... rest of method
}
```

Create Complete Client Factory:

```go
// Add to garth.go
func NewGarminClient(ctx context.Context, username, password, mfaToken string) (*GarminClient, error) {
	storage := NewMemoryStorage()
	auth := NewAuthenticator(ClientOptions{
		Storage:  storage,
		TokenURL: "https://connectapi.garmin.com/oauth-service/oauth/token",
		Timeout:  30 * time.Second,
	})

	if _, err := auth.Login(ctx, username, password, mfaToken); err != nil {
		return nil, err
	}

	transport := NewAuthTransport(auth.(*GarthAuthenticator), storage, nil)
	httpClient := &http.Client{Transport: transport}
	apiClient := NewAPIClient("https://connectapi.garmin.com", httpClient)

	return &GarminClient{
		Activities: NewActivityService(apiClient),
		Profile:    NewProfileService(apiClient),
		Workouts:   NewWorkoutService(apiClient),
	}, nil
}
```

Add Integration Test:

```go
//go:build integration
// +build integration

func TestRealGarminFlow(t *testing.T) {
	ctx := context.Background()
	username := os.Getenv("GARMIN_USERNAME")
	password := os.Getenv("GARMIN_PASSWORD")

	client, err := NewGarminClient(ctx, username, password, "")
	require.NoError(t, err)

	// Test actual API calls
	profile, err := client.Profile.Get(ctx)
	require.NoError(t, err)
	require.NotEmpty(t, profile.UserID)

	activities, err := client.Activities.List(ctx, ActivityListOptions{Limit: 5})
	require.NoError(t, err)
	require.NotEmpty(t, activities)
}
```
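
Once the factory above is in place, callers could use it roughly as follows. This is a sketch that assumes `NewGarminClient` and the service fields are implemented as proposed; none of it exists in the code yet:

```go
func main() {
	ctx := context.Background()
	client, err := NewGarminClient(ctx,
		os.Getenv("GARMIN_USERNAME"),
		os.Getenv("GARMIN_PASSWORD"),
		"") // MFA token, if required
	if err != nil {
		log.Fatalf("login failed: %v", err)
	}

	activities, err := client.Activities.List(ctx, ActivityListOptions{Limit: 10})
	if err != nil {
		log.Fatalf("list activities: %v", err)
	}
	fmt.Printf("fetched %d activities\n", len(activities))
}
```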
83	garth.go	Normal file
@@ -0,0 +1,83 @@
// Package garth provides a Go client for the Garmin Connect API.
//
// This client supports authentication, user profile management, activity tracking,
// workout management, and other Garmin Connect services.
//
// Features:
// - OAuth 2.0 authentication with MFA support
// - User profile operations (retrieve, update, delete)
// - Activity management (create, read, update, delete)
// - Workout management (CRUD operations, scheduling, templates)
// - Comprehensive error handling
// - Automatic token refresh
//
// Usage:
// 1. Create an Authenticator instance with your credentials
// 2. Obtain an access token
// 3. Create an APIClient using the authenticator
// 4. Use service methods to interact with the Garmin Connect API
//
// Example:
//
//	opts := garth.NewClientOptionsFromEnv()
//	auth := garth.NewBasicAuthenticator(opts)
//	token, err := auth.Login(ctx, "username", "password", "")
//	client := garth.NewAPIClient(auth)
//
//	// Get user profile
//	profile, err := client.Profile().Get(ctx)
//
// For more details, see the documentation for each service.
package garth

import (
	"context"
	"net/http"
	"os"
	"strconv"
	"time"
)

// Authenticator defines the authentication interface
type Authenticator interface {
	// Login authenticates with Garmin services
	Login(ctx context.Context, username, password, mfaToken string) (*Token, error)

	// RefreshToken refreshes an expired access token
	RefreshToken(ctx context.Context, refreshToken string) (*Token, error)

	// GetClient returns an authenticated HTTP client
	GetClient() *http.Client
}

// ClientOptions configures the Authenticator
type ClientOptions struct {
	TokenURL string        // Token exchange endpoint
	Storage  TokenStorage  // Token storage implementation
	Timeout  time.Duration // HTTP client timeout
}

// NewClientOptionsFromEnv creates ClientOptions from environment variables
func NewClientOptionsFromEnv() ClientOptions {
	// Default configuration
	opts := ClientOptions{
		TokenURL: "https://connectapi.garmin.com/oauth-service/oauth/token",
		Timeout:  30 * time.Second,
	}

	// Override from environment variables
	if url := os.Getenv("GARTH_TOKEN_URL"); url != "" {
		opts.TokenURL = url
	}

	if timeoutStr := os.Getenv("GARTH_TIMEOUT"); timeoutStr != "" {
		if timeout, err := strconv.Atoi(timeoutStr); err == nil {
			opts.Timeout = time.Duration(timeout) * time.Second
		}
	}

	// Default to memory storage
	opts.Storage = NewMemoryStorage()

	return opts
}
5	go.mod	Normal file
@@ -0,0 +1,5 @@
module github.com/sstent/go-garth

go 1.22

require github.com/joho/godotenv v1.5.1 // indirect
2	go.sum	Normal file
@@ -0,0 +1,2 @@
github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0=
github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4=
431	go_garth_todo.md	Executable file
@@ -0,0 +1,431 @@
# Go-Garth Implementation TODO List

## 🎯 Project Overview
Complete the Go implementation of the Garth library to match the functionality of the original Python version for Garmin Connect authentication and API access.

## 📋 Phase 1: Complete Authentication Foundation (Priority: HIGH)

### 1.1 Fix Existing Authentication Issues

**Task**: Complete MFA Implementation in `auth.go`
- **File**: `auth.go` - `handleMFA` method
- **Requirements**:
  - Parse MFA challenge from response body
  - Submit MFA token via POST request
  - Handle MFA verification response
  - Extract and return authentication ticket
- **Go Style Notes**:
  - Use structured error handling: `fmt.Errorf("mfa verification failed: %w", err)`
  - Validate MFA token format before sending
  - Use context for request timeouts
- **Testing**: Create test cases for MFA flow with mock responses

**Task**: Implement Token Refresh Logic
- **File**: `auth.go` - `RefreshToken` method
- **Requirements**:
  - Implement OAuth2 refresh token flow
  - Handle refresh token expiration
  - Update stored token after successful refresh
  - Return new token with updated expiry
- **Go Idioms**:
  ```go
  // Use pointer receivers for methods that modify state
  func (a *GarthAuthenticator) RefreshToken(ctx context.Context, refreshToken string) (*Token, error) {
      // Validate input
      if refreshToken == "" {
          return nil, errors.New("refresh token cannot be empty")
      }
      // Implementation here...
  }
  ```

**Task**: Enhance Error Handling
- **Files**: `auth.go`, `types.go`
- **Requirements**:
  - Create custom error types for different failure modes
  - Add HTTP status code context to errors
  - Parse Garmin-specific error responses
- **Implementation**:
  ```go
  // types.go
  type AuthError struct {
      Code    int    `json:"code"`
      Message string `json:"message"`
      Type    string `json:"type"`
  }

  func (e *AuthError) Error() string {
      return fmt.Sprintf("garmin auth error %d: %s", e.Code, e.Message)
  }
  ```

### 1.2 Improve HTTP Client Architecture

**Task**: Create Authenticated HTTP Client Middleware
- **New File**: `client.go`
- **Requirements**:
  - Implement `http.RoundTripper` interface
  - Automatically add authentication headers
  - Handle token refresh on 401 responses
  - Add request/response logging (optional)
- **Go Pattern**:
  ```go
  type AuthTransport struct {
      base    http.RoundTripper
      auth    *GarthAuthenticator
      storage TokenStorage
  }

  func (t *AuthTransport) RoundTrip(req *http.Request) (*http.Response, error) {
      // Clone request, add auth headers, handle refresh
  }
  ```

**Task**: Add Request Retry Logic
- **File**: `client.go`
- **Requirements**:
  - Implement exponential backoff
  - Retry on specific HTTP status codes (500, 502, 503)
  - Maximum retry attempts configuration
  - Context-aware cancellation
- **Go Style**: Use `time.After` and `select` for backoff timing (see the sketch after this list)
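
A minimal sketch of what this retry wrapper could look like. `retryTransport`, `maxRetries`, and `retryable` are illustrative names, not existing code, and requests with bodies would additionally need `req.GetBody` handling before re-sending:

```go
type retryTransport struct {
	base       http.RoundTripper
	maxRetries int
}

func retryable(status int) bool {
	return status == http.StatusInternalServerError ||
		status == http.StatusBadGateway ||
		status == http.StatusServiceUnavailable
}

func (t *retryTransport) RoundTrip(req *http.Request) (*http.Response, error) {
	backoff := 500 * time.Millisecond
	var lastErr error

	for attempt := 0; attempt <= t.maxRetries; attempt++ {
		resp, err := t.base.RoundTrip(req)
		switch {
		case err != nil:
			lastErr = err
		case retryable(resp.StatusCode):
			lastErr = fmt.Errorf("server returned %s", resp.Status)
			resp.Body.Close() // discard the failed response before retrying
		default:
			return resp, nil
		}

		// Wait out the backoff interval, or give up if the caller cancels.
		select {
		case <-req.Context().Done():
			return nil, req.Context().Err()
		case <-time.After(backoff):
			backoff *= 2 // exponential backoff
		}
	}
	return nil, fmt.Errorf("request failed after %d attempts: %w", t.maxRetries+1, lastErr)
}
```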

### 1.3 Expand Storage Options

**Task**: Add Memory-based Token Storage
- **New File**: `memorystorage.go`
- **Requirements**:
  - Implement `TokenStorage` interface
  - Thread-safe operations using `sync.RWMutex`
  - Optional token encryption in memory
- **Go Concurrency**:
  ```go
  type MemoryStorage struct {
      mu    sync.RWMutex
      token *Token
  }
  ```

**Task**: Environment Variable Configuration
- **File**: `garth.go`
- **Requirements**:
  - Load configuration from environment variables
  - Provide reasonable defaults
  - Support custom configuration via struct
- **Go Standard**: Use `os.Getenv()` and provide defaults

## 📋 Phase 2: Garmin Connect API Client (Priority: HIGH)

### 2.1 Core API Client Structure

**Task**: Create Base API Client
- **New File**: `connect.go`
- **Requirements**:
  - Embed authenticated HTTP client
  - Base URL configuration for different Garmin services
  - Common request/response handling
  - Rate limiting support
- **Structure**:
  ```go
  type ConnectClient struct {
      client    *http.Client
      baseURL   string
      userAgent string
      auth      Authenticator
  }

  func NewConnectClient(auth Authenticator, opts ConnectOptions) *ConnectClient
  ```

**Task**: Implement Common HTTP Helpers
- **File**: `connect.go`
- **Requirements**:
  - Generic GET, POST, PUT, DELETE methods
  - JSON request/response marshaling
  - Query parameter handling
  - Error response parsing
- **Go Generics** (Go 1.18+; note that methods cannot declare their own type parameters, so the helper has to be a package-level function — expanded in the sketch below):
  ```go
  func Get[T any](ctx context.Context, c *ConnectClient, endpoint string) (*T, error)
  ```
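
A minimal sketch of the generic helper body, assuming the `ConnectClient` fields shown above; the function is illustrative, not existing API:

```go
// Get issues a GET request and decodes the JSON response into T.
// Package-level because Go methods cannot have their own type parameters.
func Get[T any](ctx context.Context, c *ConnectClient, endpoint string) (*T, error) {
	req, err := http.NewRequestWithContext(ctx, http.MethodGet, c.baseURL+endpoint, nil)
	if err != nil {
		return nil, err
	}
	req.Header.Set("User-Agent", c.userAgent)

	resp, err := c.client.Do(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("GET %s: unexpected status %s", endpoint, resp.Status)
	}

	var out T
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		return nil, fmt.Errorf("GET %s: decode response: %w", endpoint, err)
	}
	return &out, nil
}
```

Callers would then write, for example, `profile, err := Get[UserProfile](ctx, client, "/userprofile-service/userprofile")`.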

### 2.2 User Profile and Account APIs

**Task**: User Profile Management
- **New File**: `profile.go`
- **Requirements**:
  - Get user profile information
  - Update profile settings
  - Account preferences
- **Types**:
  ```go
  type UserProfile struct {
      UserID      int64  `json:"userId"`
      DisplayName string `json:"displayName"`
      Email       string `json:"email"`
      // Add other profile fields
  }
  ```

### 2.3 Activity and Workout APIs

**Task**: Activity Data Retrieval
- **New File**: `activities.go`
- **Requirements**:
  - List activities with pagination
  - Get detailed activity data
  - Activity search and filtering
  - Export activity data (GPX, TCX, etc.)
- **Pagination Pattern**:
  ```go
  type ActivityListOptions struct {
      Start int `json:"start"`
      Limit int `json:"limit"`
      // Add filter options
  }
  ```

**Task**: Workout Management
- **New File**: `workouts.go`
- **Requirements**:
  - Create, read, update, delete workouts
  - Workout scheduling
  - Workout templates
- **CRUD Pattern**: Follow consistent naming (Create, Get, Update, Delete methods)

## 📋 Phase 3: Advanced Features (Priority: MEDIUM)

### 3.1 Device and Sync Management

**Task**: Device Information APIs
- **New File**: `devices.go`
- **Requirements**:
  - List connected devices
  - Device settings and preferences
  - Sync status and history
- **Go Struct Tags**: Use proper JSON tags for API marshaling

### 3.2 Health and Metrics APIs

**Task**: Health Data Access
- **New File**: `health.go`
- **Requirements**:
  - Daily summaries (steps, calories, etc.)
  - Sleep data
  - Heart rate data
  - Weight and body composition
- **Time Handling**: Use `time.Time` for all timestamps, handle timezone conversions (see the sketch after this list)
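
A sketch of what the daily-summary types could look like; the struct and its JSON field names are assumptions for illustration, not confirmed Garmin Connect fields:

```go
// DailySummary is an illustrative shape for daily health data.
type DailySummary struct {
	CalendarDate  string `json:"calendarDate"` // e.g. "2025-01-31"
	TotalSteps    int    `json:"totalSteps"`
	TotalCalories int    `json:"totalKilocalories"`
	RestingHR     int    `json:"restingHeartRate"`
	SleepSeconds  int    `json:"sleepTimeSeconds"`
}

// Date parses the calendar date in the supplied timezone.
func (d DailySummary) Date(loc *time.Location) (time.Time, error) {
	return time.ParseInLocation("2006-01-02", d.CalendarDate, loc)
}
```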

### 3.3 Social and Challenges

**Task**: Social Features
- **New File**: `social.go`
- **Requirements**:
  - Friends and connections
  - Activity sharing
  - Challenges and competitions
- **Privacy Considerations**: Add warnings about sharing personal data

## 📋 Phase 4: Developer Experience (Priority: MEDIUM)

### 4.1 Testing Infrastructure

**Task**: Unit Tests for Authentication
- **New File**: `auth_test.go`
- **Requirements**:
  - Mock HTTP server for testing
  - Test all authentication flows
  - Error condition coverage
  - Use `httptest.Server` for mocking
- **Go Testing Pattern**:
  ```go
  func TestGarthAuthenticator_Login(t *testing.T) {
      tests := []struct {
          name string
          // test cases
      }{
          // table-driven tests
      }
  }
  ```

**Task**: Integration Tests
- **New File**: `integration_test.go`
- **Requirements**:
  - End-to-end API tests (optional, requires credentials)
  - Build tag for integration tests: `//go:build integration`
  - Environment variable configuration for test credentials

### 4.2 Documentation and Examples

**Task**: Package Documentation
- **All Files**: Add comprehensive GoDoc comments
- **Requirements**:
  - Package-level documentation in `garth.go`
  - Example usage in doc comments
  - Follow Go documentation conventions
- **GoDoc Style**:
  ```go
  // Package garth provides authentication and API access for Garmin Connect services.
  //
  // Basic usage:
  //
  //	auth := garth.NewAuthenticator(garth.ClientOptions{...})
  //	token, err := auth.Login(ctx, username, password, "")
  //
  package garth
  ```

**Task**: Create Usage Examples
- **New Directory**: `examples/`
- **Requirements**:
  - Basic authentication example
  - Activity data retrieval example
  - Complete CLI tool example
  - README with setup instructions

### 4.3 Configuration and Logging

**Task**: Structured Logging Support
- **New File**: `logging.go`
- **Requirements**:
  - Optional logger interface
  - Debug logging for HTTP requests/responses
  - Configurable log levels
- **Go Interface**:
  ```go
  type Logger interface {
      Debug(msg string, fields ...interface{})
      Info(msg string, fields ...interface{})
      Error(msg string, fields ...interface{})
  }
  ```

**Task**: Configuration Management
- **File**: `config.go`
- **Requirements**:
  - Centralized configuration struct
  - Environment variable loading
  - Configuration validation
  - Default values

## 📋 Phase 5: Production Readiness (Priority: LOW)

### 5.1 Performance and Reliability

**Task**: Add Rate Limiting
- **File**: `client.go` or new `ratelimit.go`
- **Requirements**:
  - Token bucket algorithm
  - Configurable rate limits
  - Per-endpoint rate limiting if needed
- **Go Concurrency**: Use goroutines and channels for rate limiting (see the sketch after this list)
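
A channel-based token bucket sketch; the names are illustrative, a production version would also need a way to stop the refill goroutine (or could use `golang.org/x/time/rate` instead):

```go
type rateLimiter struct {
	tokens chan struct{}
}

// newRateLimiter allows up to burst immediate requests and refills
// one token per interval until the bucket is full again.
func newRateLimiter(burst int, interval time.Duration) *rateLimiter {
	rl := &rateLimiter{tokens: make(chan struct{}, burst)}
	for i := 0; i < burst; i++ {
		rl.tokens <- struct{}{} // start with a full bucket
	}
	go func() {
		ticker := time.NewTicker(interval)
		defer ticker.Stop()
		for range ticker.C {
			select {
			case rl.tokens <- struct{}{}: // refill one token
			default: // bucket already full
			}
		}
	}()
	return rl
}

// Wait blocks until a token is available or the context is cancelled.
func (rl *rateLimiter) Wait(ctx context.Context) error {
	select {
	case <-rl.tokens:
		return nil
	case <-ctx.Done():
		return ctx.Err()
	}
}
```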

**Task**: Connection Pooling and Timeouts
- **File**: `client.go`
- **Requirements**:
  - Configure HTTP client with appropriate timeouts
  - Connection pooling settings
  - Keep-alive configuration (see the sketch after this list)
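
One way these knobs could be wired up; the concrete values below are placeholders to tune, not recommendations, and `newHTTPClient` is an illustrative name:

```go
func newHTTPClient(timeout time.Duration) *http.Client {
	transport := &http.Transport{
		MaxIdleConns:        100,
		MaxIdleConnsPerHost: 10,
		IdleConnTimeout:     90 * time.Second,
		DialContext: (&net.Dialer{
			Timeout:   10 * time.Second,
			KeepAlive: 30 * time.Second,
		}).DialContext,
		TLSHandshakeTimeout: 10 * time.Second,
	}
	return &http.Client{
		Transport: transport,
		Timeout:   timeout, // overall per-request deadline
	}
}
```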

### 5.2 Security Enhancements

**Task**: Token Encryption at Rest
- **File**: `filestorage.go`, new `encryption.go`
- **Requirements**:
  - Optional token encryption using AES
  - Key derivation from user password or system keyring
  - Secure key storage recommendations (see the sketch after this list)
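
A sketch of AES-GCM encryption of the serialized token using the standard library; key derivation (from a password or OS keyring) is out of scope here and the helper names are illustrative:

```go
func encryptToken(key, plaintext []byte) ([]byte, error) {
	block, err := aes.NewCipher(key) // key must be 16, 24 or 32 bytes
	if err != nil {
		return nil, err
	}
	gcm, err := cipher.NewGCM(block)
	if err != nil {
		return nil, err
	}
	nonce := make([]byte, gcm.NonceSize())
	if _, err := io.ReadFull(rand.Reader, nonce); err != nil {
		return nil, err
	}
	// Prepend the nonce so decryptToken can recover it.
	return gcm.Seal(nonce, nonce, plaintext, nil), nil
}

func decryptToken(key, ciphertext []byte) ([]byte, error) {
	block, err := aes.NewCipher(key)
	if err != nil {
		return nil, err
	}
	gcm, err := cipher.NewGCM(block)
	if err != nil {
		return nil, err
	}
	if len(ciphertext) < gcm.NonceSize() {
		return nil, errors.New("ciphertext too short")
	}
	nonce, data := ciphertext[:gcm.NonceSize()], ciphertext[gcm.NonceSize():]
	return gcm.Open(nil, nonce, data, nil)
}
```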

### 5.3 Monitoring and Metrics

**Task**: Add Metrics Collection
- **New File**: `metrics.go`
- **Requirements**:
  - Request/response metrics
  - Error rate tracking
  - Optional Prometheus metrics export
- **Go Pattern**: Use interfaces for pluggable metrics backends

## 🎨 Go Style and Idiom Guidelines

### Code Organization
- **Package Structure**: Keep related functionality in separate files
- **Interfaces**: Define small, focused interfaces
- **Error Handling**: Always handle errors, use `fmt.Errorf` for context
- **Context**: Pass `context.Context` as the first parameter to all long-running functions

### Naming Conventions
- **Exported Types**: PascalCase (e.g., `GarthAuthenticator`)
- **Unexported Types**: camelCase (e.g., `fileStorage`)
- **Methods**: Use verb-noun pattern (e.g., `GetToken`, `SaveToken`)
- **Constants**: Use PascalCase for exported, camelCase for unexported

### Error Handling Patterns
```go
// Wrap errors with context
if err != nil {
    return nil, fmt.Errorf("failed to authenticate user %s: %w", username, err)
}

// Use custom error types for different error conditions
type AuthenticationError struct {
    Code    int
    Message string
    Cause   error
}
```

### JSON and HTTP Patterns
```go
// Use struct tags for JSON marshaling
type Activity struct {
    ID        int64     `json:"activityId"`
    Name      string    `json:"activityName"`
    StartTime time.Time `json:"startTimeLocal"`
}

// Handle HTTP responses consistently
func (c *ConnectClient) makeRequest(ctx context.Context, method, url string, body interface{}) (*http.Response, error) {
    // Implementation with consistent error handling
}
```

### Concurrency Best Practices
- Use `sync.RWMutex` for read-heavy workloads
- Prefer channels for communication between goroutines
- Always handle context cancellation
- Use `sync.WaitGroup` for waiting on multiple goroutines

### Testing Patterns
- Use table-driven tests for multiple test cases
- Create test helpers for common setup
- Use `httptest.Server` for HTTP testing
- Mock external dependencies using interfaces

## 🚀 Implementation Priority Order

1. **Week 1**: Complete authentication foundation (Phase 1)
2. **Week 2-3**: Implement core API client and activity APIs (Phase 2.1, 2.3)
3. **Week 4**: Add user profile and device APIs (Phase 2.2, 3.1)
4. **Week 5**: Testing and documentation (Phase 4)
5. **Week 6+**: Advanced features and production readiness (Phase 3, 5)

## 📚 Additional Resources

- [Effective Go](https://golang.org/doc/effective_go.html)
- [Go Code Review Comments](https://github.com/golang/go/wiki/CodeReviewComments)
- [Original Garth Python Library](https://github.com/matin/garth) - Reference implementation
- [Garmin Connect API Documentation](https://connect.garmin.com/dev/) - If available
- [OAuth 2.0 RFC](https://tools.ietf.org/html/rfc6749) - Understanding the auth flow

## ✅ Definition of Done

Each task is complete when:
- [ ] Code follows Go best practices and style guidelines
- [ ] All public functions have GoDoc comments
- [ ] Unit tests achieve >80% coverage
- [ ] Integration tests pass (where applicable)
- [ ] No linting errors from `golangci-lint`
- [ ] Code is reviewed by senior developer
- [ ] Examples and documentation are updated
BIN	login_page_1756558819.html	Normal file — Binary file not shown.
BIN	login_page_1756564346.html	Normal file — Binary file not shown.
BIN	login_page_1756566235.html	Normal file — Binary file not shown.
BIN	login_page_1756566416.html	Normal file — Binary file not shown.
BIN	login_page_1756566593.html	Normal file — Binary file not shown.
BIN	login_page_1756566711.html	Normal file — Binary file not shown.
BIN	login_page_1756566845.html	Normal file — Binary file not shown.
BIN	login_page_1756782597.html	Normal file — Binary file not shown.
36	memorystorage.go	Normal file
@@ -0,0 +1,36 @@
package garth

import (
	"sync"
)

// MemoryStorage implements TokenStorage using an in-memory cache
type MemoryStorage struct {
	mu    sync.RWMutex
	token *Token
}

// NewMemoryStorage creates a new in-memory token storage
func NewMemoryStorage() *MemoryStorage {
	return &MemoryStorage{}
}

// GetToken retrieves the token from memory
func (s *MemoryStorage) GetToken() (*Token, error) {
	s.mu.RLock()
	defer s.mu.RUnlock()

	if s.token == nil {
		return nil, ErrTokenNotFound
	}
	return s.token, nil
}

// SaveToken saves the token to memory
func (s *MemoryStorage) SaveToken(token *Token) error {
	s.mu.Lock()
	defer s.mu.Unlock()

	s.token = token
	return nil
}
153	profile.go	Normal file
@@ -0,0 +1,153 @@
package garth

import (
	"bytes"
	"context"
	"encoding/json"
	"io"
	"net/http"
)

// Profile represents a user's Garmin profile
type Profile struct {
	UserID       string `json:"userId"`
	Username     string `json:"username"`
	FirstName    string `json:"firstName"`
	LastName     string `json:"lastName"`
	EmailAddress string `json:"emailAddress"`
	Country      string `json:"country"`
	City         string `json:"city"`
	State        string `json:"state"`
	ProfileImage string `json:"profileImage"`
}

// ProfileService provides access to user profile operations
type ProfileService struct {
	client *APIClient
}

// NewProfileService creates a new ProfileService instance
func NewProfileService(client *APIClient) *ProfileService {
	return &ProfileService{client: client}
}

// Get fetches the current user's profile
func (s *ProfileService) Get(ctx context.Context) (*Profile, error) {
	resp, err := s.client.Get(ctx, "/userprofile-service/userprofile")
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return nil, &APIError{
			StatusCode: resp.StatusCode,
			Message:    "Failed to get user profile",
		}
	}

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, &APIError{
			StatusCode: http.StatusInternalServerError,
			Message:    "Failed to read profile response",
			Cause:      err,
		}
	}

	var profile Profile
	if err := json.Unmarshal(body, &profile); err != nil {
		return nil, &APIError{
			StatusCode: http.StatusInternalServerError,
			Message:    "Failed to parse profile data",
			Cause:      err,
		}
	}

	return &profile, nil
}

// UpdateSettings updates the user's profile settings
func (s *ProfileService) UpdateSettings(ctx context.Context, settings map[string]interface{}) error {
	// Serialize settings to JSON
	jsonData, err := json.Marshal(settings)
	if err != nil {
		return &APIError{
			StatusCode: http.StatusInternalServerError,
			Message:    "Failed to serialize settings",
			Cause:      err,
		}
	}

	// Convert JSON data to a Reader
	reader := bytes.NewReader(jsonData)

	resp, err := s.client.Post(ctx, "/userprofile-service/userprofile/settings", reader)
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK && resp.StatusCode != http.StatusNoContent {
		return &APIError{
			StatusCode: resp.StatusCode,
			Message:    "Failed to update settings",
		}
	}

	return nil
}

// Delete deletes the user's profile
func (s *ProfileService) Delete(ctx context.Context) error {
	resp, err := s.client.Delete(ctx, "/userprofile-service/userprofile", nil)
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK && resp.StatusCode != http.StatusNoContent {
		return &APIError{
			StatusCode: resp.StatusCode,
			Message:    "Failed to delete profile",
		}
	}

	return nil
}

// GetPublic retrieves public profile information for a user
func (s *ProfileService) GetPublic(ctx context.Context, userID string) (*Profile, error) {
	resp, err := s.client.Get(ctx, "/userprofile-service/userprofile/public/"+userID)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return nil, &APIError{
			StatusCode: resp.StatusCode,
			Message:    "Failed to get public profile",
		}
	}

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, &APIError{
			StatusCode: http.StatusInternalServerError,
			Message:    "Failed to read profile response",
			Cause:      err,
		}
	}

	var profile Profile
	if err := json.Unmarshal(body, &profile); err != nil {
		return nil, &APIError{
			StatusCode: http.StatusInternalServerError,
			Message:    "Failed to parse profile data",
			Cause:      err,
		}
	}

	return &profile, nil
}
72	profile_test.go	Normal file
@@ -0,0 +1,72 @@
package garth

import (
	"context"
	"net/http"
	"net/http/httptest"
	"testing"
)

func TestProfileService_Get(t *testing.T) {
	// Create test server
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		w.Write([]byte(`{
			"userId": "12345",
			"username": "testuser",
			"firstName": "Test",
			"lastName": "User",
			"emailAddress": "test@example.com",
			"country": "US",
			"city": "Seattle",
			"state": "WA",
			"profileImage": "https://example.com/avatar.jpg"
		}`))
	}))
	defer ts.Close()

	// Create client
	apiClient := NewAPIClient(ts.URL, http.DefaultClient)
	profileService := NewProfileService(apiClient)

	// Test Get method
	profile, err := profileService.Get(context.Background())
	if err != nil {
		t.Fatalf("Unexpected error: %v", err)
	}

	// Verify profile data
	if profile.UserID != "12345" {
		t.Errorf("Expected UserID '12345', got '%s'", profile.UserID)
	}
	if profile.Username != "testuser" {
		t.Errorf("Expected Username 'testuser', got '%s'", profile.Username)
	}
	if profile.EmailAddress != "test@example.com" {
		t.Errorf("Expected Email 'test@example.com', got '%s'", profile.EmailAddress)
	}
}

func TestProfileService_UpdateSettings(t *testing.T) {
	// Create test server
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusNoContent)
	}))
	defer ts.Close()

	// Create client
	apiClient := NewAPIClient(ts.URL, http.DefaultClient)
	profileService := NewProfileService(apiClient)

	// Test UpdateSettings method
	settings := map[string]interface{}{
		"preferences": map[string]string{
			"units": "metric",
			"theme": "dark",
		},
	}
	err := profileService.UpdateSettings(context.Background(), settings)
	if err != nil {
		t.Fatalf("Unexpected error: %v", err)
	}
}
21	python-garth/.coderabbit.yaml	Normal file
@@ -0,0 +1,21 @@
# yaml-language-server: $schema=https://coderabbit.ai/integrations/schema.v2.json # Schema for CodeRabbit configurations
language: "en-US"
early_access: true
reviews:
  request_changes_workflow: false
  high_level_summary: true
  poem: false
  review_status: true
  collapse_walkthrough: false
  auto_review:
    enabled: true
    drafts: false
  path_filters:
    - "!tests/**/cassettes/**"
  path_instructions:
    - path: "tests/**"
      instructions: |
        - test functions shouldn't have a return type hint
        - it's ok to use `assert` instead of `pytest.assume()`
chat:
  auto_reply: true
7	python-garth/.devcontainer/Dockerfile	Normal file
@@ -0,0 +1,7 @@
FROM mcr.microsoft.com/devcontainers/anaconda:0-3

# Copy environment.yml (if found) to a temp location so we update the environment. Also
# copy "noop.txt" so the COPY instruction does not fail if no environment.yml exists.
COPY environment.yml* .devcontainer/noop.txt /tmp/conda-tmp/
RUN if [ -f "/tmp/conda-tmp/environment.yml" ]; then umask 0002 && /opt/conda/bin/conda env update -n base -f /tmp/conda-tmp/environment.yml; fi \
    && rm -rf /tmp/conda-tmp
10	python-garth/.devcontainer/devcontainer.json	Normal file
@@ -0,0 +1,10 @@
{
	"name": "Anaconda (Python 3)",
	"build": {
		"context": "..",
		"dockerfile": "Dockerfile"
	},
	"features": {
		"ghcr.io/devcontainers/features/node:1": {}
	}
}
3	python-garth/.devcontainer/noop.txt	Normal file
@@ -0,0 +1,3 @@
This file is copied into the container along with environment.yml* from the parent
folder. It is included to prevent the Dockerfile COPY instruction from
failing if no environment.yml is found.
1	python-garth/.git_disabled/HEAD	Normal file
@@ -0,0 +1 @@
ref: refs/heads/main
12	python-garth/.git_disabled/config	Normal file
@@ -0,0 +1,12 @@
[core]
	repositoryformatversion = 0
	filemode = true
	bare = false
	logallrefupdates = true
[remote "origin"]
	url = https://github.com/matin/garth.git
	fetch = +refs/heads/*:refs/remotes/origin/*
[branch "main"]
	remote = origin
	merge = refs/heads/main
	vscode-merge-base = origin/main
1	python-garth/.git_disabled/description	Normal file
@@ -0,0 +1 @@
Unnamed repository; edit this file 'description' to name the repository.
15
python-garth/.git_disabled/hooks/applypatch-msg.sample
Executable file
15
python-garth/.git_disabled/hooks/applypatch-msg.sample
Executable file
@@ -0,0 +1,15 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# An example hook script to check the commit log message taken by
|
||||
# applypatch from an e-mail message.
|
||||
#
|
||||
# The hook should exit with non-zero status after issuing an
|
||||
# appropriate message if it wants to stop the commit. The hook is
|
||||
# allowed to edit the commit message file.
|
||||
#
|
||||
# To enable this hook, rename this file to "applypatch-msg".
|
||||
|
||||
. git-sh-setup
|
||||
commitmsg="$(git rev-parse --git-path hooks/commit-msg)"
|
||||
test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"}
|
||||
:
|
||||
24
python-garth/.git_disabled/hooks/commit-msg.sample
Executable file
24
python-garth/.git_disabled/hooks/commit-msg.sample
Executable file
@@ -0,0 +1,24 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# An example hook script to check the commit log message.
|
||||
# Called by "git commit" with one argument, the name of the file
|
||||
# that has the commit message. The hook should exit with non-zero
|
||||
# status after issuing an appropriate message if it wants to stop the
|
||||
# commit. The hook is allowed to edit the commit message file.
|
||||
#
|
||||
# To enable this hook, rename this file to "commit-msg".
|
||||
|
||||
# Uncomment the below to add a Signed-off-by line to the message.
|
||||
# Doing this in a hook is a bad idea in general, but the prepare-commit-msg
|
||||
# hook is more suited to it.
|
||||
#
|
||||
# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p')
|
||||
# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1"
|
||||
|
||||
# This example catches duplicate Signed-off-by lines.
|
||||
|
||||
test "" = "$(grep '^Signed-off-by: ' "$1" |
|
||||
sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || {
|
||||
echo >&2 Duplicate Signed-off-by lines.
|
||||
exit 1
|
||||
}
|
||||
174
python-garth/.git_disabled/hooks/fsmonitor-watchman.sample
Executable file
174
python-garth/.git_disabled/hooks/fsmonitor-watchman.sample
Executable file
@@ -0,0 +1,174 @@
|
||||
#!/usr/bin/perl
|
||||
|
||||
use strict;
|
||||
use warnings;
|
||||
use IPC::Open2;
|
||||
|
||||
# An example hook script to integrate Watchman
|
||||
# (https://facebook.github.io/watchman/) with git to speed up detecting
|
||||
# new and modified files.
|
||||
#
|
||||
# The hook is passed a version (currently 2) and last update token
|
||||
# formatted as a string and outputs to stdout a new update token and
|
||||
# all files that have been modified since the update token. Paths must
|
||||
# be relative to the root of the working tree and separated by a single NUL.
|
||||
#
|
||||
# To enable this hook, rename this file to "query-watchman" and set
|
||||
# 'git config core.fsmonitor .git/hooks/query-watchman'
|
||||
#
|
||||
my ($version, $last_update_token) = @ARGV;
|
||||
|
||||
# Uncomment for debugging
|
||||
# print STDERR "$0 $version $last_update_token\n";
|
||||
|
||||
# Check the hook interface version
|
||||
if ($version ne 2) {
|
||||
die "Unsupported query-fsmonitor hook version '$version'.\n" .
|
||||
"Falling back to scanning...\n";
|
||||
}
|
||||
|
||||
my $git_work_tree = get_working_dir();
|
||||
|
||||
my $retry = 1;
|
||||
|
||||
my $json_pkg;
|
||||
eval {
|
||||
require JSON::XS;
|
||||
$json_pkg = "JSON::XS";
|
||||
1;
|
||||
} or do {
|
||||
require JSON::PP;
|
||||
$json_pkg = "JSON::PP";
|
||||
};
|
||||
|
||||
launch_watchman();
|
||||
|
||||
sub launch_watchman {
|
||||
my $o = watchman_query();
|
||||
if (is_work_tree_watched($o)) {
|
||||
output_result($o->{clock}, @{$o->{files}});
|
||||
}
|
||||
}
|
||||
|
||||
sub output_result {
|
||||
my ($clockid, @files) = @_;
|
||||
|
||||
# Uncomment for debugging watchman output
|
||||
# open (my $fh, ">", ".git/watchman-output.out");
|
||||
# binmode $fh, ":utf8";
|
||||
# print $fh "$clockid\n@files\n";
|
||||
# close $fh;
|
||||
|
||||
binmode STDOUT, ":utf8";
|
||||
print $clockid;
|
||||
print "\0";
|
||||
local $, = "\0";
|
||||
print @files;
|
||||
}
|
||||
|
||||
sub watchman_clock {
|
||||
my $response = qx/watchman clock "$git_work_tree"/;
|
||||
die "Failed to get clock id on '$git_work_tree'.\n" .
|
||||
"Falling back to scanning...\n" if $? != 0;
|
||||
|
||||
return $json_pkg->new->utf8->decode($response);
|
||||
}
|
||||
|
||||
sub watchman_query {
|
||||
my $pid = open2(\*CHLD_OUT, \*CHLD_IN, 'watchman -j --no-pretty')
|
||||
or die "open2() failed: $!\n" .
|
||||
"Falling back to scanning...\n";
|
||||
|
||||
# In the query expression below we're asking for names of files that
|
||||
# changed since $last_update_token but not from the .git folder.
|
||||
#
|
||||
# To accomplish this, we're using the "since" generator to use the
|
||||
# recency index to select candidate nodes and "fields" to limit the
|
||||
# output to file names only. Then we're using the "expression" term to
|
||||
# further constrain the results.
|
||||
my $last_update_line = "";
|
||||
if (substr($last_update_token, 0, 1) eq "c") {
|
||||
$last_update_token = "\"$last_update_token\"";
|
||||
$last_update_line = qq[\n"since": $last_update_token,];
|
||||
}
|
||||
my $query = <<" END";
|
||||
["query", "$git_work_tree", {$last_update_line
|
||||
"fields": ["name"],
|
||||
"expression": ["not", ["dirname", ".git"]]
|
||||
}]
|
||||
END
|
||||
|
||||
# Uncomment for debugging the watchman query
|
||||
# open (my $fh, ">", ".git/watchman-query.json");
|
||||
# print $fh $query;
|
||||
# close $fh;
|
||||
|
||||
print CHLD_IN $query;
|
||||
close CHLD_IN;
|
||||
my $response = do {local $/; <CHLD_OUT>};
|
||||
|
||||
# Uncomment for debugging the watch response
|
||||
# open ($fh, ">", ".git/watchman-response.json");
|
||||
# print $fh $response;
|
||||
# close $fh;
|
||||
|
||||
die "Watchman: command returned no output.\n" .
|
||||
"Falling back to scanning...\n" if $response eq "";
|
||||
die "Watchman: command returned invalid output: $response\n" .
|
||||
"Falling back to scanning...\n" unless $response =~ /^\{/;
|
||||
|
||||
return $json_pkg->new->utf8->decode($response);
|
||||
}
|
||||
|
||||
sub is_work_tree_watched {
|
||||
my ($output) = @_;
|
||||
my $error = $output->{error};
|
||||
if ($retry > 0 and $error and $error =~ m/unable to resolve root .* directory (.*) is not watched/) {
|
||||
$retry--;
|
||||
my $response = qx/watchman watch "$git_work_tree"/;
|
||||
die "Failed to make watchman watch '$git_work_tree'.\n" .
|
||||
"Falling back to scanning...\n" if $? != 0;
|
||||
$output = $json_pkg->new->utf8->decode($response);
|
||||
$error = $output->{error};
|
||||
die "Watchman: $error.\n" .
|
||||
"Falling back to scanning...\n" if $error;
|
||||
|
||||
# Uncomment for debugging watchman output
|
||||
# open (my $fh, ">", ".git/watchman-output.out");
|
||||
# close $fh;
|
||||
|
||||
# Watchman will always return all files on the first query so
|
||||
# return the fast "everything is dirty" flag to git and do the
|
||||
# Watchman query just to get it over with now so we won't pay
|
||||
# the cost in git to look up each individual file.
|
||||
my $o = watchman_clock();
|
||||
$error = $output->{error};
|
||||
|
||||
die "Watchman: $error.\n" .
|
||||
"Falling back to scanning...\n" if $error;
|
||||
|
||||
output_result($o->{clock}, ("/"));
|
||||
$last_update_token = $o->{clock};
|
||||
|
||||
eval { launch_watchman() };
|
||||
return 0;
|
||||
}
|
||||
|
||||
die "Watchman: $error.\n" .
|
||||
"Falling back to scanning...\n" if $error;
|
||||
|
||||
return 1;
|
||||
}
|
||||
|
||||
sub get_working_dir {
|
||||
my $working_dir;
|
||||
if ($^O =~ 'msys' || $^O =~ 'cygwin') {
|
||||
$working_dir = Win32::GetCwd();
|
||||
$working_dir =~ tr/\\/\//;
|
||||
} else {
|
||||
require Cwd;
|
||||
$working_dir = Cwd::cwd();
|
||||
}
|
||||
|
||||
return $working_dir;
|
||||
}
|
||||
8
python-garth/.git_disabled/hooks/post-update.sample
Executable file
8
python-garth/.git_disabled/hooks/post-update.sample
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# An example hook script to prepare a packed repository for use over
|
||||
# dumb transports.
|
||||
#
|
||||
# To enable this hook, rename this file to "post-update".
|
||||
|
||||
exec git update-server-info
|
||||
14
python-garth/.git_disabled/hooks/pre-applypatch.sample
Executable file
14
python-garth/.git_disabled/hooks/pre-applypatch.sample
Executable file
@@ -0,0 +1,14 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# An example hook script to verify what is about to be committed
|
||||
# by applypatch from an e-mail message.
|
||||
#
|
||||
# The hook should exit with non-zero status after issuing an
|
||||
# appropriate message if it wants to stop the commit.
|
||||
#
|
||||
# To enable this hook, rename this file to "pre-applypatch".
|
||||
|
||||
. git-sh-setup
|
||||
precommit="$(git rev-parse --git-path hooks/pre-commit)"
|
||||
test -x "$precommit" && exec "$precommit" ${1+"$@"}
|
||||
:
|
||||
49
python-garth/.git_disabled/hooks/pre-commit.sample
Executable file
49
python-garth/.git_disabled/hooks/pre-commit.sample
Executable file
@@ -0,0 +1,49 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# An example hook script to verify what is about to be committed.
|
||||
# Called by "git commit" with no arguments. The hook should
|
||||
# exit with non-zero status after issuing an appropriate message if
|
||||
# it wants to stop the commit.
|
||||
#
|
||||
# To enable this hook, rename this file to "pre-commit".
|
||||
|
||||
if git rev-parse --verify HEAD >/dev/null 2>&1
|
||||
then
|
||||
against=HEAD
|
||||
else
|
||||
# Initial commit: diff against an empty tree object
|
||||
against=$(git hash-object -t tree /dev/null)
|
||||
fi
|
||||
|
||||
# If you want to allow non-ASCII filenames set this variable to true.
|
||||
allownonascii=$(git config --type=bool hooks.allownonascii)
|
||||
|
||||
# Redirect output to stderr.
|
||||
exec 1>&2
|
||||
|
||||
# Cross platform projects tend to avoid non-ASCII filenames; prevent
|
||||
# them from being added to the repository. We exploit the fact that the
|
||||
# printable range starts at the space character and ends with tilde.
|
||||
if [ "$allownonascii" != "true" ] &&
|
||||
# Note that the use of brackets around a tr range is ok here, (it's
|
||||
# even required, for portability to Solaris 10's /usr/bin/tr), since
|
||||
# the square bracket bytes happen to fall in the designated range.
|
||||
test $(git diff-index --cached --name-only --diff-filter=A -z $against |
|
||||
LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0
|
||||
then
|
||||
cat <<\EOF
|
||||
Error: Attempt to add a non-ASCII file name.
|
||||
|
||||
This can cause problems if you want to work with people on other platforms.
|
||||
|
||||
To be portable it is advisable to rename the file.
|
||||
|
||||
If you know what you are doing you can disable this check using:
|
||||
|
||||
git config hooks.allownonascii true
|
||||
EOF
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# If there are whitespace errors, print the offending file names and fail.
|
||||
exec git diff-index --check --cached $against --
|
||||
13
python-garth/.git_disabled/hooks/pre-merge-commit.sample
Executable file
13
python-garth/.git_disabled/hooks/pre-merge-commit.sample
Executable file
@@ -0,0 +1,13 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# An example hook script to verify what is about to be committed.
|
||||
# Called by "git merge" with no arguments. The hook should
|
||||
# exit with non-zero status after issuing an appropriate message to
|
||||
# stderr if it wants to stop the merge commit.
|
||||
#
|
||||
# To enable this hook, rename this file to "pre-merge-commit".
|
||||
|
||||
. git-sh-setup
|
||||
test -x "$GIT_DIR/hooks/pre-commit" &&
|
||||
exec "$GIT_DIR/hooks/pre-commit"
|
||||
:
|
||||
53
python-garth/.git_disabled/hooks/pre-push.sample
Executable file
53
python-garth/.git_disabled/hooks/pre-push.sample
Executable file
@@ -0,0 +1,53 @@
|
||||
#!/bin/sh
|
||||
|
||||
# An example hook script to verify what is about to be pushed. Called by "git
|
||||
# push" after it has checked the remote status, but before anything has been
|
||||
# pushed. If this script exits with a non-zero status nothing will be pushed.
|
||||
#
|
||||
# This hook is called with the following parameters:
|
||||
#
|
||||
# $1 -- Name of the remote to which the push is being done
|
||||
# $2 -- URL to which the push is being done
|
||||
#
|
||||
# If pushing without using a named remote those arguments will be equal.
|
||||
#
|
||||
# Information about the commits which are being pushed is supplied as lines to
|
||||
# the standard input in the form:
|
||||
#
|
||||
# <local ref> <local oid> <remote ref> <remote oid>
|
||||
#
|
||||
# This sample shows how to prevent push of commits where the log message starts
|
||||
# with "WIP" (work in progress).
|
||||
|
||||
remote="$1"
|
||||
url="$2"
|
||||
|
||||
zero=$(git hash-object --stdin </dev/null | tr '[0-9a-f]' '0')
|
||||
|
||||
while read local_ref local_oid remote_ref remote_oid
|
||||
do
|
||||
if test "$local_oid" = "$zero"
|
||||
then
|
||||
# Handle delete
|
||||
:
|
||||
else
|
||||
if test "$remote_oid" = "$zero"
|
||||
then
|
||||
# New branch, examine all commits
|
||||
range="$local_oid"
|
||||
else
|
||||
# Update to existing branch, examine new commits
|
||||
range="$remote_oid..$local_oid"
|
||||
fi
|
||||
|
||||
# Check for WIP commit
|
||||
commit=$(git rev-list -n 1 --grep '^WIP' "$range")
|
||||
if test -n "$commit"
|
||||
then
|
||||
echo >&2 "Found WIP commit in $local_ref, not pushing"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
done
|
||||
|
||||
exit 0
|
||||
169
python-garth/.git_disabled/hooks/pre-rebase.sample
Executable file
169
python-garth/.git_disabled/hooks/pre-rebase.sample
Executable file
@@ -0,0 +1,169 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# Copyright (c) 2006, 2008 Junio C Hamano
|
||||
#
|
||||
# The "pre-rebase" hook is run just before "git rebase" starts doing
|
||||
# its job, and can prevent the command from running by exiting with
|
||||
# non-zero status.
|
||||
#
|
||||
# The hook is called with the following parameters:
|
||||
#
|
||||
# $1 -- the upstream the series was forked from.
|
||||
# $2 -- the branch being rebased (or empty when rebasing the current branch).
|
||||
#
|
||||
# This sample shows how to prevent topic branches that are already
|
||||
# merged to 'next' branch from getting rebased, because allowing it
|
||||
# would result in rebasing already published history.
|
||||
|
||||
publish=next
|
||||
basebranch="$1"
|
||||
if test "$#" = 2
|
||||
then
|
||||
topic="refs/heads/$2"
|
||||
else
|
||||
topic=`git symbolic-ref HEAD` ||
|
||||
exit 0 ;# we do not interrupt rebasing detached HEAD
|
||||
fi
|
||||
|
||||
case "$topic" in
|
||||
refs/heads/??/*)
|
||||
;;
|
||||
*)
|
||||
exit 0 ;# we do not interrupt others.
|
||||
;;
|
||||
esac
|
||||
|
||||
# Now we are dealing with a topic branch being rebased
|
||||
# on top of master. Is it OK to rebase it?
|
||||
|
||||
# Does the topic really exist?
|
||||
git show-ref -q "$topic" || {
|
||||
echo >&2 "No such branch $topic"
|
||||
exit 1
|
||||
}
|
||||
|
||||
# Is topic fully merged to master?
|
||||
not_in_master=`git rev-list --pretty=oneline ^master "$topic"`
|
||||
if test -z "$not_in_master"
|
||||
then
|
||||
echo >&2 "$topic is fully merged to master; better remove it."
|
||||
exit 1 ;# we could allow it, but there is no point.
|
||||
fi
|
||||
|
||||
# Is topic ever merged to next? If so you should not be rebasing it.
|
||||
only_next_1=`git rev-list ^master "^$topic" ${publish} | sort`
|
||||
only_next_2=`git rev-list ^master ${publish} | sort`
|
||||
if test "$only_next_1" = "$only_next_2"
|
||||
then
|
||||
not_in_topic=`git rev-list "^$topic" master`
|
||||
if test -z "$not_in_topic"
|
||||
then
|
||||
echo >&2 "$topic is already up to date with master"
|
||||
exit 1 ;# we could allow it, but there is no point.
|
||||
else
|
||||
exit 0
|
||||
fi
|
||||
else
|
||||
not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"`
|
||||
/usr/bin/perl -e '
|
||||
my $topic = $ARGV[0];
|
||||
my $msg = "* $topic has commits already merged to public branch:\n";
|
||||
my (%not_in_next) = map {
|
||||
/^([0-9a-f]+) /;
|
||||
($1 => 1);
|
||||
} split(/\n/, $ARGV[1]);
|
||||
for my $elem (map {
|
||||
/^([0-9a-f]+) (.*)$/;
|
||||
[$1 => $2];
|
||||
} split(/\n/, $ARGV[2])) {
|
||||
if (!exists $not_in_next{$elem->[0]}) {
|
||||
if ($msg) {
|
||||
print STDERR $msg;
|
||||
undef $msg;
|
||||
}
|
||||
print STDERR " $elem->[1]\n";
|
||||
}
|
||||
}
|
||||
' "$topic" "$not_in_next" "$not_in_master"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
<<\DOC_END
|
||||
|
||||
This sample hook safeguards topic branches that have been
|
||||
published from being rewound.
|
||||
|
||||
The workflow assumed here is:
|
||||
|
||||
* Once a topic branch forks from "master", "master" is never
|
||||
merged into it again (either directly or indirectly).
|
||||
|
||||
* Once a topic branch is fully cooked and merged into "master",
|
||||
it is deleted. If you need to build on top of it to correct
|
||||
earlier mistakes, a new topic branch is created by forking at
|
||||
the tip of the "master". This is not strictly necessary, but
|
||||
it makes it easier to keep your history simple.
|
||||
|
||||
* Whenever you need to test or publish your changes to topic
|
||||
branches, merge them into "next" branch.
|
||||
|
||||
The script, being an example, hardcodes the publish branch name
|
||||
to be "next", but it is trivial to make it configurable via
|
||||
$GIT_DIR/config mechanism.
|
||||
|
||||
With this workflow, you would want to know:
|
||||
|
||||
(1) ... if a topic branch has ever been merged to "next". Young
|
||||
topic branches can have stupid mistakes you would rather
|
||||
clean up before publishing, and things that have not been
|
||||
merged into other branches can be easily rebased without
|
||||
affecting other people. But once it is published, you would
|
||||
not want to rewind it.
|
||||
|
||||
(2) ... if a topic branch has been fully merged to "master".
|
||||
Then you can delete it. More importantly, you should not
|
||||
build on top of it -- other people may already want to
|
||||
change things related to the topic as patches against your
|
||||
"master", so if you need further changes, it is better to
|
||||
fork the topic (perhaps with the same name) afresh from the
|
||||
tip of "master".
|
||||
|
||||
Let's look at this example:
|
||||
|
||||
o---o---o---o---o---o---o---o---o---o "next"
|
||||
/ / / /
|
||||
/ a---a---b A / /
|
||||
/ / / /
|
||||
/ / c---c---c---c B /
|
||||
/ / / \ /
|
||||
/ / / b---b C \ /
|
||||
/ / / / \ /
|
||||
---o---o---o---o---o---o---o---o---o---o---o "master"
|
||||
|
||||
|
||||
A, B and C are topic branches.
|
||||
|
||||
* A has one fix since it was merged up to "next".
|
||||
|
||||
* B has finished. It has been fully merged up to "master" and "next",
|
||||
and is ready to be deleted.
|
||||
|
||||
* C has not merged to "next" at all.
|
||||
|
||||
We would want to allow C to be rebased, refuse A, and encourage
|
||||
B to be deleted.
|
||||
|
||||
To compute (1):
|
||||
|
||||
git rev-list ^master ^topic next
|
||||
git rev-list ^master next
|
||||
|
||||
if these match, topic has not merged in next at all.
|
||||
|
||||
To compute (2):
|
||||
|
||||
git rev-list master..topic
|
||||
|
||||
if this is empty, it is fully merged to "master".
|
||||
|
||||
DOC_END
|
||||
24
python-garth/.git_disabled/hooks/pre-receive.sample
Executable file
24
python-garth/.git_disabled/hooks/pre-receive.sample
Executable file
@@ -0,0 +1,24 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# An example hook script to make use of push options.
|
||||
# The example simply echoes all push options that start with 'echoback='
|
||||
# and rejects all pushes when the "reject" push option is used.
|
||||
#
|
||||
# To enable this hook, rename this file to "pre-receive".
|
||||
|
||||
if test -n "$GIT_PUSH_OPTION_COUNT"
|
||||
then
|
||||
i=0
|
||||
while test "$i" -lt "$GIT_PUSH_OPTION_COUNT"
|
||||
do
|
||||
eval "value=\$GIT_PUSH_OPTION_$i"
|
||||
case "$value" in
|
||||
echoback=*)
|
||||
echo "echo from the pre-receive-hook: ${value#*=}" >&2
|
||||
;;
|
||||
reject)
|
||||
exit 1
|
||||
esac
|
||||
i=$((i + 1))
|
||||
done
|
||||
fi
|
||||
42
python-garth/.git_disabled/hooks/prepare-commit-msg.sample
Executable file
42
python-garth/.git_disabled/hooks/prepare-commit-msg.sample
Executable file
@@ -0,0 +1,42 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# An example hook script to prepare the commit log message.
|
||||
# Called by "git commit" with the name of the file that has the
|
||||
# commit message, followed by the description of the commit
|
||||
# message's source. The hook's purpose is to edit the commit
|
||||
# message file. If the hook fails with a non-zero status,
|
||||
# the commit is aborted.
|
||||
#
|
||||
# To enable this hook, rename this file to "prepare-commit-msg".
|
||||
|
||||
# This hook includes three examples. The first one removes the
|
||||
# "# Please enter the commit message..." help message.
|
||||
#
|
||||
# The second includes the output of "git diff --name-status -r"
|
||||
# into the message, just before the "git status" output. It is
|
||||
# commented because it doesn't cope with --amend or with squashed
|
||||
# commits.
|
||||
#
|
||||
# The third example adds a Signed-off-by line to the message, that can
|
||||
# still be edited. This is rarely a good idea.
|
||||
|
||||
COMMIT_MSG_FILE=$1
|
||||
COMMIT_SOURCE=$2
|
||||
SHA1=$3
|
||||
|
||||
/usr/bin/perl -i.bak -ne 'print unless(m/^. Please enter the commit message/..m/^#$/)' "$COMMIT_MSG_FILE"
|
||||
|
||||
# case "$COMMIT_SOURCE,$SHA1" in
|
||||
# ,|template,)
|
||||
# /usr/bin/perl -i.bak -pe '
|
||||
# print "\n" . `git diff --cached --name-status -r`
|
||||
# if /^#/ && $first++ == 0' "$COMMIT_MSG_FILE" ;;
|
||||
# *) ;;
|
||||
# esac
|
||||
|
||||
# SOB=$(git var GIT_COMMITTER_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p')
|
||||
# git interpret-trailers --in-place --trailer "$SOB" "$COMMIT_MSG_FILE"
|
||||
# if test -z "$COMMIT_SOURCE"
|
||||
# then
|
||||
# /usr/bin/perl -i.bak -pe 'print "\n" if !$first_line++' "$COMMIT_MSG_FILE"
|
||||
# fi
|
||||
78
python-garth/.git_disabled/hooks/push-to-checkout.sample
Executable file
78
python-garth/.git_disabled/hooks/push-to-checkout.sample
Executable file
@@ -0,0 +1,78 @@
|
||||
#!/bin/sh
|
||||
|
||||
# An example hook script to update a checked-out tree on a git push.
|
||||
#
|
||||
# This hook is invoked by git-receive-pack(1) when it reacts to git
|
||||
# push and updates reference(s) in its repository, and when the push
|
||||
# tries to update the branch that is currently checked out and the
|
||||
# receive.denyCurrentBranch configuration variable is set to
|
||||
# updateInstead.
|
||||
#
|
||||
# By default, such a push is refused if the working tree and the index
|
||||
# of the remote repository has any difference from the currently
|
||||
# checked out commit; when both the working tree and the index match
|
||||
# the current commit, they are updated to match the newly pushed tip
|
||||
# of the branch. This hook is to be used to override the default
|
||||
# behaviour; however the code below reimplements the default behaviour
|
||||
# as a starting point for convenient modification.
|
||||
#
|
||||
# The hook receives the commit with which the tip of the current
|
||||
# branch is going to be updated:
|
||||
commit=$1
|
||||
|
||||
# It can exit with a non-zero status to refuse the push (when it does
|
||||
# so, it must not modify the index or the working tree).
|
||||
die () {
|
||||
echo >&2 "$*"
|
||||
exit 1
|
||||
}
|
||||
|
||||
# Or it can make any necessary changes to the working tree and to the
|
||||
# index to bring them to the desired state when the tip of the current
|
||||
# branch is updated to the new commit, and exit with a zero status.
|
||||
#
|
||||
# For example, the hook can simply run git read-tree -u -m HEAD "$1"
|
||||
# in order to emulate git fetch that is run in the reverse direction
|
||||
# with git push, as the two-tree form of git read-tree -u -m is
|
||||
# essentially the same as git switch or git checkout that switches
|
||||
# branches while keeping the local changes in the working tree that do
|
||||
# not interfere with the difference between the branches.
|
||||
|
||||
# The below is a more-or-less exact translation to shell of the C code
|
||||
# for the default behaviour for git's push-to-checkout hook defined in
|
||||
# the push_to_deploy() function in builtin/receive-pack.c.
|
||||
#
|
||||
# Note that the hook will be executed from the repository directory,
|
||||
# not from the working tree, so if you want to perform operations on
|
||||
# the working tree, you will have to adapt your code accordingly, e.g.
|
||||
# by adding "cd .." or using relative paths.
|
||||
|
||||
if ! git update-index -q --ignore-submodules --refresh
|
||||
then
|
||||
die "Up-to-date check failed"
|
||||
fi
|
||||
|
||||
if ! git diff-files --quiet --ignore-submodules --
|
||||
then
|
||||
die "Working directory has unstaged changes"
|
||||
fi
|
||||
|
||||
# This is a rough translation of:
|
||||
#
|
||||
# head_has_history() ? "HEAD" : EMPTY_TREE_SHA1_HEX
|
||||
if git cat-file -e HEAD 2>/dev/null
|
||||
then
|
||||
head=HEAD
|
||||
else
|
||||
head=$(git hash-object -t tree --stdin </dev/null)
|
||||
fi
|
||||
|
||||
if ! git diff-index --quiet --cached --ignore-submodules $head --
|
||||
then
|
||||
die "Working directory has staged changes"
|
||||
fi
|
||||
|
||||
if ! git read-tree -u -m "$commit"
|
||||
then
|
||||
die "Could not update working tree to new HEAD"
|
||||
fi
|
||||
77
python-garth/.git_disabled/hooks/sendemail-validate.sample
Executable file
77
python-garth/.git_disabled/hooks/sendemail-validate.sample
Executable file
@@ -0,0 +1,77 @@
|
||||
#!/bin/sh
|
||||
|
||||
# An example hook script to validate a patch (and/or patch series) before
|
||||
# sending it via email.
|
||||
#
|
||||
# The hook should exit with non-zero status after issuing an appropriate
|
||||
# message if it wants to prevent the email(s) from being sent.
|
||||
#
|
||||
# To enable this hook, rename this file to "sendemail-validate".
|
||||
#
|
||||
# By default, it will only check that the patch(es) can be applied on top of
|
||||
# the default upstream branch without conflicts in a secondary worktree. After
|
||||
# validation (successful or not) of the last patch of a series, the worktree
|
||||
# will be deleted.
|
||||
#
|
||||
# The following config variables can be set to change the default remote and
|
||||
# remote ref that are used to apply the patches against:
|
||||
#
|
||||
# sendemail.validateRemote (default: origin)
|
||||
# sendemail.validateRemoteRef (default: HEAD)
|
||||
#
|
||||
# Replace the TODO placeholders with appropriate checks according to your
|
||||
# needs.
|
||||
|
||||
validate_cover_letter () {
|
||||
file="$1"
|
||||
# TODO: Replace with appropriate checks (e.g. spell checking).
|
||||
true
|
||||
}
|
||||
|
||||
validate_patch () {
|
||||
file="$1"
|
||||
# Ensure that the patch applies without conflicts.
|
||||
git am -3 "$file" || return
|
||||
# TODO: Replace with appropriate checks for this patch
|
||||
# (e.g. checkpatch.pl).
|
||||
true
|
||||
}
|
||||
|
||||
validate_series () {
|
||||
# TODO: Replace with appropriate checks for the whole series
|
||||
# (e.g. quick build, coding style checks, etc.).
|
||||
true
|
||||
}
|
||||
|
||||
# main -------------------------------------------------------------------------
|
||||
|
||||
if test "$GIT_SENDEMAIL_FILE_COUNTER" = 1
|
||||
then
|
||||
remote=$(git config --default origin --get sendemail.validateRemote) &&
|
||||
ref=$(git config --default HEAD --get sendemail.validateRemoteRef) &&
|
||||
worktree=$(mktemp --tmpdir -d sendemail-validate.XXXXXXX) &&
|
||||
git worktree add -fd --checkout "$worktree" "refs/remotes/$remote/$ref" &&
|
||||
git config --replace-all sendemail.validateWorktree "$worktree"
|
||||
else
|
||||
worktree=$(git config --get sendemail.validateWorktree)
|
||||
fi || {
|
||||
echo "sendemail-validate: error: failed to prepare worktree" >&2
|
||||
exit 1
|
||||
}
|
||||
|
||||
unset GIT_DIR GIT_WORK_TREE
|
||||
cd "$worktree" &&
|
||||
|
||||
if grep -q "^diff --git " "$1"
|
||||
then
|
||||
validate_patch "$1"
|
||||
else
|
||||
validate_cover_letter "$1"
|
||||
fi &&
|
||||
|
||||
if test "$GIT_SENDEMAIL_FILE_COUNTER" = "$GIT_SENDEMAIL_FILE_TOTAL"
|
||||
then
|
||||
git config --unset-all sendemail.validateWorktree &&
|
||||
trap 'git worktree remove -ff "$worktree"' EXIT &&
|
||||
validate_series
|
||||
fi
|
||||
128
python-garth/.git_disabled/hooks/update.sample
Executable file
128
python-garth/.git_disabled/hooks/update.sample
Executable file
@@ -0,0 +1,128 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# An example hook script to block unannotated tags from entering.
|
||||
# Called by "git receive-pack" with arguments: refname sha1-old sha1-new
|
||||
#
|
||||
# To enable this hook, rename this file to "update".
|
||||
#
|
||||
# Config
|
||||
# ------
|
||||
# hooks.allowunannotated
|
||||
# This boolean sets whether unannotated tags will be allowed into the
|
||||
# repository. By default they won't be.
|
||||
# hooks.allowdeletetag
|
||||
# This boolean sets whether deleting tags will be allowed in the
|
||||
# repository. By default they won't be.
|
||||
# hooks.allowmodifytag
|
||||
# This boolean sets whether a tag may be modified after creation. By default
|
||||
# it won't be.
|
||||
# hooks.allowdeletebranch
|
||||
# This boolean sets whether deleting branches will be allowed in the
|
||||
# repository. By default they won't be.
|
||||
# hooks.denycreatebranch
|
||||
# This boolean sets whether remotely creating branches will be denied
|
||||
# in the repository. By default this is allowed.
|
||||
#
|
||||
|
||||
# --- Command line
|
||||
refname="$1"
|
||||
oldrev="$2"
|
||||
newrev="$3"
|
||||
|
||||
# --- Safety check
|
||||
if [ -z "$GIT_DIR" ]; then
|
||||
echo "Don't run this script from the command line." >&2
|
||||
echo " (if you want, you could supply GIT_DIR then run" >&2
|
||||
echo " $0 <ref> <oldrev> <newrev>)" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then
|
||||
echo "usage: $0 <ref> <oldrev> <newrev>" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# --- Config
|
||||
allowunannotated=$(git config --type=bool hooks.allowunannotated)
|
||||
allowdeletebranch=$(git config --type=bool hooks.allowdeletebranch)
|
||||
denycreatebranch=$(git config --type=bool hooks.denycreatebranch)
|
||||
allowdeletetag=$(git config --type=bool hooks.allowdeletetag)
|
||||
allowmodifytag=$(git config --type=bool hooks.allowmodifytag)
|
||||
|
||||
# check for no description
|
||||
projectdesc=$(sed -e '1q' "$GIT_DIR/description")
|
||||
case "$projectdesc" in
|
||||
"Unnamed repository"* | "")
|
||||
echo "*** Project description file hasn't been set" >&2
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
# --- Check types
|
||||
# if $newrev is 0000...0000, it's a commit to delete a ref.
|
||||
zero=$(git hash-object --stdin </dev/null | tr '[0-9a-f]' '0')
|
||||
if [ "$newrev" = "$zero" ]; then
|
||||
newrev_type=delete
|
||||
else
|
||||
newrev_type=$(git cat-file -t $newrev)
|
||||
fi
|
||||
|
||||
case "$refname","$newrev_type" in
|
||||
refs/tags/*,commit)
|
||||
# un-annotated tag
|
||||
short_refname=${refname##refs/tags/}
|
||||
if [ "$allowunannotated" != "true" ]; then
|
||||
echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2
|
||||
echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2
|
||||
exit 1
|
||||
fi
|
||||
;;
|
||||
refs/tags/*,delete)
|
||||
# delete tag
|
||||
if [ "$allowdeletetag" != "true" ]; then
|
||||
echo "*** Deleting a tag is not allowed in this repository" >&2
|
||||
exit 1
|
||||
fi
|
||||
;;
|
||||
refs/tags/*,tag)
|
||||
# annotated tag
|
||||
if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1
|
||||
then
|
||||
echo "*** Tag '$refname' already exists." >&2
|
||||
echo "*** Modifying a tag is not allowed in this repository." >&2
|
||||
exit 1
|
||||
fi
|
||||
;;
|
||||
refs/heads/*,commit)
|
||||
# branch
|
||||
if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then
|
||||
echo "*** Creating a branch is not allowed in this repository" >&2
|
||||
exit 1
|
||||
fi
|
||||
;;
|
||||
refs/heads/*,delete)
|
||||
# delete branch
|
||||
if [ "$allowdeletebranch" != "true" ]; then
|
||||
echo "*** Deleting a branch is not allowed in this repository" >&2
|
||||
exit 1
|
||||
fi
|
||||
;;
|
||||
refs/remotes/*,commit)
|
||||
# tracking branch
|
||||
;;
|
||||
refs/remotes/*,delete)
|
||||
# delete tracking branch
|
||||
if [ "$allowdeletebranch" != "true" ]; then
|
||||
echo "*** Deleting a tracking branch is not allowed in this repository" >&2
|
||||
exit 1
|
||||
fi
|
||||
;;
|
||||
*)
|
||||
# Anything else (is there anything else?)
|
||||
echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
# --- Finished
|
||||
exit 0
|
||||
BIN
python-garth/.git_disabled/index
Normal file
Binary file not shown.
6
python-garth/.git_disabled/info/exclude
Normal file
@@ -0,0 +1,6 @@
# git ls-files --others --exclude-from=.git/info/exclude
# Lines that start with '#' are comments.
# For a project mostly in C, the following would be a good set of
# exclude patterns (uncomment them if you want to use them):
# *.[oa]
# *~
1
python-garth/.git_disabled/logs/HEAD
Normal file
@@ -0,0 +1 @@
0000000000000000000000000000000000000000 4b027e14574987c7ff329c5ea4980a624f954cad sstent <stuart.stent@gmail.com> 1756500815 -0700 clone: from https://github.com/matin/garth.git
1
python-garth/.git_disabled/logs/refs/heads/main
Normal file
@@ -0,0 +1 @@
0000000000000000000000000000000000000000 4b027e14574987c7ff329c5ea4980a624f954cad sstent <stuart.stent@gmail.com> 1756500815 -0700 clone: from https://github.com/matin/garth.git
1
python-garth/.git_disabled/logs/refs/remotes/origin/HEAD
Normal file
@@ -0,0 +1 @@
0000000000000000000000000000000000000000 4b027e14574987c7ff329c5ea4980a624f954cad sstent <stuart.stent@gmail.com> 1756500815 -0700 clone: from https://github.com/matin/garth.git
Binary file not shown.
Binary file not shown.
Binary file not shown.
47
python-garth/.git_disabled/packed-refs
Normal file
47
python-garth/.git_disabled/packed-refs
Normal file
@@ -0,0 +1,47 @@
|
||||
# pack-refs with: peeled fully-peeled sorted
|
||||
e88832aab75f7cbdc497d40100a25f7ec9d50302 refs/remotes/origin/add-hydration-support
|
||||
dab68681cede95b96b4a857041255cb2f6042205 refs/remotes/origin/add-training-status
|
||||
b8989b38968d32b4a41bdc9678c4156d6703f454 refs/remotes/origin/coderabbitai/chat/20bTtA
|
||||
e6e142469cb12298129e8fef5772e33a569e96ca refs/remotes/origin/dependabot/github_actions/actions/checkout-5
|
||||
4546623a7ce71e5a87ef202b05c316a2d849efca refs/remotes/origin/hydration
|
||||
4b027e14574987c7ff329c5ea4980a624f954cad refs/remotes/origin/main
|
||||
0e739d308dc2c9a36ad9efc791787340bebb2ec4 refs/remotes/origin/minor-auth-refactor
|
||||
66f488625529bc6a7bafadbb11bf064e30f6850f refs/remotes/origin/refactor/restart
|
||||
19a8fc24d787a6b83b4ac87e6ab9ae8af407a1ee refs/tags/0.4.28
|
||||
bed4a7cd32310c986b1c61a45dd6dfb6c5988fba refs/tags/0.4.29
|
||||
53a166de4cc435b8f694d142f9af4454baada131 refs/tags/0.4.30
|
||||
ca2410a9650c87514a806edbe54e89408acbbe76 refs/tags/0.4.31
|
||||
c77cd9b405082b474914caadeac5cc18f0f6d70e refs/tags/0.4.32
|
||||
2a0dae96d44943edf667f6173479317d2623aa17 refs/tags/0.4.33
|
||||
2d1b21192270e6598ec1326ed0d6017ec23ff057 refs/tags/0.4.34
|
||||
ce5874bbb6492db91a56f76b7d9ad123aa790900 refs/tags/0.4.35
|
||||
5ee41c540b1a2ffd69277ffa99d4dd97a382dbc5 refs/tags/0.4.36
|
||||
515933f709db3b00f5b06d5ba65b267dcda191b9 refs/tags/0.4.37
|
||||
43196b588c553fcc335251d248434002caa0dab0 refs/tags/0.4.38
|
||||
69a1fd4bfa2a697e15e15661eae60f6d9541daf2 refs/tags/0.4.39
|
||||
fad9855b65837d7f5944ae2cd982e4af16b22ff0 refs/tags/0.4.40
|
||||
5aadba6f2e01ae5eb76f2064d4d7e94f2483dbf9 refs/tags/0.4.41
|
||||
6aeb0faaf0d6b473d8dc161373068d2f5413fdfe refs/tags/0.4.42
|
||||
34dc0a162c19c3ec4728517239171164b8009819 refs/tags/0.4.43
|
||||
3a5ebbcdd836bce4b9d5a84191819e24084ff5c7 refs/tags/0.4.44
|
||||
316787d1e3ff69c09725b2eb8ded748a4422abb3 refs/tags/0.4.45
|
||||
ae1b425ea0c7560155ee5e9e2e828fda7c1be43d refs/tags/0.4.46
|
||||
960d8c0ac0b68672e9edc7b9738ba77d60fa806a refs/tags/0.4.47
|
||||
b557b886b229ad304568988cf716c510ef7ecbd7 refs/tags/0.5.0
|
||||
3004df3fb907c81153c9536c142474faf231b698 refs/tags/0.5.1
|
||||
d15e2afd439268ed4c9b4db5a8d75d2afeba7a5d refs/tags/0.5.10
|
||||
ad045ff989456934cb312313b01f0f1ad19af614 refs/tags/0.5.11
|
||||
517eeeda1c6b6504abe7898aa0127f98bbdfc261 refs/tags/0.5.12
|
||||
26b5e2eefdd26b5e5b9bb4b48260a702521cc976 refs/tags/0.5.13
|
||||
922f3c305c71fb177c3bb5e3ca6697ed2e34424c refs/tags/0.5.2
|
||||
a05eb5b25ba8612759e6fe3667b14e26c6af014e refs/tags/0.5.3
|
||||
1a17721e24db7c2fa7f5df2326d9b9919f5de8e5 refs/tags/0.5.4
|
||||
5f27385ecec57b7088f63c9a499b58da5661e904 refs/tags/0.5.5
|
||||
f2592742727e3c95545dec27a7f0781dbdf5d2cd refs/tags/0.5.6
|
||||
11b2ed554611bc3ce488df631d54b54afe097b6e refs/tags/0.5.7
|
||||
10061601252844a18419ecfe0249aa18d3ceb1ab refs/tags/0.5.8
|
||||
2c5b7d7e45bcadbae39e8a4f53169a1614523ea2 refs/tags/0.5.9
|
||||
06b6bf391c1a0a9f0b412057a195415a9dc8755e refs/tags/v0.5.14
|
||||
f58c7e650754e2c45f306f2f707efb389032a2e7 refs/tags/v0.5.15
|
||||
c631afe82ac07302abf499e019e2ebe38c1111ac refs/tags/v0.5.16
|
||||
4b027e14574987c7ff329c5ea4980a624f954cad refs/tags/v0.5.17
|
||||
1
python-garth/.git_disabled/refs/heads/main
Normal file
@@ -0,0 +1 @@
4b027e14574987c7ff329c5ea4980a624f954cad
1
python-garth/.git_disabled/refs/remotes/origin/HEAD
Normal file
@@ -0,0 +1 @@
ref: refs/remotes/origin/main
1
python-garth/.gitattributes
vendored
Normal file
@@ -0,0 +1 @@
*.ipynb linguist-documentation=true
17
python-garth/.github/dependabot.yml
vendored
Normal file
@@ -0,0 +1,17 @@
version: 2
updates:
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: daily
      time: "20:00"
      timezone: "America/Mexico_City"
    open-pull-requests-limit: 5

  - package-ecosystem: pip
    directory: "/"
    schedule:
      interval: daily
      time: "20:00"
      timezone: "America/Mexico_City"
    open-pull-requests-limit: 5
87
python-garth/.github/workflows/ci.yml
vendored
Normal file
87
python-garth/.github/workflows/ci.yml
vendored
Normal file
@@ -0,0 +1,87 @@
|
||||
name: CI
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
tags:
|
||||
- "**"
|
||||
pull_request: {}
|
||||
|
||||
env:
|
||||
COLUMNS: 150
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: read
|
||||
checks: write
|
||||
statuses: write
|
||||
|
||||
jobs:
|
||||
lint:
|
||||
runs-on: ubuntu-latest
|
||||
name: lint ${{ matrix.python-version }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
python-version: ["3.10", "3.11", "3.12", "3.13"]
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
|
||||
- uses: astral-sh/setup-uv@v6
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
uv pip install --system -e .
|
||||
uv pip install --system --group linting
|
||||
|
||||
- uses: pre-commit/action@v3.0.1
|
||||
with:
|
||||
extra_args: --all-files --verbose
|
||||
env:
|
||||
SKIP: no-commit-to-branch
|
||||
|
||||
test:
|
||||
name: test ${{ matrix.python-version }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os: [ubuntu, macos, windows]
|
||||
python-version: ["3.10", "3.11", "3.12", "3.13"]
|
||||
|
||||
env:
|
||||
PYTHON: ${{ matrix.python-version }}
|
||||
OS: ${{ matrix.os }}
|
||||
|
||||
runs-on: ${{ matrix.os }}-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
|
||||
- uses: astral-sh/setup-uv@v6
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
uv pip install --system -e .
|
||||
uv pip install --system --group testing
|
||||
|
||||
- name: test
|
||||
run: make testcov
|
||||
env:
|
||||
CONTEXT: ${{ runner.os }}-py${{ matrix.python-version }}-with-deps
|
||||
|
||||
- name: upload coverage to Codecov
|
||||
uses: codecov/codecov-action@v5
|
||||
with:
|
||||
files: ./coverage/coverage.xml
|
||||
flags: unittests
|
||||
name: codecov-umbrella
|
||||
fail_ci_if_error: true
|
||||
30
python-garth/.github/workflows/publish.yml
vendored
Normal file
30
python-garth/.github/workflows/publish.yml
vendored
Normal file
@@ -0,0 +1,30 @@
|
||||
name: Publish to PyPI
|
||||
|
||||
on:
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
jobs:
|
||||
publish:
|
||||
runs-on: ubuntu-latest
|
||||
environment:
|
||||
name: pypi
|
||||
url: https://pypi.org/p/garth
|
||||
permissions:
|
||||
id-token: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.13"
|
||||
|
||||
- uses: astral-sh/setup-uv@v6
|
||||
|
||||
- name: Build package
|
||||
run: |
|
||||
uv build
|
||||
|
||||
- name: Publish to PyPI
|
||||
uses: pypa/gh-action-pypi-publish@release/v1
|
||||
53
python-garth/.gitignore
vendored
Normal file
53
python-garth/.gitignore
vendored
Normal file
@@ -0,0 +1,53 @@
|
||||
# Virtual environments
|
||||
env/
|
||||
env3*/
|
||||
venv/
|
||||
.venv/
|
||||
.envrc
|
||||
.env
|
||||
__pypackages__/
|
||||
|
||||
# IDEs and editors
|
||||
.idea/
|
||||
|
||||
# Package distribution and build files
|
||||
*.egg-info/
|
||||
dist/
|
||||
/build/
|
||||
_build/
|
||||
|
||||
# Python bytecode and cache files
|
||||
*.py[cod]
|
||||
.cache/
|
||||
/.ghtopdep_cache/
|
||||
.hypothesis
|
||||
.mypy_cache/
|
||||
.pytest_cache/
|
||||
/.ruff_cache/
|
||||
|
||||
# Benchmark and test files
|
||||
/benchmarks/*.json
|
||||
/htmlcov/
|
||||
/codecov.sh
|
||||
/coverage.lcov
|
||||
.coverage
|
||||
test.py
|
||||
/coverage/
|
||||
|
||||
# Documentation files
|
||||
/docs/changelog.md
|
||||
/site/
|
||||
/site.zip
|
||||
|
||||
# Other files and folders
|
||||
.python-version
|
||||
.DS_Store
|
||||
.auto-format
|
||||
/sandbox/
|
||||
/worktrees/
|
||||
.pdm-python
|
||||
tmp/
|
||||
.pdm.toml
|
||||
|
||||
# exclude saved oauth tokens
|
||||
oauth*_token.json
|
||||
6
python-garth/.markdownlint.json
Normal file
@@ -0,0 +1,6 @@
{
  "MD033": {
    "allowed_elements": ["img", "a", "source", "picture"]
  },
  "MD046": false
}
33
python-garth/.pre-commit-config.yaml
Normal file
33
python-garth/.pre-commit-config.yaml
Normal file
@@ -0,0 +1,33 @@
|
||||
exclude: '.*\.ipynb$'
|
||||
|
||||
repos:
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v4.6.0
|
||||
hooks:
|
||||
- id: check-yaml
|
||||
args: ['--unsafe']
|
||||
- id: check-toml
|
||||
- id: end-of-file-fixer
|
||||
- id: trailing-whitespace
|
||||
|
||||
- repo: https://github.com/codespell-project/codespell
|
||||
rev: v2.2.6
|
||||
hooks:
|
||||
- id: codespell
|
||||
additional_dependencies:
|
||||
- tomli
|
||||
exclude: 'cassettes/'
|
||||
|
||||
- repo: https://github.com/DavidAnson/markdownlint-cli2
|
||||
rev: v0.12.1
|
||||
hooks:
|
||||
- id: markdownlint-cli2
|
||||
|
||||
- repo: local
|
||||
hooks:
|
||||
- id: lint
|
||||
name: lint
|
||||
entry: make lint
|
||||
types: [python]
|
||||
language: system
|
||||
pass_filenames: false
|
||||
21
python-garth/LICENSE
Normal file
21
python-garth/LICENSE
Normal file
@@ -0,0 +1,21 @@
|
||||
# MIT License
|
||||
|
||||
Copyright (c) 2023 Matin Tamizi
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
81
python-garth/Makefile
Normal file
81
python-garth/Makefile
Normal file
@@ -0,0 +1,81 @@
|
||||
# Based on Makefile for pydantic (github.com/pydantic/pydantic/blob/main/Makefile)
|
||||
|
||||
.DEFAULT_GOAL := all
|
||||
sources = src tests
|
||||
|
||||
.PHONY: .uv ## Check that uv is installed
|
||||
.uv:
|
||||
@uv --version || echo 'Please install uv: https://docs.astral.sh/uv/getting-started/installation/'
|
||||
|
||||
.PHONY: .pre-commit ## Check that pre-commit is installed
|
||||
.pre-commit:
|
||||
@pre-commit -V || echo 'Please install pre-commit: https://pre-commit.com/'
|
||||
|
||||
.PHONY: install ## Install the package, dependencies, and pre-commit for local development
|
||||
install: .uv .pre-commit
|
||||
uv pip install -e .
|
||||
uv pip install --group dev --group linting --group testing
|
||||
pre-commit install --install-hooks
|
||||
|
||||
.PHONY: sync ## Sync dependencies and lockfiles
|
||||
sync: .uv clean
|
||||
uv pip install -e . --force-reinstall
|
||||
uv sync
|
||||
|
||||
.PHONY: format ## Auto-format python source files
|
||||
format: .uv
|
||||
uv run ruff format $(sources)
|
||||
uv run ruff check --fix $(sources)
|
||||
|
||||
.PHONY: lint ## Lint python source files
|
||||
lint: .uv
|
||||
uv run ruff format --check $(sources)
|
||||
uv run ruff check $(sources)
|
||||
uv run mypy $(sources)
|
||||
|
||||
.PHONY: codespell ## Use Codespell to do spellchecking
|
||||
codespell: .pre-commit
|
||||
pre-commit run codespell --all-files
|
||||
|
||||
.PHONY: test ## Run all tests, skipping the type-checker integration tests
|
||||
test: .uv
|
||||
uv run coverage run -m pytest -v --durations=10
|
||||
|
||||
.PHONY: testcov ## Run tests and generate a coverage report, skipping the type-checker integration tests
|
||||
testcov: test
|
||||
@echo "building coverage html"
|
||||
@uv run coverage html
|
||||
@echo "building coverage xml"
|
||||
@uv run coverage xml -o coverage/coverage.xml
|
||||
|
||||
.PHONY: all ## Run the standard set of checks performed in CI
|
||||
all: lint codespell testcov
|
||||
|
||||
.PHONY: clean ## Clear local caches and build artifacts
|
||||
clean:
|
||||
find . -type d -name __pycache__ -exec rm -r {} +
|
||||
find . -type f -name '*.py[co]' -exec rm -f {} +
|
||||
find . -type f -name '*~' -exec rm -f {} +
|
||||
find . -type f -name '.*~' -exec rm -f {} +
|
||||
rm -rf .cache
|
||||
rm -rf .pytest_cache
|
||||
rm -rf .ruff_cache
|
||||
rm -rf htmlcov
|
||||
rm -rf *.egg-info
|
||||
rm -f .coverage
|
||||
rm -f .coverage.*
|
||||
rm -rf build
|
||||
rm -rf dist
|
||||
rm -rf site
|
||||
rm -rf docs/_build
|
||||
rm -rf docs/.changelog.md docs/.version.md docs/.tmp_schema_mappings.html
|
||||
rm -rf fastapi/test.db
|
||||
rm -rf coverage.xml
|
||||
rm -rf __pypackages__ uv.lock
|
||||
|
||||
.PHONY: help ## Display this message
|
||||
help:
|
||||
@grep -E \
|
||||
'^.PHONY: .*?## .*$$' $(MAKEFILE_LIST) | \
|
||||
sort | \
|
||||
awk 'BEGIN {FS = ".PHONY: |## "}; {printf "\033[36m%-19s\033[0m %s\n", $$2, $$3}'
|
||||
1108
python-garth/README.md
Normal file
1108
python-garth/README.md
Normal file
File diff suppressed because it is too large
Load Diff
1084
python-garth/colabs/chatgpt_analysis_of_stats.ipynb
Normal file
1084
python-garth/colabs/chatgpt_analysis_of_stats.ipynb
Normal file
File diff suppressed because one or more lines are too long
478
python-garth/colabs/sleep.ipynb
Normal file
478
python-garth/colabs/sleep.ipynb
Normal file
File diff suppressed because one or more lines are too long
502
python-garth/colabs/stress.ipynb
Normal file
502
python-garth/colabs/stress.ipynb
Normal file
File diff suppressed because one or more lines are too long
89
python-garth/pyproject.toml
Normal file
89
python-garth/pyproject.toml
Normal file
@@ -0,0 +1,89 @@
|
||||
[project]
|
||||
name = "garth"
|
||||
dynamic = ["version"]
|
||||
description = "Garmin SSO auth + Connect client"
|
||||
authors = [
|
||||
{name = "Matin Tamizi", email = "mtamizi@duck.com"},
|
||||
]
|
||||
dependencies = [
|
||||
"requests>=2.0.0,<3.0.0",
|
||||
"pydantic>=1.10.12,<3.0.0",
|
||||
"requests-oauthlib>=1.3.1,<3.0.0",
|
||||
]
|
||||
requires-python = ">=3.10"
|
||||
readme = "README.md"
|
||||
license = {text = "MIT"}
|
||||
classifiers = [
|
||||
"Development Status :: 5 - Production/Stable",
|
||||
"License :: OSI Approved :: MIT License",
|
||||
"Programming Language :: Python :: 3.10",
|
||||
"Programming Language :: Python :: 3.11",
|
||||
"Programming Language :: Python :: 3.12",
|
||||
"Programming Language :: Python :: 3.13",
|
||||
"Operating System :: MacOS :: MacOS X",
|
||||
"Operating System :: Microsoft :: Windows",
|
||||
"Operating System :: POSIX :: Linux",
|
||||
"Operating System :: OS Independent",
|
||||
]
|
||||
keywords = ["garmin", "garmin api", "garmin connect", "garmin sso"]
|
||||
|
||||
[project.urls]
|
||||
"Homepage" = "https://github.com/matin/garth"
|
||||
"Repository" = "https://github.com/matin/garth"
|
||||
"Issues" = "https://github.com/matin/garth/issues"
|
||||
"Changelog" = "https://github.com/matin/garth/releases"
|
||||
|
||||
[build-system]
|
||||
requires = ["hatchling"]
|
||||
build-backend = "hatchling.build"
|
||||
|
||||
[tool.hatch.version]
|
||||
path = "src/garth/version.py"
|
||||
|
||||
[tool.pytest.ini_options]
|
||||
addopts = "--ignore=__pypackages__ --ignore-glob=*.yaml"
|
||||
|
||||
[tool.mypy]
|
||||
ignore_missing_imports = true
|
||||
|
||||
[tool.ruff]
|
||||
line-length = 79
|
||||
indent-width = 4
|
||||
target-version = "py310"
|
||||
|
||||
[tool.ruff.lint]
|
||||
select = ["E", "F", "I"]
|
||||
ignore = []
|
||||
|
||||
[tool.ruff.format]
|
||||
quote-style = "double"
|
||||
indent-style = "space"
|
||||
skip-magic-trailing-comma = false
|
||||
line-ending = "auto"
|
||||
|
||||
[dependency-groups]
|
||||
dev = [
|
||||
"ipython",
|
||||
"ipdb",
|
||||
"ipykernel",
|
||||
"pandas",
|
||||
"matplotlib",
|
||||
]
|
||||
linting = [
|
||||
"ruff",
|
||||
"mypy",
|
||||
"types-requests",
|
||||
]
|
||||
testing = [
|
||||
"coverage",
|
||||
"pytest",
|
||||
"pytest-vcr",
|
||||
]
|
||||
|
||||
[tool.ruff.lint.isort]
|
||||
known-first-party = ["garth"]
|
||||
combine-as-imports = true
|
||||
lines-after-imports = 2
|
||||
|
||||
[project.scripts]
|
||||
garth = "garth.cli:main"
|
||||
59
python-garth/src/garth/__init__.py
Normal file
@@ -0,0 +1,59 @@
from .data import (
    BodyBatteryData,
    DailyBodyBatteryStress,
    HRVData,
    SleepData,
    WeightData,
)
from .http import Client, client
from .stats import (
    DailyHRV,
    DailyHydration,
    DailyIntensityMinutes,
    DailySleep,
    DailySteps,
    DailyStress,
    WeeklyIntensityMinutes,
    WeeklySteps,
    WeeklyStress,
)
from .users import UserProfile, UserSettings
from .version import __version__


__all__ = [
    "BodyBatteryData",
    "Client",
    "DailyBodyBatteryStress",
    "DailyHRV",
    "DailyHydration",
    "DailyIntensityMinutes",
    "DailySleep",
    "DailySteps",
    "DailyStress",
    "HRVData",
    "SleepData",
    "WeightData",
    "UserProfile",
    "UserSettings",
    "WeeklyIntensityMinutes",
    "WeeklySteps",
    "WeeklyStress",
    "__version__",
    "client",
    "configure",
    "connectapi",
    "download",
    "login",
    "resume",
    "save",
    "upload",
]

configure = client.configure
connectapi = client.connectapi
download = client.download
login = client.login
resume = client.load
save = client.dump
upload = client.upload
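The module-level aliases at the bottom are the package's public entry points. A rough usage sketch follows; the credentials, token directory, and endpoint shown here are placeholders, not values from this commit:

    import garth

    garth.configure(domain="garmin.com")        # use garmin.cn for China
    garth.login("you@example.com", "password")  # placeholder credentials
    garth.save("~/.garth")                      # persist OAuth tokens (client.dump)

    # A later session can skip the password login entirely:
    garth.resume("~/.garth")                    # client.load
    profile = garth.connectapi("/userprofile-service/socialProfile")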
37
python-garth/src/garth/auth_tokens.py
Normal file
@@ -0,0 +1,37 @@
import time
from datetime import datetime

from pydantic.dataclasses import dataclass


@dataclass
class OAuth1Token:
    oauth_token: str
    oauth_token_secret: str
    mfa_token: str | None = None
    mfa_expiration_timestamp: datetime | None = None
    domain: str | None = None


@dataclass
class OAuth2Token:
    scope: str
    jti: str
    token_type: str
    access_token: str
    refresh_token: str
    expires_in: int
    expires_at: int
    refresh_token_expires_in: int
    refresh_token_expires_at: int

    @property
    def expired(self):
        return self.expires_at < time.time()

    @property
    def refresh_expired(self):
        return self.refresh_token_expires_at < time.time()

    def __str__(self):
        return f"{self.token_type.title()} {self.access_token}"
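The two expiry properties are plain clock comparisons against epoch seconds. A small sketch with made-up token values:

    import time

    token = OAuth2Token(
        scope="CONNECT_READ",  # all values below are made up
        jti="abc",
        token_type="Bearer",
        access_token="access",
        refresh_token="refresh",
        expires_in=3600,
        expires_at=int(time.time()) + 3600,
        refresh_token_expires_in=7200,
        refresh_token_expires_at=int(time.time()) + 7200,
    )

    assert not token.expired           # access token still within its lifetime
    assert not token.refresh_expired   # refresh token still within its lifetime
    print(token)                       # "Bearer access" -- the Authorization header value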
34
python-garth/src/garth/cli.py
Normal file
@@ -0,0 +1,34 @@
import argparse
import getpass

import garth


def main():
    parser = argparse.ArgumentParser(prog="garth")
    parser.add_argument(
        "--domain",
        "-d",
        default="garmin.com",
        help=(
            "Domain for Garmin Connect (default: garmin.com). "
            "Use garmin.cn for China."
        ),
    )
    subparsers = parser.add_subparsers(dest="command")
    subparsers.add_parser(
        "login", help="Authenticate with Garmin Connect and print token"
    )

    args = parser.parse_args()
    garth.configure(domain=args.domain)

    match args.command:
        case "login":
            email = input("Email: ")
            password = getpass.getpass("Password: ")
            garth.login(email, password)
            token = garth.client.dumps()
            print(token)
        case _:
            parser.print_help()
21
python-garth/src/garth/data/__init__.py
Normal file
@@ -0,0 +1,21 @@
__all__ = [
    "BodyBatteryData",
    "BodyBatteryEvent",
    "BodyBatteryReading",
    "DailyBodyBatteryStress",
    "HRVData",
    "SleepData",
    "StressReading",
    "WeightData",
]

from .body_battery import (
    BodyBatteryData,
    BodyBatteryEvent,
    BodyBatteryReading,
    DailyBodyBatteryStress,
    StressReading,
)
from .hrv import HRVData
from .sleep import SleepData
from .weight import WeightData
47
python-garth/src/garth/data/_base.py
Normal file
@@ -0,0 +1,47 @@
from abc import ABC, abstractmethod
from concurrent.futures import ThreadPoolExecutor
from datetime import date
from itertools import chain

from typing_extensions import Self

from .. import http
from ..utils import date_range, format_end_date


MAX_WORKERS = 10


class Data(ABC):
    @classmethod
    @abstractmethod
    def get(
        cls, day: date | str, *, client: http.Client | None = None
    ) -> Self | list[Self] | None: ...

    @classmethod
    def list(
        cls,
        end: date | str | None = None,
        days: int = 1,
        *,
        client: http.Client | None = None,
        max_workers: int = MAX_WORKERS,
    ) -> list[Self]:
        client = client or http.client
        end = format_end_date(end)

        def fetch_date(date_):
            if day := cls.get(date_, client=client):
                return day

        dates = date_range(end, days)
        with ThreadPoolExecutor(max_workers=max_workers) as executor:
            data = list(executor.map(fetch_date, dates))
        data = [day for day in data if day is not None]

        return list(
            chain.from_iterable(
                day if isinstance(day, list) else [day] for day in data
            )
        )
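To make the contract concrete, here is a hypothetical subclass: get() fetches a single day and the inherited list() fans those calls out over the thread pool and flattens the results. The endpoint, class name, and fields below are invented for illustration; only the imports reference modules actually added in this diff:

    from datetime import date

    from pydantic.dataclasses import dataclass

    from garth import http
    from garth.data._base import Data


    @dataclass
    class DailyExample(Data):
        calendar_date: date
        value: int

        @classmethod
        def get(cls, day, *, client=None):
            client = client or http.client
            data = client.connectapi(f"/example-service/daily/{day}")  # invented path
            return cls(**data) if data else None

    # DailyExample.list("2025-01-07", days=7) calls get() for each of the seven
    # days concurrently and drops the days that returned None.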
11
python-garth/src/garth/data/body_battery/__init__.py
Normal file
@@ -0,0 +1,11 @@
__all__ = [
    "BodyBatteryData",
    "BodyBatteryEvent",
    "BodyBatteryReading",
    "DailyBodyBatteryStress",
    "StressReading",
]

from .daily_stress import DailyBodyBatteryStress
from .events import BodyBatteryData, BodyBatteryEvent
from .readings import BodyBatteryReading, StressReading
90
python-garth/src/garth/data/body_battery/daily_stress.py
Normal file
90
python-garth/src/garth/data/body_battery/daily_stress.py
Normal file
@@ -0,0 +1,90 @@
|
||||
from datetime import date, datetime
|
||||
from functools import cached_property
|
||||
from typing import Any
|
||||
|
||||
from pydantic.dataclasses import dataclass
|
||||
from typing_extensions import Self
|
||||
|
||||
from ... import http
|
||||
from ...utils import camel_to_snake_dict, format_end_date
|
||||
from .._base import Data
|
||||
from .readings import (
|
||||
BodyBatteryReading,
|
||||
StressReading,
|
||||
parse_body_battery_readings,
|
||||
parse_stress_readings,
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
class DailyBodyBatteryStress(Data):
|
||||
"""Complete daily Body Battery and stress data."""
|
||||
|
||||
user_profile_pk: int
|
||||
calendar_date: date
|
||||
start_timestamp_gmt: datetime
|
||||
end_timestamp_gmt: datetime
|
||||
start_timestamp_local: datetime
|
||||
end_timestamp_local: datetime
|
||||
max_stress_level: int
|
||||
avg_stress_level: int
|
||||
stress_chart_value_offset: int
|
||||
stress_chart_y_axis_origin: int
|
||||
stress_values_array: list[list[int]]
|
||||
body_battery_values_array: list[list[Any]]
|
||||
|
||||
@cached_property
|
||||
def body_battery_readings(self) -> list[BodyBatteryReading]:
|
||||
"""Convert body battery values array to structured readings."""
|
||||
return parse_body_battery_readings(self.body_battery_values_array)
|
||||
|
||||
@property
|
||||
def stress_readings(self) -> list[StressReading]:
|
||||
"""Convert stress values array to structured readings."""
|
||||
return parse_stress_readings(self.stress_values_array)
|
||||
|
||||
@property
|
||||
def current_body_battery(self) -> int | None:
|
||||
"""Get the latest Body Battery level."""
|
||||
readings = self.body_battery_readings
|
||||
return readings[-1].level if readings else None
|
||||
|
||||
@property
|
||||
def max_body_battery(self) -> int | None:
|
||||
"""Get the maximum Body Battery level for the day."""
|
||||
readings = self.body_battery_readings
|
||||
return max(reading.level for reading in readings) if readings else None
|
||||
|
||||
@property
|
||||
def min_body_battery(self) -> int | None:
|
||||
"""Get the minimum Body Battery level for the day."""
|
||||
readings = self.body_battery_readings
|
||||
return min(reading.level for reading in readings) if readings else None
|
||||
|
||||
@property
|
||||
def body_battery_change(self) -> int | None:
|
||||
"""Calculate the Body Battery change for the day."""
|
||||
readings = self.body_battery_readings
|
||||
if not readings or len(readings) < 2:
|
||||
return None
|
||||
return readings[-1].level - readings[0].level
|
||||
|
||||
@classmethod
|
||||
def get(
|
||||
cls,
|
||||
day: date | str | None = None,
|
||||
*,
|
||||
client: http.Client | None = None,
|
||||
) -> Self | None:
|
||||
"""Get complete Body Battery and stress data for a specific date."""
|
||||
client = client or http.client
|
||||
date_str = format_end_date(day)
|
||||
|
||||
path = f"/wellness-service/wellness/dailyStress/{date_str}"
|
||||
response = client.connectapi(path)
|
||||
|
||||
if not isinstance(response, dict):
|
||||
return None
|
||||
|
||||
snake_response = camel_to_snake_dict(response)
|
||||
return cls(**snake_response)
|
||||
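A short usage sketch for the class above; the date is arbitrary and the fields mirror the wellness dailyStress response shown in this diff:

    day = DailyBodyBatteryStress.get("2025-01-01")
    if day is not None:
        print(day.avg_stress_level, day.max_stress_level)
        print(day.current_body_battery)   # latest reading of the day
        print(day.body_battery_change)    # last reading minus first reading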
227
python-garth/src/garth/data/body_battery/events.py
Normal file
227
python-garth/src/garth/data/body_battery/events.py
Normal file
@@ -0,0 +1,227 @@
|
||||
import logging
|
||||
from datetime import date, datetime
|
||||
from typing import Any
|
||||
|
||||
from pydantic.dataclasses import dataclass
|
||||
from typing_extensions import Self
|
||||
|
||||
from ... import http
|
||||
from ...utils import format_end_date
|
||||
from .._base import Data
|
||||
from .readings import BodyBatteryReading, parse_body_battery_readings
|
||||
|
||||
|
||||
MAX_WORKERS = 10
|
||||
|
||||
|
||||
@dataclass
|
||||
class BodyBatteryEvent:
|
||||
"""Body Battery event data."""
|
||||
|
||||
event_type: str
|
||||
event_start_time_gmt: datetime
|
||||
timezone_offset: int
|
||||
duration_in_milliseconds: int
|
||||
body_battery_impact: int
|
||||
feedback_type: str
|
||||
short_feedback: str
|
||||
|
||||
|
||||
@dataclass
|
||||
class BodyBatteryData(Data):
|
||||
"""Legacy Body Battery events data (sleep events only)."""
|
||||
|
||||
event: BodyBatteryEvent | None = None
|
||||
activity_name: str | None = None
|
||||
activity_type: str | None = None
|
||||
activity_id: str | None = None
|
||||
average_stress: float | None = None
|
||||
stress_values_array: list[list[int]] | None = None
|
||||
body_battery_values_array: list[list[Any]] | None = None
|
||||
|
||||
@property
|
||||
def body_battery_readings(self) -> list[BodyBatteryReading]:
|
||||
"""Convert body battery values array to structured readings."""
|
||||
return parse_body_battery_readings(self.body_battery_values_array)
|
||||
|
||||
@property
|
||||
def current_level(self) -> int | None:
|
||||
"""Get the latest Body Battery level."""
|
||||
readings = self.body_battery_readings
|
||||
return readings[-1].level if readings else None
|
||||
|
||||
@property
|
||||
def max_level(self) -> int | None:
|
||||
"""Get the maximum Body Battery level for the day."""
|
||||
readings = self.body_battery_readings
|
||||
return max(reading.level for reading in readings) if readings else None
|
||||
|
||||
@property
|
||||
def min_level(self) -> int | None:
|
||||
"""Get the minimum Body Battery level for the day."""
|
||||
readings = self.body_battery_readings
|
||||
return min(reading.level for reading in readings) if readings else None
|
||||
|
||||
@classmethod
|
||||
def get(
|
||||
cls,
|
||||
date_str: str | date | None = None,
|
||||
*,
|
||||
client: http.Client | None = None,
|
||||
) -> list[Self]:
|
||||
"""Get Body Battery events for a specific date."""
|
||||
client = client or http.client
|
||||
date_str = format_end_date(date_str)
|
||||
|
||||
path = f"/wellness-service/wellness/bodyBattery/events/{date_str}"
|
||||
try:
|
||||
response = client.connectapi(path)
|
||||
except Exception as e:
|
||||
logging.warning(f"Failed to fetch Body Battery events: {e}")
|
||||
return []
|
||||
|
||||
if not isinstance(response, list):
|
||||
return []
|
||||
|
||||
events = []
|
||||
for item in response:
|
||||
try:
|
||||
# Parse event data with validation
|
||||
event_data = item.get("event")
|
||||
|
||||
# Validate event_data exists before accessing properties
|
||||
if event_data is None:
|
||||
logging.warning(f"Missing event data in item: {item}")
|
||||
event = None
|
||||
else:
|
||||
# Validate and parse datetime with explicit error handling
|
||||
event_start_time_str = event_data.get("eventStartTimeGmt")
|
||||
if not event_start_time_str:
|
||||
logging.error(
|
||||
f"Missing eventStartTimeGmt in event data: "
|
||||
f"{event_data}"
|
||||
)
|
||||
raise ValueError(
|
||||
"eventStartTimeGmt is required but missing"
|
||||
)
|
||||
|
||||
try:
|
||||
event_start_time_gmt = datetime.fromisoformat(
|
||||
event_start_time_str.replace("Z", "+00:00")
|
||||
)
|
||||
except (ValueError, AttributeError) as e:
|
||||
logging.error(
|
||||
f"Invalid datetime format "
|
||||
f"'{event_start_time_str}': {e}"
|
||||
)
|
||||
raise ValueError(
|
||||
f"Invalid eventStartTimeGmt format: "
|
||||
f"{event_start_time_str}"
|
||||
) from e
|
||||
|
||||
# Validate numeric fields
|
||||
timezone_offset = event_data.get("timezoneOffset", 0)
|
||||
if not isinstance(timezone_offset, (int, float)):
|
||||
logging.warning(
|
||||
f"Invalid timezone_offset type: "
|
||||
f"{type(timezone_offset)}, using 0"
|
||||
)
|
||||
timezone_offset = 0
|
||||
|
||||
duration_ms = event_data.get("durationInMilliseconds", 0)
|
||||
if not isinstance(duration_ms, (int, float)):
|
||||
logging.warning(
|
||||
f"Invalid durationInMilliseconds type: "
|
||||
f"{type(duration_ms)}, using 0"
|
||||
)
|
||||
duration_ms = 0
|
||||
|
||||
battery_impact = event_data.get("bodyBatteryImpact", 0)
|
||||
if not isinstance(battery_impact, (int, float)):
|
||||
logging.warning(
|
||||
f"Invalid bodyBatteryImpact type: "
|
||||
f"{type(battery_impact)}, using 0"
|
||||
)
|
||||
battery_impact = 0
|
||||
|
||||
event = BodyBatteryEvent(
|
||||
event_type=event_data.get("eventType", ""),
|
||||
event_start_time_gmt=event_start_time_gmt,
|
||||
timezone_offset=int(timezone_offset),
|
||||
duration_in_milliseconds=int(duration_ms),
|
||||
body_battery_impact=int(battery_impact),
|
||||
feedback_type=event_data.get("feedbackType", ""),
|
||||
short_feedback=event_data.get("shortFeedback", ""),
|
||||
)
|
||||
|
||||
# Validate data arrays
|
||||
stress_values = item.get("stressValuesArray")
|
||||
if stress_values is not None and not isinstance(
|
||||
stress_values, list
|
||||
):
|
||||
logging.warning(
|
||||
f"Invalid stressValuesArray type: "
|
||||
f"{type(stress_values)}, using None"
|
||||
)
|
||||
stress_values = None
|
||||
|
||||
battery_values = item.get("bodyBatteryValuesArray")
|
||||
if battery_values is not None and not isinstance(
|
||||
battery_values, list
|
||||
):
|
||||
logging.warning(
|
||||
f"Invalid bodyBatteryValuesArray type: "
|
||||
f"{type(battery_values)}, using None"
|
||||
)
|
||||
battery_values = None
|
||||
|
||||
# Validate average_stress
|
||||
avg_stress = item.get("averageStress")
|
||||
if avg_stress is not None and not isinstance(
|
||||
avg_stress, (int, float)
|
||||
):
|
||||
logging.warning(
|
||||
f"Invalid averageStress type: "
|
||||
f"{type(avg_stress)}, using None"
|
||||
)
|
||||
avg_stress = None
|
||||
|
||||
events.append(
|
||||
cls(
|
||||
event=event,
|
||||
activity_name=item.get("activityName"),
|
||||
activity_type=item.get("activityType"),
|
||||
activity_id=item.get("activityId"),
|
||||
average_stress=avg_stress,
|
||||
stress_values_array=stress_values,
|
||||
body_battery_values_array=battery_values,
|
||||
)
|
||||
)
|
||||
|
||||
except ValueError as e:
|
||||
# Re-raise validation errors with context
|
||||
logging.error(
|
||||
f"Data validation error for Body Battery event item "
|
||||
f"{item}: {e}"
|
||||
)
|
||||
continue
|
||||
except Exception as e:
|
||||
# Log unexpected errors with full context
|
||||
logging.error(
|
||||
f"Unexpected error parsing Body Battery event item "
|
||||
f"{item}: {e}",
|
||||
exc_info=True,
|
||||
)
|
||||
continue
|
||||
|
||||
# Log summary of data quality issues
|
||||
total_items = len(response)
|
||||
parsed_events = len(events)
|
||||
if parsed_events < total_items:
|
||||
skipped = total_items - parsed_events
|
||||
logging.info(
|
||||
f"Body Battery events parsing: {parsed_events}/{total_items} "
|
||||
f"successful, {skipped} skipped due to data issues"
|
||||
)
|
||||
|
||||
return events
|
||||
56
python-garth/src/garth/data/body_battery/readings.py
Normal file
@@ -0,0 +1,56 @@
from typing import Any

from pydantic.dataclasses import dataclass


@dataclass
class BodyBatteryReading:
    """Individual Body Battery reading."""

    timestamp: int
    status: str
    level: int
    version: float


@dataclass
class StressReading:
    """Individual stress reading."""

    timestamp: int
    stress_level: int


def parse_body_battery_readings(
    body_battery_values_array: list[list[Any]] | None,
) -> list[BodyBatteryReading]:
    """Convert body battery values array to structured readings."""
    readings = []
    for values in body_battery_values_array or []:
        # Each reading requires 4 values: timestamp, status, level, version
        if len(values) >= 4:
            readings.append(
                BodyBatteryReading(
                    timestamp=values[0],
                    status=values[1],
                    level=values[2],
                    version=values[3],
                )
            )
    # Sort readings by timestamp to ensure chronological order
    return sorted(readings, key=lambda reading: reading.timestamp)


def parse_stress_readings(
    stress_values_array: list[list[int]] | None,
) -> list[StressReading]:
    """Convert stress values array to structured readings."""
    readings = []
    for values in stress_values_array or []:
        # Each reading requires 2 values: timestamp, stress_level
        if len(values) >= 2:
            readings.append(
                StressReading(timestamp=values[0], stress_level=values[1])
            )
    # Sort readings by timestamp to ensure chronological order
    return sorted(readings, key=lambda reading: reading.timestamp)
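The parsers take the raw Connect value arrays as-is. A tiny example with fabricated readings (timestamps are epoch milliseconds in the real payloads):

    raw = [
        [1756500000000, "MEASURED", 62, 2.0],
        [1756499940000, "MEASURED", 63, 2.0],  # deliberately out of order
        [1756500060000, "MEASURED"],           # too short -- skipped
    ]
    readings = parse_body_battery_readings(raw)
    assert [r.level for r in readings] == [63, 62]  # sorted by timestamp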
68
python-garth/src/garth/data/hrv.py
Normal file
68
python-garth/src/garth/data/hrv.py
Normal file
@@ -0,0 +1,68 @@
|
||||
from datetime import date, datetime
|
||||
|
||||
from pydantic.dataclasses import dataclass
|
||||
from typing_extensions import Self
|
||||
|
||||
from .. import http
|
||||
from ..utils import camel_to_snake_dict
|
||||
from ._base import Data
|
||||
|
||||
|
||||
@dataclass
|
||||
class Baseline:
|
||||
low_upper: int
|
||||
balanced_low: int
|
||||
balanced_upper: int
|
||||
marker_value: float
|
||||
|
||||
|
||||
@dataclass
|
||||
class HRVSummary:
|
||||
calendar_date: date
|
||||
weekly_avg: int
|
||||
last_night_avg: int | None
|
||||
last_night_5_min_high: int
|
||||
baseline: Baseline
|
||||
status: str
|
||||
feedback_phrase: str
|
||||
create_time_stamp: datetime
|
||||
|
||||
|
||||
@dataclass
|
||||
class HRVReading:
|
||||
hrv_value: int
|
||||
reading_time_gmt: datetime
|
||||
reading_time_local: datetime
|
||||
|
||||
|
||||
@dataclass
|
||||
class HRVData(Data):
|
||||
user_profile_pk: int
|
||||
hrv_summary: HRVSummary
|
||||
hrv_readings: list[HRVReading]
|
||||
start_timestamp_gmt: datetime
|
||||
end_timestamp_gmt: datetime
|
||||
start_timestamp_local: datetime
|
||||
end_timestamp_local: datetime
|
||||
sleep_start_timestamp_gmt: datetime
|
||||
sleep_end_timestamp_gmt: datetime
|
||||
sleep_start_timestamp_local: datetime
|
||||
sleep_end_timestamp_local: datetime
|
||||
|
||||
@classmethod
|
||||
def get(
|
||||
cls, day: date | str, *, client: http.Client | None = None
|
||||
) -> Self | None:
|
||||
client = client or http.client
|
||||
path = f"/hrv-service/hrv/{day}"
|
||||
hrv_data = client.connectapi(path)
|
||||
if not hrv_data:
|
||||
return None
|
||||
hrv_data = camel_to_snake_dict(hrv_data)
|
||||
assert isinstance(hrv_data, dict)
|
||||
return cls(**hrv_data)
|
||||
|
||||
@classmethod
|
||||
def list(cls, *args, **kwargs) -> list[Self]:
|
||||
data = super().list(*args, **kwargs)
|
||||
return sorted(data, key=lambda d: d.hrv_summary.calendar_date)
|
||||
123
python-garth/src/garth/data/sleep.py
Normal file
123
python-garth/src/garth/data/sleep.py
Normal file
@@ -0,0 +1,123 @@
|
||||
from datetime import date, datetime
|
||||
from typing import Optional, Union
|
||||
|
||||
from pydantic.dataclasses import dataclass
|
||||
from typing_extensions import Self
|
||||
|
||||
from .. import http
|
||||
from ..utils import camel_to_snake_dict, get_localized_datetime
|
||||
from ._base import Data
|
||||
|
||||
|
||||
@dataclass
|
||||
class Score:
|
||||
qualifier_key: str
|
||||
optimal_start: Optional[float] = None
|
||||
optimal_end: Optional[float] = None
|
||||
value: Optional[int] = None
|
||||
ideal_start_in_seconds: Optional[float] = None
|
||||
ideal_end_in_seconds: Optional[float] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class SleepScores:
|
||||
total_duration: Score
|
||||
stress: Score
|
||||
awake_count: Score
|
||||
overall: Score
|
||||
rem_percentage: Score
|
||||
restlessness: Score
|
||||
light_percentage: Score
|
||||
deep_percentage: Score
|
||||
|
||||
|
||||
@dataclass
|
||||
class DailySleepDTO:
|
||||
id: int
|
||||
user_profile_pk: int
|
||||
calendar_date: date
|
||||
sleep_time_seconds: int
|
||||
nap_time_seconds: int
|
||||
sleep_window_confirmed: bool
|
||||
sleep_window_confirmation_type: str
|
||||
sleep_start_timestamp_gmt: int
|
||||
sleep_end_timestamp_gmt: int
|
||||
sleep_start_timestamp_local: int
|
||||
sleep_end_timestamp_local: int
|
||||
device_rem_capable: bool
|
||||
retro: bool
|
||||
unmeasurable_sleep_seconds: Optional[int] = None
|
||||
deep_sleep_seconds: Optional[int] = None
|
||||
light_sleep_seconds: Optional[int] = None
|
||||
rem_sleep_seconds: Optional[int] = None
|
||||
awake_sleep_seconds: Optional[int] = None
|
||||
sleep_from_device: Optional[bool] = None
|
||||
sleep_version: Optional[int] = None
|
||||
awake_count: Optional[int] = None
|
||||
sleep_scores: Optional[SleepScores] = None
|
||||
auto_sleep_start_timestamp_gmt: Optional[int] = None
|
||||
auto_sleep_end_timestamp_gmt: Optional[int] = None
|
||||
sleep_quality_type_pk: Optional[int] = None
|
||||
sleep_result_type_pk: Optional[int] = None
|
||||
average_sp_o2_value: Optional[float] = None
|
||||
lowest_sp_o2_value: Optional[int] = None
|
||||
highest_sp_o2_value: Optional[int] = None
|
||||
average_sp_o2_hr_sleep: Optional[float] = None
|
||||
average_respiration_value: Optional[float] = None
|
||||
lowest_respiration_value: Optional[float] = None
|
||||
highest_respiration_value: Optional[float] = None
|
||||
avg_sleep_stress: Optional[float] = None
|
||||
age_group: Optional[str] = None
|
||||
sleep_score_feedback: Optional[str] = None
|
||||
sleep_score_insight: Optional[str] = None
|
||||
|
||||
@property
|
||||
def sleep_start(self) -> datetime:
|
||||
return get_localized_datetime(
|
||||
self.sleep_start_timestamp_gmt, self.sleep_start_timestamp_local
|
||||
)
|
||||
|
||||
@property
|
||||
def sleep_end(self) -> datetime:
|
||||
return get_localized_datetime(
|
||||
self.sleep_end_timestamp_gmt, self.sleep_end_timestamp_local
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
class SleepMovement:
|
||||
start_gmt: datetime
|
||||
end_gmt: datetime
|
||||
activity_level: float
|
||||
|
||||
|
||||
@dataclass
|
||||
class SleepData(Data):
|
||||
daily_sleep_dto: DailySleepDTO
|
||||
sleep_movement: Optional[list[SleepMovement]] = None
|
||||
|
||||
@classmethod
|
||||
def get(
|
||||
cls,
|
||||
day: Union[date, str],
|
||||
*,
|
||||
buffer_minutes: int = 60,
|
||||
client: Optional[http.Client] = None,
|
||||
) -> Optional[Self]:
|
||||
client = client or http.client
|
||||
path = (
|
||||
f"/wellness-service/wellness/dailySleepData/{client.username}?"
|
||||
f"nonSleepBufferMinutes={buffer_minutes}&date={day}"
|
||||
)
|
||||
sleep_data = client.connectapi(path)
|
||||
assert sleep_data
|
||||
sleep_data = camel_to_snake_dict(sleep_data)
|
||||
assert isinstance(sleep_data, dict)
|
||||
return (
|
||||
cls(**sleep_data) if sleep_data["daily_sleep_dto"]["id"] else None
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def list(cls, *args, **kwargs) -> list[Self]:
|
||||
data = super().list(*args, **kwargs)
|
||||
return sorted(data, key=lambda x: x.daily_sleep_dto.calendar_date)
|
||||
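Usage follows the same pattern as the other Data subclasses; a brief sketch with arbitrary dates:

    sleep = SleepData.get("2025-01-01")
    if sleep is not None:
        dto = sleep.daily_sleep_dto
        print(dto.sleep_time_seconds / 3600)   # hours asleep
        print(dto.sleep_start, dto.sleep_end)  # timezone-aware datetimes
    week = SleepData.list("2025-01-07", days=7)  # sorted by calendar_date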
81
python-garth/src/garth/data/weight.py
Normal file
@@ -0,0 +1,81 @@
from datetime import date, datetime, timedelta
from itertools import chain

from pydantic import Field, ValidationInfo, field_validator
from pydantic.dataclasses import dataclass
from typing_extensions import Self

from .. import http
from ..utils import (
    camel_to_snake_dict,
    format_end_date,
    get_localized_datetime,
)
from ._base import MAX_WORKERS, Data


@dataclass
class WeightData(Data):
    sample_pk: int
    calendar_date: date
    weight: int
    source_type: str
    weight_delta: float
    timestamp_gmt: int
    datetime_utc: datetime = Field(..., alias="timestamp_gmt")
    datetime_local: datetime = Field(..., alias="date")
    bmi: float | None = None
    body_fat: float | None = None
    body_water: float | None = None
    bone_mass: int | None = None
    muscle_mass: int | None = None
    physique_rating: float | None = None
    visceral_fat: float | None = None
    metabolic_age: int | None = None

    @field_validator("datetime_local", mode="before")
    @classmethod
    def to_localized_datetime(cls, v: int, info: ValidationInfo) -> datetime:
        return get_localized_datetime(info.data["timestamp_gmt"], v)

    @classmethod
    def get(
        cls, day: date | str, *, client: http.Client | None = None
    ) -> Self | None:
        client = client or http.client
        path = f"/weight-service/weight/dayview/{day}"
        data = client.connectapi(path)
        day_weight_list = data["dateWeightList"] if data else []

        if not day_weight_list:
            return None

        # Get first (most recent) weight entry for the day
        weight_data = camel_to_snake_dict(day_weight_list[0])
        return cls(**weight_data)

    @classmethod
    def list(
        cls,
        end: date | str | None = None,
        days: int = 1,
        *,
        client: http.Client | None = None,
        max_workers: int = MAX_WORKERS,
    ) -> list[Self]:
        client = client or http.client
        end = format_end_date(end)
        start = end - timedelta(days=days - 1)

        data = client.connectapi(
            f"/weight-service/weight/range/{start}/{end}?includeAll=true"
        )
        weight_summaries = data["dailyWeightSummaries"] if data else []
        weight_metrics = chain.from_iterable(
            summary["allWeightMetrics"] for summary in weight_summaries
        )
        weight_data_list = (
            cls(**camel_to_snake_dict(weight_data))
            for weight_data in weight_metrics
        )
        return sorted(weight_data_list, key=lambda d: d.datetime_utc)
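A hedged usage sketch for WeightData above; the date and averaging are placeholders, and an authenticated garth.http.client is assumed:

# Sketch only: assumes an authenticated garth.http.client.
from garth.data.weight import WeightData

# Single day: returns the first (most recent) entry for the day, or None.
today = WeightData.get("2023-07-20")
if today:
    print(today.weight, today.datetime_local)

# Range: one API call for the whole window, sorted by datetime_utc.
last_month = WeightData.list("2023-07-20", days=30)
if last_month:
    avg_weight = sum(w.weight for w in last_month) / len(last_month)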
18
python-garth/src/garth/exc.py
Normal file
@@ -0,0 +1,18 @@
from dataclasses import dataclass

from requests import HTTPError


@dataclass
class GarthException(Exception):
    """Base exception for all garth exceptions."""

    msg: str


@dataclass
class GarthHTTPError(GarthException):
    error: HTTPError

    def __str__(self) -> str:
        return f"{self.msg}: {self.error}"
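Since GarthHTTPError wraps the underlying requests.HTTPError, callers can still reach the original response. A small sketch, assuming an authenticated garth.http.client:

# Sketch only: catching the wrapped error raised by Client.request() (see http.py below).
from garth import http
from garth.exc import GarthHTTPError

try:
    data = http.client.connectapi("/userprofile-service/socialProfile")
except GarthHTTPError as e:
    # e.error is the original requests.HTTPError; its response
    # (status code, body) is still available for logging.
    status = e.error.response.status_code if e.error.response is not None else None
    print(f"request failed ({status}): {e}")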
247
python-garth/src/garth/http.py
Normal file
@@ -0,0 +1,247 @@
import base64
import json
import os
from typing import IO, Any, Dict, Literal, Tuple
from urllib.parse import urljoin

from requests import HTTPError, Response, Session
from requests.adapters import HTTPAdapter, Retry

from . import sso
from .auth_tokens import OAuth1Token, OAuth2Token
from .exc import GarthHTTPError
from .utils import asdict


USER_AGENT = {"User-Agent": "GCM-iOS-5.7.2.1"}


class Client:
    sess: Session
    last_resp: Response
    domain: str = "garmin.com"
    oauth1_token: OAuth1Token | Literal["needs_mfa"] | None = None
    oauth2_token: OAuth2Token | dict[str, Any] | None = None
    timeout: int = 10
    retries: int = 3
    status_forcelist: Tuple[int, ...] = (408, 429, 500, 502, 503, 504)
    backoff_factor: float = 0.5
    pool_connections: int = 10
    pool_maxsize: int = 10
    _user_profile: Dict[str, Any] | None = None

    def __init__(self, session: Session | None = None, **kwargs):
        self.sess = session if session else Session()
        self.sess.headers.update(USER_AGENT)
        self.configure(
            timeout=self.timeout,
            retries=self.retries,
            status_forcelist=self.status_forcelist,
            backoff_factor=self.backoff_factor,
            **kwargs,
        )

    def configure(
        self,
        /,
        oauth1_token: OAuth1Token | None = None,
        oauth2_token: OAuth2Token | None = None,
        domain: str | None = None,
        proxies: Dict[str, str] | None = None,
        ssl_verify: bool | None = None,
        timeout: int | None = None,
        retries: int | None = None,
        status_forcelist: Tuple[int, ...] | None = None,
        backoff_factor: float | None = None,
        pool_connections: int | None = None,
        pool_maxsize: int | None = None,
    ):
        if oauth1_token is not None:
            self.oauth1_token = oauth1_token
        if oauth2_token is not None:
            self.oauth2_token = oauth2_token
        if domain:
            self.domain = domain
        if proxies is not None:
            self.sess.proxies.update(proxies)
        if ssl_verify is not None:
            self.sess.verify = ssl_verify
        if timeout is not None:
            self.timeout = timeout
        if retries is not None:
            self.retries = retries
        if status_forcelist is not None:
            self.status_forcelist = status_forcelist
        if backoff_factor is not None:
            self.backoff_factor = backoff_factor
        if pool_connections is not None:
            self.pool_connections = pool_connections
        if pool_maxsize is not None:
            self.pool_maxsize = pool_maxsize

        retry = Retry(
            total=self.retries,
            status_forcelist=self.status_forcelist,
            backoff_factor=self.backoff_factor,
        )
        adapter = HTTPAdapter(
            max_retries=retry,
            pool_connections=self.pool_connections,
            pool_maxsize=self.pool_maxsize,
        )
        self.sess.mount("https://", adapter)

    @property
    def user_profile(self):
        if not self._user_profile:
            self._user_profile = self.connectapi(
                "/userprofile-service/socialProfile"
            )
            assert isinstance(self._user_profile, dict), (
                "No profile from connectapi"
            )
        return self._user_profile

    @property
    def profile(self):
        return self.user_profile

    @property
    def username(self):
        return self.user_profile["userName"]

    def request(
        self,
        method: str,
        subdomain: str,
        path: str,
        /,
        api: bool = False,
        referrer: str | bool = False,
        headers: dict = {},
        **kwargs,
    ) -> Response:
        url = f"https://{subdomain}.{self.domain}"
        url = urljoin(url, path)
        if referrer is True and self.last_resp:
            headers["referer"] = self.last_resp.url
        if api:
            assert self.oauth1_token, (
                "OAuth1 token is required for API requests"
            )
            if (
                not isinstance(self.oauth2_token, OAuth2Token)
                or self.oauth2_token.expired
            ):
                self.refresh_oauth2()
            headers["Authorization"] = str(self.oauth2_token)
        self.last_resp = self.sess.request(
            method,
            url,
            headers=headers,
            timeout=self.timeout,
            **kwargs,
        )
        try:
            self.last_resp.raise_for_status()
        except HTTPError as e:
            raise GarthHTTPError(
                msg="Error in request",
                error=e,
            )
        return self.last_resp

    def get(self, *args, **kwargs) -> Response:
        return self.request("GET", *args, **kwargs)

    def post(self, *args, **kwargs) -> Response:
        return self.request("POST", *args, **kwargs)

    def delete(self, *args, **kwargs) -> Response:
        return self.request("DELETE", *args, **kwargs)

    def put(self, *args, **kwargs) -> Response:
        return self.request("PUT", *args, **kwargs)

    def login(self, *args, **kwargs):
        self.oauth1_token, self.oauth2_token = sso.login(
            *args, **kwargs, client=self
        )
        return self.oauth1_token, self.oauth2_token

    def resume_login(self, *args, **kwargs):
        self.oauth1_token, self.oauth2_token = sso.resume_login(
            *args, **kwargs
        )
        return self.oauth1_token, self.oauth2_token

    def refresh_oauth2(self):
        assert self.oauth1_token and isinstance(
            self.oauth1_token, OAuth1Token
        ), "OAuth1 token is required for OAuth2 refresh"
        # There is a way to perform a refresh of an OAuth2 token, but it
        # appears even Garmin uses this approach when the OAuth2 is expired
        self.oauth2_token = sso.exchange(self.oauth1_token, self)

    def connectapi(
        self, path: str, method="GET", **kwargs
    ) -> Dict[str, Any] | None:
        resp = self.request(method, "connectapi", path, api=True, **kwargs)
        if resp.status_code == 204:
            return None
        return resp.json()

    def download(self, path: str, **kwargs) -> bytes:
        resp = self.get("connectapi", path, api=True, **kwargs)
        return resp.content

    def upload(
        self, fp: IO[bytes], /, path: str = "/upload-service/upload"
    ) -> Dict[str, Any]:
        fname = os.path.basename(fp.name)
        files = {"file": (fname, fp)}
        result = self.connectapi(
            path,
            method="POST",
            files=files,
        )
        assert result is not None, "No result from upload"
        return result

    def dump(self, dir_path: str):
        dir_path = os.path.expanduser(dir_path)
        os.makedirs(dir_path, exist_ok=True)
        with open(os.path.join(dir_path, "oauth1_token.json"), "w") as f:
            if self.oauth1_token:
                json.dump(asdict(self.oauth1_token), f, indent=4)
        with open(os.path.join(dir_path, "oauth2_token.json"), "w") as f:
            if self.oauth2_token:
                json.dump(asdict(self.oauth2_token), f, indent=4)

    def dumps(self) -> str:
        r = []
        r.append(asdict(self.oauth1_token))
        r.append(asdict(self.oauth2_token))
        s = json.dumps(r)
        return base64.b64encode(s.encode()).decode()

    def load(self, dir_path: str):
        dir_path = os.path.expanduser(dir_path)
        with open(os.path.join(dir_path, "oauth1_token.json")) as f:
            oauth1 = OAuth1Token(**json.load(f))
        with open(os.path.join(dir_path, "oauth2_token.json")) as f:
            oauth2 = OAuth2Token(**json.load(f))
        self.configure(
            oauth1_token=oauth1, oauth2_token=oauth2, domain=oauth1.domain
        )

    def loads(self, s: str):
        oauth1, oauth2 = json.loads(base64.b64decode(s))
        self.configure(
            oauth1_token=OAuth1Token(**oauth1),
            oauth2_token=OAuth2Token(**oauth2),
            domain=oauth1.get("domain"),
        )


client = Client()
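A hedged end-to-end sketch of the module-level Client above: configure, log in, persist tokens, and call the Connect API. Credentials, the token directory, and the example date are placeholders:

# Sketch only: exercises the Client defined above.
from garth.http import client

client.configure(domain="garmin.com", timeout=30)  # e.g. "garmin.cn" for the China domain
client.login("you@example.com", "your-password")

# Tokens can be persisted and restored so later runs skip the SSO flow;
# refresh_oauth2() runs lazily inside request() once the OAuth2 token expires.
client.dump("~/.garth")
client.load("~/.garth")

profile = client.connectapi("/userprofile-service/socialProfile")
print(profile["userName"] if profile else "no profile")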
0
python-garth/src/garth/py.typed
Normal file
259
python-garth/src/garth/sso.py
Normal file
@@ -0,0 +1,259 @@
import asyncio
import re
import time
from typing import Any, Callable, Dict, Literal, Tuple
from urllib.parse import parse_qs

import requests
from requests import Session
from requests_oauthlib import OAuth1Session

from . import http
from .auth_tokens import OAuth1Token, OAuth2Token
from .exc import GarthException


CSRF_RE = re.compile(r'name="_csrf"\s+value="(.+?)"')
TITLE_RE = re.compile(r"<title>(.+?)</title>")
OAUTH_CONSUMER_URL = "https://thegarth.s3.amazonaws.com/oauth_consumer.json"
OAUTH_CONSUMER: Dict[str, str] = {}
USER_AGENT = {"User-Agent": "com.garmin.android.apps.connectmobile"}


class GarminOAuth1Session(OAuth1Session):
    def __init__(
        self,
        /,
        parent: Session | None = None,
        **kwargs,
    ):
        global OAUTH_CONSUMER
        if not OAUTH_CONSUMER:
            OAUTH_CONSUMER = requests.get(OAUTH_CONSUMER_URL).json()
        super().__init__(
            OAUTH_CONSUMER["consumer_key"],
            OAUTH_CONSUMER["consumer_secret"],
            **kwargs,
        )
        if parent is not None:
            self.mount("https://", parent.adapters["https://"])
            self.proxies = parent.proxies
            self.verify = parent.verify


def login(
    email: str,
    password: str,
    /,
    client: "http.Client | None" = None,
    prompt_mfa: Callable | None = lambda: input("MFA code: "),
    return_on_mfa: bool = False,
) -> (
    Tuple[OAuth1Token, OAuth2Token]
    | Tuple[Literal["needs_mfa"], dict[str, Any]]
):
    """Login to Garmin Connect.

    Args:
        email: Garmin account email
        password: Garmin account password
        client: Optional HTTP client to use
        prompt_mfa: Callable that prompts for MFA code. Returns on MFA if None.
        return_on_mfa: If True, returns dict with MFA info instead of prompting

    Returns:
        If return_on_mfa=False (default):
            Tuple[OAuth1Token, OAuth2Token]: OAuth tokens after login
        If return_on_mfa=True and MFA required:
            dict: Contains needs_mfa and client_state for resume_login()
    """
    client = client or http.client

    # Define params based on domain
    SSO = f"https://sso.{client.domain}/sso"
    SSO_EMBED = f"{SSO}/embed"
    SSO_EMBED_PARAMS = dict(
        id="gauth-widget",
        embedWidget="true",
        gauthHost=SSO,
    )
    SIGNIN_PARAMS = {
        **SSO_EMBED_PARAMS,
        **dict(
            gauthHost=SSO_EMBED,
            service=SSO_EMBED,
            source=SSO_EMBED,
            redirectAfterAccountLoginUrl=SSO_EMBED,
            redirectAfterAccountCreationUrl=SSO_EMBED,
        ),
    }

    # Set cookies
    client.get("sso", "/sso/embed", params=SSO_EMBED_PARAMS)

    # Get CSRF token
    client.get(
        "sso",
        "/sso/signin",
        params=SIGNIN_PARAMS,
        referrer=True,
    )
    csrf_token = get_csrf_token(client.last_resp.text)

    # Submit login form with email and password
    client.post(
        "sso",
        "/sso/signin",
        params=SIGNIN_PARAMS,
        referrer=True,
        data=dict(
            username=email,
            password=password,
            embed="true",
            _csrf=csrf_token,
        ),
    )
    title = get_title(client.last_resp.text)

    # Handle MFA
    if "MFA" in title:
        if return_on_mfa or prompt_mfa is None:
            return "needs_mfa", {
                "signin_params": SIGNIN_PARAMS,
                "client": client,
            }

        handle_mfa(client, SIGNIN_PARAMS, prompt_mfa)
        title = get_title(client.last_resp.text)

    if title != "Success":
        raise GarthException(f"Unexpected title: {title}")
    return _complete_login(client)


def get_oauth1_token(ticket: str, client: "http.Client") -> OAuth1Token:
    sess = GarminOAuth1Session(parent=client.sess)
    base_url = f"https://connectapi.{client.domain}/oauth-service/oauth/"
    login_url = f"https://sso.{client.domain}/sso/embed"
    url = (
        f"{base_url}preauthorized?ticket={ticket}&login-url={login_url}"
        "&accepts-mfa-tokens=true"
    )
    resp = sess.get(
        url,
        headers=USER_AGENT,
        timeout=client.timeout,
    )
    resp.raise_for_status()
    parsed = parse_qs(resp.text)
    token = {k: v[0] for k, v in parsed.items()}
    return OAuth1Token(domain=client.domain, **token)  # type: ignore


def exchange(oauth1: OAuth1Token, client: "http.Client") -> OAuth2Token:
    sess = GarminOAuth1Session(
        resource_owner_key=oauth1.oauth_token,
        resource_owner_secret=oauth1.oauth_token_secret,
        parent=client.sess,
    )
    data = dict(mfa_token=oauth1.mfa_token) if oauth1.mfa_token else {}
    base_url = f"https://connectapi.{client.domain}/oauth-service/oauth/"
    url = f"{base_url}exchange/user/2.0"
    headers = {
        **USER_AGENT,
        **{"Content-Type": "application/x-www-form-urlencoded"},
    }
    resp = sess.post(
        url,
        headers=headers,
        data=data,
        timeout=client.timeout,
    )
    resp.raise_for_status()
    token = resp.json()
    return OAuth2Token(**set_expirations(token))


def handle_mfa(
    client: "http.Client", signin_params: dict, prompt_mfa: Callable
) -> None:
    csrf_token = get_csrf_token(client.last_resp.text)
    if asyncio.iscoroutinefunction(prompt_mfa):
        mfa_code = asyncio.run(prompt_mfa())
    else:
        mfa_code = prompt_mfa()
    client.post(
        "sso",
        "/sso/verifyMFA/loginEnterMfaCode",
        params=signin_params,
        referrer=True,
        data={
            "mfa-code": mfa_code,
            "embed": "true",
            "_csrf": csrf_token,
            "fromPage": "setupEnterMfaCode",
        },
    )


def set_expirations(token: dict) -> dict:
    token["expires_at"] = int(time.time() + token["expires_in"])
    token["refresh_token_expires_at"] = int(
        time.time() + token["refresh_token_expires_in"]
    )
    return token


def get_csrf_token(html: str) -> str:
    m = CSRF_RE.search(html)
    if not m:
        raise GarthException("Couldn't find CSRF token")
    return m.group(1)


def get_title(html: str) -> str:
    m = TITLE_RE.search(html)
    if not m:
        raise GarthException("Couldn't find title")
    return m.group(1)


def resume_login(
    client_state: dict, mfa_code: str
) -> Tuple[OAuth1Token, OAuth2Token]:
    """Complete login after MFA code is provided.

    Args:
        client_state: The client state from login() when MFA was needed
        mfa_code: The MFA code provided by the user

    Returns:
        Tuple[OAuth1Token, OAuth2Token]: The OAuth tokens after login
    """
    client = client_state["client"]
    signin_params = client_state["signin_params"]
    handle_mfa(client, signin_params, lambda: mfa_code)
    return _complete_login(client)


def _complete_login(client: "http.Client") -> Tuple[OAuth1Token, OAuth2Token]:
    """Complete the login process after successful authentication.

    Args:
        client: The HTTP client

    Returns:
        Tuple[OAuth1Token, OAuth2Token]: The OAuth tokens
    """
    # Parse ticket
    m = re.search(r'embed\?ticket=([^"]+)"', client.last_resp.text)
    if not m:
        raise GarthException(
            "Couldn't find ticket in response"
        )  # pragma: no cover
    ticket = m.group(1)

    oauth1 = get_oauth1_token(ticket, client)
    oauth2 = exchange(oauth1, client)

    return oauth1, oauth2
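The return_on_mfa branch in login() above enables a two-step flow (useful when the MFA code arrives in a later request, e.g. in a web app). A hedged sketch; the credentials are placeholders:

# Sketch only: two-step MFA login using the functions defined above.
from garth import http, sso

result = sso.login(
    "you@example.com", "your-password",
    client=http.client, return_on_mfa=True,
)

if result[0] == "needs_mfa":
    client_state = result[1]           # {"signin_params": ..., "client": ...}
    code = input("MFA code: ")         # in a web app this comes from a second request
    oauth1, oauth2 = sso.resume_login(client_state, code)
else:
    oauth1, oauth2 = result            # no MFA challenge was issued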
18
python-garth/src/garth/stats/__init__.py
Normal file
@@ -0,0 +1,18 @@
__all__ = [
    "DailyHRV",
    "DailyHydration",
    "DailyIntensityMinutes",
    "DailySleep",
    "DailySteps",
    "DailyStress",
    "WeeklyIntensityMinutes",
    "WeeklyStress",
    "WeeklySteps",
]

from .hrv import DailyHRV
from .hydration import DailyHydration
from .intensity_minutes import DailyIntensityMinutes, WeeklyIntensityMinutes
from .sleep import DailySleep
from .steps import DailySteps, WeeklySteps
from .stress import DailyStress, WeeklyStress
53
python-garth/src/garth/stats/_base.py
Normal file
@@ -0,0 +1,53 @@
from datetime import date, timedelta
from typing import ClassVar

from pydantic.dataclasses import dataclass
from typing_extensions import Self

from .. import http
from ..utils import camel_to_snake_dict, format_end_date


@dataclass
class Stats:
    calendar_date: date

    _path: ClassVar[str]
    _page_size: ClassVar[int]

    @classmethod
    def list(
        cls,
        end: date | str | None = None,
        period: int = 1,
        *,
        client: http.Client | None = None,
    ) -> list[Self]:
        client = client or http.client
        end = format_end_date(end)
        period_type = "days" if "daily" in cls._path else "weeks"

        if period > cls._page_size:
            page = cls.list(end, cls._page_size, client=client)
            if not page:
                return []
            page = (
                cls.list(
                    end - timedelta(**{period_type: cls._page_size}),
                    period - cls._page_size,
                    client=client,
                )
                + page
            )
            return page

        start = end - timedelta(**{period_type: period - 1})
        path = cls._path.format(start=start, end=end, period=period)
        page_dirs = client.connectapi(path)
        if not isinstance(page_dirs, list) or not page_dirs:
            return []
        page_dirs = [d for d in page_dirs if isinstance(d, dict)]
        if page_dirs and "values" in page_dirs[0]:
            page_dirs = [{**stat, **stat.pop("values")} for stat in page_dirs]
        page_dirs = [camel_to_snake_dict(stat) for stat in page_dirs]
        return [cls(**stat) for stat in page_dirs]
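Concrete subclasses only supply _path and _page_size; Stats.list() recurses backwards in page-sized chunks whenever the requested period exceeds one page, prepending the older pages so results come back oldest-first. A sketch of that contract with a hypothetical subclass (the class name and endpoint below are illustrative only; the real subclasses follow in hydration.py, intensity_minutes.py, sleep.py, steps.py and stress.py):

# Sketch only: DailyExample and its endpoint are hypothetical.
from typing import ClassVar
from pydantic.dataclasses import dataclass
from garth.stats._base import Stats

@dataclass
class DailyExample(Stats):
    value: int | None = None

    _path: ClassVar[str] = "/usersummary-service/stats/example/daily/{start}/{end}"
    _page_size: ClassVar[int] = 28

# With period=60 > _page_size=28, list() issues three requests
# (28 + 28 + 4 days) and concatenates the pages oldest-first:
# stats = DailyExample.list("2023-07-20", 60)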
66
python-garth/src/garth/stats/hrv.py
Normal file
@@ -0,0 +1,66 @@
from datetime import date, datetime, timedelta
from typing import Any, ClassVar, cast

from pydantic.dataclasses import dataclass
from typing_extensions import Self

from .. import http
from ..utils import camel_to_snake_dict, format_end_date


@dataclass
class HRVBaseline:
    low_upper: int
    balanced_low: int
    balanced_upper: int
    marker_value: float | None


@dataclass
class DailyHRV:
    calendar_date: date
    weekly_avg: int | None
    last_night_avg: int | None
    last_night_5_min_high: int | None
    baseline: HRVBaseline | None
    status: str
    feedback_phrase: str
    create_time_stamp: datetime

    _path: ClassVar[str] = "/hrv-service/hrv/daily/{start}/{end}"
    _page_size: ClassVar[int] = 28

    @classmethod
    def list(
        cls,
        end: date | str | None = None,
        period: int = 28,
        *,
        client: http.Client | None = None,
    ) -> list[Self]:
        client = client or http.client
        end = format_end_date(end)

        # Paginate if period is greater than page size
        if period > cls._page_size:
            page = cls.list(end, cls._page_size, client=client)
            if not page:
                return []
            page = (
                cls.list(
                    end - timedelta(days=cls._page_size),
                    period - cls._page_size,
                    client=client,
                )
                + page
            )
            return page

        start = end - timedelta(days=period - 1)
        path = cls._path.format(start=start, end=end)
        response = client.connectapi(path)
        if response is None:
            return []
        daily_hrv = camel_to_snake_dict(response)["hrv_summaries"]
        daily_hrv = cast(list[dict[str, Any]], daily_hrv)
        return [cls(**hrv) for hrv in daily_hrv]
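A hedged usage sketch for DailyHRV above, which defines its own list() because the HRV endpoint nests results under "hrvSummaries" instead of returning a flat list; an authenticated garth.http.client is assumed:

# Sketch only: assumes an authenticated garth.http.client.
from garth.stats.hrv import DailyHRV

hrv = DailyHRV.list("2023-07-20", 56)   # paginates as 28 + 28 days
for day in hrv:
    print(day.calendar_date, day.last_night_avg, day.status)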
17
python-garth/src/garth/stats/hydration.py
Normal file
@@ -0,0 +1,17 @@
from typing import ClassVar

from pydantic.dataclasses import dataclass

from ._base import Stats


BASE_PATH = "/usersummary-service/stats/hydration"


@dataclass
class DailyHydration(Stats):
    value_in_ml: float
    goal_in_ml: float

    _path: ClassVar[str] = f"{BASE_PATH}/daily/{{start}}/{{end}}"
    _page_size: ClassVar[int] = 28
28
python-garth/src/garth/stats/intensity_minutes.py
Normal file
@@ -0,0 +1,28 @@
from typing import ClassVar

from pydantic.dataclasses import dataclass

from ._base import Stats


BASE_PATH = "/usersummary-service/stats/im"


@dataclass
class DailyIntensityMinutes(Stats):
    weekly_goal: int
    moderate_value: int | None = None
    vigorous_value: int | None = None

    _path: ClassVar[str] = f"{BASE_PATH}/daily/{{start}}/{{end}}"
    _page_size: ClassVar[int] = 28


@dataclass
class WeeklyIntensityMinutes(Stats):
    weekly_goal: int
    moderate_value: int | None = None
    vigorous_value: int | None = None

    _path: ClassVar[str] = f"{BASE_PATH}/weekly/{{start}}/{{end}}"
    _page_size: ClassVar[int] = 52
15
python-garth/src/garth/stats/sleep.py
Normal file
@@ -0,0 +1,15 @@
from typing import ClassVar

from pydantic.dataclasses import dataclass

from ._base import Stats


@dataclass
class DailySleep(Stats):
    value: int | None

    _path: ClassVar[str] = (
        "/wellness-service/stats/daily/sleep/score/{start}/{end}"
    )
    _page_size: ClassVar[int] = 28
30
python-garth/src/garth/stats/steps.py
Normal file
@@ -0,0 +1,30 @@
from typing import ClassVar

from pydantic.dataclasses import dataclass

from ._base import Stats


BASE_PATH = "/usersummary-service/stats/steps"


@dataclass
class DailySteps(Stats):
    total_steps: int | None
    total_distance: int | None
    step_goal: int

    _path: ClassVar[str] = f"{BASE_PATH}/daily/{{start}}/{{end}}"
    _page_size: ClassVar[int] = 28


@dataclass
class WeeklySteps(Stats):
    total_steps: int
    average_steps: float
    average_distance: float
    total_distance: float
    wellness_data_days_count: int

    _path: ClassVar[str] = f"{BASE_PATH}/weekly/{{end}}/{{period}}"
    _page_size: ClassVar[int] = 52
28
python-garth/src/garth/stats/stress.py
Normal file
@@ -0,0 +1,28 @@
from typing import ClassVar

from pydantic.dataclasses import dataclass

from ._base import Stats


BASE_PATH = "/usersummary-service/stats/stress"


@dataclass
class DailyStress(Stats):
    overall_stress_level: int
    rest_stress_duration: int | None = None
    low_stress_duration: int | None = None
    medium_stress_duration: int | None = None
    high_stress_duration: int | None = None

    _path: ClassVar[str] = f"{BASE_PATH}/daily/{{start}}/{{end}}"
    _page_size: ClassVar[int] = 28


@dataclass
class WeeklyStress(Stats):
    value: int

    _path: ClassVar[str] = f"{BASE_PATH}/weekly/{{end}}/{{period}}"
    _page_size: ClassVar[int] = 52
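Note that the daily stats paths use {start}/{end} while the weekly ones use {end}/{period}, and the page sizes differ (28 days vs 52 weeks). A hedged usage sketch covering the subclasses above, assuming an authenticated garth.http.client; dates are placeholders:

# Sketch only: assumes an authenticated garth.http.client.
from garth.stats import DailySteps, DailyStress, WeeklyStress

daily_stress = DailyStress.list("2023-07-20", 28)    # one full daily page
weekly_stress = WeeklyStress.list("2023-07-20", 12)  # last 12 weeks
steps = DailySteps.list("2023-07-20", 7)

if daily_stress:
    latest = daily_stress[-1]
    print(latest.calendar_date, latest.overall_stress_level)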
5
python-garth/src/garth/users/__init__.py
Normal file
@@ -0,0 +1,5 @@
from .profile import UserProfile
from .settings import UserSettings


__all__ = ["UserProfile", "UserSettings"]
79
python-garth/src/garth/users/profile.py
Normal file
@@ -0,0 +1,79 @@
from pydantic.dataclasses import dataclass
from typing_extensions import Self

from .. import http
from ..utils import camel_to_snake_dict


@dataclass
class UserProfile:
    id: int
    profile_id: int
    garmin_guid: str
    display_name: str
    full_name: str
    user_name: str
    profile_image_type: str | None
    profile_image_url_large: str | None
    profile_image_url_medium: str | None
    profile_image_url_small: str | None
    location: str | None
    facebook_url: str | None
    twitter_url: str | None
    personal_website: str | None
    motivation: str | None
    bio: str | None
    primary_activity: str | None
    favorite_activity_types: list[str]
    running_training_speed: float
    cycling_training_speed: float
    favorite_cycling_activity_types: list[str]
    cycling_classification: str | None
    cycling_max_avg_power: float
    swimming_training_speed: float
    profile_visibility: str
    activity_start_visibility: str
    activity_map_visibility: str
    course_visibility: str
    activity_heart_rate_visibility: str
    activity_power_visibility: str
    badge_visibility: str
    show_age: bool
    show_weight: bool
    show_height: bool
    show_weight_class: bool
    show_age_range: bool
    show_gender: bool
    show_activity_class: bool
    show_vo_2_max: bool
    show_personal_records: bool
    show_last_12_months: bool
    show_lifetime_totals: bool
    show_upcoming_events: bool
    show_recent_favorites: bool
    show_recent_device: bool
    show_recent_gear: bool
    show_badges: bool
    other_activity: str | None
    other_primary_activity: str | None
    other_motivation: str | None
    user_roles: list[str]
    name_approved: bool
    user_profile_full_name: str
    make_golf_scorecards_private: bool
    allow_golf_live_scoring: bool
    allow_golf_scoring_by_connections: bool
    user_level: int
    user_point: int
    level_update_date: str
    level_is_viewed: bool
    level_point_threshold: int
    user_point_offset: int
    user_pro: bool

    @classmethod
    def get(cls, /, client: http.Client | None = None) -> Self:
        client = client or http.client
        profile = client.connectapi("/userprofile-service/socialProfile")
        assert isinstance(profile, dict)
        return cls(**camel_to_snake_dict(profile))
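A brief hedged sketch for UserProfile.get() above, assuming an authenticated garth.http.client:

# Sketch only: assumes an authenticated garth.http.client.
from garth.users.profile import UserProfile

profile = UserProfile.get()
print(profile.display_name, profile.user_level, profile.user_pro)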