Mirror of https://github.com/sstent/FitTrack_ReportGenerator.git (synced 2026-01-26 17:12:28 +00:00)
feat: Initial implementation of FitTrack Report Generator
This commit introduces the initial version of the FitTrack Report Generator, a FastAPI application for analyzing workout files. Key features include:

- Parsing of FIT, TCX, and GPX workout files.
- Analysis of power, heart rate, speed, and elevation data.
- Generation of summary reports and charts.
- REST API for single and batch workout analysis.

The project structure has been set up with a `src` directory for core logic, an `api` directory for the FastAPI application, and a `tests` directory for unit, integration, and contract tests. The development workflow is configured to use Docker and modern Python tooling.
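For orientation, here is a minimal sketch of the single-workout flow this commit implies. The class and method names (FitParser, WorkoutAnalyzer.calculate_summary_metrics, ReportGenerator.generate_html_report) come from the unit tests below; the analyzer and report-generator calls are mocked in those tests, so the constructor arguments and argument lists shown here are assumptions, not the committed API:

# Hedged usage sketch; class/method names are from the unit tests, argument shapes are assumed.
from src.core.file_parser import FitParser
from src.core.workout_analyzer import WorkoutAnalyzer
from src.core.report_generator import ReportGenerator

workout = FitParser("ride.fit").parse()                           # -> WorkoutData (metadata + time-series DataFrame)
metrics = WorkoutAnalyzer().calculate_summary_metrics(workout)    # e.g. {"avg_power": ...}; arguments assumed
html = ReportGenerator().generate_html_report(workout, metrics)   # returns an HTML string; arguments assumed

with open("report.html", "w") as fh:
    fh.write(html)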
9 binary files not shown.
tests/unit/test_batch_processor.py (new file, 139 lines)
@@ -0,0 +1,139 @@
import pytest
import zipfile
import io
from unittest.mock import MagicMock, patch
from src.core.batch_processor import BatchProcessor
from src.core.workout_data import WorkoutData, WorkoutMetadata
from datetime import datetime, timedelta
import pandas as pd


@pytest.fixture
def mock_workout_data():
    metadata = WorkoutMetadata(
        start_time=datetime(2025, 1, 1, 10, 0, 0),
        duration=timedelta(minutes=10),
        device="Garmin",
        file_type="FIT"
    )
    time_series_data = pd.DataFrame({
        "power": [100, 110, 120],
        "heart_rate": [150, 155, 160]
    })
    return WorkoutData(metadata=metadata, time_series_data=time_series_data)


@pytest.fixture
def mock_file_parser():
    parser = MagicMock()
    parser.parse.return_value = MagicMock(spec=WorkoutData)
    return parser


@pytest.fixture
def mock_workout_analyzer():
    analyzer = MagicMock()
    analyzer.calculate_summary_metrics.return_value = {"avg_power": 100}
    return analyzer


@pytest.fixture
def mock_report_generator():
    generator = MagicMock()
    generator.generate_html_report.return_value = "<html>report</html>"
    return generator


@pytest.fixture
def mock_db_session():
    session = MagicMock()
    return session


def create_zip_file(file_names_and_content):
    zip_buffer = io.BytesIO()
    with zipfile.ZipFile(zip_buffer, 'w', zipfile.ZIP_DEFLATED) as zf:
        for name, content in file_names_and_content.items():
            zf.writestr(name, content)
    zip_buffer.seek(0)
    return zip_buffer


def test_batch_processor_initialization(mock_db_session):
    processor = BatchProcessor(db_session=mock_db_session)
    assert processor.db_session == mock_db_session


@patch('src.core.file_parser.FitParser')
@patch('src.core.file_parser.TcxParser')
@patch('src.core.file_parser.GpxParser')
@patch('src.core.workout_analyzer.WorkoutAnalyzer')
@patch('src.core.report_generator.ReportGenerator')
def test_process_zip_file_single_fit(mock_report_generator_cls, mock_workout_analyzer_cls, mock_gpx_parser_cls, mock_tcx_parser_cls, mock_fit_parser_cls, mock_db_session, mock_workout_data):
    # Mock parsers to return mock_workout_data
    mock_fit_parser_cls.return_value.parse.return_value = mock_workout_data
    mock_workout_analyzer_cls.return_value.calculate_summary_metrics.return_value = {"avg_power": 100}
    mock_report_generator_cls.return_value.generate_html_report.return_value = "<html>report</html>"

    zip_content = create_zip_file({"workout.fit": b"dummy_fit_content"})
    processor = BatchProcessor(db_session=mock_db_session)
    results = processor.process_zip_file(zip_content, user_id=None, ftp_value=None)

    assert len(results) == 1
    assert results[0]["file_name"] == "workout.fit"
    assert results[0]["status"] == "completed"
    mock_fit_parser_cls.return_value.parse.assert_called_once()
    mock_workout_analyzer_cls.assert_called_once()
    mock_db_session.add.assert_called_once()
    mock_db_session.commit.assert_called_once()


@patch('src.core.file_parser.FitParser')
@patch('src.core.file_parser.TcxParser')
@patch('src.core.file_parser.GpxParser')
@patch('src.core.workout_analyzer.WorkoutAnalyzer')
@patch('src.core.report_generator.ReportGenerator')
def test_process_zip_file_multiple_files(mock_report_generator_cls, mock_workout_analyzer_cls, mock_gpx_parser_cls, mock_tcx_parser_cls, mock_fit_parser_cls, mock_db_session, mock_workout_data):
    mock_fit_parser_cls.return_value.parse.return_value = mock_workout_data
    mock_tcx_parser_cls.return_value.parse.return_value = mock_workout_data
    mock_workout_analyzer_cls.return_value.calculate_summary_metrics.return_value = {"avg_power": 100}
    mock_report_generator_cls.return_value.generate_html_report.return_value = "<html>report</html>"

    zip_content = create_zip_file({"workout1.fit": b"dummy_fit_content", "workout2.tcx": b"dummy_tcx_content"})
    processor = BatchProcessor(db_session=mock_db_session)
    results = processor.process_zip_file(zip_content, user_id=None, ftp_value=None)

    assert len(results) == 2
    assert any(r["file_name"] == "workout1.fit" for r in results)
    assert any(r["file_name"] == "workout2.tcx" for r in results)
    assert all(r["status"] == "completed" for r in results)
    assert mock_fit_parser_cls.return_value.parse.call_count == 1
    assert mock_tcx_parser_cls.return_value.parse.call_count == 1
    assert mock_workout_analyzer_cls.call_count == 2
    assert mock_db_session.add.call_count == 2
    assert mock_db_session.commit.call_count == 2


@patch('src.core.file_parser.FitParser')
@patch('src.core.workout_analyzer.WorkoutAnalyzer')
def test_process_zip_file_unsupported_file_type(mock_workout_analyzer_cls, mock_fit_parser_cls, mock_db_session):
    zip_content = create_zip_file({"document.txt": b"some text"})
    processor = BatchProcessor(db_session=mock_db_session)
    results = processor.process_zip_file(zip_content, user_id=None, ftp_value=None)

    assert len(results) == 1
    assert results[0]["file_name"] == "document.txt"
    assert results[0]["status"] == "failed"
    assert "Unsupported file type" in results[0]["error_message"]
    mock_fit_parser_cls.return_value.parse.assert_not_called()
    mock_workout_analyzer_cls.assert_not_called()
    mock_db_session.add.assert_not_called()
    mock_db_session.commit.assert_not_called()


@patch('src.core.file_parser.FitParser')
@patch('src.core.workout_analyzer.WorkoutAnalyzer')
def test_process_zip_file_parsing_error(mock_workout_analyzer_cls, mock_fit_parser_cls, mock_db_session):
    mock_fit_parser_cls.return_value.parse.side_effect = Exception("Corrupted file")

    zip_content = create_zip_file({"corrupted.fit": b"bad content"})
    processor = BatchProcessor(db_session=mock_db_session)
    results = processor.process_zip_file(zip_content, user_id=None, ftp_value=None)

    assert len(results) == 1
    assert results[0]["file_name"] == "corrupted.fit"
    assert results[0]["status"] == "failed"
    assert "Corrupted file" in results[0]["error_message"]
    mock_fit_parser_cls.return_value.parse.assert_called_once()
    mock_workout_analyzer_cls.assert_not_called()
    mock_db_session.add.assert_not_called()
    mock_db_session.commit.assert_not_called()
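The tests above pin down the batch interface: BatchProcessor is constructed with a db_session, process_zip_file() takes an in-memory ZIP plus user_id and ftp_value, and returns one result dict per archive member with file_name, status, and, on failure, error_message. A usage sketch under those assumptions follows; the zip helper mirrors the create_zip_file() test helper, and the database session setup is an assumption (any object exposing add() and commit() in the SQLAlchemy style):

# Usage sketch of the batch interface exercised by the tests above.
import io
import zipfile

from src.core.batch_processor import BatchProcessor


def zip_workouts(paths):
    # Build an in-memory ZIP of workout files, like the create_zip_file() helper above.
    buf = io.BytesIO()
    with zipfile.ZipFile(buf, "w", zipfile.ZIP_DEFLATED) as zf:
        for path in paths:
            zf.write(path)
    buf.seek(0)
    return buf


processor = BatchProcessor(db_session=session)  # `session` assumed: a SQLAlchemy-style session
results = processor.process_zip_file(zip_workouts(["ride1.fit", "ride2.tcx"]),
                                     user_id=None, ftp_value=None)
for result in results:
    print(result["file_name"], result["status"], result.get("error_message", ""))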
tests/unit/test_chart_generator.py (new file, 103 lines)
@@ -0,0 +1,103 @@
import pytest
import pandas as pd
from datetime import datetime, timedelta
from src.core.workout_data import WorkoutData, WorkoutMetadata, PowerData, HeartRateData, SpeedData, ElevationData
from src.core.chart_generator import ChartGenerator


@pytest.fixture
def sample_workout_data():
    # Create dummy time-series data
    timestamps = pd.to_datetime([datetime(2025, 1, 1, 10, 0, 0) + timedelta(seconds=i) for i in range(600)])
    power = pd.Series([150 + 50 * (i % 10) for i in range(600)], index=timestamps)
    heart_rate = pd.Series([120 + 10 * (i % 5) for i in range(600)], index=timestamps)
    speed = pd.Series([5 + 2 * (i % 7) for i in range(600)], index=timestamps)
    altitude = pd.Series([100 + 10 * (i % 12) for i in range(600)], index=timestamps)

    time_series_data = pd.DataFrame({
        "power": power,
        "heart_rate": heart_rate,
        "speed": speed,
        "altitude": altitude
    })

    metadata = WorkoutMetadata(
        start_time=datetime(2025, 1, 1, 10, 0, 0),
        duration=timedelta(minutes=10),
        device="Garmin",
        file_type="FIT"
    )

    power_data = PowerData(
        raw_power_stream=power.tolist(),
        average_power=power.mean(),
        normalized_power=power.mean() * 1.05,  # Dummy value
        intensity_factor=0.8,
        training_stress_score=50,
        zone_distribution={'Z1': 100, 'Z2': 200, 'Z3': 300}
    )

    heart_rate_data = HeartRateData(
        raw_hr_stream=heart_rate.tolist(),
        average_hr=heart_rate.mean(),
        max_hr=heart_rate.max(),
        zone_distribution={'Z1': 150, 'Z2': 250, 'Z3': 200}
    )

    speed_data = SpeedData(
        raw_speed_stream=speed.tolist(),
        average_speed=speed.mean(),
        max_speed=speed.max(),
        zone_distribution={'S1': 100, 'S2': 200, 'S3': 300}
    )

    elevation_data = ElevationData(
        raw_elevation_stream=altitude.tolist(),
        total_ascent=100,
        total_descent=50,
        max_elevation=200,
        min_elevation=50
    )

    return WorkoutData(
        metadata=metadata,
        time_series_data=time_series_data,
        power_data=power_data,
        heart_rate_data=heart_rate_data,
        speed_data=speed_data,
        elevation_data=elevation_data
    )


def test_generate_power_curve_chart(sample_workout_data, tmp_path):
    chart_generator = ChartGenerator(sample_workout_data)
    output_file = tmp_path / "power_curve.png"
    chart_generator.generate_power_curve_chart(output_file)
    assert output_file.exists()
    assert output_file.stat().st_size > 0


def test_generate_elevation_profile_chart(sample_workout_data, tmp_path):
    chart_generator = ChartGenerator(sample_workout_data)
    output_file = tmp_path / "elevation_profile.png"
    chart_generator.generate_elevation_profile_chart(output_file)
    assert output_file.exists()
    assert output_file.stat().st_size > 0


def test_generate_power_zone_distribution_chart(sample_workout_data, tmp_path):
    chart_generator = ChartGenerator(sample_workout_data)
    output_file = tmp_path / "power_zone_distribution.png"
    chart_generator.generate_zone_distribution_chart("power", output_file)
    assert output_file.exists()
    assert output_file.stat().st_size > 0


def test_generate_hr_zone_distribution_chart(sample_workout_data, tmp_path):
    chart_generator = ChartGenerator(sample_workout_data)
    output_file = tmp_path / "hr_zone_distribution.png"
    chart_generator.generate_zone_distribution_chart("heart_rate", output_file)
    assert output_file.exists()
    assert output_file.stat().st_size > 0


def test_generate_speed_zone_distribution_chart(sample_workout_data, tmp_path):
    chart_generator = ChartGenerator(sample_workout_data)
    output_file = tmp_path / "speed_zone_distribution.png"
    chart_generator.generate_zone_distribution_chart("speed", output_file)
    assert output_file.exists()
    assert output_file.stat().st_size > 0
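The chart tests above fix the ChartGenerator surface: it is constructed with a WorkoutData instance and each generate_* method writes a PNG to the path it is given. A short usage sketch under those assumptions; `workout` stands for a parsed WorkoutData obtained elsewhere, and the output-directory handling is illustrative only:

from pathlib import Path

from src.core.chart_generator import ChartGenerator

out_dir = Path("charts")
out_dir.mkdir(exist_ok=True)

charts = ChartGenerator(workout)  # `workout` is a parsed WorkoutData (assumption: produced by a parser)
charts.generate_power_curve_chart(out_dir / "power_curve.png")
charts.generate_elevation_profile_chart(out_dir / "elevation_profile.png")
for metric in ("power", "heart_rate", "speed"):  # metric names taken from the tests above
    charts.generate_zone_distribution_chart(metric, out_dir / f"{metric}_zones.png")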
tests/unit/test_fit_parser.py (new file, 51 lines)
@@ -0,0 +1,51 @@
import pytest
from unittest.mock import MagicMock, patch
from src.core.file_parser import FitParser, WorkoutData, WorkoutMetadata, PowerData, HeartRateData, SpeedData, ElevationData
from datetime import datetime, timedelta
import pandas as pd


@pytest.fixture
def mock_fit_file():
    with patch('fitparse.FitFile') as mock_fit_file_class:
        mock_fit_file_instance = MagicMock()
        mock_fit_file_class.return_value = mock_fit_file_instance

        # Mocking get_messages to return some dummy records
        mock_record1 = MagicMock()
        mock_record1.as_dict.return_value = {
            'timestamp': datetime(2023, 1, 1, 10, 0, 0),
            'power': 150,
            'heart_rate': 130,
            'speed': 5.0,
            'altitude': 100.0
        }
        mock_record2 = MagicMock()
        mock_record2.as_dict.return_value = {
            'timestamp': datetime(2023, 1, 1, 10, 1, 0),
            'power': 160,
            'heart_rate': 135,
            'speed': 5.5,
            'altitude': 105.0
        }
        mock_fit_file_instance.get_messages.return_value = [mock_record1, mock_record2]
        yield mock_fit_file_class


def test_fit_parser_initialization():
    parser = FitParser("dummy.fit")
    assert parser.file_path == "dummy.fit"


def test_fit_parser_parse_method_returns_workout_data(mock_fit_file):
    parser = FitParser("dummy.fit")
    workout_data = parser.parse()

    assert isinstance(workout_data, WorkoutData)
    assert isinstance(workout_data.metadata, WorkoutMetadata)
    assert workout_data.metadata.file_type == "FIT"
    assert isinstance(workout_data.time_series_data, pd.DataFrame)
    assert not workout_data.time_series_data.empty
    assert "power" in workout_data.time_series_data.columns
    assert "heart_rate" in workout_data.time_series_data.columns
    assert "speed" in workout_data.time_series_data.columns
    assert "altitude" in workout_data.time_series_data.columns
    assert workout_data.metadata.start_time == datetime(2023, 1, 1, 10, 0, 0)
    assert workout_data.metadata.duration == timedelta(minutes=1)
tests/unit/test_gpx_parser.py (new file, 58 lines)
@@ -0,0 +1,58 @@
import pytest
from unittest.mock import MagicMock, patch, mock_open
from src.core.file_parser import GpxParser, WorkoutData, WorkoutMetadata
from datetime import datetime, timedelta


@pytest.fixture
def mock_gpxpy_parse():
    with patch('gpxpy.parse') as mock_parse:
        mock_gpx = MagicMock()
        mock_parse.return_value = mock_gpx

        # Mock GPX data
        mock_gpx.time = datetime(2023, 1, 1, 10, 0, 0)
        mock_gpx.get_moving_data.return_value.moving_time = 3600

        mock_point1 = MagicMock()
        mock_point1.time = datetime(2023, 1, 1, 10, 0, 0)
        mock_point1.latitude = 40.0
        mock_point1.longitude = -105.0
        mock_point1.elevation = 1600.0

        mock_point2 = MagicMock()
        mock_point2.time = datetime(2023, 1, 1, 10, 1, 0)
        mock_point2.latitude = 40.1
        mock_point2.longitude = -105.1
        mock_point2.elevation = 1610.0

        mock_segment = MagicMock()
        mock_segment.points = [mock_point1, mock_point2]

        mock_track = MagicMock()
        mock_track.segments = [mock_segment]

        mock_gpx.tracks = [mock_track]

        yield mock_parse


def test_gpx_parser_initialization():
    parser = GpxParser("dummy.gpx")
    assert parser.file_path == "dummy.gpx"


def test_gpx_parser_parse_method_returns_workout_data(mock_gpxpy_parse):
    # Mock the open function as well, since GpxParser directly opens the file
    with patch('builtins.open', mock_open(read_data="<gpx></gpx>")):
        parser = GpxParser("dummy.gpx")
        workout_data = parser.parse()

        mock_gpxpy_parse.assert_called_once()  # gpxpy.parse is called

        assert isinstance(workout_data, WorkoutData)
        assert isinstance(workout_data.metadata, WorkoutMetadata)
        assert workout_data.metadata.file_type == "GPX"
        assert workout_data.metadata.start_time == datetime(2023, 1, 1, 10, 0, 0)
        assert workout_data.metadata.duration == timedelta(seconds=3600)
        assert not workout_data.time_series_data.empty
        assert "latitude" in workout_data.time_series_data.columns
        assert "longitude" in workout_data.time_series_data.columns
        assert "elevation" in workout_data.time_series_data.columns
tests/unit/test_tcx_parser.py (new file, 34 lines)
@@ -0,0 +1,34 @@
|
import pytest
from unittest.mock import MagicMock, patch
from src.core.file_parser import TcxParser, WorkoutData, WorkoutMetadata
from datetime import datetime, timedelta


@pytest.fixture
def mock_tcx_parser():
    # Patch the TCXParser class where it's imported in src.core.file_parser
    with patch('src.core.file_parser.TCXParser') as mock_tcx_parser_class:
        mock_tcx_instance = MagicMock()
        mock_tcx_parser_class.return_value = mock_tcx_instance

        mock_tcx_instance.started_at = datetime(2023, 1, 1, 10, 0, 0)
        mock_tcx_instance.duration = 3600  # 1 hour
        # Mock other attributes as needed for future tests

        yield mock_tcx_parser_class


def test_tcx_parser_initialization():
    parser = TcxParser("dummy.tcx")
    assert parser.file_path == "dummy.tcx"


def test_tcx_parser_parse_method_returns_workout_data(mock_tcx_parser):
    parser = TcxParser("dummy.tcx")
    workout_data = parser.parse()

    mock_tcx_parser.assert_called_once_with("dummy.tcx")

    assert isinstance(workout_data, WorkoutData)
    assert isinstance(workout_data.metadata, WorkoutMetadata)
    assert workout_data.metadata.file_type == "TCX"
    assert workout_data.metadata.start_time == datetime(2023, 1, 1, 10, 0, 0)
    assert workout_data.metadata.duration == timedelta(seconds=3600)
    assert workout_data.time_series_data.empty  # Currently, no time series data is mocked
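Taken together, the three parser test files above assume the same shape: each parser is constructed with a file path and exposes parse() returning a WorkoutData. The extension-based dispatch below is a hypothetical convenience wrapper for illustration only, not code from this commit (the batch tests suggest BatchProcessor does something similar internally, rejecting unsupported extensions):

from pathlib import Path

from src.core.file_parser import FitParser, TcxParser, GpxParser

# Hypothetical helper: the parser classes and their parse() methods are the tested
# interface; the dispatch table itself is an illustration only.
_PARSERS = {".fit": FitParser, ".tcx": TcxParser, ".gpx": GpxParser}


def parse_workout(path):
    parser_cls = _PARSERS.get(Path(path).suffix.lower())
    if parser_cls is None:
        raise ValueError(f"Unsupported file type: {path}")
    return parser_cls(str(path)).parse()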
tests/unit/test_workout_data.py (new file, 84 lines)
@@ -0,0 +1,84 @@
from datetime import datetime, timedelta
import pandas as pd
from src.core.workout_data import WorkoutData, WorkoutMetadata, PowerData, HeartRateData, SpeedData, ElevationData


def test_workout_metadata_creation():
    metadata = WorkoutMetadata(
        start_time=datetime(2023, 1, 1, 10, 0, 0),
        duration=timedelta(hours=1),
        device="Garmin",
        file_type="FIT"
    )
    assert metadata.start_time == datetime(2023, 1, 1, 10, 0, 0)
    assert metadata.duration == timedelta(hours=1)
    assert metadata.device == "Garmin"
    assert metadata.file_type == "FIT"


def test_power_data_creation():
    power_data = PowerData(
        raw_power_stream=[100.0, 150.0, 200.0],
        average_power=150.0,
        normalized_power=160.0,
        intensity_factor=0.8,
        training_stress_score=75.0,
        zone_distribution={"Zone 2": "30min"}
    )
    assert power_data.average_power == 150.0
    assert power_data.raw_power_stream == [100.0, 150.0, 200.0]


def test_heart_rate_data_creation():
    hr_data = HeartRateData(
        raw_hr_stream=[120, 130, 140],
        average_hr=130.0,
        max_hr=180,
        zone_distribution={"Zone 3": "20min"}
    )
    assert hr_data.average_hr == 130.0
    assert hr_data.raw_hr_stream == [120, 130, 140]


def test_speed_data_creation():
    speed_data = SpeedData(
        raw_speed_stream=[5.0, 6.0, 7.0],
        average_speed=6.0,
        max_speed=8.0
    )
    assert speed_data.average_speed == 6.0


def test_elevation_data_creation():
    elevation_data = ElevationData(
        raw_elevation_stream=[100.0, 110.0, 105.0],
        total_ascent=20.0,
        total_descent=15.0,
        max_elevation=110.0,
        min_elevation=95.0
    )
    assert elevation_data.total_ascent == 20.0


def test_workout_data_creation():
    metadata = WorkoutMetadata(
        start_time=datetime(2023, 1, 1, 10, 0, 0),
        duration=timedelta(hours=1),
        device="Garmin",
        file_type="FIT"
    )
    power_data = PowerData(average_power=150.0)
    hr_data = HeartRateData(average_hr=130.0)
    speed_data = SpeedData(average_speed=25.0)
    elevation_data = ElevationData(total_ascent=100.0)
    time_series = pd.DataFrame({"timestamp": [datetime(2023, 1, 1, 10, 0, 0)], "power": [150]})

    workout_data = WorkoutData(
        metadata=metadata,
        time_series_data=time_series,
        power_data=power_data,
        heart_rate_data=hr_data,
        speed_data=speed_data,
        elevation_data=elevation_data
    )

    assert workout_data.metadata.file_type == "FIT"
    assert workout_data.power_data.average_power == 150.0
    assert workout_data.heart_rate_data.average_hr == 130.0
    assert workout_data.speed_data.average_speed == 25.0
    assert workout_data.elevation_data.total_ascent == 100.0
    assert not workout_data.time_series_data.empty