Mirror of https://github.com/sstent/FitTrack_ReportGenerator.git (synced 2026-01-26 17:12:28 +00:00)
sync
12 binary files changed (contents not shown).
@@ -1,139 +0,0 @@
-import pytest
-import zipfile
-import io
-from unittest.mock import MagicMock, patch
-from src.core.batch_processor import BatchProcessor
-from src.core.workout_data import WorkoutData, WorkoutMetadata
-from datetime import datetime, timedelta
-import pandas as pd
-
-@pytest.fixture
-def mock_workout_data():
-    metadata = WorkoutMetadata(
-        start_time=datetime(2025, 1, 1, 10, 0, 0),
-        duration=timedelta(minutes=10),
-        device="Garmin",
-        file_type="FIT"
-    )
-    time_series_data = pd.DataFrame({
-        "power": [100, 110, 120],
-        "heart_rate": [150, 155, 160]
-    })
-    return WorkoutData(metadata=metadata, time_series_data=time_series_data)
-
-@pytest.fixture
-def mock_file_parser():
-    parser = MagicMock()
-    parser.parse.return_value = MagicMock(spec=WorkoutData)
-    return parser
-
-@pytest.fixture
-def mock_workout_analyzer():
-    analyzer = MagicMock()
-    analyzer.calculate_summary_metrics.return_value = {"avg_power": 100}
-    return analyzer
-
-@pytest.fixture
-def mock_report_generator():
-    generator = MagicMock()
-    generator.generate_html_report.return_value = "<html>report</html>"
-    return generator
-
-@pytest.fixture
-def mock_db_session():
-    session = MagicMock()
-    return session
-
-def create_zip_file(file_names_and_content):
-    zip_buffer = io.BytesIO()
-    with zipfile.ZipFile(zip_buffer, 'w', zipfile.ZIP_DEFLATED) as zf:
-        for name, content in file_names_and_content.items():
-            zf.writestr(name, content)
-    zip_buffer.seek(0)
-    return zip_buffer
-
-def test_batch_processor_initialization(mock_db_session):
-    processor = BatchProcessor(db_session=mock_db_session)
-    assert processor.db_session == mock_db_session
-
-@patch('src.core.file_parser.FitParser')
-@patch('src.core.file_parser.TcxParser')
-@patch('src.core.file_parser.GpxParser')
-@patch('src.core.workout_analyzer.WorkoutAnalyzer')
-@patch('src.core.report_generator.ReportGenerator')
-def test_process_zip_file_single_fit(mock_report_generator_cls, mock_workout_analyzer_cls, mock_gpx_parser_cls, mock_tcx_parser_cls, mock_fit_parser_cls, mock_db_session, mock_workout_data):
-    # Mock parsers to return mock_workout_data
-    mock_fit_parser_cls.return_value.parse.return_value = mock_workout_data
-    mock_workout_analyzer_cls.return_value.calculate_summary_metrics.return_value = {"avg_power": 100}
-    mock_report_generator_cls.return_value.generate_html_report.return_value = "<html>report</html>"
-
-    zip_content = create_zip_file({"workout.fit": b"dummy_fit_content"})
-    processor = BatchProcessor(db_session=mock_db_session)
-    results = processor.process_zip_file(zip_content, user_id=None, ftp_value=None)
-
-    assert len(results) == 1
-    assert results[0]["file_name"] == "workout.fit"
-    assert results[0]["status"] == "completed"
-    mock_fit_parser_cls.return_value.parse.assert_called_once()
-    mock_workout_analyzer_cls.assert_called_once()
-    mock_db_session.add.assert_called_once()
-    mock_db_session.commit.assert_called_once()
-
-@patch('src.core.file_parser.FitParser')
-@patch('src.core.file_parser.TcxParser')
-@patch('src.core.file_parser.GpxParser')
-@patch('src.core.workout_analyzer.WorkoutAnalyzer')
-@patch('src.core.report_generator.ReportGenerator')
-def test_process_zip_file_multiple_files(mock_report_generator_cls, mock_workout_analyzer_cls, mock_gpx_parser_cls, mock_tcx_parser_cls, mock_fit_parser_cls, mock_db_session, mock_workout_data):
-    mock_fit_parser_cls.return_value.parse.return_value = mock_workout_data
-    mock_tcx_parser_cls.return_value.parse.return_value = mock_workout_data
-    mock_workout_analyzer_cls.return_value.calculate_summary_metrics.return_value = {"avg_power": 100}
-    mock_report_generator_cls.return_value.generate_html_report.return_value = "<html>report</html>"
-
-    zip_content = create_zip_file({"workout1.fit": b"dummy_fit_content", "workout2.tcx": b"dummy_tcx_content"})
-    processor = BatchProcessor(db_session=mock_db_session)
-    results = processor.process_zip_file(zip_content, user_id=None, ftp_value=None)
-
-    assert len(results) == 2
-    assert any(r["file_name"] == "workout1.fit" for r in results)
-    assert any(r["file_name"] == "workout2.tcx" for r in results)
-    assert all(r["status"] == "completed" for r in results)
-    assert mock_fit_parser_cls.return_value.parse.call_count == 1
-    assert mock_tcx_parser_cls.return_value.parse.call_count == 1
-    assert mock_workout_analyzer_cls.call_count == 2
-    assert mock_db_session.add.call_count == 2
-    assert mock_db_session.commit.call_count == 2
-
-@patch('src.core.file_parser.FitParser')
-@patch('src.core.workout_analyzer.WorkoutAnalyzer')
-def test_process_zip_file_unsupported_file_type(mock_workout_analyzer_cls, mock_fit_parser_cls, mock_db_session):
-    zip_content = create_zip_file({"document.txt": b"some text"})
-    processor = BatchProcessor(db_session=mock_db_session)
-    results = processor.process_zip_file(zip_content, user_id=None, ftp_value=None)
-
-    assert len(results) == 1
-    assert results[0]["file_name"] == "document.txt"
-    assert results[0]["status"] == "failed"
-    assert "Unsupported file type" in results[0]["error_message"]
-    mock_fit_parser_cls.return_value.parse.assert_not_called()
-    mock_workout_analyzer_cls.assert_not_called()
-    mock_db_session.add.assert_not_called()
-    mock_db_session.commit.assert_not_called()
-
-@patch('src.core.file_parser.FitParser')
-@patch('src.core.workout_analyzer.WorkoutAnalyzer')
-def test_process_zip_file_parsing_error(mock_workout_analyzer_cls, mock_fit_parser_cls, mock_db_session):
-    mock_fit_parser_cls.return_value.parse.side_effect = Exception("Corrupted file")
-
-    zip_content = create_zip_file({"corrupted.fit": b"bad content"})
-    processor = BatchProcessor(db_session=mock_db_session)
-    results = processor.process_zip_file(zip_content, user_id=None, ftp_value=None)
-
-    assert len(results) == 1
-    assert results[0]["file_name"] == "corrupted.fit"
-    assert results[0]["status"] == "failed"
-    assert "Corrupted file" in results[0]["error_message"]
-    mock_fit_parser_cls.return_value.parse.assert_called_once()
-    mock_workout_analyzer_cls.assert_not_called()
-    mock_db_session.add.assert_not_called()
-    mock_db_session.commit.assert_not_called()
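The deleted tests above pin down the BatchProcessor contract: one status dict per archive member, parser dispatch by file extension, and a database add/commit per successfully parsed workout. A minimal sketch consistent with those assertions follows; it is an illustration inferred from the tests, not the project's actual implementation, and the object passed to db_session.add is an assumption.

import io
import zipfile


class BatchProcessor:
    def __init__(self, db_session):
        self.db_session = db_session

    def process_zip_file(self, zip_content, user_id=None, ftp_value=None):
        # Local imports so the classes can be patched at their source modules, as the tests do.
        from src.core import file_parser, workout_analyzer

        parsers = {
            ".fit": file_parser.FitParser,
            ".tcx": file_parser.TcxParser,
            ".gpx": file_parser.GpxParser,
        }
        results = []
        with zipfile.ZipFile(zip_content) as archive:
            for name in archive.namelist():
                suffix = "." + name.rsplit(".", 1)[-1].lower() if "." in name else ""
                try:
                    if suffix not in parsers:
                        raise ValueError(f"Unsupported file type: {suffix}")
                    workout = parsers[suffix]().parse(io.BytesIO(archive.read(name)))
                    metrics = workout_analyzer.WorkoutAnalyzer(workout).calculate_summary_metrics()
                    self.db_session.add(workout)  # the real code presumably persists an ORM model instead
                    self.db_session.commit()
                    results.append({"file_name": name, "status": "completed", "metrics": metrics})
                except Exception as exc:
                    results.append({"file_name": name, "status": "failed", "error_message": str(exc)})
        return results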
@@ -1,53 +1,60 @@
 import pytest
 import pandas as pd
 from datetime import datetime, timedelta
-from src.core.workout_data import WorkoutData, WorkoutMetadata, PowerData, HeartRateData, SpeedData, ElevationData
+from src.core.workout_data import (
+    WorkoutData,
+    WorkoutMetadata,
+    PowerData,
+    HeartRateData,
+    SpeedData,
+    ElevationData,
+)
 from src.core.chart_generator import ChartGenerator
 
 
 @pytest.fixture
 def sample_workout_data():
     # Create dummy time-series data
-    timestamps = pd.to_datetime([datetime(2025, 1, 1, 10, 0, 0) + timedelta(seconds=i) for i in range(600)])
+    timestamps = pd.to_datetime(
+        [datetime(2025, 1, 1, 10, 0, 0) + timedelta(seconds=i) for i in range(600)]
+    )
     power = pd.Series([150 + 50 * (i % 10) for i in range(600)], index=timestamps)
     heart_rate = pd.Series([120 + 10 * (i % 5) for i in range(600)], index=timestamps)
     speed = pd.Series([5 + 2 * (i % 7) for i in range(600)], index=timestamps)
     altitude = pd.Series([100 + 10 * (i % 12) for i in range(600)], index=timestamps)
 
-    time_series_data = pd.DataFrame({
-        "power": power,
-        "heart_rate": heart_rate,
-        "speed": speed,
-        "altitude": altitude
-    })
+    time_series_data = pd.DataFrame(
+        {"power": power, "heart_rate": heart_rate, "speed": speed, "altitude": altitude}
+    )
 
     metadata = WorkoutMetadata(
         start_time=datetime(2025, 1, 1, 10, 0, 0),
         duration=timedelta(minutes=10),
         device="Garmin",
-        file_type="FIT"
+        file_type="FIT",
     )
 
     power_data = PowerData(
         raw_power_stream=power.tolist(),
         average_power=power.mean(),
-        normalized_power=power.mean() * 1.05, # Dummy value
+        normalized_power=power.mean() * 1.05,  # Dummy value
        intensity_factor=0.8,
         training_stress_score=50,
-        zone_distribution={'Z1': 100, 'Z2': 200, 'Z3': 300}
+        zone_distribution={"Z1": 100, "Z2": 200, "Z3": 300},
     )
 
     heart_rate_data = HeartRateData(
         raw_hr_stream=heart_rate.tolist(),
         average_hr=heart_rate.mean(),
         max_hr=heart_rate.max(),
-        zone_distribution={'Z1': 150, 'Z2': 250, 'Z3': 200}
+        zone_distribution={"Z1": 150, "Z2": 250, "Z3": 200},
     )
 
     speed_data = SpeedData(
         raw_speed_stream=speed.tolist(),
         average_speed=speed.mean(),
         max_speed=speed.max(),
-        zone_distribution={'S1': 100, 'S2': 200, 'S3': 300}
+        zone_distribution={"S1": 100, "S2": 200, "S3": 300},
     )
 
     elevation_data = ElevationData(
@@ -55,7 +62,7 @@ def sample_workout_data():
         total_ascent=100,
         total_descent=50,
         max_elevation=200,
-        min_elevation=50
+        min_elevation=50,
     )
 
     return WorkoutData(
@@ -64,40 +71,40 @@ def sample_workout_data():
         power_data=power_data,
         heart_rate_data=heart_rate_data,
         speed_data=speed_data,
-        elevation_data=elevation_data
+        elevation_data=elevation_data,
     )
 
-def test_generate_power_curve_chart(sample_workout_data, tmp_path):
-    chart_generator = ChartGenerator(sample_workout_data)
-    output_file = tmp_path / "power_curve.png"
-    chart_generator.generate_power_curve_chart(output_file)
-    assert output_file.exists()
-    assert output_file.stat().st_size > 0
+
+def test_generate_power_curve_chart(sample_workout_data):
+    chart_generator = ChartGenerator(sample_workout_data)
+    chart_bytes = chart_generator.generate_power_curve_chart()
+    assert isinstance(chart_bytes, bytes)
+    assert len(chart_bytes) > 0
 
-def test_generate_elevation_profile_chart(sample_workout_data, tmp_path):
-    chart_generator = ChartGenerator(sample_workout_data)
-    output_file = tmp_path / "elevation_profile.png"
-    chart_generator.generate_elevation_profile_chart(output_file)
-    assert output_file.exists()
-    assert output_file.stat().st_size > 0
+
+def test_generate_elevation_profile_chart(sample_workout_data):
+    chart_generator = ChartGenerator(sample_workout_data)
+    chart_bytes = chart_generator.generate_elevation_profile_chart()
+    assert isinstance(chart_bytes, bytes)
+    assert len(chart_bytes) > 0
 
-def test_generate_power_zone_distribution_chart(sample_workout_data, tmp_path):
-    chart_generator = ChartGenerator(sample_workout_data)
-    output_file = tmp_path / "power_zone_distribution.png"
-    chart_generator.generate_zone_distribution_chart("power", output_file)
-    assert output_file.exists()
-    assert output_file.stat().st_size > 0
+
+def test_generate_power_zone_distribution_chart(sample_workout_data):
+    chart_generator = ChartGenerator(sample_workout_data)
+    chart_bytes = chart_generator.generate_zone_distribution_chart("power")
+    assert isinstance(chart_bytes, bytes)
+    assert len(chart_bytes) > 0
 
-def test_generate_hr_zone_distribution_chart(sample_workout_data, tmp_path):
-    chart_generator = ChartGenerator(sample_workout_data)
-    output_file = tmp_path / "hr_zone_distribution.png"
-    chart_generator.generate_zone_distribution_chart("heart_rate", output_file)
-    assert output_file.exists()
-    assert output_file.stat().st_size > 0
+
+def test_generate_hr_zone_distribution_chart(sample_workout_data):
+    chart_generator = ChartGenerator(sample_workout_data)
+    chart_bytes = chart_generator.generate_zone_distribution_chart("heart_rate")
+    assert isinstance(chart_bytes, bytes)
+    assert len(chart_bytes) > 0
 
-def test_generate_speed_zone_distribution_chart(sample_workout_data, tmp_path):
-    chart_generator = ChartGenerator(sample_workout_data)
-    output_file = tmp_path / "speed_zone_distribution.png"
-    chart_generator.generate_zone_distribution_chart("speed", output_file)
-    assert output_file.exists()
-    assert output_file.stat().st_size > 0
+
+def test_generate_speed_zone_distribution_chart(sample_workout_data):
+    chart_generator = ChartGenerator(sample_workout_data)
+    chart_bytes = chart_generator.generate_zone_distribution_chart("speed")
+    assert isinstance(chart_bytes, bytes)
+    assert len(chart_bytes) > 0
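The rewritten chart tests no longer write PNGs to tmp_path; each generator method is now expected to return raw image bytes. One common way to get bytes out of a matplotlib figure is sketched below; the plotting library and the helper name render_series_as_png are assumptions, not ChartGenerator's real API. Returning bytes keeps the generator free of filesystem concerns, so callers and tests decide whether to write to disk or embed the image in an HTML report.

import io

import matplotlib
matplotlib.use("Agg")  # headless backend, no display required
import matplotlib.pyplot as plt


def render_series_as_png(series, title="chart"):
    # Plot the series, serialise the figure into an in-memory PNG, and return the raw bytes.
    fig, ax = plt.subplots()
    ax.plot(series.index, series.values)
    ax.set_title(title)
    buf = io.BytesIO()
    fig.savefig(buf, format="png")
    plt.close(fig)
    return buf.getvalue()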
@@ -1,42 +1,52 @@
+import io
 import pytest
 from unittest.mock import MagicMock, patch
-from src.core.file_parser import FitParser, WorkoutData, WorkoutMetadata, PowerData, HeartRateData, SpeedData, ElevationData
+from src.core.file_parser import (
+    FitParser,
+    WorkoutData,
+    WorkoutMetadata,
+    PowerData,
+    HeartRateData,
+    SpeedData,
+    ElevationData,
+)
 from datetime import datetime, timedelta
 import pandas as pd
 
 
 @pytest.fixture
 def mock_fit_file():
-    with patch('fitparse.FitFile') as mock_fit_file_class:
+    with patch("fitparse.FitFile") as mock_fit_file_class:
         mock_fit_file_instance = MagicMock()
         mock_fit_file_class.return_value = mock_fit_file_instance
 
         # Mocking get_messages to return some dummy records
         mock_record1 = MagicMock()
         mock_record1.as_dict.return_value = {
-            'timestamp': datetime(2023, 1, 1, 10, 0, 0),
-            'power': 150,
-            'heart_rate': 130,
-            'speed': 5.0,
-            'altitude': 100.0
+            "timestamp": datetime(2023, 1, 1, 10, 0, 0),
+            "power": 150,
+            "heart_rate": 130,
+            "speed": 5.0,
+            "altitude": 100.0,
         }
         mock_record2 = MagicMock()
         mock_record2.as_dict.return_value = {
-            'timestamp': datetime(2023, 1, 1, 10, 1, 0),
-            'power': 160,
-            'heart_rate': 135,
-            'speed': 5.5,
-            'altitude': 105.0
+            "timestamp": datetime(2023, 1, 1, 10, 1, 0),
+            "power": 160,
+            "heart_rate": 135,
+            "speed": 5.5,
+            "altitude": 105.0,
        }
         mock_fit_file_instance.get_messages.return_value = [mock_record1, mock_record2]
         yield mock_fit_file_class
 
-def test_fit_parser_initialization():
-    parser = FitParser("dummy.fit")
-    assert parser.file_path == "dummy.fit"
 
 def test_fit_parser_parse_method_returns_workout_data(mock_fit_file):
-    parser = FitParser("dummy.fit")
-    workout_data = parser.parse()
+    # Mock the FitFile constructor directly within the test
+    with patch('fitparse.FitFile') as MockFitFile:
+        MockFitFile.return_value = mock_fit_file.return_value  # Use the mocked instance from the fixture
+        parser = FitParser()
+        workout_data = parser.parse(io.BytesIO(b"dummy content"))
 
     assert isinstance(workout_data, WorkoutData)
     assert isinstance(workout_data.metadata, WorkoutMetadata)
@@ -48,4 +58,4 @@ def test_fit_parser_parse_method_returns_workout_data(mock_fit_file):
     assert "speed" in workout_data.time_series_data.columns
     assert "altitude" in workout_data.time_series_data.columns
     assert workout_data.metadata.start_time == datetime(2023, 1, 1, 10, 0, 0)
-    assert workout_data.metadata.duration == timedelta(minutes=1)
+    assert workout_data.metadata.duration == timedelta(minutes=1)
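The updated FIT test drops the file-path constructor and feeds a byte stream to parse(). A sketch of a stream-based parse() matching what the fixture mocks follows: fitparse.FitFile built from the file-like object and records exposing as_dict(), an interface assumed by the tests rather than guaranteed by the real fitparse package; the device value is likewise an assumption.

import fitparse
import pandas as pd

from src.core.workout_data import WorkoutData, WorkoutMetadata


class FitParser:
    def parse(self, file_obj):
        fit_file = fitparse.FitFile(file_obj)
        # The fixture mocks get_messages() and record.as_dict(); mirror that interface here.
        rows = [record.as_dict() for record in fit_file.get_messages("record")]
        frame = pd.DataFrame(rows)
        metadata = WorkoutMetadata(
            start_time=rows[0]["timestamp"],
            duration=rows[-1]["timestamp"] - rows[0]["timestamp"],
            device="Unknown",  # assumption: device extraction is not exercised by the tests
            file_type="FIT",
        )
        return WorkoutData(metadata=metadata, time_series_data=frame)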
@@ -1,11 +1,13 @@
+import io
 import pytest
 from unittest.mock import MagicMock, patch, mock_open
 from src.core.file_parser import GpxParser, WorkoutData, WorkoutMetadata
 from datetime import datetime, timedelta
 
 
 @pytest.fixture
 def mock_gpxpy_parse():
-    with patch('gpxpy.parse') as mock_parse:
+    with patch("gpxpy.parse") as mock_parse:
         mock_gpx = MagicMock()
         mock_parse.return_value = mock_gpx
 
@@ -35,17 +37,14 @@ def mock_gpxpy_parse():
 
         yield mock_parse
 
-def test_gpx_parser_initialization():
-    parser = GpxParser("dummy.gpx")
-    assert parser.file_path == "dummy.gpx"
 
 def test_gpx_parser_parse_method_returns_workout_data(mock_gpxpy_parse):
     # Mock the open function as well, since GpxParser directly opens the file
-    with patch('builtins.open', mock_open(read_data="<gpx></gpx>")):
-        parser = GpxParser("dummy.gpx")
-        workout_data = parser.parse()
+    with patch("builtins.open", mock_open(read_data="<gpx></gpx>")):
+        parser = GpxParser()
+        workout_data = parser.parse(io.BytesIO(b"<gpx></gpx>"))
 
-    mock_gpxpy_parse.assert_called_once() # gpxpy.parse is called
+    mock_gpxpy_parse.assert_called_once()  # gpxpy.parse is called
 
     assert isinstance(workout_data, WorkoutData)
     assert isinstance(workout_data.metadata, WorkoutMetadata)
@@ -55,4 +54,4 @@ def test_gpx_parser_parse_method_returns_workout_data(mock_gpxpy_parse):
     assert not workout_data.time_series_data.empty
     assert "latitude" in workout_data.time_series_data.columns
    assert "longitude" in workout_data.time_series_data.columns
-    assert "elevation" in workout_data.time_series_data.columns
+    assert "elevation" in workout_data.time_series_data.columns
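The GPX test follows the same stream-based pattern. A rough sketch using gpxpy, whose parse() accepts a file-like object, is below; the column names mirror the assertions above, while the WorkoutMetadata values and the empty-file handling are assumptions.

import gpxpy
import pandas as pd

from src.core.workout_data import WorkoutData, WorkoutMetadata


class GpxParser:
    def parse(self, file_obj):
        gpx = gpxpy.parse(file_obj)
        # Flatten every track point into one row per sample.
        points = [
            {
                "timestamp": point.time,
                "latitude": point.latitude,
                "longitude": point.longitude,
                "elevation": point.elevation,
            }
            for track in gpx.tracks
            for segment in track.segments
            for point in segment.points
        ]
        frame = pd.DataFrame(points)
        metadata = WorkoutMetadata(
            start_time=points[0]["timestamp"] if points else None,
            duration=(points[-1]["timestamp"] - points[0]["timestamp"]) if points else None,
            device="Unknown",  # assumption
            file_type="GPX",
        )
        return WorkoutData(metadata=metadata, time_series_data=frame)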
@@ -1,34 +1,35 @@
+import io
 import pytest
-from unittest.mock import MagicMock, patch
+from unittest.mock import MagicMock, patch, ANY
 from src.core.file_parser import TcxParser, WorkoutData, WorkoutMetadata
 from datetime import datetime, timedelta
 
 
 @pytest.fixture
 def mock_tcx_parser():
     # Patch the TCXParser class where it's imported in src.core.file_parser
-    with patch('src.core.file_parser.TCXParser') as mock_tcx_parser_class:
+    with patch("src.core.file_parser.TCXParser") as mock_tcx_parser_class:
         mock_tcx_instance = MagicMock()
         mock_tcx_parser_class.return_value = mock_tcx_instance
 
         mock_tcx_instance.started_at = datetime(2023, 1, 1, 10, 0, 0)
-        mock_tcx_instance.duration = 3600 # 1 hour
+        mock_tcx_instance.duration = 3600  # 1 hour
         # Mock other attributes as needed for future tests
 
         yield mock_tcx_parser_class
 
-def test_tcx_parser_initialization():
-    parser = TcxParser("dummy.tcx")
-    assert parser.file_path == "dummy.tcx"
 
 def test_tcx_parser_parse_method_returns_workout_data(mock_tcx_parser):
-    parser = TcxParser("dummy.tcx")
-    workout_data = parser.parse()
+    parser = TcxParser()
+    workout_data = parser.parse(io.BytesIO(b"<tcx></tcx>"))
 
-    mock_tcx_parser.assert_called_once_with("dummy.tcx")
+    mock_tcx_parser.assert_called_once_with(ANY)
 
     assert isinstance(workout_data, WorkoutData)
     assert isinstance(workout_data.metadata, WorkoutMetadata)
     assert workout_data.metadata.file_type == "TCX"
     assert workout_data.metadata.start_time == datetime(2023, 1, 1, 10, 0, 0)
     assert workout_data.metadata.duration == timedelta(seconds=3600)
-    assert workout_data.time_series_data.empty # Currently, no time series data is mocked
+    assert (
+        workout_data.time_series_data.empty
+    )  # Currently, no time series data is mocked
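The TCX fixture stubs only two attributes on the patched TCXParser (started_at and the duration in seconds), so a parse() along the following lines would satisfy it. This mirrors the mocked interface only; the real TCXParser wrapper in src.core.file_parser may expose more fields and a different constructor signature.

import pandas as pd
from datetime import timedelta

from src.core.file_parser import TCXParser  # assumption: the class the fixture patches under this name
from src.core.workout_data import WorkoutData, WorkoutMetadata


class TcxParser:
    def parse(self, file_obj):
        tcx = TCXParser(file_obj)  # patched in the test, hence assert_called_once_with(ANY)
        metadata = WorkoutMetadata(
            start_time=tcx.started_at,
            duration=timedelta(seconds=tcx.duration),
            device="Unknown",  # assumption
            file_type="TCX",
        )
        # No per-sample stream is exercised by the test, so the time series stays empty.
        return WorkoutData(metadata=metadata, time_series_data=pd.DataFrame())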
@@ -1,19 +1,28 @@
 from datetime import datetime, timedelta
 import pandas as pd
-from src.core.workout_data import WorkoutData, WorkoutMetadata, PowerData, HeartRateData, SpeedData, ElevationData
+from src.core.workout_data import (
+    WorkoutData,
+    WorkoutMetadata,
+    PowerData,
+    HeartRateData,
+    SpeedData,
+    ElevationData,
+)
 
 
 def test_workout_metadata_creation():
     metadata = WorkoutMetadata(
         start_time=datetime(2023, 1, 1, 10, 0, 0),
         duration=timedelta(hours=1),
         device="Garmin",
-        file_type="FIT"
+        file_type="FIT",
     )
     assert metadata.start_time == datetime(2023, 1, 1, 10, 0, 0)
     assert metadata.duration == timedelta(hours=1)
     assert metadata.device == "Garmin"
     assert metadata.file_type == "FIT"
 
 
 def test_power_data_creation():
     power_data = PowerData(
         raw_power_stream=[100.0, 150.0, 200.0],
@@ -21,51 +30,55 @@ def test_power_data_creation():
         normalized_power=160.0,
         intensity_factor=0.8,
         training_stress_score=75.0,
-        zone_distribution={"Zone 2": "30min"}
+        zone_distribution={"Zone 2": "30min"},
     )
     assert power_data.average_power == 150.0
     assert power_data.raw_power_stream == [100.0, 150.0, 200.0]
 
 
 def test_heart_rate_data_creation():
     hr_data = HeartRateData(
         raw_hr_stream=[120, 130, 140],
         average_hr=130.0,
         max_hr=180,
-        zone_distribution={"Zone 3": "20min"}
+        zone_distribution={"Zone 3": "20min"},
     )
     assert hr_data.average_hr == 130.0
     assert hr_data.raw_hr_stream == [120, 130, 140]
 
 
 def test_speed_data_creation():
     speed_data = SpeedData(
-        raw_speed_stream=[5.0, 6.0, 7.0],
-        average_speed=6.0,
-        max_speed=8.0
+        raw_speed_stream=[5.0, 6.0, 7.0], average_speed=6.0, max_speed=8.0
     )
     assert speed_data.average_speed == 6.0
 
 
 def test_elevation_data_creation():
     elevation_data = ElevationData(
         raw_elevation_stream=[100.0, 110.0, 105.0],
         total_ascent=20.0,
         total_descent=15.0,
         max_elevation=110.0,
-        min_elevation=95.0
+        min_elevation=95.0,
     )
     assert elevation_data.total_ascent == 20.0
 
 
 def test_workout_data_creation():
     metadata = WorkoutMetadata(
         start_time=datetime(2023, 1, 1, 10, 0, 0),
         duration=timedelta(hours=1),
         device="Garmin",
-        file_type="FIT"
+        file_type="FIT",
     )
     power_data = PowerData(average_power=150.0)
     hr_data = HeartRateData(average_hr=130.0)
     speed_data = SpeedData(average_speed=25.0)
     elevation_data = ElevationData(total_ascent=100.0)
-    time_series = pd.DataFrame({"timestamp": [datetime(2023, 1, 1, 10, 0, 0)], "power": [150]})
+    time_series = pd.DataFrame(
+        {"timestamp": [datetime(2023, 1, 1, 10, 0, 0)], "power": [150]}
+    )
 
     workout_data = WorkoutData(
         metadata=metadata,
@@ -73,7 +86,7 @@ def test_workout_data_creation():
         power_data=power_data,
         heart_rate_data=hr_data,
         speed_data=speed_data,
-        elevation_data=elevation_data
+        elevation_data=elevation_data,
     )
 
     assert workout_data.metadata.file_type == "FIT"
@@ -81,4 +94,4 @@ def test_workout_data_creation():
     assert workout_data.heart_rate_data.average_hr == 130.0
     assert workout_data.speed_data.average_speed == 25.0
     assert workout_data.elevation_data.total_ascent == 100.0
-    assert not workout_data.time_series_data.empty
+    assert not workout_data.time_series_data.empty
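Taken together, these tests pin down the shape of the data model: every metric field must be optional (PowerData(average_power=150.0) alone has to construct), and WorkoutData aggregates metadata, a pandas time series, and the per-signal summaries. A hedged sketch of definitions that would satisfy the assertions is given below; the real src.core.workout_data module may differ in field types and defaults.

from dataclasses import dataclass, field
from datetime import datetime, timedelta
from typing import Any, Optional

import pandas as pd


@dataclass
class WorkoutMetadata:
    start_time: datetime
    duration: timedelta
    device: str
    file_type: str


@dataclass
class PowerData:
    raw_power_stream: list = field(default_factory=list)
    average_power: Optional[float] = None
    normalized_power: Optional[float] = None
    intensity_factor: Optional[float] = None
    training_stress_score: Optional[float] = None
    zone_distribution: dict = field(default_factory=dict)


# HeartRateData, SpeedData and ElevationData would follow the same pattern
# (every field optional with a sensible default); omitted here for brevity.


@dataclass
class WorkoutData:
    metadata: WorkoutMetadata
    time_series_data: pd.DataFrame = field(default_factory=pd.DataFrame)
    power_data: Any = None
    heart_rate_data: Any = None
    speed_data: Any = None
    elevation_data: Any = None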