diff --git a/.github/workflows/gh-pages.yml b/.github/workflows/gh-pages.yml
index 6850b560885..0adc1109124 100644
--- a/.github/workflows/gh-pages.yml
+++ b/.github/workflows/gh-pages.yml
@@ -47,6 +47,9 @@ jobs:
steps:
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- uses: actions/checkout@v3
+ with:
+      # if your default branch is not master, please change it here
+ ref: master
- name: Cache Data Files
if: inputs.save_data_in_github_cache
diff --git a/run_page/config.py b/run_page/config.py
index a5693db101d..aa4f16b97b7 100644
--- a/run_page/config.py
+++ b/run_page/config.py
@@ -20,10 +20,9 @@
JSON_FILE = os.path.join(parent, "src", "static", "activities.json")
SYNCED_FILE = os.path.join(parent, "imported.json")
-# TODO: Move into nike_sync
-BASE_URL = "https://api.nike.com/sport/v3/me"
-TOKEN_REFRESH_URL = "https://unite.nike.com/tokenRefresh"
-NIKE_CLIENT_ID = "HlHa2Cje3ctlaOqnxvgZXNaAs7T9nAuH"
+# TODO: Move into nike_sync NRC THINGS
+
+
BASE_TIMEZONE = "Asia/Shanghai"
diff --git a/run_page/keep_sync.py b/run_page/keep_sync.py
index ae75e4a6c67..be5c020614a 100644
--- a/run_page/keep_sync.py
+++ b/run_page/keep_sync.py
@@ -1,7 +1,6 @@
import argparse
import base64
import json
-import math
import os
import time
import zlib
@@ -16,12 +15,16 @@
from Crypto.Cipher import AES
from generator import Generator
from utils import adjust_time
+import xml.etree.ElementTree as ET
# need to test
LOGIN_API = "https://api.gotokeep.com/v1.1/users/login"
RUN_DATA_API = "https://api.gotokeep.com/pd/v3/stats/detail?dateUnit=all&type=running&lastDate={last_date}"
RUN_LOG_API = "https://api.gotokeep.com/pd/v3/runninglog/{run_id}"
+HR_FRAME_THRESHOLD_IN_DECISECOND = 100  # Maximum time difference to consider a data point as the nearest; the unit is decisecond (1/10 second)
+
+TIMESTAMP_THRESHOLD_IN_DECISECOND = 3_600_000  # Threshold for target timestamp adjustment; the unit of timestamp is decisecond (1/10 second), so 3_600_000 stands for 100 hours of sports time. 100h = 100 * 60 * 60 * 10
# If your points need trans from gcj02 to wgs84 coordinate which use by Mapbox
TRANS_GCJ02_TO_WGS84 = True
@@ -88,6 +91,17 @@ def parse_raw_data_to_nametuple(
keep_id = run_data["id"].split("_")[1]
start_time = run_data["startTime"]
+ avg_heart_rate = None
+ decoded_hr_data = []
+ if run_data["heartRate"]:
+ avg_heart_rate = run_data["heartRate"].get("averageHeartRate", None)
+ heart_rate_data = run_data["heartRate"].get("heartRates", None)
+ if heart_rate_data is not None:
+ decoded_hr_data = decode_runmap_data(heart_rate_data)
+ # fix #66
+ if avg_heart_rate and avg_heart_rate < 0:
+ avg_heart_rate = None
+
if run_data["geoPoints"]:
run_points_data = decode_runmap_data(run_data["geoPoints"], True)
run_points_data_gpx = run_points_data
@@ -99,20 +113,20 @@ def parse_raw_data_to_nametuple(
for i, p in enumerate(run_points_data_gpx):
p["latitude"] = run_points_data[i][0]
p["longitude"] = run_points_data[i][1]
- else:
- run_points_data = [[p["latitude"], p["longitude"]] for p in run_points_data]
+
+ for p in run_points_data_gpx:
+ p_hr = find_nearest_hr(decoded_hr_data, int(p["timestamp"]), start_time)
+ if p_hr:
+ p["hr"] = p_hr
if with_download_gpx:
- if str(keep_id) not in old_gpx_ids:
+ if (
+ str(keep_id) not in old_gpx_ids
+ and run_data["dataType"] == "outdoorRunning"
+ ):
gpx_data = parse_points_to_gpx(run_points_data_gpx, start_time)
download_keep_gpx(gpx_data, str(keep_id))
else:
print(f"ID {keep_id} no gps data")
- heart_rate = None
- if run_data["heartRate"]:
- heart_rate = run_data["heartRate"].get("averageHeartRate", None)
- # fix #66
- if heart_rate and heart_rate < 0:
- heart_rate = None
polyline_str = polyline.encode(run_points_data) if run_points_data else ""
start_latlng = start_point(*run_points_data[0]) if run_points_data else None
start_date = datetime.utcfromtimestamp(start_time / 1000)
@@ -133,7 +147,7 @@ def parse_raw_data_to_nametuple(
"start_date_local": datetime.strftime(start_date_local, "%Y-%m-%d %H:%M:%S"),
"end_local": datetime.strftime(end_local, "%Y-%m-%d %H:%M:%S"),
"length": run_data["distance"],
- "average_heartrate": int(heart_rate) if heart_rate else None,
+ "average_heartrate": int(avg_heart_rate) if avg_heart_rate else None,
"map": run_map(polyline_str),
"start_latlng": start_latlng,
"distance": run_data["distance"],
@@ -173,18 +187,34 @@ def get_all_keep_tracks(email, password, old_tracks_ids, with_download_gpx=False
def parse_points_to_gpx(run_points_data, start_time):
- # future to support heart rate
+ """
+ Convert run points data to GPX format.
+
+    Args:
+        run_points_data (list of dict): A list of run data points.
+        start_time (int): The start time for adjusting timestamps. Note that the unit of start_time is millisecond.
+
+ Returns:
+ gpx_data (str): GPX data in string format.
+ """
points_dict_list = []
+    # early timestamp fields in Keep's data stand for delta time, but in newer data the timestamp field stands for exact time,
+    # so it doesn't need the extra start_time added
+ if run_points_data[0]["timestamp"] > TIMESTAMP_THRESHOLD_IN_DECISECOND:
+ start_time = 0
+
for point in run_points_data:
points_dict = {
"latitude": point["latitude"],
"longitude": point["longitude"],
"time": datetime.utcfromtimestamp(
- (point["timestamp"] * 100 + start_time) / 1000
+ (point["timestamp"] * 100 + start_time)
+                / 1000  # note that the timestamp of a point is decisecond (1/10 second)
),
+ "elevation": point.get("verticalAccuracy"),
+ "hr": point.get("hr"),
}
- if "verticalAccuracy" in point:
- points_dict["elevation"] = point["verticalAccuracy"]
points_dict_list.append(points_dict)
gpx = gpxpy.gpx.GPX()
gpx.nsmap["gpxtpx"] = "http://www.garmin.com/xmlschemas/TrackPointExtension/v1"
@@ -196,12 +226,67 @@ def parse_points_to_gpx(run_points_data, start_time):
gpx_segment = gpxpy.gpx.GPXTrackSegment()
gpx_track.segments.append(gpx_segment)
for p in points_dict_list:
- point = gpxpy.gpx.GPXTrackPoint(**p)
+ point = gpxpy.gpx.GPXTrackPoint(
+ latitude=p["latitude"],
+ longitude=p["longitude"],
+ time=p["time"],
+ elevation=p.get("elevation"),
+ )
+            if p.get("hr") is not None:
+                gpx_extension_hr = ET.fromstring(
+                    f"""<gpxtpx:TrackPointExtension xmlns:gpxtpx="http://www.garmin.com/xmlschemas/TrackPointExtension/v1">
+                    <gpxtpx:hr>{p["hr"]}</gpxtpx:hr>
+                    </gpxtpx:TrackPointExtension>
+                    """
+                )
+                point.extensions.append(gpx_extension_hr)
gpx_segment.points.append(point)
-
return gpx.to_xml()
+def find_nearest_hr(
+ hr_data_list, target_time, start_time, threshold=HR_FRAME_THRESHOLD_IN_DECISECOND
+):
+ """
+ Find the nearest heart rate data point to the target time.
+    If no suitable HR data can be found within the specified time frame (within 10 seconds by default), no heart rate is returned.
+ Args:
+        hr_data_list (list of dict): A list of heart rate data points, where each point is a dictionary
+ containing at least "timestamp" and "beatsPerMinute" keys.
+        target_time (float): The target timestamp for which to find the nearest heart rate data point. Please note that the unit of target_time is decisecond,
+            i.e. 1/10 of a second — this is very unusual! So to convert a target_time to seconds we need to divide by 10, and to convert it to milliseconds
+            we need to multiply by 100.
+        start_time (float): The reference start time. The unit of start_time is a normal millisecond timestamp.
+ threshold (float, optional): The maximum allowed time difference to consider a data point as the nearest.
+                                     Default is HR_FRAME_THRESHOLD_IN_DECISECOND; the unit is decisecond (1/10 second)
+
+ Returns:
+ int or None: The heart rate value of the nearest data point, or None if no suitable data point is found.
+ """
+ closest_element = None
+ # init difference value
+ min_difference = float("inf")
+ if target_time > TIMESTAMP_THRESHOLD_IN_DECISECOND:
+ target_time = (
+ target_time * 100 - start_time
+        ) / 100  # note that the unit of target_time is decisecond (1/10 second) and the unit of start_time is normal millisecond
+
+ for item in hr_data_list:
+ timestamp = item["timestamp"]
+ difference = abs(timestamp - target_time)
+
+ if difference <= threshold and difference < min_difference:
+ closest_element = item
+ min_difference = difference
+
+ if closest_element:
+ hr = closest_element.get("beatsPerMinute")
+ if hr and hr > 0:
+ return hr
+
+ return None
+
+
def download_keep_gpx(gpx_data, keep_id):
try:
print(f"downloading keep_id {str(keep_id)} gpx")
diff --git a/run_page/nike_sync.py b/run_page/nike_sync.py
index 35839055c5c..d7599e0fd63 100644
--- a/run_page/nike_sync.py
+++ b/run_page/nike_sync.py
@@ -1,4 +1,5 @@
import argparse
+from base64 import b64decode
import json
import logging
import os.path
@@ -11,13 +12,10 @@
import httpx
from config import (
BASE_TIMEZONE,
- BASE_URL,
GPX_FOLDER,
JSON_FILE,
- NIKE_CLIENT_ID,
OUTPUT_DIR,
SQL_FILE,
- TOKEN_REFRESH_URL,
run_map,
)
from generator import Generator
@@ -27,6 +25,16 @@
# logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("nike_sync")
+BASE_URL = "https://api.nike.com/sport/v3/me"
+TOKEN_REFRESH_URL = "https://api.nike.com/idn/shim/oauth/2.0/token"
+NIKE_CLIENT_ID = "VmhBZWFmRUdKNkc4ZTlEeFJVejhpRTUwQ1o5TWlKTUc="
+NIKE_UX_ID = "Y29tLm5pa2Uuc3BvcnQucnVubmluZy5pb3MuNS4xNQ=="
+NIKE_HEADERS = {
+ "Host": "api.nike.com",
+ "Accept": "application/json",
+ "Content-Type": "application/json",
+}
+
class Nike:
def __init__(self, refresh_token):
@@ -34,10 +42,12 @@ def __init__(self, refresh_token):
response = self.client.post(
TOKEN_REFRESH_URL,
+ headers=NIKE_HEADERS,
json={
"refresh_token": refresh_token,
- "client_id": NIKE_CLIENT_ID,
+ "client_id": b64decode(NIKE_CLIENT_ID).decode(),
"grant_type": "refresh_token",
+ "ux_id": b64decode(NIKE_UX_ID).decode(),
},
timeout=60,
)
diff --git a/run_page/strava_to_garmin_sync.py b/run_page/strava_to_garmin_sync.py
index 41a786136b3..34adba7d7a5 100644
--- a/run_page/strava_to_garmin_sync.py
+++ b/run_page/strava_to_garmin_sync.py
@@ -92,7 +92,7 @@ async def upload_to_activities(
return files_list
# strava rate limit
- for i in strava_activities[: len(strava_activities)]:
+ for i in sorted(strava_activities, key=lambda i: int(i.id)):
try:
data = strava_web_client.get_activity_data(i.id, fmt=format)
files_list.append(data)
diff --git a/src/components/RunTable/RunRow.tsx b/src/components/RunTable/RunRow.tsx
index 9de84cf1153..11cdbb5aa5b 100644
--- a/src/components/RunTable/RunRow.tsx
+++ b/src/components/RunTable/RunRow.tsx
@@ -1,10 +1,10 @@
import React from 'react';
-import { formatPace, colorFromType, formatRunTime, Activity } from '@/utils/utils';
+import { formatPace, colorFromType, formatRunTime, Activity, RunIds } from '@/utils/utils';
import styles from './style.module.scss';
interface IRunRowProperties {
elementIndex: number;
- locateActivity: (_date: string) => void;
+ locateActivity: (_runIds: RunIds) => void;
run: Activity;
runIndex: number;
setRunIndex: (_ndex: number) => void;
@@ -17,7 +17,11 @@ const RunRow = ({ elementIndex, locateActivity, run, runIndex, setRunIndex }: IR
const type = run.type;
const runTime = formatRunTime(run.moving_time);
const handleClick = () => {
- if (runIndex === elementIndex) return;
+ if (runIndex === elementIndex) {
+ setRunIndex(-1);
+ locateActivity([]);
+ return
+ };
setRunIndex(elementIndex);
locateActivity([run.run_id]);
};
diff --git a/src/components/RunTable/index.tsx b/src/components/RunTable/index.tsx
index 9a17909b9e3..5a2a31a9de8 100644
--- a/src/components/RunTable/index.tsx
+++ b/src/components/RunTable/index.tsx
@@ -4,13 +4,14 @@ import {
sortDateFuncReverse,
convertMovingTime2Sec,
Activity,
+ RunIds,
} from '@/utils/utils';
import RunRow from './RunRow';
import styles from './style.module.scss';
interface IRunTableProperties {
runs: Activity[];
- locateActivity: (_date: string) => void;
+ locateActivity: (_runIds: RunIds) => void;
setActivity: (_runs: Activity[]) => void;
runIndex: number;
setRunIndex: (_index: number) => void;
diff --git a/src/components/RunTable/style.module.scss b/src/components/RunTable/style.module.scss
index b0e02d1c19d..0ae00c8c6a5 100644
--- a/src/components/RunTable/style.module.scss
+++ b/src/components/RunTable/style.module.scss
@@ -26,6 +26,7 @@
}
.runRow {
+ cursor: pointer;
td {
padding: 0.5rem;
border: 0;
diff --git a/src/pages/index.tsx b/src/pages/index.tsx
index 76573268b71..b3b8c0e4391 100644
--- a/src/pages/index.tsx
+++ b/src/pages/index.tsx
@@ -22,6 +22,7 @@ import {
scrollToMap,
sortDateFunc,
titleForShow,
+ RunIds,
} from '@/utils/utils';
const Index = () => {
@@ -50,7 +51,7 @@ const Index = () => {
scrollToMap();
setActivity(filterAndSortRuns(activities, item, func, sortDateFunc));
setRunIndex(-1);
- setTitle(`${item} ${name} Heatmap`);
+ setTitle(`${item} ${name} Running Heatmap`);
};
const changeYear = (y: string) => {
@@ -79,10 +80,13 @@ const Index = () => {
changeByItem(type, 'Type', filterTypeRuns, false);
};
- const locateActivity = (runIds: [Number]) => {
+
+ const locateActivity = (runIds: RunIds) => {
const ids = new Set(runIds);
- const selectedRuns = runs.filter((r) => ids.has(r.run_id));
+ const selectedRuns = !runIds.length
+ ? runs
+ : runs.filter((r: any) => ids.has(r.run_id));
if (!selectedRuns.length) {
return;
diff --git a/src/utils/utils.ts b/src/utils/utils.ts
index 32b9ab3f5c5..387496ee3f3 100644
--- a/src/utils/utils.ts
+++ b/src/utils/utils.ts
@@ -24,6 +24,8 @@ import { FeatureCollection, LineString } from 'geojson';
export type Coordinate = [number, number];
+export type RunIds = Array<number> | [];
+
export interface Activity {
run_id: number;
name: string;