Merge branch 'upstream'
* upstream:
  feat: `RunRow` toggle selection & cursor pointer (yihong0618#551)
  perf: use auto instead of scroll (yihong0618#550)
  doc: add nrc detail written by @angolap
  Update strava_to_garmin_sync.py (yihong0618#544)
  Master (yihong0618#545)
  fix: nrc sync
  fix: gh-pages build: drop the ref while trigger from Run Data Sync (yihong0618#540)
  upd deploy to github-pages  (yihong0618#537)
  feat: reduce grid svg file size (yihong0618#535)
  feat:suport heart_rate in export gpx file (yihong0618#529)
  feat: add NavigationControl (yihong0618#534)

# Conflicts:
#	README-CN.md
#	README.md
#	src/components/RunTable/RunRow.tsx
#	src/pages/index.tsx
ben-29 committed Nov 26, 2023
2 parents ac30acf + 048e941 commit c1153e1
Showing 10 changed files with 142 additions and 33 deletions.
3 changes: 3 additions & 0 deletions .github/workflows/gh-pages.yml
@@ -47,6 +47,9 @@ jobs:
steps:
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- uses: actions/checkout@v3
with:
# if your default branch is not master, please change it here
ref: master

- name: Cache Data Files
if: inputs.save_data_in_github_cache
7 changes: 3 additions & 4 deletions run_page/config.py
@@ -20,10 +20,9 @@
JSON_FILE = os.path.join(parent, "src", "static", "activities.json")
SYNCED_FILE = os.path.join(parent, "imported.json")

# TODO: Move into nike_sync
BASE_URL = "https://api.nike.com/sport/v3/me"
TOKEN_REFRESH_URL = "https://unite.nike.com/tokenRefresh"
NIKE_CLIENT_ID = "HlHa2Cje3ctlaOqnxvgZXNaAs7T9nAuH"
# TODO: Move into nike_sync NRC THINGS


BASE_TIMEZONE = "Asia/Shanghai"


119 changes: 102 additions & 17 deletions run_page/keep_sync.py
@@ -1,7 +1,6 @@
import argparse
import base64
import json
import math
import os
import time
import zlib
@@ -16,12 +15,16 @@
from Crypto.Cipher import AES
from generator import Generator
from utils import adjust_time
import xml.etree.ElementTree as ET

# need to test
LOGIN_API = "https://api.gotokeep.com/v1.1/users/login"
RUN_DATA_API = "https://api.gotokeep.com/pd/v3/stats/detail?dateUnit=all&type=running&lastDate={last_date}"
RUN_LOG_API = "https://api.gotokeep.com/pd/v3/runninglog/{run_id}"

HR_FRAME_THRESHOLD_IN_DECISECOND = 100  # Maximum time difference to consider a data point as the nearest match; the unit is decisecond (1/10 second)

TIMESTAMP_THRESHOLD_IN_DECISECOND = 3_600_000  # Threshold for target timestamp adjustment; the unit is decisecond (1/10 second), so 3_600_000 corresponds to 100 hours of activity time (100 h = 100 * 60 * 60 * 10)
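
Both thresholds above are expressed in deciseconds (tenths of a second), which is easy to misread; the following standalone arithmetic check (illustrative only, not part of keep_sync.py) spells out the conversions the comments describe:

# Sanity check of the decisecond units used by the two thresholds above.
assert 100 / 10 == 10                        # HR_FRAME_THRESHOLD_IN_DECISECOND: 100 deciseconds == 10 seconds
assert 3_600_000 == 100 * 60 * 60 * 10       # TIMESTAMP_THRESHOLD_IN_DECISECOND: 100 hours expressed in deciseconds
assert 3_600_000 * 100 == 360_000_000        # the same threshold converted to milliseconds (1 decisecond = 100 ms)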

# If your points need to be converted from gcj02 to the wgs84 coordinates used by Mapbox
TRANS_GCJ02_TO_WGS84 = True
@@ -88,6 +91,17 @@ def parse_raw_data_to_nametuple(
keep_id = run_data["id"].split("_")[1]

start_time = run_data["startTime"]
avg_heart_rate = None
decoded_hr_data = []
if run_data["heartRate"]:
avg_heart_rate = run_data["heartRate"].get("averageHeartRate", None)
heart_rate_data = run_data["heartRate"].get("heartRates", None)
if heart_rate_data is not None:
decoded_hr_data = decode_runmap_data(heart_rate_data)
# fix #66
if avg_heart_rate and avg_heart_rate < 0:
avg_heart_rate = None

if run_data["geoPoints"]:
run_points_data = decode_runmap_data(run_data["geoPoints"], True)
run_points_data_gpx = run_points_data
@@ -99,20 +113,20 @@
for i, p in enumerate(run_points_data_gpx):
p["latitude"] = run_points_data[i][0]
p["longitude"] = run_points_data[i][1]
else:
run_points_data = [[p["latitude"], p["longitude"]] for p in run_points_data]

for p in run_points_data_gpx:
p_hr = find_nearest_hr(decoded_hr_data, int(p["timestamp"]), start_time)
if p_hr:
p["hr"] = p_hr
if with_download_gpx:
if str(keep_id) not in old_gpx_ids:
if (
str(keep_id) not in old_gpx_ids
and run_data["dataType"] == "outdoorRunning"
):
gpx_data = parse_points_to_gpx(run_points_data_gpx, start_time)
download_keep_gpx(gpx_data, str(keep_id))
else:
print(f"ID {keep_id} no gps data")
heart_rate = None
if run_data["heartRate"]:
heart_rate = run_data["heartRate"].get("averageHeartRate", None)
# fix #66
if heart_rate and heart_rate < 0:
heart_rate = None
polyline_str = polyline.encode(run_points_data) if run_points_data else ""
start_latlng = start_point(*run_points_data[0]) if run_points_data else None
start_date = datetime.utcfromtimestamp(start_time / 1000)
Expand All @@ -133,7 +147,7 @@ def parse_raw_data_to_nametuple(
"start_date_local": datetime.strftime(start_date_local, "%Y-%m-%d %H:%M:%S"),
"end_local": datetime.strftime(end_local, "%Y-%m-%d %H:%M:%S"),
"length": run_data["distance"],
"average_heartrate": int(heart_rate) if heart_rate else None,
"average_heartrate": int(avg_heart_rate) if avg_heart_rate else None,
"map": run_map(polyline_str),
"start_latlng": start_latlng,
"distance": run_data["distance"],
@@ -173,18 +187,34 @@ def get_all_keep_tracks(email, password, old_tracks_ids, with_download_gpx=False


def parse_points_to_gpx(run_points_data, start_time):
# future to support heart rate
"""
Convert run points data to GPX format.
Args:
run_points_data (list of dict): A list of run data points.
start_time (int): The start time used to adjust timestamps. Note that the unit of start_time is millisecond.
Returns:
gpx_data (str): GPX data in string format.
"""
points_dict_list = []
# in early Keep data the timestamp field stores a delta relative to start_time, but in newer data it stores an absolute time,
# so there is no need to add start_time to it
if run_points_data[0]["timestamp"] > TIMESTAMP_THRESHOLD_IN_DECISECOND:
start_time = 0

for point in run_points_data:
points_dict = {
"latitude": point["latitude"],
"longitude": point["longitude"],
"time": datetime.utcfromtimestamp(
(point["timestamp"] * 100 + start_time) / 1000
(point["timestamp"] * 100 + start_time)
/ 1000  # note that the timestamp of a point is in deciseconds (1/10 second)
),
"elevation": point.get("verticalAccuracy"),
"hr": point.get("hr"),
}
if "verticalAccuracy" in point:
points_dict["elevation"] = point["verticalAccuracy"]
points_dict_list.append(points_dict)
gpx = gpxpy.gpx.GPX()
gpx.nsmap["gpxtpx"] = "http://www.garmin.com/xmlschemas/TrackPointExtension/v1"
@@ -196,12 +226,67 @@ def parse_points_to_gpx(run_points_data, start_time):
gpx_segment = gpxpy.gpx.GPXTrackSegment()
gpx_track.segments.append(gpx_segment)
for p in points_dict_list:
point = gpxpy.gpx.GPXTrackPoint(**p)
point = gpxpy.gpx.GPXTrackPoint(
latitude=p["latitude"],
longitude=p["longitude"],
time=p["time"],
elevation=p.get("elevation"),
)
if p.get("hr") is not None:
gpx_extension_hr = ET.fromstring(
f"""<gpxtpx:TrackPointExtension xmlns:gpxtpx="http://www.garmin.com/xmlschemas/TrackPointExtension/v1">
<gpxtpx:hr>{p["hr"]}</gpxtpx:hr>
</gpxtpx:TrackPointExtension>
"""
)
point.extensions.append(gpx_extension_hr)
gpx_segment.points.append(point)

return gpx.to_xml()


def find_nearest_hr(
hr_data_list, target_time, start_time, threshold=HR_FRAME_THRESHOLD_IN_DECISECOND
):
"""
Find the nearest heart rate data point to the target time.
If no suitable HR data point is found within the specified time frame (within 10 seconds by default), no heart rate is returned.
Args:
hr_data_list (list of dict): A list of heart rate data points, where each point is a dictionary
containing at least "timestamp" and "beatsPerMinute" keys.
target_time (float): The target timestamp for which to find the nearest heart rate data point. Note that the unit of target_time is decisecond (1/10 second),
which is unusual: divide by 10 to convert it to seconds, and multiply by 100 to convert it to milliseconds.
start_time (float): The reference start time, given as a normal millisecond timestamp.
threshold (float, optional): The maximum allowed time difference to consider a data point as the nearest.
Defaults to HR_FRAME_THRESHOLD_IN_DECISECOND; the unit is decisecond (1/10 second).
Returns:
int or None: The heart rate value of the nearest data point, or None if no suitable data point is found.
"""
closest_element = None
# init difference value
min_difference = float("inf")
if target_time > TIMESTAMP_THRESHOLD_IN_DECISECOND:
target_time = (
target_time * 100 - start_time
) / 100  # note that target_time is in deciseconds (1/10 second) while start_time is a normal millisecond timestamp

for item in hr_data_list:
timestamp = item["timestamp"]
difference = abs(timestamp - target_time)

if difference <= threshold and difference < min_difference:
closest_element = item
min_difference = difference

if closest_element:
hr = closest_element.get("beatsPerMinute")
if hr and hr > 0:
return hr

return None


def download_keep_gpx(gpx_data, keep_id):
try:
print(f"downloading keep_id {str(keep_id)} gpx")
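
To make the nearest-match rule in find_nearest_hr concrete, here is a minimal self-contained sketch with hypothetical sample values (it is not code from this commit; it only mirrors the rule described in the docstring): timestamps are compared in deciseconds, only candidates within HR_FRAME_THRESHOLD_IN_DECISECOND (100 deciseconds, i.e. 10 seconds) qualify, and the closest one wins; its value is what parse_points_to_gpx writes into the gpxtpx:hr extension.

# Minimal sketch of the nearest-heart-rate lookup (hypothetical sample data).
HR_FRAME_THRESHOLD_IN_DECISECOND = 100  # 100 deciseconds == 10 seconds

def nearest_hr(hr_data_list, target_time, threshold=HR_FRAME_THRESHOLD_IN_DECISECOND):
    closest_element = None
    min_difference = float("inf")
    for item in hr_data_list:
        difference = abs(item["timestamp"] - target_time)
        if difference <= threshold and difference < min_difference:
            closest_element = item
            min_difference = difference
    if closest_element:
        hr = closest_element.get("beatsPerMinute")
        if hr and hr > 0:
            return hr
    return None

# Timestamps are deciseconds relative to the activity start (divide by 10 for seconds).
samples = [
    {"timestamp": 0, "beatsPerMinute": 120},
    {"timestamp": 50, "beatsPerMinute": 131},   # 5 s into the run
    {"timestamp": 300, "beatsPerMinute": 142},  # 30 s into the run
]
print(nearest_hr(samples, 60))   # 131: the 5 s sample is only 1 s away
print(nearest_hr(samples, 180))  # None: the closest sample is 12 s away, outside the 10 s window
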
18 changes: 14 additions & 4 deletions run_page/nike_sync.py
@@ -1,4 +1,5 @@
import argparse
from base64 import b64decode
import json
import logging
import os.path
@@ -11,13 +12,10 @@
import httpx
from config import (
BASE_TIMEZONE,
BASE_URL,
GPX_FOLDER,
JSON_FILE,
NIKE_CLIENT_ID,
OUTPUT_DIR,
SQL_FILE,
TOKEN_REFRESH_URL,
run_map,
)
from generator import Generator
@@ -27,17 +25,29 @@
# logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("nike_sync")

BASE_URL = "https://api.nike.com/sport/v3/me"
TOKEN_REFRESH_URL = "https://api.nike.com/idn/shim/oauth/2.0/token"
NIKE_CLIENT_ID = "VmhBZWFmRUdKNkc4ZTlEeFJVejhpRTUwQ1o5TWlKTUc="
NIKE_UX_ID = "Y29tLm5pa2Uuc3BvcnQucnVubmluZy5pb3MuNS4xNQ=="
NIKE_HEADERS = {
"Host": "api.nike.com",
"Accept": "application/json",
"Content-Type": "application/json",
}


class Nike:
def __init__(self, refresh_token):
self.client = httpx.Client()

response = self.client.post(
TOKEN_REFRESH_URL,
headers=NIKE_HEADERS,
json={
"refresh_token": refresh_token,
"client_id": NIKE_CLIENT_ID,
"client_id": b64decode(NIKE_CLIENT_ID).decode(),
"grant_type": "refresh_token",
"ux_id": b64decode(NIKE_UX_ID).decode(),
},
timeout=60,
)
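
For reference, the token-refresh call above can be exercised on its own. The sketch below is a standalone reconstruction of just that request; the endpoint, headers, and payload fields are the ones shown in this hunk, while the refresh_access_token wrapper, the placeholder token, and the response handling are illustrative assumptions:

from base64 import b64decode

import httpx

TOKEN_REFRESH_URL = "https://api.nike.com/idn/shim/oauth/2.0/token"
NIKE_CLIENT_ID = "VmhBZWFmRUdKNkc4ZTlEeFJVejhpRTUwQ1o5TWlKTUc="
NIKE_UX_ID = "Y29tLm5pa2Uuc3BvcnQucnVubmluZy5pb3MuNS4xNQ=="
NIKE_HEADERS = {
    "Host": "api.nike.com",
    "Accept": "application/json",
    "Content-Type": "application/json",
}

def refresh_access_token(refresh_token: str) -> dict:
    # The client id and ux id are stored base64-encoded and decoded only when the request is built.
    response = httpx.post(
        TOKEN_REFRESH_URL,
        headers=NIKE_HEADERS,
        json={
            "refresh_token": refresh_token,
            "client_id": b64decode(NIKE_CLIENT_ID).decode(),
            "grant_type": "refresh_token",
            "ux_id": b64decode(NIKE_UX_ID).decode(),
        },
        timeout=60,
    )
    response.raise_for_status()
    return response.json()  # assumed to carry the refreshed access token used by nike_sync
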
2 changes: 1 addition & 1 deletion run_page/strava_to_garmin_sync.py
@@ -92,7 +92,7 @@ async def upload_to_activities(
return files_list

# strava rate limit
for i in strava_activities[: len(strava_activities)]:
for i in sorted(strava_activities, key=lambda i: int(i.id)):
try:
data = strava_web_client.get_activity_data(i.id, fmt=format)
files_list.append(data)
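
The one-line change above replaces the slice strava_activities[: len(strava_activities)] (which is just the whole list in its original order) with a sort on the numeric activity id, so activities are uploaded in ascending-id order. A tiny illustration with hypothetical stand-in objects; the ids are strings here purely to show why the int() cast avoids lexicographic ordering:

from types import SimpleNamespace

# Hypothetical stand-ins for Strava activities returned by the API.
strava_activities = [
    SimpleNamespace(id="10234"),
    SimpleNamespace(id="9876"),
    SimpleNamespace(id="10001"),
]

for i in sorted(strava_activities, key=lambda i: int(i.id)):
    print(i.id)  # 9876, 10001, 10234 -- numeric order, not the lexicographic "10001", "10234", "9876"
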
10 changes: 7 additions & 3 deletions src/components/RunTable/RunRow.tsx
@@ -1,10 +1,10 @@
import React from 'react';
import { formatPace, colorFromType, formatRunTime, Activity } from '@/utils/utils';
import { formatPace, colorFromType, formatRunTime, Activity, RunIds } from '@/utils/utils';
import styles from './style.module.scss';

interface IRunRowProperties {
elementIndex: number;
locateActivity: (_date: string) => void;
locateActivity: (_runIds: RunIds) => void;
run: Activity;
runIndex: number;
setRunIndex: (_ndex: number) => void;
@@ -17,7 +17,11 @@ const RunRow = ({ elementIndex, locateActivity, run, runIndex, setRunIndex }: IR
const type = run.type;
const runTime = formatRunTime(run.moving_time);
const handleClick = () => {
if (runIndex === elementIndex) return;
if (runIndex === elementIndex) {
setRunIndex(-1);
locateActivity([]);
return
};
setRunIndex(elementIndex);
locateActivity([run.run_id]);
};
3 changes: 2 additions & 1 deletion src/components/RunTable/index.tsx
@@ -4,13 +4,14 @@ import {
sortDateFuncReverse,
convertMovingTime2Sec,
Activity,
RunIds,
} from '@/utils/utils';
import RunRow from './RunRow';
import styles from './style.module.scss';

interface IRunTableProperties {
runs: Activity[];
locateActivity: (_date: string) => void;
locateActivity: (_runIds: RunIds) => void;
setActivity: (_runs: Activity[]) => void;
runIndex: number;
setRunIndex: (_index: number) => void;
1 change: 1 addition & 0 deletions src/components/RunTable/style.module.scss
@@ -26,6 +26,7 @@
}

.runRow {
cursor: pointer;
td {
padding: 0.5rem;
border: 0;
10 changes: 7 additions & 3 deletions src/pages/index.tsx
@@ -22,6 +22,7 @@ import {
scrollToMap,
sortDateFunc,
titleForShow,
RunIds,
} from '@/utils/utils';

const Index = () => {
@@ -50,7 +51,7 @@ const Index = () => {
scrollToMap();
setActivity(filterAndSortRuns(activities, item, func, sortDateFunc));
setRunIndex(-1);
setTitle(`${item} ${name} Heatmap`);
setTitle(`${item} ${name} Running Heatmap`);
};

const changeYear = (y: string) => {
@@ -79,10 +80,13 @@ const Index = () => {
changeByItem(type, 'Type', filterTypeRuns, false);
};

const locateActivity = (runIds: [Number]) => {

const locateActivity = (runIds: RunIds) => {
const ids = new Set(runIds);

const selectedRuns = runs.filter((r) => ids.has(r.run_id));
const selectedRuns = !runIds.length
? runs
: runs.filter((r: any) => ids.has(r.run_id));

if (!selectedRuns.length) {
return;
2 changes: 2 additions & 0 deletions src/utils/utils.ts
@@ -24,6 +24,8 @@ import { FeatureCollection, LineString } from 'geojson';

export type Coordinate = [number, number];

export type RunIds = Array<number> | [];

export interface Activity {
run_id: number;
name: string;
