Merge pull request #1657 from ranaroussi/feature/cookie-and-crumb
Add cookie & crumb to requests
ValueRaider committed Nov 18, 2023
2 parents 6d3d6b6 + 91efcd8 commit 4d4e56c
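
For orientation: Yahoo's API endpoints now demand a session cookie plus a matching "crumb" token on every request, and this PR teaches yfinance to acquire and attach both. The core logic lands in yfinance/data.py, which is not expanded in this view; the following is a minimal sketch of the handshake, assuming Yahoo's public getcrumb endpoint and an illustrative chart request:

```python
import requests

# Minimal sketch of the cookie-and-crumb handshake, not the literal
# yfinance/data.py implementation (that diff is not expanded here).
session = requests.Session()
session.headers["User-Agent"] = "Mozilla/5.0"  # Yahoo rejects the default python-requests UA

# 1. Touch a Yahoo host so the session receives the auth cookie
#    (the response body may be a 404 page; only the Set-Cookie header matters).
session.get("https://fc.yahoo.com")

# 2. Exchange the cookie for a crumb token.
crumb = session.get("https://query1.finance.yahoo.com/v1/test/getcrumb").text

# 3. Attach the crumb to subsequent API calls.
resp = session.get(
    "https://query2.finance.yahoo.com/v8/finance/chart/GOOGL",
    params={"interval": "1d", "range": "1y", "crumb": crumb},
)
print(resp.status_code)
```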
Showing 11 changed files with 804 additions and 267 deletions.
23 changes: 18 additions & 5 deletions tests/ticker.py
@@ -18,6 +18,8 @@
 import unittest
 import requests_cache
 from typing import Union, Any
+import re
+from urllib.parse import urlparse, parse_qs, urlencode, urlunparse
 
 ticker_attributes = (
     ("major_holders", pd.DataFrame),
@@ -76,7 +78,7 @@ def test_getTz(self):
         tkrs = ["IMP.JO", "BHG.JO", "SSW.JO", "BP.L", "INTC"]
         for tkr in tkrs:
             # First step: remove ticker from tz-cache
-            yf.utils.get_tz_cache().store(tkr, None)
+            yf.cache.get_tz_cache().store(tkr, None)
 
             # Test:
             dat = yf.Ticker(tkr, session=self.session)
@@ -295,14 +297,25 @@ def test_no_expensive_calls_introduced(self):
         will quickly trigger spam-block when doing bulk download of history data.
         """
         symbol = "GOOGL"
-        range = "1y"
+        period = "1y"
         with requests_cache.CachedSession(backend="memory") as session:
             ticker = yf.Ticker(symbol, session=session)
-            ticker.history(range)
-            actual_urls_called = tuple([r.url for r in session.cache.filter()])
+            ticker.history(period=period)
+            actual_urls_called = [r.url for r in session.cache.filter()]
+
+        # Remove 'crumb' argument
+        for i in range(len(actual_urls_called)):
+            u = actual_urls_called[i]
+            parsed_url = urlparse(u)
+            query_params = parse_qs(parsed_url.query)
+            query_params.pop('crumb', None)
+            query_params.pop('cookie', None)
+            u = urlunparse(parsed_url._replace(query=urlencode(query_params, doseq=True)))
+            actual_urls_called[i] = u
+        actual_urls_called = tuple(actual_urls_called)
 
         expected_urls = (
-            f"https://query2.finance.yahoo.com/v8/finance/chart/{symbol}?events=div%2Csplits%2CcapitalGains&includePrePost=False&interval=1d&range={range}",
+            f"https://query2.finance.yahoo.com/v8/finance/chart/{symbol}?events=div%2Csplits%2CcapitalGains&includePrePost=False&interval=1d&range={period}",
        )
        self.assertEqual(
            expected_urls,
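
Two things happen in this test: the local variable range is renamed to period, which frees Python's range builtin for the new cleanup loop, and every cached URL is normalized by dropping the per-session crumb (and cookie) query parameters before comparison. The same normalization in isolation, with a hypothetical crumb value:

```python
from urllib.parse import urlparse, parse_qs, urlencode, urlunparse

# Hypothetical URL; the crumb value changes every session.
url = "https://query2.finance.yahoo.com/v8/finance/chart/GOOGL?range=1y&crumb=Xyz123"
parsed = urlparse(url)
params = parse_qs(parsed.query)
params.pop('crumb', None)   # volatile auth token, never stable across runs
params.pop('cookie', None)
print(urlunparse(parsed._replace(query=urlencode(params, doseq=True))))
# https://query2.finance.yahoo.com/v8/finance/chart/GOOGL?range=1y
```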
3 changes: 2 additions & 1 deletion yfinance/__init__.py
@@ -23,7 +23,8 @@
 from .ticker import Ticker
 from .tickers import Tickers
 from .multi import download
-from .utils import set_tz_cache_location, enable_debug_mode
+from .utils import enable_debug_mode
+from .cache import set_tz_cache_location
 
 __version__ = version.version
 __author__ = "Ran Aroussi"
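
Note that set_tz_cache_location keeps its public name; it is simply re-exported from the new cache module, so existing user code keeps working. A usage sketch (the directory path is hypothetical):

```python
import yfinance as yf

# Point the timezone cache at a custom directory before any Ticker work.
yf.set_tz_cache_location("/tmp/yf-tz-cache")  # hypothetical path

dat = yf.Ticker("INTC")
print(dat.history(period="5d").index.tz)  # tz resolved via the relocated cache
```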
23 changes: 11 additions & 12 deletions yfinance/base.py
@@ -35,9 +35,8 @@
 import pandas as pd
 import requests
 
-from . import shared
-from . import utils
-from .data import TickerData
+from . import shared, utils, cache
+from .data import YfData
 from .scrapers.analysis import Analysis
 from .scrapers.fundamentals import Fundamentals
 from .scrapers.holders import Holders
@@ -69,12 +68,12 @@ def __init__(self, ticker, session=None):
         if utils.is_isin(self.ticker):
             self.ticker = utils.get_ticker_by_isin(self.ticker, None, session)
 
-        self._data: TickerData = TickerData(self.ticker, session=session)
+        self._data: YfData = YfData(session=session)
 
-        self._analysis = Analysis(self._data)
-        self._holders = Holders(self._data)
-        self._quote = Quote(self._data)
-        self._fundamentals = Fundamentals(self._data)
+        self._analysis = Analysis(self._data, ticker)
+        self._holders = Holders(self._data, ticker)
+        self._quote = Quote(self._data, ticker)
+        self._fundamentals = Fundamentals(self._data, ticker)
 
         self._fast_info = None
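The constructor change is the key design shift: YfData no longer takes a ticker, so one data object (which holds the shared cookie-and-crumb state) can serve every scraper, and each scraper now receives its symbol explicitly. A sketch of the resulting wiring; the Quote import path is assumed from the surrounding diff:

```python
from yfinance.data import YfData
from yfinance.scrapers.quote import Quote  # import path assumed, not shown in this diff

data = YfData(session=None)        # shared fetcher; cookie/crumb state lives here
quote_msft = Quote(data, "MSFT")   # each scraper carries its own symbol...
quote_aapl = Quote(data, "AAPL")   # ...while reusing the same authenticated session
```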
Expand Down Expand Up @@ -1642,20 +1641,20 @@ def map_signals_to_ranges(f, f_up, f_down):
def _get_ticker_tz(self, proxy, timeout):
if self._tz is not None:
return self._tz
cache = utils.get_tz_cache()
tz = cache.lookup(self.ticker)
c = cache.get_tz_cache()
tz = c.lookup(self.ticker)

if tz and not utils.is_valid_timezone(tz):
# Clear from cache and force re-fetch
cache.store(self.ticker, None)
c.store(self.ticker, None)
tz = None

if tz is None:
tz = self._fetch_ticker_tz(proxy, timeout)

if utils.is_valid_timezone(tz):
# info fetch is relatively slow so cache timezone
cache.store(self.ticker, tz)
c.store(self.ticker, tz)
else:
tz = None

Expand Down
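
The timezone cache has moved from yfinance.utils into the new yfinance.cache module, hence the local rename to c, which avoids shadowing the imported cache module. The lookup/store round-trip used above can be exercised directly; treat get_tz_cache as internal API:

```python
from yfinance import cache

tz_cache = cache.get_tz_cache()
tz_cache.store("INTC", "America/New_York")  # seed an entry
print(tz_cache.lookup("INTC"))              # America/New_York
tz_cache.store("INTC", None)                # clear it, forcing a re-fetch next time
```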
[Diffs for the remaining 8 of the 11 changed files were not loaded in this view.]
