Merge pull request #1745 from ranaroussi/main
sync main -> dev
ValueRaider committed Nov 19, 2023
2 parents 7432d29 + 9b6e35b commit af9a356
Showing 14 changed files with 875 additions and 325 deletions.
9 changes: 9 additions & 0 deletions CHANGELOG.rst
@@ -1,6 +1,15 @@
Change Log
===========

0.2.32
------
Add cookie & crumb to requests #1657

0.2.31
------
- Fix TZ cache exception blocking import #1705 #1709
- Fix merging pre-market events with intraday prices #1703

0.2.30
------
- Fix OperationalError #1698
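
The 0.2.32 entry above refers to Yahoo's cookie-and-crumb requirement (#1657). As a rough, hedged sketch of that flow — the endpoint URLs here are assumptions about Yahoo's public API, not yfinance's internal implementation:

import requests

session = requests.Session()
session.headers["User-Agent"] = "Mozilla/5.0"

# A request to a Yahoo property sets the session cookie
# (the response body itself is irrelevant here).
session.get("https://fc.yahoo.com", allow_redirects=True)

# The crumb endpoint returns a short token tied to that cookie.
crumb = session.get("https://query1.finance.yahoo.com/v1/test/getcrumb").text

# Later API calls send the crumb as a query parameter alongside the cookie.
resp = session.get(
    "https://query2.finance.yahoo.com/v10/finance/quoteSummary/MSFT",
    params={"modules": "price", "crumb": crumb},
)
print(resp.status_code)
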
2 changes: 1 addition & 1 deletion meta.yaml
@@ -1,5 +1,5 @@
{% set name = "yfinance" %}
{% set version = "0.2.31b1" %}
{% set version = "0.2.32" %}

package:
name: "{{ name|lower }}"
132 changes: 73 additions & 59 deletions tests/ticker.py
@@ -18,6 +18,8 @@
import unittest
import requests_cache
from typing import Union, Any
import re
from urllib.parse import urlparse, parse_qs, urlencode, urlunparse

ticker_attributes = (
("major_holders", pd.DataFrame),
@@ -76,7 +78,7 @@ def test_getTz(self):
tkrs = ["IMP.JO", "BHG.JO", "SSW.JO", "BP.L", "INTC"]
for tkr in tkrs:
# First step: remove ticker from tz-cache
yf.utils.get_tz_cache().store(tkr, None)
yf.cache.get_tz_cache().store(tkr, None)

# Test:
dat = yf.Ticker(tkr, session=self.session)
@@ -181,14 +183,25 @@ def test_no_expensive_calls_introduced(self):
will quickly trigger spam-block when doing bulk download of history data.
"""
symbol = "GOOGL"
range = "1y"
period = "1y"
with requests_cache.CachedSession(backend="memory") as session:
ticker = yf.Ticker(symbol, session=session)
ticker.history(range)
actual_urls_called = tuple([r.url for r in session.cache.filter()])
ticker.history(period=period)
actual_urls_called = [r.url for r in session.cache.filter()]

            # Remove 'crumb' and 'cookie' arguments so URLs compare deterministically
for i in range(len(actual_urls_called)):
u = actual_urls_called[i]
parsed_url = urlparse(u)
query_params = parse_qs(parsed_url.query)
query_params.pop('crumb', None)
query_params.pop('cookie', None)
u = urlunparse(parsed_url._replace(query=urlencode(query_params, doseq=True)))
actual_urls_called[i] = u
actual_urls_called = tuple(actual_urls_called)

expected_urls = (
f"https://query2.finance.yahoo.com/v8/finance/chart/{symbol}?events=div%2Csplits%2CcapitalGains&includePrePost=False&interval=1d&range={range}",
f"https://query2.finance.yahoo.com/v8/finance/chart/{symbol}?events=div%2Csplits%2CcapitalGains&includePrePost=False&interval=1d&range={period}",
)
self.assertEqual(
expected_urls,
@@ -211,76 +224,77 @@ def test_actions(self):
self.assertFalse(data.empty, "data is empty")


# Below will fail because not ported to Yahoo API
# class TestTickerEarnings(unittest.TestCase):
# session = None
class TestTickerEarnings(unittest.TestCase):
session = None

@classmethod
def setUpClass(cls):
cls.session = session_gbl

# @classmethod
# def setUpClass(cls):
# cls.session = session_gbl
@classmethod
def tearDownClass(cls):
if cls.session is not None:
cls.session.close()

# @classmethod
# def tearDownClass(cls):
# if cls.session is not None:
# cls.session.close()
def setUp(self):
self.ticker = yf.Ticker("GOOGL", session=self.session)

# def setUp(self):
# self.ticker = yf.Ticker("GOOGL", session=self.session)
def tearDown(self):
self.ticker = None

# def tearDown(self):
# self.ticker = None
def test_earnings_dates(self):
data = self.ticker.earnings_dates
self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
self.assertFalse(data.empty, "data is empty")

# def test_earnings(self):
# data = self.ticker.earnings
# self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
# self.assertFalse(data.empty, "data is empty")
def test_earnings_dates_with_limit(self):
# use ticker with lots of historic earnings
ticker = yf.Ticker("IBM")
limit = 110
data = ticker.get_earnings_dates(limit=limit)
self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
self.assertFalse(data.empty, "data is empty")
            self.assertEqual(len(data), limit, "Wrong number of rows")

# data_cached = self.ticker.earnings
# self.assertIs(data, data_cached, "data not cached")
data_cached = ticker.get_earnings_dates(limit=limit)
self.assertIs(data, data_cached, "data not cached")

# def test_quarterly_earnings(self):
# data = self.ticker.quarterly_earnings
# self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
# self.assertFalse(data.empty, "data is empty")
# Below will fail because not ported to Yahoo API

# data_cached = self.ticker.quarterly_earnings
# self.assertIs(data, data_cached, "data not cached")
# def test_earnings(self):
# data = self.ticker.earnings
# self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
# self.assertFalse(data.empty, "data is empty")

# def test_earnings_forecasts(self):
# data = self.ticker.earnings_forecasts
# self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
# self.assertFalse(data.empty, "data is empty")
# data_cached = self.ticker.earnings
# self.assertIs(data, data_cached, "data not cached")

# data_cached = self.ticker.earnings_forecasts
# self.assertIs(data, data_cached, "data not cached")
# def test_quarterly_earnings(self):
# data = self.ticker.quarterly_earnings
# self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
# self.assertFalse(data.empty, "data is empty")

# def test_earnings_dates(self):
# data = self.ticker.earnings_dates
# self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
# self.assertFalse(data.empty, "data is empty")
# data_cached = self.ticker.quarterly_earnings
# self.assertIs(data, data_cached, "data not cached")

# data_cached = self.ticker.earnings_dates
# self.assertIs(data, data_cached, "data not cached")
# def test_earnings_forecasts(self):
# data = self.ticker.earnings_forecasts
# self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
# self.assertFalse(data.empty, "data is empty")

# def test_earnings_trend(self):
# data = self.ticker.earnings_trend
# self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
# self.assertFalse(data.empty, "data is empty")
# data_cached = self.ticker.earnings_forecasts
# self.assertIs(data, data_cached, "data not cached")

# data_cached = self.ticker.earnings_trend
# self.assertIs(data, data_cached, "data not cached")
# data_cached = self.ticker.earnings_dates
# self.assertIs(data, data_cached, "data not cached")

# def test_earnings_dates_with_limit(self):
# # use ticker with lots of historic earnings
# ticker = yf.Ticker("IBM")
# limit = 110
# data = ticker.get_earnings_dates(limit=limit)
# self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
# self.assertFalse(data.empty, "data is empty")
# self.assertEqual(len(data), limit, "Wrong number or rows")
# def test_earnings_trend(self):
# data = self.ticker.earnings_trend
# self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
# self.assertFalse(data.empty, "data is empty")

# data_cached = ticker.get_earnings_dates(limit=limit)
# self.assertIs(data, data_cached, "data not cached")
# data_cached = self.ticker.earnings_trend
# self.assertIs(data, data_cached, "data not cached")


class TestTickerHolders(unittest.TestCase):
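For context on the re-enabled TestTickerEarnings tests above, a minimal usage sketch of the API they exercise — the ticker symbol and limit are illustrative, taken from the test itself:

import yfinance as yf

# get_earnings_dates() returns a DataFrame of past and upcoming earnings dates;
# limit controls how many rows are requested from Yahoo.
ibm = yf.Ticker("IBM")
earnings = ibm.get_earnings_dates(limit=110)
print(earnings.head())
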
3 changes: 2 additions & 1 deletion yfinance/__init__.py
@@ -23,7 +23,8 @@
from .ticker import Ticker
from .tickers import Tickers
from .multi import download
from .utils import set_tz_cache_location, enable_debug_mode
from .utils import enable_debug_mode
from .cache import set_tz_cache_location

__version__ = version.version
__author__ = "Ran Aroussi"
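
The __init__.py change above moves set_tz_cache_location from yfinance.utils to the new yfinance.cache module while keeping it exported at package level, so existing callers are unaffected. A minimal sketch of the unchanged public usage — the cache path is illustrative:

import yfinance as yf

# Point the timezone cache at a custom directory; call this before creating
# Ticker objects or downloading history so the new location takes effect.
yf.set_tz_cache_location("/tmp/yfinance-tz-cache")
msft = yf.Ticker("MSFT")
print(msft.history(period="5d").tail())
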
26 changes: 13 additions & 13 deletions yfinance/base.py
@@ -22,6 +22,7 @@
from __future__ import print_function

import datetime as _datetime
from io import StringIO
import json as _json
import logging
import time as _time
@@ -34,9 +35,8 @@
import pandas as pd
import requests

from . import shared
from . import utils
from .data import TickerData
from . import shared, utils, cache
from .data import YfData
from .scrapers.analysis import Analysis
from .scrapers.fundamentals import Fundamentals
from .scrapers.holders import Holders
@@ -69,12 +69,12 @@ def __init__(self, ticker, session=None, proxy=None):
if utils.is_isin(self.ticker):
self.ticker = utils.get_ticker_by_isin(self.ticker, None, session)

self._data: TickerData = TickerData(self.ticker, session=session)
self._data: YfData = YfData(session=session)

self._analysis = Analysis(self._data)
self._holders = Holders(self._data)
self._quote = Quote(self._data)
self._fundamentals = Fundamentals(self._data)
self._analysis = Analysis(self._data, ticker)
self._holders = Holders(self._data, ticker)
self._quote = Quote(self._data, ticker)
self._fundamentals = Fundamentals(self._data, ticker)

self._fast_info = None

@@ -1644,20 +1644,20 @@ def _get_ticker_tz(self,timeout, proxy=None):
proxy = proxy or self.proxy
if self._tz is not None:
return self._tz
cache = utils.get_tz_cache()
tz = cache.lookup(self.ticker)
c = cache.get_tz_cache()
tz = c.lookup(self.ticker)

if tz and not utils.is_valid_timezone(tz):
# Clear from cache and force re-fetch
cache.store(self.ticker, None)
c.store(self.ticker, None)
tz = None

if tz is None:
tz = self._fetch_ticker_tz(proxy, timeout)

if utils.is_valid_timezone(tz):
# info fetch is relatively slow so cache timezone
cache.store(self.ticker, tz)
c.store(self.ticker, tz)
else:
tz = None

@@ -2093,7 +2093,7 @@ def get_earnings_dates(self, limit=12, proxy=None) -> Optional[pd.DataFrame]:
"the issue. Thank you for your patience.")

try:
data = pd.read_html(data)[0]
data = pd.read_html(StringIO(data))[0]
except ValueError:
if page_offset == 0:
# Should not fail on first page
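
The get_earnings_dates change above wraps the HTML string in StringIO before handing it to pandas, since newer pandas versions (2.1+) deprecate passing literal HTML text straight to read_html. A small standalone sketch of the same pattern — the table content is made up:

from io import StringIO
import pandas as pd

html = ("<table><tr><th>Earnings Date</th><th>EPS Estimate</th></tr>"
        "<tr><td>2023-10-24</td><td>1.45</td></tr></table>")

# Wrapping the string in a file-like object avoids the FutureWarning that
# pandas raises for literal HTML input to read_html().
tables = pd.read_html(StringIO(html))
print(tables[0])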