Add new data to README, remove deprecated stuff, fix tests, v0.2.35
Ticker.recommendations*:
- add to README
- organise their unit tests
- remove redundant recommendations_history

Remove deprecated arguments from Ticker.history

Fix 'bad symbol' behaviour & tests
Fix some prices tests

Bump version 0.2.35
ValueRaider committed Jan 7, 2024
1 parent a7c41af commit a2f3cc7
Showing 10 changed files with 138 additions and 152 deletions.
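For orientation, here is a minimal usage sketch (illustrative only, not part of this commit) of the analyst-data accessors that the commit message above documents and tidies up; the ticker choice is arbitrary:

import yfinance as yf

msft = yf.Ticker("MSFT")

# Accessors now listed in the README:
recs = msft.recommendations             # analyst recommendations (DataFrame)
summary = msft.recommendations_summary  # currently an alias for recommendations
moves = msft.upgrades_downgrades        # upgrade/downgrade history (DataFrame)

# recommendations_history was a redundant alias for upgrades_downgrades and is
# removed in this release; use upgrades_downgrades instead.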
4 changes: 4 additions & 0 deletions CHANGELOG.rst
@@ -1,6 +1,10 @@
Change Log
===========

0.2.35
------
Internal fixes for 0.2.34

0.2.34
------
Features:
5 changes: 5 additions & 0 deletions README.md
@@ -111,6 +111,11 @@ msft.insider_transactions
msft.insider_purchases
msft.insider_roster_holders

# show recommendations
msft.recommendations
msft.recommendations_summary
msft.upgrades_downgrades

# Show future and historic earnings dates, returns at most next 4 quarters and last 8 quarters by default.
# Note: If more are needed use msft.get_earnings_dates(limit=XX) with increased limit argument.
msft.earnings_dates
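A small illustration (not part of this commit) of the limit argument mentioned in the note above; the value 12 is only an example:

# default: at most the next 4 quarters and last 8 quarters
msft.earnings_dates
# request more rows explicitly
msft.get_earnings_dates(limit=12)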
2 changes: 1 addition & 1 deletion meta.yaml
@@ -1,5 +1,5 @@
{% set name = "yfinance" %}
{% set version = "0.2.34" %}
{% set version = "0.2.35" %}

package:
name: "{{ name|lower }}"
83 changes: 10 additions & 73 deletions tests/prices.py
@@ -399,71 +399,23 @@ def test_dst_fix(self):
raise

def test_prune_post_intraday_us(self):
# Half-day before USA Thanksgiving. Yahoo normally
# Half-day at USA Thanksgiving. Yahoo normally
# returns an interval starting when regular trading closes,
# even if prepost=False.

# Setup
tkr = "AMZN"
interval = "1h"
interval_td = _dt.timedelta(hours=1)
time_open = _dt.time(9, 30)
time_close = _dt.time(16)
special_day = _dt.date(2022, 11, 25)
special_day = _dt.date(2023, 11, 24)
time_early_close = _dt.time(13)
dat = yf.Ticker(tkr, session=self.session)

# Run
start_d = special_day - _dt.timedelta(days=7)
end_d = special_day + _dt.timedelta(days=7)
df = dat.history(start=start_d, end=end_d, interval=interval, prepost=False, keepna=True)
tg_last_dt = df.loc[str(special_day)].index[-1]
self.assertTrue(tg_last_dt.time() < time_early_close)

# Test no other afternoons (or mornings) were pruned
start_d = _dt.date(special_day.year, 1, 1)
end_d = _dt.date(special_day.year+1, 1, 1)
df = dat.history(start=start_d, end=end_d, interval="1h", prepost=False, keepna=True)
last_dts = _pd.Series(df.index).groupby(df.index.date).last()
f_early_close = (last_dts+interval_td).dt.time < time_close
early_close_dates = last_dts.index[f_early_close].values
self.assertEqual(len(early_close_dates), 1)
self.assertEqual(early_close_dates[0], special_day)

first_dts = _pd.Series(df.index).groupby(df.index.date).first()
f_late_open = first_dts.dt.time > time_open
late_open_dates = first_dts.index[f_late_open]
self.assertEqual(len(late_open_dates), 0)

def test_prune_post_intraday_omx(self):
# Half-day before Sweden Christmas. Yahoo normally
# returns an interval starting when regular trading closes,
# even if prepost=False.
# If prepost=False, test that yfinance is removing prepost intervals.

# Setup
tkr = "AEC.ST"
interval = "1h"
interval_td = _dt.timedelta(hours=1)
time_open = _dt.time(9)
time_close = _dt.time(17, 30)
special_day = _dt.date(2022, 12, 23)
time_early_close = _dt.time(13, 2)
dat = yf.Ticker(tkr, session=self.session)

# Half trading day Jan 5, Apr 14, May 25, Jun 23, Nov 4, Dec 23, Dec 30
half_days = [_dt.date(special_day.year, x[0], x[1]) for x in [(1, 5), (4, 14), (5, 25), (6, 23), (11, 4), (12, 23), (12, 30)]]

# Yahoo has incorrectly classified afternoon of 2022-04-13 as post-market.
# Nothing yfinance can do because Yahoo doesn't return data with prepost=False.
# But need to handle in this test.
expected_incorrect_half_days = [_dt.date(2022, 4, 13)]
half_days = sorted(half_days+expected_incorrect_half_days)

# Run
start_d = special_day - _dt.timedelta(days=7)
end_d = special_day + _dt.timedelta(days=7)
df = dat.history(start=start_d, end=end_d, interval=interval, prepost=False, keepna=True)
tg_last_dt = df.loc[str(special_day)].index[-1]
self.assertTrue(tg_last_dt.time() < time_early_close)

@@ -472,40 +424,25 @@ def test_prune_post_intraday_omx(self):
end_d = _dt.date(special_day.year+1, 1, 1)
df = dat.history(start=start_d, end=end_d, interval="1h", prepost=False, keepna=True)
last_dts = _pd.Series(df.index).groupby(df.index.date).last()
f_early_close = (last_dts+interval_td).dt.time < time_close
early_close_dates = last_dts.index[f_early_close].values
unexpected_early_close_dates = [d for d in early_close_dates if d not in half_days]
self.assertEqual(len(unexpected_early_close_dates), 0)
self.assertEqual(len(early_close_dates), len(half_days))
self.assertTrue(_np.equal(early_close_dates, half_days).all())

first_dts = _pd.Series(df.index).groupby(df.index.date).first()
f_late_open = first_dts.dt.time > time_open
late_open_dates = first_dts.index[f_late_open]
self.assertEqual(len(late_open_dates), 0)
dfd = dat.history(start=start_d, end=end_d, interval='1d', prepost=False, keepna=True)
self.assertTrue(_np.equal(dfd.index.date, _pd.to_datetime(last_dts.index).date).all())

def test_prune_post_intraday_asx(self):
# Setup
tkr = "BHP.AX"
interval_td = _dt.timedelta(hours=1)
time_open = _dt.time(10)
time_close = _dt.time(16, 12)
# No early closes in 2022
# No early closes in 2023
dat = yf.Ticker(tkr, session=self.session)

# Test no afternoons (or mornings) were pruned
start_d = _dt.date(2022, 1, 1)
end_d = _dt.date(2022+1, 1, 1)
# Test no other afternoons (or mornings) were pruned
start_d = _dt.date(2023, 1, 1)
end_d = _dt.date(2023+1, 1, 1)
df = dat.history(start=start_d, end=end_d, interval="1h", prepost=False, keepna=True)
last_dts = _pd.Series(df.index).groupby(df.index.date).last()
f_early_close = (last_dts+interval_td).dt.time < time_close
early_close_dates = last_dts.index[f_early_close].values
self.assertEqual(len(early_close_dates), 0)

first_dts = _pd.Series(df.index).groupby(df.index.date).first()
f_late_open = first_dts.dt.time > time_open
late_open_dates = first_dts.index[f_late_open]
self.assertEqual(len(late_open_dates), 0)
dfd = dat.history(start=start_d, end=end_d, interval='1d', prepost=False, keepna=True)
self.assertTrue(_np.equal(dfd.index.date, _pd.to_datetime(last_dts.index).date).all())

def test_weekly_2rows_fix(self):
tkr = "AMZN"
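The prices tests above all target one behaviour: on early-close days Yahoo can return an intraday interval starting at the regular close even with prepost=False, and yfinance should prune it. A stripped-down sketch of that check, assuming the same ticker and the 2023-11-24 US half-day used in the updated test:

import datetime as _dt
import yfinance as yf

dat = yf.Ticker("AMZN")
df = dat.history(start="2023-11-20", end="2023-11-28", interval="1h",
                 prepost=False, keepna=True)

# The market closed at 13:00 ET on 2023-11-24, so with post-market pruning in
# place the last hourly bar of that session should start before 13:00 rather
# than at the regular 16:00 close.
last_bar = df.loc["2023-11-24"].index[-1]
assert last_bar.time() < _dt.time(13)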
102 changes: 63 additions & 39 deletions tests/ticker.py
@@ -17,7 +17,7 @@

import unittest
import requests_cache
from typing import Union, Any
from typing import Union, Any, get_args, _GenericAlias
from urllib.parse import urlparse, parse_qs, urlencode, urlunparse

ticker_attributes = (
@@ -31,11 +31,10 @@
("actions", pd.DataFrame),
("shares", pd.DataFrame),
("info", dict),
("calendar", pd.DataFrame),
("calendar", dict),
("recommendations", Union[pd.DataFrame, dict]),
("recommendations_summary", Union[pd.DataFrame, dict]),
("upgrades_downgrades", Union[pd.DataFrame, dict]),
("recommendations_history", Union[pd.DataFrame, dict]),
("earnings", pd.DataFrame),
("quarterly_earnings", pd.DataFrame),
("quarterly_cashflow", pd.DataFrame),
@@ -58,7 +57,12 @@ def assert_attribute_type(testClass: unittest.TestCase, instance, attribute_name
try:
attribute = getattr(instance, attribute_name)
if attribute is not None and expected_type is not Any:
testClass.assertEqual(type(attribute), expected_type)
err_msg = f'{attribute_name} type is {type(attribute)} not {expected_type}'
if isinstance(expected_type, _GenericAlias) and expected_type.__origin__ is Union:
allowed_types = get_args(expected_type)
testClass.assertTrue(isinstance(attribute, allowed_types), err_msg)
else:
testClass.assertEqual(type(attribute), expected_type, err_msg)
except Exception:
testClass.assertRaises(
YFNotImplementedError, lambda: getattr(instance, attribute_name)
@@ -136,8 +140,8 @@ def test_goodTicker_withProxy(self):
tkr = "IBM"
dat = yf.Ticker(tkr, session=self.session, proxy=self.proxy)

dat._fetch_ticker_tz(timeout=5)
dat._get_ticker_tz(timeout=5)
dat._fetch_ticker_tz(proxy=None, timeout=5)
dat._get_ticker_tz(proxy=None, timeout=5)
dat.history(period="1wk")

for attribute_name, attribute_type in ticker_attributes:
@@ -654,6 +658,24 @@ def test_cash_flow_alt_names(self):
def test_bad_freq_value_raises_exception(self):
self.assertRaises(ValueError, lambda: self.ticker.get_cashflow(freq="badarg"))

def test_calendar(self):
data = self.ticker.calendar
self.assertIsInstance(data, dict, "data has wrong type")
self.assertTrue(len(data) > 0, "data is empty")
self.assertIn("Earnings Date", data.keys(), "data missing expected key")
self.assertIn("Earnings Average", data.keys(), "data missing expected key")
self.assertIn("Earnings Low", data.keys(), "data missing expected key")
self.assertIn("Earnings High", data.keys(), "data missing expected key")
self.assertIn("Revenue Average", data.keys(), "data missing expected key")
self.assertIn("Revenue Low", data.keys(), "data missing expected key")
self.assertIn("Revenue High", data.keys(), "data missing expected key")
# dividend date is not available for tested ticker GOOGL
if self.ticker.ticker != "GOOGL":
self.assertIn("Dividend Date", data.keys(), "data missing expected key")
# ex-dividend date is not always available
data_cached = self.ticker.calendar
self.assertIs(data, data_cached, "data not cached")
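# --- Illustrative sketch, not part of this commit -----------------------------
# Ticker.calendar now returns a plain dict rather than a DataFrame, so the
# values asserted above are read by key:
import yfinance as yf
cal = yf.Ticker("MSFT").calendar
earnings_dates = cal.get("Earnings Date")                        # upcoming earnings date(s)
eps_range = (cal.get("Earnings Low"), cal.get("Earnings High"))  # analyst EPS range
revenue_avg = cal.get("Revenue Average")
# ------------------------------------------------------------------------------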

# Below will fail because not ported to Yahoo API

# def test_sustainability(self):
@@ -664,6 +686,30 @@ def test_bad_freq_value_raises_exception(self):
# data_cached = self.ticker.sustainability
# self.assertIs(data, data_cached, "data not cached")

# def test_shares(self):
# data = self.ticker.shares
# self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
# self.assertFalse(data.empty, "data is empty")


class TestTickerAnalysts(unittest.TestCase):
session = None

@classmethod
def setUpClass(cls):
cls.session = session_gbl

@classmethod
def tearDownClass(cls):
if cls.session is not None:
cls.session.close()

def setUp(self):
self.ticker = yf.Ticker("GOOGL", session=self.session)

def tearDown(self):
self.ticker = None

def test_recommendations(self):
data = self.ticker.recommendations
data_summary = self.ticker.recommendations_summary
@@ -674,18 +720,16 @@ def test_recommendations(self):
data_cached = self.ticker.recommendations
self.assertIs(data, data_cached, "data not cached")

# def test_recommendations_summary(self): # currently alias for recommendations
# data = self.ticker.recommendations_summary
# self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
# self.assertFalse(data.empty, "data is empty")
def test_recommendations_summary(self): # currently alias for recommendations
data = self.ticker.recommendations_summary
self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
self.assertFalse(data.empty, "data is empty")

# data_cached = self.ticker.recommendations_summary
# self.assertIs(data, data_cached, "data not cached")
data_cached = self.ticker.recommendations_summary
self.assertIs(data, data_cached, "data not cached")

def test_recommendations_history(self): # alias for upgrades_downgrades
def test_upgrades_downgrades(self):
data = self.ticker.upgrades_downgrades
data_history = self.ticker.recommendations_history
self.assertTrue(data.equals(data_history))
self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
self.assertFalse(data.empty, "data is empty")
self.assertTrue(len(data.columns) == 4, "data has wrong number of columns")
@@ -695,6 +739,8 @@ def test_recommendations_history(self): # alias for upgrades_downgrades
data_cached = self.ticker.upgrades_downgrades
self.assertIs(data, data_cached, "data not cached")

# Below will fail because not ported to Yahoo API

# def test_analyst_price_target(self):
# data = self.ticker.analyst_price_target
# self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
@@ -711,28 +757,6 @@ def test_recommendations_history(self): # alias for upgrades_downgrades
# data_cached = self.ticker.revenue_forecasts
# self.assertIs(data, data_cached, "data not cached")

def test_calendar(self):
data = self.ticker.calendar
self.assertIsInstance(data, dict, "data has wrong type")
self.assertTrue(len(data) > 0, "data is empty")
self.assertIn("Earnings Date", data.keys(), "data missing expected key")
self.assertIn("Earnings Average", data.keys(), "data missing expected key")
self.assertIn("Earnings Low", data.keys(), "data missing expected key")
self.assertIn("Earnings High", data.keys(), "data missing expected key")
self.assertIn("Revenue Average", data.keys(), "data missing expected key")
self.assertIn("Revenue Low", data.keys(), "data missing expected key")
self.assertIn("Revenue High", data.keys(), "data missing expected key")
# dividend date is not available for tested ticker GOOGL
if self.ticker.ticker != "GOOGL":
self.assertIn("Dividend Date", data.keys(), "data missing expected key")
# ex-dividend date is not always available
data_cached = self.ticker.calendar
self.assertIs(data, data_cached, "data not cached")

# def test_shares(self):
# data = self.ticker.shares
# self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
# self.assertFalse(data.empty, "data is empty")


class TestTickerInfo(unittest.TestCase):
@@ -777,11 +801,11 @@ def test_complementary_info(self):

# We don't expect this one to have a trailing PEG ratio
data1 = self.tickers[0].info
self.assertEqual(data1['trailingPegRatio'], None)
self.assertIsNone(data1['trailingPegRatio'])

# This one should have a trailing PEG ratio
data2 = self.tickers[2].info
self.assertEqual(data2['trailingPegRatio'], 1.2713)
self.assertIsInstance(data2['trailingPegRatio'], float)
pass

# def test_fast_info_matches_info(self):
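One change above worth calling out: assert_attribute_type now accepts Union[...] expected types (e.g. Union[pd.DataFrame, dict] for the recommendations attributes). A self-contained sketch of the same idea, using typing's public get_origin/get_args helpers instead of the private _GenericAlias check used in the test:

from typing import Any, Union, get_args, get_origin

import pandas as pd

def matches_expected_type(value, expected_type) -> bool:
    """Return True if value is of expected_type, unpacking Union[...] members."""
    if expected_type is Any:
        return True
    if get_origin(expected_type) is Union:
        # isinstance() cannot take a Union directly; unpack its members first.
        return isinstance(value, get_args(expected_type))
    return type(value) is expected_type

print(matches_expected_type(pd.DataFrame(), Union[pd.DataFrame, dict]))  # True
print(matches_expected_type({}, Union[pd.DataFrame, dict]))              # True
print(matches_expected_type(3.14, Union[pd.DataFrame, dict]))            # False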
15 changes: 0 additions & 15 deletions yfinance/base.py
@@ -86,7 +86,6 @@ def history(self, period="1mo", interval="1d",
start=None, end=None, prepost=False, actions=True,
auto_adjust=True, back_adjust=False, repair=False, keepna=False,
proxy=None, rounding=False, timeout=10,
debug=None, # deprecated
raise_errors=False) -> pd.DataFrame:
"""
:Parameters:
@@ -126,23 +125,12 @@
If not None stops waiting for a response after given number of
seconds. (Can also be a fraction of a second e.g. 0.01)
Default is 10 seconds.
debug: bool
If passed as False, will suppress message printing to console.
DEPRECATED, will be removed in future version
raise_errors: bool
If True, then raise errors as Exceptions instead of logging.
"""
logger = utils.get_yf_logger()
proxy = proxy or self.proxy

if debug is not None:
if debug:
utils.print_once(f"yfinance: Ticker.history(debug={debug}) argument is deprecated and will be removed in future version. Do this instead: logging.getLogger('yfinance').setLevel(logging.ERROR)")
logger.setLevel(logging.ERROR)
else:
utils.print_once(f"yfinance: Ticker.history(debug={debug}) argument is deprecated and will be removed in future version. Do this instead to suppress error messages: logging.getLogger('yfinance').setLevel(logging.CRITICAL)")
logger.setLevel(logging.CRITICAL)

start_user = start
end_user = end
if start or period is None or period.lower() == "max":
@@ -395,9 +383,6 @@

df = df[~df.index.duplicated(keep='first')] # must do before repair

if isinstance(repair, str) and repair=='silent':
utils.log_once(logging.WARNING, "yfinance: Ticker.history(repair='silent') value is deprecated and will be removed in future version. Repair now silent by default, use logging module to increase verbosity.")
repair = True
if repair:
# Do this before auto/back adjust
logger.debug(f'{self.ticker}: checking OHLC for repairs ...')
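The deprecation shims removed above pointed users at the logging module, which is the supported way to control yfinance's verbosity. A minimal sketch of that replacement for the removed debug argument:

import logging
import yfinance as yf

# The removed Ticker.history(debug=...) argument is replaced by standard
# logging configuration, as the old deprecation notices suggested:
logging.getLogger('yfinance').setLevel(logging.ERROR)       # show only errors
# logging.getLogger('yfinance').setLevel(logging.CRITICAL)  # suppress error messages too

df = yf.Ticker("MSFT").history(period="1mo")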