Hey, I'm just getting started with unit tests and mocks in Python. I'm trying to test a function that takes a single-column DataFrame and returns a float after some calculation based on the DataFrame's values.
import unittest
from unittest.mock import MagicMock

def avg_annual_fcf_growth_rate(fcf_data_frame):
    delta = fcf_data_frame.iloc[9][0] - fcf_data_frame.iloc[0][0]
    avg_growth_rate = (delta ** (1 / 9)) - 1
    return avg_growth_rate

class Test_DCF(unittest.TestCase):
    def test_fcf_calculation(self):
        mock_fcf = MagicMock()
        # This is where I try to stub the two values the function reads:
        mock_fcf.iloc[9][0].return_value = 100
        mock_fcf.iloc[0][0].return_value = 10
        result = avg_annual_fcf_growth_rate(mock_fcf)
        expected = ((100 - 10) ** (1 / 9)) - 1
        self.assertEqual(result, expected)

if __name__ == '__main__':
    unittest.main()
# Sample dataframe:
Free Cash Flow USD Mil
2008-12 5114.0
2009-12 10909.0
2010-12 11915.0
2011-12 12285.0
2012-12 11175.0
2013-12 16617.0
2014-12 16825.0
2015-12 15409.0
2016-12 19581.0
2017-12 34068.0
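For context, when I call the function on a real DataFrame it does return a plain float, which is what I'd like the mock to reproduce. Here's a quick sketch with made-up row values (10 and 100 in rows 0 and 9, to match the test's numbers):

import pandas as pd

# Stand-in data: the function only reads rows 0 and 9, so I put the
# test's values (10 and 100) there and pad the rest with zeros.
fcf = pd.DataFrame([10, 0, 0, 0, 0, 0, 0, 0, 0, 100])

print(avg_annual_fcf_growth_rate(fcf))  # 0.6486864043382532 -- a plain float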
I'm running into an issue where the result is a MagicMock object instead of a float. I've looked around for answers, but I can't wrap my head around how to properly set the return value on the mock object.
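From what I've read so far, I suspect the problem is that mock_fcf.iloc[9][0] is itself a MagicMock, and assigning .return_value only changes what you get by calling that mock, which my function never does. A minimal check of my understanding (throwaway names, not my real test):

from unittest.mock import MagicMock

m = MagicMock()
m.iloc[9][0].return_value = 100
print(m.iloc[9][0])    # still a MagicMock, not 100
print(m.iloc[9][0]())  # 100 -- return_value only applies when it's called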
Output from running the test:
F
======================================================================
FAIL: test_fcf_calculation (__main__.Test_DCF)
----------------------------------------------------------------------
Traceback (most recent call last):
File "path/to/intrinsic_value_dcf_test.py", line 18, in test_fcf_calculation
self.assertEqual(result, expected)
AssertionError: <MagicMock name='mock.iloc.__getitem__().[56 chars]104'> != 0.6486864043382532
----------------------------------------------------------------------
Ran 1 test in 0.004s
FAILED (failures=1)
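For what it's worth, since [9] and [0] are really __getitem__ calls, I experimented with configuring __getitem__ on the mock directly. Something like the sketch below does make the mock hand back real numbers (the dict-based routing is just my own improvisation, so I'm not sure it's idiomatic):

from unittest.mock import MagicMock

mock_fcf = MagicMock()
# iloc[9] and iloc[0] go through __getitem__, so route them to plain
# dicts; the inner [0] lookup then yields a real number.
mock_fcf.iloc.__getitem__.side_effect = {9: {0: 100}, 0: {0: 10}}.get

result = avg_annual_fcf_growth_rate(mock_fcf)
print(result)  # 0.6486864043382532

Is configuring __getitem__ like this the right way to set these values, or is there a cleaner approach?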