 import nose
 from datetime import datetime
 
+import numpy as np
 import pandas as pd
 import pandas.io.data as web
-from pandas.util.testing import (network, assert_series_equal)
-from numpy.testing.decorators import slow
-import numpy as np
-
-import urllib2
+from pandas.util.testing import network, with_connectivity_check
 
 
 class TestGoogle(unittest.TestCase):
 
-    @network
+    @with_connectivity_check("http://www.google.com")
     def test_google(self):
         # asserts that google is minimally working and that it throws
-        # an excecption when DataReader can't get a 200 response from
+        # an exception when DataReader can't get a 200 response from
         # google
         start = datetime(2010, 1, 1)
         end = datetime(2013, 01, 27)
 
-        try:
-            self.assertEquals(
-                web.DataReader("F", 'google', start, end)['Close'][-1],
-                13.68)
-        except urllib2.URLError:
-            try:
-                urllib2.urlopen('http://www.google.com')
-            except urllib2.URLError:
-                raise nose.SkipTest
-            else:
-                raise
-
-    @network
-    def test_google_non_existent(self):
-        # asserts that google is minimally working and that it throws
-        # an excecption when DataReader can't get a 200 response from
-        # google
-        start = datetime(2010, 1, 1)
-        end = datetime(2013, 01, 27)
+        self.assertEquals(
+            web.DataReader("F", 'google', start, end)['Close'][-1],
+            13.68)
 
-        try:
-            self.assertRaises(
-                Exception,
-                lambda: web.DataReader("NON EXISTENT TICKER", 'google',
-                                       start, end))
-        except urllib2.URLError:
-            try:
-                urllib2.urlopen('http://www.google.com')
-            except urllib2.URLError:
-                raise nose.SkipTest
-            else:
-                raise
+        self.assertRaises(
+            Exception,
+            lambda: web.DataReader("NON EXISTENT TICKER", 'google',
+                                   start, end))
 
 
     @network
     def test_get_quote(self):
         self.assertRaises(NotImplementedError,
                           lambda: web.get_quote_google(pd.Series(['GOOG', 'AAPL', 'GOOG'])))
 
-    @network
+    @with_connectivity_check('http://www.google.com')
     def test_get_goog_volume(self):
-        try:
-            df = web.get_data_google('GOOG')
-            assert df.Volume.ix['OCT-08-2010'] == 2863473
-        except IOError:
-            try:
-                urllib2.urlopen('http://www.google.com')
-            except IOError:
-                raise nose.SkipTest
-            else:
-                raise
+        df = web.get_data_google('GOOG')
+        assert df.Volume.ix['OCT-08-2010'] == 2863473
 
-    @network
+    @with_connectivity_check('http://www.google.com')
     def test_get_multi1(self):
-        try:
-            sl = ['AAPL', 'AMZN', 'GOOG']
-            pan = web.get_data_google(sl, '2012')
-            ts = pan.Close.GOOG.index[pan.Close.AAPL > pan.Close.GOOG]
-            assert ts[0].dayofyear == 96
-        except IOError:
-            try:
-                urllib2.urlopen('http://www.google.com')
-            except IOError:
-                raise nose.SkipTest
-            else:
-                raise
+        sl = ['AAPL', 'AMZN', 'GOOG']
+        pan = web.get_data_google(sl, '2012')
+        ts = pan.Close.GOOG.index[pan.Close.AAPL > pan.Close.GOOG]
+        assert ts[0].dayofyear == 96
 
-    @network
+    @with_connectivity_check('http://www.google.com')
     def test_get_multi2(self):
-        try:
-            pan = web.get_data_google(['GE', 'MSFT', 'INTC'], 'JAN-01-12', 'JAN-31-12')
-            expected = [19.02, 28.23, 25.39]
-            result = pan.Close.ix['01-18-12'][['GE', 'MSFT', 'INTC']].tolist()
-            assert result == expected
-
-            # sanity checking
-            t = np.array(result)
-            assert np.issubdtype(t.dtype, np.floating)
-            assert t.shape == (3,)
-
-            expected = [[18.99, 28.4, 25.18],
-                        [18.58, 28.31, 25.13],
-                        [19.03, 28.16, 25.52],
-                        [18.81, 28.82, 25.87]]
-            result = pan.Open.ix['Jan-15-12':'Jan-20-12'][['GE', 'MSFT', 'INTC']].values
-            assert (result == expected).all()
-
-            # sanity checking
-            t = np.array(pan)
-            assert np.issubdtype(t.dtype, np.floating)
-        except IOError:
-            try:
-                urllib2.urlopen('http://www.google.com')
-            except IOError:
-                raise nose.SkipTest
-            else:
-                raise
+        pan = web.get_data_google(['GE', 'MSFT', 'INTC'], 'JAN-01-12', 'JAN-31-12')
+        expected = [19.02, 28.23, 25.39]
+        result = pan.Close.ix['01-18-12'][['GE', 'MSFT', 'INTC']].tolist()
+        assert result == expected
+
+        # sanity checking
+        t = np.array(result)
+        assert np.issubdtype(t.dtype, np.floating)
+        assert t.shape == (3,)
+
+        expected = [[18.99, 28.4, 25.18],
+                    [18.58, 28.31, 25.13],
+                    [19.03, 28.16, 25.52],
+                    [18.81, 28.82, 25.87]]
+        result = pan.Open.ix['Jan-15-12':'Jan-20-12'][['GE', 'MSFT', 'INTC']].values
+        assert (result == expected).all()
+
+        # sanity checking
+        t = np.array(pan)
+        assert np.issubdtype(t.dtype, np.floating)
 
 if __name__ == '__main__':
     nose.runmodule(argv=[__file__, '-vvs', '-x', '--pdb', '--pdb-failure'],
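
The point of this change is to factor the repeated try/except connectivity probe out of every test body and into a single decorator. For context, here is a minimal sketch of what such a decorator could look like, assuming the same skip-on-no-network semantics as the inlined code it replaces; this is an illustration only, not the actual pandas.util.testing.with_connectivity_check implementation:

    import urllib2
    from functools import wraps

    import nose


    def with_connectivity_check(check_url):
        """Sketch: skip a test on network outage, re-raise real failures."""
        def decorate(test_func):
            @wraps(test_func)
            def wrapper(*args, **kwargs):
                try:
                    return test_func(*args, **kwargs)
                except IOError:
                    # The test failed with a network-style error. Probe a
                    # known-good URL to decide whether the network itself
                    # is down (skip) or the failure is genuine (re-raise).
                    try:
                        urllib2.urlopen(check_url)
                    except IOError:
                        raise nose.SkipTest("no network connectivity")
                    raise
            return wrapper
        return decorate

Because urllib2.URLError subclasses IOError in Python 2, catching IOError covers both exception types the old inline blocks handled, and each test body shrinks to just its assertions, as in the diff above.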
|