Replace Yahoo iCharts API #355

Merged · 24 commits · Jul 2, 2017

Changes from 1 commit
more resource cleaning

jreback committed Jul 2, 2017
commit 8f452413f637d8728c5b20a42a225df081f04690
@@ -150,6 +150,12 @@ def _fix_old_file_paths(self, path):
         return path

     def read(self):
+        try:
+            return self._read()
+        finally:
+            self.close()
+
+    def _read(self):
         try:
             self._sec_ftp_session = FTP(_SEC_FTP, timeout=self.timeout)
             self._sec_ftp_session.login()
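For context, this commit applies the same pattern in several readers: the public read() delegates to a private _read(), and a finally block guarantees the underlying connection is released even when the fetch raises. A minimal sketch of that pattern, using a hypothetical DemoReader rather than the actual pandas-datareader classes:

import requests


class DemoReader(object):
    """Toy reader illustrating the read()/_read()/close() split."""

    def __init__(self, symbol):
        self.symbol = symbol
        self.session = requests.Session()

    def read(self):
        # Public entry point: always clean up, even if _read() raises.
        try:
            return self._read()
        finally:
            self.close()

    def _read(self):
        # The actual fetch/parse logic lives here (placeholder URL).
        resp = self.session.get('https://example.com/%s.csv' % self.symbol)
        resp.raise_for_status()
        return resp.text

    def close(self):
        # Release the pooled connections held by the session.
        self.session.close()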
@@ -100,6 +100,12 @@ def extract_export_url(self, delay=10, max_attempts=10):
         return resp.json()[self.export_key]

     def read(self):
+        try:
+            return self._read()
+        finally:
+            self.close()
+
+    def _read(self):
         export_gzipped_req = self._request(self.extract_export_url())
         decompressed_data = self._decompress_export(
             export_gzipped_req.content).decode("utf-8")
@@ -20,6 +20,12 @@ def url(self):
         return "http://research.stlouisfed.org/fred2/series/"

     def read(self):
+        try:
+            return self._read()
+        finally:
+            self.close()
+
+    def _read(self):
         if not is_list_like(self.symbols):
             names = [self.symbols]
         else:
@@ -83,21 +83,21 @@ def assert_option_result(self, df):

     def test_get_quote_string(self):
         df = web.get_quote_google('GOOG')
-        assert df.ix['GOOG']['last'] > 0.0
+        assert df.loc['GOOG', 'last'] > 0.0
         tm.assert_index_equal(df.index, pd.Index(['GOOG']))
         self.assert_option_result(df)

     def test_get_quote_stringlist(self):
         df = web.get_quote_google(['GOOG', 'AMZN', 'GOOG'])
-        assert_series_equal(df.ix[0], df.ix[2])
+        assert_series_equal(df.iloc[0], df.iloc[2])
         tm.assert_index_equal(df.index, pd.Index(['GOOG', 'AMZN', 'GOOG']))
         self.assert_option_result(df)

     def test_get_goog_volume(self):
         for locale in self.locales:
             with tm.set_locale(locale):
                 df = web.get_data_google('GOOG').sort_index()
-                assert df.Volume.ix['JAN-02-2015'] == 1446662
+                assert df.Volume.loc['JAN-02-2015'] == 1446662

     def test_get_multi1(self):
         for locale in self.locales:
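The test changes in this and the following hunks are the mechanical part of the cleanup: the deprecated .ix indexer is replaced by .loc for label-based lookups and .iloc for positional ones. A small illustration with a made-up frame (not data returned by get_quote_google):

import pandas as pd

df = pd.DataFrame({'last': [950.0, 980.0, 950.0]},
                  index=['GOOG', 'AMZN', 'GOOG'])

# Label-based: row label and column name.
assert df.loc['AMZN', 'last'] == 980.0

# Position-based: rows 0 and 2 carry the duplicated 'GOOG' label.
pd.testing.assert_series_equal(df.iloc[0], df.iloc[2])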
@@ -130,13 +130,13 @@ def test_get_multi2(self):
             with tm.set_locale(locale):
                 pan = web.get_data_google(['GE', 'MSFT', 'INTC'],
                                           'JAN-01-12', 'JAN-31-12')
-                result = pan.Close.ix['01-18-12']
+                result = pan.Close.loc['01-18-12']
                 assert_n_failed_equals_n_null_columns(w, result)

                 # sanity checking

                 assert np.issubdtype(result.dtype, np.floating)
-                result = pan.Open.ix['Jan-15-12':'Jan-20-12']
+                result = pan.Open.loc['Jan-15-12':'Jan-20-12']

                 assert result.shape == (4, 3)
                 assert_n_failed_equals_n_null_columns(w, result)
@@ -158,7 +158,7 @@ def test_unicode_date(self):
     def test_google_reader_class(self):
         r = GoogleDailyReader('GOOG')
         df = r.read()
-        assert df.Volume.ix['JAN-02-2015'] == 1446662
+        assert df.Volume.loc['JAN-02-2015'] == 1446662

         session = requests.Session()
         r = GoogleDailyReader('GOOG', session=session)
@@ -50,7 +50,7 @@ def test_land_use(self):
         result = read_jsdmx(os.path.join(self.dirpath, 'jsdmx',
                                          'land_use.json'))
         assert isinstance(result, pd.DataFrame)
-        result = result.ix['2010':'2011']
+        result = result.loc['2010':'2011']

         exp_col = pd.MultiIndex.from_product([
             ['Japan', 'United States'],
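The jsdmx test relies on .loc supporting the same partial-string date slicing that .ix did: on a DatetimeIndex the slice '2010':'2011' keeps every row falling within those years, endpoints included. A quick sketch with synthetic data (not the land_use.json fixture):

import pandas as pd

s = pd.Series(range(4),
              index=pd.to_datetime(['2009-06-30', '2010-06-30',
                                    '2011-06-30', '2012-06-30']))

# Partial-string slice: everything from the start of 2010
# through the end of 2011.
assert list(s.loc['2010':'2011']) == [1, 2]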
@@ -38,14 +38,14 @@ def test_fred_nan(self):
         start = datetime(2010, 1, 1)
         end = datetime(2013, 1, 27)
         df = web.DataReader("DFII5", "fred", start, end)
-        assert pd.isnull(df.ix['2010-01-01'][0])
+        assert pd.isnull(df.loc['2010-01-01'][0])

     @pytest.mark.skip(reason='Buggy as of 2/18/14; maybe a data revision?')
     def test_fred_parts(self):  # pragma: no cover
         start = datetime(2010, 1, 1)
         end = datetime(2013, 1, 27)
         df = web.get_data_fred("CPIAUCSL", start, end)
-        assert df.ix['2010-05-01'][0] == 217.23
+        assert df.loc['2010-05-01'][0] == 217.23

         t = df.CPIAUCSL.values
         assert np.issubdtype(t.dtype, np.floating)
@@ -57,7 +57,7 @@ def test_fred_part2(self):
                     [684.7],
                     [848.3],
                     [933.3]]
-        result = web.get_data_fred("A09024USA144NNBR", start="1915").ix[:5]
+        result = web.get_data_fred("A09024USA144NNBR", start="1915").iloc[:5]
         tm.assert_numpy_array_equal(result.values, np.array(expected))

     def test_invalid_series(self):
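One subtlety in the FRED change above: .ix[:5] with a bare integer on a date-indexed frame fell back to positional slicing, so the faithful replacement is .iloc[:5], not .loc[:5]. A small illustration with a synthetic series (not actual FRED data):

import pandas as pd

s = pd.Series([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0],
              index=pd.date_range('1915-01-01', periods=7, freq='D'))

# Positional: the first five rows, regardless of their date labels.
assert len(s.iloc[:5]) == 5

# Label-based slicing would need actual dates instead (endpoint inclusive).
assert len(s.loc[:'1915-01-05']) == 5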
@@ -159,6 +159,12 @@ def params(self):
                 'per_page': 25000, 'format': 'json'}

     def read(self):
+        try:
+            return self._read()
+        finally:
+            self.close()
+
+    def _read(self):
         data = []
         for indicator in self.symbols:
             # Build URL for api call
@@ -321,7 +327,7 @@ def search(self, string='gdp.*capi', field='name', case=False):
         indicators = self.get_indicators()
         data = indicators[field]
         idx = data.str.contains(string, case=case)
-        out = indicators.ix[idx].dropna()
+        out = indicators.loc[idx].dropna()
         return out
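The search() change is the same migration applied to a boolean mask: indicators.loc[idx] keeps the rows where the str.contains() mask is True. A rough sketch of that lookup with an invented indicator table (not the World Bank response):

import pandas as pd

indicators = pd.DataFrame({
    'id': ['NY.GDP.PCAP.CD', 'SP.POP.TOTL'],
    'name': ['GDP per capita (current US$)', 'Population, total'],
})

# Case-insensitive regex match on the 'name' column, as in search().
idx = indicators['name'].str.contains('gdp.*capi', case=False)
out = indicators.loc[idx].dropna()
print(out)  # only the GDP-per-capita row survives the mask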

