Skip to content

Commit 4e26274

Browse files
authored
Update yahoo.py
Fixed Yahoo Finance Download. See PR mementum#488
1 parent b853d7c commit 4e26274

File tree

1 file changed

+4
-45
lines changed

1 file changed

+4
-45
lines changed

backtrader/feeds/yahoo.py

Lines changed: 4 additions & 45 deletions
Original file line number | Diff line number | Diff line change
@@ -225,27 +225,20 @@ class YahooFinanceData(YahooFinanceCSVData):
225225
Whether to use the dividend/split adjusted close and adjust all values
226226
according to it.
227227
228-
- ``urlhist``
229-
230-
The url of the historical quotes in Yahoo Finance used to gather a
231-
``crumb`` authorization cookie for the download
232-
233228
- ``urldown``
234229
235230
The url of the actual download server
236231
237232
- ``retries``
238233
239-
Number of times (each) to try to get a ``crumb`` cookie and download
240-
the data
234+
Number of times (each) to try to download the data
241235
242236
'''
243237

244238
params = (
245239
('proxies', {}),
246240
('period', 'd'),
247241
('reverse', False),
248-
('urlhist', 'https://finance.yahoo.com/quote/{}/history'),
249242
('urldown', 'https://query1.finance.yahoo.com/v7/finance/download'),
250243
('retries', 3),
251244
)
@@ -260,47 +253,12 @@ def start_v7(self):
260253
raise Exception(msg)
261254

262255
self.error = None
263-
url = self.p.urlhist.format(self.p.dataname)
264256

265257
sesskwargs = dict()
266258
if self.p.proxies:
267259
sesskwargs['proxies'] = self.p.proxies
268260

269-
crumb = None
270-
sess = requests.Session()
271-
sess.headers['User-Agent'] = 'backtrader'
272-
for i in range(self.p.retries + 1): # at least once
273-
resp = sess.get(url, **sesskwargs)
274-
if resp.status_code != requests.codes.ok:
275-
continue
276-
277-
txt = resp.text
278-
i = txt.find('CrumbStore')
279-
if i == -1:
280-
continue
281-
i = txt.find('crumb', i)
282-
if i == -1:
283-
continue
284-
istart = txt.find('"', i + len('crumb') + 1)
285-
if istart == -1:
286-
continue
287-
istart += 1
288-
iend = txt.find('"', istart)
289-
if iend == -1:
290-
continue
291-
292-
crumb = txt[istart:iend]
293-
crumb = crumb.encode('ascii').decode('unicode-escape')
294-
break
295-
296-
if crumb is None:
297-
self.error = 'Crumb not found'
298-
self.f = None
299-
return
300-
301-
crumb = urlquote(crumb)
302-
303-
# urldown/ticker?period1=posix1&period2=posix2&interval=1d&events=history&crumb=crumb
261+
# urldown/ticker?period1=posix1&period2=posix2&interval=1d&events=history
304262

305263
# Try to download
306264
urld = '{}/{}'.format(self.p.urldown, self.p.dataname)
@@ -323,10 +281,11 @@ def start_v7(self):
323281

324282
urlargs.append('interval={}'.format(intervals[self.p.timeframe]))
325283
urlargs.append('events=history')
326-
urlargs.append('crumb={}'.format(crumb))
327284

328285
urld = '{}?{}'.format(urld, '&'.join(urlargs))
329286
f = None
287+
sess = requests.Session()
288+
sess.headers['User-Agent'] = 'backtrader'
330289
for i in range(self.p.retries + 1): # at least once
331290
resp = sess.get(urld, **sesskwargs)
332291
if resp.status_code != requests.codes.ok:

0 commit comments

Comments (0)