
Commit

check in DOW29, downloading is too fragile.
Simon Broda committed Sep 18, 2019
1 parent 9ae644b commit b44d93c
Showing 4 changed files with 2,808 additions and 24 deletions.
2 changes: 1 addition & 1 deletion .gitignore
@@ -5,7 +5,7 @@
*.log
docs/build/
docs/site
-src/data
+src/data/bollerslev_ghysels.txt
docs/Manifest.toml
Manifest.toml
make-require.jl
1 change: 0 additions & 1 deletion Project.toml
@@ -14,7 +14,6 @@ Optim = "429524aa-4258-5aef-a3af-852621145aeb"
Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
Reexport = "189a3867-3050-52da-a836-e630ba90ab69"
Requires = "ae029012-a4dd-5104-9daa-d747884805df"
-Retry = "20febd7b-183b-5ae2-ac4a-720e7ce64774"
Roots = "f2b01f46-fcfa-551c-844a-d8ac1e96c665"
SpecialFunctions = "276daf66-3868-5448-9aa4-cd146d93841b"
Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
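Dropping the Retry entry from Project.toml by hand is equivalent to removing the dependency through Pkg; a minimal sketch of that operation (the working directory is an assumption):

    using Pkg
    Pkg.activate(".")  # activate this package's Project.toml; assumes the repository root as working directory
    Pkg.rm("Retry")    # removes the [deps] entry shown above and updates the manifest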
44 changes: 22 additions & 22 deletions deps/build.jl
@@ -1,28 +1,28 @@
-using Retry
+#using Retry
using DelimitedFiles
datadir = joinpath(@__DIR__, "..", "src", "data")
isdir(datadir) || mkdir(datadir)
@info "Downloading Bollerslev and Ghysels data..."
isfile(joinpath(datadir, "bollerslev_ghysels.txt")) || download("http://people.stern.nyu.edu/wgreene/Text/Edition7/TableF20-1.txt", joinpath(datadir, "bollerslev_ghysels.txt"))

@info "Downloading stock data..."
#"DOW" is excluded because it's listed too late
tickers = ["AAPL", "IBM", "XOM", "KO", "MSFT", "INTC", "MRK", "PG", "VZ", "WBA", "V", "JNJ", "PFE", "CSCO", "TRV", "WMT", "MMM", "UTX", "UNH", "NKE", "HD", "BA", "AXP", "MCD", "CAT", "GS", "JPM", "CVX", "DIS"]
alldata = zeros(2786, 29)
for (j, ticker) in enumerate(tickers)
@repeat 4 try
@info "...$ticker"
filename = joinpath(datadir, "$ticker.csv")
isfile(joinpath(datadir, "$ticker.csv")) || download("http://quotes.wsj.com/$ticker/historical-prices/download?num_rows=100000000&range_days=100000000&startDate=03/19/2008&endDate=04/11/2019", filename)
data = parse.(Float64, readdlm(joinpath(datadir, "$ticker.csv"), ',', String, skipstart=1)[:, 5])
length(data) == 2786 || error("Download failed for $ticker.")
alldata[:, j] .= data
rm(filename)
catch e
@delay_retry if 1==1 end
end
end
alldata = 100 * diff(log.(alldata), dims=1)
open(joinpath(datadir, "dow29.csv"), "w") do io
writedlm(io, alldata, ',')
end
+# @info "Downloading stock data..."
+# #"DOW" is excluded because it's listed too late
+# tickers = ["AAPL", "IBM", "XOM", "KO", "MSFT", "INTC", "MRK", "PG", "VZ", "WBA", "V", "JNJ", "PFE", "CSCO", "TRV", "WMT", "MMM", "UTX", "UNH", "NKE", "HD", "BA", "AXP", "MCD", "CAT", "GS", "JPM", "CVX", "DIS"]
+# alldata = zeros(2786, 29)
+# for (j, ticker) in enumerate(tickers)
+# @repeat 4 try
+# @info "...$ticker"
+# filename = joinpath(datadir, "$ticker.csv")
+# isfile(joinpath(datadir, "$ticker.csv")) || download("http://quotes.wsj.com/$ticker/historical-prices/download?num_rows=100000000&range_days=100000000&startDate=03/19/2008&endDate=04/11/2019", filename)
+# data = parse.(Float64, readdlm(joinpath(datadir, "$ticker.csv"), ',', String, skipstart=1)[:, 5])
+# length(data) == 2786 || error("Download failed for $ticker.")
+# alldata[:, j] .= data
+# rm(filename)
+# catch e
+# @delay_retry if 1==1 end
+# end
+# end
+# alldata = 100 * diff(log.(alldata), dims=1)
+# open(joinpath(datadir, "dow29.csv"), "w") do io
+# writedlm(io, alldata, ',')
+# end
2,785 changes: 2,785 additions & 0 deletions src/data/dow29.csv (large data file; diff not rendered)
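The checked-in file holds the output of the build script above: daily percent log returns, 100 * diff(log.(prices)), for the 29 tickers, giving a 2,785 x 29 comma-delimited matrix with one column per ticker in the order of the tickers list. A minimal sketch of reading it back with DelimitedFiles (the working directory is an assumption):

    using DelimitedFiles
    # Read the comma-delimited matrix written by build.jl; path is relative to the repository root.
    dow29 = readdlm(joinpath("src", "data", "dow29.csv"), ',')
    size(dow29)  # expect (2785, 29): one row per trading day, one column per ticker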
