Commit: response
stockviz committed Aug 14, 2019
1 parent ca95777 commit 3a51a86
Showing 1 changed file with 133 additions and 20 deletions.
153 changes: 133 additions & 20 deletions gold-nifty50.ipynb
@@ -11,9 +11,33 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 1,
"metadata": {},
"outputs": [],
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"── \u001b[1mAttaching packages\u001b[22m ─────────────────────────────────────── tidyverse 1.2.1 ──\n",
"\u001b[32m✔\u001b[39m \u001b[34mggplot2\u001b[39m 3.2.1 \u001b[32m✔\u001b[39m \u001b[34mpurrr \u001b[39m 0.3.2\n",
"\u001b[32m✔\u001b[39m \u001b[34mtibble \u001b[39m 2.1.3 \u001b[32m✔\u001b[39m \u001b[34mdplyr \u001b[39m 0.8.3\n",
"\u001b[32m✔\u001b[39m \u001b[34mtidyr \u001b[39m 0.8.3 \u001b[32m✔\u001b[39m \u001b[34mstringr\u001b[39m 1.4.0\n",
"\u001b[32m✔\u001b[39m \u001b[34mreadr \u001b[39m 1.3.1 \u001b[32m✔\u001b[39m \u001b[34mforcats\u001b[39m 0.4.0\n",
"── \u001b[1mConflicts\u001b[22m ────────────────────────────────────────── tidyverse_conflicts() ──\n",
"\u001b[31m✖\u001b[39m \u001b[34mdplyr\u001b[39m::\u001b[32mfilter()\u001b[39m masks \u001b[34mstats\u001b[39m::filter()\n",
"\u001b[31m✖\u001b[39m \u001b[34mdplyr\u001b[39m::\u001b[32mlag()\u001b[39m masks \u001b[34mstats\u001b[39m::lag()\n"
]
},
{
"ename": "ERROR",
"evalue": "Error in library(ggthemes): there is no package called ‘ggthemes’\n",
"output_type": "error",
"traceback": [
"Error in library(ggthemes): there is no package called ‘ggthemes’\nTraceback:\n",
"1. library(ggthemes)"
]
}
],
"source": [
"library(tidyverse)\n",
"library(ggthemes)\n",
@@ -41,9 +65,21 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 2,
"metadata": {},
"outputs": [],
"outputs": [
{
"ename": "ERROR",
"evalue": "Error in eval(lhs, parent, parent): object 'fred' not found\n",
"output_type": "error",
"traceback": [
"Error in eval(lhs, parent, parent): object 'fred' not found\nTraceback:\n",
"1. fred$Meta() %>% inner_join(fred$TimeSeries(), by = \"SERIES_ID\") %>% \n . filter(NAME %like% \"%gold%\") %>% group_by(SERIES_ID, TICKER, \n . NAME) %>% summarize(MIN_TS = min(TIME_STAMP), MAX_TS = max(TIME_STAMP)) %>% \n . select(SERIES_ID, TICKER, NAME, MIN_TS, MAX_TS)",
"2. eval(lhs, parent, parent)",
"3. eval(lhs, parent, parent)"
]
}
],
"source": [
"goldFred <- fred$Meta() %>% inner_join(fred$TimeSeries(), by = 'SERIES_ID') %>%\n",
" filter(NAME %like% '%gold%') %>%\n",
@@ -56,9 +92,22 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 3,
"metadata": {},
"outputs": [],
"outputs": [
{
"ename": "ERROR",
"evalue": "Error in eval(lhs, parent, parent): object 'fred' not found\n",
"output_type": "error",
"traceback": [
"Error in eval(lhs, parent, parent): object 'fred' not found\nTraceback:\n",
"1. data.frame(fred$TimeSeries() %>% filter(SERIES_ID == seriesIdGold, \n . TIME_STAMP >= as.Date(\"1995-01-01\")) %>% select(TIME_STAMP, \n . VAL))",
"2. fred$TimeSeries() %>% filter(SERIES_ID == seriesIdGold, TIME_STAMP >= \n . as.Date(\"1995-01-01\")) %>% select(TIME_STAMP, VAL)",
"3. eval(lhs, parent, parent)",
"4. eval(lhs, parent, parent)"
]
}
],
"source": [
"seriesIdGold <- -2147252004 #use the afternoon USD fix\n",
"goldDf <- data.frame(fred$TimeSeries() %>% filter(SERIES_ID == seriesIdGold, TIME_STAMP >= as.Date('1995-01-01')) %>% \n",
Expand All @@ -82,9 +131,18 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 4,
"metadata": {},
"outputs": [],
"outputs": [
{
"ename": "ERROR",
"evalue": "Error in eval(expr, envir, enclos): object 'goldDf' not found\n",
"output_type": "error",
"traceback": [
"Error in eval(expr, envir, enclos): object 'goldDf' not found\nTraceback:\n"
]
}
],
"source": [
"firstDate <- min(goldDf$TIME_STAMP)\n",
"lastDate <- max(goldDf$TIME_STAMP)\n",
@@ -114,9 +172,22 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 5,
"metadata": {},
"outputs": [],
"outputs": [
{
"ename": "ERROR",
"evalue": "Error in eval(lhs, parent, parent): object 'indices' not found\n",
"output_type": "error",
"traceback": [
"Error in eval(lhs, parent, parent): object 'indices' not found\nTraceback:\n",
"1. data.frame(indices$NseTimeSeries() %>% filter(NAME == \"NIFTY 50 TR\" && \n . TIME_STAMP >= \"2000-01-01\" && TIME_STAMP <= lastDate) %>% \n . select(TIME_STAMP, CLOSE))",
"2. indices$NseTimeSeries() %>% filter(NAME == \"NIFTY 50 TR\" && TIME_STAMP >= \n . \"2000-01-01\" && TIME_STAMP <= lastDate) %>% select(TIME_STAMP, \n . CLOSE)",
"3. eval(lhs, parent, parent)",
"4. eval(lhs, parent, parent)"
]
}
],
"source": [
"trIndex <- data.frame(indices$NseTimeSeries() %>%\n",
" filter(NAME == \"NIFTY 50 TR\" && TIME_STAMP >= '2000-01-01' && TIME_STAMP <= lastDate) %>%\n",
@@ -127,7 +198,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 6,
"metadata": {},
"outputs": [],
"source": [
@@ -136,9 +207,22 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 7,
"metadata": {},
"outputs": [],
"outputs": [
{
"ename": "ERROR",
"evalue": "Error in eval(lhs, parent, parent): object 'fred' not found\n",
"output_type": "error",
"traceback": [
"Error in eval(lhs, parent, parent): object 'fred' not found\nTraceback:\n",
"1. data.frame(fred$TimeSeries() %>% filter(SERIES_ID == seriesIdUsdInr, \n . TIME_STAMP >= as.Date(\"2000-01-01\")) %>% select(TIME_STAMP, \n . VAL))",
"2. fred$TimeSeries() %>% filter(SERIES_ID == seriesIdUsdInr, TIME_STAMP >= \n . as.Date(\"2000-01-01\")) %>% select(TIME_STAMP, VAL)",
"3. eval(lhs, parent, parent)",
"4. eval(lhs, parent, parent)"
]
}
],
"source": [
"#DEXINUS, from the example: http://pluto.studio/user/shyams80/notebooks/docs-R/Fred.ipynb\n",
"seriesIdUsdInr <- -2147478748\n",
@@ -163,9 +247,19 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 8,
"metadata": {},
"outputs": [],
"outputs": [
{
"ename": "ERROR",
"evalue": "Error in merge(trXts, usdInrXts, goldXts): object 'trXts' not found\n",
"output_type": "error",
"traceback": [
"Error in merge(trXts, usdInrXts, goldXts): object 'trXts' not found\nTraceback:\n",
"1. merge(trXts, usdInrXts, goldXts)"
]
}
],
"source": [
"allXts <- merge(trXts, usdInrXts, goldXts)\n",
"#holidays don't match up, so carry the USDINR forward.\n",
@@ -175,9 +269,19 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 9,
"metadata": {},
"outputs": [],
"outputs": [
{
"ename": "ERROR",
"evalue": "Error in Common.NormalizeMonthlyDates(monthlyReturn(allXts[, 1]/allXts[, : could not find function \"Common.NormalizeMonthlyDates\"\n",
"output_type": "error",
"traceback": [
"Error in Common.NormalizeMonthlyDates(monthlyReturn(allXts[, 1]/allXts[, : could not find function \"Common.NormalizeMonthlyDates\"\nTraceback:\n",
"1. merge(Common.NormalizeMonthlyDates(monthlyReturn(allXts[, 1]/allXts[, \n . 2])), Common.NormalizeMonthlyDates(monthlyReturn(allXts[, \n . 3])))"
]
}
],
"source": [
"#end-of months don't match, so force them to a common on\n",
"monthlies <- merge(Common.NormalizeMonthlyDates(monthlyReturn(allXts[,1]/allXts[,2])), \n",
@@ -189,9 +293,18 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 10,
"metadata": {},
"outputs": [],
"outputs": [
{
"ename": "ERROR",
"evalue": "Error in Common.PlotCumReturns(monthlies, \"NIFTY 50 TR vs. Gold\", \"in USD\"): could not find function \"Common.PlotCumReturns\"\n",
"output_type": "error",
"traceback": [
"Error in Common.PlotCumReturns(monthlies, \"NIFTY 50 TR vs. Gold\", \"in USD\"): could not find function \"Common.PlotCumReturns\"\nTraceback:\n"
]
}
],
"source": [
"Common.PlotCumReturns(monthlies, \"NIFTY 50 TR vs. Gold\", \"in USD\")"
]
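Note: Common.PlotCumReturns is presumably another StockViz charting helper whose exact styling is not shown here; a rough equivalent of a cumulative-return comparison can be drawn with PerformanceAnalytics:

    library(PerformanceAnalytics)

    # plot cumulative growth of the two monthly return series on one chart
    chart.CumReturns(monthlies,
                     wealth.index = TRUE,
                     legend.loc   = "topleft",
                     main         = "NIFTY 50 TR vs. Gold, in USD")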
@@ -229,7 +342,7 @@
"mimetype": "text/x-r-source",
"name": "R",
"pygments_lexer": "r",
"version": "3.4.4"
"version": "3.6.1"
}
},
"nbformat": 4,