Merge pull request #41 from hammerlab/hammer_tf_backend
Add notebook that builds simple model running on TF backend
hammer committed Aug 3, 2016
2 parents 22e4e31 + a84bccb commit a983478
Showing 1 changed file with 185 additions and 0 deletions.
185 changes: 185 additions & 0 deletions notebooks/TF exploration.ipynb
@@ -0,0 +1,185 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"import os\n",
"os.environ['THEANO_FLAGS'] = \"'device=cpu'\"\n",
"os.environ['KERAS_BACKEND'] = \"tensorflow\""
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"%matplotlib inline\n",
"\n",
"import warnings\n",
"warnings.filterwarnings('ignore')\n",
"\n",
"import numpy as np\n",
"import pandas as pd\n",
"import tensorflow as tf\n",
"import seaborn as sns\n",
"\n",
"from mhcflurry.dataset import Dataset\n",
"from mhcflurry.peptide_encoding import indices_to_hotshot_encoding\n",
"from mhcflurry.regression_target import ic50_to_regression_target"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"file_to_explore=\"/root/.local/share/mhcflurry/2/class1_data/combined_human_class1_dataset.csv\"\n",
"dataset = Dataset.from_csv(\n",
" filename=file_to_explore,\n",
" sep=\",\",\n",
" peptide_column_name=\"peptide\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"df = dataset.to_dataframe()\n",
"df.columns"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"df[df.species == 'human'].groupby('affinity').size().order().tail(10)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"df_reduced = df[df.allele.isin(['HLA-A0201', 'HLA-A2301', 'HLA-A2402', 'HLA-A1101'])][['allele','affinity']].reset_index(drop=True)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"sns.violinplot(x=df_reduced['allele'], y=np.log(df_reduced['affinity']))"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"scaled_affinity = ic50_to_regression_target(df_reduced['affinity'])\n",
"sns.boxplot(x=df_reduced['allele'], y=scaled_affinity)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"df_reduced.groupby('allele').size()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"df_kmers = dataset.kmer_index_encoding()\n",
"training_hotshot = indices_to_hotshot_encoding(df_kmers[0])\n",
"training_labels = ic50_to_regression_target(df_kmers[1])"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"from keras.models import Sequential\n",
"from keras.layers import Dense, Activation\n",
"\n",
"model = Sequential()\n",
"model.add(Dense(input_dim=189, output_dim=1))\n",
"model.add(Activation(\"sigmoid\"))\n",
"model.compile(loss=\"mse\", optimizer=\"rmsprop\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"model.fit(training_hotshot, training_labels, nb_epoch=5, batch_size=1)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 2",
"language": "python",
"name": "python2"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 2
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython2",
"version": "2.7.6"
}
},
"nbformat": 4,
"nbformat_minor": 0
}
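Note on the backend selection in the notebook's first cell: KERAS_BACKEND is only honored if it is set before Keras is first imported; set it afterwards and Keras silently stays on whatever backend ~/.keras/keras.json names. A minimal sketch for confirming that the notebook really is running on TensorFlow (not part of the committed file; assumes Keras and TensorFlow are installed):

import os
os.environ['KERAS_BACKEND'] = 'tensorflow'  # must be set before any keras import

from keras import backend as K

# keras.backend.backend() returns the name of the active backend as a string.
assert K.backend() == 'tensorflow', 'Keras did not pick up the TensorFlow backend'
print('Active Keras backend:', K.backend())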
