Merge 93751a8 into bc8b67e
erikbern authored Dec 24, 2017
2 parents bc8b67e + 93751a8 commit f55ada6
Showing 3 changed files with 9 additions and 11 deletions.
2 changes: 0 additions & 2 deletions convoys/__init__.py
@@ -400,7 +400,6 @@ def plot_cohorts(data, t_max=None, title=None, group_min_size=0, max_groups=100,
     pyplot.ylabel('Conversion rate %')
     pyplot.legend()
     pyplot.gca().grid(True)
-    pyplot.tight_layout()


 def plot_conversion(data, window, projection, group_min_size=0, max_groups=100, window_min_size=1, stride=None, share_params=False, title=None, time=False):
@@ -468,4 +467,3 @@ def plot_conversion(data, window, projection, group_min_size=0, max_groups=100,
     pyplot.ylabel('Conversion rate %')
     pyplot.legend()
     pyplot.gca().grid(True)
-    pyplot.tight_layout()
1 change: 0 additions & 1 deletion requirements.txt
@@ -2,7 +2,6 @@ autograd==1.2
 lifelines==0.11.2
 matplotlib>=2.0.0
 numpy
-pytz
 scipy
 seaborn==0.8.1
 six==1.11.0
17 changes: 9 additions & 8 deletions test_convoys.py
@@ -1,10 +1,10 @@
 import datetime
 import matplotlib
 import numpy
-import pytz
 import random
 import scipy.stats
 matplotlib.use('Agg')  # Needed for matplotlib to run in Travis
+import convoys
 from convoys import Exponential, Gamma, Weibull, Bootstrapper, plot_cohorts


@@ -14,7 +14,7 @@ def test_exponential_model(c=0.05, lambd=0.1, n=100000):
     N = numpy.array([100 for converted_at in C])
     B = numpy.array([bool(converted_at > 0) for converted_at in C])
     c = numpy.mean(B)
-    model = Exponential()
+    model = convoys.Exponential()
     model.fit(C, N, B)
     assert 0.95*c < model.params['c'] < 1.05*c
     assert 0.95*lambd < model.params['lambd'] < 1.05*lambd
@@ -25,7 +25,7 @@ def test_gamma_model(c=0.05, lambd=0.1, k=10.0, n=100000):
     N = numpy.array([1000 for converted_at in C])
     B = numpy.array([bool(converted_at > 0) for converted_at in C])
     c = numpy.mean(B)
-    model = Gamma()
+    model = convoys.Gamma()
     model.fit(C, N, B)
     assert 0.95*c < model.params['c'] < 1.05*c
     assert 0.95*lambd < model.params['lambd'] < 1.05*lambd
@@ -41,7 +41,7 @@ def sample_weibull():
     C = numpy.array([b and sample_weibull() or 1.0 for b in B])
     N = numpy.array([1000 for b in B])
     c = numpy.mean(B)
-    model = Weibull()
+    model = convoys.Weibull()
     model.fit(C, N, B)
     assert 0.95*c < model.params['c'] < 1.05*c
     # TODO: figure out how to make L-BFGS-B run longer
@@ -54,7 +54,7 @@ def test_bootstrapped_exponential_model(c=0.05, lambd=0.1, n=10000):
     N = numpy.array([100 for converted_at in C])
     B = numpy.array([bool(converted_at > 0) for converted_at in C])
     c = numpy.mean(B)
-    model = Bootstrapper('exponential')
+    model = convoys.Bootstrapper('exponential')
     model.fit(C, N, B)
     y, y_lo, y_hi = model.predict_final(confidence_interval=True)
     c_lo = scipy.stats.beta.ppf(0.05, n*c, n*(1-c))
@@ -66,9 +66,9 @@ def test_bootstrapped_exponential_model(c=0.05, lambd=0.1, n=10000):

 def test_plot_cohorts(c=0.05, k=10, lambd=0.1, n=1000):
     data = []
-    now = datetime.datetime(2001, 7, 1, tzinfo=pytz.utc)
+    now = datetime.datetime(2000, 7, 1)
     for x in range(n):
-        date_a = datetime.datetime(2000, 1, 1, tzinfo=pytz.utc) + datetime.timedelta(days=random.random()*100)
+        date_a = datetime.datetime(2000, 1, 1) + datetime.timedelta(days=random.random()*100)
         if random.random() < c:
             delay = scipy.stats.gamma.rvs(a=k, scale=1.0/lambd)
             date_b = date_a + datetime.timedelta(days=delay)
@@ -78,4 +78,5 @@ def test_plot_cohorts(c=0.05, k=10, lambd=0.1, n=1000):
                 data.append(('foo', date_a, None, now))
         else:
             data.append(('foo', date_a, None, now))
-    plot_cohorts(data, projection='gamma')
+    convoys.plot_cohorts(data, projection='gamma')
+    convoys.plot_conversion(data, window=datetime.timedelta(days=7), projection='gamma')
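For reference, a minimal usage sketch consistent with the state of the code after this commit. It reuses only the calls that appear in the updated test (convoys.plot_cohorts and convoys.plot_conversion) and the ('group', first event time, conversion time or None, observation cutoff) tuple shape built in test_plot_cohorts; the sample size, conversion rate, and gamma parameters here are illustrative assumptions, not values taken from the commit.

import datetime
import random

import matplotlib
matplotlib.use('Agg')  # headless backend, as in the test suite
import scipy.stats
import convoys

# Synthetic data in the same 4-tuple shape the test builds:
# (group, time of first event, time of conversion or None, observation cutoff).
now = datetime.datetime(2000, 7, 1)
data = []
for _ in range(1000):
    date_a = datetime.datetime(2000, 1, 1) + datetime.timedelta(days=random.random() * 100)
    if random.random() < 0.05:
        delay = scipy.stats.gamma.rvs(a=10, scale=10.0)
        date_b = date_a + datetime.timedelta(days=delay)
        data.append(('foo', date_a, date_b if date_b < now else None, now))
    else:
        data.append(('foo', date_a, None, now))

# The two plotting entry points exercised at the end of test_plot_cohorts.
convoys.plot_cohorts(data, projection='gamma')
convoys.plot_conversion(data, window=datetime.timedelta(days=7), projection='gamma')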
