Skip to content

Commit

Permalink
BUG: properly assign zero probability to outside-outside first-second choices
Browse files Browse the repository at this point in the history
  • Loading branch information
jeffgortmaker committed Jun 19, 2022
1 parent 42930bf commit 98cbf85
Show file tree
Hide file tree
Showing 3 changed files with 10 additions and 5 deletions.
5 changes: 5 additions & 0 deletions pyblp/markets/market.py
Original file line number Diff line number Diff line change
Expand Up @@ -1504,6 +1504,8 @@ def compute_micro_dataset_contributions(
if weights.shape[2] == 1 + self.J:
assert eliminated_outside_probabilities is not None
dataset_weights[:, -self.J:, 0] *= eliminated_outside_probabilities.T
if weights.shape[1] == weights.shape[2] == 1 + self.J:
dataset_weights[:, 0, 0] = 0

# truncate numerical errors from below by zero
dataset_weights[dataset_weights < 0] = 0
Expand Down Expand Up @@ -1541,6 +1543,9 @@ def compute_micro_dataset_contributions(
assert eliminated_outside_probabilities is not None
product1[:, -self.J:, 0] *= eliminated_outside_probabilities.T
product2[:, -self.J:, 0] *= eliminated_outside_probabilities_tangent_mapping[p].T
if weights.shape[1] == weights.shape[2] == 1 + self.J:
product1[:, 0, 0] = 0
product2[:, 0, 0] = 0

weights_tangent *= product1 + product2

Expand Down
8 changes: 4 additions & 4 deletions tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -396,17 +396,17 @@ def large_blp_simulation() -> SimulationFixture:
),
),
MicroMoment(
name="diversion interaction",
name="unconditional diversion interaction",
dataset=MicroDataset(
name="inside first and second",
observations=simulation.N,
compute_weights=lambda _, p, a: np.ones((a.size, p.size, p.size)),
compute_weights=lambda _, p, a: np.ones((a.size, 1 + p.size, 1 + p.size)),
market_ids=[simulation.unique_market_ids[0]],
),
value=0,
compute_values=lambda _, p, a: (
np.tile(p.X2[:, [2]], (a.size, 1, p.size)) *
np.tile(p.X2[:, [2]], (a.size, 1, p.size)).swapaxes(1, 2)
np.tile(np.c_[np.r_[0, p.X2[:, 2]]], (a.size, 1, 1 + p.size)) *
np.tile(np.c_[np.r_[0, p.X2[:, 2]]], (a.size, 1, 1 + p.size)).swapaxes(1, 2)
),
),
])
Expand Down
2 changes: 1 addition & 1 deletion tests/test_blp.py
Original file line number Diff line number Diff line change
Expand Up @@ -1482,7 +1482,7 @@ def test_objective_gradient(
def test_finite_differences(theta: Array, _: Any, objective_function: Callable, __: Any) -> Tuple[Array, bool]:
"""Test central finite differences around starting parameter values."""
approximated = compute_finite_differences(lambda x: objective_function(x)[0], theta, epsilon_scale=10.0)
np.testing.assert_allclose(approximated.flatten(), exact.flatten(), atol=1e-8, rtol=1e-2)
np.testing.assert_allclose(approximated.flatten(), exact.flatten(), atol=1e-8, rtol=1e-3)
return theta, True

# test the gradient
Expand Down

0 comments on commit 98cbf85

Please sign in to comment.