diff --git a/stonesoup/predictor/particle.py b/stonesoup/predictor/particle.py
index 9b22b8f3b..936c92b98 100644
--- a/stonesoup/predictor/particle.py
+++ b/stonesoup/predictor/particle.py
@@ -54,6 +54,7 @@ def predict(self, prior, timestamp=None, **kwargs):
                                                  **kwargs)

         return Prediction.from_state(prior,
+                                     parent=prior,
                                      state_vector=new_state_vector,
                                      timestamp=timestamp,
                                      transition_model=self.transition_model)
@@ -341,6 +342,7 @@ def predict(self, prior, timestamp=None, **kwargs):
             existence_probability=predicted_existence,
             parent=untransitioned_state,
             timestamp=timestamp,
+            transition_model=self.transition_model
             )

         return new_particle_state
diff --git a/stonesoup/regulariser/particle.py b/stonesoup/regulariser/particle.py
index 0d560bbc3..8ac602af6 100644
--- a/stonesoup/regulariser/particle.py
+++ b/stonesoup/regulariser/particle.py
@@ -1,10 +1,13 @@
 import copy
 import numpy as np
 from scipy.stats import multivariate_normal, uniform
+from typing import Sequence

 from .base import Regulariser
 from ..functions import cholesky_eps
 from ..types.state import ParticleState
+from ..models.transition import TransitionModel
+from ..base import Property


 class MCMCRegulariser(Regulariser):
@@ -14,7 +17,7 @@ class MCMCRegulariser(Regulariser):
     of effectiveness. Sometimes this is not desirable, or possible, when a particular algorithm
     requires the introduction of new samples as part of the filtering process for example.

-    This is a particlar implementation of a MCMC move step that uses the Metropolis-Hastings
+    This is a particular implementation of a MCMC move step that uses the Metropolis-Hastings
     algorithm [1]_. After resampling, particles are moved a small amount, according do a Gaussian
     kernel, to a new state only if the Metropolis-Hastings acceptance probability is met by a
     random number assigned to each particle from a uniform random distribution, otherwise they
@@ -24,21 +27,21 @@
     ----------
     .. [1] Robert, Christian P. & Casella, George, Monte Carlo Statistical Methods, Springer,
        1999.

-    .. [2] Ristic, Branco & Arulampalam, Sanjeev & Gordon, Neil, Beyond the Kalman Filter:
+    .. [2] Ristic, Branko & Arulampalam, Sanjeev & Gordon, Neil, Beyond the Kalman Filter:
        Particle Filters for Target Tracking Applications, Artech House, 2004.
     """

-    def regularise(self, prior, posterior, detections):
+    transition_model: TransitionModel = Property(doc="Transition model used for prediction",
+                                                 default=None)
+
+    def regularise(self, prior, posterior):
         """Regularise the particles

         Parameters
         ----------
-        prior : :class:`~.ParticleState` type or list of :class:`~.Particle`
-            prior particle distribution
-        posterior : :class:`~.ParticleState` type or list of :class:`~.Particle`
-            posterior particle distribution
-        detections : set of :class:`~.Detection`
-            set of detections containing clutter,
-            true detections or both
+        prior : :class:`~.ParticleState` type
+            prior particle distribution.
+        posterior : :class:`~.ParticleState` type
+            posterior particle distribution.

         Returns
         -------
@@ -47,15 +50,27 @@ def regularise(self, prior, posterior, detections):
         """

         if not isinstance(posterior, ParticleState):
-            posterior = ParticleState(None, particle_list=posterior)
+            raise TypeError('Only ParticleState type is supported!')

         if not isinstance(prior, ParticleState):
-            prior = ParticleState(None, particle_list=prior)
+            raise TypeError('Only ParticleState type is supported!')

         regularised_particles = copy.copy(posterior)
         moved_particles = copy.copy(posterior)
+        transitioned_prior = copy.copy(prior)
+
+        hypotheses = posterior.hypothesis if isinstance(posterior.hypothesis, Sequence) \
+            else [posterior.hypothesis]
+
+        transition_model = hypotheses[0].prediction.transition_model or self.transition_model
+        if transition_model is not None:
+            time_interval = posterior.timestamp - prior.timestamp
+            transitioned_prior.state_vector = \
+                transition_model.function(prior, noise=False, time_interval=time_interval)
+
+        detections = {hypothesis.measurement for hypothesis in hypotheses if hypothesis}

-        if detections is not None:
+        if detections:
             ndim = prior.state_vector.shape[0]
             nparticles = len(posterior)

@@ -70,13 +85,11 @@ def regularise(self, prior, posterior, detections):
                 hopt * cholesky_eps(covar_est) @ np.random.randn(ndim, nparticles)

             # Evaluate likelihoods
-            part_diff = moved_particles.state_vector - prior.state_vector
-            part_diff_mean = np.average(part_diff, axis=1)
-            move_likelihood = multivariate_normal.logpdf((part_diff - part_diff_mean).T,
+            part_diff = moved_particles.state_vector - transitioned_prior.state_vector
+            move_likelihood = multivariate_normal.logpdf(part_diff.T,
                                                          cov=covar_est)
-            post_part_diff = posterior.state_vector - prior.state_vector
-            post_part_diff_mean = np.average(post_part_diff, axis=1)
-            post_likelihood = multivariate_normal.logpdf((post_part_diff - post_part_diff_mean).T,
+            post_part_diff = posterior.state_vector - transitioned_prior.state_vector
+            post_likelihood = multivariate_normal.logpdf(post_part_diff.T,
                                                          cov=covar_est)

             # Evaluate measurement likelihoods
diff --git a/stonesoup/regulariser/tests/test_particle.py b/stonesoup/regulariser/tests/test_particle.py
index 673855f49..70b601de2 100644
--- a/stonesoup/regulariser/tests/test_particle.py
+++ b/stonesoup/regulariser/tests/test_particle.py
@@ -1,17 +1,37 @@
 import numpy as np
 import datetime
+import pytest

 from ...types.state import ParticleState
 from ...types.particle import Particle
 from ...types.hypothesis import SingleHypothesis
 from ...types.prediction import ParticleStatePrediction, ParticleMeasurementPrediction
 from ...models.measurement.linear import LinearGaussian
+from ...models.transition.linear import CombinedLinearGaussianTransitionModel, ConstantVelocity
 from ...types.detection import Detection
-from ...types.update import ParticleStateUpdate
+from ...types.update import Update, ParticleStateUpdate
 from ..particle import MCMCRegulariser


-def test_regulariser():
+@pytest.mark.parametrize(
+    "transition_model, model_flag",
+    [
+        (
+            CombinedLinearGaussianTransitionModel([ConstantVelocity([0.05])]),  # transition_model
+            False,  # model_flag
+        ),
+        (
+            CombinedLinearGaussianTransitionModel([ConstantVelocity([0.05])]),  # transition_model
+            True,  # model_flag
+        ),
+        (
+            None,  # transition_model
+            False,  # model_flag
+        )
+    ],
+    ids=["with_transition_model_init", "without_transition_model_init", "no_transition_model"]
+)
+def test_regulariser(transition_model, model_flag):
     particles = ParticleState(state_vector=None,
                               particle_list=[Particle(np.array([[10], [10]]),
                                                       1 / 9),
                                              Particle(np.array([[10], [20]]),
@@ -32,20 +52,38 @@ def test_regulariser():
                                                       1 / 9),
                                              ])
     timestamp = datetime.datetime.now()
-    prediction = ParticleStatePrediction(None, particle_list=particles.particle_list,
-                                         timestamp=timestamp)
-    meas_pred = ParticleMeasurementPrediction(None, particle_list=particles, timestamp=timestamp)
+    if transition_model is not None:
+        new_state_vector = transition_model.function(particles,
+                                                     noise=True,
+                                                     time_interval=datetime.timedelta(seconds=1))
+    else:
+        new_state_vector = particles.state_vector
+
+    prediction = ParticleStatePrediction(new_state_vector,
+                                         timestamp=timestamp,
+                                         transition_model=transition_model)
+
     measurement_model = LinearGaussian(ndim_state=2, mapping=(0, 1), noise_covar=np.eye(2))
-    measurement = [Detection(state_vector=np.array([[5], [7]]),
-                             timestamp=timestamp, measurement_model=measurement_model)]
-    state_update = ParticleStateUpdate(None, SingleHypothesis(prediction=prediction,
-                                                              measurement=measurement,
-                                                              measurement_prediction=meas_pred),
-                                       particle_list=particles.particle_list, timestamp=timestamp)
-    regulariser = MCMCRegulariser()
+    measurement = Detection(state_vector=np.array([[5], [7]]),
+                            timestamp=timestamp, measurement_model=measurement_model)
+    hypothesis = SingleHypothesis(prediction=prediction,
+                                  measurement=measurement,
+                                  measurement_prediction=None)
+
+    state_update = Update.from_state(state=prediction,
+                                     hypothesis=hypothesis,
+                                     timestamp=timestamp+datetime.timedelta(seconds=1))
+    # A PredictedParticleState is used here as the point at which the regulariser is implemented
+    # in the updater is before the updated state has taken the updated state type.
+    state_update.weight = np.array([1/6, 5/48, 5/48, 5/48, 5/48, 5/48, 5/48, 5/48, 5/48])
+
+    if model_flag:
+        regulariser = MCMCRegulariser()
+    else:
+        regulariser = MCMCRegulariser(transition_model=transition_model)

     # state check
-    new_particles = regulariser.regularise(particles, state_update, measurement)
+    new_particles = regulariser.regularise(prediction, state_update)
     # Check the shape of the new state vector
     assert new_particles.state_vector.shape == state_update.state_vector.shape
     # Check weights are unchanged
@@ -53,13 +91,15 @@
     # Check that the timestamp is the same
     assert new_particles.timestamp == state_update.timestamp

-    # list check
-    new_particles = regulariser.regularise(particles.particle_list, state_update.particle_list,
-                                           measurement)
-    # Check the shape of the new state vector
-    assert new_particles.state_vector.shape == state_update.state_vector.shape
-    # Check weights are unchanged
-    assert any(new_particles.weight == state_update.weight)
+    # list check3
+    with pytest.raises(TypeError) as e:
+        new_particles = regulariser.regularise(particles.particle_list,
+                                               state_update)
+    assert "Only ParticleState type is supported!" in str(e.value)
+    with pytest.raises(Exception) as e:
+        new_particles = regulariser.regularise(particles,
+                                               state_update.particle_list)
+    assert "Only ParticleState type is supported!" in str(e.value)


 def test_no_measurement():
@@ -92,7 +132,7 @@ def test_no_measurement():
                                        particle_list=particles.particle_list, timestamp=timestamp)
     regulariser = MCMCRegulariser()

-    new_particles = regulariser.regularise(particles, state_update, detections=None)
+    new_particles = regulariser.regularise(particles, state_update)

     # Check the shape of the new state vector
     assert new_particles.state_vector.shape == state_update.state_vector.shape
diff --git a/stonesoup/types/multihypothesis.py b/stonesoup/types/multihypothesis.py
index bf4e8589e..c862d9916 100644
--- a/stonesoup/types/multihypothesis.py
+++ b/stonesoup/types/multihypothesis.py
@@ -1,5 +1,5 @@
-from collections.abc import Sized, Iterable, Container
-from typing import Sequence
+from collections.abc import Sequence
+import typing

 from .detection import MissedDetection
 from .numeric import Probability
@@ -10,13 +10,13 @@
 from ..types.prediction import Prediction


-class MultipleHypothesis(Type, Sized, Iterable, Container):
+class MultipleHypothesis(Type, Sequence):
     """Multiple Hypothesis base type

     A Multiple Hypothesis is a container to store a collection of hypotheses.
     """

-    single_hypotheses: Sequence[SingleHypothesis] = Property(
+    single_hypotheses: typing.Sequence[SingleHypothesis] = Property(
         default=None,
         doc="The initial list of :class:`~.SingleHypothesis`. Default `None` "
             "which initialises with empty list.")
@@ -119,7 +119,7 @@ def get_missed_detection_probability(self):
         return None


-class MultipleCompositeHypothesis(Type, Sized, Iterable, Container):
+class MultipleCompositeHypothesis(Type, Sequence):
     """Multiple composite hypothesis type

     A Multiple Composite Hypothesis is a container to store a collection of composite hypotheses.
@@ -128,7 +128,7 @@ class MultipleCompositeHypothesis(Type, Sized, Iterable, Container):
     redefined.
     """

-    single_hypotheses: Sequence[CompositeHypothesis] = Property(
+    single_hypotheses: typing.Sequence[CompositeHypothesis] = Property(
         default=None,
         doc="The initial list of :class:`~.CompositeHypothesis`. Default `None` which initialises "
             "with empty list.")
diff --git a/stonesoup/updater/particle.py b/stonesoup/updater/particle.py
index 245b5da75..545b712a4 100644
--- a/stonesoup/updater/particle.py
+++ b/stonesoup/updater/particle.py
@@ -46,7 +46,12 @@ def update(self, hypothesis, **kwargs):
         : :class:`~.ParticleState`
             The state posterior
         """
-        predicted_state = copy.copy(hypothesis.prediction)
+
+        predicted_state = Update.from_state(
+            state=hypothesis.prediction,
+            hypothesis=hypothesis,
+            timestamp=hypothesis.prediction.timestamp
+        )

         if hypothesis.measurement.measurement_model is None:
             measurement_model = self.measurement_model
@@ -65,13 +70,12 @@ def update(self, hypothesis, **kwargs):
         if self.resampler is not None:
             predicted_state = self.resampler.resample(predicted_state)

-        return Update.from_state(
-            state=hypothesis.prediction,
-            state_vector=predicted_state.state_vector,
-            log_weight=predicted_state.log_weight,
-            hypothesis=hypothesis,
-            timestamp=hypothesis.measurement.timestamp,
-        )
+        if self.regulariser is not None:
+            prior = hypothesis.prediction.parent
+            predicted_state = self.regulariser.regularise(prior,
+                                                          predicted_state)
+
+        return predicted_state

     @lru_cache()
     def predict_measurement(self, state_prediction, measurement_model=None,
@@ -414,8 +418,12 @@ def update(self, hypotheses, **kwargs):

         # copy prediction
         prediction = hypotheses.single_hypotheses[0].prediction
-        updated_state = copy.copy(prediction)
-
+        # updated_state = copy.copy(prediction)
+        updated_state = Update.from_state(
+            state=prediction,
+            hypothesis=hypotheses,
+            timestamp=prediction.timestamp
+        )
         if any(hypotheses):
             detections = [single_hypothesis.measurement
                           for single_hypothesis in hypotheses.single_hypotheses]
@@ -463,16 +471,10 @@ def update(self, hypotheses, **kwargs):
         if any(hypotheses):
             # Regularisation
             if self.regulariser is not None:
-                regularised_parts = self.regulariser.regularise(updated_state.parent,
-                                                                updated_state,
-                                                                detections)
-                updated_state.state_vector = regularised_parts.state_vector
-
-        return Update.from_state(
-            updated_state,
-            timestamp=updated_state.timestamp,
-            hypothesis=hypotheses,
-        )
+                updated_state = self.regulariser.regularise(updated_state.parent,
+                                                            updated_state)
+
+        return updated_state

     @staticmethod
     def _log_space_product(A, B):
diff --git a/stonesoup/updater/tests/test_particle.py b/stonesoup/updater/tests/test_particle.py
index 4f9f421ab..1ed009eba 100644
--- a/stonesoup/updater/tests/test_particle.py
+++ b/stonesoup/updater/tests/test_particle.py
@@ -12,13 +12,14 @@
 from ...types.hypothesis import SingleHypothesis
 from ...types.multihypothesis import MultipleHypothesis
 from ...types.particle import Particle
+from ...types.state import ParticleState
 from ...types.prediction import (
     ParticleStatePrediction, ParticleMeasurementPrediction)
 from ...updater.particle import (
     ParticleUpdater, GromovFlowParticleUpdater,
     GromovFlowKalmanParticleUpdater, BernoulliParticleUpdater)
 from ...predictor.particle import BernoulliParticlePredictor
-from ...models.transition.linear import ConstantVelocity
+from ...models.transition.linear import ConstantVelocity, CombinedLinearGaussianTransitionModel
 from ...types.update import BernoulliParticleStateUpdate
 from ...regulariser.particle import MCMCRegulariser
 from ...sampler.particle import ParticleSampler
@@ -139,7 +140,7 @@ def test_bernoulli_particle():
     prediction = predictor.predict(prior, timestamp=new_timestamp)

     resampler = SystematicResampler()
-    regulariser = MCMCRegulariser()
+    regulariser = MCMCRegulariser(transition_model=cv)

     updater = BernoulliParticleUpdater(measurement_model=None,
                                        resampler=resampler,
@@ -170,3 +171,76 @@ def test_bernoulli_particle():
     assert update.hypothesis == hypotheses
     # Check that the existence probability is returned
     assert update.existence_probability is not None
+
+
+@pytest.mark.parametrize("transition_model, model_flag", [
+    (
+        CombinedLinearGaussianTransitionModel([ConstantVelocity([0.05])]),  # transition_model
+        False  # model_flag
+    ),
+    (
+        CombinedLinearGaussianTransitionModel([ConstantVelocity([0.05])]),  # transition_model
+        True  # model_flag
+    )
+    ], ids=["with_transition_model_init", "without_transition_model_init"]
+)
+def test_regularised_particle(transition_model, model_flag):
+
+    measurement_model = LinearGaussian(
+        ndim_state=2, mapping=[0], noise_covar=np.array([[10]]))
+
+    if model_flag:
+        updater = ParticleUpdater(regulariser=MCMCRegulariser(),
+                                  measurement_model=measurement_model)
+    else:
+        updater = ParticleUpdater(regulariser=MCMCRegulariser(transition_model=transition_model),
+                                  measurement_model=measurement_model)
+    # Measurement model
+    timestamp = datetime.datetime.now()
+    particles = [Particle([[10], [10]], 1 / 9),
+                 Particle([[10], [20]], 1 / 9),
+                 Particle([[10], [30]], 1 / 9),
+                 Particle([[20], [10]], 1 / 9),
+                 Particle([[20], [20]], 1 / 9),
+                 Particle([[20], [30]], 1 / 9),
+                 Particle([[30], [10]], 1 / 9),
+                 Particle([[30], [20]], 1 / 9),
+                 Particle([[30], [30]], 1 / 9),
+                 ]
+
+    particles = ParticleState(None, particle_list=particles, timestamp=timestamp)
+    predicted_state = transition_model.function(particles,
+                                                noise=True,
+                                                time_interval=datetime.timedelta(seconds=1))
+    if not model_flag:
+        prediction = ParticleStatePrediction(predicted_state,
+                                             weight=np.array([1/9]*9),
+                                             timestamp=timestamp,
+                                             parent=particles)
+    else:
+        prediction = ParticleStatePrediction(predicted_state,
+                                             weight=np.array([1 / 9] * 9),
+                                             timestamp=timestamp,
+                                             transition_model=transition_model,
+                                             parent=particles)
+
+    measurement = Detection([[40.0]], timestamp=timestamp, measurement_model=measurement_model)
+    eval_measurement_prediction = ParticleMeasurementPrediction(
+        StateVectors([prediction.state_vector[0, :]]), timestamp=timestamp)
+
+    measurement_prediction = updater.predict_measurement(prediction)
+
+    assert np.all(eval_measurement_prediction.state_vector == measurement_prediction.state_vector)
+    assert measurement_prediction.timestamp == timestamp
+
+    updated_state = updater.update(SingleHypothesis(
+        prediction, measurement, measurement_prediction))
+
+    # Don't know what the particles will exactly be due to randomness so check
+    # some obvious properties
+
+    assert np.all(weight == 1 / 9 for weight in updated_state.weight)
+    assert updated_state.timestamp == timestamp
+    assert updated_state.hypothesis.measurement_prediction == measurement_prediction
+    assert updated_state.hypothesis.prediction == prediction
+    assert updated_state.hypothesis.measurement == measurement
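
Usage sketch (illustrative, not part of the patch): the snippet below shows how the reworked regulariser is intended to be wired in after these changes, based on the updated tests above. MCMCRegulariser now carries an optional transition_model property, regularise() takes only (prior, posterior) and reads detections from the posterior's hypothesis, and ParticleUpdater.update() applies the regulariser itself using the prediction's parent as the prior. Import paths and constructor arguments mirror those used in the tests; the model and noise values are arbitrary examples.

    import numpy as np

    from stonesoup.models.transition.linear import (
        CombinedLinearGaussianTransitionModel, ConstantVelocity)
    from stonesoup.models.measurement.linear import LinearGaussian
    from stonesoup.predictor.particle import ParticlePredictor
    from stonesoup.regulariser.particle import MCMCRegulariser
    from stonesoup.resampler.particle import SystematicResampler
    from stonesoup.updater.particle import ParticleUpdater

    # Example models (values arbitrary)
    transition_model = CombinedLinearGaussianTransitionModel([ConstantVelocity([0.05])])
    measurement_model = LinearGaussian(ndim_state=2, mapping=[0], noise_covar=np.array([[10]]))

    predictor = ParticlePredictor(transition_model)
    # The regulariser prefers the transition_model recorded on the prediction (set by the
    # predictor); the property given here is only a fallback when that is absent.
    regulariser = MCMCRegulariser(transition_model=transition_model)
    updater = ParticleUpdater(measurement_model=measurement_model,
                              resampler=SystematicResampler(),
                              regulariser=regulariser)

    # prediction = predictor.predict(prior, timestamp=timestamp)
    # update = updater.update(SingleHypothesis(prediction, measurement))
    # update() now regularises internally with prediction.parent as the prior,
    # so callers no longer pass detections to regularise().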