Skip to content

Commit

Permalink
Merge branch 'master' of ssh://github.com/covartech/prt
Browse files Browse the repository at this point in the history
  • Loading branch information
peterTorrione committed Dec 29, 2017
2 parents 81d278f + ca5ecdb commit 3064b83
Show file tree
Hide file tree
Showing 6 changed files with 66 additions and 10 deletions.
3 changes: 2 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
@@ -1 +1,2 @@
.DS_Store
.DS_Store
*.JPG
37 changes: 37 additions & 0 deletions engine/prtAlgorithm.m
Original file line number Diff line number Diff line change
Expand Up @@ -515,6 +515,25 @@ function plotAsClassifier(self)
error('prt:prtAlgorithm:plotAsClassifier','This prtAlgorithm cannot be plotted as a classifier');
end
end
function plotAsRegressor(self)
% plotAsRegressor(self)
%   Plot a trained algorithm as though it were a regressor - i.e.,
%   evaluate the algorithm over a grid of input values and visualize
%   the resulting regression curve/surface. Valid for algorithms
%   trained with data sets with 2 or fewer features (the underlying
%   prtRegress plot draws a 1-D line or a 2-D surface), and when the
%   *very last* action in the algorithm is a prtRegress.
%
% e.g.
%   ds = prtDataGenNoisySinc;
%   algo = prtPreProcZmuv + prtRegressLslr;
%   algo = train(algo,ds);
%   algo.plotAsRegressor;

if isPlottableAsRegressor(self)
plot(prtUtilRegressAlgorithmWrapper('trainedAlgorithm',self));
else
error('prt:prtAlgorithm:plotAsRegressor','This prtAlgorithm cannot be plotted as a regressor');
end
end
function tf = isPlottableAsClassifier(self)

tf = false;
Expand All @@ -533,6 +552,24 @@ function plotAsClassifier(self)
end
end
end
function tf = isPlottableAsRegressor(self)
% isPlottableAsRegressor(self)
%   Return true when this algorithm can be visualized with
%   plotAsRegressor: it has been trained (dataSetSummary is set),
%   was trained on 2 or fewer features, has exactly one output
%   node fed by exactly one action, and that final action is a
%   prtRegress.

tf = false;
if isempty(self.dataSetSummary)
    % Untrained algorithms have no dataSetSummary and cannot be plotted.
    return
end

% prtRegress/plot only supports 1 feature (line plot) or 2 features
% (surface); 3-feature data sets would pass an "<= 3" gate here and
% then error inside the plot call, so limit the check to 2.
if self.dataSetSummary.nFeatures <= 2
    if sum(self.outputNodes)==1
        % Row of the connectivity matrix feeding the single output node.
        lastNodes = self.connectivityMatrix(find(self.outputNodes,1,'first'),:);
        if sum(lastNodes)==1
            % NOTE(review): node indices appear to be offset by one
            % relative to actionCell (first node is the input); this
            % mirrors the indexing used by isPlottableAsClassifier.
            if isa(self.actionCell{find(lastNodes,1,'first')-1},'prtRegress')
                tf = true;
            end
        end
    end
end
end


function [optimizedAlgorithm,performance] = optimize(Obj,DataSet,objFn,tag,parameterName,parameterValues)
Expand Down
4 changes: 3 additions & 1 deletion plot/util/prtPlotUtilAlgorithmGui.m
Original file line number Diff line number Diff line change
Expand Up @@ -123,6 +123,8 @@ function patchButtonDownFunction(hObject, eventData, blockIndex) %#ok<INUSL>
plot(BlockObject);
elseif (blockIndex == length(Layout.Blocks)) && algo.isPlottableAsClassifier
plotAsClassifier(algo);
elseif (blockIndex == length(Layout.Blocks)) && algo.isPlottableAsRegressor
plotAsRegressor(algo);
end

otherwise % case 'normal'
Expand Down Expand Up @@ -192,7 +194,7 @@ function placeBlockFunction(hObject, eventData, BlockObject, position, iBlock, b
set(Layout.Blocks(iBlock).handle,'lineWidth',4);
end

if (iBlock == Layout.nBlocks) && algo.isPlottableAsClassifier
if (iBlock == Layout.nBlocks) && (algo.isPlottableAsClassifier || algo.isPlottableAsRegressor)
set(Layout.Blocks(iBlock).handle,'lineWidth',4);
end

Expand Down
18 changes: 16 additions & 2 deletions regress/prtRegress.m
Original file line number Diff line number Diff line change
Expand Up @@ -41,13 +41,27 @@


assert(Obj.isTrained,'Regressor must be trained before it can be plotted.');
assert(Obj.dataSetSummary.nFeatures < 2, 'nFeatures in the training dataset must be 1');

[OutputDataSet, linGrid] = runRegressorOnGrid(Obj);

colors = Obj.plotOptions.colorsFunction(Obj.dataSetSummary.nTargetDimensions);
lineWidth = Obj.plotOptions.lineWidth;
HandleStructure.regressorPlotHandle = plot(linGrid,OutputDataSet.getObservations,'color',colors(1,:),'lineWidth',lineWidth);

switch Obj.dataSetSummary.nFeatures
case 1
HandleStructure.regressorPlotHandle = plot(linGrid,OutputDataSet.getObservations,'color',colors(1,:),'lineWidth',lineWidth);
case 2
sz = ones(1,Obj.dataSetSummary.nFeatures)*Obj.plotOptions.nSamplesPerDim(Obj.dataSetSummary.nFeatures);
HandleStructure.regressorPlotHandle = surf(...
reshape(linGrid(:,1),sz),...
reshape(linGrid(:,2),sz),...
reshape(OutputDataSet.getObservations,sz));
HandleStructure.regressorPlotHandle.EdgeColor = colors(1,:);
HandleStructure.regressorPlotHandle.FaceColor = 'none';
HandleStructure.regressorPlotHandle.LineWidth = lineWidth;
otherwise
error('nFeatures in the training dataset must be 1 or 2');
end

holdState = get(gca,'nextPlot');
if ~isempty(Obj.dataSet)
Expand Down
10 changes: 4 additions & 6 deletions regress/prtRegressBayesianLinear.m
Original file line number Diff line number Diff line change
Expand Up @@ -15,12 +15,11 @@
includeBias = true;

priorWeightsMean = 0; % If scalar will be initialized by replication
priorWeightsPrecision = eps; % If scalar will be
priorWeightsPrecision = eps; % If scalar will be initialized by replication

priorPrecisionA = 0.1;
priorPrecisionB = 0.1;


mu
lambda
a
Expand All @@ -36,10 +35,11 @@

methods

% Allow for string, value pairs
function self = prtRegressBayesianLinear(varargin)
% prtRegressBayesianLinear  Construct a Bayesian linear regressor.
%   Accepts string/value pairs which are assigned to the matching
%   object properties via prtUtilAssignStringValuePairs.
self = prtUtilAssignStringValuePairs(self,varargin{:});
end

end

methods (Access = protected, Hidden = true)
Expand All @@ -65,9 +65,7 @@
end
y = ds.Y;

XtX = X'*X;

self.lambda = (XtX + lambda0);
self.lambda = (X'*X + lambda0);
self.mu = (mu0*lambda0+ y'*X)/(self.lambda);

self.a = a0 + size(X,1)/2;
Expand Down
4 changes: 4 additions & 0 deletions regress/prtRegressGp.m
Original file line number Diff line number Diff line change
Expand Up @@ -119,6 +119,7 @@
if ~isempty(self.meanRegressor)
dataSetEst = self.meanRegressor.run(dataSet);
else
% NOTE(review): unresolved merge conflict markers (Updated upstream vs.
% Stashed changes) were committed here. Resolved in favor of the upstream
% version, which preserves dataSet.Y; the stashed variant discarded the
% targets by passing zeros for Y.
% The mean regressor will output dataSet.X of the same size
% as dataSet.Y, with all zeros! dataSetEst.Y should be
% dataSet.Y. Note - at test-time, there may not be
Expand All @@ -127,6 +128,9 @@
% (size(dataSet.Y,2)) is 1, even if there werent any
% targets provided (dataSet.Y is empty)
dataSetEst = prtDataSetRegress(zeros(size(dataSet.X,1),1),dataSet.Y);
end
dataSetTargetResiduals = dataSet;
if nargout > 1
Expand Down

0 comments on commit 3064b83

Please sign in to comment.