makeMDPAgent.wppl
// Helper: assert that *object* is an object, then check that it has every
// property named in *listProperties*.
var hasProperties = function(object, listProperties) {
  assert.ok(_.isObject(object) && _.isArray(listProperties), 'fail hasProperties');
  return _.every(map(
    function(property) {
      return _.has(object, property);
    }, listProperties));
};
var makeMDPAgentOptimal = function(params, world) {
  // *params* should be an object containing *utility*, a utility function, and
  // *alpha*, which regulates the agent's softmax noise.
  map(function(s) {
    assert.ok(params.hasOwnProperty(s), 'makeMDPAgent args');
  }, ['utility', 'alpha']);
  var stateToActions = world.stateToActions;
  var transition = world.transition;
  var utility = params.utility;
  var alpha = params.alpha;
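  // Softmax policy: sample an action with probability proportional to
  // exp(alpha * expected utility), so higher alpha means less decision noise.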
  var act = dp.cache(
    function(state) {
      return Infer({
        method: 'enumerate'
      }, function() {
        var action = uniformDraw(stateToActions(state));
        var eu = expectedUtility(state, action);
        factor(alpha * eu);
        return action;
      });
    });
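  // Expected utility of taking *action* in *state*: the immediate utility plus
  // the expected utility of the next state under the agent's own softmax policy
  // (a recursive, Bellman-style backup that bottoms out at terminal states).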
  var expectedUtility = dp.cache(
    function(state, action) {
      var u = utility(state, action);
      if (state.terminateAfterAction) {
        return u;
      } else {
        return u + expectation(Infer({
          method: 'enumerate'
        }, function() {
          var nextState = transition(state, action);
          var nextAction = sample(act(nextState));
          return expectedUtility(nextState, nextAction);
        }));
      }
    });
  return {
    params,
    expectedUtility,
    act
  };
};
var makeMDPAgentHyperbolic = function(params, world) {
  assert.ok(hasProperties(params, ['utility', 'alpha', 'discount', 'sophisticatedOrNaive']),
            'makeMDPAgentHyperbolic params');
  var stateToActions = world.stateToActions;
  var transition = world.transition;
  var utility = params.utility;
  // we can specify a discount function so that our 'hyperbolic' agent can
  // actually be an exponential discounter (or some other kind of discounter)
  var paramsDiscountFunction = params.discountFunction;
  var discountFunction = (
    paramsDiscountFunction ||
    function(delay) {
      return 1 / (1 + params.discount * delay);
    });
  var isNaive = params.sophisticatedOrNaive === 'naive';
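  // Policy with an explicit *delay* argument: *delay* measures how far in the
  // (simulated) future this choice lies, which determines how strongly its
  // rewards are discounted.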
  var act = dp.cache(
    function(state, delay) {
      var delay = delay ? delay : 0; // make sure delay is never 'undefined'
      return Infer({
        method: 'enumerate'
      }, function() {
        var action = uniformDraw(stateToActions(state));
        var eu = expectedUtility(state, action, delay);
        factor(params.alpha * eu);
        return action;
      });
    });
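  // Discounted expected utility. When simulating its future self's choice, the
  // Naive agent passes the incremented delay (it assumes the future self shares
  // the current self's discounted perspective), while the Sophisticated agent
  // passes delay 0 (it correctly predicts the future self acts from its own "now").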
  var expectedUtility = dp.cache(
    function(state, action, delay) {
      var u = discountFunction(delay) * utility(state, action);
      assert.ok(!_.isUndefined(u),
                "utility undefined" + JSON.stringify([state, action, delay, utility(state, action)]));
      if (state.terminateAfterAction) {
        return u;
      } else {
        return u + expectation(Infer({
          method: 'enumerate'
        }, function() {
          var nextState = transition(state, action);
          var perceivedDelay = isNaive ? delay + 1 : 0;
          var nextAction = sample(act(nextState, perceivedDelay));
          return expectedUtility(nextState, nextAction, delay + 1);
        }));
      }
    });
  return {
    params,
    expectedUtility,
    act
  };
};
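// Decide which constructor to use: if *optimal* is not set explicitly, treat
// the agent as optimal exactly when no discounting-related parameters
// ('discount', 'discountFunction', 'sophisticatedOrNaive') are supplied.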
var isOptimalMDPAgent = function(agentParams) {
  var optimalProperties = function() {
    return !(_.has(agentParams, 'discount') ||
             _.has(agentParams, 'discountFunction') ||
             _.has(agentParams, 'sophisticatedOrNaive'));
  };
  return _.isUndefined(agentParams.optimal) ? optimalProperties() : agentParams.optimal;
};
var makeMDPAgent = function(params, world) {
  return (isOptimalMDPAgent(params) ?
          makeMDPAgentOptimal(params, world) :
          makeMDPAgentHyperbolic(params, world));
};
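
// ---------------------------------------------------------------------------
// Usage sketch (illustrative only, not part of the library). The *exampleWorld*,
// *exampleUtility*, and state objects below are made up for this demo; real
// callers pass a Gridworld-style *world* exposing *stateToActions* and
// *transition*. Uncomment and run in a script that loads this file (and the
// webppl-dp package) to try it:
//
// var exampleWorld = {
//   stateToActions: function(state) { return ['left', 'right']; },
//   transition: function(state, action) {
//     return { name: action, terminateAfterAction: true };
//   }
// };
// var exampleUtility = function(state, action) {
//   return state.name === 'right' ? 1 : 0;
// };
// var agent = makeMDPAgent({ utility: exampleUtility, alpha: 100 }, exampleWorld);
// var startState = { name: 'start', terminateAfterAction: false };
// agent.act(startState); // a distribution putting almost all mass on 'right'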