Commit

temporarily comment out compilation
juliagsy committed May 18, 2023
1 parent 74e942b · commit aa04912
Showing 2 changed files with 26 additions and 17 deletions.
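
Both files make the same change: the ahead-of-time ic.compile call is kept only as a comment, and the loss function is instead wrapped in a plain Python lambda that runs eagerly. A minimal sketch of the pattern, using the names from optimize_policy.py:

    # compilation temporarily disabled:
    # compiled_loss_fn = ic.compile(
    #     lambda initial_state, pol_vs: loss_fn(env, initial_state, policy, pol_vs, steps)
    # )

    # eager fallback: call the uncompiled loss function directly
    compiled_loss_fn = lambda initial_state, pol_vs: loss_fn(
        env, initial_state, policy, pol_vs, steps
    )

The rest of the training loop is unchanged: train_step still receives compiled_loss_fn, which now simply points at the uncompiled lambda.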
22 changes: 14 additions & 8 deletions ivy_gym_demos/optimization/optimize_policy.py
@@ -1,6 +1,7 @@
 # global
 import ivy
-import ivy.compiler.compiler as ic
+
+# import ivy.compiler.compiler as ic
 
 import ivy_gym
 import argparse
@@ -65,8 +66,12 @@ def main(
     policy = Policy(in_size, ac_dim)
 
     # compile loss function
-    compiled_loss_fn = ic.compile(
-        lambda initial_state, pol_vs: loss_fn(env, initial_state, policy, pol_vs, steps)
+    # compiled_loss_fn = ic.compile(
+    #     lambda initial_state, pol_vs:
+    #     loss_fn(env, initial_state, policy, pol_vs, steps)
+    # )
+    compiled_loss_fn = lambda initial_state, pol_vs: loss_fn(
+        env, initial_state, policy, pol_vs, steps
     )
 
     # optimizer
@@ -86,9 +91,8 @@ def main(
         env.reset()
         if iteration == 0:
             print(
-                "\nCompiling loss function for {} environment steps... This may take a while...\n".format(
-                    steps
-                )
+                "\nCompiling loss function "
+                "for {} environment steps... This may take a while...\n".format(steps)
             )
         score = train_step(compiled_loss_fn, optimizer, env.get_state(), policy, f)
         if iteration == 0:
@@ -135,8 +139,10 @@ def main(
     fw = ivy.choose_random_backend(excluded=["numpy"])
     if fw == "numpy":
         raise Exception(
-            "Invalid framework selection. Numpy does not support auto-differentiation.\n"
-            "This demo involves gradient-based optimization, and so auto-diff is required.\n"
+            "Invalid framework selection. "
+            "Numpy does not support auto-differentiation.\n"
+            "This demo involves gradient-based optimization, "
+            "and so auto-diff is required.\n"
             "Please choose a different backend framework."
         )
     f = ivy.with_backend(backend=fw)
21 changes: 12 additions & 9 deletions ivy_gym_demos/optimization/optimize_trajectory.py
@@ -1,6 +1,7 @@
 # global
 import ivy
-import ivy.compiler.compiler as ic
+
+# import ivy.compiler.compiler as ic
 
 import ivy_gym
 import argparse
@@ -53,9 +54,10 @@ def main(
     logits = ivy.random_uniform(low=-2, high=2, shape=(steps, ac_dim))
 
     # compile loss function
-    compiled_loss_fn = ic.compile(
-        lambda initial_state, lgts: loss_fn(env, initial_state, lgts)
-    )
+    # compiled_loss_fn = ic.compile(
+    #     lambda initial_state, lgts: loss_fn(env, initial_state, lgts)
+    # )
+    compiled_loss_fn = lambda initial_state, lgts: loss_fn(env, initial_state, lgts)
 
     # optimizer
     optimizer = ivy.Adam(lr=lr)
@@ -74,9 +76,8 @@ def main(
         env.set_state(starting_state)
         if iteration == 0:
             print(
-                "\nCompiling loss function for {} environment steps... This may take a while...\n".format(
-                    steps
-                )
+                "\nCompiling loss function "
+                "for {} environment steps... This may take a while...\n".format(steps)
             )
         score, logits = train_step(compiled_loss_fn, optimizer, starting_state, logits)
         if iteration == 0:
@@ -124,8 +125,10 @@ def main(
     fw = ivy.choose_random_backend(excluded=["numpy"])
     if fw == "numpy":
         raise Exception(
-            "Invalid framework selection. Numpy does not support auto-differentiation.\n"
-            "This demo involves gradient-based optimization, and so auto-diff is required.\n"
+            "Invalid framework selection. "
+            "Numpy does not support auto-differentiation.\n"
+            "This demo involves gradient-based optimization, "
+            "and so auto-diff is required.\n"
             "Please choose a different backend framework."
        )
     f = ivy.with_backend(backend=fw)
