Made changes to model_chess.py to make self play work #80

Merged
merged 2 commits on Apr 22, 2019
310 changes: 310 additions & 0 deletions Untitled.ipynb

Large diffs are not rendered by default.

11 changes: 6 additions & 5 deletions src/chess_zero/agent/model_chess.py
@@ -65,7 +65,8 @@ def build(self):
         x = Conv2D(filters=mc.cnn_filter_num, kernel_size=mc.cnn_first_filter_size, padding="same",
                    data_format="channels_first", use_bias=False, kernel_regularizer=l2(mc.l2_reg),
                    name="input_conv-"+str(mc.cnn_first_filter_size)+"-"+str(mc.cnn_filter_num))(x)
-        x = BatchNormalization(axis=1, name="input_batchnorm")(x)
+        x = BatchNormalization(axis=-1,
+                               name="input_batchnorm")(x)
         x = Activation("relu", name="input_relu")(x)
 
         for i in range(mc.res_layer_num):
@@ -76,7 +77,7 @@ def build(self):
         # for policy output
         x = Conv2D(filters=2, kernel_size=1, data_format="channels_first", use_bias=False, kernel_regularizer=l2(mc.l2_reg),
                    name="policy_conv-1-2")(res_out)
-        x = BatchNormalization(axis=1, name="policy_batchnorm")(x)
+        x = BatchNormalization(axis=-1, name="policy_batchnorm")(x)
         x = Activation("relu", name="policy_relu")(x)
         x = Flatten(name="policy_flatten")(x)
         # no output for 'pass'
@@ -85,7 +86,7 @@ def build(self):
         # for value output
         x = Conv2D(filters=4, kernel_size=1, data_format="channels_first", use_bias=False, kernel_regularizer=l2(mc.l2_reg),
                    name="value_conv-1-4")(res_out)
-        x = BatchNormalization(axis=1, name="value_batchnorm")(x)
+        x = BatchNormalization(axis=-1, name="value_batchnorm")(x)
         x = Activation("relu",name="value_relu")(x)
         x = Flatten(name="value_flatten")(x)
         x = Dense(mc.value_fc_size, kernel_regularizer=l2(mc.l2_reg), activation="relu", name="value_dense")(x)
@@ -100,12 +101,12 @@ def _build_residual_block(self, x, index):
         x = Conv2D(filters=mc.cnn_filter_num, kernel_size=mc.cnn_filter_size, padding="same",
                    data_format="channels_first", use_bias=False, kernel_regularizer=l2(mc.l2_reg),
                    name=res_name+"_conv1-"+str(mc.cnn_filter_size)+"-"+str(mc.cnn_filter_num))(x)
-        x = BatchNormalization(axis=1, name=res_name+"_batchnorm1")(x)
+        x = BatchNormalization(axis=-1, name=res_name+"_batchnorm1")(x)
         x = Activation("relu",name=res_name+"_relu1")(x)
         x = Conv2D(filters=mc.cnn_filter_num, kernel_size=mc.cnn_filter_size, padding="same",
                    data_format="channels_first", use_bias=False, kernel_regularizer=l2(mc.l2_reg),
                    name=res_name+"_conv2-"+str(mc.cnn_filter_size)+"-"+str(mc.cnn_filter_num))(x)
-        x = BatchNormalization(axis=1, name="res"+str(index)+"_batchnorm2")(x)
+        x = BatchNormalization(axis=-1, name="res"+str(index)+"_batchnorm2")(x)
         x = Add(name=res_name+"_add")([in_x, x])
         x = Activation("relu", name=res_name+"_relu2")(x)
         return x
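For context on what the axis change does: the sketch below (my own, not part of this PR; it assumes tensorflow.keras is available, while the project may import standalone Keras) shows which dimension BatchNormalization normalizes for a channels-first tensor. The input shape is illustrative only, not taken from the repository.

# Minimal sketch, assuming tensorflow.keras; shapes are illustrative only.
from tensorflow.keras.layers import Input, BatchNormalization
from tensorflow.keras.models import Model

planes = Input(shape=(18, 8, 8))                 # (channels=18, height=8, width=8), channels first
bn_channel = BatchNormalization(axis=1)(planes)  # statistics per channel: 18 scale/shift pairs
bn_last = BatchNormalization(axis=-1)(planes)    # statistics over the last axis: 8 scale/shift pairs
Model(planes, [bn_channel, bn_last]).summary()   # compare the two batchnorm parameter counts

Comparing the parameter counts in the summary makes the difference between the two settings visible at a glance.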
20 changes: 20 additions & 0 deletions test3.pgn
@@ -0,0 +1,20 @@
[Event "?"]
[Site "?"]
[Date "2019.04.15"]
[Round "?"]
[White "current_model"]
[Black "current_model"]
[Result "0-1"]

1. c4 e5 2. b3 Nc6 3. c5 Nge7 4. Bb2 Ng6 5. b4 a5 6. Nc3 Be7 7. Nd5 Nf4 8. Nh3 Nd4 9. f3 Ng6 10. c6 bxc6 11. b5 Rb8 12. Ng1 Rg8 13. Nxe7 Rf8 14. Rc1 Qxe7 15. Bc3 Nxb5 16. Bxa5 Qh4+ 17. g3 Qd8 18. Nh3 Rh8 19. Rb1 Nd4 20. Rxb8 f5 21. Nf2 Ke7 22. Rb2 Ba6 23. Qb1 Ke6 24. Rg1 h5 25. f4 e4 26. Nd1 Nxf4 27. a3 h4 28. d3 d6 29. gxf4 Kf6 30. Bh3 exd3 31. e3 Nf3+ 32. Kf1 Ke6 33. Rxg7 Qf6 34. Rgg2 d2+ 35. Kf2 Ne1 36. Rb8 Nxg2 37. Qb4 Rxb8 38. Qxb8 Ne1 39. Qg8+ Kd7 40. Bc3 Qe6 41. Qxe6+ Kxe6 42. Bxd2 Nc2 43. Bg2 Nxa3 44. Bxc6 Nc4 45. Bc3 Ke7 46. e4 fxe4 47. Kg2 e3 48. Kf3 e2 49. Nf2 d5 50. Nd3 Kd6 51. Be8 c5 52. Ba4 d4 53. Be1 h3 54. Nc1 Nb2 55. Bb3 c4 56. Bc2 d3 57. Bb1 Bb7+ 58. Ke3 Kc5 59. f5 Nd1+ 60. Kd2 Be4 61. f6 c3# 0-1

[Event "?"]
[Site "?"]
[Date "2019.04.15"]
[Round "?"]
[White "current_model"]
[Black "current_model"]
[Result "1-0"]

1. Nc3 c6 2. Nh3 Qa5 3. Nb1 Qc5 4. e3 d5 5. Ng1 Bg4 6. Nf3 g5 7. b4 Kd8 8. c4 a5 9. a3 Qxe3+ 10. fxe3 Nf6 11. Kf2 Ne4+ 12. Ke1 Na6 13. Qa4 e6 14. d3 Bg7 15. Nd4 f5 16. Nc3 Bf3 17. gxf3 Re8 18. bxa5 Nxc3 19. Bh3 Rf8 20. Qc2 g4 21. Qd2 c5 22. Qf2 cxd4 23. Qg3 Nb4 24. Bb2 Nxd3+ 25. Kd2 b5 26. Qd6+ Ke8 27. cxb5 Kf7 28. Qd7+ Kg8 29. exd4 Rf7 30. Bxc3 h6 31. Qc7 Rxc7 32. b6 Rxc3 33. Kxc3 Kh8 34. Rhf1 Nf4 35. Kb4 Nd3+ 36. Kb5 1-0
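The two self-play games above can be replayed programmatically as a sanity check. Here is a small sketch (mine, not part of this PR) using the python-chess library, assuming test3.pgn is in the working directory:

# Replay every game in test3.pgn and verify each recorded move is legal.
import chess.pgn

with open("test3.pgn") as handle:
    while True:
        game = chess.pgn.read_game(handle)
        if game is None:                      # end of file
            break
        board = game.board()
        plies = 0
        for move in game.mainline_moves():
            assert move in board.legal_moves  # push() itself does not validate legality
            board.push(move)
            plies += 1
        print(game.headers["Result"], f"{plies} plies")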