diff --git a/.idea/Project_AI.iml b/.idea/Project_AI.iml
index 94259c3..dd7b1a8 100644
--- a/.idea/Project_AI.iml
+++ b/.idea/Project_AI.iml
@@ -4,13 +4,20 @@
-
+
+
+
+
diff --git a/.idea/misc.xml b/.idea/misc.xml
index c3334de..ee9b51e 100644
--- a/.idea/misc.xml
+++ b/.idea/misc.xml
@@ -1,4 +1,4 @@
-
+
\ No newline at end of file
diff --git a/DQN_mulit_tensorflow_2/DQNAgent_ddqn.py b/DQN_mulit_tensorflow_2/DQNAgent_ddqn.py
new file mode 100644
index 0000000..fa87d8a
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/DQNAgent_ddqn.py
@@ -0,0 +1,150 @@
+import tensorflow.keras as keras
+from keras.layers import Dense, Flatten, Conv2D
+from keras import Sequential
+from tensorflow.keras.optimizers import Adam
+from pommerman.agents import BaseAgent
+from pommerman.agents.simple_agent import SimpleAgent
+from pommerman import characters
+
+from gym.spaces import Discrete
+
+import constants
+from replay_memory import replay_Memory
+import numpy as np
+import tensorflow as tf
+
+
+class DQNAgent(BaseAgent):
+ """DQN second try with keras"""
+
+ def __init__(self, character=characters.Bomber):
+ super(DQNAgent, self).__init__(character)
+ self.baseAgent = SimpleAgent()
+
+ self.training_model = self.new_model()
+ self.trained_model = self.new_model()
+
+ self.trained_model.set_weights(self.training_model.get_weights())
+ #self.load_weights()
+
+ self.epsilon = constants.epsilon
+ self.min_epsilon = constants.MIN_EPSILON
+ self.eps_decay = constants.EPSILON_DECAY
+ self.buffer = replay_Memory(constants.MAX_BUFFER_SIZE)
+ self.update_counter = 0
+ self.V = keras.layers.Dense(1,activation=None)
+ self.A = keras.layers.Dense(6,activation=None)
+
+ def new_model(self):
+
+ model = Sequential()
+ input_shape = (constants.MINIBATCH_SIZE, 18, 11, 11,)
+ model.add(Conv2D(256, 3, (1, 1), input_shape=input_shape[1:], activation="relu", data_format="channels_first",
+ padding="same"))
+ # print(model.output_shape)
+ model.add(Conv2D(256, 3, (1, 1), activation="relu", data_format="channels_first", padding="same"))
+ # print(model.output_shape)
+ model.add(Conv2D(256, 3, (1, 1), activation="relu", data_format="channels_first", padding="same"))
+ # print(model.output_shape)
+
+ model.add(Flatten())
+ model.add(Dense(128, activation="relu"))
+ model.add(Dense(64, activation='linear'))
+ model.compile(loss="mse", optimizer=Adam(learning_rate=0.0001), metrics=['accuracy'])
+ model.summary()
+ return model
+
+ def dueling(self, state):
+ V = self.V(state)
+ # advantage value
+ A = self.A(state)
+ mean = tf.math.reduce_mean(A, axis=1, keepdims=True)
+ # output
+ output = V + (A - mean)
+ return output
+
+ def advantage(self, state):
+ A = self.A(state)
+ return A
+
+ def act(self, obs, action_space):
+ return self.baseAgent.act(obs, Discrete(6))
+
+ def train(self):
+
+ if self.buffer.size() < constants.MIN_REPLAY_MEMORY_SIZE:
+ return
+
+ #dueling
+
+ current_states, action, reward, new_states, done = self.buffer.sample_element(constants.MINIBATCH_SIZE)
+
+ # 在样品中取 current_states, 从模型中获取Q值
+ current_states_q = self.dueling(self.training_model.predict(current_states))
+ double_new_q = self.dueling(self.training_model.predict(new_states))
+ # 在样品中取 next_state, 从旧网络中获取Q值
+ new_states_q = self.dueling(self.trained_model.predict(new_states))
+
+ # X为state,Y为所预测的action
+ states = []
+ actions = []
+
+ for index in range(constants.MINIBATCH_SIZE):
+
+ if done[index] != True:
+ # 更新Q值
+ #new_state_q = reward[index] + constants.DISCOUNT * np.max(new_states_q[index])
+ double_new_q = reward[index] + constants.DISCOUNT * new_states_q[index][np.argmax(double_new_q[index])]
+ else:
+ #new_state_q = reward[index]
+ double_new_q = reward[index]
+ # 在给定的states下更新Q值
+ current_better_q = current_states_q[index]
+ current_better_q[action[index]] = double_new_q
+
+ # 添加训练数据
+ states.append(current_states[index])
+ actions.append(current_better_q)
+
+ # 开始训练
+ # 使用专用的数据api,但更慢.
+ # states = tf.reshape(states, (-1, 12, 8, 8))
+ # train_dataset = tf.data.Dataset.from_tensor_slices((states, actions))
+ # self.training_model.fit(train_dataset, verbose=0, shuffle=False)
+
+ self.training_model.fit(np.array(states), np.array(actions), batch_size=constants.MINIBATCH_SIZE, verbose=0,
+ shuffle=False)
+
+ # 更新网络更新计数器
+ if done:
+ self.update_counter += 1
+
+ # 网络更新计数器达到上限,更新网络
+ if self.update_counter > constants.UPDATE_EVERY:
+ self.trained_model.set_weights(self.training_model.get_weights())
+ self.update_counter = 0
+
+ def action_choose(self, state):
+ state_reshape = tf.reshape(state, (-1, 18, 11, 11))
+ q_table = self.advantage(self.training_model.predict(state_reshape))
+ return q_table
+ # epsilon衰减
+
+ def epsilon_decay(self):
+ self.epsilon = self.epsilon * self.eps_decay if self.epsilon > self.min_epsilon else self.epsilon
+
+ def save_weights(self, numOfEpisode):
+
+ # 完成训练后存档参数
+ if numOfEpisode % 200 == 0:
+ self.training_model.save_weights(('./checkpoints/FFA{:}/FFA{:}'.format(numOfEpisode, numOfEpisode)))
+ # self.training_model.save_weights(('./checkpoints/FFA-test-1/FFA-test-1'.format(numOfEpisode, numOfEpisode)))
+ print("weights saved!")
+
+ def load_weights(self):
+ self.training_model.load_weights('./checkpoints/FFA2200/FFA2200')
+ self.trained_model.load_weights('./checkpoints/FFA2200/FFA2200')
+ print("weights loaded!")
+
+ def save_model(self):
+ self.training_model.save("./second_model")
diff --git a/DQN_mulit_tensorflow_2/DQNAgent_modified.py b/DQN_mulit_tensorflow_2/DQNAgent_modified.py
new file mode 100644
index 0000000..116c7b9
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/DQNAgent_modified.py
@@ -0,0 +1,133 @@
+from keras.layers import Dense, Flatten, Conv2D
+from keras import Sequential
+from tensorflow.keras.optimizers import Adam
+from pommerman.agents import BaseAgent
+from pommerman.agents.simple_agent import SimpleAgent
+from pommerman import characters
+
+from gym.spaces import Discrete
+
+from DQN_mulit_tensorflow_2 import constants
+from replay_memory import replay_Memory
+import numpy as np
+import tensorflow as tf
+
+
+class DQNAgent(BaseAgent):
+ """DQN second try with keras"""
+
+ def __init__(self, character=characters.Bomber):
+ super(DQNAgent, self).__init__(character)
+ self.baseAgent = SimpleAgent()
+
+ self.training_model = self.new_model()
+ self.trained_model = self.new_model()
+
+ self.trained_model.set_weights(self.training_model.get_weights())
+ self.load_weights()
+
+ self.epsilon = constants.epsilon
+ self.min_epsilon = constants.MIN_EPSILON
+ self.eps_decay = constants.EPSILON_DECAY
+ self.buffer = replay_Memory(constants.MAX_BUFFER_SIZE)
+ self.update_counter = 0
+
+ def new_model(self):
+
+ model = Sequential()
+ input_shape = (constants.MINIBATCH_SIZE, 18, 11, 11,)
+ model.add(Conv2D(256, 3, (1, 1), input_shape=input_shape[1:], activation="relu", data_format="channels_first",
+ padding="same"))
+ # print(model.output_shape)
+ model.add(Conv2D(256, 3, (1, 1), activation="relu", data_format="channels_first", padding="same"))
+ # print(model.output_shape)
+ model.add(Conv2D(256, 3, (1, 1), activation="relu", data_format="channels_first", padding="same"))
+ # print(model.output_shape)
+
+ model.add(Flatten())
+ model.add(Dense(128, activation="relu"))
+ model.add(Dense(6, activation='linear'))
+ model.compile(loss="mse", optimizer=Adam(learning_rate=0.0001), metrics=['accuracy'])
+ model.summary()
+ return model
+
+
+
+ def act(self, obs, action_space):
+ return self.baseAgent.act(obs, Discrete(6))
+
+ def train(self):
+
+ if self.buffer.size() < constants.MIN_REPLAY_MEMORY_SIZE:
+ return
+
+ current_states, action, reward, new_states, done = self.buffer.sample_element(constants.MINIBATCH_SIZE)
+
+ # 在样品中取 current_states, 从模型中获取Q值
+ current_states_q = self.training_model.predict(current_states)
+ double_new_q = self.training_model.predict(new_states)
+ # 在样品中取 next_state, 从旧网络中获取Q值
+ new_states_q = self.trained_model.predict(new_states)
+
+ # X为state,Y为所预测的action
+ states = []
+ actions = []
+
+ for index in range(constants.MINIBATCH_SIZE):
+
+ if done[index] != True:
+ # 更新Q值
+ new_state_q = reward[index] + constants.DISCOUNT * np.max(new_states_q[index])
+ double_new_q = reward[index] + constants.DISCOUNT * new_states_q[index][np.argmax(double_new_q[index])]
+ else:
+ new_state_q = reward[index]
+ # 在给定的states下更新Q值
+ current_better_q = current_states_q[index]
+ current_better_q[action[index]] = new_state_q
+
+ # 添加训练数据
+ states.append(current_states[index])
+ actions.append(current_better_q)
+
+ # 开始训练
+ # 使用专用的数据api,但更慢.
+ # states = tf.reshape(states, (-1, 12, 8, 8))
+ # train_dataset = tf.data.Dataset.from_tensor_slices((states, actions))
+ # self.training_model.fit(train_dataset, verbose=0, shuffle=False)
+
+ self.training_model.fit(np.array(states), np.array(actions), batch_size=constants.MINIBATCH_SIZE, verbose=0,
+ shuffle=False)
+
+ # 更新网络更新计数器
+ if done:
+ self.update_counter += 1
+
+ # 网络更新计数器达到上限,更新网络
+ if self.update_counter > constants.UPDATE_EVERY:
+ self.trained_model.set_weights(self.training_model.get_weights())
+ self.update_counter = 0
+
+ def action_choose(self, state):
+ state_reshape = tf.reshape(state, (-1, 18, 11, 11))
+ q_table = self.training_model.predict(state_reshape)
+ return q_table
+ # epsilon衰减
+
+ def epsilon_decay(self):
+ self.epsilon = self.epsilon * self.eps_decay if self.epsilon > self.min_epsilon else self.epsilon
+
+ def save_weights(self, numOfEpisode):
+
+ # 完成训练后存档参数
+ if numOfEpisode % 200 == 0:
+ self.training_model.save_weights(('./checkpoints/FFA{:}/FFA{:}'.format(numOfEpisode, numOfEpisode)))
+ # self.training_model.save_weights(('./checkpoints/FFA-test-1/FFA-test-1'.format(numOfEpisode, numOfEpisode)))
+ print("weights saved!")
+
+ def load_weights(self):
+ self.training_model.load_weights('./checkpoints/FFA2200/FFA2200')
+ self.trained_model.load_weights('./checkpoints/FFA2200/FFA2200')
+ print("weights loaded!")
+
+ def save_model(self):
+ self.training_model.save("./second_model")
diff --git a/DQN_mulit_tensorflow_2/DQNAgent_modified_nhwc.py b/DQN_mulit_tensorflow_2/DQNAgent_modified_nhwc.py
new file mode 100644
index 0000000..808c6e2
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/DQNAgent_modified_nhwc.py
@@ -0,0 +1,137 @@
+from keras.layers import Dense, Flatten, Conv2D
+from keras import Sequential
+from tensorflow.keras.optimizers import Adam
+from pommerman.agents import BaseAgent
+from pommerman.agents.simple_agent import SimpleAgent
+from pommerman import characters
+
+from gym.spaces import Discrete
+
+from DQN_mulit_tensorflow_2 import constants
+from replay_memory import replay_Memory
+import numpy as np
+import tensorflow as tf
+import time
+import os
+
+
+class DQNAgent(BaseAgent):
+ """DQN second try with keras"""
+
+ def __init__(self, character=characters.Bomber):
+ super(DQNAgent, self).__init__(character)
+ self.baseAgent = SimpleAgent()
+ #self.cuda = os.environ["CUDA_VISIBLE_DEVICES"] = "0"
+ self.training_model = self.new_model()
+ self.trained_model = self.new_model()
+
+ self.trained_model.set_weights(self.training_model.get_weights())
+ # self.load_weights()
+
+ self.epsilon = constants.epsilon
+ self.min_epsilon = constants.MIN_EPSILON
+ self.eps_decay = constants.EPSILON_DECAY
+ self.buffer = replay_Memory(constants.MAX_BUFFER_SIZE)
+ self.update_counter = 0
+
+ def new_model(self):
+
+ model = Sequential()
+ input_shape = (constants.MINIBATCH_SIZE, 11, 11, 18)
+ model.add(Conv2D(256, 3, (1, 1), input_shape=input_shape[1:], activation="relu", padding="same"))
+ # print(model.output_shape)
+ model.add(Conv2D(256, 3, (1, 1), activation="relu", padding="same"))
+ # print(model.output_shape)
+ model.add(Conv2D(256, 3, (1, 1), activation="relu", padding="same"))
+ # print(model.output_shape)
+
+ model.add(Flatten())
+ model.add(Dense(128, activation="relu"))
+ model.add(Dense(6, activation='linear'))
+ model.compile(loss="mse", optimizer=Adam(learning_rate=0.0001), metrics=['accuracy'])
+ model.summary()
+ return model
+
+ def act(self, obs, action_space):
+ return self.baseAgent.act(obs, Discrete(6))
+
+ def train(self):
+
+ if self.buffer.size() < constants.MIN_REPLAY_MEMORY_SIZE:
+ return
+
+ current_states, action, reward, new_states, done = self.buffer.sample_element(constants.MINIBATCH_SIZE)
+ # start_time = time.time()
+ current_states_shaped = tf.transpose(current_states, [0, 2, 3, 1])
+ new_states_shaped = tf.transpose(new_states, [0, 2, 3, 1])
+ # end_time = time.time()
+ # print("transform time: ",start_time-end_time)
+ # 在样品中取 current_states, 从模型中获取Q值
+ current_states_q = self.training_model.predict(current_states_shaped)
+
+ # 在样品中取 next_state, 从旧网络中获取Q值
+ new_states_q = self.trained_model.predict(new_states_shaped)
+
+ # X为state,Y为所预测的action
+ states = []
+ actions = []
+
+ for index in range(constants.MINIBATCH_SIZE):
+
+ if done[index] != True:
+ # 更新Q值
+ new_state_q = reward[index] + constants.DISCOUNT * np.max(new_states_q[index])
+ else:
+ new_state_q = reward[index]
+ # 在给定的states下更新Q值
+ current_better_q = current_states_q[index]
+ current_better_q[action[index]] = new_state_q
+
+ # 添加训练数据
+ states.append(current_states[index])
+ # states.append(tf.reshape(current_state,(-1,14,11,11)))
+ actions.append(current_better_q)
+
+ # 开始训练
+ # 使用专用的数据api,但更慢.
+ # states = tf.reshape(states, (-1, 12, 8, 8))
+ # train_dataset = tf.data.Dataset.from_tensor_slices((states, actions))
+ # self.training_model.fit(train_dataset, verbose=0, shuffle=False)
+ states = tf.transpose(states, [0, 2, 3, 1])
+ self.training_model.fit(np.array(states), np.array(actions), batch_size=constants.MINIBATCH_SIZE, verbose=0,
+ shuffle=False)
+
+ # 更新网络更新计数器
+ if done:
+ self.update_counter += 1
+
+ # 网络更新计数器达到上限,更新网络
+ if self.update_counter > constants.UPDATE_EVERY:
+ self.trained_model.set_weights(self.training_model.get_weights())
+ self.update_counter = 0
+
+ def action_choose(self, state):
+ # start_time = time.time()
+ state_reshape = tf.reshape(state, (-1, 11, 11, 18))
+ q_table = self.training_model.predict(state_reshape)
+ # end_time = time.time()
+ # print("action time: ", start_time - end_time)
+ return q_table
+ # epsilon衰减
+
+ def epsilon_decay(self):
+ self.epsilon = self.epsilon * self.eps_decay if self.epsilon > self.min_epsilon else self.epsilon
+
+ def save_weights(self, numOfEpisode):
+
+ # 完成训练后存档参数
+ if numOfEpisode % 200 == 0:
+ self.training_model.save_weights(('./checkpoints/FFA{:}/FFA{:}'.format(numOfEpisode, numOfEpisode)))
+ # self.training_model.save_weights(('./checkpoints/FFA-test-1/FFA-test-1'.format(numOfEpisode, numOfEpisode)))
+ print("weights saved!")
+
+ def load_weights(self):
+ self.training_model.load_weights('./checkpoints/FFA2200/FFA2200')
+ self.trained_model.load_weights('./checkpoints/FFA2200/FFA2200')
+ print("weights loaded!")
+
diff --git a/DQN_mulit_tensorflow_2/__pycache__/DQNAgent_ddqn.cpython-39.pyc b/DQN_mulit_tensorflow_2/__pycache__/DQNAgent_ddqn.cpython-39.pyc
new file mode 100644
index 0000000..38478e1
Binary files /dev/null and b/DQN_mulit_tensorflow_2/__pycache__/DQNAgent_ddqn.cpython-39.pyc differ
diff --git a/DQN_mulit_tensorflow_2/__pycache__/DQNAgent_modified_nhwc.cpython-39.pyc b/DQN_mulit_tensorflow_2/__pycache__/DQNAgent_modified_nhwc.cpython-39.pyc
new file mode 100644
index 0000000..2a0c14a
Binary files /dev/null and b/DQN_mulit_tensorflow_2/__pycache__/DQNAgent_modified_nhwc.cpython-39.pyc differ
diff --git a/DQN_mulit_tensorflow_2/__pycache__/constants.cpython-39.pyc b/DQN_mulit_tensorflow_2/__pycache__/constants.cpython-39.pyc
new file mode 100644
index 0000000..ca71b96
Binary files /dev/null and b/DQN_mulit_tensorflow_2/__pycache__/constants.cpython-39.pyc differ
diff --git a/DQN_mulit_tensorflow_2/__pycache__/replay_memory.cpython-39.pyc b/DQN_mulit_tensorflow_2/__pycache__/replay_memory.cpython-39.pyc
new file mode 100644
index 0000000..ac033a4
Binary files /dev/null and b/DQN_mulit_tensorflow_2/__pycache__/replay_memory.cpython-39.pyc differ
diff --git a/DQN_mulit_tensorflow_2/__pycache__/utility.cpython-39.pyc b/DQN_mulit_tensorflow_2/__pycache__/utility.cpython-39.pyc
new file mode 100644
index 0000000..19d2b43
Binary files /dev/null and b/DQN_mulit_tensorflow_2/__pycache__/utility.cpython-39.pyc differ
diff --git a/DQN_mulit_tensorflow_2/backup/1/FFA1000/FFA1000.data-00000-of-00001 b/DQN_mulit_tensorflow_2/backup/1/FFA1000/FFA1000.data-00000-of-00001
new file mode 100644
index 0000000..66998f9
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/1/FFA1000/FFA1000.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/backup/1/FFA1000/FFA1000.index b/DQN_mulit_tensorflow_2/backup/1/FFA1000/FFA1000.index
new file mode 100644
index 0000000..e01b445
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/1/FFA1000/FFA1000.index differ
diff --git a/DQN_mulit_tensorflow_2/backup/1/FFA1000/checkpoint b/DQN_mulit_tensorflow_2/backup/1/FFA1000/checkpoint
new file mode 100644
index 0000000..29be90e
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/1/FFA1000/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "FFA1000"
+all_model_checkpoint_paths: "FFA1000"
diff --git a/DQN_mulit_tensorflow_2/backup/1/FFA1500/FFA1500.data-00000-of-00001 b/DQN_mulit_tensorflow_2/backup/1/FFA1500/FFA1500.data-00000-of-00001
new file mode 100644
index 0000000..7fe8e47
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/1/FFA1500/FFA1500.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/backup/1/FFA1500/FFA1500.index b/DQN_mulit_tensorflow_2/backup/1/FFA1500/FFA1500.index
new file mode 100644
index 0000000..960df76
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/1/FFA1500/FFA1500.index differ
diff --git a/DQN_mulit_tensorflow_2/backup/1/FFA1500/checkpoint b/DQN_mulit_tensorflow_2/backup/1/FFA1500/checkpoint
new file mode 100644
index 0000000..b7b6bee
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/1/FFA1500/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "FFA1500"
+all_model_checkpoint_paths: "FFA1500"
diff --git a/DQN_mulit_tensorflow_2/backup/1/FFA2000/FFA2000.data-00000-of-00001 b/DQN_mulit_tensorflow_2/backup/1/FFA2000/FFA2000.data-00000-of-00001
new file mode 100644
index 0000000..08a4e0f
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/1/FFA2000/FFA2000.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/backup/1/FFA2000/FFA2000.index b/DQN_mulit_tensorflow_2/backup/1/FFA2000/FFA2000.index
new file mode 100644
index 0000000..c81c2de
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/1/FFA2000/FFA2000.index differ
diff --git a/DQN_mulit_tensorflow_2/backup/1/FFA2000/checkpoint b/DQN_mulit_tensorflow_2/backup/1/FFA2000/checkpoint
new file mode 100644
index 0000000..1dcab0c
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/1/FFA2000/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "FFA2000"
+all_model_checkpoint_paths: "FFA2000"
diff --git a/DQN_mulit_tensorflow_2/backup/1/FFA2500/FFA2500.data-00000-of-00001 b/DQN_mulit_tensorflow_2/backup/1/FFA2500/FFA2500.data-00000-of-00001
new file mode 100644
index 0000000..26cf0a6
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/1/FFA2500/FFA2500.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/backup/1/FFA2500/FFA2500.index b/DQN_mulit_tensorflow_2/backup/1/FFA2500/FFA2500.index
new file mode 100644
index 0000000..ddc6d38
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/1/FFA2500/FFA2500.index differ
diff --git a/DQN_mulit_tensorflow_2/backup/1/FFA2500/checkpoint b/DQN_mulit_tensorflow_2/backup/1/FFA2500/checkpoint
new file mode 100644
index 0000000..e1df453
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/1/FFA2500/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "FFA2500"
+all_model_checkpoint_paths: "FFA2500"
diff --git a/DQN_mulit_tensorflow_2/backup/1/FFA3000/FFA3000.data-00000-of-00001 b/DQN_mulit_tensorflow_2/backup/1/FFA3000/FFA3000.data-00000-of-00001
new file mode 100644
index 0000000..f5101db
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/1/FFA3000/FFA3000.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/backup/1/FFA3000/FFA3000.index b/DQN_mulit_tensorflow_2/backup/1/FFA3000/FFA3000.index
new file mode 100644
index 0000000..6683fa5
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/1/FFA3000/FFA3000.index differ
diff --git a/DQN_mulit_tensorflow_2/backup/1/FFA3000/checkpoint b/DQN_mulit_tensorflow_2/backup/1/FFA3000/checkpoint
new file mode 100644
index 0000000..05be1ca
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/1/FFA3000/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "FFA3000"
+all_model_checkpoint_paths: "FFA3000"
diff --git a/DQN_mulit_tensorflow_2/backup/1/FFA3500/FFA3500.data-00000-of-00001 b/DQN_mulit_tensorflow_2/backup/1/FFA3500/FFA3500.data-00000-of-00001
new file mode 100644
index 0000000..6186385
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/1/FFA3500/FFA3500.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/backup/1/FFA3500/FFA3500.index b/DQN_mulit_tensorflow_2/backup/1/FFA3500/FFA3500.index
new file mode 100644
index 0000000..edc5dc0
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/1/FFA3500/FFA3500.index differ
diff --git a/DQN_mulit_tensorflow_2/backup/1/FFA3500/checkpoint b/DQN_mulit_tensorflow_2/backup/1/FFA3500/checkpoint
new file mode 100644
index 0000000..6b23a0a
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/1/FFA3500/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "FFA3500"
+all_model_checkpoint_paths: "FFA3500"
diff --git a/DQN_mulit_tensorflow_2/backup/1/FFA4000/FFA4000.data-00000-of-00001 b/DQN_mulit_tensorflow_2/backup/1/FFA4000/FFA4000.data-00000-of-00001
new file mode 100644
index 0000000..7d219e0
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/1/FFA4000/FFA4000.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/backup/1/FFA4000/FFA4000.index b/DQN_mulit_tensorflow_2/backup/1/FFA4000/FFA4000.index
new file mode 100644
index 0000000..bf3bdfe
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/1/FFA4000/FFA4000.index differ
diff --git a/DQN_mulit_tensorflow_2/backup/1/FFA4000/checkpoint b/DQN_mulit_tensorflow_2/backup/1/FFA4000/checkpoint
new file mode 100644
index 0000000..04247ef
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/1/FFA4000/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "FFA4000"
+all_model_checkpoint_paths: "FFA4000"
diff --git a/DQN_mulit_tensorflow_2/backup/1/FFA4500/FFA4500.data-00000-of-00001 b/DQN_mulit_tensorflow_2/backup/1/FFA4500/FFA4500.data-00000-of-00001
new file mode 100644
index 0000000..e190c6e
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/1/FFA4500/FFA4500.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/backup/1/FFA4500/FFA4500.index b/DQN_mulit_tensorflow_2/backup/1/FFA4500/FFA4500.index
new file mode 100644
index 0000000..fbec94a
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/1/FFA4500/FFA4500.index differ
diff --git a/DQN_mulit_tensorflow_2/backup/1/FFA4500/checkpoint b/DQN_mulit_tensorflow_2/backup/1/FFA4500/checkpoint
new file mode 100644
index 0000000..70f6686
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/1/FFA4500/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "FFA4500"
+all_model_checkpoint_paths: "FFA4500"
diff --git a/DQN_mulit_tensorflow_2/backup/1/FFA500/FFA500.data-00000-of-00001 b/DQN_mulit_tensorflow_2/backup/1/FFA500/FFA500.data-00000-of-00001
new file mode 100644
index 0000000..77c8ff4
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/1/FFA500/FFA500.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/backup/1/FFA500/FFA500.index b/DQN_mulit_tensorflow_2/backup/1/FFA500/FFA500.index
new file mode 100644
index 0000000..aa5cadc
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/1/FFA500/FFA500.index differ
diff --git a/DQN_mulit_tensorflow_2/backup/1/FFA500/checkpoint b/DQN_mulit_tensorflow_2/backup/1/FFA500/checkpoint
new file mode 100644
index 0000000..10482c1
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/1/FFA500/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "FFA500"
+all_model_checkpoint_paths: "FFA500"
diff --git a/DQN_mulit_tensorflow_2/backup/1/FFA5000/FFA5000.data-00000-of-00001 b/DQN_mulit_tensorflow_2/backup/1/FFA5000/FFA5000.data-00000-of-00001
new file mode 100644
index 0000000..5a5fbb2
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/1/FFA5000/FFA5000.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/backup/1/FFA5000/FFA5000.index b/DQN_mulit_tensorflow_2/backup/1/FFA5000/FFA5000.index
new file mode 100644
index 0000000..19064ff
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/1/FFA5000/FFA5000.index differ
diff --git a/DQN_mulit_tensorflow_2/backup/1/FFA5000/checkpoint b/DQN_mulit_tensorflow_2/backup/1/FFA5000/checkpoint
new file mode 100644
index 0000000..941edb1
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/1/FFA5000/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "FFA5000"
+all_model_checkpoint_paths: "FFA5000"
diff --git a/DQN_mulit_tensorflow_2/backup/1/result.csv b/DQN_mulit_tensorflow_2/backup/1/result.csv
new file mode 100644
index 0000000..905bd51
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/1/result.csv
@@ -0,0 +1,5001 @@
+result
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+1
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+1
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+1
+0
+1
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+1
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+1
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+1
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+1
+1
+0
+0
+0
+0
+0
+1
+1
+0
+0
+0
+0
+0
+0
+0
+0
+1
+1
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+1
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+1
+1
+0
+0
+0
+0
+0
+0
+1
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+1
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+1
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+1
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+1
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+1
+1
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+1
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+1
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+1
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+1
+1
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
diff --git a/DQN_mulit_tensorflow_2/backup/1/reward.csv b/DQN_mulit_tensorflow_2/backup/1/reward.csv
new file mode 100644
index 0000000..0b412a9
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/1/reward.csv
@@ -0,0 +1,5001 @@
+reward
+-25.92
+-12.8
+-18.2
+-17.47
+-12.8
+-31.8
+-25.46
+-27.84
+-12.43
+-21.88
+-15.24
+-25.17
+-20.98
+-20.2
+-24.45
+-10.68
+-18.53
+-0.93
+-25.29
+-26.76
+-13.2
+-19.62
+-31.04
+-18.51
+-27.54
+-29.15
+-30.81
+-26.64
+-32.11
+-8.66
+-19.45
+-26.48
+-13.67
+-18.96
+-19.68
+-20.52
+-25.17
+-30.62
+-24.81
+-7.09
+-15.84
+-18.31
+-20.09
+-15.84
+-7
+-18.42
+-13.75
+-18.2
+-17.9
+-27.27
+-18.26
+-13.54
+-16.66
+-10.38
+-19.67
+-17.43
+-6.45
+-18.82
+-12.69
+-12.8
+-5.82
+-29.4
+-11.81
+-25.22
+-18.75
+-17.48
+-2.18
+-27.14
+-12.15
+-26.48
+-12.8
+-18.2
+-18.96
+-13.95
+-13.56
+-10.36
+-23.75
+-19.62
+-29.09
+-12.8
+-8.2
+-18.2
+-0.33
+-18.2
+-17.99
+-12.8
+-18.2
+-18.2
+-15.45
+-20.82
+-16.82
+-27.14
+-16.87
+-27.78
+-12.8
+-20.57
+-2.8
+-18.2
+-12.8
+-17.9
+-22.08
+-27.51
+-31.55
+-23.22
+-29.87
+-13.44
+-20.8
+-19.55
+3.5
+-15.65
+-15.45
+-15.41
+-19.32
+-30.05
+-18.2
+-26.04
+-18.94
+-29.83
+-4.6
+-20.12
+-18.2
+-15.11
+-13.82
+-4.27
+-17.83
+-17.45
+-27.09
+-30.86
+-1.64
+-13.95
+-25.39
+-29.26
+-14.89
+-12.8
+-8.2
+-8.82
+-16.01
+2.39
+-18.69
+-18.2
+-15.11
+-15.6
+-29.45
+-27.56
+-6.86
+-20.54
+-14.17
+-18.2
+-19.26
+-22.8
+-14.18
+-18.84
+-26.72
+-6.87
+-16.58
+-27.18
+-27.82
+-25.29
+-7.15
+-16.67
+-19.1
+-23.43
+-15.53
+-18.74
+-13.64
+-25.58
+-19.65
+-15.68
+-27.49
+-12.94
+-34.31
+-5.82
+-25.88
+-16.64
+-17.41
+-21.09
+-14.15
+-14.66
+-8.2
+-19.44
+-15.83
+-12.8
+-29.2
+-15.72
+-18.48
+-23.5
+-15.85
+-14.38
+-18.82
+-18.2
+-18.2
+-16.21
+-19.22
+-15.11
+-15.19
+-24.2
+-25.86
+-25.88
+-14.07
+-13.69
+-22.62
+-18.2
+-9.1
+-11.61
+-15.65
+-13.5
+-15.97
+-21.77
+-14.78
+-18.2
+-24.78
+-17.95
+-12.8
+-24.11
+-25.71
+-23.9
+-18.2
+-18.7
+-16.96
+-18.2
+-18.2
+-18.2
+-15.77
+-18.2
+-18.2
+-16.13
+-17.57
+-22.06
+-25.99
+-23.1
+-18.2
+-28.13
+-23.7
+-18.2
+-18.2
+1.93
+-22.05
+-16.2
+-16.17
+-20.98
+-18.2
+-16.7
+-18.4
+-27.57
+-18.2
+-17.17
+-16.41
+-18.2
+-16.98
+-29.88
+-12.8
+-25.02
+-25.52
+-24.45
+-19
+-33.02
+-18.2
+-12.8
+-2.8
+-16.17
+-28.43
+-16.51
+-18.2
+-19.33
+-18.2
+-18.2
+-15.7
+-22.03
+-12.8
+-24.19
+-22.08
+-25.81
+-18.2
+-26.99
+-27.08
+-5.21
+-28.45
+-2.41
+-15.53
+-12.8
+-18.42
+-13.8
+-28.07
+-18.95
+-18.2
+-18.2
+-18.2
+-19.97
+-32.08
+-26.67
+-19.76
+-20.72
+-18.2
+-9.16
+-19.9
+-27.3
+-10.7
+-28.44
+-12.8
+-18.2
+-18.2
+-26.15
+-8.86
+-23.95
+-28.61
+-5.37
+-15.76
+-15.75
+-19.37
+-18.7
+-33.01
+-4.65
+-15.89
+-32.39
+-18.2
+-31.9
+-26.37
+-18.2
+-14.53
+-24.39
+-18.2
+-17.78
+-19.95
+-12.8
+-12.8
+-18.7
+-28.7
+-15.81
+-12.8
+-16.3
+-20.11
+-26.07
+-25.13
+-15.53
+-21.77
+-30.14
+-14.7
+-18.75
+-29.65
+-26.63
+-20.59
+-30.67
+-27.14
+-26.71
+-18.2
+-22.74
+-14.79
+-30.03
+-31.62
+-14.51
+-25.06
+-18.2
+-14.35
+-18.2
+-28.72
+-26.25
+-12.8
+-20.53
+-28.68
+-21.84
+-30.62
+-32.25
+-18.2
+-18.2
+-18.2
+-15.34
+-19.58
+-25.86
+-28.49
+-26.19
+-25.44
+-27.6
+-28.41
+-18.2
+-10.49
+-20.2
+-15.89
+-31.56
+-16.88
+-24.66
+-15.17
+-18.2
+-16.46
+-19.89
+-13.75
+-26.15
+-28.81
+-12.8
+-12.8
+-18.74
+-16.72
+3.16
+-19.15
+-18.2
+-18.2
+-14.53
+-28.17
+-29.04
+-18.2
+-27.2
+-18.2
+-20.72
+-18.2
+-26.19
+-13.15
+-33.04
+-17.42
+-4.39
+-14.97
+-18.2
+-18.9
+-12.8
+-16.14
+-12.8
+-15.33
+-34.2
+-18.23
+-15.01
+-16.08
+-14.51
+-18.98
+-18.2
+-5.34
+-20.04
+-15.18
+-18.2
+-9.5
+-18.2
+-21.84
+-25.82
+-7.92
+-13.7
+-26
+-18.2
+-27.49
+-2.93
+-28.02
+-18.7
+-15.17
+-16.07
+-18.25
+-14.25
+-27.02
+-25.15
+-16.02
+-26.1
+-21.6
+-27.08
+-28
+-33.69
+-3.94
+-18.2
+-15.96
+-25.78
+-15.53
+-15.67
+-18.2
+2.06
+-18.7
+-27.44
+-2.45
+-18.75
+-7.37
+-12.8
+-19.17
+-16.76
+-22.83
+-8.72
+-15.17
+-13.36
+-15.38
+-28.65
+-16.64
+-15.27
+-13.55
+-23.83
+-26.08
+-0.3
+-25.29
+-15.1
+-24.15
+-15.15
+-24.85
+-30.71
+-17.23
+-12.8
+-18.2
+-12.8
+-15.41
+-14.73
+-32.09
+-18.2
+-15.41
+-2.95
+-14.75
+-23.39
+-18.2
+-34.38
+-30.55
+-15.97
+-12.8
+-25.24
+-13.3
+-19.46
+-18.2
+-26.91
+-24.29
+-23.07
+-21.47
+-30.31
+-16.09
+-14.77
+-15.87
+-25.64
+-3.46
+-6.64
+-25.41
+-12.8
+-8.95
+-18.2
+-25.21
+-15.95
+-24.37
+-33.14
+-25.34
+-16.82
+-25.41
+-2.8
+-18.5
+-13.77
+-19.4
+-18.2
+-28.74
+-12.8
+-12.8
+-12.8
+-28.8
+-5.45
+-26.32
+-18.2
+-28.07
+-16.55
+-26.88
+-27.96
+-18.2
+-18.2
+-17.42
+-19.2
+-26.15
+-18.2
+-28.36
+-18.8
+-18.2
+-6.79
+-16.31
+-30.3
+-15.9
+-14.51
+-25.64
+-18.2
+-19.37
+-24.1
+-18.2
+-25.01
+-20.75
+-12.8
+-3.16
+-28.14
+-32.82
+-16.87
+-27.26
+-20.88
+-20.2
+-28.6
+-28.37
+-19.1
+-18.2
+-23.45
+-18.2
+-13.68
+-11.41
+0.47
+-26.49
+-7.29
+-16.09
+-4.15
+-34.33
+-5.23
+-12.8
+-13.43
+-26.65
+-13.3
+-25.16
+-16.02
+-27.59
+-21.16
+-31.34
+-18.2
+-27.99
+-28.35
+-27.34
+-5.61
+-28.05
+-27
+-20.41
+-30.7
+-18.2
+-0.71
+-27.13
+-27.73
+-18.75
+-25.36
+-18.2
+-19.38
+-18.2
+-24.8
+-30.92
+-18.2
+-15.91
+-25.22
+-25.54
+-25.46
+-25.6
+-19.75
+-13.93
+-3.59
+0.98
+-12.8
+-33.7
+-13.78
+-17.57
+-18.7
+-18.2
+-16.52
+-29.74
+-34.76
+-24.16
+-16.59
+-22.28
+-19.6
+-31.34
+-12.8
+-16.21
+-18.2
+-29.11
+-29.04
+-32.34
+-12.8
+-15.19
+-29.44
+-18.2
+-18.2
+-15.07
+-18.2
+-15.53
+-16.72
+-16.76
+-19.7
+0.91
+-18.2
+-27.31
+-24.73
+-31.84
+-24.87
+-14.91
+-18.2
+-27.61
+-26.17
+-24.32
+-25.28
+-12.8
+-25.43
+-30.11
+-15.01
+-28.99
+-15.93
+-19.19
+-12.8
+-14.31
+-15.22
+1.84
+-26.69
+-31.57
+-2.8
+-27.29
+-18.66
+-30.84
+-18.2
+-16.76
+-26.59
+-15.81
+-22.6
+-23.72
+-16.73
+-22.07
+-18.2
+-16.77
+-6.58
+-20.35
+-24.34
+-33.07
+-28.54
+-19.95
+-21
+-15.55
+-16.09
+-13.81
+-16.44
+-14.47
+-18.2
+-25.66
+-24.94
+-15.64
+-26.98
+-12.8
+-8.2
+-16.05
+-18.2
+-12.8
+-16.6
+-23.93
+-26.65
+-15.35
+-23.84
+-18.2
+-0.07
+-14.59
+-18.2
+-19.04
+-15.37
+-16.43
+-2.32
+-14.59
+-8.2
+-25.33
+-33.1
+-17.47
+-24.59
+-25.9
+-35.23
+-28.05
+-28.11
+-18.34
+-16.67
+-15.9
+-29.38
+-18.2
+-21.73
+-6.5
+-32.18
+-12.8
+-18.2
+-18.2
+-28.09
+-26.18
+-13.85
+-24.97
+-20.05
+-27.55
+-15.12
+-18.2
+-29.48
+-14.4
+-28.08
+-28.5
+-20.97
+-14.57
+-15.56
+-28.39
+-25.64
+-20.67
+-19.45
+-29.4
+-15.38
+-14.27
+-13.41
+-31.32
+-31.14
+-18.2
+-30.68
+-18.2
+-18.2
+-34.49
+-15.62
+-11.86
+-27.9
+-18.2
+-8.2
+-18.68
+-18.2
+-16.42
+-27.58
+-15.17
+-29.58
+-22.12
+-21.87
+-20.7
+-19.7
+-17.61
+-15.7
+-22.25
+-23.23
+-13.65
+-30.35
+-29.22
+-12.8
+-26.57
+-27.1
+-22.75
+-9
+-15.53
+-13.44
+-24.86
+-21.55
+-19.85
+-29.88
+-13.2
+-29.71
+-16.39
+-26.72
+-21.28
+-12.8
+-18.2
+-18.2
+-18.2
+-15.24
+-14.29
+-25.64
+-16.6
+-26.06
+-18.2
+-18.2
+-23.52
+-15.67
+-25.67
+-18.2
+-3.05
+-18.2
+-18.2
+-18.95
+-18.7
+-15.83
+-18.62
+-18.02
+-19.87
+-21.53
+-21.54
+-17.97
+-18.2
+-12.8
+-18.96
+-19.81
+-27.19
+-19.93
+-15.1
+-29.45
+-16.65
+-19.92
+-14.04
+-18.2
+-25.5
+-12.8
+-18.2
+-12.8
+-18.2
+-18.2
+-34.79
+-33.61
+-5.99
+-26.16
+-32.41
+-35.88
+-19.2
+-24.6
+-20.09
+-15.9
+-18.66
+-3.45
+-18.8
+-28.15
+-9.97
+-13.35
+-18.2
+-10.09
+-23.75
+-21.1
+-12.8
+-4.38
+-18.6
+-22.03
+-26.4
+-18.2
+-33.41
+-22.1
+-26.16
+-26.6
+-34.51
+-18.2
+-19.2
+-18.76
+-20.36
+-23.05
+-18.2
+-18.2
+-20.79
+-18.2
+-18.2
+-26.89
+-19.58
+-19.99
+-15.37
+-24.97
+1.58
+-26.96
+-18.2
+-24.74
+-21.94
+-23.92
+-18.5
+-12.8
+-18.2
+-12.8
+-18.2
+-20.2
+-18.2
+-12.8
+-29.73
+-17.05
+-21.5
+-12.8
+-25.18
+-28.44
+-28.45
+-21.53
+-26.09
+-16.99
+-9.77
+-18.88
+-30.28
+-27.11
+-22.71
+-15.71
+-27.71
+-12.77
+-28.02
+-26.51
+-21.23
+-10.74
+-18.2
+-25.93
+-10.51
+-27.33
+-9.85
+-20.2
+-18.25
+-20.04
+-21.09
+-8.2
+-8.2
+-13.85
+-13.09
+-19.75
+-18.2
+-12.8
+-20.24
+-13.2
+-18.7
+-18.4
+-18.2
+-28.37
+-17.29
+-18.02
+-5.86
+-20.15
+-6.71
+-26.45
+-15.92
+-18.75
+-19.06
+-19.27
+-21.14
+-15.21
+-13.88
+-18.2
+-12.8
+-18.2
+-8.2
+-26.44
+-26.18
+-26
+-26.76
+-27.08
+-27.79
+-2.06
+-16.42
+-19.5
+-13.21
+-26.82
+-26.3
+-27.96
+-14.4
+-28.63
+-23.92
+-0.3
+-18.2
+-18.2
+-18.2
+-16.32
+-26.89
+-25.57
+-5.75
+-16.13
+-12.8
+-17.35
+-22.56
+-28.96
+-15.58
+-26.7
+-13.8
+-17.15
+-26.37
+-23.26
+-32.22
+-25.57
+-27.16
+-15.9
+-26.33
+-24.52
+-27.27
+-24.03
+-20.07
+-12.8
+-3.65
+-19.7
+-18.2
+-18.45
+-15.18
+-18.2
+-19.55
+-26
+-17.05
+-19.25
+-8.2
+-18.2
+-18.2
+-17.19
+-27.28
+-33.81
+-26.69
+-25.23
+-23.55
+-25.41
+-26.36
+-26.94
+-27.97
+-23.58
+-27.41
+-15.23
+-24.37
+-4.37
+-29.05
+-18.2
+-17.9
+-3.3
+-19.7
+-3.61
+-25.32
+-16.41
+-25.26
+-8.86
+-25.81
+-23.43
+-24.96
+-25.06
+-18.2
+-18.2
+-18.2
+-18.2
+-18.47
+-15.79
+-28.98
+-26.09
+-24.37
+-15.05
+-15.25
+-24.24
+-16.63
+-16.75
+-27.18
+-2.48
+-30.15
+-16.65
+-26.04
+-12.16
+-18.2
+-18.2
+-18.2
+-8.2
+-12.8
+-12.8
+-18.2
+-23.82
+-20.26
+-19.48
+-27.96
+-20.64
+-17.77
+-25.5
+-17.21
+-16.4
+-24.52
+-16.37
+-16.4
+-20.49
+-28.27
+-5.34
+-27.58
+-26.51
+-24.24
+-34.39
+-10.32
+-16.8
+-26.09
+-16.87
+-15.38
+-19.1
+-13.26
+-18.2
+-12.8
+-18.2
+-10.62
+-18.2
+-18.2
+-25.81
+-26.12
+-26.4
+-25.15
+-35.91
+-16.45
+-16.87
+-29.16
+-26.73
+-25.36
+-23.39
+-4.06
+-25.63
+-31.71
+-0.79
+-29.84
+-28.87
+-24.24
+-27.09
+-16.51
+-28.57
+-26.42
+-26.31
+-16.25
+-20.85
+-19.41
+-17.64
+-26.43
+-30.55
+-19.23
+-0.79
+-15.59
+-27.94
+-28.1
+-23.69
+-28.03
+-1.93
+-28.27
+-17.13
+-27.93
+-27.28
+-14.98
+-27.69
+-33.47
+-28.34
+-25.13
+-28
+-29.39
+-24.6
+-16.88
+-28.46
+-30.23
+-20.3
+-1.64
+-15.13
+-22.87
+-25.02
+-31.68
+-15.75
+-30.75
+-26.3
+-22.9
+-24.11
+-11.11
+-29.29
+-25.16
+-15.37
+-16.67
+-26.13
+-16.97
+-15.53
+-27.65
+-25.78
+-26.55
+-24.11
+-14.03
+-25.34
+-21.63
+-29.36
+-31.84
+-28.23
+-30.42
+-30.73
+-9.52
+-14.98
+-12.48
+-19.73
+-23.23
+-27.23
+-26.78
+-18.2
+-18.7
+-25.3
+-25.9
+-20.9
+-8.2
+-19.12
+-24.34
+-18.75
+-17.28
+-16.02
+-9.87
+-7.36
+-26.9
+-16.1
+-30.53
+-18.2
+-1.42
+-16.39
+-25.66
+-18.2
+-26.26
+-19.7
+-19
+-18.2
+-18.2
+-18.2
+-18.2
+-20.31
+-25.72
+-22.06
+-22.1
+-24.48
+-21.95
+-18.82
+-17.16
+-10.53
+-22.33
+-17.17
+-29.34
+-17.02
+-22.96
+-12.8
+-18.96
+-20.47
+-27.38
+-27.57
+-6.15
+-28.61
+-29.51
+-34.35
+-16.7
+-17
+-30.31
+-34.95
+-29.12
+-18.29
+-15.91
+-24.35
+-23.87
+-20.83
+-25.5
+-20.31
+-20.33
+-22.9
+-22.11
+-11.34
+-19.71
+-7.37
+-18.2
+-18.2
+-18.2
+-18.2
+-18.2
+-18.2
+-19.28
+-0.5
+-16.93
+-28.62
+-29.77
+-27.8
+-16.33
+-20.28
+0.42
+-18.32
+-16.46
+-16.53
+-24.12
+-24.77
+-17.17
+-23.68
+-18.2
+-19.7
+-16.58
+-22.16
+-12.84
+-8.2
+-18.2
+-18.36
+-15.19
+-19.08
+-18.95
+-16.13
+-32.68
+-2.14
+-26.63
+-11.71
+-23.09
+-9.6
+-27.05
+-12.8
+-16.23
+-17.47
+-27.61
+-15.93
+-23.46
+-8.78
+-16.54
+-26.67
+-16.5
+-21.38
+-18.47
+-31.37
+-10.62
+-27.41
+-3.29
+-28.26
+-17.62
+-22.66
+-19.18
+-19.2
+-18.96
+-20.07
+-18.2
+-12.8
+-18.2
+-18.2
+-18.2
+-18.89
+-9.9
+-24.9
+-33.7
+-24.23
+-20.09
+-13.71
+-18.2
+-7.32
+-16.21
+-25.51
+-16.1
+-19.95
+-3.41
+-25.77
+-18.2
+-12.8
+-18.2
+-18.2
+-17.41
+-15.72
+-19.01
+-18.34
+-28.53
+-28.54
+-26.5
+-15.35
+-20.07
+-19.28
+-18.2
+-18.2
+-18.75
+-31.06
+-24.15
+0.85
+-26.06
+-25.81
+-20.86
+-22.83
+-26.12
+-23.3
+-16.54
+-29.91
+-36.63
+-15.43
+-15.45
+-15.82
+-22.9
+-19.11
+-16.19
+-17.67
+-18.2
+-18.34
+-23.78
+-14.73
+-17.31
+-17.98
+-17.29
+-19.42
+-19.95
+-9.85
+-18.2
+-12.8
+-18.2
+-19.01
+-19.3
+-28.87
+-22.33
+-25.09
+-25.38
+-23.06
+-18.2
+-18.2
+-18.2
+-18.2
+-18.2
+-12.8
+-18.2
+-18.2
+-18.2
+-12.8
+-18.2
+-12.8
+-18.69
+-5.94
+-18.2
+-14.03
+-15.71
+-25.66
+-4.7
+-25.52
+-25.82
+-29.47
+-16.03
+-3.66
+-14.8
+-24.5
+-12.99
+-13.63
+-16.73
+-25.23
+-27.06
+-34.53
+-26.6
+-15.96
+-21.39
+-26.76
+-18.98
+-15.15
+-16.21
+-16.87
+-25.27
+-8.2
+-19.45
+-20.28
+-24.83
+-34.71
+-1.72
+-14.5
+-19.78
+-25.46
+-18.15
+-16.24
+-17.9
+-23.36
+-25.03
+-25.79
+-15.75
+-29.78
+-24.55
+-21.57
+-12.69
+-12.8
+-18.2
+-18.2
+-22.97
+-29.67
+-16.53
+-27.26
+-16.05
+-6.1
+-16.69
+-17.07
+-15.1
+-33.84
+-16.17
+-16.7
+-12.8
+-18.2
+-18.2
+-18.2
+-18.2
+-8.2
+-18.2
+-16.07
+-14.83
+-32.33
+-14.64
+-30.86
+-4.47
+-22.28
+-31.03
+-31.65
+-15.96
+-28.07
+-21.9
+-31.62
+-26.53
+-15.67
+-13.86
+-16.41
+-15.39
+-16.73
+-27.27
+-22.85
+-17.27
+-30
+-17.42
+-31.5
+-15.07
+-24.27
+-13.3
+-14.08
+-27.84
+-31.1
+-30.72
+-14.41
+-26.2
+-32.59
+-15.11
+-22.39
+-24.58
+-22.87
+-20.53
+-18.2
+-12.8
+-18.2
+-20.93
+-19.46
+-18.2
+-12.8
+-18.2
+-19.82
+-17.93
+-7.74
+-10.32
+-19.67
+-27.11
+-28.06
+-28.55
+-18.6
+-15.08
+-16.91
+-15.86
+-27.76
+-27.59
+-15.99
+-27.37
+-17.76
+-25.58
+-23.77
+-17.1
+-18.2
+-18.2
+-18.2
+-18.2
+-8.2
+-18.2
+-32.43
+-31.34
+-23.22
+-17.05
+-13.75
+-15.89
+-31.3
+-16.53
+-16.53
+-26.44
+-31.43
+-24.53
+-24.03
+-16.86
+-22.21
+-16.96
+-31.94
+-25.11
+-23.8
+-4.73
+-16.45
+-26.61
+-15.88
+-25.64
+-16.67
+-31.43
+-15.3
+-16.27
+-16.28
+1.26
+-27.96
+-28.61
+-28.8
+-28.24
+-16.89
+-14.8
+-27.32
+-29.76
+-14.79
+-6.37
+-30.19
+-32.03
+-16.75
+-16.8
+-31
+-27.39
+-15.72
+-7.43
+-24.83
+-25.71
+-31.14
+-27.39
+-20.47
+-14.33
+-31.62
+-23.78
+-0.27
+-22.11
+-20.97
+-28.8
+-16.49
+-14.27
+-23.75
+-30.65
+-27.25
+-25.05
+-26.23
+-16.03
+-26.44
+-24.39
+-20.44
+-26
+-26.72
+-19.01
+-29.3
+-24.73
+-29.62
+-26.04
+-27.08
+-16.95
+-14.34
+-19.82
+-18.2
+-18.2
+-20
+-24.65
+-22.78
+-17.92
+-15.63
+-20.4
+-27.78
+-16.04
+-25.51
+-24.32
+-15.25
+-10.28
+-30.71
+-24.95
+-27.59
+-26.44
+-26.55
+-17.31
+-28.07
+-29.36
+-24.49
+-9.85
+-12.8
+-8.2
+-18.2
+-18.2
+-20.36
+-16.64
+-25.54
+-30.87
+-23.11
+-17.37
+-15.03
+-22.87
+-16.27
+-4.8
+-16.55
+-26.69
+-31.46
+-31.23
+-23.77
+-31.67
+-16.68
+0.81
+-5.91
+-28.55
+-26.59
+-22.74
+-16.64
+-30.17
+-28.34
+-23.68
+-22.18
+-25.39
+-29.32
+-19.73
+-19.45
+-16.37
+-18.2
+-18.2
+-19.55
+-16.79
+-7.68
+-16.77
+-9.86
+-27.6
+-2.81
+-21.63
+-16.67
+-16.97
+-17.07
+-17.07
+-14.95
+-18.47
+-19.92
+-12.8
+-27.59
+-20.95
+-30.29
+-17.92
+-2.3
+-5.58
+-29.77
+-16.15
+-16.57
+-23.55
+-12.8
+-12.8
+-18.2
+-18.2
+-18.69
+-18.69
+-18.02
+-17.32
+-28.11
+-24.5
+-4.36
+-18.2
+-18.89
+-28.37
+-26.18
+-4.51
+-15.98
+-25.58
+-14.73
+-18.2
+-18.2
+-18.2
+-18.2
+-12.8
+-18.2
+-18.2
+-15.87
+-27.11
+-24.24
+-16.47
+-23.89
+-15.93
+-26.83
+-29.85
+-35.64
+-28.46
+-24.27
+-24.79
+-22.99
+0.03
+-27.21
+-26.19
+-19.64
+-18.2
+-18.2
+-12.8
+-18.2
+-18.2
+-18.2
+-12.8
+-18.2
+-18.2
+-18.2
+-18.2
+-12.8
+-18.2
+-12.8
+-13.67
+-18.2
+-18.2
+-19.01
+-16.56
+-28.97
+-33.86
+-17.17
+-16.29
+-27.39
+-3.6
+-29.63
+-20.1
+-8.79
+4.4
+-17.13
+-26.41
+-29.07
+-27.46
+-22.68
+-2.45
+-4.75
+-28.11
+-25.95
+-25.12
+-26.04
+-15.69
+-22.76
+-27.39
+-25.68
+-6.11
+-9.42
+-1.14
+-26.63
+-20.46
+-6.87
+-12.8
+-17.03
+-28.65
+-23.33
+-20.37
+-19.65
+-18.85
+-18.2
+-20.38
+-18.31
+-17.95
+-18.2
+-18.2
+-18.45
+-19.5
+-18.3
+-19.61
+-8.48
+-18.2
+-15.35
+-18.2
+-18.2
+-8.2
+-18.2
+-18.2
+-18.2
+-20.31
+-21.55
+-6.24
+-25.27
+-32.63
+-17.09
+-6.22
+-24.47
+-22.93
+-29.08
+-14.99
+-26.2
+-29.76
+-8.36
+-18.2
+-18.2
+-18.2
+-12.8
+-14.61
+-23.64
+-3
+-27.17
+-21.69
+-16.62
+-30.57
+-34.15
+-28.76
+-16.47
+-27.74
+-13.18
+-18.45
+-14.12
+-18.38
+-23.94
+-1.76
+-34.06
+-17.12
+-31.87
+-32.14
+-27.43
+-15.21
+-16.41
+-30.07
+-21.34
+-16.64
+-32.63
+-17.11
+-16.27
+-26.62
+-27.82
+-16.01
+-14.81
+-27.75
+-27.21
+-25.2
+-23.7
+-24.65
+-30.51
+-15.23
+-29.16
+-28.05
+-17.1
+-29.12
+-27.47
+-27.9
+-23.67
+-18.2
+-12.8
+-18.02
+-19.49
+-22.37
+0.1
+-20.4
+-18.85
+-12.8
+-18.47
+-19.9
+-20.26
+-12.8
+-18.2
+-8.2
+-18.2
+-12.8
+-18.2
+-12.8
+-18.2
+-18.2
+-18.2
+-10.09
+-16.88
+-28.04
+-16.77
+-24.64
+-30.25
+-22.65
+-34.11
+-26.69
+-30.55
+-16.35
+-25.54
+1.36
+-29.36
+-16.17
+-32.85
+-18.44
+-6.2
+-20.44
+-7.93
+-24.55
+-21.75
+-16.07
+-18.79
+-19.29
+-23.02
+-16.57
+-34.98
+-27.33
+-16.71
+-14.8
+-17.37
+-17.17
+-17.61
+-30.960000000000022
+-14.950000000000003
+-17.170000000000044
+-17.200000000000045
+-4.580000000000007
+-29.55000000000002
+-20.42
+-26.449999999999996
+-23.979999999999993
+-21.29000000000004
+-23.849999999999998
+-8.759999999999993
+-18.2
+-18.2
+-20.04
+-25.14999999999999
+-11.21
+-29.440000000000015
+-16.39999999999999
+-22.380000000000003
+-22.889999999999997
+-20.09
+-8.66
+-18.2
+-18.2
+-18.2
+-18.2
+-12.8
+-16.890000000000043
+-16.47000000000007
+-25.280000000000044
+-24.519999999999996
+-24.71
+-27.890000000000008
+-19.730000000000018
+-22.939999999999998
+-22.9
+-20.04
+-30.50000000000002
+-25.69
+-26.230000000000004
+-30.56000000000003
+-19.559999999999995
+-2.8000000000000007
+-9.189999999999998
+-2.8000000000000007
+-19.7
+-25.64
+-22.13
+-29.610000000000007
+-6.330000000000001
+-26.710000000000008
+-23.709999999999994
+-30.980000000000025
+-15.260000000000005
+-21.52
+-6.21000000000001
+-19.550000000000008
+-26.14
+-15.59000000000005
+-19.659999999999997
+-19.700000000000003
+-25.890000000000004
+-31.37000000000004
+-23.719999999999995
+-29.500000000000007
+-19.660000000000032
+-20.4
+-16.3
+-31.170000000000023
+-15.550000000000034
+-20.5
+-25.850000000000005
+-12.77
+-18.47
+-18.2
+-18.2
+-18.2
+-19.55
+-15.81000000000005
+-24.599999999999998
+-27.340000000000003
+-16.04000000000004
+-22.319999999999997
+-14.520000000000001
+-18.2
+-12.8
+-18.2
+-31.030000000000022
+-25.680000000000007
+-22.260000000000005
+-25.330000000000002
+-18.340000000000018
+-24.939999999999998
+-24.64
+-19.73
+-19.500000000000018
+-6.240000000000009
+-22.160000000000025
+-20.6
+-22.63
+-24.349999999999994
+-26.189999999999998
+-24.83
+-25.53
+-20.57
+-28.90000000000001
+-32.30000000000003
+-29.880000000000017
+-18.35
+-18.2
+-18.2
+-20.03
+-31.490000000000027
+-21.71
+-16.75000000000004
+-21.830000000000002
+-28.980000000000018
+-24.97
+-20.679999999999996
+-21.020000000000003
+-19.880000000000003
+-12.8
+-18.740000000000002
+-18.2
+-18.2
+-12.8
+-12.8
+-12.8
+-26.100000000000005
+-26.370000000000008
+-25.220000000000002
+-22.65
+-18.480000000000018
+-24.49
+-32.48000000000004
+-13.180000000000023
+-27.29
+-27.22999999999999
+-13.730000000000013
+-25.020000000000003
+-24.86
+-16.67000000000005
+-23.749999999999996
+-25.43
+-28.980000000000018
+-27.569999999999986
+-31.390000000000025
+-17.23
+-18.42
+-18.2
+-18.42
+-16.21
+-25.28
+-16.810000000000002
+-13.65
+-18.2
+-19.8
+-21.660000000000032
+-21.730000000000004
+-19.39
+-5.67
+-18.2
+-13.670000000000002
+-26.500000000000007
+-26.529999999999987
+-27.60000000000001
+-23.349999999999994
+-33.830000000000034
+-24.699999999999992
+-30.180000000000017
+-25.700000000000003
+-35.01000000000003
+-22.180000000000007
+-27.080000000000013
+-18.36
+-19.009999999999998
+-9.380000000000047
+-26.049999999999986
+-24.959999999999994
+-27.75000000000001
+-14.650000000000023
+-18.2
+-26.220000000000006
+-28.820000000000014
+-21.480000000000047
+-17.000000000000007
+-29.430000000000017
+-15.960000000000027
+-15.38000000000004
+-28.570000000000014
+-3.8999999999999977
+-12.8
+-20.18
+-15.849999999999998
+-8.38
+-13.970000000000002
+-18.2
+-22.52
+-24.22
+-21.060000000000002
+-22.119999999999997
+-16.28
+-16.07
+-12.8
+-12.8
+-12.8
+-18.42
+-12.8
+-13.670000000000002
+-26.34
+-20.94
+-24.129999999999995
+-24.39999999999999
+-25.5
+-21.120000000000026
+-26.059999999999995
+-26.110000000000007
+-25.840000000000003
+-22.91
+-26.18000000000001
+-12.310000000000038
+-25.29
+-17.250000000000046
+-24.92000000000004
+-24.420000000000037
+-25.870000000000005
+-20.270000000000024
+-22.080000000000027
+-20.67
+-26.400000000000006
+-23.480000000000032
+-18.280000000000015
+-18.290000000000003
+-28.370000000000005
+-30.630000000000017
+-28.320000000000014
+-22.659999999999997
+-14.640000000000011
+-18.2
+-13.670000000000002
+-18.41
+-16.640000000000036
+-12.529999999999998
+-16.230000000000043
+-23.759999999999998
+-22.14
+-19.939999999999998
+-20.26
+-18.45
+-18.2
+-17.119999999999997
+-19.880000000000003
+-18.47
+-18.2
+-8.690000000000005
+-15.61
+-20.919999999999998
+-14.349999999999996
+-20.42
+-30.550000000000026
+-32.67000000000003
+-23.99000000000003
+-29.87000000000002
+-25.370000000000005
+-26.700000000000006
+-26.270000000000007
+-21.410000000000004
+-20.760000000000026
+-21.68
+-20.170000000000023
+-11.430000000000032
+-11.450000000000001
+-36.37000000000004
+-23.610000000000046
+-12.850000000000001
+-18.2
+-18.2
+-12.8
+-18.2
+-18.2
+-14.42
+-9.57
+-12.8
+-25.349999999999998
+-10.43
+-22.080000000000034
+-15.930000000000037
+-18.970000000000017
+-17.229999999999986
+-16.860000000000042
+-26.630000000000006
+-31.49000000000001
+-16.130000000000038
+-25.970000000000002
+-16.25
+-22.28
+-23.479999999999997
+-26.430000000000007
+-20.85
+-26.88000000000001
+-18.480000000000015
+-16.55000000000005
+-27.35000000000001
+-17.280000000000047
+-29.90000000000002
+-17.04000000000001
+-18.34
+-22.369999999999997
+-17.15
+-12.8
+-18.2
+-18.2
+-18.2
+-18.2
+-2.8000000000000007
+-18.2
+-12.270000000000001
+-16.32
+-18.2
+-15.41
+-19.79
+-34.03000000000006
+-14.850000000000023
+-25.369999999999997
+-28.109999999999992
+-30.57
+-29.840000000000018
+-25.910000000000007
+-26.640000000000008
+-10.25
+-16.440000000000037
+-26.570000000000007
+-16.530000000000054
+-23.290000000000035
+-21.65
+-22.659999999999997
+-16.180000000000035
+-16.380000000000038
+-27.24000000000001
+-17.320000000000046
+-5.960000000000007
+-24.15
+-27.96000000000001
+-28.860000000000014
+-21.47
+-28.740000000000016
+-29.78000000000003
+-20.11
+-24.799999999999997
+-15.919999999999998
+-12.8
+-18.2
+-13.960000000000042
+-18.510000000000012
+-16.070000000000004
+-26.550000000000008
+-22.14
+-29.510000000000016
+-29.780000000000015
+-21.48
+-23.86
+-23.11
+-11.080000000000002
+-28.880000000000017
+-24.43
+-28.670000000000016
+-21.32
+-16.20000000000006
+-5.609999999999987
+-25.369999999999994
+-25.590000000000003
+-27.470000000000006
+-29.40999999999999
+-26.189999999999998
+-24.43
+-28.07999999999999
+-16.88000000000005
+-10.740000000000029
+-25.219999999999995
+-21.080000000000002
+-12.569999999999997
+-18.2
+-1.6700000000000017
+-12.8
+-14.47000000000006
+-20.27000000000001
+-26.329999999999984
+-32.85000000000001
+-24.439999999999998
+-23.439999999999998
+-27.52999999999998
+-27.640000000000008
+-24.80999999999999
+-31.770000000000035
+-29.679999999999996
+-25.200000000000003
+-30.51000000000002
+-16.680000000000042
+-17.170000000000044
+-19.229999999999997
+-23.339999999999996
+-31.230000000000032
+-12.8
+-18.2
+-31.810000000000038
+-24.9
+-30.950000000000045
+-27.69999999999999
+-32.81000000000004
+-21.54
+-27.17000000000001
+-22.76000000000005
+-21.13
+-18.2
+-18.2
+-20.36
+-17.55000000000001
+-9.97
+-17.22
+-27.229999999999986
+-17.900000000000006
+-27.130000000000006
+-20.82
+-8.370000000000003
+-18.2
+-2.8000000000000007
+-18.2
+-18.2
+-15.340000000000002
+-27.33
+1.2299999999999998
+-28.30000000000001
+-15.860000000000053
+-28.480000000000004
+-22.439999999999998
+-7.77
+-15.52
+-12.8
+-20.450000000000003
+-15.709999999999999
+-18.2
+-18.2
+-17.82
+-18.2
+-18.2
+-12.8
+-19.28
+-19.200000000000014
+-29.50999999999999
+-25.630000000000003
+-30.790000000000024
+-29.30000000000002
+-17.09000000000005
+-24.340000000000046
+-6.330000000000014
+-5.710000000000003
+-23.839999999999996
+-27.140000000000008
+-2.4399999999999933
+-14.27000000000005
+-19.530000000000022
+-14.940000000000042
+-19.689999999999998
+-18.2
+-12.8
+-18.2
+-12.8
+-18.97
+-24.78
+-25.890000000000008
+-24.449999999999996
+-27.839999999999993
+-14.840000000000032
+-18.910000000000018
+-26.210000000000036
+-17.08
+-6.809999999999995
+-21.42
+-30.61000000000002
+-26.830000000000005
+-29.169999999999995
+-25.63999999999999
+-27.090000000000007
+-23.879999999999995
+-22.989999999999995
+-20.05
+-26.520000000000003
+-24.29
+-19.23
+-19.5
+-13.330000000000002
+-12.8
+-18.47
+-18.2
+-8.2
+-18.2
+-8.160000000000002
+-17.24
+-18.47
+-32.12000000000003
+-20.5
+-17.40000000000005
+-18.570000000000014
+-17.52000000000005
+-26.480000000000008
+-24.570000000000004
+-12.8
+-29.460000000000022
+-30.26000000000003
+-19.84000000000001
+-29.140000000000015
+-20.660000000000025
+-24.279999999999998
+-26.46000000000001
+-18.39
+-18.2
+-8.2
+-28.25999999999999
+-14.730000000000064
+-23.599999999999994
+-27.24000000000001
+-26.410000000000007
+-26.860000000000007
+-18.479999999999997
+-24.94
+-11.930000000000001
+-26.530000000000005
+-13.199999999999998
+-8.2
+-18.2
+-12.8
+-18.2
+-15.35
+-20.71
+-16.230000000000047
+-27.46
+-23.029999999999998
+-12.48
+-29.79000000000002
+-22.720000000000013
+-12.8
+-18.2
+-8.2
+-20.09
+-25.75
+-20.380000000000003
+-28.129999999999995
+-20.82000000000003
+-27.37999999999998
+-35.99000000000004
+-24.54000000000005
+-16.770000000000007
+-19.98
+-20.020000000000003
+-18.2
+-12.8
+-12.8
+-12.8
+-18.2
+-18.2
+-18.47
+-19.55
+-25.380000000000003
+-29.03000000000001
+-16.540000000000006
+-27.54000000000001
+-29.270000000000024
+-16.530000000000047
+-21.64
+-18.670000000000012
+-31.32000000000002
+-34.930000000000035
+-14.99000000000003
+-18.220000000000013
+-31.22000000000002
+-22.369999999999997
+-24.78
+-17.189999999999998
+-18.89
+-25.42000000000005
+-19.980000000000018
+-26.610000000000007
+-15.600000000000044
+-16.320000000000004
+-19.020000000000014
+-26.190000000000005
+-22.61000000000002
+-21.57
+-3.3100000000000023
+-20.650000000000034
+-28.270000000000014
+-25.900000000000006
+-27.850000000000012
+-16.420000000000034
+-16.540000000000006
+-3.9900000000000047
+-13.010000000000042
+-24.780000000000047
+-18.2
+-18.45
+-28.30000000000001
+-26.200000000000003
+-34.99000000000006
+-9.540000000000022
+-27.13000000000001
+-4.130000000000003
+-17.44
+-14.270000000000001
+-12.8
+-12.8
+-18.2
+-18.47
+-32.10000000000003
+-33.70000000000003
+-30.910000000000018
+-28.830000000000013
+-22.880000000000045
+-23.979999999999997
+-30.210000000000022
+-23.509999999999998
+-29.880000000000017
+-30.660000000000014
+-25.40000000000001
+-21.070000000000004
+-14.540000000000001
+-18.2
+-30.730000000000025
+-29.080000000000027
+-20.55
+-16.170000000000044
+-31.470000000000027
+-16.85
+-29.900000000000006
+-13.89
+-22.25
+-28.260000000000005
+-27.059999999999995
+-25.580000000000005
+-16.94000000000001
+-6.440000000000012
+-24.659999999999993
+-26.300000000000004
+-19.84
+-25.750000000000004
+-15.540000000000001
+-18.77
+-12.8
+-12.8
+-18.689999999999998
+-22.599999999999998
+-13.2
+-19.82
+-21.43
+-8.240000000000007
+-26.690000000000005
+-20.599999999999998
+-19.73
+-25.720000000000052
+-34.27000000000002
+-5.840000000000009
+-11.39
+-16.779999999999994
+-26.359999999999992
+-13.96
+-27.870000000000005
+-18.11
+-19.45
+-27.199999999999985
+-27.000000000000007
+-26.460000000000008
+-18.2
+-18.2
+-2.8000000000000007
+-12.8
+-18.2
+-12.8
+-21.060000000000027
+-25.64
+-23.559999999999988
+-8.279999999999998
+-12.8
+-12.8
+-8.2
+-13.65
+-12.8
+-18.2
+-12.8
+-18.2
+-18.2
+-5.070000000000011
+-29.070000000000014
+-31.46000000000003
+-27.410000000000007
+-24.87
+-25.02
+-26.700000000000006
+-29.970000000000034
+-17.000000000000046
+-16.61000000000004
+-30.990000000000023
+-24.129999999999995
+-26.1
+-26.79999999999999
+-25.330000000000002
+-15.45000000000007
+-19.640000000000022
+-20.4
+-26.069999999999986
+-27.939999999999994
+-27.099999999999987
+-34.290000000000035
+-15.230000000000016
+-18.2
+-13.559999999999997
+-32.420000000000016
+-19.360000000000014
+-13.73
+-20.03
+-19.7
+-24.47
+-25.069999999999993
+-19.62000000000002
+-18.06000000000001
+-25.83
+-16.25000000000003
+-20.00000000000002
+-24.160000000000025
+-34.48000000000004
+-16.970000000000045
+-20.270000000000003
+-18.36
+-22.33
+-29.210000000000015
+-3.8000000000000043
+-26.790000000000006
+-16.72000000000004
+-27.58000000000001
+-7.74
+-8.2
+-25.800000000000004
+-18.2
+-12.8
+-18.2
+-17.37000000000005
+-23.42000000000001
+-14.870000000000047
+-23.350000000000016
+-23.799999999999997
+-16.250000000000032
+-17.270000000000046
+-29.53000000000002
+-16.77000000000005
+-15.820000000000062
+-18.450000000000014
+-29.220000000000013
+-29.660000000000032
+-18.2
+-18.2
+-14.22
+-18.2
+-18.2
+-18.2
+-18.2
+-12.8
+-18.2
+-18.47
+-20.4
+-21.43
+-27.090000000000007
+-35.740000000000045
+-23.47
+-24.59
+-18.280000000000012
+-16.410000000000043
+-11.200000000000001
+-23.780000000000037
+-30.12000000000001
+-30.670000000000016
+-15.950000000000031
+-11.900000000000043
+-2.3199999999999976
+-25.31999999999999
+-23.819999999999993
+-24.299999999999997
+-24.91999999999999
+-14.650000000000066
+-15.420000000000053
+-19.91
+-21.130000000000003
+-21.330000000000016
+-15.570000000000036
+-26.42999999999999
+-26.889999999999986
+-28.280000000000015
+-19.77
+-16.890000000000043
+-24.55
+-14.0
+-12.8
+-18.2
+-14.540000000000001
+-23.749999999999996
+-22.759999999999998
+-24.75
+-24.599999999999998
+-20.17
+-16.33000000000005
+-28.650000000000006
+-16.230000000000047
+-23.859999999999996
+-23.11
+-35.530000000000044
+-16.53000000000006
+-22.299999999999997
+-12.84
+-7.659999999999997
+-6.110000000000008
+-22.17
+-26.560000000000006
+-28.440000000000012
+-21.290000000000035
+-29.720000000000013
+-30.18000000000002
+-25.540000000000035
+-25.980000000000004
+-26.550000000000004
+-28.000000000000007
+-27.03000000000001
+-27.580000000000013
+-16.38999999999999
+-24.83
+-3.769999999999988
+-9.899999999999991
+-18.45
+-12.91
+-18.2
+-18.2
+-18.2
+-18.2
+-12.8
+-12.8
+-18.2
+-21.77
+-21.83
+-15.36
+-12.8
+-19.7
+-20.98
+-18.63
+-19.23
+-26.479999999999997
+-16.19000000000004
+-30.560000000000024
+-4.150000000000004
+-28.529999999999994
+-28.23000000000001
+-10.480000000000032
+-16.269999999999985
+-16.610000000000056
+-7.790000000000001
+-18.2
+-14.100000000000001
+-18.89
+-15.71
+-18.47
+-16.53
+-33.58000000000006
+-5.3999999999999835
+-14.870000000000056
+-26.42
+-27.500000000000007
+-13.32
+-14.540000000000001
+-18.2
+-17.15
+-7.8299999999999965
+-13.02
+-18.2
+-18.2
+-18.740000000000002
+-22.46
+-21.42
+-13.89
+-25.540000000000003
+-16.47000000000003
+-22.46
+-30.119999999999997
+-32.86000000000003
+-24.369999999999997
+-18.80000000000001
+-28.560000000000016
+-27.39
+-25.05
+-34.54000000000006
+-22.93000000000005
+-28.690000000000015
+-16.620000000000044
+-17.270000000000046
+-17.270000000000046
+-28.350000000000016
+-28.400000000000013
+-23.209999999999994
+-18.2
+-12.55
+-27.700000000000006
+-26.230000000000004
+-26.83000000000001
+-25.690000000000005
+-29.700000000000017
+-22.660000000000043
+-18.950000000000003
+-11.970000000000002
+-2.9000000000000012
+-18.2
+-19.05
+-19.45
+-19.840000000000003
+-21.55
+-4.629999999999996
+-22.05
+-24.880000000000003
+-25.690000000000005
+-24.37000000000003
+-26.430000000000007
+-29.630000000000017
+-30.68000000000002
+-25.01
+-20.84
+-23.589999999999996
+-22.320000000000043
+-9.330000000000028
+-21.47
+-18.2
+-18.2
+-20.65
+-26.910000000000004
+-23.360000000000028
+-16.280000000000005
+-25.890000000000004
+-23.18
+-10.380000000000031
+-24.97
+-34.27000000000003
+-15.66999999999999
+-2.6900000000000013
+-30.440000000000026
+-21.020000000000003
+-23.949999999999996
+-15.770000000000003
+-26.390000000000008
+-20.710000000000022
+-17.75
+-18.2
+-2.8000000000000007
+-18.2
+-18.2
+-18.2
+-12.8
+-18.2
+-18.2
+-17.82
+-18.2
+-18.2
+-18.950000000000003
+-20.58
+-18.39
+-26.720000000000006
+-26.17
+-26.279999999999994
+-9.200000000000003
+-18.2
+-18.2
+-16.21000000000005
+-20.569999999999993
+-19.05
+-18.2
+-18.2
+-18.2
+-12.8
+-25.05
+-29.110000000000007
+-30.84000000000004
+-21.419999999999984
+-10.21
+-16.52
+-18.2
+-18.2
+-18.2
+-12.8
+-8.2
+-18.96
+-29.060000000000013
+-17.22000000000005
+-25.389999999999993
+1.2999999999999998
+-10.38
+-24.88
+-20.17
+-16.50000000000004
+-22.07
+-25.6
+-27.16000000000001
+-33.30000000000003
+-14.070000000000038
+-31.68000000000003
+-14.070000000000038
+-32.34000000000003
+-22.469999999999995
+-12.8
+-32.32000000000003
+-13.970000000000036
+-28.770000000000017
+-28.930000000000014
+-6.440000000000014
+-27.98000000000001
+-12.470000000000018
+-13.870000000000035
+-28.720000000000013
+-26.060000000000006
+-9.710000000000027
+-13.670000000000034
+-26.450000000000006
+-25.330000000000002
+-25.160000000000004
+-23.339999999999996
+-13.40000000000003
+-13.570000000000034
+-12.970000000000029
+-21.61
+-25.430000000000003
+-12.530000000000028
+-13.550000000000031
+-22.31000000000003
+-30.400000000000027
+-10.590000000000014
+-25.19999999999999
+-24.39999999999999
+-30.020000000000024
+-24.64999999999999
+-25.240000000000002
+-25.85999999999999
+-10.770000000000008
+-20.759999999999998
+-19.289999999999985
+-12.280000000000033
+-27.88
+-26.69
+-24.98
+-11.56000000000002
+-23.279999999999994
+-25.36999999999999
+-11.389999999999993
+-0.7299999999999989
+-11.169999999999996
+-22.79
+-29.950000000000024
+-24.8
+-26.390000000000008
+-28.59000000000001
+-13.110000000000031
+-32.29000000000003
+-21.310000000000002
+-24.710000000000008
+-20.36
+-27.510000000000012
+-32.95000000000003
+-18.34
+-13.620000000000001
+-18.2
+-12.8
+-9.76
+-13.970000000000036
+-16.24000000000001
+-27.230000000000008
+-23.759999999999994
+-15.720000000000002
+-13.650000000000036
+-13.60000000000003
+-13.10000000000003
+-6.1999999999999975
+-16.07
+-26.14999999999999
+-24.959999999999997
+-26.300000000000004
+-11.940000000000024
+-12.530000000000042
+-5.840000000000006
+-14.050000000000036
+-10.250000000000027
+-13.670000000000034
+-22.599999999999998
+-31.20000000000002
+-29.030000000000015
+-18.980000000000018
+-26.860000000000007
+-31.210000000000026
+-26.770000000000007
+-28.790000000000013
+-33.380000000000024
+-12.65000000000002
+-10.92
+-12.71000000000002
+-30.730000000000032
+-21.82
+-30.160000000000025
+-26.90999999999999
+-30.350000000000016
+-15.160000000000002
+-19.09000000000001
+-29.340000000000014
+-12.37000000000003
+-13.530000000000031
+-3.0999999999999934
+-25.18
+-4.110000000000003
+-30.660000000000014
+-29.810000000000002
+-11.320000000000023
+-10.959999999999996
+-25.929999999999996
+-26.439999999999984
+-10.970000000000024
+-19.990000000000002
+-6.570000000000005
+-11.810000000000024
+-20.52
+-11.510000000000002
+-24.059999999999995
+-3.0699999999999994
+-28.610000000000014
+-23.499999999999996
+-13.060000000000036
+-29.470000000000006
+-23.869999999999994
+-11.510000000000035
+-27.250000000000007
+-12.350000000000044
+-12.660000000000023
+-27.75000000000001
+-23.300000000000033
+-21.020000000000028
+-25.85
+-24.54
+-12.830000000000016
+-26.039999999999992
+-13.700000000000038
+-12.750000000000023
+-14.020000000000037
+-32.92000000000004
+-26.310000000000002
+-28.580000000000005
+-26.130000000000003
+-13.550000000000034
+-30.04000000000002
+-12.930000000000028
+-24.41999999999999
+-32.900000000000034
+-26.340000000000003
+-31.350000000000023
+-11.610000000000037
+-11.03
+-12.570000000000025
+-18.720000000000006
+-18.019999999999982
+-11.680000000000032
+-29.680000000000007
+-18.50000000000001
+-26.79
+-10.990000000000013
+-12.370000000000008
+-5.629999999999984
+-27.46999999999998
+-27.499999999999993
+-11.310000000000045
+-10.800000000000033
+-26.880000000000006
+-22.07
+-10.740000000000022
+-23.029999999999998
+-26.619999999999983
+-12.830000000000025
+-31.05000000000004
+-28.689999999999998
+-26.29
+-32.56000000000003
+-25.740000000000002
+-29.159999999999997
+-25.11
+-13.320000000000034
+-25.130000000000003
+-25.72
+-12.470000000000033
+-11.810000000000032
+-1.889999999999996
+-31.220000000000027
+-23.89999999999999
+-17.14999999999999
+-27.58
+-12.310000000000024
+-28.059999999999988
+-26.69999999999999
+-10.610000000000014
+-11.730000000000034
+-11.840000000000012
+-11.430000000000021
+-27.55999999999998
+-24.569999999999997
+-25.37999999999999
+-11.110000000000019
+-25.559999999999988
+-20.6
+-20.260000000000026
+-25.519999999999996
+-3.129999999999992
+-24.64999999999999
+-10.63000000000002
+-24.95999999999999
+-9.920000000000044
+-26.719999999999988
+-28.459999999999997
+-23.409999999999997
+-30.780000000000022
+-11.940000000000039
+-26.259999999999984
+-10.210000000000004
+-23.799999999999997
+-11.309999999999997
+-11.119999999999997
+-26.009999999999998
+-28.199999999999992
+-28.179999999999982
+-11.990000000000022
+-10.73
+-30.960000000000043
+-31.46000000000002
+-27.619999999999997
+-11.050000000000036
+-9.919999999999998
+-25.42999999999999
+-11.270000000000012
+-11.280000000000046
+-21.68000000000002
+-26.32
+-26.339999999999993
+-10.650000000000029
+-22.549999999999997
+-32.35000000000003
+-11.150000000000018
+-24.59999999999999
+-10.290000000000008
+-10.600000000000026
+-11.24000000000004
+-26.099999999999987
+-26.049999999999986
+-26.289999999999992
+-27.669999999999987
+-12.190000000000024
+-29.010000000000012
+-11.950000000000035
+-12.280000000000017
+-30.43000000000004
+-27.47999999999999
+-23.889999999999997
+-10.850000000000012
+-12.290000000000017
+-21.32000000000003
+-12.370000000000044
+-12.699999999999998
+-24.409999999999997
+-11.78000000000001
+-22.18
+-31.000000000000014
+-11.790000000000045
+-28.39
+-12.100000000000035
+-11.350000000000042
+-25.55999999999999
+-25.979999999999993
+-23.049999999999997
+-12.110000000000028
+-20.84000000000001
+-11.71000000000003
+-25.78999999999999
+-20.61000000000002
+-29.760000000000005
+-24.77
+-24.189999999999998
+-13.170000000000028
+-28.290000000000006
+-29.19999999999998
+-11.640000000000013
+-11.11000000000002
+-7.390000000000013
+-29.940000000000005
+-28.13999999999999
+-26.80999999999999
+-23.519999999999996
+-0.7999999999999983
+-27.46999999999999
+-27.47000000000001
+-30.220000000000027
+-26.6
+-30.300000000000008
+-26.980000000000008
+-12.460000000000042
+-23.759999999999994
+-27.60999999999999
+-12.370000000000038
+-25.950000000000003
+-27.36
+-27.73
+-25.279999999999994
+-23.239999999999995
+-6.539999999999992
+-28.970000000000006
+-26.829999999999995
+-11.270000000000042
+-19.18
+-21.370000000000033
+-18.91000000000001
+-31.520000000000017
+-10.530000000000006
+-21.03
+-25.769999999999992
+-23.549999999999997
+-17.499999999999996
+-30.91000000000001
+-12.170000000000009
+-10.810000000000022
+-26.229999999999986
+-10.570000000000011
+-15.15999999999999
+-26.659999999999993
+-26.009999999999987
+-27.490000000000006
+-24.109999999999996
+-20.6
+-10.610000000000015
+-26.829999999999984
+-27.45000000000001
+-11.510000000000055
+-26.209999999999987
+-21.520000000000003
+-11.65000000000002
+-18.479999999999993
+-10.350000000000012
+-28.819999999999993
+-26.499999999999993
+-25.279999999999994
+-29.03
+-27.109999999999992
+-27.849999999999994
+-10.569999999999999
+-26.999999999999986
+-10.480000000000024
+-11.070000000000004
+-24.55999999999999
+-10.270000000000017
+-10.610000000000015
+-30.50000000000001
+-24.68999999999999
+-25.419999999999998
+-10.230000000000002
+-17.629999999999985
+-20.480000000000004
+-19.099999999999994
+-10.770000000000016
+-10.550000000000008
+-25.51999999999999
+-27.169999999999998
+-20.54
+-24.859999999999992
+-20.69
+-11.77000000000002
+-27.250000000000004
+-20.380000000000003
+-24.259999999999998
+-10.720000000000017
+-10.840000000000016
+-25.319999999999997
+-26.289999999999985
+-23.599999999999994
+-19.059999999999995
+-10.710000000000038
+-10.290000000000008
+-17.729999999999997
+-24.09999999999999
+-27.630000000000003
+-15.920000000000003
+-9.259999999999986
+-9.249999999999996
+-18.45
+-18.5
+-18.2
+-18.2
+-12.8
+-18.95
+-26.099999999999994
+-17.9
+-19.65
+-18.2
+-18.7
+-10.2
+-30.670000000000037
+-25.79
+-11.870000000000038
+-25.470000000000002
+-24.679999999999993
+-26.839999999999993
+-11.240000000000023
+-28.479999999999997
+-23.689999999999998
+-22.080000000000002
+-25.840000000000003
+-24.29999999999999
+-11.890000000000006
+-30.850000000000016
+-5.239999999999992
+-21.990000000000002
+-25.929999999999996
+-23.959999999999994
+-20.85
+-12.810000000000025
+-28.490000000000002
+-11.210000000000015
+-30.44000000000001
+-22.65
+-27.759999999999998
+-21.100000000000016
+-30.780000000000022
+-20.83
+-1.539999999999998
+-29.860000000000014
+-6.249999999999993
+-12.170000000000044
+-29.670000000000016
+-11.900000000000048
+-12.230000000000016
+-27.19000000000001
+-27.20000000000001
+-11.870000000000058
+-8.440000000000033
+-11.910000000000016
+-20.85
+-23.949999999999996
+-10.970000000000029
+-12.890000000000029
+-21.340000000000025
+-26.050000000000004
+-25.779999999999994
+-11.04000000000002
+-28.34000000000001
+-29.900000000000016
+-26.61999999999999
+-27.84
+-10.35
+-23.589999999999996
+-32.78000000000003
+-12.500000000000021
+-4.1700000000000035
+-29.37000000000001
+-23.859999999999992
+-12.170000000000039
+-4.539999999999995
+-18.25
+-24.819999999999993
+-23.939999999999994
+-27.75999999999998
+-19.729999999999997
+-12.110000000000035
+-26.79
+-20.910000000000025
+-25.949999999999992
+-27.01999999999999
+-10.980000000000024
+-10.319999999999999
+-26.190000000000005
+-14.919999999999998
+-12.810000000000038
+-22.299999999999997
+-10.269999999999994
+-24.54
+-30.870000000000022
+-24.63
+-23.31
+-11.030000000000017
+-25.360000000000003
+-30.25
+-20.4
+-30.030000000000026
+-29.58
+-27.429999999999993
+-10.59000000000001
+-11.59000000000004
+-25.7
+-19.37000000000001
+-12.840000000000037
+-24.65
+-27.060000000000006
+-8.270000000000021
+-13.23000000000003
+-30.740000000000045
+-25.829999999999995
+-27.100000000000005
+-18.189999999999998
+-12.550000000000038
+-12.800000000000042
+-10.75000000000001
+-11.410000000000016
+-13.850000000000035
+-31.24000000000001
+-30.180000000000014
+-11.450000000000001
+-28.85
+-25.610000000000003
+-23.989999999999995
+-12.590000000000027
+-28.500000000000014
+-13.910000000000041
+-13.950000000000037
+-26.020000000000007
+-13.560000000000032
+-27.050000000000008
+-1.2899999999999985
+-28.720000000000013
+-19.49000000000001
+-27.86000000000001
+-5.2700000000000085
+-30.040000000000013
+-26.020000000000003
+-13.239999999999998
+-20.36
+-22.28
+-13.370000000000033
+1.2299999999999998
+-29.380000000000027
+-25.0
+-13.530000000000033
+-13.79000000000004
+-12.290000000000049
+-23.709999999999994
+-30.440000000000026
+-20.75000000000003
+-20.230000000000008
+-20.64
+-26.51
+-12.23000000000003
+-25.99
+-25.519999999999996
+-13.310000000000029
+-12.97000000000004
+-24.86
+-12.500000000000028
+-28.37000000000001
+-30.01000000000002
+-20.78
+-12.820000000000032
+-13.810000000000038
+-27.180000000000007
+-26.009999999999998
+-13.490000000000025
+-17.540000000000013
+-31.690000000000026
+-28.060000000000006
+-27.510000000000012
+-28.70000000000001
+-32.160000000000025
+-14.070000000000038
+-25.849999999999998
+-21.630000000000003
+-25.39
+-20.92
+-25.880000000000003
+-30.780000000000015
+-12.750000000000028
+-11.890000000000045
+-26.34
+-25.2
+-18.450000000000014
+-13.390000000000025
+-24.74
+-17.36000000000001
+-23.63
+-13.350000000000028
+-12.190000000000023
+-13.700000000000035
+-24.299999999999997
+-12.720000000000034
+-7.620000000000013
+-13.150000000000022
+-13.640000000000033
+-23.789999999999996
+-21.980000000000004
+-22.640000000000025
+-26.100000000000005
+-12.090000000000028
+-26.589999999999996
+-26.85
+-18.140000000000015
+-12.320000000000023
+-32.14000000000003
+-11.010000000000026
+-12.670000000000034
+-8.840000000000009
+-11.630000000000022
+-21.560000000000013
+-25.679999999999996
+-13.010000000000026
+-24.11
+-28.210000000000015
+-19.970000000000013
+-24.269999999999996
+-13.490000000000034
+-24.009999999999994
+-22.45
+-20.660000000000018
+-12.030000000000022
+-25.07
+-8.830000000000007
+-26.549999999999986
+-29.090000000000007
+-11.200000000000019
+-26.309999999999988
+-30.700000000000006
+-26.989999999999995
+-25.989999999999995
+-25.599999999999998
+-24.269999999999996
+-11.850000000000009
+-28.740000000000002
+-27.35000000000001
+-24.859999999999992
+-12.320000000000027
+-13.330000000000037
+-20.460000000000022
+-28.30999999999999
+-21.86
+-31.370000000000026
+-12.770000000000032
+-27.84000000000001
+-20.720000000000027
+-23.549999999999997
+-12.23000000000001
+-25.360000000000003
+-27.67000000000001
+-23.519999999999996
+-12.780000000000022
+-30.510000000000012
+-12.910000000000025
+-31.070000000000014
+-24.590000000000003
+-12.090000000000018
+-29.930000000000035
+-25.050000000000004
+-27.53000000000001
+-10.050000000000013
+-11.060000000000018
+-28.499999999999993
+-25.6
+-23.679999999999996
+-22.14
+-12.980000000000045
+-13.230000000000024
+-27.980000000000004
+-21.400000000000002
+-4.380000000000005
+-12.420000000000018
+-27.989999999999995
+-24.61
+-20.57000000000002
+-27.649999999999984
+-19.19000000000002
+-12.210000000000038
+-28.5
+-27.4
+-29.580000000000027
+-25.919999999999987
+-29.360000000000007
+-11.75000000000001
+-30.92000000000003
+-11.610000000000017
+-5.290000000000006
+-30.540000000000006
+-24.489999999999995
+-25.40999999999999
+-6.590000000000018
+-12.790000000000022
+-30.850000000000016
+-12.340000000000032
+-12.53000000000005
+-26.33
+-24.129999999999995
+-21.32
+-28.440000000000012
+-12.730000000000027
+-25.05
+-22.05000000000003
+-29.540000000000013
+-13.900000000000036
+-27.85000000000001
+-31.170000000000037
+-23.889999999999997
+-12.710000000000022
+-2.2799999999999976
+-21.410000000000025
+-13.33000000000003
+-23.859999999999996
+-24.000000000000036
+-26.790000000000006
+-19.770000000000017
+-17.000000000000007
+-31.830000000000027
+-33.52000000000003
+-27.790000000000013
+-13.600000000000035
+-6.490000000000013
+-13.670000000000035
+-5.650000000000011
+-13.35000000000004
+-26.960000000000008
+-20.590000000000018
+-13.620000000000035
+-28.09000000000001
+-12.989999999999998
+-24.630000000000003
+-29.390000000000015
+-26.78000000000001
+-4.960000000000005
+-23.819999999999997
+-6.060000000000013
+-2.6399999999999997
+-13.900000000000036
+-28.09000000000001
+-27.860000000000007
+-17.30000000000001
+-28.760000000000012
+-33.41000000000004
+-30.470000000000034
+-32.050000000000026
+-13.540000000000031
+-28.200000000000014
+-2.75
+-13.640000000000033
+-29.230000000000018
+-25.32
+-26.580000000000005
+-24.049999999999997
+-5.110000000000008
+-18.450000000000017
+-27.780000000000012
+-26.230000000000004
+-13.650000000000034
+-10.090000000000018
+-28.170000000000012
+-19.18000000000002
+-25.230000000000004
+-26.820000000000007
+-29.19000000000002
+-27.77000000000001
+-26.370000000000005
+-13.800000000000036
+-15.500000000000009
+-18.2
+-12.8
+-12.8
+-18.2
+-16.28
+-19.810000000000002
+-14.540000000000001
+-19.17000000000001
+-19.82
+-12.38
+-13.970000000000036
+-14.070000000000038
+-27.30000000000001
+-23.689999999999998
+-13.970000000000036
+-13.970000000000036
+-4.880000000000016
+-18.2
+-15.41
+-18.45
+-8.74
+-26.860000000000007
+-30.47000000000002
+-26.580000000000005
+-26.030000000000005
+-13.469999999999995
+-13.840000000000035
+-29.90000000000002
+-13.150000000000027
+-24.379999999999995
+-13.730000000000034
+-27.84000000000001
+-25.84
+-27.400000000000013
+-26.97000000000001
+-5.230000000000008
+-7.900000000000018
+-32.840000000000025
+-28.460000000000015
+-27.46000000000001
+-28.320000000000014
+-13.930000000000035
+-26.400000000000006
+-26.300000000000004
+-13.970000000000036
+-26.550000000000008
+-24.169999999999998
+-5.740000000000012
+-27.990000000000013
+-13.870000000000035
+-23.159999999999997
+-30.16000000000002
+-13.970000000000036
+-27.19000000000001
+-31.95000000000003
+-13.620000000000031
+-9.030000000000024
+-13.610000000000031
+-30.77000000000003
+-26.42
+-28.95000000000001
+-25.46
+-23.229999999999997
+-13.760000000000039
+-5.7500000000000115
+-13.600000000000032
+-23.019999999999996
+-28.610000000000014
+-24.019999999999996
+-12.620000000000049
+-26.21
+-24.63
+-27.71000000000001
+-20.700000000000028
+-23.139999999999997
+-13.970000000000036
+-24.91
+-28.32000000000001
+-19.330000000000016
+-29.93000000000002
+-28.76000000000002
+-17.820000000000014
+-10.440000000000026
+-13.890000000000038
+-13.720000000000033
+-27.16000000000001
+-27.84000000000001
+-13.520000000000033
+-24.22
+-22.369999999999997
+-12.890000000000036
+-24.93
+-9.710000000000027
+-12.890000000000025
+-31.61000000000003
+-25.530000000000005
+-28.800000000000022
+-13.870000000000035
+-13.430000000000032
+-15.48
+-27.709999999999994
+-27.299999999999997
+-12.290000000000019
+-13.75000000000003
+-24.839999999999993
+-12.150000000000023
+-13.90000000000004
+-22.08000000000004
+-13.050000000000031
+-16.760000000000005
+-27.490000000000006
+-29.420000000000016
+-26.970000000000006
+-25.77
+-13.200000000000037
+-14.440000000000001
+-28.380000000000003
+-28.85000000000002
+-29.860000000000017
+-20.7
+-12.45000000000002
+-13.550000000000042
+-29.770000000000014
+-12.290000000000024
+-31.390000000000022
+-23.119999999999997
+-19.890000000000015
+-31.870000000000033
+-26.620000000000008
+-13.960000000000035
+-13.530000000000033
+-30.460000000000022
+-23.070000000000032
+-28.090000000000003
+-27.89000000000001
+-20.74000000000003
+-26.090000000000007
+-13.320000000000025
+-11.01
+-5.0600000000000005
+-18.25
+-18.2
+-20.23
+-22.15000000000004
+-7.950000000000029
+-26.539999999999992
+-11.790000000000026
+-22.36999999999999
+-25.109999999999992
+-19.840000000000003
+-20.619999999999997
+-10.660000000000036
+-21.249999999999996
+-11.220000000000017
+-26.040000000000003
+-13.040000000000024
+-22.709999999999997
+-24.78
+-18.32
+-12.650000000000038
+-29.610000000000007
+-13.639999999999993
+-6.069999999999998
+-27.53999999999998
+-10.310000000000022
+-10.250000000000016
+-4.9399999999999835
+-10.050000000000008
+-27.509999999999984
+-26.349999999999987
+-29.73000000000001
+-10.340000000000014
+-28.449999999999996
+-18.840000000000003
+-10.840000000000028
+-25.41999999999999
+-10.790000000000026
+-10.020000000000008
+-17.98999999999998
+-24.50999999999999
+-21.290000000000028
+-10.680000000000021
+-29.870000000000005
+-26.04
+-22.799999999999997
+-11.55000000000002
+-28.36999999999999
+-11.67000000000002
+-3.459999999999992
+-10.320000000000002
+-28.46000000000001
+-10.46000000000002
+-25.109999999999992
+-10.710000000000024
+-23.959999999999994
+-25.199999999999992
+-24.649999999999988
+-25.67999999999999
+-17.049999999999997
+-25.31999999999999
+-19.450000000000014
+-11.590000000000046
+-24.139999999999993
+-15.119999999999987
+-22.999999999999996
+-11.710000000000042
+-12.070000000000046
+-26.059999999999985
+-12.190000000000031
+-26.650000000000006
+-11.270000000000021
+-16.62999999999998
+-27.78
+-30.74000000000003
+-24.099999999999994
+-26.159999999999993
+-15.739999999999998
+-25.51
+-22.190000000000047
+-28.62000000000001
+-10.750000000000009
+-7.350000000000002
+-24.699999999999992
+-8.500000000000028
+-12.140000000000025
+-25.549999999999997
+-26.940000000000005
+-29.01999999999999
+-29.74000000000001
+-11.150000000000007
+-23.72
+-22.82
+-3.2199999999999918
+-29.990000000000002
+-25.579999999999988
+-12.010000000000021
+-23.359999999999996
+-27.25000000000001
+-12.229999999999999
+-21.130000000000003
+-17.40000000000005
+-26.410000000000007
+-28.16000000000001
+-24.8
+-32.50000000000003
+-24.98
+-15.220000000000002
+-15.820000000000045
+-24.830000000000005
+-28.72
+-8.480000000000016
+-19.049999999999994
+-23.189999999999998
+-27.749999999999993
+-24.93999999999999
+-20.3
+-12.950000000000001
+-19.92
+-25.850000000000005
+-34.11000000000004
+-5.180000000000008
+-16.980000000000057
+-27.000000000000007
+-14.710000000000045
+-17.919999999999998
+-18.55
+-13.39
+-18.02
+-12.8
+-18.97
+-17.080000000000002
+-21.19
+-24.87000000000005
+-29.67000000000001
+-12.920000000000044
+-26.119999999999987
+-20.090000000000018
+-25.429999999999993
+-27.32999999999999
+-20.7
+-26.549999999999986
+-21.6
+-13.780000000000038
+-25.36
+-20.28000000000001
+-24.369999999999997
+-17.83999999999998
+-20.040000000000024
+-19.23000000000001
+-20.32
+-21.240000000000023
+-27.979999999999993
+-26.309999999999988
+-32.33000000000004
+-25.389999999999993
+-22.669999999999995
+-32.410000000000025
+-29.45000000000002
+-20.45
+-28.180000000000007
+-26.209999999999997
+-15.880000000000035
+-26.050000000000004
+-23.98000000000004
+-27.429999999999993
+-33.94000000000003
+-11.740000000000002
+-25.710000000000004
+-8.340000000000027
+-15.690000000000046
+-17.62
+-10.520000000000017
+-32.72000000000003
+-20.140000000000022
+-16.16000000000003
+-30.380000000000017
+-6.740000000000012
+-20.730000000000025
+-15.360000000000065
+-17.720000000000006
+-16.040000000000052
+-30.640000000000022
+-15.36000000000004
+-13.270000000000039
+-21.060000000000002
+-27.160000000000004
+-22.71
+-34.02000000000005
+-23.929999999999996
+-12.110000000000001
+-24.829999999999995
+-25.970000000000002
+-22.04
+-26.580000000000002
+-27.22000000000001
+-10.61
+-28.750000000000018
+-16.440000000000047
+-18.21
+-24.78
+-32.71000000000004
+-16.32000000000004
+-15.970000000000038
+-16.880000000000056
+-31.640000000000022
+-32.76000000000004
+-26.640000000000004
+-22.569999999999993
+-14.440000000000001
+-21.67
+-15.400000000000041
+-16.860000000000007
+-10.009999999999996
+-18.2
+-13.670000000000002
+-19.82
+-29.38000000000001
+-15.54000000000007
+-20.41
+-14.540000000000035
+-24.22
+-23.9
+-16.570000000000054
+-2.619999999999997
+-18.580000000000005
+-14.550000000000061
+-29.470000000000006
+-25.369999999999997
+-28.77000000000001
+-14.850000000000039
+-20.52
+-26.46
+-29.69000000000002
+-32.340000000000025
+-28.210000000000015
+-14.61
+-18.41
+-8.2
+-19.28
+-21.599999999999998
+-21.519999999999996
+-16.200000000000042
+-33.40000000000003
+-28.560000000000013
+-21.060000000000027
+-5.8600000000000065
+-15.520000000000046
+-32.700000000000045
+-33.16000000000004
+-28.880000000000013
+-23.510000000000026
+-14.970000000000022
+-15.520000000000046
+-30.410000000000025
+-24.299999999999997
+-18.550000000000004
+-3.5400000000000014
+-16.020000000000046
+-12.470000000000036
+-4.369999999999998
+-19.82
+-17.880000000000003
+-15.35
+-11.770000000000037
+-9.46
+-19.449999999999996
+-24.009999999999998
+-30.80000000000001
+-13.710000000000035
+-25.72999999999999
+-26.63
+-19.25000000000001
+-26.520000000000003
+-31.480000000000025
+-26.720000000000006
+-16.83000000000002
+-19.46
+-19.57
+-18.6
+-13.010000000000002
+-18.27
+-14.140000000000022
+-1.919999999999997
+-24.59
+-21.130000000000003
+-20.130000000000003
+-21.28000000000003
+-25.68
+-5.840000000000009
+-25.23
+-13.810000000000032
+-15.730000000000052
+-29.150000000000013
+-32.49000000000003
+-25.319999999999997
+-28.390000000000008
+-25.160000000000032
+-28.14
+-22.04000000000003
+-27.77000000000001
+-29.720000000000006
+-29.79000000000002
+-14.86
+-20.240000000000002
+-20.009999999999998
+-26.85000000000001
+-28.22000000000001
+-26.589999999999996
+-21.759999999999998
+-15.68
+-18.2
+-12.8
+-16.130000000000003
+-12.8
+-18.2
+-12.8
+-18.2
+-12.8
+-18.2
+-19.8
+-20.27
+-20.630000000000003
+-22.960000000000022
+-14.160000000000021
+-15.680000000000033
+-29.490000000000016
+-26.490000000000006
+-18.720000000000006
+-31.66000000000001
+-26.020000000000003
+-21.74000000000002
+-28.56000000000001
+-25.02
+-15.660000000000052
+-14.800000000000031
+-26.330000000000002
+-27.24
+-21.590000000000003
+-5.77000000000001
+-21.04
+-19.79
+-24.930000000000028
+-26.870000000000005
+-26.49
+-30.900000000000016
+-30.820000000000014
+-26.05
+-12.439999999999998
+-29.600000000000016
+-33.66000000000003
+-29.29000000000002
+-23.849999999999994
+-24.580000000000002
+-14.219999999999997
+-27.130000000000003
+-27.35000000000001
+-19.730000000000008
+-16.769999999999996
+-26.59
+-24.31
+-18.660000000000018
+-26.050000000000004
+-24.619999999999994
+-15.120000000000037
+-26.509999999999998
+-26.740000000000002
+-26.14
+-15.180000000000037
+-19.620000000000008
+-30.26000000000002
+-14.560000000000013
+-21.89
+-20.82000000000002
+-15.55
+-27.640000000000004
+-12.910000000000025
+-20.02
+-26.600000000000005
+-28.070000000000007
+-28.44000000000001
+-32.520000000000046
+-19.75
+-3.479999999999997
+-27.169999999999995
+-26.890000000000004
+-28.179999999999993
+-27.22
+-30.56000000000001
+-24.389999999999993
+-15.760000000000035
+-5.2100000000000035
+-26.180000000000003
+-6.730000000000004
+-13.030000000000028
+-12.8
+-18.2
+-12.8
+-14.54
+-22.710000000000022
+-34.24000000000002
+-15.150000000000043
+-18.009999999999998
+-22.810000000000038
+-27.64
+-24.019999999999996
+-15.720000000000043
+-23.869999999999997
+-18.740000000000002
+-20.57
+-12.450000000000035
+-27.810000000000002
+-29.67000000000002
+-23.0
+-26.110000000000003
+-14.450000000000038
+-23.290000000000028
+-24.860000000000003
+-24.88
+-23.320000000000043
+-14.070000000000043
+-23.769999999999996
+-5.959999999999999
+-18.040000000000006
+-25.47
+-24.94
+-14.230000000000025
+-27.080000000000002
+-14.800000000000038
+-28.610000000000014
+-24.679999999999993
+-23.70000000000003
+-24.579999999999995
+-20.66
+-22.770000000000046
+-28.310000000000002
+-23.58
+-19.55000000000001
+-27.099999999999998
+-14.520000000000053
+-25.490000000000002
+-11.819999999999997
+-16.619999999999997
+-21.68
+-23.54000000000002
+-15.900000000000059
+-32.92000000000004
+-21.76
+-28.870000000000008
+-26.960000000000004
+-10.390000000000024
+-23.350000000000023
+-19.990000000000002
+-18.37
+-15.600000000000001
+-22.14
+-2.2599999999999962
+-21.410000000000004
+-19.55
+-27.95000000000001
+-27.96
+-29.080000000000013
+-27.91000000000001
+-26.879999999999995
+-20.04
+-18.7
+-20.08
+-8.51
+-29.56000000000001
+-30.490000000000038
+-31.610000000000028
+-21.109999999999996
+-25.659999999999997
+-26.169999999999998
+-27.320000000000004
+-29.220000000000013
+-28.83000000000002
+-23.860000000000007
+-27.87
+-27.660000000000004
+-25.509999999999998
+-25.53
+-22.279999999999998
+-30.320000000000043
+-30.470000000000017
+-25.26
+-14.950000000000026
+-15.100000000000035
+-11.450000000000001
+-18.2
+-18.2
+-18.2
+-18.2
+-18.2
+-18.740000000000002
+-11.25000000000003
+-20.880000000000003
+-27.390000000000004
+-14.88000000000005
+-23.819999999999997
+-21.14000000000003
+-21.69
+-27.47
+-29.21000000000001
+-14.920000000000035
+-25.6
+-23.03000000000003
+-19.69
+-33.28000000000003
+-25.149999999999995
+-2.6899999999999946
+-13.350000000000026
+-28.989999999999988
+-25.11
+-25.089999999999996
+-18.480000000000004
+-18.2
+-19.45
+-16.14
+-8.02
+-17.15
+-18.34
+-18.2
+-18.2
+-19.48
+-10.280000000000001
+-19.939999999999998
+-14.770000000000026
+-16.660000000000046
+-21.12000000000002
+-14.980000000000054
+-30.37000000000002
+-28.970000000000013
+-6.720000000000008
+-23.699999999999996
+-14.780000000000038
+-27.320000000000007
+-27.0
+-33.23000000000002
+-24.579999999999995
+-21.330000000000002
+-19.84
+-8.160000000000018
+-23.449999999999996
+-15.450000000000042
+-17.51000000000001
+-13.530000000000024
+-25.369999999999997
+-25.34
+-26.359999999999992
+-14.100000000000012
+-11.240000000000068
+-27.03
+-15.40000000000004
+-26.64
+-14.549999999999994
+-25.67
+-3.2
+-6.720000000000001
+-14.42000000000005
+-31.580000000000013
+-15.59000000000005
+-26.479999999999993
+-25.450000000000003
+-25.369999999999997
+-14.490000000000057
+-22.009999999999994
+-20.259999999999998
+-17.060000000000002
+-25.379999999999992
+-24.78
+-27.370000000000008
+-26.36
+-25.220000000000002
+-30.520000000000024
+-13.72000000000003
+-19.610000000000007
+-15.769999999999996
+-21.870000000000022
+-27.470000000000006
+-13.78000000000004
+-25.369999999999997
+-26.160000000000004
+-15.460000000000043
+-27.960000000000004
+-31.200000000000024
+-24.939999999999998
+-26.77999999999999
+-14.179999999999996
+-24.630000000000003
+-26.800000000000004
+-27.999999999999986
+-14.73000000000004
+-25.410000000000004
+-26.449999999999992
+-4.019999999999994
+-10.19
+-15.390000000000041
+-14.740000000000068
+-22.53000000000001
+-19.830000000000016
+-21.719999999999995
+-15.03
+-12.8
+-20.439999999999998
+-18.96
+-18.7
+-21.58
+-13.620000000000001
+-12.8
+-18.2
+-18.2
+-18.2
+-16.17
+-20.310000000000016
+-27.21999999999999
+-15.040000000000044
+-33.870000000000054
+-31.620000000000033
+-10.31
+-31.95000000000002
+-8.969999999999999
+-17.229999999999997
+-18.439999999999998
+-19.990000000000002
+-30.64000000000003
+-17.840000000000003
+-29.45000000000001
+-16.230000000000047
+-30.06000000000003
+-32.36000000000003
+-7.900000000000011
+-7.920000000000018
+-26.63
+-31.13000000000001
+-15.050000000000043
+-34.19000000000003
+-24.339999999999996
+-30.030000000000012
+-13.990000000000027
+-28.07
+-14.440000000000031
+-24.370000000000033
+-14.530000000000042
+-8.10000000000002
+-28.870000000000005
+-12.890000000000024
+-7.530000000000008
+-21.789999999999996
+-18.2
+-18.2
+-18.2
+-19.759999999999998
+-12.950000000000001
+-19.9
+-20.05
+-1.1099999999999985
+-28.360000000000014
+-25.379999999999995
+-15.160000000000041
+-7.990000000000011
+-27.03000000000001
+-15.190000000000046
+-15.050000000000038
+-27.35000000000001
+-21.450000000000003
+-33.26000000000002
+-19.710000000000022
+-27.540000000000006
+-21.22
+-27.119999999999997
+-31.700000000000014
+-27.000000000000004
+-28.07
+-16.890000000000008
+-25.169999999999995
+-30.130000000000003
+-16.02
+-25.380000000000003
+-24.919999999999998
+-26.679999999999996
+-26.240000000000002
+-20.06000000000001
+-17.14
+-28.51
+-4.8199999999999905
+-27.799999999999994
+-14.710000000000027
+-24.169999999999995
+-26.519999999999996
+-29.150000000000006
+-30.099999999999998
+-16.719999999999995
+-21.83
+-23.64000000000003
+-3.3999999999999937
+-22.930000000000028
+-16.770000000000003
+-26.780000000000005
+-27.880000000000003
+-14.120000000000024
+-26.299999999999997
+-21.840000000000003
+-27.11
+-13.590000000000027
+-19.73000000000001
+-25.75
+-14.940000000000044
+-25.060000000000002
+-25.090000000000003
+-17.19
+-15.890000000000038
+-23.719999999999995
+-28.540000000000013
+-15.42000000000005
+-22.890000000000022
+-11.650000000000047
+-8.630000000000022
+-14.300000000000033
+-25.369999999999997
+-16.14000000000004
+-14.990000000000048
+-15.470000000000066
+-5.030000000000005
+-25.45
+-27.7
+-25.339999999999996
+-22.12000000000001
+-28.570000000000007
+-29.36000000000002
+-15.38000000000003
+-23.11000000000005
+-16.140000000000057
+-24.100000000000037
+-26.85
+-27.960000000000022
+-17.419999999999998
+-24.84000000000004
+-28.850000000000005
+-28.83000000000001
+-20.39
+-18.970000000000013
+-32.28000000000003
+-25.82
+-16.710000000000008
+-19.28000000000001
+-20.30000000000002
+-23.229999999999997
+-21.70000000000004
+-7.030000000000022
+-21.990000000000002
+-14.870000000000047
+-18.86
+-20.380000000000003
+-21.830000000000034
+-28.13000000000001
+-24.020000000000046
+-14.780000000000022
+-27.330000000000005
+-26.39
+-13.86000000000004
+-7.62000000000001
+-31.210000000000008
+-22.25
+-25.34
+-20.730000000000025
+-15.830000000000057
+-32.710000000000036
+-25.2
+-16.86
+-29.120000000000015
+-14.860000000000037
+-15.420000000000043
+-7.530000000000001
+-23.72
+-27.85000000000001
+-25.85
+-21.510000000000034
+-34.34000000000003
+-28.140000000000008
+-26.400000000000006
+-27.380000000000003
+-15.30000000000003
+-27.480000000000004
+-31.72000000000004
+-22.460000000000036
+-32.630000000000024
+-14.720000000000043
+-30.36000000000002
+-24.959999999999994
+-26.83000000000001
+-1.8199999999999965
+-28.189999999999998
+-15.180000000000048
+-7.640000000000006
+-8.930000000000017
+-27.96000000000001
+-22.760000000000044
+-28.24
+-14.650000000000055
+-14.91
+-19.59
+-27.02
+-30.410000000000014
+-25.57
+-14.680000000000057
+-24.529999999999998
+-31.26000000000002
+-13.320000000000032
+-27.060000000000006
+-15.180000000000064
+-18.86
+-5.469999999999994
+-25.18
+-27.630000000000003
+-25.249999999999993
+-25.439999999999998
+-24.669999999999998
+-24.96
+-33.05000000000003
+-10.660000000000055
+-10.020000000000017
+-14.670000000000048
+-27.900000000000013
+-8.69000000000002
+-14.950000000000049
+-27.229999999999993
+-28.69000000000001
+-26.14
+-27.759999999999998
+-28.070000000000004
+-13.509999999999998
+-24.909999999999993
+-30.860000000000028
+-25.919999999999995
+-20.76000000000002
+-26.64
+-26.900000000000002
+-14.260000000000066
+-30.38000000000001
+-18.16000000000001
+-25.18
+-20.07000000000003
+-23.43
+-22.36
+-15.42000000000005
+-19.640000000000022
+-27.840000000000003
+-14.060000000000045
+-28.840000000000003
+-28.24
+-25.71
+-32.19000000000002
+-14.640000000000047
+-22.100000000000033
+-26.35
+-30.60000000000001
+-27.590000000000003
+-19.1
+-9.590000000000027
+-22.040000000000042
+-25.02
+-23.210000000000043
+-27.75000000000001
+-21.39
+-14.840000000000042
+-15.260000000000058
+-18.580000000000013
+-29.680000000000014
+-16.260000000000048
+-29.120000000000008
+-4.209999999999997
+-13.809999999999997
+-14.430000000000055
+-21.51
+-26.600000000000012
+-12.950000000000001
+-18.2
+-17.15
+-18.2
+-18.689999999999998
+-26.840000000000003
+-25.92
+-29.440000000000015
+-21.690000000000026
+-31.25000000000003
+-25.5
+-29.66000000000001
+-32.04000000000002
+-25.119999999999997
+-18.710000000000015
+-15.68000000000004
+-15.140000000000052
+-23.24000000000003
+-15.850000000000046
+-29.650000000000016
+-31.310000000000034
+-29.970000000000013
+-16.26000000000003
+-6.840000000000018
+-7.530000000000019
+-15.900000000000043
+-16.300000000000036
+-25.2
+-27.85000000000001
+-28.150000000000013
+-22.709999999999997
+-19.220000000000013
+-27.849999999999994
+-15.64000000000003
+-31.58000000000002
+-22.010000000000023
+-20.050000000000026
+-27.28000000000001
+-27.060000000000006
+-32.28000000000004
+-4.429999999999997
+-28.340000000000007
+-22.9
+-14.43000000000003
+-18.42
+-20.86
+-16.680000000000057
+-6.7499999999999964
+-31.57000000000002
+-22.379999999999995
+-19.29
+-15.78
+-19.01000000000002
+-22.770000000000017
+-15.280000000000058
+-17.66000000000001
+-22.29
+-14.160000000000023
+-24.160000000000046
+-28.060000000000002
+-13.369999999999996
+-25.03
+-10.850000000000001
+-15.830000000000048
+-31.960000000000022
+-29.070000000000007
+-16.730000000000004
+-8.26
+-35.15000000000006
+-26.479999999999997
+-22.450000000000028
+-32.940000000000026
+-15.830000000000062
+-29.230000000000015
+-20.5
+-18.2
+-13.670000000000002
+-19.009999999999998
+-20.03
+-24.62
+-15.59
+-16.740000000000006
+-29.65000000000002
+-15.480000000000057
+-29.340000000000003
+-24.93
+-18.98
+-26.0
+-8.70000000000003
+-24.040000000000042
+-21.420000000000023
+-24.599999999999998
+-21.87
+-21.67
+-18.2
+-20.36
+-21.090000000000003
+-20.09
+-21.53
+-26.05
+-17.530000000000005
+-5.08
+-23.93
+-26.369999999999997
+-31.770000000000035
+-15.040000000000045
+-33.87000000000004
+-21.29
+-19.009999999999998
+-19.180000000000014
+-14.26
+-15.010000000000048
+-16.04000000000004
+-21.52
+-22.25
+-28.99000000000002
+-23.400000000000034
+-21.400000000000002
+-6.69
+-18.2
+-18.2
+-18.2
+-12.8
+-18.2
+-16.28
+-15.760000000000034
+-15.290000000000049
+-27.35
+-28.960000000000008
+-20.800000000000022
+-15.220000000000057
+-24.659999999999993
+-5.72
+-22.74
+-24.659999999999993
+-24.229999999999997
+-15.000000000000057
+-27.499999999999993
+-14.750000000000032
+-24.77
+-20.3
+-21.200000000000003
+-16.12000000000005
+-16.32000000000005
+-14.43000000000003
+-15.910000000000046
+-13.940000000000024
+-21.18
+-22.739999999999995
+-21.810000000000002
+-19.9
+-22.71
+-14.760000000000002
+-14.190000000000001
+-20.42
+-20.52
+-5.52
+-24.279999999999998
+-15.860000000000035
+-14.939999999999994
+-16.60000000000005
+-28.41000000000001
+-26.09
+-25.55
+-20.68
+-18.96
+-19.77
+-8.76
+-23.94000000000004
+-15.620000000000058
+-30.86000000000002
+-8.640000000000017
+-15.640000000000047
+-16.980000000000054
+-28.650000000000013
+-32.32000000000003
+-20.46
+-14.570000000000059
+-29.960000000000015
+-24.209999999999997
+-29.960000000000015
+-15.900000000000036
+-28.800000000000015
+-4.209999999999999
+-15.430000000000046
+-15.100000000000035
+-7.52000000000001
+-28.08000000000001
+-25.0
+-26.460000000000008
+-14.280000000000028
+-23.04
+-27.220000000000002
+-15.080000000000048
+-25.980000000000004
+-32.950000000000045
+-29.990000000000016
+-18.689999999999998
+-18.96
+-28.180000000000014
+-30.110000000000017
+-14.920000000000051
+-25.039999999999996
+-17.049999999999997
+-13.110000000000053
+-30.87000000000001
+-29.820000000000014
+-14.58000000000005
+-15.960000000000058
+-16.14000000000006
+-15.97000000000003
+-5.030000000000005
+-21.67
+-26.25
+-31.300000000000036
+-23.939999999999998
+-15.500000000000057
+-26.68
+-22.230000000000032
+-23.500000000000036
+-25.309999999999995
+-14.810000000000024
diff --git "a/DQN_mulit_tensorflow_2/backup/1/\346\226\260\345\273\272\346\226\207\346\234\254\346\226\207\346\241\243.txt" "b/DQN_mulit_tensorflow_2/backup/1/\346\226\260\345\273\272\346\226\207\346\234\254\346\226\207\346\241\243.txt"
new file mode 100644
index 0000000..54ce7a3
--- /dev/null
+++ "b/DQN_mulit_tensorflow_2/backup/1/\346\226\260\345\273\272\346\226\207\346\234\254\346\226\207\346\241\243.txt"
@@ -0,0 +1,3 @@
+前4000ep 的reward* 1.5
+4000ep 开始 平局不作数
+5000ep 开始 修复reward fail计算两次的问题
\ No newline at end of file
diff --git a/DQN_mulit_tensorflow_2/backup/2/FFA1000/FFA1000.data-00000-of-00001 b/DQN_mulit_tensorflow_2/backup/2/FFA1000/FFA1000.data-00000-of-00001
new file mode 100644
index 0000000..2550e3d
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/2/FFA1000/FFA1000.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/backup/2/FFA1000/FFA1000.index b/DQN_mulit_tensorflow_2/backup/2/FFA1000/FFA1000.index
new file mode 100644
index 0000000..af95c58
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/2/FFA1000/FFA1000.index differ
diff --git a/DQN_mulit_tensorflow_2/backup/2/FFA1000/checkpoint b/DQN_mulit_tensorflow_2/backup/2/FFA1000/checkpoint
new file mode 100644
index 0000000..29be90e
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/2/FFA1000/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "FFA1000"
+all_model_checkpoint_paths: "FFA1000"
diff --git a/DQN_mulit_tensorflow_2/backup/2/FFA1500/FFA1500.data-00000-of-00001 b/DQN_mulit_tensorflow_2/backup/2/FFA1500/FFA1500.data-00000-of-00001
new file mode 100644
index 0000000..6c591e0
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/2/FFA1500/FFA1500.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/backup/2/FFA1500/FFA1500.index b/DQN_mulit_tensorflow_2/backup/2/FFA1500/FFA1500.index
new file mode 100644
index 0000000..e7aa070
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/2/FFA1500/FFA1500.index differ
diff --git a/DQN_mulit_tensorflow_2/backup/2/FFA1500/checkpoint b/DQN_mulit_tensorflow_2/backup/2/FFA1500/checkpoint
new file mode 100644
index 0000000..b7b6bee
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/2/FFA1500/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "FFA1500"
+all_model_checkpoint_paths: "FFA1500"
diff --git a/DQN_mulit_tensorflow_2/backup/2/FFA2000/FFA2000.data-00000-of-00001 b/DQN_mulit_tensorflow_2/backup/2/FFA2000/FFA2000.data-00000-of-00001
new file mode 100644
index 0000000..03f7c9b
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/2/FFA2000/FFA2000.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/backup/2/FFA2000/FFA2000.index b/DQN_mulit_tensorflow_2/backup/2/FFA2000/FFA2000.index
new file mode 100644
index 0000000..7ab0f35
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/2/FFA2000/FFA2000.index differ
diff --git a/DQN_mulit_tensorflow_2/backup/2/FFA2000/checkpoint b/DQN_mulit_tensorflow_2/backup/2/FFA2000/checkpoint
new file mode 100644
index 0000000..1dcab0c
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/2/FFA2000/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "FFA2000"
+all_model_checkpoint_paths: "FFA2000"
diff --git a/DQN_mulit_tensorflow_2/backup/2/FFA2500/FFA2500.data-00000-of-00001 b/DQN_mulit_tensorflow_2/backup/2/FFA2500/FFA2500.data-00000-of-00001
new file mode 100644
index 0000000..22822e9
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/2/FFA2500/FFA2500.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/backup/2/FFA2500/FFA2500.index b/DQN_mulit_tensorflow_2/backup/2/FFA2500/FFA2500.index
new file mode 100644
index 0000000..defeeaa
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/2/FFA2500/FFA2500.index differ
diff --git a/DQN_mulit_tensorflow_2/backup/2/FFA2500/checkpoint b/DQN_mulit_tensorflow_2/backup/2/FFA2500/checkpoint
new file mode 100644
index 0000000..e1df453
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/2/FFA2500/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "FFA2500"
+all_model_checkpoint_paths: "FFA2500"
diff --git a/DQN_mulit_tensorflow_2/backup/2/FFA3000/FFA3000.data-00000-of-00001 b/DQN_mulit_tensorflow_2/backup/2/FFA3000/FFA3000.data-00000-of-00001
new file mode 100644
index 0000000..8fab8b2
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/2/FFA3000/FFA3000.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/backup/2/FFA3000/FFA3000.index b/DQN_mulit_tensorflow_2/backup/2/FFA3000/FFA3000.index
new file mode 100644
index 0000000..25c3370
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/2/FFA3000/FFA3000.index differ
diff --git a/DQN_mulit_tensorflow_2/backup/2/FFA3000/checkpoint b/DQN_mulit_tensorflow_2/backup/2/FFA3000/checkpoint
new file mode 100644
index 0000000..05be1ca
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/2/FFA3000/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "FFA3000"
+all_model_checkpoint_paths: "FFA3000"
diff --git a/DQN_mulit_tensorflow_2/backup/2/FFA3500/FFA3500.data-00000-of-00001 b/DQN_mulit_tensorflow_2/backup/2/FFA3500/FFA3500.data-00000-of-00001
new file mode 100644
index 0000000..10b4592
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/2/FFA3500/FFA3500.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/backup/2/FFA3500/FFA3500.index b/DQN_mulit_tensorflow_2/backup/2/FFA3500/FFA3500.index
new file mode 100644
index 0000000..03c552a
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/2/FFA3500/FFA3500.index differ
diff --git a/DQN_mulit_tensorflow_2/backup/2/FFA3500/checkpoint b/DQN_mulit_tensorflow_2/backup/2/FFA3500/checkpoint
new file mode 100644
index 0000000..6b23a0a
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/2/FFA3500/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "FFA3500"
+all_model_checkpoint_paths: "FFA3500"
diff --git a/DQN_mulit_tensorflow_2/backup/2/FFA4000/FFA4000.data-00000-of-00001 b/DQN_mulit_tensorflow_2/backup/2/FFA4000/FFA4000.data-00000-of-00001
new file mode 100644
index 0000000..75ef77f
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/2/FFA4000/FFA4000.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/backup/2/FFA4000/FFA4000.index b/DQN_mulit_tensorflow_2/backup/2/FFA4000/FFA4000.index
new file mode 100644
index 0000000..6e98238
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/2/FFA4000/FFA4000.index differ
diff --git a/DQN_mulit_tensorflow_2/backup/2/FFA4000/checkpoint b/DQN_mulit_tensorflow_2/backup/2/FFA4000/checkpoint
new file mode 100644
index 0000000..04247ef
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/2/FFA4000/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "FFA4000"
+all_model_checkpoint_paths: "FFA4000"
diff --git a/DQN_mulit_tensorflow_2/backup/2/FFA4500/FFA4500.data-00000-of-00001 b/DQN_mulit_tensorflow_2/backup/2/FFA4500/FFA4500.data-00000-of-00001
new file mode 100644
index 0000000..e959ebd
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/2/FFA4500/FFA4500.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/backup/2/FFA4500/FFA4500.index b/DQN_mulit_tensorflow_2/backup/2/FFA4500/FFA4500.index
new file mode 100644
index 0000000..ba6f775
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/2/FFA4500/FFA4500.index differ
diff --git a/DQN_mulit_tensorflow_2/backup/2/FFA4500/checkpoint b/DQN_mulit_tensorflow_2/backup/2/FFA4500/checkpoint
new file mode 100644
index 0000000..70f6686
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/2/FFA4500/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "FFA4500"
+all_model_checkpoint_paths: "FFA4500"
diff --git a/DQN_mulit_tensorflow_2/backup/2/FFA500/FFA500.data-00000-of-00001 b/DQN_mulit_tensorflow_2/backup/2/FFA500/FFA500.data-00000-of-00001
new file mode 100644
index 0000000..b2a5ec5
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/2/FFA500/FFA500.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/backup/2/FFA500/FFA500.index b/DQN_mulit_tensorflow_2/backup/2/FFA500/FFA500.index
new file mode 100644
index 0000000..fe61fa8
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/2/FFA500/FFA500.index differ
diff --git a/DQN_mulit_tensorflow_2/backup/2/FFA500/checkpoint b/DQN_mulit_tensorflow_2/backup/2/FFA500/checkpoint
new file mode 100644
index 0000000..10482c1
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/2/FFA500/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "FFA500"
+all_model_checkpoint_paths: "FFA500"
diff --git a/DQN_mulit_tensorflow_2/backup/2/FFA5000/FFA5000.data-00000-of-00001 b/DQN_mulit_tensorflow_2/backup/2/FFA5000/FFA5000.data-00000-of-00001
new file mode 100644
index 0000000..92e4c07
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/2/FFA5000/FFA5000.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/backup/2/FFA5000/FFA5000.index b/DQN_mulit_tensorflow_2/backup/2/FFA5000/FFA5000.index
new file mode 100644
index 0000000..8e8ee26
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/2/FFA5000/FFA5000.index differ
diff --git a/DQN_mulit_tensorflow_2/backup/2/FFA5000/checkpoint b/DQN_mulit_tensorflow_2/backup/2/FFA5000/checkpoint
new file mode 100644
index 0000000..941edb1
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/2/FFA5000/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "FFA5000"
+all_model_checkpoint_paths: "FFA5000"
diff --git a/DQN_mulit_tensorflow_2/backup/2/FFA5500/FFA5500.data-00000-of-00001 b/DQN_mulit_tensorflow_2/backup/2/FFA5500/FFA5500.data-00000-of-00001
new file mode 100644
index 0000000..f9fa8b8
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/2/FFA5500/FFA5500.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/backup/2/FFA5500/FFA5500.index b/DQN_mulit_tensorflow_2/backup/2/FFA5500/FFA5500.index
new file mode 100644
index 0000000..b3570d9
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/2/FFA5500/FFA5500.index differ
diff --git a/DQN_mulit_tensorflow_2/backup/2/FFA5500/checkpoint b/DQN_mulit_tensorflow_2/backup/2/FFA5500/checkpoint
new file mode 100644
index 0000000..77701fd
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/2/FFA5500/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "FFA5500"
+all_model_checkpoint_paths: "FFA5500"
diff --git a/DQN_mulit_tensorflow_2/backup/2/result.csv b/DQN_mulit_tensorflow_2/backup/2/result.csv
new file mode 100644
index 0000000..871216c
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/2/result.csv
@@ -0,0 +1,5501 @@
+result
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
diff --git a/DQN_mulit_tensorflow_2/backup/2/reward.csv b/DQN_mulit_tensorflow_2/backup/2/reward.csv
new file mode 100644
index 0000000..5e852d9
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/2/reward.csv
@@ -0,0 +1,5501 @@
+reward
+-5.1499999999999995
+-5.1499999999999995
+-4.15
+-16.70000000000003
+-4.12
+-5.1499999999999995
+-10.14999999999999
+-5.1499999999999995
+-5.1499999999999995
+-5.1499999999999995
+-5.1499999999999995
+-5.1499999999999995
+-4.15
+-14.760000000000003
+-4.15
+-5.1499999999999995
+-5.1499999999999995
+-5.1499999999999995
+-5.1499999999999995
+-5.1499999999999995
+-5.1499999999999995
+-5.1499999999999995
+-5.1499999999999995
+-5.1499999999999995
+-4.15
+-9.009999999999994
+-5.1499999999999995
+-8.139999999999997
+-5.1499999999999995
+-5.1499999999999995
+-5.1499999999999995
+-5.1499999999999995
+-5.1499999999999995
+-5.1499999999999995
+-5.1499999999999995
+-5.1499999999999995
+-4.180000000000001
+-6.320000000000001
+-9.97999999999999
+-6.24
+-11.359999999999976
+-5.359999999999999
+-4.030000000000002
+-5.1499999999999995
+-4.9399999999999995
+-1.080000000000001
+-6.430000000000001
+-6.110000000000001
+-4.15
+-8.589999999999991
+-5.44
+-6.44
+-5.63
+-3.66
+-6.3900000000000015
+-4.79
+-10.449999999999985
+-3.83
+-4.989999999999999
+-5.890000000000001
+-5.12
+-4.32
+-5.1499999999999995
+-5.32
+-4.3100000000000005
+-4.2
+-4.73
+-3.83
+-4.64
+-5.25
+-3.790000000000001
+-5.47
+-4.24
+-4.69
+-4.24
+-4.57
+-5.529999999999999
+-4.24
+-4.16
+-6.600000000000001
+-3.6300000000000026
+-5.1
+-5.38
+-1.8100000000000005
+-5.12
+-5.48
+-5.119999999999999
+-6.0200000000000005
+-5.109999999999999
+-5.0200000000000005
+-4.12
+-5.4399999999999995
+-5.710000000000001
+-5.25
+-4.819999999999999
+-4.7299999999999995
+-5.69
+-4.6499999999999995
+-5.3
+-5.82
+-5.06
+-5.1499999999999995
+-4.54
+-4.16
+-4.13
+-5.2299999999999995
+-5.37
+4.589999999999999
+-4.24
+-5.1499999999999995
+-5.12
+-4.75
+-6.590000000000001
+-4.49
+-5.8500000000000005
+-5.63
+-4.15
+-5.12
+-6.340000000000002
+-4.79
+-3.89
+-6.0
+-5.28
+-4.89
+-3.95
+-7.0200000000000005
+-6.0200000000000005
+-4.8
+-5.2
+-4.44
+-4.29
+-5.109999999999999
+-5.09
+-4.15
+-4.31
+-4.85
+-5.06
+-5.540000000000001
+-5.77
+-5.220000000000001
+-5.109999999999999
+-4.11
+-5.12
+-3.8100000000000005
+-2.1999999999999984
+-5.750000000000002
+-4.41
+-3.2399999999999993
+-4.38
+-5.83
+-3.1999999999999993
+-4.48
+-5.109999999999999
+-3.169999999999999
+-5.12
+-4.84
+-3.8500000000000005
+-4.3100000000000005
+-4.040000000000001
+-4.12
+-4.29
+-4.48
+-4.84
+-4.15
+-5.12
+-5.14
+-4.16
+-5.71
+-4.41
+-4.930000000000001
+-5.24
+-6.110000000000001
+-5.75
+-5.74
+-4.16
+-5.19
+-5.54
+-5.74
+-5.09
+-6.550000000000001
+-3.8100000000000005
+-4.12
+-4.030000000000001
+-4.78
+-5.12
+-4.18
+-4.96
+-4.710000000000001
+-4.88
+-5.710000000000001
+-5.38
+-4.14
+-5.72
+-5.1499999999999995
+-5.68
+-5.12
+-4.32
+-5.47
+-3.7600000000000007
+-5.529999999999999
+-5.3
+-5.640000000000001
+-4.81
+-5.12
+-5.2
+-4.79
+-3.89
+-4.9399999999999995
+-5.1499999999999995
+-4.6899999999999995
+-4.64
+-4.15
+-5.56
+-4.93
+-5.04
+-5.9
+-5.21
+-4.29
+-2.91
+-8.629999999999992
+-5.319999999999999
+-3.6900000000000004
+-4.3100000000000005
+-6.350000000000001
+-3.7900000000000005
+-5.29
+-5.12
+-5.12
+-4.75
+-4.859999999999999
+-6.03
+-4.140000000000001
+-5.16
+-3.3600000000000003
+-4.859999999999999
+-6.77
+-5.700000000000001
+-4.64
+-4.390000000000001
+-4.82
+-5.16
+-4.5
+-5.06
+-4.89
+-5.58
+-5.54
+-5.1499999999999995
+-5.04
+-5.1499999999999995
+-5.1499999999999995
+-4.12
+-4.84
+-5.03
+-7.139999999999997
+-6.020000000000001
+-5.03
+-3.8400000000000007
+-4.59
+-5.1499999999999995
+-4.55
+-4.32
+-5.63
+-4.44
+-4.39
+-4.44
+-4.64
+-5.1
+-3.940000000000001
+-5.63
+-4.15
+-5.1499999999999995
+-4.300000000000001
+-5.1499999999999995
+-5.1499999999999995
+-4.12
+-5.3999999999999995
+-4.15
+-4.0600000000000005
+-3.7600000000000007
+-4.77
+-4.06
+-5.22
+-5.12
+-5.1499999999999995
+-5.860000000000001
+-5.35
+-5.119999999999999
+-7.969999999999997
+-3.670000000000001
+-4.0200000000000005
+-5.64
+-5.74
+-5.890000000000001
+-5.88
+-5.44
+-5.12
+-5.119999999999999
+-4.71
+-5.1499999999999995
+-5.1499999999999995
+-5.1499999999999995
+-5.4399999999999995
+-5.119999999999999
+-5.51
+-5.29
+-5.12
+-3.920000000000001
+-5.45
+-4.640000000000001
+-4.76
+-4.69
+-4.8
+-5.1499999999999995
+-4.82
+-4.92
+-4.33
+-4.12
+-5.12
+-4.97
+-4.89
+-4.869999999999999
+-5.37
+-4.3
+-7.309999999999999
+-5.6499999999999995
+-4.35
+-5.4399999999999995
+-4.67
+-4.54
+-4.07
+-4.12
+-5.119999999999999
+-4.24
+-4.15
+-5.3
+-4.34
+-5.9
+-4.840000000000001
+-5.41
+-4.74
+-4.36
+-3.99
+-4.93
+-4.34
+-5.4399999999999995
+-4.15
+-5.1499999999999995
+-5.3999999999999995
+-3.74
+-5.1499999999999995
+-3.7200000000000006
+-5.12
+-5.3
+-5.12
+-4.92
+-5.1499999999999995
+-5.09
+-4.58
+-4.140000000000001
+-4.15
+-4.24
+-5.06
+-5.2
+-5.1499999999999995
+-4.69
+-3.209999999999999
+-5.75
+-4.15
+-4.5
+-4.15
+-5.1499999999999995
+-4.86
+-4.84
+-5.59
+-5.32
+-5.3999999999999995
+-4.15
+-4.15
+-5.119999999999999
+-5.2299999999999995
+-4.15
+-4.64
+-5.87
+-4.34
+-4.15
+-11.119999999999978
+-5.1499999999999995
+-5.12
+-4.44
+-5.13
+-5.35
+-5.24
+-4.9799999999999995
+-4.21
+-5.069999999999999
+-6.09
+-4.859999999999999
+-3.7
+-5.510000000000001
+-5.9
+-7.160000000000001
+-7.309999999999998
+-4.79
+-3.7300000000000004
+-5.0
+-5.1499999999999995
+-5.1499999999999995
+-4.15
+-5.1499999999999995
+-4.89
+-5.28
+-6.260000000000001
+-5.390000000000001
+-4.9399999999999995
+-4.82
+-4.89
+-4.72
+-5.04
+-6.360000000000001
+-3.5300000000000007
+-5.1499999999999995
+-5.02
+-3.6100000000000003
+-4.93
+-5.1499999999999995
+-4.12
+-5.109999999999999
+-0.17
+-4.84
+-5.83
+-4.4
+-5.12
+-5.12
+-4.67
+-4.69
+-4.93
+-5.1499999999999995
+-4.390000000000001
+-4.24
+-4.78
+-5.1499999999999995
+-3.9800000000000004
+-5.12
+-4.79
+-4.69
+-5.22
+-5.09
+-5.42
+-4.2
+-4.66
+-4.16
+-5.12
+-3.7500000000000004
+-5.29
+-5.09
+-5.3
+-5.06
+-6.0600000000000005
+-5.760000000000001
+-7.189999999999999
+-5.12
+-5.1499999999999995
+-4.35
+-5.159999999999999
+-4.74
+-5.4799999999999995
+-4.67
+-5.1499999999999995
+-5.25
+-5.1499999999999995
+-5.12
+-4.109999999999999
+-5.3
+-5.1499999999999995
+-5.18
+-5.600000000000001
+-5.1499999999999995
+-4.76
+-4.680000000000001
+-4.06
+-5.2
+-4.15
+-3.84
+-4.359999999999999
+-4.45
+-5.390000000000001
+-4.34
+-4.41
+-4.24
+-4.87
+-5.73
+-6.61
+-5.109999999999999
+-5.119999999999999
+-4.390000000000001
+-4.71
+-4.42
+-4.109999999999999
+-5.52
+-4.15
+-4.15
+-4.450000000000001
+-5.25
+-5.06
+-6.210000000000001
+-5.73
+-5.71
+-4.15
+-5.12
+-6.73
+-6.620000000000001
+-4.54
+-4.89
+-4.34
+-4.84
+-4.24
+-5.09
+-4.15
+-4.140000000000001
+-5.12
+-4.76
+-4.390000000000001
+-4.17
+-4.12
+-5.08
+-3.93
+-4.24
+-5.33
+-5.06
+-5.409999999999999
+-5.25
+-4.91
+-4.45
+-4.15
+-4.15
+-5.1499999999999995
+-5.529999999999999
+-5.94
+-5.1499999999999995
+-5.27
+-5.2299999999999995
+-5.1499999999999995
+-5.31
+-4.76
+-5.12
+-5.1499999999999995
+-4.2
+-5.06
+-4.92
+-5.1499999999999995
+-4.23
+-5.58
+-3.79
+-4.2
+-3.9700000000000006
+-3.89
+-4.9399999999999995
+-5.38
+-5.53
+-6.050000000000001
+-4.12
+-3.89
+-5.16
+-3.730000000000001
+-3.380000000000001
+-6.23
+-3.560000000000001
+-4.15
+-5.1499999999999995
+-4.69
+-4.89
+-5.1499999999999995
+-4.74
+-3.6900000000000004
+-4.1
+-4.67
+-5.12
+-3.74
+-5.1499999999999995
+-4.3100000000000005
+-5.39
+-5.7
+-3.74
+-2.859999999999999
+-4.220000000000001
+-4.74
+-3.74
+-5.01
+-4.19
+-4.73
+-5.1
+-4.74
+-3.79
+-4.32
+-5.12
+-4.6000000000000005
+-5.1499999999999995
+-5.25
+-5.7
+-5.12
+-5.12
+-5.12
+-4.66
+-4.090000000000001
+-4.68
+-1.4200000000000004
+-6.570000000000002
+-4.37
+-4.5600000000000005
+-5.1499999999999995
+-5.33
+-4.9399999999999995
+-5.1499999999999995
+-5.12
+-4.19
+-3.690000000000001
+-4.84
+-4.24
+-5.24
+-4.029999999999999
+-5.409999999999999
+-4.180000000000001
+-4.15
+1.1900000000000013
+-4.37
+-6.1
+-5.18
+-4.220000000000001
+-5.1499999999999995
+-6.690000000000001
+-9.399999999999986
+-7.359999999999999
+-4.390000000000001
+-5.01
+-4.15
+-6.050000000000001
+-5.06
+-4.01
+-5.53
+-6.15
+-3.559999999999998
+-5.13
+-5.24
+-5.12
+-4.15
+-5.5
+-4.140000000000001
+-6.210000000000003
+-6.300000000000001
+-4.67
+-2.9599999999999977
+-5.24
+-4.48
+-4.77
+-4.58
+-4.15
+-5.550000000000001
+-5.33
+-3.169999999999999
+-4.15
+-5.06
+-5.41
+-3.8200000000000003
+-5.68
+-3.2399999999999993
+-5.39
+-3.2399999999999993
+-4.89
+-4.31
+-5.31
+-5.45
+-5.43
+-5.990000000000001
+-5.1499999999999995
+-5.42
+-4.54
+-4.69
+-4.15
+-4.48
+-6.930000000000001
+-5.5
+-7.579999999999998
+-4.49
+-5.29
+-3.5000000000000004
+-5.1499999999999995
+-5.1499999999999995
+-4.89
+-5.119999999999999
+-4.989999999999999
+-4.989999999999999
+-4.15
+-5.1499999999999995
+-5.1499999999999995
+-4.54
+-4.19
+-5.09
+-4.71
+-5.51
+-5.13
+-5.1499999999999995
+-5.1899999999999995
+-5.119999999999999
+-4.89
+-4.16
+-4.44
+-5.770000000000001
+-4.79
+-4.24
+-4.69
+-3.4200000000000004
+-4.390000000000001
+-4.9799999999999995
+-5.109999999999999
+-4.49
+-4.2
+-5.01
+-5.12
+-4.76
+-4.72
+-5.1499999999999995
+-4.44
+-5.12
+-4.71
+-4.98
+-5.29
+-4.819999999999999
+-4.43
+-4.64
+-6.790000000000001
+-4.37
+-5.42
+-5.56
+-5.12
+-5.640000000000001
+1.3599999999999999
+-5.1499999999999995
+-4.75
+-5.12
+-5.17
+-5.66
+-5.79
+-4.54
+-5.7700000000000005
+-3.6900000000000004
+-5.1499999999999995
+-4.460000000000001
+-5.840000000000001
+-5.32
+-4.460000000000001
+-4.34
+-5.56
+-4.64
+-4.54
+-5.64
+-4.92
+-1.359999999999998
+-4.54
+-5.25
+-4.38
+-5.52
+-4.15
+-6.23
+-5.09
+-5.119999999999999
+-4.87
+-3.700000000000001
+-4.64
+-5.770000000000001
+-5.1499999999999995
+-3.1999999999999993
+-4.15
+-5.55
+-4.919999999999999
+-4.300000000000001
+-4.290000000000001
+-5.27
+-2.4399999999999986
+-5.029999999999999
+-6.470000000000001
+-3.460000000000001
+-5.63
+-3.74
+-4.89
+-5.0600000000000005
+-5.12
+-4.15
+-4.92
+-5.1499999999999995
+-4.15
+-5.1499999999999995
+-4.0600000000000005
+-3.450000000000001
+-5.1499999999999995
+-5.12
+-4.72
+-5.12
+-5.12
+-4.69
+-5.14
+-4.96
+-4.12
+-4.74
+-4.44
+-3.7800000000000002
+-5.06
+-4.25
+-5.12
+-6.77
+-5.12
+-5.1499999999999995
+-5.1499999999999995
+-5.78
+-5.12
+-4.25
+-5.14
+-4.15
+-5.1499999999999995
+-5.1499999999999995
+-5.51
+-4.84
+-5.22
+-4.15
+-5.12
+-4.15
+-4.0600000000000005
+-5.74
+-5.470000000000001
+-4.4
+-5.1499999999999995
+-5.42
+-5.12
+-5.1499999999999995
+-3.3400000000000003
+-5.579999999999999
+-5.04
+-3.6900000000000004
+-7.259999999999998
+-5.13
+-4.989999999999999
+-4.95
+-7.23
+-4.43
+-5.69
+-4.63
+-4.64
+-4.84
+-5.2299999999999995
+-5.39
+-5.08
+-5.12
+-5.890000000000001
+-4.16
+-3.2299999999999973
+-4.25
+-5.58
+-5.1499999999999995
+-6.050000000000001
+-6.700000000000001
+-5.9
+-5.13
+-6.390000000000001
+-4.2
+-4.74
+-4.19
+-3.8099999999999996
+-5.05
+-4.29
+-0.10999999999999999
+-5.880000000000001
+-4.16
+-5.34
+-4.87
+-5.35
+-5.1499999999999995
+-5.1499999999999995
+-5.1499999999999995
+-5.319999999999999
+-4.64
+-6.160000000000001
+-5.3999999999999995
+-4.29
+-5.12
+-4.76
+-4.21
+-5.42
+-4.27
+-3.79
+-6.210000000000001
+-4.15
+-4.15
+-4.15
+-5.1499999999999995
+-5.52
+-5.83
+-5.420000000000001
+-3.96
+-6.960000000000002
+-4.19
+-5.34
+-4.12
+-4.15
+-4.49
+-4.54
+-5.7
+-5.1499999999999995
+-5.1499999999999995
+-5.1499999999999995
+-5.12
+-5.1499999999999995
+-5.1499999999999995
+-3.5600000000000005
+-5.1499999999999995
+-5.12
+-5.1499999999999995
+-5.39
+-6.330000000000001
+-5.03
+-5.17
+-5.7700000000000005
+-3.3800000000000017
+-5.1
+-5.1899999999999995
+-5.45
+-5.21
+-5.1499999999999995
+-4.68
+-6.03
+-4.29
+-4.15
+-4.390000000000001
+-5.47
+-4.12
+-3.9400000000000004
+-5.1499999999999995
+-7.06
+-4.96
+-4.89
+-3.29
+-4.81
+-4.54
+-5.2
+-4.54
+-5.609999999999999
+-5.47
+-5.109999999999999
+-4.88
+-5.06
+-5.35
+-5.19
+-5.900000000000001
+-4.72
+-4.12
+-4.25
+-5.22
+-6.28
+-4.92
+-3.75
+-4.44
+-4.16
+-5.5
+-5.1499999999999995
+-5.3
+-3.9400000000000004
+-5.58
+-4.8999999999999995
+-4.76
+-5.38
+-6.040000000000001
+-3.8000000000000007
+-4.9399999999999995
+-3.680000000000001
+-0.7999999999999972
+-4.77
+-5.25
+-3.74
+-4.200000000000001
+-4.84
+-5.1499999999999995
+-4.47
+-4.92
+-5.1499999999999995
+-3.87
+-5.109999999999999
+-4.490000000000001
+-5.260000000000001
+-4.55
+-5.76
+-3.7900000000000005
+-4.24
+-4.390000000000001
+-3.74
+-4.19
+-4.64
+-5.39
+-3.79
+-5.1499999999999995
+-4.15
+-4.15
+-4.15
+-5.17
+-5.5
+-3.9600000000000004
+-4.36
+-5.51
+-4.15
+-5.1499999999999995
+-5.2
+-5.1499999999999995
+-4.12
+-4.67
+-4.97
+-4.88
+-5.119999999999999
+-5.4
+-4.61
+-2.55
+-4.15
+-5.29
+-5.25
+-5.06
+-4.380000000000001
+-3.580000000000001
+-4.34
+-4.1
+-6.990000000000002
+-5.8
+-7.369999999999999
+-4.33
+-5.44
+-5.540000000000001
+-5.1499999999999995
+-3.72
+-4.54
+-5.06
+-5.45
+-4.6
+-4.49
+-4.58
+-3.1399999999999992
+-5.12
+-5.1499999999999995
+-5.18
+-3.74
+-4.01
+-3.8900000000000006
+-4.9799999999999995
+-4.0600000000000005
+-4.25
+-5.109999999999999
+-5.38
+-4.29
+-5.38
+-4.74
+-4.25
+-3.64
+-5.2
+-5.52
+-4.17
+-5.55
+-5.109999999999999
+-4.9399999999999995
+-5.45
+-4.29
+-5.38
+-4.4
+-5.56
+-4.78
+-5.75
+-4.989999999999999
+-4.74
+-5.1499999999999995
+-5.1499999999999995
+-4.59
+-6.360000000000001
+-4.25
+-5.12
+-4.45
+-4.79
+-5.1499999999999995
+-4.34
+-3.1799999999999997
+-4.74
+-4.75
+-4.36
+-4.17
+-4.58
+-4.15
+-4.83
+-5.16
+-4.41
+-5.159999999999999
+-5.029999999999999
+-3.3400000000000003
+-4.15
+-3.02
+-4.79
+-4.91
+-3.6600000000000006
+-4.59
+-4.26
+-4.45
+-4.0600000000000005
+-5.27
+-4.44
+-5.33
+-4.2700000000000005
+-5.1499999999999995
+-5.1499999999999995
+-5.12
+-5.12
+-4.84
+-4.12
+-4.93
+-4.29
+-4.21
+-6.410000000000001
+-5.08
+-4.74
+-5.37
+-4.2
+-5.159999999999999
+-5.5
+-5.12
+-4.5200000000000005
+-4.06
+-5.47
+-5.12
+-2.8999999999999986
+-4.77
+-4.84
+-5.1499999999999995
+-5.1499999999999995
+-4.34
+-4.21
+-3.74
+-4.87
+-5.2299999999999995
+-5.1499999999999995
+-5.16
+-5.1499999999999995
+-5.1499999999999995
+-5.21
+-5.12
+-4.34
+-5.8500000000000005
+-4.79
+-5.1499999999999995
+-5.12
+-5.109999999999999
+-4.1899999999999995
+-4.19
+-4.23
+-4.84
+-3.74
+-5.1499999999999995
+-4.109999999999999
+-4.28
+-4.24
+-5.1499999999999995
+-2.6099999999999985
+-4.26
+-5.319999999999999
+-3.79
+-5.109999999999999
+-5.109999999999999
+-5.119999999999999
+-5.1499999999999995
+-4.99
+-5.22
+-5.109999999999999
+-5.41
+-5.12
+-3.51
+-5.17
+-4.15
+-3.6400000000000006
+-5.1499999999999995
+-4.73
+-4.15
+-3.74
+-4.4
+-5.1499999999999995
+-5.1499999999999995
+-4.65
+-3.2299999999999995
+-4.84
+-4.78
+-4.72
+-5.43
+-6.140000000000001
+-5.06
+-4.41
+-5.12
+-3.7600000000000002
+-5.58
+-5.880000000000001
+-5.1499999999999995
+-5.1499999999999995
+-4.15
+-4.89
+-4.24
+-4.54
+-4.15
+-5.1499999999999995
+-4.430000000000001
+-5.12
+-4.949999999999999
+-4.3100000000000005
+-4.89
+-5.91
+-4.9399999999999995
+-5.12
+-6.330000000000001
+-4.35
+-5.22
+-3.5900000000000007
+-5.710000000000001
+-5.1499999999999995
+-4.220000000000001
+-5.45
+-3.6400000000000006
+-4.54
+-4.12
+-5.52
+-4.74
+-3.3499999999999996
+-4.49
+-5.31
+-4.48
+-4.45
+-5.14
+-5.840000000000001
+-4.76
+-4.84
+-4.0200000000000005
+-4.15
+-5.1499999999999995
+-4.15
+-5.04
+-4.1899999999999995
+-4.44
+-5.510000000000001
+-4.59
+-4.65
+-3.870000000000001
+-4.43
+-5.73
+-6.890000000000001
+-4.96
+-4.54
+-4.15
+-3.420000000000001
+-5.870000000000001
+-5.16
+-4.17
+-5.1499999999999995
+-4.91
+-4.15
+-5.3
+-3.5000000000000004
+-5.12
+-5.1499999999999995
+-5.2299999999999995
+-4.040000000000001
+-4.15
+-4.57
+-4.53
+-4.24
+-4.19
+-4.12
+-4.15
+-4.0600000000000005
+-5.2
+-4.0600000000000005
+-4.59
+-5.1499999999999995
+-4.78
+-5.1499999999999995
+-4.15
+-5.1499999999999995
+-5.72
+-4.15
+-3.9400000000000004
+-6.569999999999999
+-7.319999999999997
+-6.140000000000001
+-5.359999999999999
+-8.289999999999994
+-3.79
+-7.689999999999996
+-4.15
+-4.32
+-5.12
+-4.29
+-4.15
+-4.12
+-5.24
+-6.810000000000001
+-4.9399999999999995
+-5.1499999999999995
+-1.969999999999996
+-4.680000000000001
+-2.709999999999998
+-4.1899999999999995
+-4.97
+-4.58
+-4.699999999999999
+-4.2
+-5.119999999999999
+-4.77
+-3.7699999999999996
+-4.66
+-6.420000000000001
+-5.22
+-5.47
+-4.16
+-3.64
+-5.609999999999999
+-4.040000000000001
+-4.949999999999999
+-4.85
+-4.12
+-7.409999999999998
+-5.1499999999999995
+-5.1499999999999995
+-4.34
+-5.069999999999999
+-4.29
+-5.109999999999999
+-4.64
+-4.15
+-4.66
+-5.109999999999999
+-5.48
+-5.72
+-4.25
+-5.140000000000001
+-4.51
+-5.14
+-4.84
+-4.390000000000001
+-5.1499999999999995
+-4.89
+-4.04
+-5.1499999999999995
+-4.420000000000001
+-6.090000000000001
+-5.2299999999999995
+-4.9399999999999995
+-4.92
+-3.4400000000000004
+-5.42
+-5.459999999999999
+-4.2
+-4.24
+-5.3
+-3.4800000000000004
+-4.93
+-5.6
+-5.5
+-5.1499999999999995
+-5.3999999999999995
+-3.84
+-5.3500000000000005
+-4.4
+-4.0600000000000005
+-5.27
+-4.9399999999999995
+-4.24
+-4.06
+-5.69
+-5.12
+-4.49
+-4.040000000000001
+-4.06
+-5.1899999999999995
+-5.12
+-4.77
+-4.9399999999999995
+-4.69
+-5.329999999999999
+-4.84
+-4.17
+-4.59
+-4.64
+-2.419999999999999
+-4.76
+-5.06
+-4.16
+-5.1499999999999995
+-4.97
+-6.07
+-5.12
+-4.8
+-4.340000000000001
+-5.119999999999999
+-5.710000000000001
+-4.390000000000001
+-4.17
+-5.14
+-4.42
+-5.8100000000000005
+-4.07
+-4.050000000000001
+-4.45
+-3.2399999999999993
+-5.1499999999999995
+-3.7100000000000004
+-3.74
+-5.1499999999999995
+-5.04
+-4.19
+-5.12
+-5.1499999999999995
+-4.0600000000000005
+-4.44
+-4.15
+-4.74
+-5.1499999999999995
+-4.5
+-5.3999999999999995
+-4.56
+-4.59
+-5.12
+-5.239999999999999
+-5.39
+-4.79
+-4.190000000000001
+-4.96
+-4.38
+-4.300000000000001
+-5.1499999999999995
+-5.1499999999999995
+-4.15
+-5.1499999999999995
+-5.1499999999999995
+-3.49
+-4.01
+-4.49
+-4.44
+-5.12
+-6.0600000000000005
+-5.1499999999999995
+-5.4399999999999995
+-6.130000000000001
+-4.15
+-4.9399999999999995
+-4.08
+-4.24
+-5.039999999999999
+-4.29
+-4.5
+-6.1400000000000015
+-5.24
+-4.34
+-4.46
+-5.06
+-2.92
+-4.6499999999999995
+-4.25
+-3.1499999999999995
+-3.7700000000000005
+-3.2099999999999995
+-4.25
+-5.1499999999999995
+-4.44
+-3.4900000000000007
+-4.71
+-4.19
+-5.66
+-4.46
+-4.15
+-5.3
+-4.8999999999999995
+-4.67
+-4.19
+-4.200000000000001
+-4.0600000000000005
+-4.44
+-5.109999999999999
+-5.029999999999999
+-4.89
+-5.24
+-3.9700000000000006
+-5.109999999999999
+-5.41
+-3.8000000000000007
+-5.1499999999999995
+-5.3
+-4.64
+-5.04
+-5.1499999999999995
+-6.5600000000000005
+-5.710000000000001
+-4.74
+-4.6899999999999995
+-5.119999999999999
+-5.700000000000001
+-2.3799999999999972
+-4.15
+-4.68
+-5.55
+-4.080000000000001
+-5.119999999999999
+-4.17
+-4.989999999999999
+-5.86
+-4.49
+-4.15
+-4.79
+-4.63
+-5.06
+-5.109999999999999
+-4.989999999999999
+-5.29
+-5.32
+-4.89
+-5.2
+-5.27
+-3.84
+-5.1499999999999995
+-4.74
+-5.1499999999999995
+-4.15
+-4.15
+-5.1499999999999995
+-5.109999999999999
+-5.5
+-4.15
+-4.000000000000001
+-5.22
+-6.280000000000001
+-5.13
+-5.46
+-5.59
+-5.82
+-4.7700000000000005
+-4.28
+-5.25
+-5.1499999999999995
+-3.3600000000000008
+-5.9
+-4.01
+-9.62999999999999
+-5.98
+-4.11
+-5.31
+-3.74
+-5.1499999999999995
+-4.220000000000001
+-3.64
+-3.3400000000000003
+-3.3400000000000003
+-6.91
+-3.79
+-4.64
+-5.57
+-4.69
+-4.390000000000001
+-4.43
+-3.41
+-5.119999999999999
+-5.1899999999999995
+-5.069999999999999
+-4.44
+-2.289999999999999
+-4.79
+-5.7
+-5.75
+-4.12
+-5.3999999999999995
+-4.15
+-5.1499999999999995
+-4.15
+-4.210000000000001
+-4.89
+-3.74
+-4.67
+-4.59
+-4.24
+-5.1499999999999995
+-4.79
+-4.77
+-4.15
+-5.1499999999999995
+-5.4399999999999995
+-4.59
+-4.62
+-4.75
+-4.8
+-4.74
+-5.1499999999999995
+-5.06
+-3.4500000000000006
+-7.439999999999998
+-5.63
+-5.5
+-4.15
+-9.059999999999995
+-4.61
+-4.12
+-5.06
+-5.76
+-4.21
+-4.89
+-4.28
+-5.12
+-4.89
+-4.15
+-5.619999999999999
+-5.8
+-5.119999999999999
+-5.62
+-5.12
+-4.29
+-3.7100000000000004
+-5.1499999999999995
+-4.220000000000001
+-4.91
+-5.960000000000001
+-3.2099999999999995
+-6.160000000000001
+-4.21
+-5.159999999999999
+-5.14
+-4.62
+-4.28
+-4.12
+-4.59
+-4.680000000000001
+-5.119999999999999
+-4.89
+-5.6
+-4.87
+-4.12
+-4.44
+-4.64
+-5.16
+-3.5900000000000007
+-5.83
+-5.35
+-5.06
+-5.1499999999999995
+-5.12
+-4.54
+-4.2
+-5.26
+-5.14
+-5.12
+-4.74
+-4.89
+-5.2
+-5.63
+-4.9399999999999995
+-5.26
+-5.529999999999999
+-4.74
+-4.17
+-5.1499999999999995
+-5.1499999999999995
+-3.570000000000001
+-5.12
+-5.24
+-5.12
+-5.12
+-4.69
+-5.1499999999999995
+-4.959999999999999
+-4.15
+-4.34
+-3.650000000000001
+-5.52
+-5.8500000000000005
+-4.04
+-5.52
+-4.34
+-4.71
+-3.459999999999999
+-4.470000000000001
+-7.219999999999995
+-4.15
+-5.5200000000000005
+-5.930000000000001
+-5.220000000000001
+-5.05
+-6.49
+2.419999999999998
+-5.119999999999999
+-3.21
+-4.71
+-5.1499999999999995
+-5.1499999999999995
+-4.3100000000000005
+-4.81
+-3.7500000000000004
+-4.989999999999999
+-4.24
+-4.29
+-4.49
+-4.65
+-4.74
+-4.04
+-5.1499999999999995
+-5.09
+-5.1499999999999995
+-5.79
+-5.119999999999999
+-6.33
+-7.0
+-5.119999999999999
+-4.8
+-5.2299999999999995
+-4.949999999999999
+-4.17
+-5.459999999999999
+-6.0200000000000005
+-4.140000000000001
+-4.15
+-5.2
+-5.12
+-5.12
+-4.19
+-5.1499999999999995
+-5.1499999999999995
+-3.6400000000000006
+-5.75
+-5.1499999999999995
+-5.1499999999999995
+-5.1499999999999995
+-4.0600000000000005
+-4.9399999999999995
+-5.09
+-5.06
+-5.62
+-5.31
+-5.55
+-5.26
+-4.88
+-4.71
+-3.4400000000000004
+-5.1499999999999995
+-4.34
+-5.64
+-4.989999999999999
+-4.08
+-4.28
+-4.92
+-4.64
+-4.64
+-4.15
+-5.1499999999999995
+-4.74
+-5.07
+-5.22
+-4.72
+-4.0600000000000005
+-5.02
+-4.789999999999999
+-5.2
+-5.1499999999999995
+-4.54
+-5.119999999999999
+-5.109999999999999
+-4.89
+-4.29
+-5.119999999999999
+-3.67
+-5.4799999999999995
+-5.06
+-4.17
+-4.15
+-5.1499999999999995
+-4.15
+-4.090000000000001
+-4.99
+-5.25
+-3.730000000000001
+-7.939999999999998
+-5.7299999999999995
+-5.12
+-4.58
+-5.68
+-3.74
+-5.790000000000001
+-4.29
+-6.300000000000001
+-4.32
+-5.04
+-4.15
+-5.119999999999999
+-4.45
+-3.79
+-5.66
+-5.12
+-5.569999999999999
+-4.32
+-4.1
+-4.46
+-5.06
+-5.27
+-5.0
+-5.43
+-3.4400000000000004
+-4.84
+-5.36
+-5.06
+-6.08
+-5.119999999999999
+-4.34
+-3.5600000000000005
+-4.29
+-5.32
+-5.12
+-3.6900000000000004
+-4.54
+-5.12
+-5.109999999999999
+-5.4399999999999995
+-3.630000000000001
+-3.9400000000000004
+-4.29
+-4.17
+-4.12
+-4.19
+-4.69
+-5.28
+-5.06
+-5.1499999999999995
+-5.1499999999999995
+-3.4400000000000004
+-5.06
+-5.09
+-5.1499999999999995
+-4.15
+-5.12
+-3.8300000000000005
+-5.21
+-4.44
+-5.49
+-4.32
+-5.12
+-4.25
+-5.24
+-5.1499999999999995
+-5.1499999999999995
+-4.15
+-4.4
+-4.15
+-4.24
+-4.49
+-4.5
+-4.2
+-5.17
+-4.4
+-4.74
+-4.470000000000001
+-5.109999999999999
+-3.5900000000000007
+-5.5
+-4.53
+-4.35
+-5.119999999999999
+-5.14
+-4.29
+-5.1499999999999995
+-4.15
+-4.67
+-5.24
+-5.119999999999999
+-4.15
+-4.590000000000001
+-5.1499999999999995
+-5.27
+-4.0200000000000005
+-4.46
+-5.21
+-4.92
+-4.74
+-5.29
+-5.99
+-5.119999999999999
+-4.7
+-4.4
+-5.1899999999999995
+-4.79
+-4.370000000000003
+-4.87
+-4.74
+-5.1499999999999995
+-5.12
+-4.83
+-3.22
+-5.3
+-4.15
+-5.3999999999999995
+-4.0600000000000005
+-4.25
+-3.7900000000000005
+-4.15
+-4.540000000000001
+-4.4
+-4.79
+-8.479999999999995
+-5.109999999999999
+-4.890000000000001
+-4.01
+-4.040000000000001
+-5.19
+-5.159999999999999
+-5.09
+-4.620000000000001
+-4.27
+-4.15
+-4.470000000000001
+-3.64
+-3.5100000000000002
+-5.1499999999999995
+-4.12
+-4.29
+-6.400000000000001
+-5.52
+-3.67
+-4.45
+-4.29
+-6.250000000000001
+-3.4300000000000006
+-4.15
+-5.75
+-4.54
+-2.7399999999999984
+-4.12
+-5.14
+-5.4399999999999995
+-5.84
+-4.15
+-4.84
+-4.93
+-4.74
+-4.61
+-5.319999999999999
+-4.81
+-4.24
+-4.21
+-5.1499999999999995
+-5.06
+-4.15
+-4.83
+-5.24
+-5.24
+-4.9799999999999995
+-5.1499999999999995
+-4.74
+-4.34
+-5.1499999999999995
+-5.12
+-4.580000000000001
+-5.34
+-4.89
+-4.49
+-5.1499999999999995
+-4.030000000000001
+-5.06
+-4.470000000000001
+-5.36
+-4.79
+-4.46
+-4.39
+-4.15
+-4.71
+-3.960000000000001
+-2.3799999999999986
+-5.12
+-4.26
+-3.74
+-4.11
+-5.22
+-5.5
+-4.67
+-6.1000000000000005
+-4.25
+-3.8200000000000003
+-4.390000000000001
+-5.76
+-5.06
+-5.59
+-5.12
+-4.95
+4.5299999999999985
+-4.68
+-4.15
+-5.1499999999999995
+-5.1499999999999995
+-5.1499999999999995
+-3.750000000000001
+-5.109999999999999
+-3.29
+-3.4900000000000007
+-5.13
+-4.15
+-4.12
+-4.4399999999999995
+-4.17
+-6.570000000000002
+-4.109999999999999
+-3.71
+-4.32
+-4.15
+-5.82
+-4.46
+-4.140000000000001
+-5.12
+-4.35
+-5.86
+-4.12
+-4.45
+-5.53
+-4.109999999999999
+-3.93
+-4.15
+-5.45
+-5.0200000000000005
+-5.1499999999999995
+-4.15
+-5.1499999999999995
+-5.14
+-4.24
+-5.109999999999999
+-3.84
+-4.59
+-3.5500000000000007
+-3.26
+-3.3900000000000006
+-5.06
+-4.29
+-5.1499999999999995
+-4.46
+-4.7
+-4.46
+-4.9399999999999995
+-3.84
+-5.750000000000001
+-4.44
+-4.120000000000001
+-4.140000000000001
+-5.43
+-3.64
+-5.26
+-3.67
+-4.54
+-4.3100000000000005
+-5.32
+-5.1499999999999995
+-4.109999999999999
+-5.36
+-3.29
+-2.679999999999999
+-4.090000000000001
+-4.12
+-5.09
+-4.97
+-5.2299999999999995
+-4.6899999999999995
+-5.62
+-5.1499999999999995
+-3.74
+-5.14
+-5.06
+-4.26
+-3.670000000000001
+-5.6
+-7.09
+-5.14
+-4.72
+-5.1499999999999995
+-4.15
+-6.0600000000000005
+-4.569999999999999
+-4.69
+-3.7600000000000007
+-5.1499999999999995
+-4.12
+-4.15
+-4.84
+-5.1499999999999995
+-4.71
+-4.96
+-4.74
+-4.93
+-4.51
+-4.25
+-5.4399999999999995
+1.0799999999999992
+-5.04
+-5.09
+-3.64
+-5.17
+-4.08
+-3.6900000000000004
+-4.15
+-3.8400000000000007
+-4.6
+-3.5200000000000005
+-5.1499999999999995
+-5.3100000000000005
+-4.71
+-5.2
+-4.89
+-5.32
+-6.4300000000000015
+-5.1499999999999995
+-4.0
+-4.77
+-5.55
+-5.1499999999999995
+-2.7499999999999982
+-5.54
+-5.159999999999999
+-4.24
+-4.06
+-6.540000000000001
+-5.250000000000001
+-5.12
+-4.74
+-5.960000000000001
+-6.590000000000001
+-5.960000000000001
+-5.2
+-4.04
+-6.420000000000002
+-4.1899999999999995
+-4.9399999999999995
+-4.12
+-5.2
+-4.300000000000001
+-4.55
+-4.500000000000001
+-5.1499999999999995
+1.3299999999999992
+-5.17
+-5.1499999999999995
+-4.19
+-5.17
+-5.69
+-5.3999999999999995
+-4.33
+-5.1499999999999995
+-5.109999999999999
+-5.1499999999999995
+-4.89
+-4.24
+-5.1499999999999995
+-5.1499999999999995
+-5.12
+-5.029999999999999
+-5.569999999999999
+-5.74
+-4.24
+-5.529999999999999
+0.7000000000000028
+-4.819999999999999
+-4.7299999999999995
+-5.1499999999999995
+-3.96
+-5.17
+-4.76
+-4.41
+-5.26
+-4.15
+-5.670000000000001
+-5.1499999999999995
+-4.15
+-5.1499999999999995
+-4.44
+-3.3000000000000003
+-3.089999999999999
+-5.25
+-3.37
+-4.24
+-4.24
+-4.9399999999999995
+-5.12
+-5.73
+-5.3
+-6.3500000000000005
+-5.1499999999999995
+-5.1499999999999995
+-4.9399999999999995
+-3.84
+-3.5900000000000007
+-5.41
+-4.24
+-6.360000000000001
+-5.920000000000001
+-6.420000000000001
+-2.0999999999999988
+-5.4399999999999995
+-4.9399999999999995
+-7.399999999999999
+-4.96
+-4.84
+-5.5
+-6.590000000000002
+-4.93
+-4.64
+-5.12
+-4.67
+-4.44
+-5.4799999999999995
+-4.59
+-4.79
+-4.749999999999999
+-2.969999999999999
+-3.79
+-5.710000000000002
+-5.34
+-5.1499999999999995
+-5.04
+-3.620000000000001
+-4.49
+-5.6000000000000005
+-5.1499999999999995
+-4.18
+-9.049999999999994
+-6.650000000000001
+-4.9799999999999995
+-5.12
+-4.92
+-4.58
+-4.75
+-4.84
+-5.1499999999999995
+-5.1499999999999995
+-4.12
+11.190000000000001
+-4.03
+-5.45
+-6.23
+-4.48
+-3.7700000000000005
+-4.48
+-5.1499999999999995
+-6.55
+-4.36
+-3.74
+-3.4000000000000004
+-4.84
+-5.58
+-5.1499999999999995
+-5.06
+-5.1499999999999995
+-5.4399999999999995
+-5.430000000000001
+-5.39
+-4.420000000000001
+-3.84
+-4.12
+-5.029999999999999
+-5.1499999999999995
+-4.410000000000001
+-4.54
+8.110000000000005
+-4.7
+-4.99
+-4.74
+-6.370000000000001
+-5.59
+-4.93
+-4.120000000000001
+-4.17
+-5.04
+-2.239999999999999
+-4.79
+-5.1
+-4.49
+-5.1499999999999995
+-5.449999999999999
+-4.89
+-4.32
+-4.2
+-4.19
+-4.29
+-7.120000000000001
+-5.14
+-6.070000000000001
+-5.750000000000001
+-5.14
+-6.23
+-4.12
+-4.0200000000000005
+-4.19
+-3.8400000000000007
+-5.12
+-3.8900000000000006
+-4.2
+-5.06
+-3.29
+-3.6399999999999997
+-5.29
+-5.7
+-4.9399999999999995
+-4.84
+-4.359999999999999
+-5.51
+-3.2399999999999993
+-4.64
+-3.99
+-4.67
+-6.0600000000000005
+-5.06
+-5.5
+-5.159999999999999
+-5.1499999999999995
+-4.31
+-5.37
+-4.84
+-4.84
+-4.300000000000001
+-5.459999999999999
+-6.16
+-5.1499999999999995
+-5.2
+-5.39
+-4.9399999999999995
+-5.99
+-5.840000000000001
+-5.1499999999999995
+-5.06
+-5.4799999999999995
+-5.29
+-5.61
+-5.29
+-3.74
+-5.41
+-3.7400000000000007
+-4.24
+-4.74
+-6.120000000000001
+-5.45
+-5.05
+-4.34
+-4.89
+-5.159999999999999
+-5.61
+-3.2399999999999993
+-5.7
+-4.67
+-5.119999999999999
+-4.44
+-4.85
+-3.99
+-4.71
+-4.3500000000000005
+-4.859999999999999
+-4.12
+-5.17
+-5.1499999999999995
+-5.369999999999999
+-5.960000000000001
+-4.680000000000001
+-4.34
+-4.98
+-4.220000000000001
+-4.64
+-4.84
+-6.03
+-4.6499999999999995
+-4.67
+-5.73
+-4.8999999999999995
+-5.29
+-4.24
+-4.74
+-4.140000000000001
+-4.03
+-4.880000000000001
+-5.890000000000001
+-4.79
+-4.24
+-4.21
+-4.88
+-5.3
+-6.07
+-4.9399999999999995
+-5.1499999999999995
+-4.21
+-4.29
+-3.2700000000000005
+-4.79
+-4.6899999999999995
+-5.73
+-4.19
+-3.6900000000000004
+-4.12
+-1.5600000000000014
+-5.1499999999999995
+-4.15
+-4.68
+-4.66
+-5.1499999999999995
+-5.1499999999999995
+-5.1499999999999995
+-5.14
+-5.1899999999999995
+-3.9900000000000007
+-5.12
+-6.090000000000002
+-4.82
+-4.0600000000000005
+-5.1899999999999995
+-5.109999999999999
+-4.29
+-4.24
+-5.1499999999999995
+-4.79
+-5.1499999999999995
+-4.12
+-5.39
+-4.24
+-5.04
+-5.119999999999999
+-5.470000000000001
+-5.1499999999999995
+-4.640000000000001
+-5.27
+-4.63
+-5.02
+-5.1499999999999995
+-5.29
+-4.89
+-4.709999999999999
+-4.48
+-4.62
+-5.1499999999999995
+-5.1499999999999995
+-5.12
+-4.83
+-5.1499999999999995
+-5.17
+-4.79
+-5.1499999999999995
+-4.82
+-4.9399999999999995
+-2.349999999999998
+-4.89
+-3.5700000000000003
+-3.039999999999999
+-5.1499999999999995
+-2.669999999999999
+-5.1499999999999995
+-4.0600000000000005
+-5.04
+-4.42
+-4.75
+-4.69
+-5.2
+-4.94
+-3.26
+-4.94
+-4.9399999999999995
+-4.6
+-3.99
+-5.1499999999999995
+-5.17
+-4.909999999999999
+-5.6899999999999995
+-5.870000000000001
+-4.74
+-5.1499999999999995
+-5.1499999999999995
+-4.7299999999999995
+-4.67
+-5.42
+-5.5
+-4.88
+-4.21
+-5.1499999999999995
+-4.87
+-5.1499999999999995
+-3.5900000000000007
+-5.1499999999999995
+-4.06
+-4.34
+-5.6
+-5.35
+-5.109999999999999
+-5.2
+-4.15
+-4.24
+-5.369999999999999
+-3.74
+-5.12
+-4.24
+-4.21
+-4.82
+-4.61
+-5.49
+-3.49
+-4.75
+-5.12
+-4.15
+-5.47
+-5.09
+-5.12
+-5.52
+-7.639999999999999
+-5.01
+-5.12
+-4.15
+-4.12
+-4.15
+-4.84
+-5.960000000000001
+-5.1499999999999995
+-5.06
+-4.48
+-3.5700000000000003
+-5.279999999999999
+-5.1499999999999995
+-4.81
+-3.92
+-4.58
+-5.64
+-4.18
+-3.3900000000000006
+-5.04
+-4.29
+-5.18
+-4.4399999999999995
+-5.22
+-5.16
+-4.65
+-3.940000000000001
+-4.15
+-4.5
+-3.99
+-3.7900000000000005
+-4.32
+-5.119999999999999
+-4.71
+-3.2799999999999994
+-4.24
+-5.1499999999999995
+-4.89
+-3.7900000000000005
+-5.3999999999999995
+-4.24
+-4.3100000000000005
+-5.320000000000002
+-3.060000000000001
+-5.18
+-5.1499999999999995
+-4.020000000000001
+-5.1499999999999995
+-4.74
+-5.1499999999999995
+-5.12
+-5.2
+-5.2299999999999995
+-4.44
+-5.159999999999999
+-4.74
+-3.64
+-4.33
+-4.51
+-5.39
+-7.529999999999997
+-5.14
+-5.5600000000000005
+-4.1
+-5.01
+-4.989999999999999
+-5.04
+-5.159999999999999
+-5.790000000000001
+-5.1499999999999995
+-4.87
+-5.1499999999999995
+-4.06
+-4.24
+-5.1499999999999995
+-4.24
+-4.33
+-4.390000000000001
+-5.2299999999999995
+-5.930000000000001
+-5.26
+-5.08
+-5.1499999999999995
+-5.1899999999999995
+-4.12
+-4.74
+-4.34
+-4.24
+-4.64
+-5.59
+-4.8
+-5.27
+-3.4900000000000007
+-4.140000000000001
+-3.3400000000000003
+-4.89
+-5.1499999999999995
+-4.41
+-4.79
+-4.54
+-4.96
+-4.29
+-5.17
+-4.9399999999999995
+-5.47
+-5.1499999999999995
+-4.390000000000001
+-4.69
+-5.38
+-3.6900000000000004
+-4.48
+-5.04
+-4.05
+-5.14
+-5.17
+-5.22
+-4.19
+-4.9
+-2.0199999999999987
+-5.12
+-4.1899999999999995
+-3.470000000000001
+-4.79
+-5.1499999999999995
+-5.1499999999999995
+-4.24
+-2.8099999999999987
+-5.25
+-5.12
+-5.04
+-5.109999999999999
+-5.33
+-5.109999999999999
+-4.46
+-5.52
+-4.0600000000000005
+-4.15
+-6.28
+-4.78
+-4.36
+-5.770000000000001
+-4.2700000000000005
+-4.55
+-5.74
+-4.28
+-5.390000000000001
+-5.25
+-5.33
+-4.89
+-4.71
+-2.889999999999999
+-4.24
+-4.58
+-4.84
+-5.2700000000000005
+-4.74
+-5.05
+-4.38
+-7.619999999999996
+-5.1499999999999995
+-6.010000000000001
+-4.44
+-5.12
+-5.1499999999999995
+-7.149999999999997
+-4.35
+-4.44
+-3.4900000000000007
+-5.1
+-4.24
+-5.1499999999999995
+-5.51
+-5.1499999999999995
+-4.2
+-4.46
+-4.390000000000001
+-5.109999999999999
+-5.22
+-5.21
+-5.08
+-4.17
+-3.920000000000001
+-4.16
+-5.12
+-4.24
+-6.960000000000002
+-5.37
+-4.24
+-5.06
+-5.1499999999999995
+-5.12
+-5.3
+-4.16
+-5.33
+-4.390000000000001
+-5.1499999999999995
+-4.59
+-5.2
+-4.67
+-6.320000000000001
+-4.84
+-5.46
+-4.24
+-5.4399999999999995
+-5.33
+-4.19
+-4.4
+-3.88
+-4.78
+-4.28
+-5.38
+-4.390000000000001
+-5.39
+-5.1499999999999995
+-5.06
+-5.34
+-5.49
+-3.67
+-4.6
+-5.1499999999999995
+-4.15
+-3.88
+-5.1499999999999995
+-5.12
+-3.9100000000000015
+-4.06
+-4.999999999999999
+-5.37
+-3.540000000000001
+-4.34
+-4.15
+-5.1499999999999995
+-4.15
+-4.15
+-4.28
+-5.159999999999999
+-3.9400000000000004
+-5.1499999999999995
+-5.86
+-5.1499999999999995
+-4.33
+-5.640000000000001
+-3.8900000000000006
+-5.119999999999999
+-4.2
+-5.1499999999999995
+-4.74
+-5.1499999999999995
+-4.82
+-4.74
+-3.2399999999999993
+-5.2
+-5.25
+-3.3900000000000006
+-5.1499999999999995
+-5.34
+-5.12
+-4.91
+-5.840000000000001
+-5.49
+-5.23
+-5.119999999999999
+-4.29
+-4.81
+-5.1499999999999995
+-5.42
+-4.34
+-2.8999999999999986
+-4.29
+-4.19
+-5.1499999999999995
+-4.24
+-4.7
+-4.15
+-5.1499999999999995
+-4.65
+-5.73
+-5.06
+-5.04
+-4.34
+-4.5200000000000005
+-5.1499999999999995
+-4.610000000000001
+-4.44
+-4.15
+-4.92
+-4.46
+-5.24
+-6.450000000000001
+-5.39
+-4.390000000000001
+-4.12
+-5.640000000000001
+-5.06
+-5.069999999999999
+-5.25
+-4.91
+-5.12
+-5.119999999999999
+-4.15
+-4.15
+-6.520000000000001
+-5.02
+-4.78
+-5.18
+-5.0
+-5.119999999999999
+-3.97
+-5.06
+-4.390000000000001
+-4.79
+-4.24
+-4.82
+-4.6899999999999995
+-5.79
+-5.1499999999999995
+-5.06
+-4.7
+-4.76
+-4.300000000000001
+-5.1499999999999995
+-5.1499999999999995
+-4.59
+-4.79
+-3.77
+-4.74
+-7.929999999999991
+-5.25
+-3.2399999999999993
+-4.29
+-3.29
+-4.89
+-4.64
+-5.12
+-4.35
+-4.49
+-4.93
+-4.87
+-2.6799999999999993
+-4.0600000000000005
+-4.590000000000001
+-5.55
+-4.29
+-3.79
+-4.15
+-3.2200000000000006
+-4.0600000000000005
+-5.1499999999999995
+-4.630000000000001
+-7.7599999999999945
+-5.06
+-4.59
+-4.5
+-4.15
+-5.1499999999999995
+-5.2
+-5.12
+-4.15
+-4.15
+-4.12
+-4.15
+-4.3
+-5.1499999999999995
+-2.3099999999999996
+-4.300000000000001
+-5.13
+-5.1499999999999995
+-5.18
+-4.24
+-3.9800000000000004
+-4.12
+-4.74
+-5.159999999999999
+-3.74
+-4.66
+-5.1499999999999995
+-4.040000000000001
+-4.8
+-5.159999999999999
+-4.35
+-4.17
+-4.15
+-5.1499999999999995
+-5.1499999999999995
+-4.15
+-4.24
+-4.34
+-3.4700000000000006
+-4.819999999999999
+-4.12
+-4.140000000000001
+-5.34
+-6.550000000000002
+-4.0600000000000005
+-5.319999999999999
+-6.450000000000001
+-4.15
+-4.88
+-3.64
+-4.34
+-4.62
+-5.33
+-5.25
+-4.530000000000001
+-5.06
+-5.47
+-4.24
+-5.06
+-5.890000000000001
+-4.32
+-4.79
+-4.97
+-5.890000000000001
+-4.9399999999999995
+-6.4
+-4.55
+-4.18
+-5.71
+-7.300000000000001
+-5.44
+-5.2
+-4.989999999999999
+-4.74
+-4.47
+-4.89
+-4.83
+-4.390000000000001
+-5.119999999999999
+-4.989999999999999
+-5.06
+-3.8400000000000007
+-5.8500000000000005
+-5.46
+-5.1499999999999995
+-5.1499999999999995
+-5.1499999999999995
+-4.18
+-4.4
+-4.65
+-4.81
+-5.54
+-5.1499999999999995
+-4.49
+-7.739999999999998
+-4.97
+-5.29
+-4.989999999999999
+-4.59
+-4.390000000000001
+-4.460000000000001
+-4.17
+-5.1499999999999995
+-4.34
+-3.74
+-5.1499999999999995
+-5.1499999999999995
+-4.53
+-4.79
+-3.2299999999999995
+-3.79
+-5.38
+-5.119999999999999
+-5.12
+-5.1499999999999995
+-4.67
+-3.9400000000000004
+-5.08
+-5.12
+-4.58
+-4.300000000000001
+-1.5200000000000011
+-5.35
+-4.84
+-5.25
+-5.12
+-5.12
+-5.12
+-5.5
+-4.109999999999999
+-5.17
+-4.44
+-5.1499999999999995
+-5.1499999999999995
+-3.74
+-5.04
+-5.16
+-5.06
+-4.17
+-5.49
+-5.22
+-4.140000000000001
+-4.5200000000000005
+-3.79
+-4.24
+-4.840000000000001
+-5.8
+-4.51
+-3.97
+-4.79
+-4.680000000000001
+-5.770000000000001
+-6.62
+-5.820000000000001
+-4.74
+-5.16
+-4.24
+-5.34
+-4.290000000000001
+-4.34
+-4.76
+-4.66
+-3.99
+-5.12
+-4.74
+-4.71
+-4.79
+-5.1499999999999995
+-5.1499999999999995
+-4.71
+-3.6800000000000006
+-4.89
+-4.79
+-5.2
+-3.2399999999999993
+-4.74
+-4.9399999999999995
+-5.53
+-5.14
+-4.24
+-4.15
+-5.02
+-5.1499999999999995
+-4.74
+-4.63
+-4.84
+-5.29
+-3.2399999999999993
+-8.619999999999996
+-4.220000000000001
+-4.6899999999999995
+-4.829999999999999
+-5.3999999999999995
+-4.74
+-3.84
+-4.49
+-4.470000000000001
+-5.59
+-4.15
+-3.700000000000001
+-5.73
+-4.4
+-5.609999999999999
+-5.1499999999999995
+-4.140000000000001
+-3.5200000000000014
+-2.6999999999999997
+-5.09
+-4.15
+-5.33
+-4.91
+-5.65
+-5.29
+-4.18
+-4.12
+-4.33
+-5.359999999999999
+-6.390000000000001
+-4.030000000000001
+-4.69
+-5.2
+-4.390000000000001
+-4.86
+-5.39
+-5.16
+-4.73
+-4.84
+-4.96
+-5.12
+-2.8699999999999988
+-4.989999999999999
+-4.59
+-4.000000000000001
+-4.74
+-4.89
+-3.91
+-4.6
+-4.690000000000001
+-4.55
+-4.69
+-4.06
+-4.35
+-5.1499999999999995
+-4.140000000000001
+-4.050000000000001
+-5.28
+-5.1499999999999995
+-5.1499999999999995
+-4.15
+-4.15
+-4.6
+-5.1499999999999995
+-4.79
+-3.8600000000000003
+-4.76
+-4.77
+-4.72
+-4.21
+-5.43
+-5.029999999999999
+-4.04
+-5.449999999999999
+-5.1499999999999995
+-4.15
+-5.1499999999999995
+-4.74
+-4.15
+-5.1499999999999995
+-5.1499999999999995
+-4.48
+-4.44
+-5.06
+-5.08
+-4.420000000000002
+-5.109999999999999
+-5.1899999999999995
+-5.1499999999999995
+-5.14
+-5.49
+-4.12
+-5.3
+-5.1499999999999995
+-4.29
+-4.79
+-4.15
+-4.15
+-4.77
+-5.680000000000001
+-5.069999999999999
+-4.9399999999999995
+-4.69
+-4.49
+-4.77
+-3.79
+-4.23
+-4.76
+-4.68
+-4.64
+-4.29
+-4.29
+-4.12
+-5.1499999999999995
+-4.15
+-3.83
+-5.33
+-3.3500000000000014
+-5.27
+-5.67
+-5.1499999999999995
+-5.12
+-5.45
+-5.04
+-5.119999999999999
+-4.29
+-5.1499999999999995
+-4.15
+-5.1499999999999995
+-3.0600000000000005
+-4.390000000000001
+-5.1899999999999995
+-3.2399999999999993
+-4.6
+-4.89
+-5.12
+-4.34
+-4.15
+-5.35
+-5.1499999999999995
+-5.1499999999999995
+-4.24
+-5.39
+-5.119999999999999
+-4.24
+-5.06
+-4.79
+-5.32
+-5.73
+-6.9
+-5.119999999999999
+-7.549999999999999
+-5.29
+-4.26
+-4.89
+-3.97
+-4.44
+-4.16
+-4.470000000000001
+-5.12
+-6.320000000000001
+-5.119999999999999
+-5.870000000000001
+-3.91
+-7.32
+-3.749999999999999
+-4.29
+-5.1499999999999995
+-4.73
+-5.420000000000001
+-3.8800000000000003
+-4.800000000000001
+-5.83
+-0.07999999999999918
+-5.03
+-4.359999999999999
+-7.659999999999997
+-5.17
+-4.15
+-3.8300000000000005
+-4.989999999999999
+-2.22
+-5.29
+-4.15
+-5.1499999999999995
+-5.38
+-4.81
+-3.3400000000000003
+-7.369999999999997
+-4.7600000000000025
+-4.989999999999999
+-7.519999999999998
+-5.3999999999999995
+-4.62
+-4.89
+-5.15
+-5.12
+-4.15
+-5.12
+-4.15
+-5.12
+-3.9700000000000006
+-5.52
+-5.1499999999999995
+-4.49
+-5.1499999999999995
+-5.3999999999999995
+-4.15
+-4.77
+-5.2
+-5.17
+-5.09
+-5.1499999999999995
+-3.32
+-4.46
+-4.71
+-4.4
+-4.35
+-6.03
+-5.26
+-3.74
+-3.6800000000000006
+-4.74
+-5.119999999999999
+-5.12
+-4.15
+-4.180000000000001
+-4.390000000000001
+-5.54
+-4.49
+-4.29
+-4.54
+-3.4200000000000004
+-4.12
+-5.53
+-3.119999999999999
+-3.74
+-5.1499999999999995
+-4.74
+-4.78
+-4.390000000000001
+-4.54
+-4.21
+-5.46
+-4.84
+-5.4399999999999995
+-4.8100000000000005
+-5.670000000000001
+-3.5900000000000007
+-3.0699999999999994
+-4.92
+-4.470000000000001
+-4.300000000000001
+-5.49
+-3.89
+-5.41
+-2.9299999999999993
+-3.1499999999999995
+-4.67
+-3.1999999999999993
+-4.49
+-4.359999999999999
+-5.28
+-4.78
+-5.450000000000001
+-4.15
+-6.650000000000001
+-5.1499999999999995
+-4.55
+-5.34
+-4.64
+-4.61
+-4.989999999999999
+-2.8199999999999985
+-4.15
+-3.74
+-4.66
+-5.06
+-4.739999999999999
+-6.23
+-4.82
+-6.030000000000001
+-4.0600000000000005
+-4.859999999999999
+-5.35
+-4.64
+-4.49
+-4.29
+-5.1499999999999995
+-5.32
+-5.119999999999999
+-4.49
+-5.1499999999999995
+-4.43
+-5.14
+-4.77
+-3.89
+-6.720000000000001
+-4.74
+-5.12
+-4.040000000000001
+-4.38
+-4.17
+-4.0600000000000005
+-4.2
+-4.59
+-5.1499999999999995
+-3.2399999999999993
+-4.24
+-5.7700000000000005
+-4.89
+-4.68
+-7.5499999999999945
+-5.390000000000001
+-5.09
+-5.49
+-5.359999999999999
+-5.109999999999999
+-5.1899999999999995
+-4.44
+-4.6899999999999995
+-4.15
+-4.300000000000001
+-7.6699999999999955
+-5.1499999999999995
+-0.4900000000000002
+-5.5
+-4.5600000000000005
+-4.84
+-4.49
+-3.79
+-5.1499999999999995
+-4.15
+-4.19
+-5.029999999999999
+-5.3
+-2.5699999999999985
+-5.54
+-7.159999999999997
+-4.909999999999999
+-4.89
+-4.74
+-5.499999999999999
+-4.24
+-4.74
+-5.12
+-4.74
+-4.62
+-4.15
+-5.1499999999999995
+-5.2
+-4.24
+-4.15
+-2.679999999999997
+-4.91
+-4.370000000000001
+0.39000000000000007
+-5.12
+-4.35
+-4.24
+-4.15
+-4.15
+-4.300000000000001
+-4.2
+-4.390000000000001
+-4.62
+-5.1499999999999995
+-4.79
+-5.24
+-6.5199999999999925
+-4.9399999999999995
+-5.98
+-5.06
+-4.15
+-4.44
+-6.190000000000001
+-4.89
+-3.74
+-4.27
+-5.5600000000000005
+-4.890000000000001
+-5.09
+-4.38
+-4.68
+-5.34
+-4.68
+-4.71
+-4.29
+-5.28
+-3.7900000000000005
+-5.1499999999999995
+-5.7700000000000005
+-4.93
+-4.390000000000001
+-4.15
+-3.7800000000000002
+-4.140000000000001
+-4.15
+-4.24
+-4.15
+-4.25
+-4.34
+-4.9399999999999995
+-5.14
+-4.74
+-3.71
+-3.5900000000000007
+-4.9399999999999995
+-3.2399999999999993
+-3.5600000000000005
+-2.249999999999996
+-5.23
+-5.95
+-4.3100000000000005
+-5.12
+-4.15
+-4.71
+-2.219999999999997
+-3.9300000000000006
+-4.37
+-4.84
+-4.390000000000001
+-4.79
+-5.1499999999999995
+-3.039999999999999
+-4.89
+-4.24
+-5.039999999999999
+-4.99
+-4.15
+-5.26
+-5.12
+-4.74
+-4.44
+-4.4399999999999995
+-5.35
+-5.59
+-3.89
+-4.15
+-5.49
+-1.29
+-3.89
+-5.1499999999999995
+-3.74
+-4.34
+-5.029999999999999
+-4.15
+-4.24
+-5.1499999999999995
+-4.15
+-4.9799999999999995
+-5.4399999999999995
+-4.87
+-5.04
+-5.790000000000001
+-4.680000000000001
+-3.89
+-5.109999999999999
+-4.89
+-5.38
+-4.87
+-5.119999999999999
+-4.65
+-5.1899999999999995
+-4.9399999999999995
+-4.109999999999999
+-4.390000000000001
+-2.3799999999999955
+-4.65
+-4.44
+-5.1499999999999995
+-4.15
+-5.12
+-6.520000000000001
+-4.989999999999999
+-5.26
+-4.15
+-4.71
+-5.22
+-5.1499999999999995
+-4.1
+-6.0200000000000005
+-4.6899999999999995
+-4.9399999999999995
+-5.840000000000001
+-5.27
+-4.1
+-3.6300000000000003
+-5.43
+-6.5
+-5.700000000000001
+-4.69
+-4.17
+-4.24
+-5.29
+-4.89
+-4.49
+-3.7900000000000005
+-5.38
+-5.159999999999999
+-4.3100000000000005
+-5.24
+-5.62
+-3.670000000000001
+-5.27
+-3.79
+-4.24
+-4.590000000000001
+-5.1499999999999995
+-5.12
+-4.73
+-5.1499999999999995
+-4.390000000000001
+-5.1499999999999995
+-3.4700000000000006
+-4.55
+-4.45
+-4.41
+-5.19
+-5.04
+-5.1499999999999995
+-4.87
+-5.13
+-4.390000000000001
+-5.21
+-4.76
+-4.89
+-5.34
+-3.67
+-4.88
+-4.28
+-6.330000000000001
+-4.140000000000001
+-5.12
+-4.32
+-5.05
+-5.12
+-4.15
+-4.59
+-4.96
+-4.7700000000000005
+-5.24
+-5.119999999999999
+-5.09
+-4.5
+-6.959999999999997
+-4.64
+-3.2399999999999993
+-5.12
+-4.19
+-4.06
+-4.34
+-5.3
+-3.9400000000000004
+-4.77
+-4.38
+-4.74
+-5.3
+-5.3
+-6.010000000000002
+-3.4300000000000006
+-3.650000000000001
+-5.78
+-4.95
+-5.0200000000000005
+-4.53
+-7.059999999999999
+-4.64
+-5.5
+-5.340000000000001
+-6.120000000000001
+-4.000000000000001
+-3.68
+-5.119999999999999
+-4.83
+-4.909999999999999
+-4.84
+-4.3100000000000005
+-4.74
+-4.94
+-4.04
+-5.18
+-5.78
+-5.570000000000001
+-5.1499999999999995
+-5.08
+-5.12
+-5.28
+-4.12
+-4.76
+-5.24
+-4.18
+-4.9399999999999995
+-4.15
+-5.1499999999999995
+-4.45
+-5.5
+-5.37
+-4.6899999999999995
+-4.9399999999999995
+-5.159999999999999
+-4.84
+-5.12
+-3.540000000000001
+-3.169999999999999
+-5.12
+-5.1499999999999995
+-4.24
+-5.1499999999999995
+-5.06
+-5.800000000000002
+-4.86
+-4.109999999999999
+-4.44
+-4.49
+-4.84
+-6.050000000000001
+-5.22
+-5.2
+-5.950000000000002
+-4.67
+-5.750000000000001
+4.400000000000001
+-3.8900000000000006
+-4.390000000000001
+-4.79
+-3.74
+-4.74
+-5.1899999999999995
+-5.7
+-4.4
+-1.689999999999995
+-5.119999999999999
+-3.99
+-4.989999999999999
+-4.12
+-5.1499999999999995
+-5.119999999999999
+-5.09
+-5.02
+-6.710000000000001
+-4.49
+-5.1499999999999995
+-5.2
+-5.1499999999999995
+2.9099999999999993
+-4.15
+-4.34
+-3.6799999999999997
+-4.9399999999999995
+-4.29
+-4.9399999999999995
+-4.34
+-4.49
+-4.040000000000001
+-4.79
+-4.6899999999999995
+-9.779999999999987
+-5.640000000000001
+-5.1499999999999995
+-3.3400000000000003
+-3.89
+-5.49
+-4.15
+-4.0600000000000005
+-4.15
+-4.35
+-4.4
+-5.12
+-5.12
+-4.9399999999999995
+-3.8500000000000005
+-4.19
+-5.41
+-5.14
+-4.57
+-4.9799999999999995
+-5.790000000000001
+-3.74
+-4.06
+-5.26
+-4.04
+-4.69
+-4.8
+-4.79
+-5.36
+-3.29
+-4.12
+-5.06
+-5.46
+-4.15
+-4.12
+-0.14
+-5.12
+-4.15
+-4.49
+-5.09
+-5.1499999999999995
+-3.700000000000001
+-6.450000000000001
+-4.290000000000001
+-5.28
+-6.580000000000002
+-4.29
+-3.7800000000000002
+-5.1499999999999995
+-5.54
+-4.15
+-3.74
+-5.1499999999999995
+-4.15
+-4.29
+-5.3500000000000005
+-5.1499999999999995
+-3.4300000000000006
+-5.41
+-4.15
+-5.47
+-5.59
+-4.79
+-3.870000000000001
+-4.34
+-5.1499999999999995
+-5.63
+-4.4
+-4.630000000000001
+-5.1499999999999995
+-5.52
+-4.24
+-4.37
+-4.3100000000000005
+-4.15
+-4.24
+-5.33
+-4.83
+-4.55
+-5.04
+-4.9799999999999995
+-4.16
+-5.529999999999999
+-4.24
+-2.7399999999999984
+-4.24
+-4.15
+-4.15
+-5.24
+-4.9399999999999995
+-6.220000000000001
+-4.260000000000001
+-5.57
+-7.15
+-4.64
+-5.1499999999999995
+-4.15
+-4.109999999999999
+-4.470000000000001
+-5.1499999999999995
+-4.15
+-4.79
+-5.1499999999999995
+-5.109999999999999
+-3.83
+-5.24
+-5.83
+-5.12
+-4.04
+-4.84
+-5.82
+-5.34
+-4.15
+-5.42
+-4.8999999999999995
+-6.120000000000001
+-4.26
+-5.1499999999999995
+-4.02
+-4.15
+-4.0600000000000005
+-3.33
+-5.39
+-5.0200000000000005
+-4.54
+-5.04
+-4.23
+-3.9900000000000007
+-4.71
+-5.7299999999999995
+-4.59
+-5.529999999999999
+-4.740000000000001
+-5.04
+-4.35
+-5.65
+-5.1499999999999995
+-4.24
+-4.9399999999999995
+-4.2700000000000005
+-5.16
+-3.74
+-4.109999999999999
+-3.6900000000000004
+-4.83
+-5.1499999999999995
+-5.46
+-4.280000000000001
+-4.69
+-4.84
+-5.32
+-4.83
+-6.880000000000001
+-4.5200000000000005
+-3.540000000000001
+-4.24
+-4.220000000000001
+-4.15
+-4.15
+-4.15
+-4.12
+-5.06
+-5.31
+-4.2
+-4.15
+-4.2
+-5.1499999999999995
+-4.79
+-4.17
+-4.75
+-3.6800000000000006
+-5.2
+-4.15
+-3.3400000000000003
+-5.1499999999999995
+-4.79
+-3.8300000000000005
+-4.33
+-5.1499999999999995
+-5.49
+-5.409999999999999
+-3.64
+-5.09
+-5.1499999999999995
+-5.1499999999999995
+-5.1499999999999995
+-5.1499999999999995
+-4.15
+-5.1499999999999995
+-5.1499999999999995
+-4.56
+-3.25
+-4.5600000000000005
+-4.77
+-5.500000000000001
+-5.1499999999999995
+-4.74
+-5.12
+-4.66
+-4.62
+-4.15
+-5.390000000000001
+-4.49
+-5.06
+-5.13
+-5.2
+-4.67
+-4.15
+-5.61
+-4.84
+-4.88
+-5.1499999999999995
+-5.14
+-4.89
+-4.130000000000001
+-4.12
+-5.34
+-4.44
+-4.859999999999999
+-4.24
+-5.12
+-3.3400000000000003
+-5.12
+-5.1499999999999995
+-4.15
+-5.1499999999999995
+-3.2399999999999993
+-4.17
+-5.06
+-4.74
+-4.12
+-4.9399999999999995
+-4.29
+-4.15
+-4.859999999999999
+-4.680000000000001
+-5.82
+-4.64
+-3.6200000000000006
+-5.38
+-6.090000000000002
+-5.7
+-5.6899999999999995
+-4.49
+-4.78
+-5.7700000000000005
+-4.989999999999999
+-4.12
+-1.2299999999999986
+-4.9399999999999995
+-3.37
+-5.35
+-5.1499999999999995
+-5.109999999999999
+-4.34
+-6.269999999999996
+-4.64
+-4.59
+-2.749999999999998
+-4.79
+-5.22
+-4.09
+-4.29
+-4.41
+-4.68
+-4.390000000000001
+-3.8
+-5.58
+-4.46
+-4.52
+-4.24
+-4.5200000000000005
+-5.04
+-4.24
+-4.7299999999999995
+-4.45
+-5.109999999999999
+-4.44
+-4.96
+-4.36
+-5.119999999999999
+-3.6400000000000006
+-5.1
+-5.12
+-5.49
+-4.96
+-4.34
+-3.64
+-4.54
+-6.020000000000001
+-4.78
+-5.49
+-5.5
+-5.3
+-5.02
+-4.670000000000001
+-7.709999999999995
+-4.9399999999999995
+-4.69
+-4.989999999999999
+-5.1499999999999995
+-5.62
+-4.81
+-5.63
+-4.45
+-4.140000000000001
+-5.119999999999999
+-5.24
+-4.4
+-4.420000000000001
+-5.55
+-4.989999999999999
+-4.14
+-6.250000000000001
+-3.6600000000000006
+-4.6899999999999995
+-3.4500000000000006
+-4.989999999999999
+-5.39
+-4.42
+-5.159999999999999
+-4.44
+-4.26
+-5.1499999999999995
+-5.06
+-5.59
+-4.29
+-4.6899999999999995
+-4.34
+-4.35
+-5.38
+-5.49
+-4.24
+-5.62
+-4.9799999999999995
+-4.04
+-3.0199999999999987
+-4.040000000000001
+-4.15
+-5.1499999999999995
+-5.12
+-5.1499999999999995
+-5.04
+-4.15
+-4.15
+-5.1499999999999995
+-5.109999999999999
+-5.2
+-3.74
+-4.74
+-5.17
+-3.2399999999999993
+-4.4
+-4.45
+-5.1499999999999995
+-5.1499999999999995
+-4.24
+-4.15
+-4.35
+-4.0600000000000005
+-3.29
+-5.39
+-5.35
+-4.15
+-3.3100000000000005
+-6.330000000000001
+-5.37
+-5.1499999999999995
+-5.09
+-5.4799999999999995
+-4.59
+-4.6
+-4.84
+-4.54
+-4.62
+-5.5600000000000005
+-4.34
+-5.1899999999999995
+-3.7000000000000006
+-4.34
+-5.1499999999999995
+-5.42
+-3.7100000000000004
+-5.49
+-5.1499999999999995
+-5.1499999999999995
+-4.32
+-6.570000000000001
+-6.4
+-5.94
+-3.2399999999999993
+-4.7299999999999995
+-5.960000000000001
+-4.180000000000001
+-3.79
+-5.109999999999999
+-4.36
+-4.250000000000001
+-4.38
+-4.9799999999999995
+-3.5600000000000005
+-5.3999999999999995
+-6.2700000000000005
+-6.510000000000001
+-7.449999999999996
+-5.21
+-5.1499999999999995
+-4.0600000000000005
+-5.1899999999999995
+-5.380000000000001
+-5.930000000000001
+-4.13
+-5.71
+-5.86
+-5.06
+-4.58
+-3.880000000000001
+-5.12
+-4.62
+-4.15
+-4.67
+-5.09
+-4.15
+-3.74
+-5.43
+-4.89
+-3.7900000000000005
+-4.21
+-5.2
+-5.1499999999999995
+-4.65
+-5.56
+-5.1899999999999995
+-7.249999999999997
+-4.64
+-4.989999999999999
+-5.109999999999999
+-4.17
+-6.460000000000001
+-4.31
+-5.54
+-6.800000000000001
+-5.46
+-5.1499999999999995
+-5.12
+-3.74
+-4.89
+-4.140000000000001
+-4.84
+-4.24
+-5.12
+-4.58
+-4.43
+-3.3099999999999996
+-5.12
+-4.0600000000000005
+-2.519999999999999
+-3.2399999999999993
+-4.24
+-5.1499999999999995
+-5.06
+-4.74
+-4.12
+-5.16
+-4.71
+-5.55
+-5.39
+-5.529999999999999
+-6.320000000000001
+-3.6100000000000008
+-4.19
+-3.1799999999999993
+-4.59
+-4.79
+-5.12
+-4.64
+-3.0000000000000004
+-4.19
+-5.1
+-3.8900000000000006
+-5.1499999999999995
+-4.989999999999999
+-4.15
+-5.529999999999999
+-5.38
+-5.49
+-5.04
+-4.38
+-5.12
+-4.050000000000001
+-5.22
+-5.1499999999999995
+-5.1499999999999995
+-5.1499999999999995
+-4.16
+-5.28
+-5.700000000000001
+-6.620000000000002
+-4.21
+-6.210000000000001
+-5.1499999999999995
+-5.67
+-5.1499999999999995
+-4.72
+-4.15
+-2.889999999999999
+-5.33
+-5.2
+-4.16
+-5.1499999999999995
+-5.52
+-4.83
+-5.92
+-2.789999999999999
+-4.15
+-5.1499999999999995
+-5.12
+-5.1499999999999995
+-5.1499999999999995
+-5.1499999999999995
+-4.25
+-3.7300000000000004
+-3.7600000000000007
+-4.35
+-3.4800000000000004
+-4.12
+-5.12
+-4.390000000000001
+-2.6999999999999984
+-4.84
+-5.119999999999999
+-3.74
+-5.51
+-4.6899999999999995
+-4.15
+-3.6900000000000004
+-5.12
+-5.27
+-4.4
+-4.41
+-5.39
+-5.1499999999999995
+-4.15
+-2.7399999999999984
+-4.64
+-5.2299999999999995
+-4.12
+-5.62
+-4.8999999999999995
+-5.1499999999999995
+-4.15
+-3.9100000000000006
+-3.8400000000000003
+-5.13
+-4.49
+-5.58
+-6.320000000000001
+-6.420000000000002
+-5.51
+-5.04
+-5.25
+-5.21
+-4.69
+-5.6
+-4.0200000000000005
+-5.4799999999999995
+-5.12
+-5.22
+-6.16
+-5.1499999999999995
+-4.510000000000001
+-5.5
+-5.33
+-5.06
+-4.54
+-4.5200000000000005
+-4.2
+-4.68
+-3.2299999999999995
+-5.6899999999999995
+-4.56
+-3.2399999999999993
+-5.32
+-5.1499999999999995
+-5.12
+-5.3500000000000005
+-4.05
+-5.45
+-5.79
+-4.87
+-4.59
+-4.17
+-6.23
+-5.1899999999999995
+-4.15
+-5.06
+-4.93
+-3.7700000000000005
+-3.64
+-4.24
+-4.34
+-5.1499999999999995
+-4.08
+-2.909999999999999
+-3.79
+-5.62
+-4.5600000000000005
+-5.169999999999999
+-4.1899999999999995
+-5.12
+-5.12
+-4.32
+-4.44
+-5.1499999999999995
+-5.1499999999999995
+-7.599999999999998
+-3.6300000000000003
+-4.79
+-6.240000000000001
+-4.74
+-5.109999999999999
+-6.460000000000001
+-6.610000000000001
+-6.040000000000001
+-4.28
+-5.069999999999999
+-5.33
+-4.2
+-5.2
+-5.06
+-5.2
+-4.15
+-5.3
+-5.2
+-5.359999999999999
+-4.390000000000001
+-4.14
+-4.74
+-4.23
+-4.19
+-4.12
+-4.19
+-3.2499999999999996
+-5.1499999999999995
+-4.09
+-4.24
+-4.390000000000001
+-2.7399999999999993
+-4.15
+-5.43
+-5.1499999999999995
+-4.2700000000000005
+-5.08
+-1.1500000000000012
+-5.06
+-4.64
+-6.240000000000001
+-4.24
+-6.050000000000001
+-5.14
+-4.390000000000001
+-5.04
+-4.720000000000001
+-4.06
+-5.78
+-4.87
+-4.74
+-4.24
+-5.17
+-5.13
+-4.45
+-4.4
+-4.88
+-4.15
+-4.45
+-2.9399999999999986
+-4.15
+-4.39
+-5.24
+-5.9300000000000015
+-5.33
+-5.14
+-4.16
+-4.34
+-4.74
+-4.29
+-5.119999999999999
+-4.24
+-4.74
+-4.140000000000001
+-5.43
+-5.12
+-4.390000000000001
+-4.15
+-4.15
+-4.959999999999999
+-5.09
+-4.199999999999999
+-4.4
+-5.06
+-5.6
+-5.830000000000001
+-4.18
+-6.100000000000001
+-5.1499999999999995
+-5.1499999999999995
+-5.1499999999999995
+-4.390000000000001
+-3.9400000000000004
+-4.06
+-5.1499999999999995
+-4.15
+-5.1499999999999995
+-5.029999999999999
+-5.01
+-3.580000000000001
+-4.2
+-4.15
+-5.119999999999999
+-3.2199999999999993
+-5.1499999999999995
+-4.59
+-6.0600000000000005
+-5.76
+-4.69
+-4.06
+-5.22
+-5.06
+-4.15
+-5.1499999999999995
+-6.16
+-4.43
+-4.15
+-4.84
+-5.08
+-5.13
+-4.51
+-3.5400000000000005
+-4.24
+-5.880000000000001
+-4.110000000000001
+-4.3999999999999995
+-4.88
+-5.68
+-4.79
+-5.1499999999999995
+-4.2
+-4.410000000000001
+-5.39
+-4.79
+-5.37
+-4.35
+-5.08
+-4.84
+-6.250000000000001
+-4.29
+-4.84
+-4.69
+-5.1499999999999995
+-4.15
+-3.74
+-4.25
+-5.1499999999999995
+-4.72
+-4.34
+-5.1499999999999995
+-4.96
+-4.56
+-5.12
+-5.109999999999999
+-4.380000000000001
+-5.1499999999999995
+-5.1499999999999995
+-4.15
+-5.12
+-5.33
+-5.49
+-4.59
+-4.79
+-4.8999999999999995
+-4.24
+-4.15
+-4.76
+-4.9399999999999995
+-5.52
+-4.9399999999999995
+-4.24
+-5.12
+-6.040000000000001
+-5.22
+-4.63
+-5.47
+-4.89
+-5.12
+-4.54
+-4.12
+-5.11
+-4.24
+-4.0
+-4.12
+-5.1499999999999995
+-4.15
+-5.1499999999999995
+-4.190000000000001
+-5.109999999999999
+-5.1499999999999995
+-2.4399999999999995
+-5.119999999999999
+-4.54
+-0.6800000000000015
+-5.1499999999999995
+-4.15
+-4.53
+-3.9900000000000007
+-5.1499999999999995
+-5.12
+-5.42
+-3.29
+-5.1499999999999995
+-4.9399999999999995
+-4.15
+-5.12
+-4.24
+-3.830000000000001
+-5.0
+-3.8700000000000006
+-4.98
+-5.8100000000000005
+-5.990000000000001
+-4.19
+-5.49
+-4.15
+-3.31
+-7.689999999999994
+-4.36
+-5.609999999999999
+-4.550000000000001
+-5.0600000000000005
+-4.74
+-6.989999999999998
+-4.53
+-5.59
+-4.15
+-5.25
+-4.88
+-5.1499999999999995
+-4.32
+-5.1499999999999995
+-5.6
+-2.689999999999999
+-5.1499999999999995
+-5.12
+-5.42
+-5.640000000000001
+-4.76
+-5.14
+-4.210000000000001
+-5.01
+-4.46
+-4.49
+-4.14
+-4.89
+-4.74
+-5.29
+-4.15
+-3.8900000000000006
+-4.82
+-4.989999999999999
+-4.49
+-4.74
+-3.8099999999999996
+-4.49
+-4.109999999999999
+-4.43
+-4.06
+-4.140000000000001
+-5.1899999999999995
+-3.2399999999999993
+-4.15
+-4.09
+-4.24
+-4.74
+-3.74
+-5.21
+-5.1499999999999995
+-4.24
+-4.79
+-6.590000000000002
+-4.67
+-4.24
+-5.1499999999999995
+-4.15
+-4.51
+-3.3400000000000003
+-4.390000000000001
+-5.39
+-4.9399999999999995
+-6.170000000000001
+-6.610000000000001
+-3.4400000000000004
+-5.58
+-2.029999999999998
+-5.4399999999999995
+-4.33
+-4.390000000000001
+-3.99
+-6.070000000000001
+-4.46
+-4.29
+-5.260000000000001
+-6.7700000000000005
+-6.380000000000001
+-4.59
+-4.23
+-5.29
+-5.56
+-5.119999999999999
+-4.54
+-5.27
+-5.13
+-5.92
+-4.74
+-5.1499999999999995
+-5.14
+-4.12
+-3.9300000000000006
+-5.12
+-4.040000000000001
+-5.1499999999999995
+-4.49
+-5.11
+-4.8
+-4.12
+-4.300000000000001
+-5.1499999999999995
+-4.29
+-3.9300000000000006
+-3.8900000000000006
+-4.18
+-9.249999999999995
+-4.84
+-4.23
+-4.84
+-4.9399999999999995
+-2.91
+-5.930000000000001
+-3.99
+-4.76
+-3.3900000000000006
+-4.010000000000001
+-4.08
+-4.28
+-5.06
+-5.09
+-4.390000000000001
+-4.250000000000001
+-4.15
+-3.8900000000000006
+-4.34
+-5.360000000000001
+-4.380000000000001
+-5.61
+-5.1499999999999995
+-6.300000000000001
+-8.249999999999996
+-8.779999999999994
+-3.8400000000000007
+-3.209999999999999
+-5.1499999999999995
+-5.1499999999999995
+-4.74
+-4.9799999999999995
+-6.0600000000000005
+-5.0
+-4.12
+-5.6499999999999995
+-5.17
+-5.369999999999999
+-4.19
+-5.91
+-5.18
+-3.7400000000000007
+-6.880000000000001
+-5.119999999999999
+-5.23
+-4.37
+-5.1499999999999995
+-5.1499999999999995
+-5.1499999999999995
+-5.0
+-4.93
+-3.74
+-4.43
+-4.9799999999999995
+-5.3
+-4.15
+-5.1499999999999995
+-5.1499999999999995
+-5.1499999999999995
+-4.15
+-5.1499999999999995
+-5.22
+-5.51
+-5.6000000000000005
+-5.7
+-5.359999999999999
+-5.1499999999999995
+-5.1499999999999995
+-4.24
+-5.109999999999999
+-5.32
+-5.12
+-4.2700000000000005
+-5.1499999999999995
+-4.15
+-4.15
+-5.1499999999999995
+-6.230000000000001
+-4.85
+-3.8099999999999996
+-4.7
+-5.61
+-5.5
+-7.949999999999996
+-4.4700000000000015
+-5.1499999999999995
+-5.1499999999999995
+-4.0600000000000005
+-4.49
+-4.470000000000001
+-4.390000000000001
+-4.81
+-4.15
+-5.710000000000001
+-4.17
+-5.1499999999999995
+-5.1499999999999995
+-5.17
+-4.24
+-4.29
+-5.06
+-4.959999999999999
+-6.160000000000001
+-4.750000000000001
+-5.35
+-5.12
+-5.2
+-4.260000000000001
+-5.119999999999999
+-5.01
+-4.4700000000000015
+-5.1499999999999995
+-4.28
+-5.26
+-4.87
+-4.21
+-6.710000000000001
+-6.0600000000000005
+-4.300000000000001
+-4.64
+-4.15
+-5.17
+-4.64
+-4.57
+-2.6299999999999994
+-4.75
+-5.26
+-4.45
+-4.36
+-4.12
+-4.62
+-5.09
+-5.09
+-5.49
+-4.7
+-5.1499999999999995
+-5.1499999999999995
+-3.2399999999999993
+-3.6200000000000006
+-5.1499999999999995
+-5.1499999999999995
+-4.74
+-3.8900000000000006
+-5.1499999999999995
+-5.58
+-5.17
+-5.71
+-5.04
+-4.16
+-4.700000000000001
+-5.12
+-4.24
+-5.42
+-5.1499999999999995
+-5.1499999999999995
+-3.74
+-4.43
+-5.1499999999999995
+-5.1499999999999995
+-4.34
+-5.069999999999999
+-5.159999999999999
+-5.529999999999999
+-6.180000000000001
+-6.84
+-4.93
+-5.640000000000001
+-3.8600000000000003
+-3.7100000000000004
+-4.93
+-4.74
+-5.1499999999999995
+-5.38
+-3.5400000000000005
+-4.25
+-5.1499999999999995
+-5.12
+-5.42
+-4.29
+-4.15
+-4.41
+-4.390000000000001
+-3.9600000000000004
+-4.58
+-4.15
+-3.74
+-5.3999999999999995
+-3.540000000000001
+-5.870000000000001
+-4.74
+-4.050000000000001
+-4.550000000000001
+-5.1499999999999995
+-4.19
+-5.4399999999999995
+-3.9400000000000004
+-5.119999999999999
+-5.72
+-5.34
+-5.17
+-4.26
+-5.12
+-4.49
+-4.34
+-4.84
+-4.53
+-4.49
+-5.1499999999999995
+-5.1499999999999995
+-5.45
+-4.470000000000001
+-5.1499999999999995
+-5.1499999999999995
+-4.15
+-4.54
+-4.15
+-5.06
+-5.09
+-4.95
+-3.7600000000000007
+-5.1499999999999995
+-4.33
+-4.89
+-6.12
+-5.2
+-5.529999999999999
+-4.8
+-3.9100000000000006
+-3.2399999999999993
+-4.19
+-4.89
+-4.64
+-5.48
+-7.01
+-5.199999999999999
+-4.67
+-5.28
+-4.15
+-4.15
+-5.12
+-4.15
+-5.12
+-4.29
+-5.63
+-5.1499999999999995
+-5.12
+-4.15
+-5.29
+-5.12
+-4.28
+-5.64
+-4.4
+-4.66
+-3.9
+-4.49
+-4.67
+-5.1499999999999995
+-5.2
+-5.1499999999999995
+-5.12
+-4.46
+-4.670000000000001
+-5.12
+-4.19
+-4.19
+-4.72
+-4.24
+-5.1499999999999995
+-4.71
+-5.25
+-4.460000000000001
+-4.89
+-5.06
+-6.330000000000001
+-5.480000000000001
+-5.1499999999999995
+-3.09
+-5.09
+-4.29
+-5.22
+-3.74
+-3.0
+-3.7800000000000002
+-3.4200000000000004
+-5.790000000000001
+-4.34
+-6.040000000000001
+-4.19
+-4.84
+-5.1499999999999995
+-4.15
+-5.1499999999999995
+-3.650000000000001
+-5.14
+-5.3999999999999995
+-4.79
+-4.15
+-5.1499999999999995
+-4.15
+-5.12
+-4.74
+-5.1499999999999995
+-4.12
+-4.67
+-4.79
+-4.64
+-5.45
+-5.08
+-5.59
+-4.24
+-4.140000000000001
+-4.81
+-4.15
+-4.390000000000001
+-4.23
+-6.100000000000001
+-5.930000000000001
+-4.34
+-6.9
+-5.570000000000001
+-4.82
+-4.49
+-4.89
+-4.15
+-5.470000000000001
+-4.35
+-4.44
+-4.84
+-4.12
+-4.34
+-5.1499999999999995
+-4.15
+-5.12
+-4.76
+-4.63
+-4.37
+-5.1499999999999995
+-4.15
+-4.34
+-4.15
+-4.24
+-4.12
+-5.33
+-5.12
+-4.49
+-4.84
+-4.390000000000001
+-5.1499999999999995
+-5.1499999999999995
+-4.380000000000002
+-5.499999999999999
+-4.82
+-4.300000000000001
+-5.06
+-4.2
+-4.24
+1.0799999999999992
+-5.1499999999999995
+-5.06
+-5.31
+-10.289999999999985
+-9.509999999999991
+-12.380000000000042
+-12.459999999999981
+-12.98999999999998
+-10.749999999999988
+-8.929999999999989
+-4.15
+-5.09
+-4.9399999999999995
+-5.1499999999999995
+-5.1499999999999995
+-5.06
+-9.09999999999999
+-4.670000000000001
+-4.29
+-8.189999999999994
+-4.699999999999999
+-4.090000000000001
+-3.7900000000000005
+-5.1499999999999995
+-4.24
+-6.4300000000000015
+-4.74
+-3.9900000000000007
+-2.949999999999999
+-5.1499999999999995
+-5.28
+-4.9799999999999995
+-4.66
+-5.1
+-4.15
+-4.9399999999999995
+-4.96
+-8.529999999999994
+-6.889999999999999
+-4.54
+-5.58
+-5.119999999999999
+-4.34
+-5.7
+-5.61
+-4.83
+-5.42
+-5.12
+-4.76
+-5.3
+-5.199999999999999
+-4.15
+-4.54
+-5.25
+8.119999999999997
+-4.930000000000001
+-5.740000000000002
+-4.49
+-4.15
+-4.15
+-4.87
+-6.010000000000001
+-4.57
+-5.55
+-4.39
+-5.920000000000002
+-5.680000000000001
+-5.58
+-5.2299999999999995
+-4.38
+-0.6100000000000005
+-4.0600000000000005
+-5.0200000000000005
+-4.7
+-4.2700000000000005
+-5.930000000000001
+-5.960000000000001
+-5.24
+-4.15
+-4.9399999999999995
+-3.79
+-5.49
+-4.930000000000001
+-5.06
+-4.36
+-4.64
+-5.28
+-4.64
+-4.300000000000001
+-5.1499999999999995
+-5.1499999999999995
+-2.6999999999999984
+-4.95
+-4.76
+-4.88
+-4.949999999999999
+-5.6
+-4.96
+-4.34
+-5.119999999999999
+-4.03
+-5.1499999999999995
+-4.23
+-5.34
+-7.039999999999999
+-4.67
+-5.1499999999999995
+-4.090000000000001
+-5.12
+-4.4
+-4.15
+-5.1499999999999995
+-5.22
+-3.93
+-4.15
+-5.12
+-5.04
+-5.31
+-4.09
+-4.54
+-4.49
+-5.56
+-4.2
+-5.119999999999999
+-4.03
+-5.47
+-6.110000000000001
+-5.56
+-4.140000000000001
+-4.79
+-5.1499999999999995
+-0.8699999999999948
+-3.5700000000000003
+-4.67
+-5.24
+-5.24
+-3.7399999999999998
+-4.8999999999999995
+-6.45
+-5.79
+-5.159999999999999
+-4.34
+-5.12
+-6.130000000000001
+-4.0600000000000005
+-5.17
+-4.040000000000001
+-3.93
+-4.34
+-5.18
+-5.61
+-3.49
+-4.15
+-5.1499999999999995
+-5.1499999999999995
+-5.12
+-4.25
+-5.12
+-5.06
+-5.1499999999999995
+-3.89
+-5.3999999999999995
+-4.34
+-5.1499999999999995
+-4.739999999999999
+-4.1899999999999995
+-4.24
+-3.5500000000000007
+-6.370000000000001
+-4.81
+-3.93
+-3.6500000000000012
+-5.109999999999999
+-5.36
+-4.88
+-4.74
+-4.29
+-5.180000000000001
+-5.4799999999999995
+-3.7700000000000005
+-4.390000000000001
+-5.27
+-4.15
+-3.4800000000000004
+-7.110000000000001
+-4.83
+-4.8
+-5.04
+-4.140000000000001
+-5.1499999999999995
+-3.7900000000000005
+-4.74
+-5.1499999999999995
+-3.2399999999999993
+-5.25
+-4.29
+-4.29
+-5.1499999999999995
+-5.12
+-4.74
+-4.69
+-4.74
+-4.38
+-4.15
+-5.12
+-5.0
+-4.49
+-4.93
+-5.09
+-5.12
+-8.249999999999996
+-4.15
+-4.43
+-5.14
+-4.67
+-5.1499999999999995
+-4.24
+-4.15
+-4.84
+-3.1299999999999994
+-4.15
+-5.12
+-5.109999999999999
+-4.23
+-4.21
+-5.31
+-4.71
+-5.25
+-4.15
+-4.84
+-5.14
+-4.15
+-4.140000000000001
+-5.1499999999999995
+-4.71
+-3.68
+-4.79
+-4.16
+-5.22
+-4.62
+-4.32
+-3.440000000000001
+-4.35
+-3.8400000000000007
+-4.79
+-4.58
+-4.7299999999999995
+-4.949999999999999
+-5.47
+-4.14
+-4.82
+-4.15
+-4.74
+-5.1499999999999995
+-5.39
+-4.999999999999999
+-5.369999999999999
+-5.26
+-5.41
+-5.06
+-4.72
+-4.36
+-5.92
+-4.24
+-5.1499999999999995
+-5.24
+-4.79
+-4.15
+-4.24
+-4.74
+-5.119999999999999
+-3.6899999999999995
+-5.25
+-4.64
+-5.28
+-5.48
+-4.44
+-4.24
+-3.29
+-3.84
+-4.99
+-5.210000000000001
+-4.15
+-2.7399999999999984
+-5.06
+-3.93
+-4.79
+-5.31
+-5.2
+-5.1499999999999995
+-4.260000000000001
+-4.12
+-3.8200000000000003
+-5.04
+-4.26
+-4.29
+-4.24
+-4.34
+-4.63
+-4.57
+-4.33
+-5.1499999999999995
+-4.15
+-5.06
+-4.2
+-4.390000000000001
+-4.6899999999999995
+-5.82
+-5.7
+-4.15
+-4.43
+-4.84
+-3.9000000000000004
+-3.4800000000000004
+-3.440000000000001
+-5.06
+-5.109999999999999
+-5.29
+-5.3
+-5.94
+-5.73
+-3.8
+-4.73
+-4.659999999999999
+-5.34
+-4.54
+-5.640000000000001
+-4.61
+-5.029999999999999
+-4.11
+-4.390000000000001
+-4.15
+-4.24
+-5.71
+-4.989999999999999
+-4.65
+-5.699999999999999
+-4.68
+-5.01
+-4.35
+-4.15
+-5.06
+-4.49
+-5.1499999999999995
+-5.37
+-4.89
+-5.1899999999999995
+-5.04
+-4.79
+-4.54
+-4.24
+-4.89
+-4.45
+-4.69
+-4.84
+-4.34
+-5.1499999999999995
+-2.929999999999999
+-2.7399999999999984
+-4.390000000000001
+-4.29
+-5.12
+-4.04
+-5.1499999999999995
+-4.58
+-4.34
+-5.01
+-3.8100000000000005
+-4.53
+-1.96
+-4.29
+-5.1499999999999995
+-3.2399999999999993
+-5.109999999999999
+-3.99
+-5.99
+-6.020000000000001
+-5.12
+-4.140000000000001
+-4.7
+-3.9700000000000006
+-4.15
+-4.19
+-3.99
+-4.69
+-5.1499999999999995
+-4.64
+2.8099999999999996
+-5.12
+-4.58
+-3.7200000000000006
+-4.85
+-3.89
+-5.1499999999999995
+-4.74
+-5.27
+-4.49
+-4.71
+-5.119999999999999
+-5.39
+-4.010000000000001
+-4.89
+-5.12
+-4.54
+-4.41
+-5.02
+-6.420000000000001
+-4.58
+-4.53
+-6.630000000000001
+-4.68
+-5.26
+-4.74
+-3.059999999999999
+-4.84
+-4.84
+-4.69
+-4.33
+-6.0600000000000005
+-4.74
+-3.98
+-4.180000000000001
+-5.1499999999999995
+-5.1499999999999995
+-5.34
+-4.15
+-5.02
+-4.15
+-4.390000000000001
+-4.640000000000001
+-5.62
+-4.44
+-3.5100000000000007
+-4.1899999999999995
+-5.159999999999999
+-3.6799999999999997
+-3.5500000000000007
+-4.390000000000001
+-3.74
+-4.590000000000001
+-5.1499999999999995
+-5.1499999999999995
+-5.1499999999999995
+-3.7900000000000005
+-5.34
+-4.15
+-3.29
+-5.5
+-5.1499999999999995
+-5.12
+-1.719999999999998
+-4.859999999999999
+-4.28
+-4.24
+-2.8699999999999988
+-5.1499999999999995
+-3.4900000000000007
+-4.14
+-4.34
+-5.1499999999999995
+-4.24
+-4.12
+-4.69
+-4.15
+-5.12
+-5.0
+2.11000000000001
+-4.42
+-4.78
+-5.12
+-4.140000000000001
+-4.34
+-3.9700000000000006
+-4.62
+-5.0600000000000005
+-4.92
+-4.74
+-5.840000000000001
+-4.86
+-6.1800000000000015
+-4.4
+-4.83
+-5.1499999999999995
+-4.64
+-4.74
+-4.48
+-5.83
+-5.370000000000001
+-5.06
+-5.37
+-3.790000000000001
+-5.92
+-3.67
+-3.74
+-5.1499999999999995
+-3.2399999999999993
+-4.17
+-4.54
+-4.8
+-5.12
+-4.070000000000001
+-4.89
+-4.43
diff --git "a/DQN_mulit_tensorflow_2/backup/2/\346\226\260\345\273\272\346\226\207\346\234\254\346\226\207\346\241\243.txt" "b/DQN_mulit_tensorflow_2/backup/2/\346\226\260\345\273\272\346\226\207\346\234\254\346\226\207\346\241\243.txt"
new file mode 100644
index 0000000..590018e
--- /dev/null
+++ "b/DQN_mulit_tensorflow_2/backup/2/\346\226\260\345\273\272\346\226\207\346\234\254\346\226\207\346\241\243.txt"
@@ -0,0 +1,8 @@
+r_wood = 0.3
+ r_powerup = 0.5
+ r_put_bomb = 0.3
+ r_put_bomb_near_enemy = 1
+ r_kick = 0.5
+ r_in_flame = -10
+ r_move = -0.02
+ r_stay = -0.04
\ No newline at end of file
diff --git a/DQN_mulit_tensorflow_2/backup/3/FFA100/FFA100.data-00000-of-00001 b/DQN_mulit_tensorflow_2/backup/3/FFA100/FFA100.data-00000-of-00001
new file mode 100644
index 0000000..efea347
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/3/FFA100/FFA100.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/backup/3/FFA100/FFA100.index b/DQN_mulit_tensorflow_2/backup/3/FFA100/FFA100.index
new file mode 100644
index 0000000..c4a2901
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/3/FFA100/FFA100.index differ
diff --git a/DQN_mulit_tensorflow_2/backup/3/FFA100/checkpoint b/DQN_mulit_tensorflow_2/backup/3/FFA100/checkpoint
new file mode 100644
index 0000000..5366365
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/3/FFA100/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "FFA100"
+all_model_checkpoint_paths: "FFA100"
diff --git a/DQN_mulit_tensorflow_2/backup/3/FFA200/FFA200.data-00000-of-00001 b/DQN_mulit_tensorflow_2/backup/3/FFA200/FFA200.data-00000-of-00001
new file mode 100644
index 0000000..946ab5a
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/3/FFA200/FFA200.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/backup/3/FFA200/FFA200.index b/DQN_mulit_tensorflow_2/backup/3/FFA200/FFA200.index
new file mode 100644
index 0000000..cc551fa
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/3/FFA200/FFA200.index differ
diff --git a/DQN_mulit_tensorflow_2/backup/3/FFA200/checkpoint b/DQN_mulit_tensorflow_2/backup/3/FFA200/checkpoint
new file mode 100644
index 0000000..73278bf
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/3/FFA200/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "FFA200"
+all_model_checkpoint_paths: "FFA200"
diff --git a/DQN_mulit_tensorflow_2/backup/3/result.csv b/DQN_mulit_tensorflow_2/backup/3/result.csv
new file mode 100644
index 0000000..ad8e5cb
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/3/result.csv
@@ -0,0 +1,1001 @@
+result
+0
+0
+0
+0
+0
+2
+0
+0
+0
+1
+0
+2
+0
+0
+1
+0
+1
+0
+0
+0
+2
+0
+0
+0
+1
+0
+1
+0
+0
+0
+2
+2
+0
+0
+2
+0
+0
+2
+0
+0
+0
+0
+0
+2
+0
+0
+2
+0
+1
+0
+2
+0
+2
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+2
+2
+0
+0
+0
+0
+0
+1
+2
+0
+0
+2
+1
+0
+1
+0
+0
+1
+0
+0
+1
+1
+1
+1
+0
+1
+0
+1
+0
+0
+0
+0
+1
+0
+0
+0
+2
+1
+2
+0
+1
+1
+0
+2
+0
+1
+0
+1
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+2
+0
+1
+0
+2
+1
+1
+0
+0
+0
+0
+2
+2
+0
+0
+0
+1
+2
+0
+0
+0
+2
+0
+1
+0
+1
+0
+0
+0
+2
+1
+0
+1
+1
+0
+0
+1
+0
+2
+0
+1
+1
+1
+0
+0
+1
+0
+0
+1
+2
+0
+1
+0
+1
+0
+0
+0
+0
+2
+0
+0
+2
+0
+2
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+2
+1
+1
+0
+0
+0
+1
+0
+1
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+1
+1
+0
+0
+1
+0
+1
+2
+0
+0
+2
+2
+0
+0
+0
+0
+0
+2
+2
+1
+0
+0
+1
+0
+1
+2
+0
+0
+2
+0
+1
+0
+0
+0
+0
+1
+1
+0
+1
+0
+0
+0
+0
+1
+0
+0
+0
+1
+1
+0
+1
+0
+1
+1
+1
+0
+0
+0
+1
+2
+1
+0
+0
+0
+0
+0
+2
+0
+0
+1
+0
+0
+0
+2
+0
+1
+0
+1
+0
+0
+0
+1
+1
+0
+0
+0
+0
+2
+2
+0
+0
+0
+0
+2
+2
+1
+2
+2
+0
+0
+0
+2
+1
+0
+2
+0
+0
+1
+0
+0
+0
+1
+0
+0
+2
+1
+0
+0
+0
+0
+0
+1
+0
+1
+1
+0
+0
+0
+1
+1
+1
+0
+1
+0
+1
+0
+2
+1
+0
+0
+2
+0
+0
+2
+1
+1
+2
+0
+2
+0
+0
+1
+0
+0
+1
+0
+0
+1
+0
+0
+0
+0
+1
+0
+0
+0
+0
+1
+2
+1
+0
+0
+2
+1
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+2
+2
+1
+0
+0
+0
+1
+1
+1
+0
+2
+0
+0
+2
+0
+0
+0
+1
+0
+0
+1
+0
+0
+1
+0
+1
+2
+0
+1
+0
+2
+0
+0
+0
+0
+1
+0
+2
+0
+0
+0
+0
+2
+0
+0
+1
+0
+0
+2
+0
+0
+0
+1
+2
+0
+0
+1
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+2
+0
+1
+0
+1
+1
+1
+0
+1
+0
+0
+2
+1
+0
+0
+0
+1
+0
+2
+0
+0
+1
+1
+0
+0
+0
+1
+0
+0
+0
+0
+1
+0
+0
+0
+0
+1
+2
+2
+0
+1
+1
+0
+0
+0
+0
+1
+0
+1
+0
+0
+0
+1
+2
+1
+0
+1
+1
+0
+1
+0
+0
+0
+1
+0
+1
+0
+0
+0
+2
+2
+0
+0
+0
+0
+2
+1
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+1
+2
+0
+0
+0
+1
+0
+2
+0
+0
+1
+1
+0
+2
+0
+0
+0
+0
+0
+0
+2
+0
+1
+0
+0
+2
+0
+0
+0
+0
+0
+0
+1
+1
+2
+1
+0
+1
+1
+1
+2
+1
+0
+0
+0
+1
+0
+0
+0
+1
+0
+1
+0
+2
+0
+0
+2
+2
+2
+1
+1
+1
+2
+1
+0
+0
+0
+1
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+1
+0
+1
+0
+1
+0
+0
+1
+0
+0
+2
+0
+0
+1
+0
+1
+0
+0
+0
+0
+0
+0
+2
+0
+1
+0
+2
+0
+0
+0
+0
+0
+0
+2
+0
+0
+1
+0
+1
+1
+0
+0
+2
+2
+1
+0
+1
+0
+0
+0
+0
+0
+0
+1
+0
+2
+1
+0
+2
+0
+0
+0
+2
+0
+0
+2
+1
+0
+2
+1
+0
+2
+0
+0
+1
+1
+0
+0
+1
+1
+0
+0
+0
+0
+1
+1
+1
+0
+0
+1
+0
+1
+2
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+1
+0
+0
+1
+0
+0
+0
+1
+0
+1
+1
+0
+1
+2
+0
+0
+1
+1
+1
+0
+0
+0
+2
+1
+2
+0
+0
+0
+0
+1
+0
+0
+0
+0
+1
+0
+2
+0
+0
+0
+1
+0
+0
+1
+0
+0
+0
+0
+1
+1
+0
+1
+0
+0
+0
+1
+2
+1
+0
+2
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+1
+2
+1
+0
+0
+1
+0
+0
+2
+0
+0
+1
+1
+0
+1
+0
+0
+0
+2
+1
+2
+0
+0
+0
+0
+0
+0
+1
+0
+2
+0
+1
+2
+0
+1
+1
+1
+0
+1
+0
+1
+0
+0
+1
+2
+0
+0
+1
+0
+0
+0
+0
+1
+0
+0
+0
+0
+1
+0
+0
+1
+0
+1
+1
+0
+0
+0
+0
+0
+1
+1
+0
+0
+0
+0
+0
+2
+1
+0
+0
+1
+0
+0
+0
+0
+0
+2
+1
+1
+1
+0
+0
+0
+0
+0
+2
+1
+2
+2
+0
+0
+0
+2
+2
+0
+0
+0
+0
+0
+1
+0
+0
+0
+2
+0
+1
+2
+0
+0
+1
+0
+0
+0
+1
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+1
+1
+1
+0
+0
+1
+0
+1
+1
+0
+0
+0
+1
+0
+0
+0
+1
+0
diff --git a/DQN_mulit_tensorflow_2/backup/3/reward.csv b/DQN_mulit_tensorflow_2/backup/3/reward.csv
new file mode 100644
index 0000000..9f07028
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/3/reward.csv
@@ -0,0 +1,1001 @@
+reward
+-0.42000000000002125
+0.8999999999999737
+16.159999999999982
+-2.3600000000000376
+1.6399999999999757
+9.35999999999998
+6.69999999999996
+-3.0800000000000187
+0.23999999999998956
+13.63999999999994
+-1.6600000000000126
+7.559999999999968
+1.1799999999999642
+-3.240000000000002
+11.839999999999932
+4.859999999999987
+5.479999999999962
+-7.620000000000001
+3.8399999999999714
+4.2599999999999305
+13.219999999999951
+-5.480000000000003
+-1.840000000000023
+-2.840000000000007
+15.699999999999962
+-2.8000000000000203
+12.819999999999965
+-7.6000000000000005
+-0.900000000000011
+2.3599999999999692
+8.919999999999972
+11.279999999999971
+1.6199999999999584
+1.4999999999999591
+4.239999999999998
+3.71999999999996
+0.9799999999999685
+9.759999999999973
+-2.4200000000000186
+-4.1200000000000045
+-3.0200000000000164
+-2.240000000000018
+-3.12000000000001
+3.839999999999999
+-3.5400000000000045
+-1.4600000000000222
+16.779999999999948
+-4.620000000000015
+19.67999999999993
+0.21999999999997222
+10.15999999999996
+4.879999999999949
+11.579999999999963
+-9.32
+-2.7800000000000082
+5.55999999999997
+15.419999999999952
+-0.9000000000000323
+-3.4200000000000035
+-2.0200000000000156
+-6.5600000000000005
+3.979999999999981
+-0.920000000000023
+1.6399999999999997
+10.599999999999971
+-3.8200000000000083
+-5.280000000000001
+-3.6400000000000112
+-5.840000000000002
+-5.820000000000002
+13.579999999999943
+9.659999999999942
+0.6999999999999886
+-4.5000000000000036
+14.719999999999963
+15.119999999999948
+-0.7399999999999913
+10.559999999999961
+-8.520000000000001
+-5.740000000000002
+13.079999999999947
+8.259999999999927
+-2.2800000000000082
+12.919999999999922
+13.559999999999928
+12.079999999999925
+14.959999999999926
+-5.640000000000004
+17.479999999999954
+-4.140000000000006
+12.659999999999986
+1.2599999999999838
+0.059999999999972076
+-1.7600000000000033
+-0.36000000000003496
+11.51999999999994
+-6.220000000000001
+-2.4400000000000137
+4.899999999999956
+9.97999999999999
+20.719999999999914
+11.299999999999969
+5.279999999999941
+14.859999999999935
+15.759999999999941
+5.779999999999967
+9.979999999999967
+3.6399999999999544
+11.77999999999993
+5.399999999999951
+25.479999999999958
+-6.8999999999999995
+-2.8200000000000145
+11.519999999999927
+-9.24
+8.959999999999951
+-4.860000000000005
+-0.30000000000005755
+-1.0800000000000285
+-3.1000000000000165
+5.979999999999977
+18.819999999999947
+0.8999999999999666
+22.939999999999948
+-2.2600000000000318
+4.159999999999996
+15.119999999999937
+19.57999999999995
+-1.5200000000000191
+-5.280000000000006
+1.099999999999966
+1.6799999999999606
+5.299999999999992
+11.37999999999996
+-2.4000000000000137
+7.219999999999946
+3.8199999999999683
+10.319999999999961
+15.779999999999944
+-4.920000000000019
+2.019999999999964
+-1.6200000000000099
+6.359999999999985
+0.6199999999999388
+17.23999999999996
+-0.8000000000000114
+12.039999999999925
+-6.4399999999999995
+-1.7200000000000166
+-5.900000000000018
+8.939999999999976
+11.35999999999993
+-4.680000000000001
+14.199999999999923
+12.699999999999937
+-3.4400000000000075
+-3.740000000000009
+12.799999999999951
+-4.740000000000004
+5.159999999999993
+-6.720000000000001
+14.65999999999994
+12.059999999999933
+16.179999999999932
+-6.980000000000001
+-4.920000000000002
+15.239999999999954
+-2.4400000000000173
+-1.6000000000000174
+12.999999999999936
+9.619999999999985
+1.4399999999999782
+12.339999999999948
+-1.080000000000009
+11.01999999999996
+-3.3800000000000123
+-1.2400000000000055
+-4.820000000000014
+-3.2400000000000118
+8.279999999999946
+-6.940000000000001
+-3.980000000000004
+9.699999999999942
+1.3599999999999905
+11.859999999999962
+-4.500000000000007
+0.33999999999999275
+-0.3000000000000167
+10.439999999999918
+-6.26
+4.8999999999999755
+-7.380000000000001
+-4.580000000000005
+1.459999999999944
+3.159999999999977
+14.659999999999966
+13.819999999999926
+10.919999999999956
+1.4999999999999751
+-4.700000000000011
+-6.36
+10.139999999999931
+-5.100000000000005
+16.759999999999923
+1.6599999999999842
+-1.9600000000000222
+-0.24000000000003396
+8.039999999999937
+2.999999999999986
+-4.000000000000004
+-2.4600000000000106
+-2.6600000000000117
+1.8399999999999768
+-5.160000000000002
+4.659999999999959
+12.799999999999924
+9.81999999999993
+-0.7200000000000149
+-9.16
+23.919999999999963
+2.039999999999967
+10.419999999999913
+12.859999999999946
+-5.520000000000003
+0.09999999999996412
+9.819999999999958
+4.02
+-4.900000000000004
+-5.480000000000001
+3.8599999999999905
+-3.400000000000036
+-4.700000000000004
+14.19999999999994
+9.599999999999968
+16.97999999999995
+-4.260000000000006
+4.419999999999943
+12.739999999999954
+-5.200000000000005
+21.079999999999963
+7.139999999999982
+-2.7400000000000144
+-2.62000000000001
+10.059999999999945
+2.279999999999953
+10.819999999999942
+-1.500000000000016
+-7.320000000000001
+3.119999999999969
+-2.8800000000000114
+12.519999999999927
+14.19999999999993
+-6.380000000000001
+14.759999999999959
+-3.880000000000015
+-3.4000000000000163
+-5.3
+-6.600000000000001
+15.999999999999957
+2.3799999999999777
+0.4199999999999573
+-3.5000000000000098
+15.559999999999938
+12.119999999999957
+1.2199999999999438
+11.499999999999929
+-4.100000000000014
+13.699999999999973
+19.23999999999994
+18.339999999999943
+-4.360000000000016
+-6.380000000000001
+-5.760000000000004
+7.219999999999917
+8.379999999999978
+18.839999999999932
+2.8799999999999386
+-7.800000000000001
+-6.339999999999999
+-0.5600000000000325
+-1.7400000000000215
+17.399999999999956
+-5.560000000000002
+3.679999999999959
+14.879999999999939
+0.6999999999999797
+3.4199999999999715
+-7.860000000000007
+11.71999999999995
+-4.12000000000001
+12.479999999999931
+-1.70000000000001
+14.739999999999956
+-6.120000000000001
+3.539999999999944
+-3.7400000000000144
+11.959999999999926
+18.579999999999938
+-4.380000000000008
+1.359999999999971
+-3.8600000000000048
+0.3399999999999661
+7.599999999999984
+10.819999999999952
+-3.600000000000014
+-5.560000000000004
+-4.100000000000003
+-3.36000000000001
+8.899999999999977
+11.939999999999976
+16.93999999999995
+14.139999999999965
+12.259999999999984
+-0.7200000000000202
+-5.100000000000002
+-1.900000000000011
+11.999999999999973
+6.339999999999972
+2.019999999999971
+4.939999999999991
+-4.360000000000005
+-4.600000000000004
+12.119999999999942
+-5.200000000000001
+-1.9200000000000426
+-5.140000000000004
+11.519999999999945
+0.8599999999999799
+1.1399999999999721
+4.539999999999993
+13.099999999999929
+8.099999999999948
+-4.320000000000011
+5.039999999999969
+-0.760000000000014
+-8.66
+8.019999999999941
+-3.8600000000000065
+18.539999999999964
+15.619999999999957
+-4.380000000000012
+5.499999999999998
+3.439999999999946
+14.139999999999931
+10.93999999999995
+13.079999999999943
+-5.620000000000004
+11.719999999999947
+-7.580000000000001
+17.259999999999952
+-6.02
+12.979999999999952
+12.319999999999947
+-1.8000000000000238
+0.07999999999996277
+1.8199999999999998
+-3.6600000000000055
+-2.0400000000000045
+12.559999999999956
+13.739999999999949
+13.199999999999934
+9.259999999999987
+-4.900000000000004
+12.719999999999965
+-0.7400000000000269
+-6.260000000000001
+18.079999999999934
+-1.1600000000000108
+5.759999999999975
+13.35999999999994
+1.85999999999998
+-3.5400000000000036
+12.639999999999926
+4.979999999999963
+-4.400000000000004
+-2.2000000000000197
+0.9999999999999645
+10.899999999999968
+-6.5600000000000005
+-8.52
+5.85999999999996
+-1.140000000000029
+10.319999999999949
+13.319999999999936
+12.29999999999993
+-0.5600000000000165
+-3.180000000000012
+6.79999999999999
+18.17999999999993
+-4.420000000000007
+-5.340000000000003
+-6.200000000000001
+-4.140000000000004
+-0.20000000000002416
+-3.040000000000001
+10.19999999999994
+-4.480000000000009
+-3.4200000000000097
+-7.800000000000001
+20.559999999999945
+10.579999999999936
+12.67999999999993
+2.919999999999968
+-7.24
+-6.860000000000001
+13.779999999999974
+13.519999999999934
+18.17999999999992
+-1.0400000000000205
+4.8599999999999905
+-3.1800000000000272
+-5.140000000000002
+12.69999999999996
+-9.040000000000001
+-2.600000000000005
+-5.560000000000001
+14.599999999999968
+-6.22
+1.779999999999962
+14.519999999999948
+-5.760000000000003
+-0.34000000000000163
+11.159999999999956
+-2.0200000000000333
+10.819999999999933
+10.099999999999971
+4.059999999999954
+14.759999999999959
+0.2599999999999838
+9.359999999999973
+-6.199999999999998
+0.19999999999997087
+0.3199999999999896
+-4.620000000000007
+14.439999999999948
+-2.2600000000000113
+3.7199999999999993
+-2.6600000000000144
+2.0399999999999388
+3.3999999999999595
+-0.680000000000021
+13.17999999999996
+4.039999999999953
+-7.9
+15.999999999999943
+-7.880000000000001
+-5.940000000000001
+4.159999999999996
+-6.480000000000001
+-0.14000000000004142
+-6.400000000000003
+11.379999999999919
+6.679999999999982
+-5.380000000000004
+-0.26000000000003176
+6.379999999999934
+-4.180000000000011
+16.499999999999925
+-2.8000000000000167
+0.039999999999981384
+-5.020000000000008
+-1.70000000000001
+-4.480000000000005
+-3.300000000000014
+2.239999999999986
+5.119999999999928
+-1.2200000000000202
+-4.220000000000002
+-0.5800000000000214
+-5.000000000000003
+13.559999999999983
+14.879999999999974
+0.49999999999998224
+-8.4
+6.079999999999924
+-0.40000000000003233
+-1.5600000000000147
+-2.2000000000000304
+-6.34
+-4.020000000000014
+-4.780000000000013
+-6.980000000000002
+13.43999999999993
+16.35999999999995
+-0.6400000000000361
+7.659999999999922
+1.0399999999999654
+17.419999999999938
+15.059999999999942
+18.87999999999996
+0.35999999999998344
+7.1799999999999145
+5.339999999999964
+-3.66000000000001
+8.399999999999983
+16.59999999999996
+1.6399999999999704
+0.07999999999998764
+3.7399999999999753
+11.039999999999948
+1.0999999999999464
+6.739999999999992
+7.299999999999944
+1.1399999999999828
+11.61999999999992
+9.839999999999925
+-4.0400000000000045
+4.779999999999967
+-3.3400000000000123
+13.879999999999962
+-2.480000000000018
+-1.5200000000000262
+-0.40000000000003055
+-6.2
+16.61999999999997
+-9.040000000000001
+-5.40000000000001
+-6.180000000000001
+-6.5600000000000005
+12.639999999999931
+15.95999999999993
+14.19999999999997
+-2.180000000000015
+14.579999999999927
+14.539999999999939
+-5.600000000000002
+-1.3200000000000163
+9.959999999999962
+-7.580000000000001
+16.49999999999996
+-2.340000000000013
+14.919999999999932
+-1.5600000000000023
+-2.0400000000000196
+-3.3000000000000256
+17.85999999999996
+12.639999999999972
+8.939999999999928
+0.21999999999994735
+10.999999999999936
+16.759999999999945
+-3.4200000000000124
+14.979999999999958
+-2.760000000000037
+4.859999999999943
+-1.4000000000000234
+11.079999999999943
+-4.740000000000004
+12.479999999999926
+-4.620000000000007
+3.3399999999999554
+-3.780000000000043
+10.919999999999972
+12.96
+1.819999999999979
+-2.7000000000000153
+1.759999999999943
+0.47999999999996845
+8.05999999999998
+5.559999999999961
+-3.5800000000000116
+-1.5400000000000151
+15.799999999999956
+0.47999999999998977
+-1.9000000000000252
+-5.460000000000005
+-7.24
+3.379999999999944
+4.159999999999963
+8.47999999999995
+4.83999999999997
+12.379999999999976
+2.8599999999999692
+-3.680000000000006
+1.9399999999999817
+3.9199999999999697
+-2.4200000000000275
+-2.640000000000037
+19.659999999999958
+-1.8800000000000097
+-5.700000000000014
+-5.680000000000001
+-4.060000000000004
+13.439999999999939
+10.199999999999969
+2.2999999999999705
+-3.14000000000002
+0.3599999999999657
+17.039999999999935
+-5.540000000000003
+10.47999999999995
+-1.7400000000000144
+-5.600000000000004
+11.659999999999945
+22.519999999999914
+-4.240000000000002
+3.4800000000000004
+-1.9800000000000209
+-5.0000000000000036
+-4.220000000000005
+0.9599999999999866
+-1.3400000000000283
+-2.340000000000015
+9.899999999999967
+-8.92
+12.39999999999997
+-2.4000000000000288
+1.4999999999999716
+11.159999999999956
+-0.12000000000003119
+-6.9
+-9.0
+-5.360000000000001
+-8.8
+-6.74
+15.179999999999955
+16.679999999999914
+13.559999999999954
+12.779999999999939
+-2.6600000000000206
+18.639999999999933
+16.019999999999968
+14.439999999999936
+8.55999999999998
+21.799999999999933
+-3.0800000000000285
+-1.6600000000000286
+6.039999999999974
+12.999999999999952
+0.19999999999999396
+-4.280000000000017
+-9.34
+10.379999999999916
+0.4999999999999858
+13.639999999999928
+-4.580000000000013
+15.19999999999997
+-1.660000000000025
+-0.16000000000002146
+8.979999999999968
+11.059999999999942
+17.93999999999998
+11.19999999999995
+9.439999999999943
+16.119999999999916
+9.439999999999944
+14.599999999999945
+9.339999999999954
+5.559999999999949
+-8.46
+9.859999999999955
+-4.860000000000004
+-5.200000000000005
+-8.3
+14.339999999999936
+0.7399999999999682
+0.6799999999999908
+-1.4600000000000648
+9.499999999999957
+1.2199999999999847
+9.279999999999948
+-1.5000000000000178
+14.599999999999925
+2.799999999999965
+17.139999999999944
+3.979999999999942
+13.799999999999944
+6.879999999999946
+-6.120000000000001
+19.779999999999934
+-3.760000000000007
+-1.680000000000014
+10.77999999999998
+-2.800000000000029
+-4.980000000000004
+10.979999999999954
+2.299999999999951
+12.919999999999947
+-2.520000000000012
+-4.720000000000002
+-0.060000000000025366
+-1.3400000000000194
+-0.020000000000031548
+-0.3400000000000123
+6.679999999999976
+-5.180000000000006
+18.719999999999928
+-5.680000000000002
+8.259999999999973
+-1.5600000000000165
+4.079999999999977
+-2.0400000000000054
+3.2599999999999802
+-0.6000000000000192
+-5.860000000000002
+8.979999999999986
+-3.280000000000004
+-5.600000000000001
+14.759999999999943
+-0.9200000000000141
+15.939999999999944
+14.839999999999927
+2.27999999999998
+-3.8800000000000052
+15.099999999999977
+11.499999999999954
+13.439999999999948
+-1.4200000000000212
+9.319999999999931
+-2.060000000000022
+-1.6600000000000303
+-4.280000000000004
+-8.92
+3.7399999999999487
+-2.720000000000012
+12.519999999999927
+-8.96
+12.85999999999997
+16.979999999999922
+-3.9600000000000053
+14.439999999999937
+5.619999999999971
+-4.020000000000018
+-0.7600000000000247
+2.3799999999999994
+1.9799999999999827
+1.6599999999999877
+11.019999999999984
+19.61999999999993
+-0.22000000000003261
+8.459999999999972
+20.219999999999935
+-1.5987211554602254e-14
+16.019999999999953
+-3.5600000000000094
+3.4599999999999476
+22.03999999999994
+11.75999999999996
+-4.120000000000008
+-6.76
+13.67999999999998
+11.659999999999949
+-9.260000000000002
+-2.6400000000000157
+-4.800000000000003
+1.7199999999999545
+16.819999999999958
+17.69999999999995
+12.67999999999994
+-1.3600000000000385
+-0.2600000000000193
+15.739999999999922
+-9.040000000000001
+11.979999999999928
+12.259999999999959
+5.5199999999999605
+17.859999999999925
+-2.1600000000000303
+-6.780000000000001
+-2.0400000000000222
+-7.340000000000001
+-9.280000000000001
+-1.5400000000000134
+-7.540000000000001
+-7.08
+12.779999999999982
+2.9399999999999533
+-1.0800000000000143
+-1.6800000000000281
+3.6799999999999695
+-3.730349362740526e-14
+10.579999999999956
+-3.6600000000000055
+1.65999999999997
+13.959999999999928
+-5.280000000000006
+-4.600000000000006
+-4.1600000000000055
+15.839999999999929
+-3.220000000000004
+13.399999999999936
+12.379999999999933
+5.339999999999961
+18.339999999999954
+8.079999999999977
+-5.560000000000001
+2.4199999999999555
+13.95999999999992
+12.419999999999957
+10.679999999999929
+1.4399999999999729
+-4.740000000000016
+4.979999999999972
+4.339999999999997
+13.879999999999967
+5.919999999999991
+-9.659999999999998
+-2.300000000000032
+-1.7000000000000295
+-2.5400000000000196
+13.379999999999956
+1.4799999999999667
+-2.2200000000000095
+1.9399999999999782
+1.4999999999999893
+14.459999999999923
+3.859999999999973
+13.37999999999996
+-4.3000000000000025
+-6.360000000000001
+-0.06000000000002714
+16.819999999999926
+-3.020000000000011
+-7.1400000000000015
+15.100000000000016
+0.15999999999997527
+0.3599999999999657
+-3.860000000000004
+-1.360000000000035
+16.03999999999993
+10.419999999999938
+-2.840000000000014
+11.379999999999937
+-4.400000000000013
+-1.1800000000000246
+-2.6000000000000068
+17.639999999999937
+4.179999999999998
+15.039999999999953
+-3.9800000000000093
+16.619999999999955
+-9.260000000000002
+7.25999999999997
+-6.020000000000005
+16.73999999999994
+-2.1000000000000103
+-4.200000000000006
+2.899999999999956
+-4.08000000000002
+-9.3
+-5.180000000000003
+9.959999999999985
+-8.92
+2.979999999999981
+-6.060000000000002
+-4.200000000000003
+-4.60000000000001
+-0.56000000000002
+13.35999999999996
+11.41999999999996
+15.89999999999992
+0.5599999999999721
+1.3799999999999812
+12.039999999999955
+0.7199999999999775
+0.19999999999998508
+22.299999999999944
+-0.34000000000001407
+-2.1000000000000103
+14.439999999999936
+16.739999999999934
+3.0199999999999463
+13.539999999999926
+-7.08
+3.4799999999999294
+1.6199999999999655
+9.359999999999971
+14.119999999999932
+11.599999999999977
+-6.800000000000001
+5.899999999999963
+2.1199999999999743
+-0.10000000000001741
+-2.5200000000000147
+12.539999999999939
+11.099999999999957
+-3.420000000000016
+8.959999999999983
+-5.520000000000001
+15.279999999999937
+14.13999999999996
+-7.159999999999999
+13.859999999999923
+13.139999999999946
+16.699999999999967
+-0.2400000000000091
+11.759999999999925
+6.739999999999963
+15.239999999999966
+-4.9600000000000035
+-4.360000000000004
+12.01999999999996
+20.459999999999944
+-7.320000000000001
+-1.7600000000000318
+15.419999999999932
+-1.6800000000000122
+-6.6
+3.2799999999999585
+-0.5200000000000244
+12.199999999999932
+2.319999999999986
+-1.0600000000000342
+-1.8200000000000252
+1.299999999999951
+17.639999999999954
+0.6399999999999508
+-8.66
+18.159999999999936
+-7.280000000000003
+20.45999999999995
+15.259999999999938
+-5.500000000000001
+-1.0600000000000112
+1.0399999999999654
+-1.680000000000014
+-4.68000000000001
+17.319999999999943
+13.019999999999927
+-4.140000000000015
+-0.1800000000000317
+-3.4200000000000372
+-5.220000000000002
+-2.320000000000012
+15.799999999999972
+9.879999999999962
+-0.06000000000002714
+-3.5600000000000147
+13.299999999999951
+-5.120000000000005
+-3.920000000000014
+4.799999999999928
+-3.000000000000015
+3.219999999999988
+8.159999999999986
+13.939999999999955
+13.55999999999992
+11.439999999999966
+-5.200000000000002
+-4.240000000000004
+-5.600000000000004
+-6.76
+-9.360000000000001
+9.819999999999974
+15.57999999999994
+7.67999999999997
+17.299999999999972
+-4.880000000000015
+5.75999999999997
+-0.42000000000001947
+11.359999999999985
+7.339999999999989
+2.879999999999967
+-1.1400000000000308
+-5.240000000000006
+-0.5000000000000284
+-1.4400000000000155
+10.639999999999954
+2.4599999999999813
+1.85999999999998
+-4.680000000000006
+7.3799999999999795
+2.2599999999999767
+14.479999999999936
+10.639999999999965
+-4.180000000000004
+-6.380000000000001
+16.35999999999995
+-4.800000000000003
+-3.7800000000000056
+-5.880000000000004
+15.279999999999939
+-8.92
+12.19999999999993
+1.3199999999999772
+-0.28000000000001357
+-2.5600000000000103
+1.01999999999998
+-5.000000000000004
+-6.000000000000001
+5.939999999999953
+-6.0600000000000005
+11.399999999999944
+11.75999999999994
+12.439999999999944
+3.0399999999999885
+-8.260000000000002
+19.979999999999915
+-5.620000000000002
+12.079999999999961
+16.03999999999992
+-0.6200000000000241
+4.239999999999959
+-3.200000000000018
+13.599999999999941
+3.339999999999968
+5.099999999999927
+-4.120000000000012
+13.239999999999934
+4.65999999999995
diff --git "a/DQN_mulit_tensorflow_2/backup/3/\346\226\260\345\273\272\346\226\207\346\234\254\346\226\207\346\241\243.txt" "b/DQN_mulit_tensorflow_2/backup/3/\346\226\260\345\273\272\346\226\207\346\234\254\346\226\207\346\241\243.txt"
new file mode 100644
index 0000000..07a3047
--- /dev/null
+++ "b/DQN_mulit_tensorflow_2/backup/3/\346\226\260\345\273\272\346\226\207\346\234\254\346\226\207\346\241\243.txt"
@@ -0,0 +1 @@
+1000ep 全部由simple agents完成, 无用
\ No newline at end of file
diff --git a/DQN_mulit_tensorflow_2/backup/4/FFA1000/FFA1000.data-00000-of-00001 b/DQN_mulit_tensorflow_2/backup/4/FFA1000/FFA1000.data-00000-of-00001
new file mode 100644
index 0000000..e14e5c0
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/4/FFA1000/FFA1000.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/backup/4/FFA1000/FFA1000.index b/DQN_mulit_tensorflow_2/backup/4/FFA1000/FFA1000.index
new file mode 100644
index 0000000..5114600
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/4/FFA1000/FFA1000.index differ
diff --git a/DQN_mulit_tensorflow_2/backup/4/FFA1000/checkpoint b/DQN_mulit_tensorflow_2/backup/4/FFA1000/checkpoint
new file mode 100644
index 0000000..29be90e
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/4/FFA1000/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "FFA1000"
+all_model_checkpoint_paths: "FFA1000"
diff --git a/DQN_mulit_tensorflow_2/backup/4/FFA1500/FFA1500.data-00000-of-00001 b/DQN_mulit_tensorflow_2/backup/4/FFA1500/FFA1500.data-00000-of-00001
new file mode 100644
index 0000000..ac9feb5
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/4/FFA1500/FFA1500.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/backup/4/FFA1500/FFA1500.index b/DQN_mulit_tensorflow_2/backup/4/FFA1500/FFA1500.index
new file mode 100644
index 0000000..67b1bea
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/4/FFA1500/FFA1500.index differ
diff --git a/DQN_mulit_tensorflow_2/backup/4/FFA1500/checkpoint b/DQN_mulit_tensorflow_2/backup/4/FFA1500/checkpoint
new file mode 100644
index 0000000..b7b6bee
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/4/FFA1500/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "FFA1500"
+all_model_checkpoint_paths: "FFA1500"
diff --git a/DQN_mulit_tensorflow_2/backup/4/FFA2000/FFA2000.data-00000-of-00001 b/DQN_mulit_tensorflow_2/backup/4/FFA2000/FFA2000.data-00000-of-00001
new file mode 100644
index 0000000..6ab47e9
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/4/FFA2000/FFA2000.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/backup/4/FFA2000/FFA2000.index b/DQN_mulit_tensorflow_2/backup/4/FFA2000/FFA2000.index
new file mode 100644
index 0000000..623b483
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/4/FFA2000/FFA2000.index differ
diff --git a/DQN_mulit_tensorflow_2/backup/4/FFA2000/checkpoint b/DQN_mulit_tensorflow_2/backup/4/FFA2000/checkpoint
new file mode 100644
index 0000000..1dcab0c
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/4/FFA2000/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "FFA2000"
+all_model_checkpoint_paths: "FFA2000"
diff --git a/DQN_mulit_tensorflow_2/backup/4/FFA2500/FFA2500.data-00000-of-00001 b/DQN_mulit_tensorflow_2/backup/4/FFA2500/FFA2500.data-00000-of-00001
new file mode 100644
index 0000000..2bc7766
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/4/FFA2500/FFA2500.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/backup/4/FFA2500/FFA2500.index b/DQN_mulit_tensorflow_2/backup/4/FFA2500/FFA2500.index
new file mode 100644
index 0000000..f571c06
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/4/FFA2500/FFA2500.index differ
diff --git a/DQN_mulit_tensorflow_2/backup/4/FFA2500/checkpoint b/DQN_mulit_tensorflow_2/backup/4/FFA2500/checkpoint
new file mode 100644
index 0000000..e1df453
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/4/FFA2500/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "FFA2500"
+all_model_checkpoint_paths: "FFA2500"
diff --git a/DQN_mulit_tensorflow_2/backup/4/FFA3000/FFA3000.data-00000-of-00001 b/DQN_mulit_tensorflow_2/backup/4/FFA3000/FFA3000.data-00000-of-00001
new file mode 100644
index 0000000..09c4e94
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/4/FFA3000/FFA3000.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/backup/4/FFA3000/FFA3000.index b/DQN_mulit_tensorflow_2/backup/4/FFA3000/FFA3000.index
new file mode 100644
index 0000000..5d1895f
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/4/FFA3000/FFA3000.index differ
diff --git a/DQN_mulit_tensorflow_2/backup/4/FFA3000/checkpoint b/DQN_mulit_tensorflow_2/backup/4/FFA3000/checkpoint
new file mode 100644
index 0000000..05be1ca
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/4/FFA3000/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "FFA3000"
+all_model_checkpoint_paths: "FFA3000"
diff --git a/DQN_mulit_tensorflow_2/backup/4/FFA3500/FFA3500.data-00000-of-00001 b/DQN_mulit_tensorflow_2/backup/4/FFA3500/FFA3500.data-00000-of-00001
new file mode 100644
index 0000000..cd0db02
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/4/FFA3500/FFA3500.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/backup/4/FFA3500/FFA3500.index b/DQN_mulit_tensorflow_2/backup/4/FFA3500/FFA3500.index
new file mode 100644
index 0000000..31f49fb
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/4/FFA3500/FFA3500.index differ
diff --git a/DQN_mulit_tensorflow_2/backup/4/FFA3500/checkpoint b/DQN_mulit_tensorflow_2/backup/4/FFA3500/checkpoint
new file mode 100644
index 0000000..6b23a0a
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/4/FFA3500/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "FFA3500"
+all_model_checkpoint_paths: "FFA3500"
diff --git a/DQN_mulit_tensorflow_2/backup/4/FFA4000/FFA4000.data-00000-of-00001 b/DQN_mulit_tensorflow_2/backup/4/FFA4000/FFA4000.data-00000-of-00001
new file mode 100644
index 0000000..ce1e083
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/4/FFA4000/FFA4000.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/backup/4/FFA4000/FFA4000.index b/DQN_mulit_tensorflow_2/backup/4/FFA4000/FFA4000.index
new file mode 100644
index 0000000..c7dd906
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/4/FFA4000/FFA4000.index differ
diff --git a/DQN_mulit_tensorflow_2/backup/4/FFA4000/checkpoint b/DQN_mulit_tensorflow_2/backup/4/FFA4000/checkpoint
new file mode 100644
index 0000000..04247ef
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/4/FFA4000/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "FFA4000"
+all_model_checkpoint_paths: "FFA4000"
diff --git a/DQN_mulit_tensorflow_2/backup/4/FFA4500/FFA4500.data-00000-of-00001 b/DQN_mulit_tensorflow_2/backup/4/FFA4500/FFA4500.data-00000-of-00001
new file mode 100644
index 0000000..4d18225
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/4/FFA4500/FFA4500.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/backup/4/FFA4500/FFA4500.index b/DQN_mulit_tensorflow_2/backup/4/FFA4500/FFA4500.index
new file mode 100644
index 0000000..a030ef9
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/4/FFA4500/FFA4500.index differ
diff --git a/DQN_mulit_tensorflow_2/backup/4/FFA4500/checkpoint b/DQN_mulit_tensorflow_2/backup/4/FFA4500/checkpoint
new file mode 100644
index 0000000..70f6686
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/4/FFA4500/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "FFA4500"
+all_model_checkpoint_paths: "FFA4500"
diff --git a/DQN_mulit_tensorflow_2/backup/4/FFA500/FFA500.data-00000-of-00001 b/DQN_mulit_tensorflow_2/backup/4/FFA500/FFA500.data-00000-of-00001
new file mode 100644
index 0000000..71c8a1a
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/4/FFA500/FFA500.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/backup/4/FFA500/FFA500.index b/DQN_mulit_tensorflow_2/backup/4/FFA500/FFA500.index
new file mode 100644
index 0000000..5e5400a
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/4/FFA500/FFA500.index differ
diff --git a/DQN_mulit_tensorflow_2/backup/4/FFA500/checkpoint b/DQN_mulit_tensorflow_2/backup/4/FFA500/checkpoint
new file mode 100644
index 0000000..10482c1
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/4/FFA500/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "FFA500"
+all_model_checkpoint_paths: "FFA500"
diff --git a/DQN_mulit_tensorflow_2/backup/4/result.csv b/DQN_mulit_tensorflow_2/backup/4/result.csv
new file mode 100644
index 0000000..1358faf
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/4/result.csv
@@ -0,0 +1,4501 @@
+result
+0
+1
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
diff --git a/DQN_mulit_tensorflow_2/backup/4/reward.csv b/DQN_mulit_tensorflow_2/backup/4/reward.csv
new file mode 100644
index 0000000..30c10a2
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/4/reward.csv
@@ -0,0 +1,5272 @@
+reward
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+-13.440000000000003
+-10.77999999999995
+-10.539999999999957
+-10.639999999999999
+-9.52
+-13.360000000000001
+-9.819999999999999
+-14.360000000000003
+-9.98
+-8.9
+-9.48
+-9.979999999999999
+-10.139999999999999
+-9.94
+-9.68
+-10.059999999999999
+-9.04
+-10.44
+-10.0
+-10.399999999999999
+-10.0
+-10.0
+-10.0
+-10.0
+-9.780000000000001
+-10.0
+-10.94
+-13.500000000000002
+-9.459999999999999
+-9.86
+-9.459999999999999
+-12.16
+-9.459999999999999
+-11.08
+-17.600000000000016
+-9.659999999999998
+-9.18
+-9.26
+-15.940000000000012
+-10.0
+-9.459999999999999
+-11.42
+-10.0
+-12.720000000000002
+-9.94
+-9.6
+-9.42
+-9.459999999999999
+-11.600000000000001
+-9.68
+-9.68
+-10.08
+-10.2
+-9.860000000000001
+-10.059999999999999
+-9.399999999999999
+-9.78
+-14.920000000000005
+-10.34
+-11.520000000000001
+-11.52
+-12.66
+-9.959999999999999
+-11.2
+-10.0
+-9.18
+-9.700000000000001
+-10.58
+-9.479999999999999
+-9.739999999999998
+-7.300000000000015
+-11.36
+-10.0
+-10.0
+-9.459999999999999
+-10.36
+-10.12
+-12.400000000000002
+-10.12
+-9.76
+-12.440000000000001
+-10.26
+-15.26000000000001
+-9.860000000000001
+-10.06
+-10.0
+-10.059999999999999
+-11.02
+-9.62
+-9.459999999999999
+-9.54
+-10.319999999999999
+-2.9200000000000026
+-10.0
+-10.84
+-14.020000000000003
+-8.92
+-10.059999999999999
+-10.0
+-10.0
+-10.159999999999998
+-14.300000000000004
+-10.62
+-10.98
+-10.799999999999999
+-11.760000000000002
+-9.74
+-11.280000000000001
+-10.379999999999999
+-10.04
+-10.0
+-10.379999999999999
+-11.32
+-10.8
+-11.1
+-11.64
+-10.059999999999999
+-10.28
+-15.120000000000008
+-9.459999999999999
+-11.02
+-10.34
+-10.4
+-9.459999999999999
+-9.020000000000001
+-9.44
+-11.740000000000002
+-11.96
+-10.0
+-10.059999999999999
+-9.94
+-10.879999999999999
+-8.840000000000002
+-9.459999999999999
+-9.979999999999999
+-9.819999999999999
+-14.000000000000002
+-9.78
+-10.26
+-9.979999999999999
+-10.219999999999999
+-10.0
+-10.26
+-9.459999999999999
+-12.920000000000002
+-9.799999999999999
+-10.399999999999999
+-9.719999999999999
+-9.28
+-10.159999999999998
+-10.28
+-9.58
+-9.5
+-9.92
+-8.66
+-9.32
+-9.36
+-9.459999999999999
+-9.54
+-12.700000000000003
+-10.579999999999998
+-10.52
+-10.239999999999998
+-9.92
+-10.059999999999999
+-9.4
+-10.159999999999998
+-10.0
+-9.86
+-9.5
+-10.820000000000002
+-13.320000000000004
+-9.8
+-9.66
+-11.84
+-9.879999999999999
+-9.54
+-9.379999999999999
+-9.399999999999999
+-9.639999999999999
+-9.78
+-10.42
+-10.0
+-9.459999999999999
+-9.72
+-10.0
+-8.220000000000002
+-10.94
+-11.600000000000001
+-10.799999999999999
+-10.059999999999999
+-10.26
+-9.559999999999999
+-9.92
+-10.059999999999999
+-11.66
+-9.459999999999999
+-11.620000000000003
+-10.9
+-10.0
+-9.899999999999999
+-9.799999999999999
+-10.559999999999999
+-10.04
+-10.620000000000001
+-9.799999999999999
+-10.0
+-11.06
+-8.379999999999999
+-9.459999999999999
+-10.0
+-11.08
+-9.399999999999999
+-8.8
+-10.3
+-9.639999999999999
+-9.559999999999999
+-9.08
+-9.659999999999998
+-9.479999999999999
+-9.479999999999999
+-11.760000000000002
+-11.18
+-10.24
+-9.08
+-9.7
+-9.459999999999999
+-9.4
+-10.1
+-16.600000000000016
+-13.240000000000002
+-9.84
+-10.76
+-9.639999999999999
+-9.959999999999999
+-10.319999999999999
+-9.62
+-10.52
+-10.44
+-13.440000000000001
+-8.7
+-11.46
+-9.7
+-10.1
+-10.059999999999999
+-9.799999999999999
+-9.62
+-9.979999999999999
+-11.02
+-10.059999999999999
+-10.0
+-11.7
+-9.459999999999999
+-9.58
+-10.479999999999999
+-15.660000000000007
+-10.82
+-10.0
+-9.64
+-12.14
+-9.739999999999998
+-11.3
+-10.3
+-10.98
+-10.059999999999999
+-9.280000000000001
+-10.479999999999999
+-9.5
+-10.059999999999999
+-10.059999999999999
+-9.459999999999999
+-9.2
+-10.059999999999999
+-9.459999999999999
+-9.459999999999999
+-10.62
+-9.459999999999999
+-9.7
+-10.920000000000002
+-9.48
+-11.64
+-10.3
+-9.44
+-8.84
+-10.26
+-9.86
+-12.660000000000002
+-9.7
+-10.0
+-11.46
+-9.879999999999999
+-14.380000000000006
+-9.66
+-9.879999999999999
+-10.66
+-12.940000000000001
+-11.620000000000001
+-9.52
+-9.68
+-11.08
+-10.7
+-2.100000000000001
+-9.879999999999999
+-12.18
+-9.34
+-14.480000000000004
+-9.459999999999999
+-10.66
+-9.5
+-12.940000000000001
+-10.139999999999999
+-10.0
+-9.459999999999999
+-9.440000000000001
+-13.460000000000003
+-11.88
+-10.68
+-10.059999999999999
+-9.799999999999999
+-10.9
+-10.239999999999998
+-12.020000000000001
+-9.639999999999999
+-10.36
+-10.379999999999999
+-9.76
+-12.98
+-9.46
+-8.9
+-9.399999999999999
+-11.180000000000001
+-10.059999999999999
+-10.0
+-10.22
+-10.42
+-10.66
+-12.280000000000005
+-10.040000000000001
+-10.7
+-9.399999999999999
+-9.459999999999999
+-10.88
+-10.059999999999999
+-10.059999999999999
+-9.459999999999999
+-9.94
+-9.5
+-10.239999999999998
+-10.059999999999999
+-10.04
+-9.739999999999998
+-9.399999999999999
+-10.059999999999999
+-9.28
+-9.799999999999999
+-9.12
+-15.360000000000012
+-10.040000000000001
+-9.96
+-10.7
+-10.059999999999999
+-9.399999999999999
+-9.399999999999999
+-9.56
+-10.0
+-9.92
+-10.6
+-9.24
+-12.060000000000002
+-10.52
+-10.48
+-9.540000000000001
+-9.5
+-12.240000000000002
+-9.879999999999999
+-8.36
+-9.459999999999999
+-10.24
+-10.479999999999999
+-9.44
+-9.5
+-10.0
+-9.399999999999999
+-9.5
+-10.78
+-10.82
+-9.62
+-10.059999999999999
+-12.16
+-8.82
+-12.32
+-9.459999999999999
+-10.139999999999999
+-9.559999999999999
+-9.86
+-9.94
+-10.059999999999999
+-10.479999999999999
+-9.399999999999999
+-9.42
+-11.580000000000002
+-9.94
+-10.059999999999999
+-10.059999999999999
+-10.42
+-10.42
+-10.219999999999999
+-10.459999999999999
+-1.1399999999999615
+-10.379999999999999
+-8.86
+-9.68
+-11.600000000000001
+-9.46
+-10.0
+-10.02
+-9.4
+-9.459999999999999
+-9.2
+-10.059999999999999
+-9.459999999999999
+-12.420000000000002
+-10.7
+-10.58
+-10.159999999999998
+-9.899999999999999
+-3.8800000000000026
+-9.52
+-9.36
+-9.16
+-10.180000000000001
+-10.16
+-11.98
+-13.320000000000002
+-10.459999999999999
+-11.500000000000005
+-9.86
+-9.66
+-10.239999999999998
+-9.32
+-10.12
+-11.700000000000001
+-13.240000000000002
+-11.48
+-10.76
+-11.42
+-9.399999999999999
+-11.68
+-11.28
+-11.440000000000001
+-10.16
+-9.02
+-9.54
+-11.16
+-9.68
+-10.459999999999999
+-9.72
+-7.920000000000017
+-9.639999999999999
+-9.659999999999998
+-12.520000000000001
+-9.86
+-11.96
+-9.719999999999972
+-12.980000000000002
+-12.080000000000002
+-12.920000000000003
+-10.82
+-15.720000000000006
+-9.459999999999999
+-9.399999999999999
+-10.88
+-11.64
+-10.139999999999999
+-10.799999999999999
+-10.76
+-11.04
+-9.459999999999999
+-5.459999999999999
+-9.28
+-2.8200000000000016
+-10.239999999999998
+-10.059999999999999
+-9.459999999999999
+-9.459999999999999
+-9.319999999999999
+-11.16
+-9.72
+-9.819999999999999
+-10.78
+-10.58
+-13.14
+-11.8
+-9.399999999999999
+-10.0
+-10.059999999999999
+-9.34
+-10.0
+-9.459999999999999
+-9.399999999999999
+-11.18
+-12.66
+-10.0
+-9.6
+-9.979999999999999
+-9.940000000000001
+-10.8
+-10.22
+-13.740000000000002
+-10.780000000000001
+-10.7
+-10.04
+-4.820000000000005
+-11.920000000000002
+-10.059999999999999
+-10.0
+-9.459999999999999
+-9.4
+-10.059999999999999
+-10.059999999999999
+-8.86
+-10.62
+-9.7
+-9.719999999999999
+-9.78
+-11.68
+-9.78
+-11.4
+-9.2
+-10.440000000000001
+-11.96
+-10.7
+-10.08
+-9.899999999999999
+-9.4
+-9.879999999999999
+-10.219999999999999
+-9.78
+-11.42
+-10.44
+-9.799999999999999
+-10.32
+-11.580000000000002
+-11.68
+-9.4
+-9.0
+-9.799999999999999
+-10.059999999999999
+-9.86
+-11.0
+-9.899999999999999
+-10.74
+-10.32
+-9.399999999999999
+-9.32
+-9.9
+-11.320000000000002
+-9.68
+-9.06
+-9.86
+-11.600000000000001
+-10.28
+-10.340000000000002
+-8.78
+-8.960000000000003
+-9.34
+-10.92
+-10.02
+-9.559999999999999
+-10.54
+-9.799999999999999
+-11.06
+-10.28
+-0.8599999999999923
+-9.399999999999999
+-14.480000000000002
+-12.440000000000001
+-10.0
+-13.780000000000006
+-9.459999999999999
+-9.399999999999999
+-10.0
+-10.36
+-10.059999999999999
+-10.0
+-11.14
+-10.0
+-9.979999999999999
+-10.18
+-10.98
+-9.86
+-9.42
+-10.68
+-10.52
+-9.92
+-9.719999999999999
+-10.78
+-10.0
+-10.219999999999999
+-11.3
+-10.499999999999998
+-8.46
+-10.139999999999999
+-13.280000000000001
+-10.399999999999999
+-10.9
+-10.059999999999999
+-9.399999999999999
+-9.54
+-10.0
+-9.559999999999999
+-9.819999999999999
+-9.219999999999999
+-9.76
+-8.600000000000001
+-10.02
+-9.6
+-4.580000000000004
+-9.519999999999976
+-14.600000000000005
+-12.64
+-11.660000000000002
+-9.799999999999999
+-10.139999999999999
+-10.120000000000001
+-9.4
+-10.08
+-9.96
+-12.180000000000001
+-9.78
+-10.54
+-9.379999999999999
+-9.28
+-11.32
+-10.0
+-11.900000000000002
+-10.299999999999999
+-11.4
+-9.719999999999999
+-10.84
+-9.819999999999999
+-11.58
+-10.0
+-10.1
+-9.92
+-10.159999999999998
+-9.52
+-9.28
+-9.739999999999998
+-9.28
+-10.52
+-11.720000000000002
+-10.7
+-10.540000000000001
+-8.44000000000001
+-10.299999999999999
+-10.799999999999999
+-9.979999999999999
+-10.82
+-10.7
+-9.78
+-9.84
+-10.540000000000001
+-10.579999999999998
+-11.020000000000001
+-9.459999999999999
+-9.6
+-9.799999999999999
+-12.120000000000001
+-11.2
+-9.399999999999999
+-9.459999999999999
+-8.72
+-10.059999999999999
+-9.76
+-12.64
+-11.9
+-10.059999999999999
+-9.06
+-10.58
+-11.84
+-9.639999999999999
+-10.34
+-10.98
+-10.12
+-10.72
+-2.1000000000000014
+-10.36
+-9.62
+-10.659999999999998
+-10.0
+-9.34
+-10.0
+-10.579999999999998
+-9.659999999999998
+-10.44
+-10.059999999999999
+-9.459999999999999
+-9.96
+-9.819999999999999
+-10.219999999999999
+-9.5
+-9.66
+-10.5
+-18.220000000000006
+-11.7
+-11.24
+-9.979999999999999
+-10.0
+-9.44
+-9.399999999999999
+-11.9
+-10.059999999999999
+-10.2
+-11.2
+-9.2
+-9.100000000000001
+-9.42
+-9.58
+-10.639999999999999
+-8.66
+-10.0
+-9.399999999999999
+-9.88
+-11.040000000000001
+-10.62
+-11.860000000000001
+-9.6
+-10.1
+-10.559999999999999
+-10.379999999999999
+-9.5
+-10.059999999999999
+-11.120000000000001
+-10.58
+-10.059999999999999
+-10.059999999999999
+-10.0
+-10.96
+-10.74
+-10.059999999999999
+-11.66
+-10.499999999999998
+-10.299999999999999
+-4.28
+-10.0
+-10.059999999999999
+-9.6
+-9.059999999999999
+-9.48
+-9.5
+-9.84
+-9.58
+-10.059999999999999
+-13.360000000000003
+-9.4
+-10.479999999999999
+-10.059999999999999
+-10.0
+-11.08
+-9.659999999999998
+-9.280000000000001
+-9.819999999999999
+-10.219999999999999
+-9.94
+-9.58
+-11.16
+-10.74
+-10.059999999999999
+-10.299999999999999
+-9.659999999999998
+-9.620000000000001
+-9.879999999999999
+-10.639999999999999
+-9.24
+-9.86
+-9.54
+-10.26
+-12.02
+-9.639999999999999
+-10.08
+-9.399999999999999
+-10.299999999999999
+-10.299999999999999
+-9.819999999999999
+-9.459999999999999
+-9.459999999999999
+-9.399999999999999
+-10.7
+-5.639999999999999
+-9.68
+-10.76
+-12.680000000000003
+-11.76
+-9.86
+-10.58
+-10.7
+-9.459999999999999
+-10.0
+-10.1
+-9.459999999999999
+-8.92
+-9.459999999999999
+-11.4
+-9.459999999999999
+-10.46
+-10.319999999999999
+-12.040000000000001
+-10.059999999999999
+-10.54
+-10.059999999999999
+-11.100000000000001
+-10.86
+-15.680000000000005
+-9.640000000000002
+-9.7
+-9.459999999999999
+-10.0
+-9.399999999999999
+-9.94
+-10.84
+-9.76
+-10.459999999999999
+-10.139999999999999
+-9.559999999999999
+-15.38000000000001
+-10.32
+-13.000000000000004
+-11.26
+-10.18
+-10.08
+-9.4
+-13.46
+-11.26
+-11.72
+-9.68
+-9.399999999999999
+-9.32
+-10.059999999999999
+-9.52
+-9.1
+-10.54
+-10.059999999999999
+-9.04
+-10.059999999999999
+-10.08
+-11.74
+-10.54
+-13.160000000000002
+-16.080000000000016
+-9.7
+-11.060000000000004
+-9.4
+-10.379999999999999
+-9.48
+-10.299999999999999
+-10.04
+-10.059999999999999
+-9.92
+-9.62
+-10.66
+-9.88
+-10.540000000000001
+-9.139999999999999
+-10.620000000000001
+-10.26
+-9.08
+-9.899999999999999
+-9.28
+-11.12
+-11.5
+-12.24
+-10.059999999999999
+-10.0
+-12.580000000000002
+-10.5
+-9.6
+-10.34
+-10.059999999999999
+-10.34
+-10.659999999999998
+-10.059999999999999
+-10.42
+-10.059999999999999
+-10.059999999999999
+-9.78
+-11.46
+-10.0
+-10.26
+-10.34
+-9.86
+-12.600000000000001
+-11.3
+-12.880000000000003
+-10.059999999999999
+-9.459999999999999
+-10.059999999999999
+-10.559999999999999
+-10.719999999999999
+-9.360000000000001
+-8.94
+-9.399999999999999
+-11.84
+-10.479999999999999
+-9.299999999999999
+-9.459999999999999
+-14.080000000000002
+-10.059999999999999
+-10.84
+-10.059999999999999
+-9.4
+-9.819999999999999
+-10.0
+-9.379999999999999
+-9.879999999999999
+-10.0
+-9.34
+-10.58
+-11.12
+-9.459999999999999
+-10.059999999999999
+-10.059999999999999
+-9.86
+-10.0
+-9.68
+-10.959999999999999
+-11.38
+-10.0
+-12.180000000000001
+-9.459999999999999
+-5.699999999999999
+-10.059999999999999
+-9.979999999999999
+-9.76
+-10.639999999999999
+-10.299999999999999
+-11.200000000000001
+-10.84
+-10.700000000000001
+-9.84
+-10.32
+-3.240000000000002
+-11.16
+-10.0
+-10.499999999999998
+-10.28
+-10.780000000000001
+-10.059999999999999
+-10.059999999999999
+-9.28
+-10.88
+-10.6
+-11.98
+-11.1
+-10.88
+-9.919999999999998
+-14.600000000000005
+-9.42
+-12.74
+-5.42000000000001
+-10.02
+-11.34
+-9.54
+-11.42
+-15.120000000000006
+-9.84
+-9.76
+-10.18
+-6.06
+-11.600000000000001
+-10.159999999999998
+-11.34
+-9.440000000000001
+-10.08
+-9.28
+-12.2
+-10.04
+-10.36
+-14.240000000000006
+-10.76
+-10.1
+-11.22
+-10.04
+-10.5
+-9.24
+-11.18
+-10.059999999999999
+-11.9
+-9.280000000000001
+-10.4
+-10.079999999999998
+-10.62
+-9.28
+-10.82
+-12.260000000000002
+-13.96
+-10.04
+-9.76
+-10.639999999999999
+-10.219999999999999
+-11.600000000000001
+-10.42
+-9.94
+-14.080000000000004
+-10.879999999999999
+-10.68
+-10.0
+-9.459999999999999
+-10.059999999999999
+-10.059999999999999
+-10.0
+-9.54
+-9.979999999999999
+-10.78
+-9.959999999999999
+-9.52
+-11.04
+-11.219999999999999
+-9.42
+-9.58
+-10.959999999999999
+-9.88
+-10.26
+-9.459999999999999
+-9.559999999999999
+-9.42
+-9.34
+-9.78
+-11.66
+-11.18
+-11.0
+-13.840000000000002
+-10.48
+-10.82
+-10.04
+-10.36
+-10.139999999999999
+-12.940000000000003
+-9.36
+-10.78
+-10.28
+-9.459999999999999
+-9.799999999999999
+-12.340000000000002
+-10.84
+-10.059999999999999
+-10.059999999999999
+-10.58
+-10.0
+-10.379999999999999
+-10.68
+-9.459999999999999
+-10.059999999999999
+-10.04
+-13.440000000000001
+-13.760000000000003
+-9.459999999999999
+-10.84
+-10.059999999999999
+-10.0
+-9.28
+-9.76
+-9.86
+-10.0
+-9.819999999999999
+-9.4
+-9.399999999999999
+-10.12
+-9.899999999999999
+-10.479999999999999
+-10.459999999999999
+-9.739999999999998
+-9.78
+-3.6400000000000023
+-11.16
+-10.78
+-10.26
+-10.76
+-10.299999999999999
+-10.059999999999999
+-10.059999999999999
+-9.48
+-9.739999999999998
+-10.0
+-10.0
+-10.059999999999999
+-9.28
+-9.54
+-11.5
+-11.120000000000003
+-14.460000000000004
+-10.059999999999999
+-9.94
+-10.059999999999999
+-10.319999999999999
+-12.220000000000002
+-10.959999999999999
+-9.819999999999999
+-10.1
+-9.739999999999998
+-13.100000000000001
+-9.559999999999999
+-10.52
+-9.94
+-10.18
+-10.18
+-9.860000000000001
+-10.94
+-10.719999999999999
+-11.14
+-9.6
+-9.959999999999999
+-11.64
+-9.64
+-9.5
+-9.42
+-10.16
+-13.640000000000002
+-8.959999999999999
+-9.76
+-11.600000000000001
+-9.739999999999998
+-11.0
+-10.059999999999999
+-10.059999999999999
+-10.08
+-10.38
+-10.76
+-9.459999999999999
+-9.979999999999999
+-10.08
+-9.739999999999998
+-9.719999999999999
+-10.98
+-10.1
+-10.96
+-13.3
+-9.7
+-9.86
+-9.52
+-10.86
+-10.2
+-10.379999999999999
+-9.459999999999999
+-9.459999999999999
+-9.399999999999999
+-10.059999999999999
+-14.040000000000003
+-10.12
+-10.68
+-9.44
+-12.440000000000003
+-9.92
+-14.280000000000001
+-10.159999999999998
+-11.620000000000001
+-9.459999999999999
+-9.459999999999999
+-12.620000000000001
+-10.36
+-10.219999999999999
+-9.659999999999998
+-9.399999999999999
+-10.08
+-11.44
+-9.459999999999999
+-10.18
+-10.52
+-10.7
+-10.0
+-15.220000000000004
+-9.36
+-10.98
+-10.54
+-9.5
+-10.42
+-9.94
+-10.059999999999999
+-9.459999999999999
+-11.420000000000002
+-9.979999999999999
+-11.54
+-10.059999999999999
+-10.82
+-9.459999999999999
+-9.459999999999999
+-10.059999999999999
+-10.42
+-9.68
+-13.060000000000002
+-9.520000000000001
+-11.66
+-9.6
+-11.72
+-10.98
+-9.280000000000001
+-10.66
+-10.62
+-10.0
+-10.0
+-10.059999999999999
+-9.399999999999999
+-9.86
+-10.0
+-10.399999999999999
+-9.819999999999999
+-10.299999999999999
+-10.180000000000001
+-5.780000000000012
+-10.559999999999999
+-9.58
+-10.0
+-10.059999999999999
+-10.059999999999999
+-10.1
+-10.059999999999999
+-10.059999999999999
+-9.399999999999999
+-10.04
+-11.680000000000001
+-9.54
+-10.0
+-9.36
+-10.1
+-12.32
+-10.139999999999999
+-11.14
+-9.739999999999998
+-15.060000000000006
+-10.96
+-10.84
+-10.68
+-10.059999999999999
+-9.78
+-10.059999999999999
+-13.060000000000002
+-10.44
+-10.36
+-11.82
+-10.299999999999999
+-9.74
+-9.719999999999999
+-9.799999999999999
+-14.000000000000004
+-11.3
+-9.86
+-13.100000000000001
+-10.82
+-14.700000000000003
+-11.5
+-9.719999999999999
+-10.74
+-9.899999999999999
+-9.94
+-10.04
+-9.78
+-10.459999999999999
+-10.8
+-9.459999999999999
+-9.72
+-9.86
+-12.96
+-9.459999999999999
+-0.8000000000000006
+-10.8
+-11.02
+-9.999999999999998
+-9.639999999999999
+-10.16
+-10.4
+-10.0
+-12.280000000000001
+-9.739999999999998
+-12.420000000000002
+-10.22
+-10.76
+-11.26
+-10.04
+-11.52
+-12.98
+-10.42
+-10.6
+-10.28
+-11.34
+-9.86
+-9.64
+-9.219999999999999
+-11.080000000000002
+-9.22
+-11.26
+-10.08
+-10.0
+-10.159999999999998
+-10.219999999999999
+-9.459999999999999
+-9.399999999999999
+-11.500000000000002
+-9.58
+-10.18
+-10.059999999999999
+-10.04
+-10.76
+-9.459999999999999
+-9.6
+-10.76
+-10.059999999999999
+-9.360000000000001
+-10.159999999999998
+-10.44
+-10.0
+-11.4
+-9.879999999999999
+-12.560000000000002
+-9.379999999999999
+-11.24
+-11.64
+-10.219999999999999
+-9.899999999999999
+-9.44
+-10.959999999999999
+-12.32
+-10.059999999999999
+-9.42
+-12.1
+-10.7
+-10.36
+-10.059999999999999
+-9.879999999999999
+-10.34
+-9.459999999999999
+-10.059999999999999
+-10.0
+-11.24
+-11.36
+-9.899999999999999
+-10.0
+-11.04
+-11.1
+-11.28
+-10.159999999999998
+-10.08
+-10.059999999999999
+-10.059999999999999
+-10.059999999999999
+-10.139999999999999
+-10.059999999999999
+-10.059999999999999
+-8.8
+-10.379999999999999
+-10.059999999999999
+-10.26
+-11.680000000000001
+-10.299999999999999
+-9.82
+-10.059999999999999
+-10.02
+-12.580000000000002
+-10.139999999999999
+-10.02
+-10.379999999999999
+-10.059999999999999
+-10.78
+-10.059999999999999
+-9.78
+-9.92
+-9.459999999999999
+-10.079999999999998
+-9.08
+-11.52
+-13.520000000000003
+-8.740000000000002
+-10.239999999999998
+-10.059999999999999
+-10.66
+-9.879999999999999
+-9.34
+-9.62
+-12.32
+-10.639999999999999
+-9.78
+-10.219999999999999
+-9.6
+-9.459999999999999
+-12.360000000000001
+-9.459999999999999
+-9.739999999999998
+-9.5
+-10.159999999999998
+-9.659999999999998
+-9.799999999999999
+-10.139999999999999
+-9.559999999999999
+-11.26
+-10.639999999999999
+-9.479999999999999
+-10.139999999999999
+-10.08
+-4.74
+-11.34
+-9.7
+-10.059999999999999
+-10.059999999999999
+-9.459999999999999
+-10.52
+-10.059999999999999
+-9.28
+-9.899999999999999
+-10.1
+-10.56
+-8.66
+-11.860000000000001
+-16.90000000000001
+-9.819999999999999
+-10.139999999999999
+-8.92
+-9.639999999999999
+-11.98
+-9.020000000000001
+-13.080000000000002
+-10.52
+-9.86
+-10.76
+-11.68
+-9.54
+-9.459999999999999
+-9.639999999999999
+-10.9
+-9.459999999999999
+-10.579999999999998
+-9.739999999999998
+-10.059999999999999
+-16.40000000000001
+-10.02
+-10.3
+-9.58
+-4.739999999999998
+-9.12
+-9.72
+-10.36
+-9.58
+-9.92
+-9.620000000000001
+-10.26
+-9.659999999999998
+-10.6
+-9.5
+-9.4
+-9.32
+-10.639999999999999
+-10.059999999999999
+-10.26
+-9.34
+-9.78
+-11.56
+-10.94
+-10.1
+-10.52
+-9.7
+-10.0
+-9.92
+-10.499999999999998
+-10.379999999999999
+-9.459999999999999
+-10.540000000000001
+-10.620000000000001
+-9.459999999999999
+-18.87999999999998
+-10.16
+-14.600000000000003
+-10.34
+-7.140000000000035
+-10.379999999999999
+-11.900000000000002
+-10.1
+-10.0
+-9.78
+-9.88
+-9.48
+-10.719999999999999
+-13.180000000000001
+-9.719999999999999
+-10.34
+-10.12
+-12.120000000000001
+-9.719999999999999
+-10.86
+-10.5
+-11.860000000000003
+-10.82
+-9.399999999999999
+-11.66
+-10.26
+-9.459999999999999
+-10.0
+-10.68
+-9.459999999999999
+-9.459999999999999
+-10.059999999999999
+-17.800000000000022
+-10.36
+-12.640000000000004
+-9.42
+-12.100000000000001
+-12.240000000000002
+-15.620000000000012
+-10.219999999999999
+-11.219999999999999
+-9.399999999999999
+-9.399999999999999
+-10.0
+-10.639999999999999
+-10.0
+-10.059999999999999
+-10.159999999999998
+-9.58
+-9.459999999999999
+-10.059999999999999
+-9.88
+-10.139999999999999
+-9.58
+-11.64
+-9.459999999999999
+-10.0
+-9.88
+-9.78
+-9.92
+-9.7
+-13.46
+-11.9
+-11.740000000000002
+-10.54
+-8.84
+-9.86
+-10.059999999999999
+-13.240000000000002
+-10.299999999999999
+-9.440000000000001
+-9.28
+-11.680000000000001
+-9.5
+-10.28
+-10.520000000000001
+-3.320000000000002
+-13.660000000000002
+-9.94
+-10.299999999999999
+-9.899999999999999
+-11.0
+-10.799999999999999
+-8.879999999999999
+-9.92
+-9.28
+-10.6
+-11.780000000000001
+-10.08
+-10.0
+-9.94
+-11.500000000000002
+-11.1
+-9.739999999999998
+-10.4
+-10.74
+-9.799999999999999
+-9.7
+-11.82
+-10.499999999999998
+-9.68
+-10.74
+-10.04
+-10.68
+-10.059999999999999
+-9.459999999999999
+-10.0
+-10.159999999999998
+-9.2
+-9.819999999999999
+-10.559999999999999
+-11.42
+-8.94
+-10.0
+-10.78
+-10.46
+-10.139999999999999
+-8.92
+-11.22
+-10.58
+-10.08
+-9.58
+-9.459999999999999
+-10.059999999999999
+-9.459999999999999
+-9.979999999999999
+-10.0
+-9.459999999999999
+-10.0
+-10.059999999999999
+-11.16
+-10.059999999999999
+-9.459999999999999
+-10.059999999999999
+-11.900000000000002
+-9.68
+-12.260000000000002
+-9.66
+-9.7
+-4.440000000000008
+-9.32
+-6.199999999999999
+-12.400000000000002
+-12.020000000000001
+-9.219999999999999
+-9.78
+-11.8
+-0.28
+-11.9
+-12.700000000000003
+-10.58
+-10.9
+-12.080000000000002
+-9.56
+-9.399999999999999
+-11.48
+-9.819999999999999
+-9.639999999999999
+-9.459999999999999
+-9.440000000000001
+-9.739999999999998
+-12.3
+-11.64
+-9.54
+-13.740000000000002
+-10.08
+-11.5
+-10.079999999999998
+-10.059999999999999
+-9.459999999999999
+-10.059999999999999
+-11.24
+-10.219999999999999
+-10.58
+-10.64
+-9.84
+-9.739999999999998
+-9.299999999999999
+-10.299999999999999
+-9.32
+-9.54
+-9.459999999999999
+-10.059999999999999
+-11.1
+-9.42
+-10.26
+-10.68
+-10.059999999999999
+-9.76
+-9.979999999999999
+-9.719999999999999
+-11.1
+-9.84
+-9.5
+-9.6
+-10.0
+-10.059999999999999
+-10.0
+-9.459999999999999
+-10.34
+-10.72
+-10.0
+-10.24
+-9.219999999999999
+-10.219999999999999
+-9.7
+-10.059999999999999
+-10.059999999999999
+-11.02
+-10.379999999999999
+-9.799999999999999
+-10.08
+-9.32
+-10.059999999999999
+-10.319999999999999
+-11.3
+-10.4
+-1.3000000000000007
+-9.5
+-9.299999999999999
+-10.82
+-9.5
+-12.580000000000002
+-10.0
+-14.080000000000004
+-10.0
+-9.399999999999999
+-11.04
+-11.74
+-10.12
+-9.86
+-12.0
+-9.219999999999999
+-1.1400000000000001
+-9.479999999999999
+-9.68
+-10.9
+-10.139999999999999
+-10.360000000000001
+-9.139999999999999
+-9.379999999999999
+-9.76
+-9.7
+-10.799999999999999
+-10.86
+-10.479999999999999
+-10.38
+-9.76
+-7.600000000000001
+-9.54
+-9.719999999999999
+-10.1
+-8.92
+-11.24
+-9.68
+-10.62
+-10.12
+-11.600000000000001
+-10.18
+-10.34
+-11.3
+-9.219999999999999
+-10.2
+-10.62
+-9.78
+-8.96
+-9.86
+-11.46
+-9.459999999999999
+-10.0
+-12.720000000000002
+-10.059999999999999
+-13.160000000000004
+-12.220000000000002
+-9.799999999999999
+-9.62
+-10.78
+-9.399999999999999
+-9.7
+-10.219999999999999
+-9.32
+-10.68
+-9.92
+-10.26
+-12.280000000000001
+-11.400000000000002
+-8.94
+-10.059999999999999
+-9.219999999999999
+-9.299999999999999
+-9.459999999999999
+-9.78
+-9.459999999999999
+-9.399999999999999
+-9.459999999999999
+-9.54
+-10.219999999999999
+-9.459999999999999
+-9.879999999999999
+-10.04
+-9.92
+-13.540000000000003
+-10.239999999999998
+-9.54
+-11.56
+-10.58
+-11.36
+-10.8
+-10.08
+-9.78
+-10.0
+-10.34
+-11.700000000000001
+-11.040000000000001
+-11.58
+-9.879999999999999
+-10.32
+-11.28
+-10.4
+-10.08
+-9.84
+-11.46
+-10.719999999999999
+-9.459999999999999
+-10.059999999999999
+-8.74
+-4.680000000000001
+-8.74
+-11.3
+-10.04
+-10.239999999999998
+-9.84
+-0.16000000000000347
+-4.0
+-9.459999999999999
+-9.48
+-9.479999999999999
+-11.1
+-9.36
+-9.84
+-9.559999999999999
+-10.74
+-12.5
+-8.72
+-13.220000000000002
+-9.799999999999999
+-9.459999999999999
+-10.059999999999999
+-10.059999999999999
+-10.02
+-9.459999999999999
+-10.059999999999999
+-9.780000000000001
+-10.74
+-9.959999999999999
+-10.88
+-11.740000000000002
+-9.020000000000001
+-11.96
+-10.36
+-9.7
+-10.08
+-14.180000000000003
+-9.879999999999999
+-13.06
+-9.76
+-12.320000000000002
+-10.1
+-10.3
+-13.140000000000002
+-9.819999999999999
+-10.12
+-9.7
+-9.979999999999999
+-9.06
+-10.059999999999999
+-10.520000000000001
+-9.6
+-10.0
+-10.32
+-10.059999999999999
+-9.44
+-9.4
+-9.42
+-10.059999999999999
+-10.059999999999999
+-9.459999999999999
+-11.540000000000001
+-9.459999999999999
+-10.28
+-9.44
+-9.979999999999999
+-9.799999999999999
+-9.899999999999999
+-10.139999999999999
+-11.16
+-10.6
+-10.299999999999999
+-10.299999999999999
+-10.76
+-10.08
+-9.879999999999999
+-9.819999999999999
+-12.400000000000002
+-8.16
+-9.76
+-9.92
+-9.78
+-9.28
+-11.26
+-9.399999999999999
+-10.0
+-10.1
+-10.059999999999999
+-10.52
+-9.42
+-10.319999999999999
+-11.18
+-10.84
+-9.48
+-10.46
+-10.219999999999999
+-10.58
+-13.720000000000004
+2.220000000000004
+-10.059999999999999
+-9.78
+-10.16
+-9.399999999999999
+-12.72
+-9.219999999999999
+-9.34
+-10.219999999999999
+-9.120000000000001
+-9.68
+-9.44
+-10.0
+-10.36
+-11.36
+-10.62
+-10.639999999999999
+-11.24
+-12.180000000000001
+-10.52
+-10.059999999999999
+-10.059999999999999
+-8.46
+-10.26
+-14.760000000000003
+-10.299999999999999
+-9.86
+-10.9
+-11.840000000000002
+-11.840000000000002
+-9.6
+-9.979999999999999
+-10.159999999999998
+-9.36
+-9.280000000000001
+-9.6
+-10.96
+-12.780000000000001
+-11.46
+-10.059999999999999
+-10.28
+-10.66
+-9.719999999999999
+-11.36
+-10.459999999999999
+-10.7
+-9.0
+-11.600000000000001
+-10.059999999999999
+-10.36
+-9.379999999999999
+-9.8
+-10.760000000000002
+-9.5
+-9.86
+-10.2
+-10.2
+-10.12
+-10.299999999999999
+-9.92
+-9.96
+-11.840000000000002
+-9.239999999999998
+-9.6
+-10.0
+-9.459999999999999
+-9.68
+-10.98
+-9.54
+-10.32
+-10.319999999999999
+-10.379999999999999
+-10.159999999999998
+-11.48
+-10.479999999999999
+-9.7
+-10.82
+-14.780000000000005
+-10.88
+-11.600000000000001
+-9.2
+-9.54
+-10.92
+-12.900000000000002
+-9.280000000000001
+-9.5
+-12.020000000000001
+-9.72
+-10.260000000000002
+-12.38
+-10.059999999999999
+-9.84
+-9.4
+-11.540000000000001
+-9.02
+-10.219999999999999
+-11.42
+-9.979999999999999
+-11.380000000000003
+-9.16
+-9.799999999999999
+-10.0
+-9.36
+-11.1
+-8.719999999999999
+-10.5
+-10.76
+-9.520000000000001
+-9.5
+-11.360000000000001
+-9.7
+-13.200000000000003
+-9.8
+-13.440000000000001
+-11.58
+-10.219999999999999
+-10.299999999999999
+-9.08
+-13.900000000000002
+-9.739999999999998
+-9.94
+-9.799999999999999
+-6.3000000000000105
+-11.64
+-11.06
+-10.860000000000001
+-10.26
+-10.82
+-11.780000000000001
+-9.819999999999999
+-10.34
+-10.54
+-11.620000000000001
+-10.02
+-9.28
+-10.42
+-8.48
+-9.7
+-9.459999999999999
+-10.059999999999999
+-10.059999999999999
+-9.76
+-10.78
+-9.94
+-10.98
+-9.659999999999998
+-10.059999999999999
+-10.0
+-9.26
+-11.2
+-11.700000000000001
+-9.98
+-11.24
+-9.459999999999999
+-10.059999999999999
+-10.059999999999999
+-9.74
+-10.1
+-11.24
+-10.42
+-12.16
+-10.059999999999999
+-9.459999999999999
+-10.059999999999999
+-10.54
+-9.399999999999999
+-10.280000000000001
+-10.66
+-9.94
+-8.120000000000001
+-10.36
+-9.66
+-10.299999999999999
+-9.78
+-10.16
+-9.799999999999999
+-10.64
+-11.620000000000001
+-11.860000000000001
+-13.600000000000001
+-10.1
+-8.76
+-12.16
+-9.979999999999999
+-10.6
+-11.440000000000001
+-10.399999999999999
+-9.399999999999999
+-9.66
+-12.3
+-10.239999999999998
+-9.739999999999998
+-10.62
+-9.459999999999999
+-11.82
+-10.18
+-9.76
+-9.42
+-9.819999999999999
+-10.82
+-9.7
+-11.180000000000001
+-14.68000000000001
+-9.959999999999999
+-14.780000000000003
+-9.84
+-9.479999999999999
+-12.920000000000002
+-10.059999999999999
+-10.36
+-10.7
+-9.479999999999999
+-12.540000000000001
+-9.48
+-9.88
+-9.84
+-10.059999999999999
+-8.58
+-9.76
+-8.84
+-11.0
+-10.780000000000001
+-15.900000000000006
+-9.7
+-10.28
+-9.979999999999999
+-11.02
+-10.98
+-9.639999999999999
+-9.459999999999999
+-12.08
+-11.14
+-9.76
+-10.059999999999999
+-9.4
+-10.059999999999999
+-9.459999999999999
+-9.7
+-11.9
+-10.02
+-10.12
+-9.659999999999998
+-11.16
+-14.100000000000001
+-10.059999999999999
+-9.32
+-9.18
+-9.459999999999999
+-10.219999999999999
+-9.500000000000002
+-12.2
+-9.399999999999999
+-10.059999999999999
+-9.92
+-10.159999999999998
+-10.76
+-9.78
+-9.459999999999999
+-10.059999999999999
+-9.459999999999999
+-10.059999999999999
+-10.059999999999999
+-9.76
+-10.059999999999999
+-9.399999999999999
+-10.059999999999999
+-10.059999999999999
+-9.799999999999999
+-14.200000000000003
+-11.72
+-14.580000000000005
+-10.639999999999999
+-11.52
+-10.139999999999999
+-9.7
+-9.02
+-11.18
+-9.899999999999999
+-9.879999999999999
+-9.6
+-9.62
+-9.52
+-11.58
+-9.639999999999999
+-11.12
+-13.020000000000001
+-10.059999999999999
+-9.459999999999999
+-10.0
+-11.72
+-10.399999999999999
+-9.88
+-9.78
+-9.7
+-9.459999999999999
+-9.459999999999999
+-10.059999999999999
+-10.5
+-10.16
+-10.18
+-9.78
+-11.200000000000001
+-12.38
+-9.799999999999999
+-9.459999999999999
+-10.6
+-9.34
+-14.300000000000004
+-12.100000000000001
+-10.219999999999999
+-9.62
+-10.68
+-10.12
+-9.66
+-9.78
+-4.380000000000003
+-9.66
+-9.399999999999999
+-10.059999999999999
+-9.88
+-13.380000000000003
+-9.58
+-9.459999999999999
+-10.0
+-9.5
+-10.059999999999999
+-10.059999999999999
+-9.02
+-10.059999999999999
+-7.700000000000003
+-9.78
+-10.18
+-10.219999999999999
+-9.520000000000001
+-10.219999999999999
+-9.979999999999999
+-9.32
+-9.44
+-10.18
+-9.7
+-10.059999999999999
+-9.459999999999999
+-10.559999999999999
+-10.920000000000002
+-10.92
+-9.719999999999999
+-10.6
+-10.48
+-11.780000000000001
+-10.88
+-9.66
+-10.26
+-10.0
+-9.72
+-9.959999999999999
+-11.64
+-8.4
+-11.32
+-6.540000000000008
+-9.52
+-11.48
+-9.78
+-9.600000000000001
+-10.059999999999999
+-10.059999999999999
+-10.059999999999999
+-9.459999999999999
+-10.799999999999999
+-9.879999999999999
+-10.0
+-11.760000000000002
+-11.540000000000003
+-9.739999999999998
+-10.059999999999999
+-9.84
+-9.5
+-10.12
+-10.18
+-9.399999999999999
+-9.719999999999999
+-11.620000000000001
+-11.22
+-11.64
+-10.479999999999999
+-10.26
+-9.819999999999999
+-9.5
+-10.040000000000001
+-12.940000000000001
+-11.42
+-9.76
+-10.159999999999998
+-14.020000000000005
+-11.68
+-9.66
+-10.059999999999999
+-10.0
+-9.200000000000001
+-11.14
+-9.940000000000001
+-13.440000000000001
+-10.059999999999999
+-11.5
+-9.92
+-10.24
+-9.96
+-9.479999999999999
+-12.500000000000002
+-13.880000000000003
+-10.12
+-9.1
+-10.5
+-9.459999999999999
+-12.100000000000001
+-10.379999999999999
+-10.840000000000002
+-9.68
+-17.220000000000027
+-14.080000000000002
+-9.92
+-9.719999999999999
+-10.0
+-10.0
+-9.42
+-9.719999999999999
+-10.059999999999999
+-10.059999999999999
+-11.379999999999999
+-11.14
+-13.440000000000003
+-11.16
+-11.58
+-13.460000000000003
+-9.86
+-10.799999999999999
+-12.66
+-10.059999999999999
+-10.0
+-9.459999999999999
+-10.059999999999999
+-10.0
+-9.58
+-9.879999999999999
+-10.059999999999999
+-10.84
+-9.459999999999999
+-10.719999999999999
+-10.139999999999999
+-10.0
+-8.74
+-10.44
+-10.059999999999999
+-9.86
+-10.9
+-10.0
+-10.059999999999999
+-9.459999999999999
+-8.120000000000001
+-11.24
+-9.959999999999999
+-11.74
+-9.399999999999999
+-9.879999999999999
+-10.499999999999998
+-11.26
+-8.76
+-9.94
+-10.399999999999999
+-8.78
+-9.62
+-9.76
+-11.600000000000001
+-10.04
+-9.979999999999999
+-11.12
+-9.799999999999999
+-10.54
+-11.120000000000001
+-10.059999999999999
+-9.399999999999999
+-9.399999999999999
+-9.459999999999999
+-9.719999999999999
+2.76
+-9.799999999999999
+-10.16
+-12.08
+-11.080000000000002
+-8.66
+-10.8
+-10.58
+-9.66
+-9.42
+-14.420000000000005
+-10.06
+-10.42
+-9.72
+-10.059999999999999
+-10.200000000000001
+-9.68
+-9.48
+-11.32
+-10.78
+-9.379999999999999
+-10.04
+-10.82
+-9.74
+-10.479999999999999
+-11.4
+-9.94
+-10.379999999999999
+-11.28
+-11.0
+-9.76
+-10.239999999999998
+-11.34
+-10.059999999999999
+-9.959999999999999
+-13.700000000000003
+-9.0
+-10.4
+-12.900000000000002
+-0.32
+-9.4
+-9.979999999999999
+-9.459999999999999
+-11.08
+-9.459999999999999
+-9.459999999999999
+-10.059999999999999
+-10.62
+-10.059999999999999
+-10.059999999999999
+-11.000000000000002
+-10.64
+-10.0
+-11.06
+-10.82
+-10.02
+-9.58
+-9.78
+-11.22
+-10.639999999999999
+-11.32
+-9.239999999999998
+-9.7
+-11.24
+-9.94
+-10.639999999999999
+-9.88
+-10.5
+-10.04
+-10.66
+-10.74
+-10.379999999999999
+-9.459999999999999
+-10.04
+-9.379999999999999
+-10.04
+-10.059999999999999
+-11.680000000000001
+-9.739999999999998
+-10.52
+-12.8
+-9.94
+-10.0
+-9.92
+-9.88
+-8.740000000000002
+-9.5
+-9.44
+-8.98
+-9.88
+-10.399999999999999
+-10.0
+-11.06
+-11.88
+-3.900000000000004
+-12.780000000000001
+-9.68
+-10.02
+-11.940000000000001
+-9.799999999999999
+-9.379999999999999
+-9.799999999999999
+-9.7
+-10.02
+-10.299999999999999
+-9.819999999999999
+-10.059999999999999
+-11.0
+-10.9
+-10.059999999999999
+-9.459999999999999
+-10.54
+-9.44
+-10.499999999999998
+-9.1
+-9.299999999999999
+-10.86
+-10.719999999999999
+-10.5
+-9.879999999999999
+-12.840000000000002
+-9.559999999999999
+-9.96
+-9.54
+-10.139999999999999
+-9.52
+-10.200000000000001
+-9.520000000000001
+-10.98
+-9.7
+-11.9
+-9.459999999999999
+-10.059999999999999
+-11.000000000000002
+-9.5
+-1.2200000000000004
+-9.58
+-10.12
+-9.080000000000002
+-9.76
+-10.059999999999999
+-9.04
+-9.879999999999999
+-10.12
+-9.459999999999999
+-12.280000000000001
+-9.9
+-13.220000000000002
+-10.059999999999999
+-10.059999999999999
+-9.459999999999999
+-10.059999999999999
+-10.059999999999999
+-8.760000000000002
+-9.819999999999999
+-9.459999999999999
+-9.280000000000001
+-9.5
+-9.399999999999999
+-9.84
+-10.24
+-10.840000000000002
+-8.5
+-7.859999999999999
+-10.0
+-10.0
+-10.2
+-10.0
+-9.78
+-11.260000000000002
+-9.979999999999999
+-10.159999999999998
+-3.2200000000000006
+-11.72
+-10.0
+-11.14
+-9.76
+-10.059999999999999
+-9.379999999999999
+-10.62
+-10.26
+-10.02
+-10.92
+-14.700000000000003
+-9.6
+-9.94
+-11.42
+-10.34
+-10.18
+-11.720000000000002
+-9.940000000000001
+-9.48
+-9.8
+-9.719999999999999
+-10.44
+-9.32
+-11.14
+-9.5
+-9.54
+-10.1
+-12.4
+-16.940000000000005
+-11.139999999999999
+-9.36
+-9.659999999999998
+-9.66
+-9.139999999999999
+-9.08
+-10.18
+-8.9
+-10.04
+-10.58
+-14.140000000000002
+-9.62
+-10.62
+-11.22
+-9.72
+-9.26
+-10.920000000000002
+-9.62
+-11.460000000000003
+-10.0
+-11.58
+-9.739999999999998
+-9.94
+-9.68
+-10.82
+-9.559999999999999
+-10.459999999999999
+-10.059999999999999
+-10.1
+-9.459999999999999
+-9.459999999999999
+-9.280000000000001
+-10.64
+-10.340000000000002
+-8.76
+-10.02
+-10.159999999999998
+-10.219999999999999
+-10.799999999999999
+-9.959999999999999
+-10.92
+-11.64
+-10.78
+-9.86
+-10.0
+-9.579999999999998
+-10.42
+-9.399999999999999
+-10.059999999999999
+-11.280000000000001
+-9.68
+-12.940000000000001
+-9.62
+-10.7
+-8.86
+-10.26
+-10.44
+-10.08
+-11.98
+-12.24
+-11.02
+-14.580000000000002
+-9.68
+-10.5
+-9.459999999999999
+-7.38
+-10.92
+-3.0200000000000036
+-10.18
+-9.24
+-9.54
+-9.88
+-9.360000000000001
+-9.139999999999999
+-9.139999999999999
+-10.0
+-10.74
+-9.979999999999999
+-10.2
+-10.54
+-3.560000000000005
+-9.819999999999999
+-9.78
+-9.399999999999999
+-1.459999999999999
+-14.340000000000007
+-10.059999999999999
+-9.520000000000001
+-10.62
+-11.22
+-3.2200000000000006
+-12.22
+-11.480000000000002
+-10.42
+-9.139999999999999
+-9.399999999999999
+-10.04
+-9.639999999999999
+-10.96
+-9.44
+-9.44
+-9.5
+-10.0
+-10.459999999999999
+-11.08
+-13.300000000000002
+-11.34
+-10.44
+-9.86
+-10.0
+-10.159999999999998
+-9.819999999999999
+-10.42
+-10.76
+-10.94
+-10.059999999999999
+-10.0
+-9.399999999999999
+-9.62
+-9.399999999999999
+-9.44
+-9.48
+-10.0
+-9.32
+-10.459999999999999
+-10.059999999999999
+-10.0
+-9.48
+-10.6
+-9.459999999999999
+-10.419999999999998
+-9.68
+-9.3
+-9.899999999999999
+-10.7
+-10.379999999999999
+-10.72
+-9.6
+-10.0
+-15.460000000000012
+-10.319999999999999
+-10.059999999999999
+-10.059999999999999
+-10.02
+-9.479999999999999
+-10.54
+-10.0
+-10.639999999999999
+-9.479999999999999
+-10.360000000000001
+-10.04
+-9.84
+-10.159999999999998
+-10.42
+-9.959999999999999
+-11.680000000000001
+-9.399999999999999
+-10.78
+-11.06
+-9.659999999999998
+-11.120000000000001
+-9.74
+-9.24
+-9.78
+-9.399999999999999
+-10.0
+-10.139999999999999
+-10.0
+-9.459999999999999
+-9.76
+-12.760000000000002
+-10.0
+-10.26
+-9.68
+-10.98
+-13.940000000000001
+-10.2
+-9.739999999999998
+-10.18
+-11.3
+-10.719999999999999
+-9.32
+-9.68
+-9.92
+-9.399999999999999
+-11.1
+-9.459999999999999
+-9.459999999999999
+-10.78
+-8.020000000000001
+-10.2
+-10.66
+-9.78
+-16.14000000000001
+-9.94
+-9.799999999999999
+-10.48
+-16.24000000000001
+-9.5
+-9.84
+-9.459999999999999
+-10.34
+-13.480000000000002
+-11.540000000000003
+-10.54
+-9.559999999999999
+-9.6
+-10.08
+-9.78
+-9.559999999999999
+-12.720000000000002
+-10.56
+-9.7
+-11.440000000000001
+-10.379999999999999
+-10.399999999999999
+-9.459999999999999
+-10.200000000000001
+-12.120000000000001
+-13.160000000000002
+-9.62
+-10.139999999999999
+-9.879999999999999
+-15.760000000000009
+-9.6
+-9.739999999999998
+-10.18
+-9.68
+-10.36
+-10.059999999999999
+-10.28
+-10.0
+-10.9
+-10.0
+-9.459999999999999
+-9.979999999999999
+-10.3
+-10.059999999999999
+-9.16
+-11.600000000000001
+-10.239999999999998
+-9.76
+-10.92
+-9.459999999999999
+-11.16
+-9.58
+-10.760000000000002
+-9.62
+-11.26
+-11.5
+-10.0
+-9.5
+-9.84
+-12.840000000000002
+-9.979999999999999
+-9.24
+-10.52
+-10.18
+-10.059999999999999
+-9.899999999999999
+-9.959999999999999
+-12.100000000000001
+-12.340000000000002
+-9.44
+-11.22
+-9.7
+-10.02
+-9.659999999999998
+-9.719999999999999
+-10.2
+-10.26
+-9.459999999999999
+-9.44
+-9.139999999999999
+-9.16
+-10.159999999999998
+-9.799999999999999
+-9.459999999999999
+-11.14
+-10.239999999999998
+-9.58
+-9.48
+-10.059999999999999
+-9.1
+-10.059999999999999
+-10.84
+-10.0
+-12.440000000000001
+-10.0
+-4.600000000000001
+-9.479999999999999
+-10.059999999999999
+-9.36
+-9.219999999999999
+-10.1
+-5.960000000000002
+-9.86
+-10.34
+-10.28
+-12.06
+-9.62
+-10.94
+-9.66
+-10.24
+-10.32
+-9.959999999999999
+-9.88
+-11.0
+-10.159999999999998
+-10.04
+-9.94
+-12.560000000000002
+-9.08
+-9.36
+-9.36
+-9.799999999999999
+-9.32
+-9.48
+-9.86
+-10.02
+-10.68
+-12.260000000000002
+-9.379999999999999
+-10.059999999999999
+-10.0
+-10.059999999999999
+-9.459999999999999
+-10.059999999999999
+-9.459999999999999
+-10.059999999999999
+-10.0
+-8.56
+-10.399999999999999
+-9.819999999999999
+-11.4
+-8.3
+-8.76
+-9.719999999999999
+-9.62
+-11.02
+-10.06
+-9.5
+-9.000000000000002
+-10.36
+-10.86
+-10.14
+-8.8
+-9.6
+-12.440000000000001
+-9.399999999999999
+-9.879999999999999
+-12.440000000000001
+-12.440000000000001
+-9.899999999999999
+-9.639999999999999
+-10.26
+-9.44
+-14.760000000000002
+-9.7
+-10.02
+-9.38
+-11.360000000000003
+-14.080000000000002
+-10.34
+-9.5
+-10.159999999999998
+-8.94
+-10.36
+-9.459999999999999
+-8.3
+-10.3
+-9.799999999999999
+-9.58
+-10.059999999999999
+-10.34
+-12.780000000000003
+-9.92
+-10.0
+-12.580000000000002
+-9.479999999999999
+-10.959999999999999
+-9.459999999999999
+-9.459999999999999
+-9.459999999999999
+-9.899999999999999
+-10.2
+-10.58
+-9.7
+-10.159999999999998
+-9.24
+-10.280000000000001
+-11.24
+-8.84
+-9.719999999999999
+-12.260000000000002
+-10.059999999999999
+-10.54
+-9.399999999999999
+-9.399999999999999
+-10.38
+-9.399999999999999
+-10.0
+-9.719999999999999
+-8.98
+-9.8
+-11.18
+-10.6
+-10.12
+-9.86
+-10.239999999999998
+-9.68
+-11.68
+-9.06
+-10.2
+-9.92
+-10.459999999999999
+-9.459999999999999
+-11.16
+-9.68
+-10.66
+-10.72
+-9.799999999999999
+-9.44
+-10.059999999999999
+-10.0
+-12.420000000000002
+-9.28
+-9.879999999999999
+-10.0
+-10.26
+-12.900000000000002
+-9.819999999999999
+-9.739999999999998
+-10.34
+-9.379999999999999
+-10.719999999999999
+-10.059999999999999
+-10.66
+-9.459999999999999
+-10.54
+-9.459999999999999
+-10.059999999999999
+-9.94
+-9.379999999999999
+-9.559999999999999
+-10.5
+-10.0
+-9.459999999999999
+-9.18
+-10.459999999999999
+-9.739999999999998
+-10.0
+-9.399999999999999
+-9.459999999999999
+-9.379999999999999
+-10.180000000000001
+-9.78
+-9.16
+-9.399999999999999
+-9.6
+-10.26
+-10.399999999999999
+-10.479999999999999
+-9.7
+-10.0
+-10.74
+-10.0
+-10.0
+-11.280000000000001
+-10.94
+-9.739999999999998
+-9.52
+-10.32
+-9.040000000000001
+-9.459999999999999
+-9.18
+-9.379999999999999
+-10.54
+-10.459999999999999
+-9.34
+-9.84
+-11.06
+-10.26
+-9.08
+-9.86
+-9.32
+-10.58
+-10.92
+-9.76
+-10.66
+-9.42
+-10.34
+-4.48
+-11.64
+-9.320000000000002
+-9.1
+-8.3
+-9.899999999999999
+-9.520000000000001
+-11.56
+-10.12
+-10.0
+-11.4
+-10.34
+-9.86
+-10.059999999999999
+-8.44
+-9.7
+-10.34
+-9.78
+-9.659999999999998
+-9.520000000000001
+-11.500000000000002
+-12.220000000000002
+-10.28
+-10.0
+-10.239999999999998
+-10.059999999999999
+-9.54
+-10.059999999999999
+-9.48
+-10.4
+-10.059999999999999
+-9.12
+-9.759999999999998
+-10.78
+-11.08
+-10.0
+-10.299999999999999
+-9.280000000000001
+-9.5
+-11.22
+-10.319999999999999
+-12.260000000000002
+-10.219999999999999
+-10.059999999999999
+-10.0
+-9.68
+-9.28
+-10.0
+-10.0
+-9.84
+-10.82
+-9.4
+-9.559999999999999
+-10.239999999999998
+-10.059999999999999
+-10.0
+-12.340000000000002
+-12.24
+-10.059999999999999
+-10.579999999999998
+-11.14
+-9.120000000000001
+-9.78
+-10.719999999999999
+-9.94
+-11.42
+-11.18
+-12.580000000000002
+-10.959999999999999
+-10.059999999999999
+-9.360000000000001
+-8.459999999999999
+-9.1
+-9.5
+-10.219999999999999
+-9.84
+-13.960000000000003
+-12.96
+-9.5
+-10.479999999999999
+-12.3
+-10.58
+-10.059999999999999
+-10.059999999999999
+-9.719999999999999
+-9.84
+-9.399999999999999
+-10.059999999999999
+-9.459999999999999
+-10.0
+-10.84
+-10.42
+-10.9
+-10.0
+-9.84
+-8.899999999999999
+-9.92
+-10.84
+-10.0
+-10.0
+-12.860000000000001
+-8.94
+-9.84
+-9.66
+-9.56
+-9.52
+-10.059999999999999
+-9.459999999999999
+-9.52
+-10.059999999999999
+-10.74
+-9.52
+-9.86
+-8.34
+-10.059999999999999
+-10.68
+-13.380000000000003
+-11.24
+-9.94
+-11.3
+-10.0
+-10.0
+-10.68
+-9.62
+-10.58
+-9.959999999999999
+-9.66
+-9.66
+-11.140000000000002
+-9.379999999999999
+-10.1
+-10.0
+-12.500000000000005
+-11.14
+-9.48
+-7.96
+-9.62
+-11.680000000000001
+-10.02
+-9.68
+-9.68
+-10.24
+-9.94
+-9.700000000000001
+-9.459999999999999
+-9.94
+-10.0
+-9.5
+-7.68
+-12.56
+-9.86
+-9.94
+-12.4
+-9.76
+-11.8
+-10.219999999999999
+-10.579999999999998
+-10.0
+-9.459999999999999
+-14.580000000000004
+-12.88
+-12.340000000000002
+-12.700000000000001
+-15.240000000000009
+-10.62
+-10.059999999999999
+-11.960000000000003
+-9.899999999999999
+-9.44
+-10.639999999999999
+-10.520000000000001
+-9.399999999999999
+-14.260000000000005
+-12.700000000000001
+-10.52
+-9.780000000000001
+-10.299999999999999
+-9.399999999999999
+-11.440000000000001
+-11.42
+-10.68
+-10.1
+-9.82
+-12.16
+-9.2
+-13.240000000000002
+-9.780000000000001
+-11.120000000000001
+-9.979999999999999
+-9.879999999999999
+-11.780000000000003
+-9.819999999999999
+-11.24
+-11.600000000000001
+-12.66
+-9.84
+-10.7
+-9.44
+-10.639999999999999
+-14.200000000000003
+-9.879999999999999
+-11.340000000000002
+-9.959999999999999
+-9.719999999999999
+-10.44
+-13.500000000000002
+-13.48
+-10.02
+-10.62
+-9.739999999999998
+-10.719999999999999
+-9.94
+-8.56
+-9.979999999999999
+-9.879999999999999
+-9.68
+-13.040000000000003
+-8.920000000000005
+-10.34
+-9.879999999999999
+-13.500000000000002
+-11.68
+-12.38
+-10.34
+-10.8
+-10.5
+-10.680000000000001
+-9.34
+-9.979999999999999
+-4.640000000000005
+-1.3200000000000005
+-11.0
+-10.0
+-10.04
+-9.44
+-9.44
+-13.46
+-9.580000000000002
+-10.02
+-9.639999999999999
+-9.58
+-10.94
+-9.459999999999999
+-10.059999999999999
+-9.22
+-9.86
+-9.739999999999998
+-9.52
+-9.879999999999999
+-12.48
+-12.240000000000002
+-5.340000000000003
+-9.6
+-10.2
+-10.18
+-10.059999999999999
+-10.0
+-10.059999999999999
+-10.059999999999999
+-10.0
+-10.84
+-9.16
+-9.700000000000001
+-9.68
+-10.84
+-9.399999999999999
+-10.059999999999999
+-9.459999999999999
+-11.379999999999999
+-8.5
+-11.36
+-9.399999999999999
+-9.44
+-10.12
+-10.219999999999999
+-9.399999999999999
+-8.32
+-9.459999999999999
+-10.059999999999999
+-10.0
+-9.799999999999999
+-10.44
+-9.280000000000001
+-10.059999999999999
+-11.380000000000003
+-10.059999999999999
+-9.92
+-9.5
+-12.440000000000001
+-8.66
+-9.0
+-10.319999999999999
+-9.28
+-9.08
+-9.48
+-9.62
+-11.36
+-10.54
+-9.94
+-9.6
+-9.48
+-9.819999999999999
+-11.04
+-9.96
+-10.419999999999998
+-9.96
+-7.119999999999999
+-9.459999999999999
+-10.059999999999999
+-9.4
+-9.4
+-8.48
+-10.1
+-9.1
+-9.819999999999999
+-11.16
+-7.42
+-9.18
+-9.86
+-9.76
+-10.4
+-9.479999999999999
+-10.1
+-10.059999999999999
+-10.5
+-9.399999999999999
+-9.399999999999999
+-10.68
+-9.58
+-9.62
+-9.459999999999999
+-9.62
+-11.26
+-10.4
+-9.819999999999999
+-9.459999999999999
+-11.04
+-11.920000000000002
+-15.740000000000009
+-9.020000000000001
+-8.56
+-9.9
+-9.68
+-10.04
+-10.059999999999999
+-9.64
+-14.020000000000003
+-10.52
+-10.36
+-13.840000000000002
+-11.46
+-11.12
+-9.54
+-9.76
+-9.659999999999998
+-11.379999999999999
+-12.8
+-10.5
+-10.059999999999999
+-10.0
+-10.219999999999999
+-10.0
+-9.2
+-11.340000000000002
+-10.160000000000002
+-9.84
+-9.799999999999999
+-11.22
+-5.160000000000005
+-10.239999999999998
+-9.42
+-9.459999999999999
+-13.520000000000003
+-11.0
+-10.360000000000001
+-10.659999999999998
+-10.0
+-9.92
+-10.159999999999998
+-9.899999999999999
+-11.08
+-10.059999999999999
+-10.059999999999999
+-10.0
+-10.72
+-10.379999999999999
+-9.34
+-10.42
+-11.380000000000003
+-9.62
+-11.360000000000001
+-10.499999999999998
+-15.340000000000009
+-9.66
+-9.799999999999999
+-12.14
+-9.540000000000001
+-10.700000000000001
+-9.719999999999999
+-10.479999999999999
+-9.739999999999998
+-10.0
+-9.440000000000001
+-9.84
+-10.0
+-9.66
+-10.0
+-10.239999999999998
+-10.059999999999999
+-10.059999999999999
+-9.68
+-9.719999999999999
+-10.059999999999999
+-10.92
+-9.520000000000001
+-11.56
+-11.42
+-12.360000000000003
+-8.080000000000002
+-11.64
+-13.500000000000004
+-10.0
+-9.86
+-10.08
+-9.459999999999999
+-10.0
+-11.74
+-9.899999999999999
+-9.94
+-8.94
+-10.059999999999999
+-9.459999999999999
+-8.959999999999999
+-9.42
+-10.0
+-11.6
+-9.219999999999999
+-10.0
+-12.940000000000001
+-9.399999999999999
+-11.2
+-11.86
+-12.780000000000001
+-9.92
+-13.060000000000002
+-9.86
+-13.400000000000002
+-11.58
+-10.66
+-10.38
+-14.300000000000002
+-10.1
+-11.28
+-10.059999999999999
+-9.559999999999999
+-11.02
+-10.0
+-10.0
+-10.319999999999999
+-10.36
+-10.4
+-13.400000000000002
+-9.7
+-13.860000000000003
+-10.0
+-9.719999999999999
+-10.0
+-9.360000000000001
+-9.66
+-11.36
+-9.6
+-10.04
+-12.160000000000002
+-10.34
+-9.639999999999999
+-10.459999999999999
+-9.82
+-10.7
+-9.68
+-9.42
+-10.02
+-11.14
+-10.0
+-9.32
+-10.719999999999999
+-10.639999999999999
+-10.12
+-10.0
+-10.02
+-9.399999999999999
+-10.36
+-8.999999999999988
+-9.8
+-10.98
+-10.059999999999999
+-10.0
+-9.860000000000001
+-9.76
+-9.76
+-8.98
+-8.8
+-9.399999999999999
+-10.86
+-9.239999999999998
+-10.12
+-10.059999999999999
+-8.52
+-10.42
+-10.98
+-10.92
+-9.74
+-10.24
+-9.66
+-9.479999999999999
+-10.02
+-13.980000000000002
+-9.76
+-13.820000000000004
+-10.02
+-9.62
+-13.900000000000002
+-8.84
+-9.459999999999999
+-10.6
+-10.0
+-10.16
+-9.459999999999999
+-10.059999999999999
+-12.000000000000002
+-9.7
+-9.7
+-8.040000000000003
+-9.819999999999999
+-9.459999999999999
+-9.879999999999999
+-12.280000000000001
+-10.08
+-9.78
+-13.540000000000003
+-9.62
+-11.66
+-10.88
+-10.2
+-9.559999999999999
+-9.62
+-10.34
+-11.14
+-10.299999999999999
+-10.2
+-10.459999999999999
+-10.0
+-10.299999999999999
+-10.0
+-10.2
+-10.76
+-10.379999999999999
+-9.62
+-9.92
+-10.1
+-9.719999999999999
+-10.28
+-11.06
+-10.139999999999999
+-11.320000000000002
+-15.000000000000004
+-10.1
+-9.739999999999998
+-9.52
+-10.78
+-9.799999999999999
+-10.18
+-10.0
+-10.059999999999999
+-9.44
+-8.86
+-9.64
+-10.639999999999999
+-9.78
+-10.08
+-9.340000000000002
+-10.580000000000002
+-9.86
+-11.56
+-10.28
+-12.420000000000002
+-10.059999999999999
+-10.0
+-11.6
+-9.399999999999999
+-15.940000000000005
+-12.720000000000002
+-10.6
+-9.899999999999999
+-9.62
+-9.7
+-12.56
+-11.56
+-9.719999999999999
+-12.820000000000002
+-14.060000000000004
+-12.540000000000003
+-12.82
+-10.219999999999999
+-9.28
+-11.26
+-9.459999999999999
+-9.32
+-10.059999999999999
+-9.919999999999998
+-13.800000000000002
+-10.159999999999998
+-9.8
+-9.5
+-10.64
+-10.059999999999999
+-9.819999999999999
+-9.18
+-9.84
+0.09999999999999609
+-10.0
+-9.120000000000001
+-9.879999999999999
+-11.34
+-9.44
+-9.620000000000001
+-9.7
+-10.920000000000002
+-11.24
+-8.240000000000002
+-10.680000000000001
+-9.76
+-9.68
+-9.84
+-9.56
+-9.819999999999999
+-9.299999999999999
+-9.66
+-9.68
+-9.979999999999999
+-11.26
+-10.26
+-9.299999999999999
+-9.559999999999999
+-12.700000000000001
+-9.479999999999999
+-3.800000000000015
+-9.58
+-10.040000000000001
+-9.44
+-10.059999999999999
+-10.059999999999999
+-10.059999999999999
+-10.62
+-10.059999999999999
+-9.799999999999999
+-9.139999999999999
+-10.059999999999999
+-9.459999999999999
+-10.0
+-10.12
+-9.54
+-9.440000000000001
+-10.0
+-9.459999999999999
+-9.44
+-9.84
+-10.0
+-10.059999999999999
+-10.0
+-9.459999999999999
+-10.159999999999998
+-10.76
+-9.399999999999999
+-10.219999999999999
+-9.62
+-12.66
+-11.42
+-9.54
+-11.620000000000001
+-10.3
+-9.459999999999999
+-11.600000000000001
+-11.88
+-1.4800000000000006
+-10.479999999999999
+-11.22
+-13.3
+-9.58
+-9.78
+-9.98
+-10.459999999999999
+-10.98
+-10.740000000000002
+-11.0
+-10.059999999999999
+-9.84
+-10.559999999999999
+-9.86
+-10.08
+-10.220000000000002
+-10.52
+-9.7
+-10.879999999999999
+-10.06
+-10.219999999999999
+-10.52
+-9.62
+-10.559999999999999
+-9.2
+-10.879999999999999
+-9.52
+-10.1
+-10.0
+-10.86
+-9.959999999999999
+-9.4
+-9.76
+-9.22
+-10.18
+-9.879999999999999
+-10.18
+-10.36
+-10.0
+-9.219999999999999
+-9.62
+-9.36
+-8.54
+-9.600000000000001
+-10.0
+-9.739999999999998
+-12.340000000000003
+-10.68
+-10.94
+-10.62
+-14.660000000000005
+-10.499999999999998
+-9.459999999999999
+-9.799999999999999
+-10.68
+-12.34
+-14.14
+-10.66
+-10.299999999999999
+-10.2
+-10.22
+-10.6
+-9.76
+-10.659999999999998
+-9.88
+-10.2
+-9.739999999999998
+-9.18
+-9.459999999999999
+-9.92
+-9.78
+-9.6
+-9.76
+-10.54
+-1.6400000000000021
+-10.12
+-9.459999999999999
+-10.0
+-10.059999999999999
+-9.799999999999999
+-10.28
+-12.900000000000002
+-9.62
+-10.379999999999999
+-10.0
+-10.059999999999999
+-10.239999999999998
+-3.140000000000002
+-11.32
+-9.959999999999999
+-10.02
+-9.399999999999999
+-9.959999999999999
+-4.12
+-9.139999999999999
+-9.459999999999999
+-9.62
+-8.379999999999999
+-10.059999999999999
+-9.399999999999999
+-10.62
+-10.0
+-10.059999999999999
+-11.52
+-9.44
+-9.459999999999999
+-9.739999999999998
+-13.100000000000001
+-10.1
+-12.4
+-11.040000000000001
+-11.92
+-10.4
+-9.879999999999999
+-9.799999999999999
+-11.860000000000001
+-10.98
+-9.76
+-10.319999999999999
+-10.299999999999999
+-9.38
+-10.62
+-10.02
+-10.159999999999998
+-13.440000000000001
+-9.54
+-10.74
+-10.1
+-9.700000000000001
+-10.26
+-8.92
+-10.059999999999999
+-8.6
+-12.900000000000002
+-9.58
+-9.739999999999998
+-10.059999999999999
+-13.780000000000001
+-14.560000000000006
+-10.94
+-9.6
+-11.06
+-9.76
+-9.799999999999999
+-10.399999999999999
+-10.1
+-9.7
+-11.540000000000001
+-11.0
+-9.06
+-9.96
+-9.379999999999999
+-9.88
+-9.62
+-9.78
+-11.2
+-10.059999999999999
+-10.26
+-10.44
+-10.120000000000001
+-9.76
+-10.459999999999999
+-9.68
+-11.18
+-9.58
+-9.879999999999999
+-9.719999999999999
+-10.960000000000003
+-10.44
+-14.160000000000004
+-11.760000000000002
+-10.52
+-11.74
+-13.600000000000005
+-9.62
+-12.48
+-9.32
+-10.459999999999999
+-9.600000000000001
+-9.88
+-9.36
+-15.580000000000021
+-10.26
+-10.379999999999999
+-10.940000000000001
+-9.459999999999999
+-9.459999999999999
+-11.46
+-10.799999999999999
+-9.639999999999999
+-9.379999999999999
+-10.059999999999999
+-9.4
+-9.68
+-10.499999999999998
+-10.139999999999999
+-10.0
+-9.440000000000001
+-9.68
+-9.84
+-9.819999999999999
+-9.459999999999999
+-10.059999999999999
+-11.4
+-10.0
+-9.559999999999999
+-9.18
+-10.0
+-9.879999999999999
+-9.459999999999999
+-10.8
+-9.04
+-9.959999999999999
+-9.559999999999999
+-10.66
+-11.16
+-14.100000000000005
+-11.3
+-9.62
+-11.840000000000002
+-10.059999999999999
+-9.459999999999999
+-9.78
+-10.0
+-9.0
+-9.959999999999999
+-9.899999999999999
+-9.219999999999999
+-9.659999999999998
+-9.879999999999999
+-9.979999999999999
+-10.52
+-10.58
+-4.8199999999999985
+-10.1
+-9.399999999999999
+-10.059999999999999
+-11.24
+-10.76
+-12.500000000000002
+-10.08
+-11.2
+-9.459999999999999
+-9.659999999999998
+-10.860000000000001
+-10.82
+-9.36
+-9.78
+-9.24
+-12.06
+-9.459999999999999
+-10.059999999999999
+-10.28
+-10.42
+-10.780000000000001
+-16.200000000000003
+-10.1
+-7.779999999999999
+-9.459999999999999
+-9.6
+-9.459999999999999
+-10.219999999999999
+-10.74
+-10.639999999999999
+-10.16
+-10.26
+-10.86
+-8.12
+-11.56
+-9.819999999999999
+-10.059999999999999
+-10.36
+-10.059999999999999
+-9.540000000000001
+-10.18
+-9.62
+-10.98
+-10.42
+-9.5
+-9.219999999999999
+-9.7
+-9.4
+-9.58
+-9.18
+-10.5
+-10.26
+-10.059999999999999
+-10.0
+-9.52
+-8.84
+-10.34
+-9.6
+-10.84
+-10.299999999999999
+-8.7
+-9.879999999999999
+-10.86
+-9.48
+-10.42
+-9.48
+-10.0
+-9.979999999999999
+-10.36
+-10.059999999999999
+-9.8
+-10.559999999999999
+-10.059999999999999
+-10.62
+-10.54
+-10.0
+-10.2
+-10.08
+-11.540000000000001
+-12.960000000000003
+-9.7
+-10.739999999999998
+-10.1
+-10.459999999999999
+-10.16
+-11.700000000000001
+-10.76
+-11.32
+-9.54
+-9.58
+-11.76
+-10.18
+-9.639999999999999
+-9.899999999999999
+-9.48
+-10.7
+-12.040000000000001
+-9.58
+-8.84
+-12.980000000000002
+-10.399999999999999
+-10.200000000000001
+-9.719999999999999
+-10.12
+-9.78
+-9.08
+-11.1
+-9.54
+-9.959999999999999
+-9.44
+-10.379999999999999
+-10.94
+-10.0
+-9.96
+-14.040000000000003
+-9.4
+-10.459999999999999
+-13.340000000000003
+-11.540000000000001
+-12.2
+-10.479999999999999
+-11.38
+-10.700000000000001
+-13.360000000000001
+-8.34
+-10.200000000000001
+-9.76
+-13.180000000000003
+-9.36
+-10.72
+-8.719999999999999
+-9.52
+-10.54
+-10.059999999999999
+-9.4
+-11.440000000000001
+-12.680000000000003
+-10.239999999999998
+-10.159999999999998
+-10.26
+-10.68
+-9.399999999999999
+-10.059999999999999
+-11.920000000000002
+-12.46
+-11.52
+-9.4
+-9.479999999999999
+-10.96
+-11.920000000000002
+-10.760000000000002
+-10.239999999999998
+-10.02
+-9.54
+-9.639999999999999
+-9.420000000000002
+-9.78
+-11.66
+-11.38
+-10.28
+-10.059999999999999
+-9.520000000000001
+-10.08
+-9.940000000000001
+-9.600000000000001
+-10.7
+-9.719999999999999
+-12.96
+-9.78
+-9.459999999999999
+-10.059999999999999
+-9.979999999999999
+-9.84
+-10.12
+-9.799999999999999
+-9.68
+-9.88
+-9.979999999999999
+-11.12
+-12.320000000000002
+-9.86
+-10.0
+-10.200000000000001
+-9.5
+-10.0
+-10.08
+-10.159999999999998
+-10.000000000000002
+-10.68
+-10.0
+-9.459999999999999
+-9.5
+-10.18
+-9.879999999999999
+-10.32
+-10.58
+-1.0000000000000002
+-10.319999999999999
+-16.520000000000007
+-10.18
+-11.100000000000001
+-10.0
+-9.719999999999999
+-10.299999999999999
+-9.76
+-9.92
+-10.0
+-10.0
+-11.76
+-9.879999999999999
+-10.18
+-9.66
+-9.5
+-9.459999999999999
+-10.959999999999999
+-10.12
+-10.0
+-10.520000000000001
+-9.94
+-8.940000000000001
+-15.980000000000004
+-9.68
+-10.1
+-10.059999999999999
+-9.979999999999999
+-10.059999999999999
+-9.399999999999999
+-10.059999999999999
+-9.399999999999999
+-5.460000000000007
+-10.76
+-9.459999999999999
+-9.64
+-9.459999999999999
+-9.5
+-12.660000000000002
+-9.86
+-12.380000000000003
+-9.62
+-10.28
+-10.28
+-10.18
+-9.34
+-9.26
+-10.959999999999999
+-9.399999999999999
+-10.059999999999999
+-9.459999999999999
+-9.7
+-9.479999999999999
+-10.059999999999999
+-10.059999999999999
+-9.36
+-10.54
+-9.899999999999999
+-9.58
+3.0800000000000143
+-12.400000000000002
+-10.540000000000001
+-12.440000000000001
+-9.379999999999999
+-10.7
+-11.860000000000001
+-14.020000000000003
+-12.14
+-10.1
+-10.059999999999999
+-9.5
+-10.28
+-10.12
+-10.9
+-8.700000000000001
+-11.040000000000001
+-13.820000000000004
+-9.58
+-9.719999999999999
+-9.66
+-9.459999999999999
+-16.180000000000007
+-10.62
+-9.940000000000001
+-9.940000000000001
+-9.42
+-9.819999999999999
+-9.580000000000002
+-10.04
+-10.02
+-10.820000000000002
+-9.899999999999999
+-9.3
+-9.68
+-10.08
+-12.060000000000002
+-9.5
+-13.260000000000002
+-12.320000000000002
+-13.620000000000005
+-10.34
+-9.719999999999999
+-10.0
+-9.4
+-12.14
+-10.559999999999999
+-10.0
+-9.860000000000001
+-9.44
+-11.42
+-9.4
+-10.64
+-9.9
+-9.12
+-11.600000000000001
+-9.7
+-10.2
+-10.78
+-9.959999999999999
+-10.059999999999999
+-10.14
+-10.76
+-11.06
+-10.64
+-14.540000000000004
+-10.02
+-10.479999999999999
+-10.24
+-9.459999999999999
+-10.0
+-10.0
+-9.819999999999999
+-10.0
+-12.060000000000002
+-10.6
+-13.320000000000006
+-10.66
+-10.0
+-9.04
+-11.1
+-10.04
+-11.860000000000001
+-11.620000000000001
+-9.459999999999999
+-11.02
+-10.459999999999999
+-11.88
+-10.68
+-10.28
+-9.42
+-9.86
+-9.72
+-9.399999999999999
+-9.459999999999999
+-10.0
+-9.46
+-14.620000000000001
+-10.42
+-9.92
+-10.02
+-10.059999999999999
+-10.88
+-10.1
+-9.92
+-10.059999999999999
+-9.42
+-9.799999999999999
+-9.9
+-10.1
+-10.059999999999999
+-10.46
+-12.700000000000001
+-10.34
+-10.64
+-9.98
+-9.66
+-10.52
+-10.54
+-9.66
+-11.08
+-7.700000000000002
+-10.600000000000001
+-9.659999999999998
+-10.0
+-10.0
+-9.459999999999999
+-9.459999999999999
+-10.059999999999999
+-10.0
+-9.64
+-10.82
+-9.459999999999999
+-10.059999999999999
+-10.059999999999999
+-9.600000000000001
+-9.399999999999999
+-10.44
+-12.600000000000001
+-12.100000000000001
+-10.299999999999999
+-9.28
+-10.059999999999999
+-10.120000000000001
+-9.520000000000001
+-10.700000000000001
+-9.719999999999999
+-10.94
+-9.719999999999999
+-9.459999999999999
+-9.879999999999999
+-9.68
+-9.479999999999999
+-13.360000000000001
+-9.44
+-10.059999999999999
+-13.440000000000001
+-11.2
+-9.459999999999999
+-10.059999999999999
+-10.42
+-9.459999999999999
+-9.44
+-10.059999999999999
+-10.0
+-11.44
+-9.879999999999999
+-10.059999999999999
+-10.059999999999999
+-9.280000000000001
+-10.42
+-12.06
+-9.260000000000002
+-10.34
+-9.459999999999999
+-9.8
+-10.059999999999999
+-9.4
+-9.459999999999999
+-10.399999999999999
+-10.159999999999998
+-12.3
+-8.900000000000002
+-10.059999999999999
+-10.059999999999999
+-11.620000000000001
+-13.180000000000001
+-10.54
+-10.28
+-8.799999999999999
+-10.0
+-11.500000000000002
+-10.02
+-11.620000000000001
+-10.26
+-10.059999999999999
+-9.459999999999999
+-10.3
+-9.64
+-9.739999999999998
+-10.36
+-10.66
+-9.819999999999999
+-9.459999999999999
+-9.58
+-9.28
+-9.2
+-10.379999999999999
+-11.580000000000002
+-9.68
+-9.36
+-11.64
+-9.5
+-10.0
+-9.4
+-10.16
+-10.18
+-10.26
+-10.219999999999999
+-9.86
+-10.059999999999999
+-10.08
+-10.219999999999999
+-9.42
+-11.08
+-10.059999999999999
+-10.02
+-9.86
+-11.16
+-9.7
+-8.92
+-8.520000000000001
+-9.399999999999999
+-10.18
+-10.0
+-10.059999999999999
+-13.040000000000001
+-10.38
+-12.260000000000002
+-9.84
+-10.58
+-10.1
+-12.100000000000001
+-5.359999999999999
+-9.639999999999999
+-9.84
+-9.98
+-9.58
+-9.76
+-8.78
+-9.459999999999999
+-9.76
+-9.799999999999999
+-9.7
+-11.080000000000002
+-9.36
+-13.240000000000002
+-11.22
+-12.260000000000002
diff --git "a/DQN_mulit_tensorflow_2/backup/4/\346\226\260\345\273\272\346\226\207\346\234\254\346\226\207\346\241\243.txt" "b/DQN_mulit_tensorflow_2/backup/4/\346\226\260\345\273\272\346\226\207\346\234\254\346\226\207\346\241\243.txt"
new file mode 100644
index 0000000..31db43f
--- /dev/null
+++ "b/DQN_mulit_tensorflow_2/backup/4/\346\226\260\345\273\272\346\226\207\346\234\254\346\226\207\346\241\243.txt"
@@ -0,0 +1,11 @@
+ r_wood = 0.3
+ r_powerup = 0.5
+ r_put_bomb = 0.3
+ r_put_bomb_near_enemy = 1
+ r_kick = 0.5
+ r_in_flame = -10
+ r_move = 0.02
+ r_stay = -0.04
+
+ reward = 0
+ 调整网络前
\ No newline at end of file
diff --git a/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA1000/FFA1000.data-00000-of-00001 b/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA1000/FFA1000.data-00000-of-00001
new file mode 100644
index 0000000..333fdef
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA1000/FFA1000.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA1000/FFA1000.index b/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA1000/FFA1000.index
new file mode 100644
index 0000000..0b4d91d
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA1000/FFA1000.index differ
diff --git a/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA1000/checkpoint b/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA1000/checkpoint
new file mode 100644
index 0000000..29be90e
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA1000/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "FFA1000"
+all_model_checkpoint_paths: "FFA1000"
diff --git a/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA1500/FFA1500.data-00000-of-00001 b/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA1500/FFA1500.data-00000-of-00001
new file mode 100644
index 0000000..cd32e3f
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA1500/FFA1500.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA1500/FFA1500.index b/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA1500/FFA1500.index
new file mode 100644
index 0000000..4eedd13
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA1500/FFA1500.index differ
diff --git a/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA1500/checkpoint b/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA1500/checkpoint
new file mode 100644
index 0000000..b7b6bee
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA1500/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "FFA1500"
+all_model_checkpoint_paths: "FFA1500"
diff --git a/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA2000/FFA2000.data-00000-of-00001 b/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA2000/FFA2000.data-00000-of-00001
new file mode 100644
index 0000000..5024135
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA2000/FFA2000.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA2000/FFA2000.index b/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA2000/FFA2000.index
new file mode 100644
index 0000000..2fb3f51
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA2000/FFA2000.index differ
diff --git a/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA2000/checkpoint b/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA2000/checkpoint
new file mode 100644
index 0000000..1dcab0c
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA2000/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "FFA2000"
+all_model_checkpoint_paths: "FFA2000"
diff --git a/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA2500/FFA2500.data-00000-of-00001 b/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA2500/FFA2500.data-00000-of-00001
new file mode 100644
index 0000000..cc440c6
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA2500/FFA2500.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA2500/FFA2500.index b/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA2500/FFA2500.index
new file mode 100644
index 0000000..1957af8
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA2500/FFA2500.index differ
diff --git a/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA2500/checkpoint b/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA2500/checkpoint
new file mode 100644
index 0000000..e1df453
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA2500/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "FFA2500"
+all_model_checkpoint_paths: "FFA2500"
diff --git a/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA3000/FFA3000.data-00000-of-00001 b/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA3000/FFA3000.data-00000-of-00001
new file mode 100644
index 0000000..1272df3
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA3000/FFA3000.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA3000/FFA3000.index b/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA3000/FFA3000.index
new file mode 100644
index 0000000..21d32d8
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA3000/FFA3000.index differ
diff --git a/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA3000/checkpoint b/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA3000/checkpoint
new file mode 100644
index 0000000..05be1ca
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA3000/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "FFA3000"
+all_model_checkpoint_paths: "FFA3000"
diff --git a/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA500/FFA500.data-00000-of-00001 b/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA500/FFA500.data-00000-of-00001
new file mode 100644
index 0000000..30b95d8
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA500/FFA500.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA500/FFA500.index b/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA500/FFA500.index
new file mode 100644
index 0000000..1d99931
Binary files /dev/null and b/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA500/FFA500.index differ
diff --git a/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA500/checkpoint b/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA500/checkpoint
new file mode 100644
index 0000000..10482c1
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/FFA500/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "FFA500"
+all_model_checkpoint_paths: "FFA500"
diff --git a/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/result.csv b/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/result.csv
new file mode 100644
index 0000000..ece9d37
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/result.csv
@@ -0,0 +1,3001 @@
+result
+2
+2
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+2
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+2
+0
+2
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+1
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+2
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+1
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+1
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+2
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+1
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+2
+1
+1
+0
+0
+0
+1
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+1
+0
+1
+0
+0
+1
+0
+1
+0
+0
+0
+0
+2
+1
+2
+1
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+1
+0
+1
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+2
+0
+0
+1
+2
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+1
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+1
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+2
+0
+0
+0
+0
+1
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+2
+0
+0
+2
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+2
+0
+0
+0
+0
+0
+1
+2
+0
+0
+1
+1
+1
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+1
+0
+1
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+2
+1
+0
+2
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+1
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+1
+0
+0
+0
+0
+0
+1
+1
+0
+0
+0
+0
+0
+1
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+1
+0
+0
+1
+0
+0
+1
+1
+0
+0
+0
+1
+0
+1
+1
+1
+0
+0
+2
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+1
+0
+0
+0
+0
+1
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+1
+0
+1
+0
+0
+0
+0
+2
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+1
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+1
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+2
+0
+1
+0
+0
+1
+2
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+1
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+2
+0
+1
+1
+1
+0
+0
+1
+0
+0
+1
+0
+0
+0
+2
+0
+0
+1
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+2
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+1
+0
+0
+0
+1
+0
+0
+2
+2
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+1
+1
+0
+1
+0
+1
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+1
+0
+0
+0
+0
+2
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+1
+1
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+2
+1
+0
+1
+0
+1
+0
+1
+0
+0
+0
+0
+0
+2
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+1
+1
+0
+1
+1
+0
+1
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+1
+0
+0
+1
+0
+0
+0
+0
+1
+0
+0
+0
+1
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+1
+0
+1
+0
+0
+0
+0
+1
+0
+0
+0
+2
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+1
+0
+2
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+1
+1
+2
+2
+0
+0
+0
+0
+1
+0
+1
+0
+0
+0
+0
+0
+1
+0
+1
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+2
+2
+0
+0
+0
+0
+2
+0
+0
+0
+1
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+1
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+1
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+1
+0
+1
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+1
+0
+0
+1
+0
+1
+2
+2
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+1
+1
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+1
+2
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+1
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+2
+0
+0
+0
+0
+1
+0
+0
+0
+2
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+2
+2
+1
+0
+1
+0
+0
+0
+0
+0
+0
+0
+1
+0
+1
+0
+0
+0
+0
+0
+0
+0
+1
+0
+1
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+1
+0
+0
+1
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+1
+2
+1
+0
+0
+0
+0
+0
+1
+0
+1
+2
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+1
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+1
+2
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+1
+0
+1
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+1
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+1
+0
+0
+0
+1
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+2
+0
+0
+2
+0
+0
+0
+0
+1
+2
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+0
+0
+1
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+0
+0
+1
+0
+0
+2
+0
+0
+0
+0
+0
+0
+0
+1
+0
+2
+0
+1
+0
+2
+0
+0
+1
+0
+0
+0
+2
+0
+1
+0
+0
+0
+0
+0
diff --git a/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/reward.csv b/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/reward.csv
new file mode 100644
index 0000000..2478e8e
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/backup/FFA1 32(3) 64(3) 64(3) 512 6/reward.csv
@@ -0,0 +1,3001 @@
+reward
+1.327
+1.121
+-0.9159999999999999
+0.5400000000000001
+-0.20499999999999996
+-0.30399999999999994
+-0.475
+-1.05
+-1.082
+-1.12
+-1.4930000000000003
+-1.073
+-1.248
+-1.4620000000000002
+-1.111
+-1.095
+-1.129
+-1.056
+-1.033
+-1.246
+-1.1800000000000002
+-1.292
+-1.114
+-1.2309999999999999
+-1.113
+-2.1400000000000006
+-1.112
+-1.9750000000000008
+-1.9670000000000005
+-1.092
+-1.1800000000000002
+-1.071
+-1.102
+-1.173
+-1.4100000000000001
+-1.4710000000000003
+-1.322
+-1.4030000000000002
+-1.267
+-1.077
+-1.3290000000000002
+-1.183
+-1.201
+-1.084
+-1.184
+-1.3760000000000001
+-1.2850000000000001
+-1.059
+-1.3180000000000047
+-1.2280000000000002
+-1.221
+-1.3510000000000002
+-1.098
+-1.261
+-1.4160000000000001
+-1.096
+-1.143
+-1.4689999999999999
+-1.014
+-1.168
+-1.116
+-1.336
+-1.035
+-1.148
+-1.083
+-1.247
+-1.4290000000000003
+-1.201
+-1.054
+-1.081
+-1.186
+-1.144
+-1.199
+-1.143
+-1.113
+-1.049
+-1.6540000000000004
+-1.6080000000000005
+-1.6600000000000004
+-1.5290000000000004
+-1.112
+-1.199
+-1.2080000000000002
+-1.207
+-1.034
+-1.468
+-1.3900000000000001
+-1.467
+-1.055
+-1.047
+-1.171
+-1.158
+-1.041
+-1.135
+-1.019
+-1.135
+-1.1179999999999999
+-1.019
+-1.17
+-1.204
+-1.066
+-1.6210000000000004
+-1.28
+-1.045
+-1.103
+-1.071
+-1.257
+-1.102
+-1.194
+-1.3750000000000002
+-1.173
+-1.469
+-1.108
+-1.167
+-1.2710000000000001
+-1.025
+-1.1219999999999999
+-1.118
+-1.244
+-1.0030000000000001
+-0.999
+-1.113
+-1.151
+-1.5970000000000004
+-1.288
+-1.06
+-1.071
+-1.296
+-1.068
+-1.067
+-1.125
+-1.295
+-1.326
+-1.3130000000000002
+-1.009
+-1.257
+-1.125
+-1.134
+-1.11
+-1.173
+-1.5100000000000002
+-1.356
+-1.387
+-1.135
+-1.4680000000000004
+-1.308
+-1.6010000000000004
+-1.083
+-2.4090000000000016
+-1.212
+-1.279
+-1.175
+-1.4300000000000002
+-1.2630000000000003
+-1.125
+-1.1079999999999999
+-1.133
+-1.2
+-1.071
+-1.052
+-1.3820000000000001
+-1.111
+-1.044
+-1.156
+-1.183
+-1.7130000000000005
+-1.116
+-1.021
+-1.095
+-1.059
+-1.057
+-1.082
+-1.129
+-1.165
+-1.131
+-1.075
+-1.197
+-1.106
+-1.151
+-1.031
+-1.3090000000000002
+-1.024
+-1.4820000000000002
+-1.179
+-1.099
+-1.5980000000000034
+-1.6750000000000005
+-1.4660000000000002
+-1.095
+-1.07
+-1.152
+-1.083
+-1.6230000000000002
+-1.459
+-1.144
+-1.115
+-1.4330000000000003
+-1.3510000000000002
+-1.0919999999999999
+-1.09
+-1.0920000000000003
+-1.4050000000000002
+-1.148
+-1.051
+-1.4280000000000004
+-1.5940000000000003
+-1.32
+-1.3630000000000002
+-1.282
+-1.032
+-1.1360000000000001
+-1.373
+-1.146
+-1.1059999999999999
+-1.143
+-1.221
+-1.095
+-1.129
+-1.153
+-1.3530000000000002
+-1.2760000000000002
+-1.244
+-1.051
+-1.2990000000000002
+-1.094
+-1.1199999999999999
+-1.3450000000000002
+-1.126
+-1.091
+-1.35
+-1.391
+-1.3520000000000003
+-1.24
+-1.095
+-1.086
+-1.0539999999999998
+-1.045
+-1.036
+-1.2089999999999999
+-1.0419999999999998
+-1.5520000000000005
+-1.226
+-1.103
+-1.18
+-1.229
+-1.119
+-1.1429999999999998
+-1.041
+-1.1219999999999999
+-1.125
+-1.0779999999999998
+-1.113
+-1.189
+-1.1199999999999999
+-1.213
+-1.057
+-1.3290000000000002
+-1.246
+-1.206
+-1.588
+-1.107
+-1.5320000000000005
+-1.023
+-1.4780000000000002
+-1.137
+-1.064
+-1.0110000000000001
+-1.4360000000000002
+-2.117000000000001
+-1.137
+-1.296
+-1.192
+-1.127
+-1.087
+-1.067
+-1.247
+-1.4940000000000002
+-1.106
+-1.4320000000000002
+-1.15
+-1.2890000000000001
+-1.2930000000000001
+-1.7830000000000037
+-1.3490000000000002
+-1.2080000000000035
+-1.3820000000000001
+-1.064
+-1.056
+-1.08
+-1.034
+-1.4290000000000003
+-1.1520000000000001
+-1.3250000000000002
+-1.5270000000000001
+-1.076
+-1.27
+-1.191
+-1.107
+-1.4070000000000003
+-1.292
+-1.092
+-1.083
+-1.3190000000000002
+-1.114
+-1.233
+-1.085
+-1.1360000000000001
+-1.5250000000000004
+-1.5950000000000002
+-1.435
+-1.2160000000000002
+-1.072
+-1.5490000000000004
+0.7739999999999999
+-1.045
+-1.2590000000000001
+-1.2790000000000001
+-1.6260000000000003
+-1.247
+-1.1199999999999999
+-1.215
+-1.072
+-1.3270000000000002
+-1.5410000000000001
+-1.8640000000000005
+-1.211
+-1.195
+-1.087
+-1.176
+-1.075
+-1.8020000000000005
+-1.109
+-1.461
+-1.027
+-1.158
+-1.2810000000000001
+-1.3230000000000002
+-1.4070000000000003
+-1.222
+-1.346
+-1.4740000000000002
+-1.047
+-1.296
+-1.346
+-1.187
+-1.066
+-1.9930000000000008
+-1.106
+-1.4650000000000003
+-1.136
+-1.7120000000000006
+-0.991
+-1.088
+-1.5480000000000005
+-1.5420000000000003
+-1.2520000000000002
+-1.155
+-1.276
+-1.3070000000000004
+-1.1700000000000002
+-1.315
+-1.1989999999999998
+-1.059
+-1.062
+-1.21
+-1.6540000000000004
+-1.5310000000000001
+0.7809999999999999
+-1.188
+-1.217
+-1.171
+-1.2460000000000033
+-1.2690000000000001
+-1.027
+-1.217
+-1.3790000000000004
+-1.2590000000000001
+-1.7310000000000003
+-1.229
+-1.2880000000000003
+-1.104
+-1.4950000000000003
+-1.082
+-1.4720000000000004
+-1.6540000000000004
+-1.1600000000000001
+-1.146
+-1.101
+-1.4840000000000004
+-1.3470000000000002
+-1.4060000000000001
+-1.363
+-1.3710000000000002
+-1.165
+-1.128
+-1.247
+-1.4370000000000003
+-1.205
+-1.1420000000000001
+-1.29
+-1.379
+-1.04
+-1.9800000000000009
+-1.156
+-1.198
+-1.066
+-1.18
+-1.2950000000000002
+-1.2
+-1.171
+-1.138
+-1.255
+-1.049
+-1.003
+-1.158
+-1.5380000000000003
+-1.171
+-1.3110000000000002
+-1.131
+-1.266
+-1.363000000000001
+-1.3310000000000002
+-1.3860000000000001
+-1.3470000000000002
+-1.186
+-1.223
+-1.6380000000000003
+-1.27
+-1.218
+-1.8750000000000004
+-1.104
+-1.4160000000000001
+-1.2739999999999998
+-1.079
+-1.7170000000000005
+-1.071
+-1.11
+-1.065
+0.6519999999999999
+-1.285
+-1.304
+-1.351
+-1.392
+-1.146
+-1.151
+-1.314
+-1.102
+-1.4200000000000004
+-1.247
+-1.071
+-1.5920000000000005
+-1.3280000000000003
+-1.119
+-1.391
+-1.1029999999999998
+-1.165
+-1.4110000000000003
+-1.039
+-1.4240000000000002
+-1.152
+-1.2
+-1.568
+-1.7470000000000003
+-1.084
+-1.162
+-1.3230000000000002
+-1.4740000000000004
+-1.1400000000000001
+-1.304
+-1.9860000000000055
+0.4199999999999998
+-0.5850000000000001
+-1.146
+-1.084
+-1.026
+-1.2730000000000001
+-1.063
+-1.008
+-1.0050000000000001
+-1.374
+-1.5680000000000003
+-1.4840000000000002
+-1.3010000000000002
+-1.2870000000000001
+-1.2229999999999999
+-1.129000000000001
+-1.069
+-1.4000000000000001
+-1.214
+-1.099
+-1.175
+-1.181
+-1.043
+-1.893000000000001
+-1.2840000000000003
+-2.1320000000000006
+-1.175
+-1.7620000000000007
+-1.107
+-1.1929999999999998
+-1.155
+-1.139
+-1.153
+-1.231
+-1.2120000000000002
+-1.2730000000000001
+-1.3160000000000003
+-1.008
+-1.3359999999999999
+-1.062
+-0.885
+-1.068
+-1.038
+-1.335
+-1.427
+-1.052
+-1.255
+-2.2670000000000012
+-1.1219999999999999
+-1.169
+-1.1360000000000001
+-1.4170000000000003
+-1.4690000000000005
+-1.159
+-1.1469999999999998
+-1.223
+-1.182
+-1.5970000000000004
+0.47199999999999964
+-1.027
+-1.3010000000000002
+-1.6330000000000005
+-1.211
+-1.4380000000000002
+-1.133
+-1.473
+-1.8660000000000005
+-1.055
+-1.081
+-1.2899999999999998
+-1.334
+-1.2080000000000002
+-1.5490000000000004
+-1.059
+-1.18
+-1.326
+-1.3410000000000006
+-1.162
+-0.975
+-1.3930000000000002
+-1.077
+-1.443
+-1.0819999999999999
+-1.1600000000000001
+-1.4680000000000002
+-1.2770000000000001
+-1.487
+-1.4120000000000001
+-1.112
+-1.1119999999999999
+-1.04
+-1.138
+-1.138
+-1.063
+-1.197
+-1.141
+-1.7370000000000005
+-1.4290000000000003
+-1.6080000000000003
+-1.3390000000000046
+-1.082
+-1.5110000000000001
+-1.189
+0.8629999999999999
+-1.054
+-1.1760000000000015
+-1.3170000000000002
+-1.151
+-1.332
+-1.5690000000000004
+-1.179
+-1.6630000000000005
+-1.2449999999999999
+-1.066
+-1.111
+-1.5620000000000003
+-1.198
+-0.995
+-1.375
+-1.0959999999999999
+-1.6300000000000003
+-1.3200000000000003
+-1.3450000000000002
+-1.5970000000000004
+-1.067
+-1.157
+-1.4930000000000003
+-1.326
+-1.5850000000000035
+-1.119
+-1.3350000000000002
+-1.348
+-1.6780000000000004
+-1.078
+-1.2750000000000001
+-1.1560000000000001
+-1.1960000000000002
+-1.252
+-1.1019999999999999
+-1.074
+-1.4010000000000002
+-2.2810000000000006
+-1.5720000000000003
+-1.5330000000000004
+-1.059
+-1.1219999999999999
+-1.147
+-2.1460000000000026
+-1.4860000000000002
+-1.6960000000000006
+-1.4200000000000002
+-1.9880000000000004
+-1.0120000000000011
+0.23099999999999954
+-1.58
+-1.6320000000000003
+-1.024
+-1.093
+-1.105
+-1.6910000000000003
+-1.3440000000000003
+-1.0139999999999998
+-1.3820000000000001
+0.765
+-1.115
+-1.166
+-1.149
+-1.0839999999999999
+-1.2690000000000001
+-1.4270000000000003
+-1.0950000000000015
+-1.137
+-1.022
+-1.3490000000000002
+-1.4890000000000025
+-1.15
+-1.1280000000000001
+-1.7150000000000003
+-1.094
+-1.306
+-1.3930000000000002
+-1.26
+-1.241
+-1.203
+-1.5070000000000001
+-1.8250000000000006
+-1.181000000000002
+-1.1289999999999998
+-1.085
+-1.067
+-1.202
+-1.4220000000000002
+-1.3830000000000002
+-1.25
+-1.4660000000000002
+-1.7180000000000004
+-1.222
+-1.4830000000000003
+-1.262
+-1.125
+-1.089
+-1.6340000000000003
+-1.2120000000000002
+-1.093
+-1.2480000000000002
+0.6349999999999999
+-1.4890000000000003
+-1.368
+-1.326
+-1.074
+-1.2300000000000002
+-1.258
+-1.332
+-1.29
+-1.3970000000000002
+-1.3410000000000002
+-2.0490000000000004
+-1.2
+-1.278
+-1.312
+-1.4960000000000002
+-0.6480000000000004
+-1.277
+-1.405
+-1.191
+-1.3510000000000002
+-1.138
+-1.439
+-1.3450000000000002
+-1.028
+-1.239
+-1.487
+-1.246
+-1.3850000000000002
+-1.2249999999999999
+-1.017
+-1.295
+-1.229
+-1.5690000000000004
+-1.071
+-1.5420000000000003
+-1.3410000000000002
+-1.8730000000000007
+-1.106
+-1.174
+-1.048
+-1.2710000000000001
+-0.826
+-1.4780000000000002
+-1.348
+-1.0419999999999998
+-1.318
+-1.4760000000000002
+0.5569999999999998
+-1.1219999999999999
+-1.115
+-1.151
+-1.216
+-1.214
+-1.233
+-0.11200000000000121
+-1.3940000000000001
+0.06399999999999939
+-1.075
+-1.34
+-0.9550000000000001
+-1.5700000000000003
+-1.054
+-1.7310000000000019
+-0.9249999999999999
+-1.4260000000000002
+-1.1
+-1.5240000000000005
+-1.2300000000000004
+-1.047
+-1.2389999999999999
+-1.0479999999999998
+-1.354
+-1.254
+-1.1139999999999999
+-1.107
+-1.097
+-1.4690000000000003
+1.156
+-1.9220000000000006
+-1.208
+-0.944
+-1.4850000000000003
+-1.1540000000000001
+-1.1
+-1.5560000000000005
+-2.080000000000001
+-1.1239999999999999
+-1.5390000000000001
+-1.219
+-1.223
+-1.4980000000000002
+-1.3140000000000003
+-1.026
+-1.1140000000000003
+-1.3430000000000002
+-1.6840000000000006
+-1.8820000000000006
+-1.145
+-1.8480000000000005
+-1.308
+-1.4460000000000002
+-1.4460000000000004
+-1.7920000000000003
+-2.5000000000000027
+-1.9390000000000005
+-1.4800000000000004
+-1.13
+-1.368
+-1.5350000000000004
+-1.3390000000000002
+-1.328
+-2.0740000000000007
+-1.4810000000000003
+-1.17
+-1.5750000000000004
+-1.33
+-1.1600000000000001
+-1.083
+-1.314
+-1.1800000000000002
+-1.362
+-1.2890000000000001
+-1.105
+-1.5270000000000001
+-1.4780000000000002
+-1.4630000000000003
+-1.313
+0.14799999999999935
+-1.3180000000000003
+-1.5160000000000002
+-1.146
+-1.5490000000000004
+-1.055
+-1.3010000000000002
+-1.115
+-1.079
+-1.134
+-1.352
+-1.156
+-1.102
+-1.4690000000000003
+-1.338
+-1.3910000000000002
+-1.107
+-1.4090000000000003
+-1.1059999999999999
+-1.128
+-1.242
+-1.051
+-1.5650000000000002
+-1.3540000000000005
+-1.1099999999999999
+-1.5520000000000003
+-1.053
+-1.239
+-1.7140000000000006
+-1.5110000000000001
+-1.19
+-1.4350000000000003
+-1.217
+-1.4490000000000003
+-1.1219999999999999
+-1.312
+-1.3960000000000001
+-1.243
+-1.29
+-1.142
+-1.7410000000000005
+-1.2489999999999999
+-1.3960000000000004
+-1.137
+-1.344
+-1.04
+-1.156
+-1.3450000000000002
+-1.292
+-1.104
+-1.154
+-1.127
+-1.182
+-1.0399999999999998
+-1.3250000000000002
+-0.979
+-1.3150000000000002
+-1.164
+-1.082
+0.7449999999999999
+-1.248
+-1.425
+-1.3960000000000001
+-1.284
+-1.399
+-1.5830000000000002
+-1.3120000000000003
+-1.217
+-1.6590000000000003
+-1.135
+-1.179
+-1.7250000000000005
+0.23999999999999955
+-1.4100000000000004
+-1.6490000000000005
+-1.7520000000000004
+-1.5830000000000004
+-1.176
+-1.162
+-1.175
+-1.059
+-1.7470000000000003
+-1.2439999999999998
+-1.7920000000000005
+-1.191
+-1.2690000000000001
+-1.066
+-1.255
+-1.5130000000000001
+0.1749999999999995
+-1.37
+-1.7240000000000006
+-1.6900000000000004
+-1.4560000000000002
+-1.2309999999999999
+-1.231
+-1.2770000000000001
+-1.04
+-1.0819999999999999
+-1.4350000000000003
+-1.1179999999999999
+-0.6450000000000005
+-1.133
+-1.151
+-1.4770000000000003
+-1.5780000000000003
+-1.2890000000000001
+-1.3710000000000002
+-1.4810000000000003
+-1.151
+-1.342
+-1.072
+-1.5120000000000002
+-1.322
+-1.319
+-0.3950000000000002
+-1.29
+-1.5650000000000035
+-1.4800000000000002
+-1.0439999999999998
+-1.2600000000000016
+-1.31
+-1.2040000000000002
+-1.9080000000000006
+-1.4390000000000003
+-1.5740000000000003
+-1.5260000000000002
+-1.8760000000000008
+-1.2570000000000001
+-1.5380000000000003
+-1.5370000000000004
+-1.5710000000000002
+-1.193
+-1.463
+-1.6350000000000002
+-1.338
+-1.075
+-1.089
+-1.7700000000000005
+-1.9350000000000005
+-1.156
+-1.263
+-1.183
+-0.887
+-0.8130000000000044
+-1.162
+-1.089
+-1.032
+-1.8060000000000005
+-1.2690000000000001
+-1.8590000000000007
+-1.9030000000000005
+-1.7810000000000006
+-1.5290000000000004
+-1.198
+-1.249
+-1.069
+-1.6110000000000002
+-1.1560000000000001
+-1.0419999999999998
+-1.5030000000000001
+-1.5480000000000005
+-1.4620000000000002
+-1.2380000000000002
+-1.2510000000000001
+-1.318
+-1.5820000000000003
+-1.5200000000000005
+-1.4680000000000004
+-0.7560000000000001
+-1.288
+-1.37
+-1.178
+-1.27
+-1.201
+-1.278
+-1.3590000000000002
+-0.8880000000000007
+-1.334
+-1.2340000000000002
+-1.059
+-1.051
+-0.6110000000000003
+-2.1530000000000014
+-1.8100000000000005
+-1.087
+-1.135
+-1.5120000000000002
+-1.094
+-1.375
+-1.189
+-1.112
+-1.0839999999999999
+-1.022
+-1.237
+-1.181
+-1.108
+-0.06700000000000128
+-1.2530000000000003
+-1.6970000000000005
+-1.076
+-1.4590000000000003
+-1.6620000000000006
+-1.3410000000000002
+-1.0310000000000001
+-2.2040000000000024
+-1.455
+-1.18
+-1.1389999999999998
+-0.911
+-1.3610000000000002
+-1.272
+-1.0539999999999998
+-1.084
+-1.25
+-1.0679999999999998
+-0.994
+-1.05
+0.7529999999999999
+-1.26
+-1.1440000000000001
+-1.3690000000000002
+-1.6920000000000004
+-1.2890000000000001
+-1.6190000000000007
+-1.088
+-1.045
+-2.1050000000000013
+-1.3520000000000003
+-1.419
+-1.1989999999999998
+-1.2440000000000009
+-1.165
+-1.2129999999999999
+-1.5490000000000004
+0.6689999999999999
+-1.5820000000000003
+-1.5270000000000001
+-1.4320000000000002
+-1.7430000000000019
+-1.332
+-1.102
+-1.271
+-1.207
+-1.3330000000000002
+-1.093
+-0.5340000000000007
+-1.3460000000000003
+-1.4930000000000003
+-1.099
+-1.7730000000000006
+-1.4140000000000001
+-1.158
+-1.086
+-1.6120000000000003
+-1.4150000000000003
+-1.113
+-1.584
+-1.5550000000000002
+-1.4710000000000003
+-0.8580000000000007
+-0.8130000000000005
+-1.117
+-1.766000000000003
+-1.105
+-1.102
+-1.1079999999999999
+-1.2040000000000002
+-1.036
+-1.143
+-1.3130000000000002
+-1.095
+-1.423
+-1.2550000000000001
+-1.3810000000000004
+-1.8510000000000004
+-1.087
+-1.09
+-0.7280000000000005
+-1.193
+-1.268
+-1.155
+-1.115
+-1.1880000000000002
+-1.5090000000000003
+-1.083
+-1.143
+-1.3170000000000026
+-0.5100000000000003
+-1.2190000000000003
+-1.4830000000000005
+-1.7110000000000003
+-1.302
+-0.6389999999999999
+-1.022
+-1.4370000000000003
+-1.3550000000000002
+-1.3850000000000002
+-1.7030000000000007
+0.5559999999999998
+-1.127
+-1.3430000000000002
+-1.028
+-1.5780000000000003
+-1.18
+-1.071
+-1.1889999999999998
+-1.218
+-1.0739999999999998
+-1.4720000000000002
+-1.6400000000000003
+-1.3160000000000003
+-1.8870000000000007
+-1.18
+-1.212
+-1.073
+-1.107
+-1.338
+-1.21
+-1.5340000000000003
+-1.288
+-1.4130000000000003
+-1.113
+-1.3390000000000002
+-1.4980000000000002
+-1.102
+-1.2970000000000015
+-1.127
+-1.171
+-1.075
+-1.322
+-0.8710000000000007
+-1.399
+-1.16
+-1.3280000000000003
+-1.061
+-1.32
+-1.343
+-1.33
+-1.167
+-0.5600000000000003
+-1.4430000000000003
+0.6089999999999998
+-0.974
+-1.1249999999999998
+-1.8380000000000005
+-1.4590000000000003
+-0.32900000000000174
+-1.049
+-1.226
+-1.7380000000000004
+-1.6060000000000003
+-1.045
+-1.33
+-1.5350000000000004
+-2.065000000000001
+-1.254
+-1.3630000000000002
+-1.5320000000000005
+-1.5120000000000002
+-1.085
+-1.7090000000000005
+0.958
+-1.325
+-0.996
+-1.3850000000000038
+-1.7410000000000005
+-1.6480000000000004
+-1.082
+-1.6640000000000004
+-2.3170000000000015
+-1.211
+-0.7340000000000004
+-2.718000000000004
+-1.23
+-1.001
+-1.218
+-1.5370000000000004
+-1.4310000000000003
+-0.5440000000000004
+-1.5450000000000004
+-1.7370000000000005
+-1.3220000000000003
+-1.1059999999999999
+0.21899999999999953
+-1.7820000000000005
+-1.208
+-1.6580000000000004
+-1.252
+-1.7470000000000003
+-2.5550000000000006
+-1.071
+-1.147
+-1.349
+-1.227
+-1.2429999999999999
+-1.168
+-1.329
+-1.3240000000000003
+-1.2590000000000001
+-1.069
+-1.107
+-1.083
+-2.2110000000000007
+-1.241
+-1.174
+-1.2960000000000003
+-1.2890000000000001
+-1.9660000000000006
+-1.266
+-1.2890000000000001
+-1.201
+-1.117
+-1.096
+-1.5060000000000004
+-1.045
+-1.2130000000000007
+-1.043
+-1.237
+-1.6280000000000006
+0.8559999999999999
+-1.163
+-1.4850000000000005
+-1.158
+-1.1900000000000002
+-1.163
+-1.361
+-1.3360000000000003
+-1.5770000000000004
+-1.043
+-1.235
+-1.4860000000000002
+-1.8480000000000005
+-1.7620000000000005
+-1.242
+-1.141
+-0.9420000000000001
+-1.2890000000000001
+-1.36
+-0.1200000000000001
+-1.203
+-0.913
+-1.7110000000000003
+-1.131
+-1.8170000000000004
+-1.246
+-1.3910000000000002
+-1.9420000000000006
+0.7009999999999998
+-1.451
+-1.3530000000000002
+-1.117
+-1.6280000000000001
+-1.322
+-1.235
+-1.045
+-1.4000000000000004
+-1.119
+-1.152
+-1.3990000000000005
+-2.065000000000002
+-1.105
+-1.1150000000000009
+0.7079999999999997
+-0.5400000000000004
+-0.23400000000000032
+-1.2890000000000001
+-1.229
+-1.3350000000000002
+-1.0040000000000013
+-1.286
+-1.065
+-1.088
+-1.207
+-1.4050000000000002
+-1.130000000000002
+-1.082
+-1.1320000000000001
+-1.4150000000000003
+-1.118
+-1.411
+-1.0190000000000001
+-1.4330000000000003
+-1.207
+-1.119
+-1.6030000000000006
+-1.175
+-1.5220000000000002
+-1.228
+-0.7340000000000005
+-1.4540000000000002
+-0.14699999999999996
+-1.3690000000000002
+-0.9750000000000006
+-1.302
+-1.047
+-0.6100000000000004
+-1.1300000000000001
+-0.7120000000000004
+-1.4840000000000002
+-1.111
+-1.3470000000000002
+-1.103
+0.6119999999999999
+-0.4840000000000003
+0.911
+-1.3200000000000025
+0.37999999999999967
+-1.293
+-0.6090000000000001
+-1.08
+-1.5770000000000004
+-1.3250000000000002
+-1.091
+-1.4880000000000002
+-1.3370000000000002
+-1.168
+-1.6000000000000003
+-1.1860000000000002
+-1.202
+-1.306
+-1.138
+-1.6710000000000003
+-1.0679999999999998
+-1.076
+-1.5140000000000002
+-1.262
+-1.243
+-1.4600000000000002
+-1.5530000000000004
+-1.3870000000000002
+-1.1500000000000001
+-1.184
+-1.236
+-1.387
+-1.053
+-1.066
+-1.031
+-0.7700000000000006
+-1.0439999999999998
+-1.1589999999999998
+-1.3550000000000002
+-1.215
+-1.148
+-1.2480000000000002
+-1.309
+-1.2970000000000022
+-1.26
+-1.037
+-0.968
+-1.7940000000000005
+-0.8370000000000005
+-1.6060000000000003
+-1.071
+-1.8140000000000005
+-1.046
+-1.114
+-1.172
+-1.5970000000000004
+-1.1219999999999999
+-1.142
+-0.1400000000000058
+-1.2140000000000002
+-1.4600000000000004
+-1.181
+-1.096
+-0.7020000000000004
+-1.023
+-0.21099999999999997
+-1.3800000000000003
+-1.5510000000000004
+-1.204
+-1.8410000000000006
+-1.12
+-1.274
+-1.4930000000000003
+-1.062
+-0.6010000000000002
+-1.1920000000000004
+-2.5490000000000013
+-1.2950000000000002
+-1.4000000000000006
+-1.213
+-0.7920000000000005
+-1.0890000000000013
+-1.3550000000000004
+-1.2790000000000001
+-1.6380000000000003
+-1.058
+-1.211
+-1.374
+-1.045
+-0.95
+-1.1450000000000002
+-1.7270000000000003
+-1.3210000000000002
+0.6519999999999999
+-1.283
+-1.183
+-1.6720000000000004
+-0.44900000000000034
+-0.981
+-1.6350000000000002
+-1.051
+-1.35
+-1.5770000000000004
+-1.2469999999999999
+-1.2970000000000002
+-1.2240000000000002
+-1.3070000000000002
+-0.9949999999999999
+-1.5350000000000001
+-1.288
+-1.8190000000000004
+-1.2040000000000002
+-1.2710000000000001
+-1.4170000000000003
+-0.9279999999999999
+-1.3900000000000001
+-1.7400000000000004
+-1.286
+0.5209999999999997
+-0.913
+-1.1480000000000001
+-1.188
+-0.5020000000000003
+-0.9169999999999998
+-1.0240000000000045
+-1.101
+0.7549999999999999
+-1.1340000000000001
+-1.4160000000000004
+-1.3120000000000003
+-1.5820000000000003
+-1.5720000000000003
+-1.6300000000000003
+-1.3330000000000002
+-1.4460000000000002
+-1.114
+-1.144
+-1.119
+-0.969
+-0.13200000000000003
+-1.6480000000000006
+-1.113
+-1.4800000000000002
+-1.4370000000000003
+-1.2850000000000001
+-1.5070000000000001
+-1.4290000000000003
+-1.3230000000000002
+-1.282
+-1.1030000000000013
+-1.6680000000000004
+-1.0250000000000001
+-1.6880000000000004
+-1.4280000000000002
+-1.063
+-0.9560000000000008
+-1.303
+-1.455
+-1.334
+-1.4680000000000002
+-0.983
+0.5329999999999996
+-1.2690000000000001
+-1.0440000000000003
+-0.7720000000000007
+-0.34700000000000286
+-1.2770000000000001
+-1.097
+0.2819999999999996
+-1.318
+-1.429
+-1.4000000000000001
+-1.5060000000000002
+-1.037
+-1.4260000000000002
+-1.276
+-1.137
+-1.11
+-1.279
+0.13499999999999945
+-1.3690000000000002
+-1.074
+-1.4840000000000004
+-1.3880000000000001
+-1.0
+-1.3120000000000003
+-1.356
+-1.3670000000000004
+-1.056
+-1.025
+-1.051
+-1.213000000000001
+-1.5810000000000004
+-1.091
+-2.0070000000000006
+-0.6950000000000004
+-1.2320000000000002
+-0.9629999999999999
+-1.3370000000000002
+-1.3800000000000001
+-0.3300000000000005
+-1.274
+-1.119
+-0.6840000000000002
+-1.054
+-1.265
+-1.4790000000000003
+-1.6700000000000004
+-1.4520000000000002
+-1.447
+-1.225
+-1.196
+-0.879
+-0.973
+-0.6520000000000005
+-1.572
+-1.067
+-1.3100000000000003
+-1.087
+-1.282
+-1.3000000000000003
+-1.159
+-1.365
+-1.049
+-1.4250000000000003
+-1.067
+-1.166
+-1.2530000000000001
+-1.023
+-1.1130000000000013
+-0.5380000000000004
+-1.115
+-1.0230000000000001
+-1.076
+-1.038
+-1.2810000000000001
+-1.6390000000000005
+1.084
+-1.5760000000000003
+-1.183
+-1.9470000000000007
+-1.111
+0.7619999999999998
+-1.139
+-1.14
+-1.1820000000000002
+-1.051
+-0.7430000000000002
+-1.049
+-0.4800000000000003
+-1.1660000000000001
+-1.054
+-1.065
+-1.0859999999999999
+-1.233
+-1.004
+-1.327
+-1.3860000000000003
+-1.0
+-1.4520000000000002
+-1.8630000000000007
+-1.242
+-1.164
+-1.166
+-0.5020000000000004
+-1.144
+-0.987
+-1.266
+-1.5480000000000003
+-1.5790000000000002
+-1.153
+-1.142000000000004
+-1.121
+-1.222
+-1.2650000000000001
+0.831
+-1.4570000000000003
+-1.086
+0.8409999999999999
+-1.2350000000000003
+-1.038
+-1.4650000000000003
+-1.7010000000000005
+-0.6150000000000003
+-1.3130000000000002
+-1.061
+-1.217
+-1.3910000000000002
+-1.081
+-1.175
+-1.049
+-1.239
+-1.0280000000000002
+-1.4090000000000005
+-1.2
+-1.5580000000000003
+-1.3720000000000003
+-1.4890000000000003
+-1.121
+-1.3780000000000001
+-0.6610000000000038
+-1.2879999999999998
+-1.7500000000000004
+-1.155
+0.5139999999999997
+-1.3830000000000005
+-1.108
+-1.9750000000000008
+-0.7989999999999999
+-1.189
+-0.7020000000000003
+0.6899999999999998
+-1.228
+-1.151
+-0.6200000000000006
+-0.9160000000000007
+-0.31600000000000017
+-0.2099999999999999
+-1.4130000000000003
+-1.4940000000000002
+-1.4430000000000003
+-1.3530000000000002
+-1.2590000000000001
+-1.004
+-1.2220000000000002
+-1.2280000000000002
+-1.8260000000000005
+-1.0510000000000002
+-1.3280000000000003
+-1.4440000000000004
+-1.252
+-1.234
+-1.2560000000000002
+-1.03
+-0.774
+-1.0979999999999999
+0.7469999999999999
+-1.112
+-1.5260000000000002
+-0.2780000000000002
+-1.097
+-1.291
+-1.063
+-1.1720000000000002
+-1.3510000000000002
+-1.267
+-1.1840000000000004
+-1.3199999999999998
+-1.441
+-1.046
+-1.4250000000000003
+-1.172
+-1.139
+-0.943
+-1.047
+-1.067
+-0.999
+-0.894
+-1.1460000000000012
+-1.1400000000000001
+-1.072
+-1.328
+-0.46500000000000036
+-1.091
+-0.4340000000000002
+-1.1960000000000002
+-1.204
+-1.282
+-1.4640000000000002
+-0.6810000000000004
+-1.229
+-1.4680000000000004
+-1.167
+-1.209
+-1.109
+-1.033
+-0.996
+-1.3380000000000016
+-1.208
+-0.863
+-1.3370000000000002
+-0.9709999999999999
+-1.079
+-1.233
+-1.3990000000000002
+-1.2550000000000026
+-1.081
+-1.079
+-1.236
+-1.164
+-1.4800000000000002
+-0.7200000000000004
+-1.286
+-1.8840000000000008
+-1.183
+-1.4180000000000001
+-0.885
+-1.3890000000000002
+-1.091
+-1.093
+-1.13
+-1.1580000000000001
+-1.5020000000000002
+-1.187
+-1.7810000000000001
+-1.0579999999999998
+-0.9699999999999999
+-1.221
+-1.089
+-0.9450000000000001
+-0.32300000000000034
+-1.3010000000000002
+0.5229999999999997
+-0.9450000000000006
+-1.1469999999999998
+0.0799999999999994
+-1.205
+-1.5210000000000004
+-1.4530000000000003
+-1.0819999999999999
+-0.8170000000000005
+-1.049
+-1.334
+-1.7660000000000005
+-1.3619999999999999
+-1.1509999999999998
+-1.2690000000000001
+-1.1110000000000009
+-1.7310000000000003
+-1.158
+-1.4540000000000002
+-1.301000000000001
+-0.11199999999999985
+-1.06
+-1.226
+-1.247
+-1.4610000000000003
+-1.094
+-1.2449999999999999
+-1.5170000000000003
+-1.312
+-2.2170000000000005
+-1.318
+-1.3370000000000002
+-1.3730000000000002
+0.9089999999999998
+-0.055000000000000014
+-1.2900000000000003
+-0.8420000000000001
+-1.05
+-1.221
+-2.3200000000000016
+-1.0040000000000007
+-0.13199999999999987
+-1.58
+-1.6470000000000002
+-1.194
+-1.4040000000000004
+-1.332
+-1.0270000000000015
+-1.4350000000000003
+-1.126
+-0.9960000000000001
+-0.3120000000000004
+-1.055
+-1.252
+-1.6680000000000006
+-1.105
+-1.048
+-0.7480000000000001
+-1.189
+-1.7430000000000005
+-1.169
+-1.295
+-1.5250000000000004
+-1.125
+-0.986
+-1.5120000000000025
+-1.171
+-1.228
+-1.2460000000000002
+-1.5100000000000002
+-1.235
+-1.239
+-1.395
+-0.813
+-1.364
+-1.053
+-1.351
+-1.076
+-1.1469999999999998
+-1.9540000000000006
+-0.5390000000000004
+-1.22
+-1.7320000000000002
+-1.28
+-2.1180000000000008
+0.8209999999999997
+-1.079
+-1.107
+-1.103
+-1.4370000000000003
+-1.2209999999999999
+-0.9720000000000006
+-1.107
+-1.5810000000000004
+-0.08799999999999993
+-1.075
+-1.8790000000000004
+-0.5260000000000004
+-0.9680000000000007
+-1.136
+-1.0590000000000002
+-1.2559999999999998
+-0.42500000000000043
+-1.4360000000000002
+-0.7610000000000006
+-0.7290000000000004
+-1.992000000000002
+-1.189
+-1.175
+0.5689999999999997
+-0.6810000000000004
+-1.8600000000000003
+-1.1320000000000001
+-1.3850000000000002
+-1.177
+-1.3590000000000002
+-0.938
+-1.4590000000000003
+-1.0519999999999998
+-1.043
+-1.4200000000000002
+-1.097
+-0.3930000000000004
+-1.041
+-0.1720000000000002
+-1.03
+-1.115
+-1.342
+-2.581000000000002
+-0.9190000000000005
+-1.0939999999999999
+-0.33300000000000013
+-1.09
+-1.5200000000000002
+-1.351
+-0.6710000000000003
+-1.3210000000000002
+-1.4180000000000001
+-1.339
+-1.124
+-1.2620000000000002
+-0.99
+-1.13
+-1.4600000000000004
+-0.5960000000000003
+-1.077
+-0.8490000000000001
+-0.8160000000000005
+-0.694
+-0.8000000000000005
+-1.6570000000000003
+-1.239
+-1.05
+-1.248
+1.139
+-1.1569999999999998
+-1.285
+-1.6240000000000003
+0.9519999999999995
+-0.975
+-1.15
+-1.173
+-1.053
+-0.7889999999999999
+-1.184
+-1.5490000000000004
+-1.1280000000000001
+-0.9390000000000001
+-1.9380000000000006
+-1.294
+-0.6120000000000001
+-1.5670000000000004
+-1.4100000000000001
+-1.3850000000000002
+-1.014
+-0.781
+-1.041
+-0.6440000000000003
+-0.2540000000000003
+-1.2260000000000002
+0.37599999999999967
+-1.2670000000000001
+-1.077
+-1.2750000000000004
+-1.4750000000000003
+-0.749
+-1.2550000000000001
+-1.3120000000000003
+-1.36
+-1.266
+-1.067
+-1.148
+-1.153
+0.4499999999999994
+-1.3250000000000002
+-1.4110000000000023
+-1.07
+-1.057
+-0.714
+-1.259
+-1.3350000000000002
+0.7529999999999998
+-1.3090000000000002
+-1.3640000000000003
+-1.173
+-1.046
+-1.162
+1.0210000000000001
+-1.4330000000000003
+-0.9400000000000001
+-1.2870000000000001
+-0.7430000000000002
+-0.49400000000000027
+0.8899999999999999
+0.9519999999999998
+-1.244
+-1.064
+-0.891
+-1.3570000000000002
+-1.1480000000000001
+-1.195
+-0.9320000000000002
+-1.3780000000000001
+-0.8450000000000002
+0.5409999999999997
+-1.3290000000000002
+-1.254
+-1.0559999999999998
+-1.205
+-0.3770000000000003
+-0.5980000000000003
+-1.372
+-0.011000000000000343
+-1.302
+-1.171
+-1.3600000000000003
+-1.051
+-1.154
+-1.018
+-1.228
+-1.038
+-1.8990000000000005
+-1.1129999999999998
+-0.44400000000000006
+-0.482
+-1.374
+1.0039999999999998
+-1.04
+-1.09
+0.6009999999999998
+-1.1360000000000001
+-1.1700000000000013
+-1.1030000000000009
+-1.2630000000000008
+-1.8380000000000005
+-0.891
+-0.3000000000000001
+-0.999
+-1.0330000000000001
+-0.911000000000001
+-0.804
+-0.5240000000000001
+-1.187
+1.0399999999999998
+-1.095
+-1.5140000000000002
+0.11599999999999984
+-1.035
+-0.6430000000000003
+-1.6520000000000006
+-1.085
+-1.3670000000000002
+-1.8790000000000004
+-1.079
+-2.013000000000001
+-0.8480000000000001
+-1.247
+-1.2510000000000001
+0.936
+-1.327
+-1.064
+-0.982
+-1.1139999999999999
+-1.236
+-1.3790000000000002
+-0.9859999999999999
+-1.112
+-1.439
+-1.1640000000000004
+-1.1219999999999999
+-1.5200000000000005
+0.702
+-1.125000000000002
+-1.3710000000000002
+-0.8120000000000003
+-1.2920000000000003
+-1.038
+-1.052
+-1.206
+-0.8619999999999999
+-0.5770000000000001
+-1.0010000000000001
+-1.062
+-1.8780000000000006
+-0.963
+-1.299
+0.25799999999999945
+-1.7910000000000004
+-1.348
+-1.119
+0.8409999999999997
+-1.19
+-0.2710000000000001
+-0.902
+-1.7960000000000005
+-1.207
+-1.4050000000000002
+-0.92
+-1.0419999999999998
+-1.3940000000000001
+-1.154
+-0.852
+-0.9570000000000007
+-1.5110000000000001
+-0.6620000000000001
+-0.985
+-1.5140000000000002
+-1.082
+-0.8460000000000004
+-0.8250000000000002
+-1.3820000000000001
+-1.3530000000000002
+-1.3130000000000002
+-1.216
+-1.2900000000000003
+-0.5820000000000002
+-0.737
+-1.116
+-0.9999999999999999
+-1.213
+-0.5050000000000003
+-1.082
+-0.8060000000000004
+-0.381
+-0.5850000000000003
+-1.1189999999999998
+-1.354
+0.8959999999999999
+0.46799999999999975
+0.2039999999999964
+-1.3510000000000002
+-1.091
+-1.19
+-1.3230000000000002
+-1.3680000000000014
+-1.051
+-1.9620000000000006
+-1.282
+-1.099
+-1.3230000000000002
+-1.071
+-1.1880000000000002
+-1.222
+-1.112
+-0.835
+-1.308
+-0.7670000000000002
+-0.8430000000000002
+-1.4710000000000003
+-1.2510000000000001
+-0.998
+-1.4510000000000003
+-1.2690000000000001
+-0.7780000000000005
+-1.213
+-1.3570000000000002
+-1.062
+-1.034
+-0.8160000000000005
+-0.9020000000000001
+-0.8580000000000001
+-2.1510000000000016
+-1.232
+-1.069
+-1.068
+-1.182
+-0.43000000000000027
+-1.103
+-1.6240000000000003
+-1.27
+-0.378
+-1.168
+-1.2429999999999999
+-1.057
+-1.296
+-0.5100000000000002
+-0.44100000000000006
+-0.9760000000000001
+-1.094
+-1.4000000000000001
+0.0019999999999946727
+-1.221
+-1.2299999999999998
+0.10799999999999971
+-1.4170000000000005
+-1.074
+-0.6050000000000001
+-1.146
+-1.3530000000000002
+-1.303
+-0.81
+-0.8050000000000006
+-0.5320000000000004
+-1.069
+-0.9490000000000007
+-1.09
+-0.7210000000000006
+-1.154
+-0.8040000000000006
+-1.359
+-1.193
+-1.036
+-1.3990000000000002
+-1.075
+-1.6220000000000003
+-0.6580000000000003
+-0.963
+-1.034
+-1.101
+-1.4220000000000002
+-0.8580000000000001
+-1.131
+-1.7150000000000003
+-0.7370000000000002
+-0.3770000000000003
+-0.2350000000000001
+-1.1380000000000001
+-1.4930000000000003
+-1.4050000000000002
+-1.254
+0.1759999999999977
+-0.7689999999999999
+0.03399999999999992
+-1.131
+-1.4350000000000003
+-1.284
+0.4779999999999999
+0.23899999999999966
+-1.176
+-1.238
+-1.344
+-0.7929999999999999
+0.8700000000000001
+-0.24500000000000033
+-1.15
+-1.002
+-1.044
+-1.059
+-0.41400000000000003
+-1.094
+-1.074
+-1.115
+-0.866
+-1.3
+-1.4290000000000003
+-1.066
+-1.2560000000000024
+-1.063
+-1.8610000000000007
+-1.131
+-1.2710000000000012
+-0.1669999999999999
+-0.49
+-1.274
+-1.3150000000000002
+-1.4980000000000002
+0.8300000000000001
+-0.2879999999999999
+-1.6590000000000003
+-1.062
+-0.06999999999999995
+-1.062
+-1.215
+-1.3630000000000002
+-1.045
+-1.1239999999999999
+-1.107
+-1.215
+0.1399999999999998
+0.7919999999999999
+-1.1230000000000016
+-1.195
+-0.35100000000000015
+-1.8710000000000004
+-0.32700000000000007
+-1.135
+-1.0870000000000013
+-0.896
+-1.279
+-1.183
+-0.978
+-1.268
+1.415
+-1.077
+-1.181
+-1.125
+0.07299999999999979
+-1.116
+-1.038
+-1.173
+-1.144
+-1.022
+-1.32
+-1.3760000000000001
+-0.821
+-0.9769999999999999
+-1.137
+-1.131
+-1.083
+0.06999999999999838
+-1.095
+-1.116
+-0.921
+0.5269999999999995
+-1.5320000000000003
+-1.109
+-1.087
+-0.9870000000000001
+-1.039
+-1.1869999999999998
+-1.126
+-1.1869999999999998
+0.9219999999999999
+-0.913
+-1.266
+-1.286
+0.6489999999999998
+-1.135
+-0.7460000000000002
+1.6090000000000055
+0.3329999999999984
+-1.242
+-1.058
+-1.322
+-1.5190000000000001
+-0.789
+1.2249999999999872
+-0.72
+-1.7960000000000005
+-1.192
+-1.4090000000000003
+-0.5410000000000007
+-0.45600000000000024
+-1.371
+-0.2609999999999999
+0.8620000000000003
+-1.2830000000000001
+-0.6890000000000004
+-1.3570000000000002
+-0.929
+-1.0480000000000005
+-0.981
+-1.272
+-0.8150000000000001
+-1.061
+-0.6030000000000004
+-0.14500000000000324
+-0.8140000000000001
+-1.24
+-1.062
+-1.4020000000000001
+-1.133
+0.7089999999999961
+-0.6850000000000003
+-1.6220000000000003
+-0.39600000000000024
+-1.238
+-0.7020000000000002
+-1.5190000000000003
+0.30199999999999316
+-0.9000000000000007
+-1.001
+-1.3090000000000002
+-1.5710000000000004
+-0.4810000000000005
+-1.3980000000000001
+-1.3960000000000001
+-0.965
+-0.32800000000000024
+-1.5270000000000004
+-1.193
+-1.2229999999999999
+-0.9999999999999999
+-1.2630000000000001
+-1.212
+-1.103
+-0.9920000000000001
+-0.6200000000000007
+-1.312
+-1.027
+1.753000000000003
+-1.162000000000002
+-1.279
+0.045999999999999625
+-1.1760000000000002
+-0.3290000000000034
+-1.5400000000000005
+-1.111
+-0.3290000000000003
+-1.4170000000000003
+-1.053
+-1.2850000000000001
+0.6069999999999995
+-1.178
+-1.063
+-0.8200000000000003
+-0.13400000000000012
+-0.891
+-1.346
+-0.8870000000000002
+-1.145
+-1.111
+-0.1529999999999998
+-1.074
+-0.3050000000000003
+-1.1179999999999999
+-1.167
+-1.083
+0.06199999999999961
+-1.5820000000000003
+-1.238
+-1.326
+-0.988
+-1.119
+0.09599999999999953
+-1.205
+-1.012
+-0.3370000000000001
+-1.13
+-1.5310000000000004
+-0.9879999999999999
+-1.4450000000000003
+-1.331
+-0.11700000000000031
+-2.735000000000003
+0.7359999999999998
+-1.36
+-0.922
+0.8309999999999997
+-1.1900000000000002
+-1.078
+-1.4160000000000001
+-0.375
+-1.144
+-1.3620000000000003
+-1.1239999999999999
+-0.28900000000000003
+-1.092
+-1.144
+-0.21499999999999964
+-1.1840000000000002
+-0.93
+-1.090000000000006
+-0.938
+0.6559999999999998
+-0.7660000000000007
+2.5040000000000004
+1.2819999999999998
+-1.3450000000000006
+-1.37
+-1.068
+-0.7090000000000002
+-0.43300000000000033
+-0.17399999999999993
+-0.9170000000000006
+-1.063
+-1.6130000000000004
+-0.3769999999999999
+-1.044
+-1.121
+-1.355000000000009
+-1.286
+-0.07300000000000004
+1.488
+-0.6960000000000002
+-1.055
+-1.2830000000000001
+-0.835
+-1.233
+-0.977
+-1.2349999999999999
+-1.124
+0.42600000000000016
+-0.33399999999999985
+-0.373
+-1.3150000000000002
+-1.4470000000000003
+-1.105
+-0.7390000000000004
+-1.0859999999999999
+-0.8439999999999999
+-0.91
+-1.4080000000000004
+-1.095
+-0.42300000000000004
+0.7839999999999998
+1.149
+-1.5050000000000003
+-1.0300000000000002
+-1.298
+-0.6700000000000003
+1.6409999999999998
+-1.101
+-1.024
+-1.2140000000000002
+-0.1070000000000003
+-1.3550000000000002
+-1.09
+0.06399999999999958
+-0.7660000000000003
+-1.2200000000000002
+-1.053
+-1.161
+-1.5810000000000004
+-1.158
+-0.9050000000000001
+-1.094
+-1.071
+-1.3090000000000002
+0.394999999999997
+-0.5240000000000007
+-0.08099999999999985
+-1.5580000000000003
+-1.107
+-1.7080000000000002
+-1.135
+-1.076
+-0.983
+-1.260000000000003
+-1.4720000000000002
+-1.4840000000000004
+-1.5110000000000003
+-1.5130000000000003
+-1.4080000000000001
+0.4379999999999998
+-1.4370000000000003
+-1.1219999999999999
+-1.5670000000000002
+-0.16700000000000031
+-0.6660000000000003
+-1.053
+-1.4110000000000003
+-1.17
+-0.788
+-1.086
+-1.2710000000000001
+0.17999999999999988
+-1.4020000000000001
+-1.076
+-0.9550000000000002
+-1.463
+-1.057
+-1.173
+-1.4910000000000003
+-1.1649999999999998
+-0.015000000000000908
+-1.274
+-0.5360000000000004
+-1.212
+-1.124
+-1.267
+-1.267
+-1.217
+-0.9910000000000005
+-1.097
+-0.6640000000000001
+-0.33800000000000013
+-1.211
+-0.9470000000000002
+-1.1989999999999998
+-1.3250000000000002
+1.0749999999999993
+-1.0999999999999999
+-0.23700000000000004
+-0.7470000000000003
+-1.4320000000000004
+-1.165
+-1.109
+-0.968
+-1.395
+-0.8190000000000006
+-1.2850000000000001
+0.4589999999999951
+-0.7070000000000002
+1.0339999999999956
+-1.109
+-1.5060000000000002
+-1.4340000000000004
+-0.47500000000000075
+-2.197000000000001
+-0.32099999999999984
+0.46199999999999863
+-1.079
+-0.951
+0.6849999999999999
+-1.254
+0.09800000000000017
+0.7089999999999997
+0.956
+-1.258
+-0.9640000000000005
+-0.8760000000000001
+-1.2060000000000002
+-1.1520000000000001
+-1.125
+-0.2949999999999998
+-1.2229999999999999
+-0.32099999999999984
+-0.5620000000000002
+-0.26
+-0.34600000000000014
+-1.5120000000000002
+-1.248
+-0.32199999999999984
+-1.302
+-0.856
+-1.1320000000000003
+-0.552
+-1.1370000000000013
+-1.6490000000000005
+-0.9629999999999999
+-1.143
+-0.6949999999999998
+-1.5700000000000003
+-0.7529999999999999
+-1.9600000000000017
+-0.7220000000000006
+2.112
+-0.20899999999999974
+0.7149999999999943
+-1.3230000000000002
+-1.459
+-0.246
+0.001000000000000334
+-1.3600000000000005
+0.4949999999999941
+-0.8390000000000001
+-1.0
+-0.8470000000000002
+-1.186
+-0.6640000000000005
+-1.296
+-0.32500000000000023
+-0.544
+-1.006
+-1.155
+-1.294
+-1.05
+0.4979999999999958
+-0.7769999999999998
+-1.179
+2.1019999999999937
+-1.1840000000000002
+-1.071
+-1.278
+0.5049999999999992
+0.13899999999999968
+-0.13200000000000067
+-1.348
+-0.3029999999999998
+-1.7060000000000028
+-1.4020000000000001
+-1.6460000000000004
+-0.42900000000000005
+0.7739999999999998
+-1.4890000000000003
+-0.8769999999999998
+-0.9430000000000001
+-1.073
+-0.5010000000000001
+-0.2710000000000002
+-0.981
+-0.6719999999999999
+-1.4230000000000003
+-1.09
+-1.237
+-0.6470000000000001
+-0.12200000000000001
+-1.7370000000000005
+-1.065
+-1.2040000000000002
+-1.155
+-0.7619999999999999
+-1.5560000000000003
+-1.009
+-0.925
+-0.2000000000000001
+-1.9100000000000006
+0.5419999999999984
+2.094999999999997
+0.994
+1.6049999999999964
+-1.14
+0.1299999999999998
+-1.255
+-1.216
+0.4039999999999979
+-0.5780000000000002
+-1.453
+-1.068
+-1.009
+-0.5430000000000004
+-1.3310000000000004
+0.8419999999999956
+-1.2919999999999998
+-0.777
+-1.078
+-1.248
+-1.459
+-1.169
+-1.1280000000000001
+-0.025000000000000078
+-1.093
+2.5309999999999975
+-1.016
+-1.413
+-1.15
+-1.191
+-1.4470000000000003
+-1.153
+-0.17200000000000004
+0.0999999999999998
+-1.7870000000000004
+-1.0750000000000002
+-1.7410000000000005
+0.13799999999999968
+-1.22
+-0.6060000000000001
+-0.41200000000000025
+-1.338
+-0.776
+-0.893
+-0.2509999999999998
+0.855
+-1.1099999999999999
+-0.8980000000000002
+-1.3900000000000001
+-0.7890000000000001
+-1.239
+-1.143
+-0.7760000000000017
+-1.154
+-0.20500000000000007
+-0.29999999999999993
+-1.13
+-1.5060000000000002
+-1.2480000000000002
+-1.137
+-1.4200000000000002
+-0.5480000000000003
+-0.864
+0.40699999999999986
+-0.994
+-1.121
+1.507
+-1.064
+-0.8070000000000005
+-1.1809999999999998
+0.04200000000000004
+-1.126
+0.11999999999999789
+-1.9880000000000004
+-0.019999999999999976
+1.2959999999999996
+-1.355000000000002
+-0.8909999999999998
+-1.004
+-0.7090000000000001
+-1.116
+-1.362
+-0.49500000000000033
+-1.106
+-0.5750000000000003
+0.3389999999999995
+-0.15400000000000003
+-1.223
+-0.8720000000000003
+-1.134
+-0.3460000000000003
+-1.016
+0.05399999999999472
+-1.141
+-1.072
+-1.165
+-1.254
+-1.5930000000000004
+-0.3819999999999999
+-1.1769999999999998
+-1.5030000000000003
+-1.2379999999999998
+-1.315000000000001
+-1.6920000000000004
+-0.6520000000000001
+-1.21
+-1.1520000000000001
+-0.4390000000000005
+-1.3170000000000002
+-0.5760000000000001
+-1.064
+-1.6300000000000003
+-0.7400000000000004
+-1.7050000000000005
+-1.111
+-1.232
+-0.32099999999999984
+-1.142
+-0.976
+-0.04999999999999981
+-0.645
+-1.083
+-1.3080000000000003
+-1.3970000000000002
+-1.1
+-1.192
+0.21699999999999964
+-0.7700000000000002
+-0.981
+-0.88
+0.2529999999999961
+-0.3310000000000004
+1.3159999999999998
+-1.7060000000000004
+1.746999999999999
+-1.114
+-1.5790000000000002
+-1.5630000000000002
+-1.5900000000000003
+-1.4190000000000003
+-0.34599999999999986
+-1.214
+-0.9250000000000002
+0.6209999999999979
+-0.6140000000000001
+-0.7810000000000005
+-0.5910000000000001
+-0.5580000000000002
+0.11299999999999955
+-0.7080000000000002
+-1.1199999999999999
+-0.7850000000000001
+-0.21399999999999997
+-1.172
+-1.2730000000000001
+-0.8700000000000003
+-1.281
+-1.22
+-0.6950000000000001
+-0.8379999999999999
+-1.1150000000000007
+-1.0339999999999998
+-1.6110000000000004
+-1.031
+-1.35
+0.2329999999999991
+-1.103
+-1.225
+-0.863
+-1.239
+-1.04
+0.5469999999999926
+-0.391
+0.4229999999999998
+-1.0490000000000002
+-0.53
+-1.093
+-1.207
+-0.5160000000000001
+-0.5820000000000002
+-0.8560000000000001
+-1.3090000000000004
+1.261
+0.15100000000000047
+0.16799999999999915
+-1.6380000000000003
+-1.205
+-1.056
+-1.099
+-0.7799999999999999
+-1.6140000000000003
+-1.216
+-1.0440000000000003
+-1.3930000000000002
+-1.8790000000000004
+0.657
+-1.272
+-0.45499999999999996
+-1.6050000000000004
+-1.005
+1.9119999999999968
+-1.4150000000000003
+0.8540000000000001
+-1.7440000000000002
+-0.539
+-1.039
+-0.9340000000000002
+-1.064
+-0.8250000000000001
+0.04399999999999926
+-1.119
+-0.14700000000000013
+-1.163
+-1.203
+-1.141
+-0.862
+-1.9480000000000006
+-0.33199999999999985
+-1.121
+-1.1280000000000001
+-0.9580000000000003
+-1.083
+-0.703
+-1.1850000000000012
+-0.14400000000000013
+-0.5720000000000005
+0.09599999999999975
+-1.077
+-1.124
+-0.8760000000000001
+-1.1179999999999999
+-1.052
+0.4149999999999999
+-1.24
+1.2390000000000012
+-1.245
+-0.46499999999999986
+-1.245
+1.3089999999999895
+-1.3110000000000002
+-1.1160000000000003
+-0.2580000000000017
+-1.3230000000000004
+-1.021
+-0.09800000000000009
+-0.41500000000000004
+-1.5470000000000004
+-1.071
+-0.011000000000000752
+0.17799999999999927
+-1.052
+-1.025
+-1.218
+-1.215
+-1.042
+-0.8959999999999999
+-0.3159999999999997
+-0.844
+0.6959999999999977
+-1.2349999999999999
+-1.3139999999999998
+1.0979999999999996
+-0.0359999999999997
+-1.08
+-0.736
+-1.6690000000000005
+-1.4970000000000003
+-1.0930000000000002
+-1.1600000000000001
+-1.002
+-1.0579999999999998
+-0.8240000000000001
+-1.016
+-0.2839999999999998
+-1.058
+-0.5030000000000003
+-1.048
+0.7579999999999998
+-1.036
+-0.9240000000000002
+0.833
+-0.8060000000000003
+-0.7670000000000002
+-1.016
+-1.064
+-0.7840000000000007
+1.004
+0.09599999999999946
+-1.224
+-1.209
+-1.7380000000000004
+-1.255
+-1.072
+-1.422
+-1.1
+-1.4050000000000002
+-1.1860000000000002
+-1.4640000000000002
+-1.155
+-1.2500000000000002
+-0.671
+-0.19200000000000006
+-1.508
+-1.4470000000000003
+-1.319
+-1.011
+-1.2
+-1.1599999999999997
+-1.101
+-1.303
+-1.0979999999999999
+-1.1320000000000001
+0.12499999999999833
+-0.5600000000000003
+-1.135
+-0.983
+-1.244
+-1.03
+-1.31
+-0.5260000000000006
+-1.6610000000000005
+-0.6730000000000002
+-1.014
+-1.374
+-1.3120000000000014
+-1.4180000000000001
+-1.053
+-0.9709999999999999
+-0.8390000000000001
+-1.186
+-0.9139999999999999
+-1.364
+-1.5900000000000003
+-1.174
+-0.535
+-1.19
+-1.05
+-1.143
+-1.1309999999999998
+-1.5080000000000005
+-1.185
+-1.024
+0.728
+-1.161
+-1.045
+-1.092
+-1.004
+-1.063
+-1.07
+-0.973
+-1.17
+-1.13
+-0.8210000000000005
+-1.4010000000000007
+-0.958
+0.9859999999999999
+-1.1320000000000001
+-1.1540000000000001
+-0.8160000000000001
+-1.106
+-1.139
+-1.1219999999999999
+-1.089
+-1.7690000000000017
+-1.2720000000000002
+3.2240000000000015
+-1.146
+-0.29
+-1.7180000000000004
+-0.0380000000000007
+-1.3610000000000002
+-1.3890000000000002
+-0.14100000000000043
+-0.959
+-1.177
+-1.049
+0.5689999999999996
+-1.141
+0.11999999999999984
+-0.9649999999999999
+-1.0490000000000002
+-1.202
+-1.4460000000000002
+-1.8180000000000005
diff --git a/DQN_mulit_tensorflow_2/checkpoints/FFA-test-1/FFA-test-1.data-00000-of-00001 b/DQN_mulit_tensorflow_2/checkpoints/FFA-test-1/FFA-test-1.data-00000-of-00001
new file mode 100644
index 0000000..adb5f00
Binary files /dev/null and b/DQN_mulit_tensorflow_2/checkpoints/FFA-test-1/FFA-test-1.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/checkpoints/FFA-test-1/FFA-test-1.index b/DQN_mulit_tensorflow_2/checkpoints/FFA-test-1/FFA-test-1.index
new file mode 100644
index 0000000..7348b4a
Binary files /dev/null and b/DQN_mulit_tensorflow_2/checkpoints/FFA-test-1/FFA-test-1.index differ
diff --git a/DQN_mulit_tensorflow_2/checkpoints/FFA-test-1/FFA-test.data-00000-of-00001 b/DQN_mulit_tensorflow_2/checkpoints/FFA-test-1/FFA-test.data-00000-of-00001
new file mode 100644
index 0000000..70c9e28
Binary files /dev/null and b/DQN_mulit_tensorflow_2/checkpoints/FFA-test-1/FFA-test.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/checkpoints/FFA-test-1/FFA-test.index b/DQN_mulit_tensorflow_2/checkpoints/FFA-test-1/FFA-test.index
new file mode 100644
index 0000000..048fc90
Binary files /dev/null and b/DQN_mulit_tensorflow_2/checkpoints/FFA-test-1/FFA-test.index differ
diff --git a/DQN_mulit_tensorflow_2/checkpoints/FFA-test-1/checkpoint b/DQN_mulit_tensorflow_2/checkpoints/FFA-test-1/checkpoint
new file mode 100644
index 0000000..fe39af7
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/checkpoints/FFA-test-1/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "FFA-test-1"
+all_model_checkpoint_paths: "FFA-test-1"
diff --git a/DQN_mulit_tensorflow_2/checkpoints/FFA1000/FFA1000.index b/DQN_mulit_tensorflow_2/checkpoints/FFA1000/FFA1000.index
new file mode 100644
index 0000000..7745b6e
Binary files /dev/null and b/DQN_mulit_tensorflow_2/checkpoints/FFA1000/FFA1000.index differ
diff --git a/DQN_mulit_tensorflow_2/checkpoints/FFA1000/checkpoint b/DQN_mulit_tensorflow_2/checkpoints/FFA1000/checkpoint
new file mode 100644
index 0000000..29be90e
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/checkpoints/FFA1000/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "FFA1000"
+all_model_checkpoint_paths: "FFA1000"
diff --git a/DQN_mulit_tensorflow_2/checkpoints/FFA1500/FFA1500.data-00000-of-00001 b/DQN_mulit_tensorflow_2/checkpoints/FFA1500/FFA1500.data-00000-of-00001
new file mode 100644
index 0000000..f907d6e
Binary files /dev/null and b/DQN_mulit_tensorflow_2/checkpoints/FFA1500/FFA1500.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/checkpoints/FFA1500/FFA1500.index b/DQN_mulit_tensorflow_2/checkpoints/FFA1500/FFA1500.index
new file mode 100644
index 0000000..166a75c
Binary files /dev/null and b/DQN_mulit_tensorflow_2/checkpoints/FFA1500/FFA1500.index differ
diff --git a/DQN_mulit_tensorflow_2/checkpoints/FFA1500/checkpoint b/DQN_mulit_tensorflow_2/checkpoints/FFA1500/checkpoint
new file mode 100644
index 0000000..b7b6bee
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/checkpoints/FFA1500/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "FFA1500"
+all_model_checkpoint_paths: "FFA1500"
diff --git a/DQN_mulit_tensorflow_2/checkpoints/FFA500/FFA500.data-00000-of-00001 b/DQN_mulit_tensorflow_2/checkpoints/FFA500/FFA500.data-00000-of-00001
new file mode 100644
index 0000000..b49b71c
Binary files /dev/null and b/DQN_mulit_tensorflow_2/checkpoints/FFA500/FFA500.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/checkpoints/FFA500/FFA500.index b/DQN_mulit_tensorflow_2/checkpoints/FFA500/FFA500.index
new file mode 100644
index 0000000..322db14
Binary files /dev/null and b/DQN_mulit_tensorflow_2/checkpoints/FFA500/FFA500.index differ
diff --git a/DQN_mulit_tensorflow_2/checkpoints/FFA500/checkpoint b/DQN_mulit_tensorflow_2/checkpoints/FFA500/checkpoint
new file mode 100644
index 0000000..10482c1
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/checkpoints/FFA500/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "FFA500"
+all_model_checkpoint_paths: "FFA500"
diff --git a/DQN_mulit_tensorflow_2/checkpoints/pre-train/checkpoint b/DQN_mulit_tensorflow_2/checkpoints/pre-train/checkpoint
new file mode 100644
index 0000000..7f0143b
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/checkpoints/pre-train/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "pre-train"
+all_model_checkpoint_paths: "pre-train"
diff --git a/DQN_mulit_tensorflow_2/checkpoints/pre-train/pre-train.data-00000-of-00001 b/DQN_mulit_tensorflow_2/checkpoints/pre-train/pre-train.data-00000-of-00001
new file mode 100644
index 0000000..f890448
Binary files /dev/null and b/DQN_mulit_tensorflow_2/checkpoints/pre-train/pre-train.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/checkpoints/pre-train/pre-train.index b/DQN_mulit_tensorflow_2/checkpoints/pre-train/pre-train.index
new file mode 100644
index 0000000..3e3ad69
Binary files /dev/null and b/DQN_mulit_tensorflow_2/checkpoints/pre-train/pre-train.index differ
diff --git a/DQN_mulit_tensorflow_2/constants.py b/DQN_mulit_tensorflow_2/constants.py
new file mode 100644
index 0000000..75a2db1
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/constants.py
@@ -0,0 +1,21 @@
+from pommerman import constants
+
+MIN_REPLAY_MEMORY_SIZE = 1000 # Minimum number of steps in a memory to start training
+MINIBATCH_SIZE = 256 # How many steps (samples) to use for training
+UPDATE_EVERY = 20 # Terminal states (end of episodes)
+MAX_BUFFER_SIZE = 100_000
+MAX_BUFFER_SIZE_PRE = 1_000_000
+DISCOUNT = 0.95
+MAX_STEPS = constants.MAX_STEPS
+
+# Environment settings
+EPISODES = 100000
+SHOW_EVERY = 1
+
+# Exploration settings
+epsilon = 0.95 # not a constant, going to be decayed
+EPSILON_DECAY = 0.99975
+MIN_EPSILON = 0.95
+
+SHOW_PREVIEW = True
+SHOW_GAME = 100
diff --git a/DQN_mulit_tensorflow_2/main_FFA.py b/DQN_mulit_tensorflow_2/main_FFA.py
new file mode 100644
index 0000000..885d790
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/main_FFA.py
@@ -0,0 +1,149 @@
+
+import constants
+import pommerman
+import numpy as np
+import pandas as pd
+
+#from DQNAgent_modified import DQNAgent
+from DQNAgent_modified_nhwc import DQNAgent
+from pommerman.agents import SimpleAgent
+from utility import featurize2D, reward_shaping
+
+
+def main():
+ agent1 = DQNAgent()
+ agent2 = SimpleAgent()
+ agent3 = SimpleAgent()
+ agent4 = SimpleAgent()
+
+ agent_list = [agent1, agent2, agent3, agent4]
+ env = pommerman.make('PommeFFACompetitionFast-v0', agent_list)
+
+ episode_rewards = [] # 记录平均reward
+
+ win = 0
+ draw = 0
+ total_game = 0
+ reward_to_csv = []
+ result_to_csv = []
+
+ total_numOfSteps = 0
+ episode = 0
+
+
+ while True:
+
+ current_state = env.reset()
+ # 将state 转化 1D array
+
+ episode_reward = 0
+ numOfSteps = 0
+ episode += 1
+ done = False
+
+ while not done:
+
+ state_feature = featurize2D(current_state[0])
+ numOfSteps += 1
+ total_numOfSteps += 1
+
+ if constants.epsilon > np.random.random() and total_numOfSteps >= constants.MIN_REPLAY_MEMORY_SIZE:
+ #if constants.epsilon > np.random.random():
+ # 获取动作
+ actions = env.act(current_state)
+ actions[0] = np.argmax(agent1.action_choose(state_feature)).tolist()
+ else:
+ # 随机动作
+ actions = env.act(current_state)
+ # actions[0] = random.randint(0, 5)
+
+ new_state, result, done, info = env.step(actions)
+
+ if 10 not in new_state[0]["alive"]:
+ done = True
+
+ # reward_shaping
+ agent1.buffer.append_action(actions[0])
+ reward = reward_shaping(current_state[0], new_state[0], actions[0], result[0], agent1.buffer.buffer_action)
+ # print("reward: ",reward)
+ next_state_feature = featurize2D(new_state[0])
+ episode_reward += reward
+
+ # 每一定局数显示游戏画面
+ # if constants.SHOW_PREVIEW and not episode % constants.SHOW_GAME:
+ # env.render()
+
+ # 储存记忆
+ agent1.buffer.append([state_feature, actions[0], reward, next_state_feature, done])
+
+ # 学习!
+ agent1.train()
+
+ # 更新state
+ current_state = new_state
+
+ if done:
+ break
+
+ result = 0
+
+ if done:
+ episode_rewards.append(episode_reward)
+ total_game += 1
+ if 0 in info.get('winners', []):
+ win += 1
+ result = 2
+
+ # 记录胜负情况
+ if numOfSteps == constants.MAX_STEPS + 1:
+ draw += 1
+ result = 1
+ win_rate = win / total_game
+ draw_rate = draw / total_game
+ # 存reward
+ reward_to_csv.append(episode_reward)
+ # 存result
+ result_to_csv.append(result)
+
+ if episode % constants.SHOW_EVERY == 0:
+ if result == 1:
+ print("{} episodes done, result: {} , steps: {}".format(episode,
+ 'draw',
+ numOfSteps))
+
+ print("Reward {:.2f}, Average Episode Reward: {:.3f}, win_rate:{:.2f}, draw_rate:{:.2f}".format(
+ episode_reward,
+ np.mean(episode_rewards),
+ win_rate,
+ draw_rate))
+ else:
+ print("{} episodes done, result: {} , steps: {}".format(episode,
+ 'win' if result == 2 else "lose",
+ numOfSteps))
+
+ print("Reward {:.3f}, Average Episode Reward: {:.3f}, win_rate:{:.2f}, draw_rate:{:.2f}".format(
+ episode_reward,
+ np.mean(episode_rewards),
+ win_rate,
+ draw_rate))
+
+ # agent1.epsilon_decay()
+
+ agent1.save_weights(episode)
+
+ # 记录结果,留作图表
+ if episode % 100 == 0:
+ df_reward = pd.DataFrame({"reward": reward_to_csv})
+ df_reward.to_csv("reward.csv", index=False, mode="a", header=False)
+ print("successfully saved reward")
+ reward_to_csv = []
+ df_result = pd.DataFrame({"result": result_to_csv})
+ df_result.to_csv("result.csv", index=False, mode="a", header=False)
+ print("successfully saved result")
+ result_to_csv = []
+
+ env.close()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/DQN_mulit_tensorflow_2/main_test.py b/DQN_mulit_tensorflow_2/main_test.py
new file mode 100644
index 0000000..621d6da
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/main_test.py
@@ -0,0 +1,154 @@
+import time
+
+import constants
+import pommerman
+import numpy as np
+
+# from DQNAgent_radio import DQNAgent
+from DQNAgent_modified import DQNAgent
+# from DQNAgent_one_vs_one import DQNAgent
+# from DQNAgent_dueling_dqn import DQNAgent
+#from DQNAgent_double_dqn import DQNAgent
+# from DQNAgent_dueling_dqn import DQNAgent
+# from DQNAgent_modified_filter_tong import DQNAgent
+# from DQNAgent_radio_filter2 import DQNAgent
+# from DQNAgent_noisy import DQNAgent
+from pommerman.agents import SimpleAgent, RandomAgent
+from utility import featurize2D, reward_shaping
+
+
+# from DQNAgent_radio import DQNAgent
+
+def main():
+ agent1 = DQNAgent()
+ agent2 = SimpleAgent()
+ agent3 = DQNAgent()
+ agent4 = SimpleAgent()
+
+ # agent1 = DQNAgent()
+ # agent2 = DQNAgent()
+ # agent3 = DQNAgent()
+ # agent4 = DQNAgent()
+
+ agent_list = [agent1, agent2, agent3, agent4]
+
+ #env = pommerman.make("PommeFFACompetitionFast-v0", agent_list)
+ env = pommerman.make("PommeRadioCompetition-v2", agent_list)
+
+ episode_rewards = [] # 记录平均reward
+
+ win = 0
+ draw = 0
+ total_game = 0
+
+ total_numOfSteps = 0
+ episode = 0
+ # while True:
+ for i in range(100):
+ # agent1.save_model()
+ current_state = env.reset()
+ # 将state 转化 1D array
+
+ episode_reward = 0
+ numOfSteps = 0
+ episode += 1
+ done = False
+
+ while not done:
+
+ state_feature = featurize2D(current_state[0])
+
+ numOfSteps += 1
+ total_numOfSteps += 1
+ # if numOfSteps % 10 == 0:
+ # actions = env.act(current_state)
+ # actions[0] = 5
+ # print("BOMB!")
+ # if constants.epsilon > np.random.random() and total_numOfSteps >= constants.MIN_REPLAY_MEMORY_SIZE:
+ # if constants.epsilon > np.random.random():
+ # # 获取动作
+ actions = env.act(current_state)
+ actions[0] = np.argmax(agent1.action_choose(state_feature)).tolist()
+ #actions[0] = agent1.act_filter(current_state[0], actions[0])
+
+ # else:
+ # # 随机动作
+ # actions = env.act(current_state)
+ # print("simple: ", actions[0])
+ # # actions[0] = random.randint(0, 5)
+
+ new_state, result, done, info = env.step(actions)
+
+ if 10 not in new_state[0]["alive"]:
+ done = True
+
+ # reward_shaping
+ # agent1.buffer.append_action(actions[0])
+ # reward = reward_shaping(current_state[0], new_state[0], actions[0], result[0], agent1.buffer.buffer_action)
+
+ # print("action: ", actions[0], "step_reward: ", reward)
+ # print("step reward: ",reward)
+ # next_state_feature = featurize2D(new_state[0])
+ # episode_reward += reward
+
+ # 每一定局数显示游戏画面
+ # if constants.SHOW_PREVIEW and not episode % constants.SHOW_GAME:
+ env.render()
+ time.sleep(10000)
+
+ # 储存记忆
+ # agent1.buffer.append([state_feature, actions[0], reward, next_state_feature, done])
+
+ # 学习!
+ # agent1.train()
+
+ # 更新state
+ current_state = new_state
+
+ if done:
+ break
+
+ result = 0
+
+ if done:
+ episode_rewards.append(episode_reward)
+ total_game += 1
+ if 0 in info.get('winners', []):
+ win += 1
+ result = 2
+
+ # 记录胜负情况
+ if numOfSteps == constants.MAX_STEPS + 1:
+ draw += 1
+ result = 1
+
+ win_rate = win / total_game
+ draw_rate = draw / total_game
+
+ if episode % constants.SHOW_EVERY == 0:
+ if result == 1:
+ print("{} episodes done, result: {} , steps: {}".format(episode,
+ 'draw',
+ numOfSteps))
+
+ print("Reward {:.2f}, Average Episode Reward: {:.3f}, win_rate:{:.2f}, draw_rate:{:.2f}".format(
+ episode_reward,
+ np.mean(episode_rewards),
+ win_rate,
+ draw_rate))
+ else:
+ print("{} episodes done, result: {} , steps: {}".format(episode,
+ 'win' if result == 2 else "lose",
+ numOfSteps))
+
+ print("Reward {:.3f}, Average Episode Reward: {:.3f}, win_rate:{:.2f}, draw_rate:{:.2f}".format(
+ episode_reward,
+ np.mean(episode_rewards),
+ win_rate,
+ draw_rate))
+ print("win: ", win, " draw: ", draw)
+ env.close()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/DQN_mulit_tensorflow_2/replay_memory.py b/DQN_mulit_tensorflow_2/replay_memory.py
new file mode 100644
index 0000000..d352fa7
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/replay_memory.py
@@ -0,0 +1,109 @@
+import random
+import collections
+import numpy as np
+import constants
+
+
+class replay_Memory():
+ def __init__(self, MAX_BUFFER_SIZE):
+ self.buffer = collections.deque(maxlen=MAX_BUFFER_SIZE)
+ self.buffer_episode = collections.deque()
+ self.buffer_processing = collections.deque()
+ self.buffer_action = collections.deque([0, 0, 0, 0], maxlen=4)
+ self.buffer_td = collections.deque(maxlen=MAX_BUFFER_SIZE)
+ self.alpha = 0.6
+ self.n_step = 4
+ self.gamma = 0.9
+
+ def append(self, transition):
+ self.buffer.append(transition)
+
+ def append_action(self, action):
+ self.buffer_action.append(action)
+
+ def append_augmentation(self, transition):
+ self.buffer_episode.append(transition)
+
+ def append_processing(self, transition):
+ self.buffer_processing.append(transition)
+
+ def append_pri(self, state, action, reward, next_state, done, td_error):
+ # pri DQN
+ transition = [state, action, reward, next_state, done]
+ self.append_td(td_error)
+ self.buffer.append(transition)
+ return True
+
+ def append_td(self, td_error):
+ self.buffer_td.append(td_error)
+
+ def clear(self):
+ self.buffer_episode.clear()
+ self.buffer_processing.clear()
+
+ def merge(self):
+ for element in self.buffer_processing:
+ self.buffer.append(element)
+
+ def merge_negative(self):
+ for element in self.buffer_processing:
+ if element[2] < 0:
+ self.buffer.append(element)
+
+ def sample(self, batch):
+ mini_batch = random.sample(self.buffer, batch)
+ return mini_batch
+
+ def sample_element(self, batch):
+ mini_batch = random.sample(self.buffer, batch)
+ current_state, action, reward, new_states, done = [], [], [], [], []
+
+ for transition in mini_batch:
+ curr_state, act, r, new_state, d = transition
+ current_state.append(curr_state)
+ action.append(act)
+ reward.append(r)
+ new_states.append(new_state)
+ done.append(d)
+
+ return np.array(current_state), action, reward, np.array(new_states), done
+
+ def sample_element_pri(self, batch_size):
+ # Prioritized DQN
+ # 根据td_error排序,求出索引index, 从小到大
+ index = np.argsort(np.array(self.buffer_td).flatten()).tolist()
+ # buffer 按index排序
+ buffer_sort = self.buffer
+ if len(index) != 0 and len(buffer_sort) != 0:
+ for i in range(len(self.buffer)):
+ buffer_sort[i] = self.buffer[index[i]]
+ prioritization = int(batch_size * self.alpha) # self.alpha = 0.6
+ batch_prioritized = []
+ for i in range(prioritization):
+ # 反向添加,从大到小
+ batch_prioritized.append(buffer_sort[-i - 1])
+ mini_batch = random.sample(self.buffer, batch_size - prioritization)
+ td = self.buffer_td
+ # 最训练使用数据batch= batch_prioritized(按td_error从大到小)+mini_batch(随机抽取)
+ batch = batch_prioritized + mini_batch
+ current_state, action, reward, new_states, done, td_error = [], [], [], [], [], []
+
+ for transition in batch:
+ curr_state, act, r, new_state, d = transition
+ current_state.append(curr_state)
+ action.append([act])
+ reward.append([r])
+ new_states.append(new_state)
+ done.append([d])
+
+ return np.array(current_state), action, reward, np.array(new_states), done
+
+ def sample_all(self):
+ # for pre-train
+ current_state = [transition[0] for transition in self.buffer]
+ action = [transition[0] for transition in self.buffer]
+
+ return current_state, action
+
+ def size(self):
+ return len(self.buffer)
diff --git a/DQN_mulit_tensorflow_2/second_model/keras_metadata.pb b/DQN_mulit_tensorflow_2/second_model/keras_metadata.pb
new file mode 100644
index 0000000..f3af04e
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/second_model/keras_metadata.pb
@@ -0,0 +1,12 @@
+
+Broot"_tf_keras_sequential*B{"name": "sequential", "trainable": true, "expects_training_arg": true, "dtype": "float32", "batch_input_shape": null, "must_restore_from_config": false, "class_name": "Sequential", "config": {"name": "sequential", "layers": [{"class_name": "InputLayer", "config": {"batch_input_shape": {"class_name": "__tuple__", "items": [null, 18, 11, 11]}, "dtype": "float32", "sparse": false, "ragged": false, "name": "conv2d_input"}}, {"class_name": "Conv2D", "config": {"name": "conv2d", "trainable": true, "batch_input_shape": {"class_name": "__tuple__", "items": [null, 18, 11, 11]}, "dtype": "float32", "filters": 256, "kernel_size": {"class_name": "__tuple__", "items": [3, 3]}, "strides": {"class_name": "__tuple__", "items": [1, 1]}, "padding": "same", "data_format": "channels_first", "dilation_rate": {"class_name": "__tuple__", "items": [1, 1]}, "groups": 1, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}, {"class_name": "Conv2D", "config": {"name": "conv2d_1", "trainable": true, "dtype": "float32", "filters": 256, "kernel_size": {"class_name": "__tuple__", "items": [3, 3]}, "strides": {"class_name": "__tuple__", "items": [1, 1]}, "padding": "same", "data_format": "channels_first", "dilation_rate": {"class_name": "__tuple__", "items": [1, 1]}, "groups": 1, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}, {"class_name": "Conv2D", "config": {"name": "conv2d_2", "trainable": true, "dtype": "float32", "filters": 256, 
"kernel_size": {"class_name": "__tuple__", "items": [3, 3]}, "strides": {"class_name": "__tuple__", "items": [1, 1]}, "padding": "same", "data_format": "channels_first", "dilation_rate": {"class_name": "__tuple__", "items": [1, 1]}, "groups": 1, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}, {"class_name": "Flatten", "config": {"name": "flatten", "trainable": true, "dtype": "float32", "data_format": "channels_last"}}, {"class_name": "Dense", "config": {"name": "dense", "trainable": true, "dtype": "float32", "units": 128, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}, {"class_name": "Dense", "config": {"name": "dense_1", "trainable": true, "dtype": "float32", "units": 64, "activation": "linear", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}]}, "shared_object_id": 17, "input_spec": [{"class_name": "InputSpec", "config": {"dtype": null, "shape": {"class_name": "__tuple__", "items": [null, 18, 11, 11]}, "ndim": 4, "max_ndim": null, "min_ndim": null, "axes": {}}}], "build_input_shape": {"class_name": "TensorShape", "items": [null, 18, 11, 11]}, "is_graph_network": true, "full_save_spec": {"class_name": "__tuple__", "items": [[{"class_name": "TypeSpec", "type_spec": "tf.TensorSpec", 
"serialized": [{"class_name": "TensorShape", "items": [null, 18, 11, 11]}, "float32", "conv2d_input"]}], {}]}, "save_spec": {"class_name": "TypeSpec", "type_spec": "tf.TensorSpec", "serialized": [{"class_name": "TensorShape", "items": [null, 18, 11, 11]}, "float32", "conv2d_input"]}, "keras_version": "2.7.0", "backend": "tensorflow", "model_config": {"class_name": "Sequential", "config": {"name": "sequential", "layers": [{"class_name": "InputLayer", "config": {"batch_input_shape": {"class_name": "__tuple__", "items": [null, 18, 11, 11]}, "dtype": "float32", "sparse": false, "ragged": false, "name": "conv2d_input"}, "shared_object_id": 0}, {"class_name": "Conv2D", "config": {"name": "conv2d", "trainable": true, "batch_input_shape": {"class_name": "__tuple__", "items": [null, 18, 11, 11]}, "dtype": "float32", "filters": 256, "kernel_size": {"class_name": "__tuple__", "items": [3, 3]}, "strides": {"class_name": "__tuple__", "items": [1, 1]}, "padding": "same", "data_format": "channels_first", "dilation_rate": {"class_name": "__tuple__", "items": [1, 1]}, "groups": 1, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 1}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 2}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 3}, {"class_name": "Conv2D", "config": {"name": "conv2d_1", "trainable": true, "dtype": "float32", "filters": 256, "kernel_size": {"class_name": "__tuple__", "items": [3, 3]}, "strides": {"class_name": "__tuple__", "items": [1, 1]}, "padding": "same", "data_format": "channels_first", "dilation_rate": {"class_name": "__tuple__", "items": [1, 1]}, "groups": 1, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 4}, "bias_initializer": 
{"class_name": "Zeros", "config": {}, "shared_object_id": 5}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 6}, {"class_name": "Conv2D", "config": {"name": "conv2d_2", "trainable": true, "dtype": "float32", "filters": 256, "kernel_size": {"class_name": "__tuple__", "items": [3, 3]}, "strides": {"class_name": "__tuple__", "items": [1, 1]}, "padding": "same", "data_format": "channels_first", "dilation_rate": {"class_name": "__tuple__", "items": [1, 1]}, "groups": 1, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 7}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 8}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 9}, {"class_name": "Flatten", "config": {"name": "flatten", "trainable": true, "dtype": "float32", "data_format": "channels_last"}, "shared_object_id": 10}, {"class_name": "Dense", "config": {"name": "dense", "trainable": true, "dtype": "float32", "units": 128, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 11}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 12}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 13}, {"class_name": "Dense", "config": {"name": "dense_1", "trainable": true, "dtype": "float32", "units": 64, "activation": "linear", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 14}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 15}, "kernel_regularizer": null, "bias_regularizer": 
null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 16}]}}, "training_config": {"loss": "mse", "metrics": [[{"class_name": "MeanMetricWrapper", "config": {"name": "accuracy", "dtype": "float32", "fn": "categorical_accuracy"}, "shared_object_id": 19}]], "weighted_metrics": null, "loss_weights": null, "optimizer_config": {"class_name": "Adam", "config": {"name": "Adam", "learning_rate": 0.0001, "decay": 0.0, "beta_1": 0.9, "beta_2": 0.999, "epsilon": 1e-07, "amsgrad": false}}}}2
+
+root.layer_with_weights-0"_tf_keras_layer*
+{"name": "conv2d", "trainable": true, "expects_training_arg": false, "dtype": "float32", "batch_input_shape": {"class_name": "__tuple__", "items": [null, 18, 11, 11]}, "stateful": false, "must_restore_from_config": false, "class_name": "Conv2D", "config": {"name": "conv2d", "trainable": true, "batch_input_shape": {"class_name": "__tuple__", "items": [null, 18, 11, 11]}, "dtype": "float32", "filters": 256, "kernel_size": {"class_name": "__tuple__", "items": [3, 3]}, "strides": {"class_name": "__tuple__", "items": [1, 1]}, "padding": "same", "data_format": "channels_first", "dilation_rate": {"class_name": "__tuple__", "items": [1, 1]}, "groups": 1, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 1}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 2}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 3, "input_spec": {"class_name": "InputSpec", "config": {"dtype": null, "shape": null, "ndim": null, "max_ndim": null, "min_ndim": 4, "axes": {"-3": 18}}, "shared_object_id": 20}, "build_input_shape": {"class_name": "TensorShape", "items": [null, 18, 11, 11]}}2
+ root.layer_with_weights-1"_tf_keras_layer* {"name": "conv2d_1", "trainable": true, "expects_training_arg": false, "dtype": "float32", "batch_input_shape": null, "stateful": false, "must_restore_from_config": false, "class_name": "Conv2D", "config": {"name": "conv2d_1", "trainable": true, "dtype": "float32", "filters": 256, "kernel_size": {"class_name": "__tuple__", "items": [3, 3]}, "strides": {"class_name": "__tuple__", "items": [1, 1]}, "padding": "same", "data_format": "channels_first", "dilation_rate": {"class_name": "__tuple__", "items": [1, 1]}, "groups": 1, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 4}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 5}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 6, "input_spec": {"class_name": "InputSpec", "config": {"dtype": null, "shape": null, "ndim": null, "max_ndim": null, "min_ndim": 4, "axes": {"-3": 256}}, "shared_object_id": 21}, "build_input_shape": {"class_name": "TensorShape", "items": [null, 256, 11, 11]}}2
+ root.layer_with_weights-2"_tf_keras_layer* {"name": "conv2d_2", "trainable": true, "expects_training_arg": false, "dtype": "float32", "batch_input_shape": null, "stateful": false, "must_restore_from_config": false, "class_name": "Conv2D", "config": {"name": "conv2d_2", "trainable": true, "dtype": "float32", "filters": 256, "kernel_size": {"class_name": "__tuple__", "items": [3, 3]}, "strides": {"class_name": "__tuple__", "items": [1, 1]}, "padding": "same", "data_format": "channels_first", "dilation_rate": {"class_name": "__tuple__", "items": [1, 1]}, "groups": 1, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 7}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 8}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 9, "input_spec": {"class_name": "InputSpec", "config": {"dtype": null, "shape": null, "ndim": null, "max_ndim": null, "min_ndim": 4, "axes": {"-3": 256}}, "shared_object_id": 22}, "build_input_shape": {"class_name": "TensorShape", "items": [null, 256, 11, 11]}}2
+root.layer-3"_tf_keras_layer*{"name": "flatten", "trainable": true, "expects_training_arg": false, "dtype": "float32", "batch_input_shape": null, "stateful": false, "must_restore_from_config": false, "class_name": "Flatten", "config": {"name": "flatten", "trainable": true, "dtype": "float32", "data_format": "channels_last"}, "shared_object_id": 10, "input_spec": {"class_name": "InputSpec", "config": {"dtype": null, "shape": null, "ndim": null, "max_ndim": null, "min_ndim": 1, "axes": {}}, "shared_object_id": 23}}2
+root.layer_with_weights-3"_tf_keras_layer*{"name": "dense", "trainable": true, "expects_training_arg": false, "dtype": "float32", "batch_input_shape": null, "stateful": false, "must_restore_from_config": false, "class_name": "Dense", "config": {"name": "dense", "trainable": true, "dtype": "float32", "units": 128, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 11}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 12}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 13, "input_spec": {"class_name": "InputSpec", "config": {"dtype": null, "shape": null, "ndim": null, "max_ndim": null, "min_ndim": 2, "axes": {"-1": 30976}}, "shared_object_id": 24}, "build_input_shape": {"class_name": "TensorShape", "items": [null, 30976]}}2
+root.layer_with_weights-4"_tf_keras_layer*{"name": "dense_1", "trainable": true, "expects_training_arg": false, "dtype": "float32", "batch_input_shape": null, "stateful": false, "must_restore_from_config": false, "class_name": "Dense", "config": {"name": "dense_1", "trainable": true, "dtype": "float32", "units": 64, "activation": "linear", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 14}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 15}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 16, "input_spec": {"class_name": "InputSpec", "config": {"dtype": null, "shape": null, "ndim": null, "max_ndim": null, "min_ndim": 2, "axes": {"-1": 128}}, "shared_object_id": 25}, "build_input_shape": {"class_name": "TensorShape", "items": [null, 128]}}2
+Rroot.keras_api.metrics.0"_tf_keras_metric*{"class_name": "Mean", "name": "loss", "dtype": "float32", "config": {"name": "loss", "dtype": "float32"}, "shared_object_id": 26}2
+Sroot.keras_api.metrics.1"_tf_keras_metric*{"class_name": "MeanMetricWrapper", "name": "accuracy", "dtype": "float32", "config": {"name": "accuracy", "dtype": "float32", "fn": "categorical_accuracy"}, "shared_object_id": 19}2
\ No newline at end of file
diff --git a/DQN_mulit_tensorflow_2/second_model/saved_model.pb b/DQN_mulit_tensorflow_2/second_model/saved_model.pb
new file mode 100644
index 0000000..5390aa3
Binary files /dev/null and b/DQN_mulit_tensorflow_2/second_model/saved_model.pb differ
diff --git a/DQN_mulit_tensorflow_2/second_model/variables/variables.data-00000-of-00001 b/DQN_mulit_tensorflow_2/second_model/variables/variables.data-00000-of-00001
new file mode 100644
index 0000000..9d336c9
Binary files /dev/null and b/DQN_mulit_tensorflow_2/second_model/variables/variables.data-00000-of-00001 differ
diff --git a/DQN_mulit_tensorflow_2/second_model/variables/variables.index b/DQN_mulit_tensorflow_2/second_model/variables/variables.index
new file mode 100644
index 0000000..ad5c4d5
Binary files /dev/null and b/DQN_mulit_tensorflow_2/second_model/variables/variables.index differ
diff --git a/DQN_mulit_tensorflow_2/utility.py b/DQN_mulit_tensorflow_2/utility.py
new file mode 100644
index 0000000..59d5cf4
--- /dev/null
+++ b/DQN_mulit_tensorflow_2/utility.py
@@ -0,0 +1,534 @@
+import numpy as np
+
+
+def reward_shaping(current_state, new_state, action, result, action_list):
+ r_win = 0
+ r_lose = -1
+
+ r_wood = 0.01
+ r_powerup = 0.05
+ r_kick = 0.02
+
+ # r_lay_bomb = -0.005
+ r_lay_bomb = 0.001
+ r_lay_bomb_near_enemy = 0.2
+ r_attack_teammate = -0.1
+ r_get_away_from_bomb = 0.005
+ r_get_close_to_bomb = -0.01
+
+ r_avoid = 0.001
+ r_move = 0.001
+ r_stay = -0.003
+ r_move_towards_wood = -0.01
+ r_move_loop = -0.005
+ r_dead_end = -0.1
+ r_ignore_penalty = -0.0015
+
+ reward = 0
+
+ X = current_state["position"][0]
+ Y = current_state["position"][1]
+ new_X = new_state["position"][0]
+ new_Y = new_state["position"][1]
+
+ # 左下情况
+ enemies = [11, 12, 13]
+ teammate = [10]
+
+ current_grids = []
+ if X - 1 >= 0:
+ current_grids.append(current_state["board"][X - 1][Y])
+ if X + 1 <= 10:
+ current_grids.append(current_state["board"][X + 1][Y])
+ if Y - 1 >= 0:
+ current_grids.append(current_state["board"][X][Y - 1])
+ if Y + 1 <= 10:
+ current_grids.append(current_state["board"][X][Y + 1])
+
+
+ if result == 1:
+ reward += r_win
+
+ # 检查是否踢了炸弹
+ if current_state["can_kick"] is True and new_state["can_kick"] is False:
+ reward += r_kick
+
+ if action == 0:
+ reward = check_avoid_flame(reward, r_avoid, current_grids)
+ reward = check_corner_bomb(current_state, X, Y, reward, r_avoid, r_stay, current_grids)
+ reward = check_in_flame(current_state, new_state, reward, r_lose, X, Y, new_X, new_Y)
+ reward = check_and_away_from_bomb(current_state, X, Y, new_X, new_Y, reward, r_get_away_from_bomb,
+ r_get_close_to_bomb, action_list)
+ return reward
+
+ # 移动reward
+ if action == 1:
+ # 检查是否撞墙
+ if current_state["position"] == new_state["position"]:
+ reward += r_move_towards_wood
+ else:
+ reward += r_move
+ reward = check_dead_end(new_state, new_X, new_Y, action_list, reward, r_dead_end)
+ reward = check_wood_and_power(current_state, new_X, new_Y, action_list, current_grids, reward,
+ r_ignore_penalty)
+ reward = check_move_loop(action_list, reward, r_move_loop)
+ reward = check_power_up(new_X, new_Y, current_state, reward, r_powerup)
+ reward = check_in_flame(current_state, new_state, reward, r_lose, X, Y, new_X, new_Y)
+ reward = check_and_away_from_bomb(current_state, X, Y, new_X, new_Y, reward, r_get_away_from_bomb,
+ r_get_close_to_bomb, action_list)
+ return reward
+
+ if action == 2:
+ # 检查是否撞墙
+ if current_state["position"] == new_state["position"]:
+ reward += r_move_towards_wood
+ else:
+ reward += r_move
+ reward = check_dead_end(new_state, new_X, new_Y, action_list, reward, r_dead_end)
+ reward = check_wood_and_power(current_state, new_X, new_Y, action_list, current_grids, reward,
+ r_ignore_penalty)
+ reward = check_move_loop(action_list, reward, r_move_loop)
+ reward = check_power_up(new_X, new_Y, current_state, reward, r_powerup)
+ reward = check_in_flame(current_state, new_state, reward, r_lose, X, Y, new_X, new_Y)
+ reward = check_and_away_from_bomb(current_state, X, Y, new_X, new_Y, reward, r_get_away_from_bomb,
+ r_get_close_to_bomb, action_list)
+ return reward
+
+ if action == 3:
+ # 检查是否撞墙
+ if current_state["position"] == new_state["position"]:
+ reward += r_move_towards_wood
+ else:
+ reward += r_move
+ reward = check_dead_end(new_state, new_X, new_Y, action_list, reward, r_dead_end)
+ reward = check_wood_and_power(current_state, new_X, new_Y, action_list, current_grids, reward,
+ r_ignore_penalty)
+ reward = check_move_loop(action_list, reward, r_move_loop)
+ reward = check_power_up(new_X, new_Y, current_state, reward, r_powerup)
+ reward = check_in_flame(current_state, new_state, reward, r_lose, X, Y, new_X, new_Y)
+ reward = check_and_away_from_bomb(current_state, X, Y, new_X, new_Y, reward, r_get_away_from_bomb,
+ r_get_close_to_bomb, action_list)
+ return reward
+
+ if action == 4:
+ # 检查是否撞墙
+ if current_state["position"] == new_state["position"]:
+ reward += r_move_towards_wood
+ else:
+ reward += r_move
+ reward = check_dead_end(new_state, new_X, new_Y, action_list, reward, r_dead_end)
+ reward = check_wood_and_power(current_state, new_X, new_Y, action_list, current_grids, reward,
+ r_ignore_penalty)
+ reward = check_move_loop(action_list, reward, r_move_loop)
+ reward = check_power_up(new_X, new_Y, current_state, reward, r_powerup)
+ reward = check_in_flame(current_state, new_state, reward, r_lose, X, Y, new_X, new_Y)
+ reward = check_and_away_from_bomb(current_state, X, Y, new_X, new_Y, reward, r_get_away_from_bomb,
+ r_get_close_to_bomb, action_list)
+ return reward
+
+ # 放炸弹reward,包括检查wood, 敌人等
+ if action == 5:
+
+ reward = check_in_flame(current_state, new_state, reward, r_lose, X, Y, new_X, new_Y)
+ reward = check_and_away_from_bomb(current_state, X, Y, new_X, new_Y, reward, r_get_away_from_bomb,
+ r_get_close_to_bomb, action_list)
+ if current_state["ammo"] != 0:
+ reward += r_lay_bomb
+ reward = check_bomb_reward(current_state, X, Y, reward, r_wood, r_lay_bomb_near_enemy, r_attack_teammate,
+ enemies, teammate)
+ else:
+ #reward += (2 * r_move_towards_wood)
+ reward += -0.001
+
+ return reward
+
+
+def check_bomb_reward(current_state, X, Y, reward, r_wood, r_lay_bomb_near_enemy, r_attack_teammate, enemies, teammate):
+ blast_strength = current_state["blast_strength"]
+ # 判断炸弹左方是否有墙
+ for strength in range(1, blast_strength):
+ # 检查是否超出地图边界
+ if Y - strength < 0:
+ break
+ # 检查是否有wood
+ elif current_state["board"][X][Y - strength] == 2:
+ reward += r_wood
+ break
+ # 如果是rigid,则break
+ elif current_state["board"][X][Y - strength] == 1:
+ break
+ # 如果爆炸范围内有敌人,获得reward
+ elif current_state["board"][X][Y - strength] in enemies:
+ reward += r_lay_bomb_near_enemy
+ elif current_state["board"][X][Y - strength] in teammate:
+ reward += r_attack_teammate
+
+ # 判断炸弹右方是否有墙
+ for strength in range(1, blast_strength):
+ # 检查是否超出地图边界
+ if Y + strength > 10:
+ break
+ # 检查是否有wood
+ elif current_state["board"][X][Y + strength] == 2:
+ reward += r_wood
+ break
+ # 如果是rigid,则break
+ elif current_state["board"][X][Y + strength] == 1:
+ break
+ # 如果爆炸范围内有敌人,获得reward
+ elif current_state["board"][X][Y + strength] in enemies:
+ reward += r_lay_bomb_near_enemy
+ elif current_state["board"][X][Y + strength] in teammate:
+ reward += r_attack_teammate
+
+ # 判断炸弹上方是否有墙
+ for strength in range(1, blast_strength):
+ # 检查是否超出地图边界
+ if X - strength < 0:
+ break
+ # 检查是否有wood
+ elif current_state["board"][X - strength][Y] == 2:
+ reward += r_wood
+ break
+ # 如果是rigid,则break
+ elif current_state["board"][X - strength][Y] == 1:
+ break
+ # 如果爆炸范围内有敌人,获得reward
+ elif current_state["board"][X - strength][Y] in enemies:
+ reward += r_lay_bomb_near_enemy
+ elif current_state["board"][X - strength][Y] in teammate:
+ reward += r_attack_teammate
+
+ # 判断炸弹下方是否有墙
+ for strength in range(1, blast_strength):
+ # 检查是否超出地图边界
+ if X + strength > 10:
+ break
+ # 检查是否有wood
+ elif current_state["board"][X + strength][Y] == 2:
+ reward += r_wood
+ break
+ # 如果是rigid,则break
+ elif current_state["board"][X + strength][Y] == 1:
+ break
+ # 如果爆炸范围内有敌人,获得reward
+ elif current_state["board"][X + strength][Y] in enemies:
+ reward += r_lay_bomb_near_enemy
+ elif current_state["board"][X + strength][Y] in teammate:
+ reward += r_attack_teammate
+ return reward
+
+
+def check_in_flame(current_state, new_state, reward, r_lose, X, Y, new_X, new_Y):
+ # 若agent与火焰位置重叠,则死亡,返回reward
+
+ if current_state["flame_life"][X][Y] == 0 and new_state["flame_life"][new_X][new_Y] != 0:
+ reward += r_lose
+ return reward
+
+
+def check_power_up(new_X, new_Y, current_state, reward, r_power_up):
+ if current_state["board"][new_X][new_Y] in [6, 7, 8]:
+ reward += r_power_up
+
+ return reward
+
+
+def check_corner_bomb(current_state, X, Y, reward, r_avoid, r_stay, current_grids):
+ # action 0 来躲避左上bomb
+ find_bomb = False
+ if X - 1 >= 0 and Y - 1 >= 0 and current_state["board"][X - 1][Y - 1] == 3:
+ reward += r_avoid
+ find_bomb = True
+ if X - 1 >= 0 and Y - 2 >= 0 and current_state["board"][X - 1][Y - 2] == 3:
+ reward += r_avoid
+ find_bomb = True
+ # 右上
+ if X - 1 >= 0 and Y + 1 <= 10 and current_state["board"][X - 1][Y + 1] == 3:
+ reward += r_avoid
+ find_bomb = True
+ if X - 1 >= 0 and Y + 2 <= 10 and current_state["board"][X - 1][Y + 2] == 3:
+ reward += r_avoid
+ find_bomb = True
+ # 左下
+ if X + 1 <= 10 and Y - 1 >= 0 and current_state["board"][X + 1][Y - 1] == 3:
+ reward += r_avoid
+ find_bomb = True
+ if X + 2 <= 10 and Y - 1 >= 0 and current_state["board"][X + 2][Y - 1] == 3:
+ reward += r_avoid
+ find_bomb = True
+ # 右下
+ if X + 1 <= 10 and Y + 1 <= 10 and current_state["board"][X + 1][Y + 1] == 3:
+ reward += r_avoid
+ find_bomb = True
+ if X + 1 <= 10 and Y + 2 <= 10 and current_state["board"][X + 1][Y + 2] == 3:
+ reward += r_avoid
+ find_bomb = True
+ if not find_bomb and 3 not in current_grids:
+ reward += r_stay
+
+ return reward
+
+
+def check_and_away_from_bomb(current_state, X, Y, new_X, new_Y, reward, r_get_away_from_bomb, r_get_close_to_bomb,
+                             action_list):
+    """Shape the reward by how the agent moved relative to nearby bombs.
+
+    Scans up to three cells in each orthogonal direction (the 2- and 3-cell
+    checks only apply when the intervening cells are not rigid/wood) and
+    one cell on each diagonal.  A bomb is board value 3; distance is
+    Manhattan distance from the bomb to the new position.  Moving away
+    earns r_get_away_from_bomb; staying adjacent (or standing still right
+    after laying a bomb) costs 2 * r_get_close_to_bomb, farther cases cost
+    r_get_close_to_bomb.  Returns the updated reward.
+    """
+    # Standing/moving relative to a bomb the agent itself just laid
+    # (action_list[2] == 5 means the action before last was "lay bomb").
+    if action_list[2] == 5 and (X != new_X or Y != new_Y):
+        reward += r_get_away_from_bomb
+    elif action_list[2] == 5 and (X == new_X and Y == new_Y):
+        reward += 2 * r_get_close_to_bomb
+    # Up: bombs 1-3 cells above.
+    if X - 1 >= 0 and current_state["board"][X - 1][Y] == 3 and (abs((X - 1) - new_X) + abs(Y - new_Y)) > 1:
+        reward += r_get_away_from_bomb
+    if X - 1 >= 0 and current_state["board"][X - 1][Y] == 3 and (abs((X - 1) - new_X) + abs(Y - new_Y)) == 1:
+        reward += 2 * r_get_close_to_bomb
+    if X - 2 >= 0 and current_state["board"][X - 2][Y] == 3 and (abs((X - 2) - new_X) + abs(Y - new_Y)) > 2 and \
+            current_state["board"][X - 1][Y] not in [1, 2]:
+        reward += r_get_away_from_bomb
+    elif X - 2 >= 0 and current_state["board"][X - 2][Y] == 3 and (abs((X - 2) - new_X) + abs(Y - new_Y)) <= 2 and \
+            current_state["board"][X - 1][Y] not in [1, 2]:
+        reward += r_get_close_to_bomb
+    if X - 3 >= 0 and current_state["board"][X - 3][Y] == 3 and (abs((X - 3) - new_X) + abs(Y - new_Y)) > 3 and \
+            current_state["board"][X - 1][Y] not in [1, 2] and current_state["board"][X - 2][Y] not in [1, 2]:
+        reward += r_get_away_from_bomb
+    elif X - 3 >= 0 and current_state["board"][X - 3][Y] == 3 and (abs((X - 3) - new_X) + abs(Y - new_Y)) <= 3 and \
+            current_state["board"][X - 1][Y] not in [1, 2] and current_state["board"][X - 2][Y] not in [1, 2]:
+        reward += r_get_close_to_bomb
+    # Down: bombs 1-3 cells below.
+    if X + 1 <= 10 and current_state["board"][X + 1][Y] == 3 and (abs((X + 1) - new_X) + abs(Y - new_Y)) > 1:
+        reward += r_get_away_from_bomb
+    if X + 1 <= 10 and current_state["board"][X + 1][Y] == 3 and (abs((X + 1) - new_X) + abs(Y - new_Y)) == 1:
+        reward += 2 * r_get_close_to_bomb
+    if X + 2 <= 10 and current_state["board"][X + 2][Y] == 3 and (abs((X + 2) - new_X) + abs(Y - new_Y)) > 2 and \
+            current_state["board"][X + 1][Y] not in [1, 2]:
+        reward += r_get_away_from_bomb
+    elif X + 2 <= 10 and current_state["board"][X + 2][Y] == 3 and (abs((X + 2) - new_X) + abs(Y - new_Y)) <= 2 and \
+            current_state["board"][X + 1][Y] not in [1, 2]:
+        reward += r_get_close_to_bomb
+    if X + 3 <= 10 and current_state["board"][X + 3][Y] == 3 and (abs((X + 3) - new_X) + abs(Y - new_Y)) > 3 and \
+            current_state["board"][X + 1][Y] not in [1, 2] and current_state["board"][X + 2][Y] not in [1, 2]:
+        reward += r_get_away_from_bomb
+    elif X + 3 <= 10 and current_state["board"][X + 3][Y] == 3 and (abs((X + 3) - new_X) + abs(Y - new_Y)) <= 3 and \
+            current_state["board"][X + 1][Y] not in [1, 2] and current_state["board"][X + 2][Y] not in [1, 2]:
+        reward += r_get_close_to_bomb
+
+    # Left: bombs 1-3 cells to the left.
+    if Y - 1 >= 0 and current_state["board"][X][Y - 1] == 3 and (abs(X - new_X) + abs((Y - 1) - new_Y)) > 1:
+        reward += r_get_away_from_bomb
+    if Y - 1 >= 0 and current_state["board"][X][Y - 1] == 3 and (abs(X - new_X) + abs((Y - 1) - new_Y)) == 1:
+        reward += 2 * r_get_close_to_bomb
+    if Y - 2 >= 0 and current_state["board"][X][Y - 2] == 3 and (abs(X - new_X) + abs((Y - 2) - new_Y)) > 2 and \
+            current_state["board"][X][Y - 1] not in [1, 2]:
+        reward += r_get_away_from_bomb
+    elif Y - 2 >= 0 and current_state["board"][X][Y - 2] == 3 and (abs(X - new_X) + abs((Y - 2) - new_Y)) <= 2 and \
+            current_state["board"][X][Y - 1] not in [1, 2]:
+        reward += r_get_close_to_bomb
+    if Y - 3 >= 0 and current_state["board"][X][Y - 3] == 3 and (abs(X - new_X) + abs((Y - 3) - new_Y)) > 3 and \
+            current_state["board"][X][Y - 1] not in [1, 2] and current_state["board"][X][Y - 2] not in [1, 2]:
+        reward += r_get_away_from_bomb
+    elif Y - 3 >= 0 and current_state["board"][X][Y - 3] == 3 and (abs(X - new_X) + abs((Y - 3) - new_Y)) <= 3 and \
+            current_state["board"][X][Y - 1] not in [1, 2] and current_state["board"][X][Y - 2] not in [1, 2]:
+        reward += r_get_close_to_bomb
+
+    # Right: bombs 1-3 cells to the right.
+    if Y + 1 <= 10 and current_state["board"][X][Y + 1] == 3 and (abs(X - new_X) + abs((Y + 1) - new_Y)) > 1:
+        reward += r_get_away_from_bomb
+    if Y + 1 <= 10 and current_state["board"][X][Y + 1] == 3 and (abs(X - new_X) + abs((Y + 1) - new_Y)) == 1:
+        reward += 2 * r_get_close_to_bomb
+    if Y + 2 <= 10 and current_state["board"][X][Y + 2] == 3 and (abs(X - new_X) + abs((Y + 2) - new_Y)) > 2 and \
+            current_state["board"][X][Y + 1] not in [1, 2]:
+        reward += r_get_away_from_bomb
+    elif Y + 2 <= 10 and current_state["board"][X][Y + 2] == 3 and (abs(X - new_X) + abs((Y + 2) - new_Y)) <= 2 and \
+            current_state["board"][X][Y + 1] not in [1, 2]:
+        reward += r_get_close_to_bomb
+    if Y + 3 <= 10 and current_state["board"][X][Y + 3] == 3 and (abs(X - new_X) + abs((Y + 3) - new_Y)) > 3 and \
+            current_state["board"][X][Y + 1] not in [1, 2] and current_state["board"][X][Y + 2] not in [1, 2]:
+        reward += r_get_away_from_bomb
+    elif Y + 3 <= 10 and current_state["board"][X][Y + 3] == 3 and (abs(X - new_X) + abs((Y + 3) - new_Y)) <= 3 and \
+            current_state["board"][X][Y + 1] not in [1, 2] and current_state["board"][X][Y + 2] not in [1, 2]:
+        reward += r_get_close_to_bomb
+
+    # Diagonal checks: upper-left.
+    if X - 1 >= 0 and Y - 1 >= 0 and current_state["board"][X - 1][Y - 1] == 3 and (abs((X - 1) - new_X)) + abs(
+            (Y - 1) - new_Y) > 2:
+        reward += r_get_away_from_bomb
+    elif X - 1 >= 0 and Y - 1 >= 0 and current_state["board"][X - 1][Y - 1] == 3 and (abs((X - 1) - new_X)) + abs(
+            (Y - 1) - new_Y) < 2:
+        reward += r_get_close_to_bomb
+    # Lower-left.
+    if X + 1 <= 10 and Y - 1 >= 0 and current_state["board"][X + 1][Y - 1] == 3 and (abs((X + 1) - new_X)) + abs(
+            (Y - 1) - new_Y) > 2:
+        reward += r_get_away_from_bomb
+    elif X + 1 <= 10 and Y - 1 >= 0 and current_state["board"][X + 1][Y - 1] == 3 and (abs((X + 1) - new_X)) + abs(
+            (Y - 1) - new_Y) < 2:
+        reward += r_get_close_to_bomb
+    # Upper-right.
+    if X - 1 >= 0 and Y + 1 <= 10 and current_state["board"][X - 1][Y + 1] == 3 and (abs((X - 1) - new_X)) + abs(
+            (Y + 1) - new_Y) > 2:
+        reward += r_get_away_from_bomb
+    elif X - 1 >= 0 and Y + 1 <= 10 and current_state["board"][X - 1][Y + 1] == 3 and (abs((X - 1) - new_X)) + abs(
+            (Y + 1) - new_Y) < 2:
+        reward += r_get_close_to_bomb
+
+    # Lower-right.
+    # NOTE(review): the elif below uses abs((Y - 1) + new_Y), unlike every
+    # other corner which uses abs((Y ± 1) - new_Y) — almost certainly a
+    # typo for abs((Y + 1) - new_Y); kept byte-identical here, confirm and
+    # fix separately.
+    if X + 1 <= 10 and Y + 1 <= 10 and current_state["board"][X + 1][Y + 1] == 3 and (abs((X + 1) - new_X)) + abs(
+            (Y + 1) - new_Y) > 2:
+        reward += r_get_away_from_bomb
+    elif X + 1 <= 10 and Y + 1 <= 10 and current_state["board"][X + 1][Y + 1] == 3 and (abs((X + 1) - new_X)) + abs(
+            (Y - 1) + new_Y) < 2:
+        reward += r_get_close_to_bomb
+
+    return reward
+
+
def check_move_loop(action_list, reward, r_move_loop):
    """Penalize four-step movement sequences that look like oscillation loops.

    Args:
        action_list: the agent's last four actions (movement codes 1-4,
            presumably Up/Down/Left/Right -- confirm against the env's enum).
        reward: running shaped reward to add to.
        r_move_loop: (negative) penalty applied when a loop pattern matches.

    Returns:
        The updated reward.

    Improvements over the original: the superseded commented-out condition is
    removed, and the pattern table is a set of tuples so membership is O(1)
    instead of rebuilding and scanning a list of lists on every call.
    """
    # Back-and-forth and there-and-back patterns over the last four moves.
    loop_patterns = {
        (1, 2, 1, 2), (2, 1, 2, 1),
        (3, 4, 3, 4), (4, 3, 4, 3),
        (1, 2, 3, 4), (2, 1, 3, 4),
        (3, 4, 1, 2), (3, 4, 2, 1),
        (1, 2, 4, 3), (2, 1, 4, 3),
        (4, 3, 1, 2), (4, 3, 2, 1),
    }
    if tuple(action_list) in loop_patterns:
        reward += r_move_loop
    return reward
+
+
def check_dead_end(new_state, new_X, new_Y, action_list, reward, r_dead_end):
    """Penalty when the agent lays a bomb (action 5) and then steps into a
    dead end: after the move, every neighbouring cell except the one it came
    from is a board edge, rigid wall, wood, or bomb (board values 1/2/3).

    action_list holds the last four actions; index 2 must be the bomb and
    index 3 the move (1/2/3/4 -- the four cardinal directions).
    """
    # Offset pointing back to the cell the agent came from (the bomb side);
    # that side is deliberately not required to be blocked.
    back_offsets = {1: (1, 0), 2: (-1, 0), 3: (0, 1), 4: (0, -1)}

    if action_list[2] != 5 or action_list[3] not in back_offsets:
        return reward

    board = new_state["board"]
    back = back_offsets[action_list[3]]

    def _blocked(dx, dy):
        # Outside the 11x11 board counts as blocked, as do items 1/2/3.
        x, y = new_X + dx, new_Y + dy
        return x < 0 or x > 10 or y < 0 or y > 10 or board[x][y] in [1, 2, 3]

    sides = [off for off in ((-1, 0), (1, 0), (0, -1), (0, 1)) if off != back]
    if all(_blocked(dx, dy) for dx, dy in sides):
        reward += r_dead_end
    return reward
+
+
def check_avoid_flame(reward, r_avoid, current_grids):
    """Bonus when the surrounding grid cells contain at least one flame (4)
    and nothing but rigid wall / wood / bomb / flame (values 1-4) -- i.e. the
    agent is hemmed in next to fire.
    """
    cells = set(current_grids)
    if 4 in cells and cells <= {1, 2, 3, 4}:
        reward += r_avoid
    return reward
+
+
def check_wood_and_power(current_state, new_X, new_Y, action_list, current_grids, reward, r_ignore_penalty):
    """Penalty for ignoring a nearby power-up.

    Fires when a power-up (6/7/8) is visible in the surrounding grids, the
    agent's new cell is not itself a power-up, it never placed a bomb in the
    last four actions, and it still has ammo.

    Bug fix: the original condition ``(6 or 7 or 8) in current_grids``
    evaluates ``6 or 7 or 8`` to 6 first, so only 6 was ever tested; now any
    of the three power-up codes triggers the check. The unused ``power`` flag
    and the dead commented-out wood branch were removed.
    """
    powerup_nearby = any(item in current_grids for item in (6, 7, 8))
    if powerup_nearby and current_state["board"][new_X][new_Y] not in [6, 7, 8] and \
            5 not in action_list and current_state["ammo"] != 0:
        reward += r_ignore_penalty
    return reward
+
+
def featurize2D(states, partially_obs=True):
    """Convert one observation dict into an 18-plane (18, 11, 11) tensor.

    Planes, in order: path, rigid, wood, bomb, flame, fog (all-zero by
    design), power_up, agent1-4 (board codes 10-13), bomb_blast_strength,
    bomb_life, bomb_moving_direction, flame_life, and three constant planes
    broadcast from the scalars ammo / blast_strength / can_kick.

    Args:
        states: observation dict; ``states["board"]`` and the four bomb/flame
            matrices are numpy arrays of shape (11, 11).
        partially_obs: if True, everything outside the 9x9 window centred on
            the agent is masked as fog (code 5). NOTE: this mutates
            ``states["board"]`` in place, as the original did.
    """
    X = states["position"][0]
    Y = states["position"][1]
    shape = (11, 11)
    # for 1v1
    # shape = (8, 8)

    def get_partially_obs(states, X, Y):
        # Mask everything outside the visibility window as fog (5).
        board = np.full(shape, 5)
        # Bug fix: range(10) skipped index 10, so the last row and column
        # stayed fog even when they were inside the visibility window.
        for x in range(shape[0]):
            for y in range(shape[1]):
                if X - 4 <= x <= X + 4 and Y - 4 <= y <= Y + 4:
                    board[x][y] = states["board"][x][y]
        states["board"] = board
        return states

    def get_matrix(board, key):
        # Fetch a (11, 11) float64 view of one observation matrix.
        res = board[key]
        return res.reshape(shape).astype(np.float64)

    def get_map(board, item):
        # 0/1 plane marking where `board` holds `item`.
        map = np.zeros(shape)
        map[board == item] = 1
        return map

    if partially_obs:
        states = get_partially_obs(states, X, Y)

    board = get_matrix(states, "board")

    path = get_map(board, 0)
    rigid = get_map(board, 1)
    wood = get_map(board, 2)
    bomb = get_map(board, 3)
    flame = get_map(board, 4)
    # Fog is deliberately an all-zero plane (see the disabled get_map(board, 5)).
    # fog = get_map(board, 5)
    fog = np.zeros(shape)
    agent1 = get_map(board, 10)
    agent2 = get_map(board, 11)
    agent3 = get_map(board, 12)
    agent4 = get_map(board, 13)

    # Single plane marking any power-up (extra bomb=6, range=7, kick=8).
    power_up = np.isin(board, (6, 7, 8)).astype(np.float64)

    bomb_blast_strength = get_matrix(states, 'bomb_blast_strength')
    bomb_life = get_matrix(states, 'bomb_life')
    bomb_moving_direction = get_matrix(states, 'bomb_moving_direction')
    flame_life = get_matrix(states, 'flame_life')

    ammo_2D, blast_strength_2D, can_kick_2D = rebuild_1D_element(states)

    feature2D = [path, rigid, wood, bomb, flame, fog, power_up, agent1, agent2,
                 agent3, agent4, bomb_blast_strength, bomb_life,
                 bomb_moving_direction, flame_life, ammo_2D, blast_strength_2D,
                 can_kick_2D]

    return np.array(feature2D)
+
+
def rebuild_1D_element(states):
    """Broadcast the scalar observation fields to constant (11, 11) planes.

    Returns (ammo_2D, blast_strength_2D, can_kick_2D); can_kick is cast from
    bool to int so the plane is numeric.
    """
    shape = (11, 11)
    # shape = (8, 8)  # for 1v1
    scalars = (
        states["ammo"],
        states["blast_strength"],
        int(states["can_kick"]),
    )
    return tuple(np.full(shape, value) for value in scalars)
diff --git a/DQN_multi_tensorflow/DQNAgent.py b/DQN_multi_tensorflow/DQNAgent.py
new file mode 100644
index 0000000..444f46a
--- /dev/null
+++ b/DQN_multi_tensorflow/DQNAgent.py
@@ -0,0 +1,156 @@
+from keras.layers import Dense, Dropout, Flatten, Conv2D, MaxPooling2D
+from keras import Sequential
+from tensorflow.keras.optimizers import Adam
+from pommerman.agents import BaseAgent
+from pommerman.agents.random_agent import RandomAgent
+from pommerman import characters
+
+from gym.spaces import Discrete
+from utility import reward_shaping
+from DQN_new import constants
+from replay_memory import replay_Memory
+import numpy as np
+import os
+import tensorflow as tf
+
+
class DQNAgent(BaseAgent):
    """DQN agent with an online ("training") and a target ("trained") network.

    Transitions are stored in a replay buffer; ``train`` samples minibatches,
    recomputes shaped rewards via ``reward_shaping``, and fits the online
    network; the target network is synced every ``UPDATE_EVERY`` episodes.
    """

    def __init__(self, character=characters.Bomber):
        super(DQNAgent, self).__init__(character)
        # Fallback policy used for in-game action selection in act().
        self.baseAgent = RandomAgent()

        self.training_model = self.new_model()   # online network, updated every step
        self.trained_model = self.new_model()    # target network, synced periodically
        # self.trained_model.set_weights(self.training_model.get_weights())

        self.epsilon = constants.epsilon
        self.min_epsilon = constants.MIN_EPSILON
        self.eps_decay = constants.EPSILON_DECAY
        self.buffer = replay_Memory(constants.MAX_BUFFER_SIZE)
        self.target_update_counter = 0

    def new_model(self):
        """Build the convolutional Q-network: input (14, 11, 11) -> 6 Q-values."""
        model = Sequential()
        input_shape = (constants.MINIBATCH_SIZE, 14, 11, 11)
        model.add(Conv2D(256, 3, input_shape=input_shape[1:], activation="relu"))
        model.add(MaxPooling2D(pool_size=(3, 3), data_format="channels_first"))
        model.add(Dropout(0.2))

        model.add(Conv2D(256, 2, activation="relu"))
        model.add(MaxPooling2D(pool_size=(2, 2), data_format="channels_first"))
        model.add(Dropout(0.2))

        model.add(Flatten())
        model.add(Dense(64))

        # NOTE(review): softmax output with an MSE loss is unusual for
        # Q-value regression -- linear activation is the conventional choice;
        # kept as-is pending confirmation.
        model.add(Dense(6, activation='softmax'))
        model.compile(loss="mse", optimizer=Adam(learning_rate=0.001), metrics=['accuracy'])

        return model

    def act(self, obs, action_space):
        """Delegate in-game action selection to the base agent (6 discrete actions)."""
        return self.baseAgent.act(obs, Discrete(6))

    def train(self, done, numOfEpisode):
        """Run one DQN update from a sampled minibatch.

        Args:
            done: whether the current EPISODE just finished; drives the
                target-network update counter.
            numOfEpisode: 1-based episode index (reserved for checkpointing).
        """
        if self.buffer.size() < constants.MIN_REPLAY_MEMORY_SIZE:
            return

        # Sample transitions and recompute their shaped rewards.
        mini_batch = self.buffer.sample(constants.MINIBATCH_SIZE)
        computed_reward = reward_shaping(mini_batch)

        # Q-values of the sampled current states (online network).
        current_states = np.array([transition[0] for transition in mini_batch])
        current_qs_list = self.training_model.predict(self.get_state(current_states))

        # Write the shaped rewards back into the sampled transitions.
        for index in range(0, constants.MINIBATCH_SIZE):
            mini_batch[index][2] = computed_reward[index]

        # Q-values of the next states (target network).
        next_state = np.array([transition[3] for transition in mini_batch])
        future_qs_list = self.trained_model.predict(self.get_state(next_state))

        # X holds states, Y the target Q-vectors.
        X = []
        Y = []

        # Bug fix: the loop variable used to be named `done`, shadowing the
        # episode-level parameter, so the target-update logic below acted on
        # the LAST transition's flag instead of the actual episode flag.
        for index, (current_state, action, reward, new_current_state, step_done) in enumerate(mini_batch):
            if not step_done:
                # Bellman target using the target network's best next value.
                max_future_q = np.max(future_qs_list[index])
                next_q = reward + constants.DISCOUNT * max_future_q
            else:
                next_q = reward

            # Update only the Q-value of the taken action.
            current_qs = current_qs_list[index]
            current_qs[action] = next_q

            X.append(current_state)
            Y.append(current_qs)

        # Keep only the 'local' feature planes for the network input.
        X = self.get_state(X)

        self.training_model.fit(np.array(X), np.array(Y), epochs=4, batch_size=constants.MINIBATCH_SIZE,
                                verbose=0, shuffle=False)

        # Count finished episodes and periodically sync the target network.
        if done:
            self.target_update_counter += 1

        if self.target_update_counter > constants.UPDATE_EVERY:
            self.trained_model.set_weights(self.training_model.get_weights())
            self.target_update_counter = 0

    def get_q_value(self, state):
        """Predict Q-values for one featurized state (reshaped to a batch of 1)."""
        state_reshape = tf.reshape(state, (-1, 14, 11, 11))
        return self.training_model.predict_on_batch(state_reshape)

    def get_state(self, states):
        """Extract the 'local' feature planes from each transition state dict.

        Bug fix: iterates over the actual input length instead of the fixed
        MINIBATCH_SIZE, so it also works for smaller batches.
        """
        return [states[index]["local"] for index in range(len(states))]

    def epsilon_decay(self):
        """Multiplicatively decay epsilon until it reaches min_epsilon."""
        self.epsilon = self.epsilon * self.eps_decay if self.epsilon > self.min_epsilon else self.epsilon
diff --git a/DQN_multi_tensorflow/constants.py b/DQN_multi_tensorflow/constants.py
new file mode 100644
index 0000000..401471d
--- /dev/null
+++ b/DQN_multi_tensorflow/constants.py
@@ -0,0 +1,19 @@
# Hyper-parameters for the multi-agent DQN trainer.
MIN_REPLAY_MEMORY_SIZE = 1_000 # Minimum number of steps in a memory to start training
MINIBATCH_SIZE = 128 # How many steps (samples) to use for training
# NOTE(review): DQNAgent.py in this package reads constants.UPDATE_EVERY from
# DQN_new.constants, not UPDATE_TARGET_EVERY defined here -- confirm which
# constants module is the intended source of truth.
UPDATE_TARGET_EVERY = 10 # Terminal states (end of episodes)
MAX_BUFFER_SIZE = 50_000
DISCOUNT = 0.95

# Environment settings
EPISODES = 3000
SHOW_EVERY = 1

# Exploration settings
epsilon = 1 # not a constant, going to be decayed
EPSILON_DECAY = 0.99975
MIN_EPSILON = 0.001

# Stats settings
AGGREGATE_STATS_EVERY = 300 # episodes
SHOW_PREVIEW = False
+
diff --git a/DQN_multi_tensorflow/main_multi.py b/DQN_multi_tensorflow/main_multi.py
new file mode 100644
index 0000000..1f6b0be
--- /dev/null
+++ b/DQN_multi_tensorflow/main_multi.py
@@ -0,0 +1,97 @@
+import random
+import tensorflow as tf
+import pommerman
+import numpy as np
+from pommerman import constants
+#from DQN_new import DQNAgent
+from DQNAgent import DQNAgent
+#from pommerman.agents.simple_agent import SimpleAgent
+from pommerman.agents import SimpleAgent
+import constants
+from utility import featurize2D,featurize
+
+
def main():
    """Train the DQN agent against three SimpleAgents on PommeRadioCompetition-v2.

    Fixes over the original loop:
      * epsilon-greedy was inverted (it exploited with probability epsilon)
        and read the never-decayed module-level ``constants.epsilon`` instead
        of the agent's decayed ``agent1.epsilon``;
      * ``state_feature`` was computed once per episode and never refreshed,
        so every greedy action was chosen from the reset-state features.
    """
    agent1 = DQNAgent()
    agent2 = SimpleAgent()
    agent3 = SimpleAgent()
    agent4 = SimpleAgent()

    agent_list = [agent1, agent2, agent3, agent4]
    env = pommerman.make('PommeRadioCompetition-v2', agent_list)

    episode_rewards = []  # per-episode reward history, for the running mean

    win = 0
    total_game = 0
    win_rate = 0

    for episode in range(constants.EPISODES + 1):

        current_state = env.reset()
        # Featurized view of agent 0's observation, kept in sync below.
        state_feature = featurize(current_state[0])
        episode_reward = 0

        done = False
        while not done:
            # Let every agent pick its action, then override ours.
            actions = env.act(current_state)
            if np.random.random() < agent1.epsilon:
                # Explore: random action with probability epsilon.
                actions[0] = random.randint(0, 5)
            else:
                # Exploit: greedy action from the learned Q-values.
                actions[0] = np.argmax(agent1.get_q_value(state_feature["local"])).tolist()

            new_state, reward, done, info = env.step(actions)
            next_state_feature = featurize(new_state[0])
            episode_reward += reward[0]

            env.render()

            # Store the transition and run one training step.
            agent1.buffer.append([state_feature, actions[0], reward[0], next_state_feature, done])
            agent1.train(done, episode + 1)

            # Advance both the raw and the featurized state.
            current_state = new_state
            state_feature = next_state_feature

        # Episode finished: bookkeeping and logging.
        episode_rewards.append(episode_reward)
        total_game += 1
        if 0 in info.get('winners', []):
            win += 1

        win_rate = win / total_game

        if episode % constants.SHOW_EVERY == 0:
            print("{} of 3000 episodes done, result: {}".format(episode + 1,
                                                                'Win' if 0 in info.get('winners', []) else 'Lose'))
            print("Average Episode Reward: {:.3f}, win_rate_last_1000_game:{:.2f}".format(np.mean(episode_rewards),
                                                                                          win_rate))

        # Reset the rolling win-rate window roughly every 1000 games.
        if total_game >= 999:
            win = 0
            total_game = 0

        agent1.epsilon_decay()

    env.close()


if __name__ == '__main__':
    main()
diff --git a/DQN_multi_tensorflow/replay_memory.py b/DQN_multi_tensorflow/replay_memory.py
new file mode 100644
index 0000000..904ef8c
--- /dev/null
+++ b/DQN_multi_tensorflow/replay_memory.py
@@ -0,0 +1,31 @@
+import random
+import collections
+
+
class replay_Memory():
    """Fixed-size FIFO experience-replay buffer for DQN training."""

    def __init__(self, MAX_BUFFER_SIZE):
        # deque(maxlen=...) silently drops the oldest transition once full.
        self.buffer = collections.deque(maxlen=MAX_BUFFER_SIZE)

    def append(self, transition):
        """Store one transition [state, action, reward, next_state, done]."""
        self.buffer.append(transition)

    def sample(self, batch):
        """Return `batch` transitions drawn uniformly without replacement."""
        return random.sample(self.buffer, batch)

    def size(self):
        """Number of transitions currently stored."""
        return len(self.buffer)
diff --git a/DQN_multi_tensorflow/test.py b/DQN_multi_tensorflow/test.py
new file mode 100644
index 0000000..181b69d
--- /dev/null
+++ b/DQN_multi_tensorflow/test.py
@@ -0,0 +1,3 @@
import random

# Scratch script: print the integers 0-9.
for value in range(10):
    print(value)
\ No newline at end of file
diff --git a/DQN_multi_tensorflow/utility.py b/DQN_multi_tensorflow/utility.py
new file mode 100644
index 0000000..4f01a8d
--- /dev/null
+++ b/DQN_multi_tensorflow/utility.py
@@ -0,0 +1,404 @@
+import numpy as np
+from pommerman import utility
+
+
def featurize(states):
    """Turn one raw observation dict into network-ready features.

    Returns a dict with:
      - 'local': list of 2-D feature planes (see featurize2D);
      - 'additional': flat list of scalars [pos_x, pos_y, ammo,
        blast_strength, can_kick, teammate, enemy values...]. Note that
        position occupies the first TWO slots, so e.g. ammo is index 2.
    """
    feature = {'local': featurize2D(states)}

    # Scalar observation fields, flattened into one vector in a fixed order.
    parts = (
        utility.make_np_float(states["position"]),
        utility.make_np_float([states["ammo"]]),
        utility.make_np_float([states["blast_strength"]]),
        utility.make_np_float([states["can_kick"]]),
        utility.make_np_float([states["teammate"].value]),
        utility.make_np_float([e.value for e in states["enemies"]]),
    )
    feature['additional'] = np.concatenate(parts).tolist()
    return feature
+
+
def reward_shaping(minibatch):
    """Recompute shaped rewards for every transition in a minibatch.

    Each transition is [state, action, reward, next_state, done] where both
    states are dicts holding 'local' planes and 'additional' scalars.
    Returns one shaped reward per transition, computed by reward().
    """
    cur_locals, cur_adds, wrapped_actions, wrapped_rewards = [], [], [], []
    nxt_locals, nxt_adds = [], []

    for state, action, env_reward, next_state, _done in minibatch:
        cur_locals.append(state['local'])
        cur_adds.append(state['additional'])
        wrapped_actions.append([action])
        wrapped_rewards.append([env_reward])
        nxt_locals.append(next_state['local'])
        nxt_adds.append(next_state['additional'])

    # NOTE(review): the NEXT state's features are passed as reward()'s first
    # two arguments and the CURRENT state's as its 4th/5th, so the parameter
    # names inside reward() are effectively swapped -- preserved as-is,
    # confirm intent before changing.
    return [
        reward(nxt_l, nxt_a, act, cur_l, cur_a, wrapped_r)
        for nxt_l, nxt_a, act, cur_l, cur_a, wrapped_r in zip(
            nxt_locals, nxt_adds, wrapped_actions, cur_locals, cur_adds, wrapped_rewards)
    ]
+
+
def featurize2D(states):
    """Stack the 14 2-D feature planes for one observation.

    Ten 0/1 planes derived from the board by rebuild_board (rigid, wood,
    bomb, flame, fog, power_up, agent1-4), followed by the four numeric
    matrices taken directly from the observation.
    """
    planes = list(rebuild_board(states["board"]))
    for key in ("bomb_blast_strength", "bomb_life", "bomb_moving_direction", "flame_life"):
        planes.append(states[key].tolist())
    return planes
+
+
def rebuild_board(board):
    """Split the raw board into ten 0/1 (float) occupancy planes.

    Board item codes: 1 rigid wall, 2 wood, 3 bomb, 4 flame, 5 fog,
    6-8 power-ups, 9-12 the four agents.

    Args:
        board: 2-D iterable of item codes (list of lists or numpy array).

    Returns:
        (rigid, wood, bomb, flame, fog, power_up, agent1, agent2, agent3,
        agent4) -- each a list of lists of 0.0/1.0 floats, same shape as
        ``board``.

    The original repeated an identical nested loop ten times; a shared
    helper removes the duplication without changing the output.
    """
    def _mask(targets):
        # 1.0 where the cell holds one of `targets`, else 0.0.
        return [[1.0 if num in targets else 0.0 for num in row] for row in board]

    rigid = _mask({1})
    wood = _mask({2})
    bomb = _mask({3})
    flame = _mask({4})
    fog = _mask({5})
    power_up = _mask({6, 7, 8})
    agent1 = _mask({9})
    agent2 = _mask({10})
    agent3 = _mask({11})
    agent4 = _mask({12})

    return rigid, wood, bomb, flame, fog, power_up, agent1, agent2, agent3, agent4
+
+
def reward(state_local, state_additions, action, new_state_local, new_state_additions, rewards):
    """Compute the shaped reward for one transition.

    Args:
        state_local: list of 2-D feature planes (index 0=rigid, 1=wood,
            2=bomb). NOTE(review): the caller (reward_shaping) passes the
            NEXT state here and the CURRENT state as new_state_local --
            the naming is swapped; confirm before relying on it.
        state_additions: flat scalars [pos_x, pos_y, ammo, blast_strength,
            can_kick, ...].
        action: the agent's action, possibly wrapped in a one-element list.
        new_state_local / new_state_additions: the other state's planes and
            scalars.
        rewards: the environment reward, possibly wrapped in a one-element
            list/array.

    Returns:
        A scalar shaped reward.

    Bug fix: ``action`` arrives from reward_shaping as a one-element list
    ([a]), so the original ``action == 5`` comparison was always False and
    the bomb/wood bonus never fired; the action is now unwrapped first.
    Unused locals (agents, p0/p1, teammate, enemy ids) and dead
    commented-out blocks were removed.
    """
    # Reward weights for the individual shaping terms.
    r_wood = 0.1
    r_powerup = 0.3
    r_put_bomb = 0.08
    r_win = 1
    r_fail = -5
    r_kick = 0.3

    rigid = np.array(state_local[0])
    wood = np.array(state_local[1])
    position0 = int(state_additions[0])
    position1 = int(state_additions[1])
    ammo = int(state_additions[2])
    blast_strength = int(state_additions[3])
    can_kick = int(state_additions[4])
    rewards = np.array(rewards)
    reward = 0
    sbomb = np.array(new_state_local[2])

    # Terminal outcome of the game.
    if rewards == 1:
        reward += r_win
    if rewards == -1:
        reward += r_fail

    # Power-up pickups: compare scalar stats between the two states.
    sammo = int(new_state_additions[2])
    if ammo > 1 and ammo > sammo:
        reward += r_powerup
    sstrength = int(new_state_additions[3])
    if blast_strength > sstrength:
        reward += r_powerup
    skick = int(new_state_additions[4])
    if can_kick and not skick:
        reward += r_powerup

    # Bomb placement: flat bonus plus a bonus per wood tile in the blast.
    act = action[0] if isinstance(action, (list, tuple, np.ndarray)) else action
    if act == 5:
        reward += r_put_bomb
        bomb_flame = build_flame(position0, position1, rigid, blast_strength)
        num_wood = np.count_nonzero(wood * bomb_flame == 1)
        reward += num_wood * r_wood

    # Kick: a bomb sits on our cell in the other state and we did not lose.
    if sbomb[position0, position1] == 1 and rewards != -1:
        reward += r_kick

    return reward
+
+
def build_flame(position0, position1, rigid, blast_strength):
    """Return a 0/1 mask of the cells covered by a bomb's blast.

    The blast reaches ``blast_strength - 1`` cells in each cardinal
    direction from (position0, position1), clipped at the board border and
    cut short by rigid walls (wood is not considered a blocker here).

    Bug fix: the nearest blocking wall used to be picked with
    ``np.argwhere(...)[:, 0][0]`` / ``[0, :][1]``, i.e. the FIRST wall found
    by argwhere -- for the up/left rays that is the FARTHEST wall, letting
    flames burn through intermediate walls. The nearest wall is the max
    row/col on the up/left side and the min row/col on the down/right side.
    """
    m, n = position0, position1
    l = blast_strength - 1
    f = [l, l, l, l]  # Scope of flame: up, down, left, right
    bomb_flame = np.zeros_like(rigid)

    # Candidate flame rays in each direction, clipped at the board border.
    flame_up = np.zeros_like(bomb_flame)
    flame_down = np.zeros_like(bomb_flame)
    flame_left = np.zeros_like(bomb_flame)
    flame_right = np.zeros_like(bomb_flame)
    if m - f[0] < 0:  # upper border
        f[0] = m
    flame_up[m - f[0]:m, n] = 1
    if m + f[1] > bomb_flame.shape[0] - 1:  # lower border
        f[1] = bomb_flame.shape[0] - 1 - m
    flame_down[m + 1:m + f[1] + 1, n] = 1
    if n - f[2] < 0:  # left border
        f[2] = n
    flame_left[m, n - f[2]:n] = 1
    if n + f[3] > bomb_flame.shape[0] - 1:  # right border
        f[3] = bomb_flame.shape[0] - 1 - n
    flame_right[m, n + 1:n + f[3] + 1] = 1

    # Shorten each ray to stop just in front of the NEAREST rigid wall it hits.
    rigid_up = np.argwhere(flame_up * rigid == 1)
    if rigid_up.size != 0:
        f[0] = m - np.max(rigid_up[:, 0]) - 1
    rigid_down = np.argwhere(flame_down * rigid == 1)
    if rigid_down.size != 0:
        f[1] = np.min(rigid_down[:, 0]) - m - 1
    rigid_left = np.argwhere(flame_left * rigid == 1)
    if rigid_left.size != 0:
        f[2] = n - np.max(rigid_left[:, 1]) - 1
    rigid_right = np.argwhere(flame_right * rigid == 1)
    if rigid_right.size != 0:
        f[3] = np.min(rigid_right[:, 1]) - n - 1

    bomb_flame[m - f[0]:m + f[1] + 1, n] = 1
    bomb_flame[m, n - f[2]:n + f[3] + 1] = 1

    return bomb_flame
diff --git a/DQN_new/DQNAgent.py b/DQN_new/DQNAgent.py
index 2921053..6da3d83 100644
--- a/DQN_new/DQNAgent.py
+++ b/DQN_new/DQNAgent.py
@@ -10,6 +10,8 @@
from DQN_new import constants
from replay_memory import replay_Memory
import numpy as np
+import os
+import tensorflow as tf
class DQNAgent(BaseAgent):
@@ -32,20 +34,20 @@ def __init__(self, character=characters.Bomber):
def new_model(self):
model = Sequential()
- input_shape = (constants.MINIBATCH_SIZE, 9, 8, 8)
+ input_shape = (constants.MINIBATCH_SIZE, 12, 8, 8)
model.add(Conv2D(256, 3, input_shape=input_shape[1:], activation="relu"))
# print(model.output_shape)
- model.add(MaxPooling2D(pool_size=(2, 2), data_format='channels_first'))
+ model.add(MaxPooling2D(pool_size=(2, 2), data_format="channels_first"))
model.add(Dropout(0.2))
model.add(Conv2D(256, 2, activation="relu"))
- model.add(MaxPooling2D(pool_size=(2, 2), data_format='channels_first'))
+ model.add(MaxPooling2D(pool_size=(2, 2), data_format="channels_first"))
model.add(Dropout(0.2))
model.add(Flatten())
model.add(Dense(64))
- model.add(Dense(6, activation='linear'))
+ model.add(Dense(6, activation='softmax'))
model.compile(loss="mse", optimizer=Adam(learning_rate=0.001), metrics=['accuracy'])
return model
@@ -53,11 +55,28 @@ def new_model(self):
def act(self, obs, action_space):
return self.baseAgent.act(obs, Discrete(6))
- def train(self, done):
+ def train(self, done, numOfEpisode):
if self.buffer.size() < constants.MIN_REPLAY_MEMORY_SIZE:
return
+ # if numOfEpisode == 0:
+ # self.training_model.load_weights('./checkpoints/my_checkpoint')
+ # self.trained_model.load_weights('./checkpoints/my_checkpoint')
+
+ if numOfEpisode % 999 == 0:
+ checkpoint_path = "/checkpoints/training1/cp.ckpt"
+ checkpoint_dir = os.path.dirname(checkpoint_path)
+
+ # 检查点重用
+ cp_callback = tf.keras.callbacks.ModelCheckpoint(filepath=checkpoint_path,
+ save_weights_only=True,
+ verbose=1)
+ # 完成一次训练后存档参数
+ if numOfEpisode == 4999:
+ self.training_model.save_weights('/checkpoints/my_checkpoint')
+
+
# 取样
mini_batch = self.buffer.sample(constants.MINIBATCH_SIZE)
@@ -71,7 +90,7 @@ def train(self, done):
# X为state,Y为所预测的action
X = []
- y = []
+ Y = []
for index, (current_state, action, reward, new_current_state, done) in enumerate(mini_batch):
@@ -88,26 +107,211 @@ def train(self, done):
current_qs[action] = next_q
# 添加训练数据
- X.append(current_state)
- y.append(current_qs)
+ X.append(np.array(current_state))
+ # X.append(tf.reshape(current_state,(-1,12,8,8)))
+ Y.append(np.array(current_qs))
+
+ # 开始训练
+ # X = tf.reshape(X, (-1, 12, 8, 8))
+ # train_dataset = tf.data.Dataset.from_tensor_slices((X, Y))
+ # self.training_model.fit(train_dataset, verbose=0, shuffle=False)
- # 开始训练
- self.training_model.fit(np.array(X), np.array(y), batch_size=constants.MINIBATCH_SIZE, verbose=0, shuffle=False)
+ self.training_model.fit(np.array(X), np.array(Y), epochs=4, batch_size=constants.MINIBATCH_SIZE, verbose=0,
+ shuffle=False)
# 更新网络更新计数器
if done:
self.target_update_counter += 1
# 网络更新计数器达到上限,更新网络
- if self.target_update_counter > constants.UPDATE_TARGET_EVERY:
+ if self.target_update_counter > constants.UPDATE_EVERY:
self.trained_model.set_weights(self.training_model.get_weights())
- self.target_update_counter = 0
+ self.target_update_counter = 0
def get_q_value(self, state):
-
- return self.training_model.predict_on_batch(np.array(state).reshape(-1,9,8,8))
+ state_reshape = np.array(state).reshape(-1, 12, 8, 8)
+ return self.training_model.predict_on_batch(state_reshape)
# epsilon衰减
    def reward(self, featurel, featurea, action, sl, sa, rewards):
        """Compute the shaped reward for one transition.

        Args:
            featurel: per-plane local features as tensors (index 0=rigid,
                1=wood, 2=bomb, 3=power_up, 4=agents).
            featurea: scalar feature tensor [pos_x, pos_y, ammo,
                blast_strength, can_kick, teammate, enemies].
            action: the chosen action as a tensor.
            sl / sa: the other state's local / scalar features.
            rewards: environment reward tensor.

        Returns:
            A scalar shaped reward.

        NOTE(review): whether featurel/featurea hold the current or the next
        state depends entirely on the caller's argument order -- confirm at
        the call site before relying on the parameter names.
        """
        # set up reward: weights for the individual shaping terms.
        r_wood = 0.1
        r_powerup = 0.3
        r_put_bomb = 0.08
        r_win = 1
        r_fail = -5
        r_kick = 0.3
        r_kill_enemy_maybe = 0.5  # only referenced in the disabled block below
        r_dies = -3  # only referenced in the disabled block below

        rigid = featurel[0].numpy()
        wood = featurel[1].numpy()
        bomb = featurel[2].numpy()  # unused except in the disabled blocks
        agents = featurel[4].numpy()  # unused below
        power_up = featurel[3]  # unused below
        position0 = int(featurea[0].item())
        position1 = int(featurea[1].item())
        p0 = int(sa[0].item())  # unused below
        p1 = int(sa[1].item())  # unused below
        ammo = int(featurea[2].item())
        blast_strength = int(featurea[3].item())
        can_kick = int(featurea[4].item())
        teammate = int(featurea[5].item())  # unused below
        enemies = int(featurea[6].item())  # unused below
        rewards = rewards.numpy()
        reward = 0
        # sagents = sl[4]
        sbomb = sl[2].numpy()

        # reward_done: terminal outcome of the game.
        # print(rewards)
        if rewards == 1:
            reward += r_win
        if rewards == -1:
            reward += r_fail

        # reward_powerup: compare scalar stats between the two states.
        sammo = int(sa[2].item())
        if ammo > 1 and ammo > sammo:
            reward += r_powerup
        sstrength = int(sa[3].item())
        if blast_strength > sstrength:
            reward += r_powerup
        skick = int(sa[4].item())
        if can_kick and not skick:
            reward += r_powerup
        # print(action)

        # reward_wood: flat bonus for laying a bomb, plus a bonus per wood
        # tile inside its blast area.
        if int(action.item()) == 5:
            reward += r_put_bomb
            bomb_flame = self.build_flame(position0, position1, rigid, blast_strength)
            num_wood = np.count_nonzero(wood * bomb_flame == 1)
            reward += num_wood * r_wood
        '''
        # test
        print('rigid')
        print(rigid)
        print('position_bomb')
        print(position_bomb)
        print('f')
        print(f)
        print('l')
        print(l)
        print('bomb_flame')
        print(bomb_flame)
        print('num_wood')
        print(num_wood)
        print('-------------------------------------')
        '''
        """
        exist_bomb = []
        for row, rowbomb in enumerate(bomb):
            for col, _ in enumerate(rowbomb):
                if bomb[row, col] == 1:
                    exist_bomb.append((row, col))
        #print(bomb)
        #print(exist_bomb)

        if exist_bomb:
            for ebomb in exist_bomb:
                bomb_flame1 = self.build_flame(ebomb[0], ebomb[1], rigid, blast_strength)
                if bomb_flame1[position0, position1] == 1:
                    reward -= 0.5
                #print(bomb_flame1)
        """
        # reward_kick: a bomb occupies our cell and we did not lose.
        if sbomb[position0, position1] == 1 and rewards != -1:
            reward += r_kick
        '''
        # reward_kill_enemy
        enemy_position = [] #需要知道敌人位置
        if int(action.item()) == 5:
            bomb_position = np.array([position0,position1])
            bomb_flame = self.build_flame(position0, position1, rigid, blast_strength)
            if bomb_position in np.argwhere(bomb==1) and np.argwhere(enemy_position*bomb_flame == 1).size != 0:
                reward += r_kill_enemy_maybe
        '''

        '''
        # reward_dies
        if is_alive == 0:
            reward += r_dies
        '''

        return reward
+
    def build_flame(self, position0, position1, rigid, blast_strength):
        """Return a 0/1 mask of the board cells covered by a bomb's blast.

        The blast reaches blast_strength - 1 cells in the four cardinal
        directions, clipped at the board border and shortened by rigid walls.

        NOTE(review): the nearest-wall lookups below use
        ``np.argwhere(...)[:, 0][0]`` / ``[0, :][1]``, i.e. the FIRST wall
        found by argwhere, which for the up/left rays is the FARTHEST wall --
        flames can burn through intermediate walls. Likely a bug: the nearest
        wall is the max row/col on the up/left side (min on down/right).
        """
        position_bomb = np.array([position0, position1])
        m = position_bomb[0]
        n = position_bomb[1]
        l = blast_strength - 1
        f = [l, l, l, l]  # Scope of flame: up down left right
        bomb_flame = np.zeros_like(rigid)

        # Determine whether a rigid wall or the board border cuts each ray short.
        flame_up = np.zeros_like(bomb_flame)
        flame_down = np.zeros_like(bomb_flame)
        flame_left = np.zeros_like(bomb_flame)
        flame_right = np.zeros_like(bomb_flame)
        if m - f[0] < 0:  # upper border
            f[0] = m
        flame_up[m - f[0]:m, n] = 1
        if m + f[1] > bomb_flame.shape[0] - 1:  # lower border
            f[1] = bomb_flame.shape[0] - 1 - m
        flame_down[m + 1:m + f[1] + 1, n] = 1
        if n - f[2] < 0:  # left border
            f[2] = n
        flame_left[m, n - f[2]:n] = 1
        if n + f[3] > bomb_flame.shape[0] - 1:  # right border
            f[3] = bomb_flame.shape[0] - 1 - n
        flame_right[m, n + 1:n + f[3] + 1] = 1

        # Cells of each candidate ray that contain a rigid wall.
        rigid_0 = flame_up * rigid
        rigid_1 = flame_down * rigid
        rigid_2 = flame_left * rigid
        rigid_3 = flame_right * rigid
        if np.argwhere(rigid_0 == 1).size != 0:  # rigid wall above
            rigid_up = np.max(np.argwhere(rigid_0 == 1)[:, 0][0])
            if rigid_up >= m - f[0]:
                f[0] = m - rigid_up - 1
        if np.argwhere(rigid_1 == 1).size != 0:  # rigid wall below
            rigid_down = np.min(np.argwhere(rigid_1 == 1)[:, 0][0])
            if rigid_down <= m + f[1]:
                f[1] = rigid_down - m - 1
        if np.argwhere(rigid_2 == 1).size != 0:  # rigid wall on the left
            rigid_left = np.max(np.argwhere(rigid_2 == 1)[0, :][1])
            if rigid_left >= n - f[2]:
                f[2] = n - rigid_left - 1
        if np.argwhere(rigid_3 == 1).size != 0:  # rigid wall on the right
            rigid_right = np.min(np.argwhere(rigid_3 == 1)[0, :][1])
            if rigid_right <= n + f[3]:
                f[3] = rigid_right - n - 1
        bomb_flame[m - f[0]:m + f[1] + 1, n] = 1
        bomb_flame[m, n - f[2]:n + f[3] + 1] = 1

        '''
        # test
        print('rigid')
        print(rigid)
        print('position_bomb')
        print(position_bomb)
        print('f')
        print(f)
        print('l')
        print(l)
        print('bomb_flame')
        '''
        # print(bomb_flame)
        # print(blast_strength)
        '''
        print('num_wood')
        print(num_wood)
        print('-------------------------------------')
        '''
        return bomb_flame
+
def epsilon_decay(self):
    """Multiplicatively decay the exploration rate.

    Shrinks ``self.epsilon`` by the factor ``self.eps_decay`` on each call,
    leaving it untouched once it has dropped to ``self.min_epsilon`` or below.
    """
    if self.epsilon > self.min_epsilon:
        self.epsilon = self.epsilon * self.eps_decay
diff --git a/DQN_new/constants.py b/DQN_new/constants.py
index 8cc9d7f..b1edc9d 100644
--- a/DQN_new/constants.py
+++ b/DQN_new/constants.py
@@ -1,6 +1,6 @@
MIN_REPLAY_MEMORY_SIZE = 1_000 # Minimum number of steps in a memory to start training
-MINIBATCH_SIZE = 256 # How many steps (samples) to use for training
-UPDATE_TARGET_EVERY = 10 # Terminal states (end of episodes)
+MINIBATCH_SIZE = 128 # How many steps (samples) to use for training
+UPDATE_EVERY = 10 # Terminal states (end of episodes)
MAX_BUFFER_SIZE = 50_000
DISCOUNT = 0.95
@@ -17,3 +17,6 @@
AGGREGATE_STATS_EVERY = 300 # episodes
SHOW_PREVIEW = False
+
+def MAX_BUFFER_SIZE_PRE():
+ return None
\ No newline at end of file
diff --git a/DQN_new/main.py b/DQN_new/main.py
index 1daa910..3a0b335 100644
--- a/DQN_new/main.py
+++ b/DQN_new/main.py
@@ -53,13 +53,13 @@ def main():
# 每一定局数显示游戏画面
# if constants.SHOW_PREVIEW and not episode % constants.SHOW_GAME:
- # env.render()
+ env.render()
# 储存记忆
agent1.buffer.append([state_feature, actions, reward, next_state_feature, done])
# 学习!
- agent1.train(done)
+ agent1.train(done, episode+1)
# 更新state
current_state = new_state
diff --git a/DQN_new/utility.py b/DQN_new/utility.py
index 3b14c0d..7792559 100644
--- a/DQN_new/utility.py
+++ b/DQN_new/utility.py
@@ -95,15 +95,49 @@ def rebuild_board(board):
new_row.append(0.0)
power_up.append(new_row)
- agents = []
- # 如果是9,10,11,12代为此处为agent,则取1.0
+ agent1 = []
+ # 如果是9为此处为agent,则取1.0
for row in board:
new_row = []
for num in row:
- if num == 9 or num == 10 or num == 11 or num == 12:
+ if num == 9:
new_row.append(1.0)
else:
new_row.append(0.0)
- agents.append(new_row)
+ agent1.append(new_row)
+
+ agent2 = []
+ # 如果是10为此处为agent,则取1.0
+ for row in board:
+ new_row = []
+ for num in row:
+ if num == 10:
+ new_row.append(1.0)
+ else:
+ new_row.append(0.0)
+ agent2.append(new_row)
+
+ agent3 = []
+ # 如果是11为此处为agent,则取1.0
+ for row in board:
+ new_row = []
+ for num in row:
+ if num == 11:
+ new_row.append(1.0)
+ else:
+ new_row.append(0.0)
+ agent3.append(new_row)
+
+ agent4 = []
+ # 如果是12为此处为agent,则取1.0
+ for row in board:
+ new_row = []
+ for num in row:
+ if num == 12:
+ new_row.append(1.0)
+ else:
+ new_row.append(0.0)
+ agent4.append(new_row)
+
+ return rigid, wood, bomb, power_up, agent1, agent2, agent3, agent4
- return rigid, wood,bomb, power_up, agents
diff --git a/DockerAgent/model/first_model.h5 b/DockerAgent/model/first_model.h5
new file mode 100644
index 0000000..1dfde1d
Binary files /dev/null and b/DockerAgent/model/first_model.h5 differ
diff --git a/Group_C/Agent1/keras_metadata.pb b/Group_C/Agent1/keras_metadata.pb
new file mode 100644
index 0000000..30e5627
--- /dev/null
+++ b/Group_C/Agent1/keras_metadata.pb
@@ -0,0 +1,12 @@
+
+Broot"_tf_keras_sequential*B{"name": "sequential", "trainable": true, "expects_training_arg": true, "dtype": "float32", "batch_input_shape": null, "must_restore_from_config": false, "class_name": "Sequential", "config": {"name": "sequential", "layers": [{"class_name": "InputLayer", "config": {"batch_input_shape": {"class_name": "__tuple__", "items": [null, 18, 11, 11]}, "dtype": "float32", "sparse": false, "ragged": false, "name": "conv2d_input"}}, {"class_name": "Conv2D", "config": {"name": "conv2d", "trainable": true, "batch_input_shape": {"class_name": "__tuple__", "items": [null, 18, 11, 11]}, "dtype": "float32", "filters": 256, "kernel_size": {"class_name": "__tuple__", "items": [3, 3]}, "strides": {"class_name": "__tuple__", "items": [1, 1]}, "padding": "same", "data_format": "channels_first", "dilation_rate": {"class_name": "__tuple__", "items": [1, 1]}, "groups": 1, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}, {"class_name": "Conv2D", "config": {"name": "conv2d_1", "trainable": true, "dtype": "float32", "filters": 256, "kernel_size": {"class_name": "__tuple__", "items": [3, 3]}, "strides": {"class_name": "__tuple__", "items": [1, 1]}, "padding": "same", "data_format": "channels_first", "dilation_rate": {"class_name": "__tuple__", "items": [1, 1]}, "groups": 1, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}, {"class_name": "Conv2D", "config": {"name": "conv2d_2", "trainable": true, "dtype": "float32", "filters": 256, 
"kernel_size": {"class_name": "__tuple__", "items": [3, 3]}, "strides": {"class_name": "__tuple__", "items": [1, 1]}, "padding": "same", "data_format": "channels_first", "dilation_rate": {"class_name": "__tuple__", "items": [1, 1]}, "groups": 1, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}, {"class_name": "Flatten", "config": {"name": "flatten", "trainable": true, "dtype": "float32", "data_format": "channels_last"}}, {"class_name": "Dense", "config": {"name": "dense", "trainable": true, "dtype": "float32", "units": 128, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}, {"class_name": "Dense", "config": {"name": "dense_1", "trainable": true, "dtype": "float32", "units": 6, "activation": "linear", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}]}, "shared_object_id": 17, "input_spec": {"class_name": "InputSpec", "config": {"dtype": null, "shape": null, "ndim": null, "max_ndim": null, "min_ndim": 4, "axes": {"-3": 18}}, "shared_object_id": 18}, "build_input_shape": {"class_name": "TensorShape", "items": [null, 18, 11, 11]}, "is_graph_network": true, "full_save_spec": {"class_name": "__tuple__", "items": [[{"class_name": "TypeSpec", "type_spec": "tf.TensorSpec", "serialized": [{"class_name": 
"TensorShape", "items": [null, 18, 11, 11]}, "float32", "conv2d_input"]}], {}]}, "save_spec": {"class_name": "TypeSpec", "type_spec": "tf.TensorSpec", "serialized": [{"class_name": "TensorShape", "items": [null, 18, 11, 11]}, "float32", "conv2d_input"]}, "keras_version": "2.6.0", "backend": "tensorflow", "model_config": {"class_name": "Sequential", "config": {"name": "sequential", "layers": [{"class_name": "InputLayer", "config": {"batch_input_shape": {"class_name": "__tuple__", "items": [null, 18, 11, 11]}, "dtype": "float32", "sparse": false, "ragged": false, "name": "conv2d_input"}, "shared_object_id": 0}, {"class_name": "Conv2D", "config": {"name": "conv2d", "trainable": true, "batch_input_shape": {"class_name": "__tuple__", "items": [null, 18, 11, 11]}, "dtype": "float32", "filters": 256, "kernel_size": {"class_name": "__tuple__", "items": [3, 3]}, "strides": {"class_name": "__tuple__", "items": [1, 1]}, "padding": "same", "data_format": "channels_first", "dilation_rate": {"class_name": "__tuple__", "items": [1, 1]}, "groups": 1, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 1}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 2}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 3}, {"class_name": "Conv2D", "config": {"name": "conv2d_1", "trainable": true, "dtype": "float32", "filters": 256, "kernel_size": {"class_name": "__tuple__", "items": [3, 3]}, "strides": {"class_name": "__tuple__", "items": [1, 1]}, "padding": "same", "data_format": "channels_first", "dilation_rate": {"class_name": "__tuple__", "items": [1, 1]}, "groups": 1, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 4}, "bias_initializer": {"class_name": "Zeros", "config": {}, 
"shared_object_id": 5}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 6}, {"class_name": "Conv2D", "config": {"name": "conv2d_2", "trainable": true, "dtype": "float32", "filters": 256, "kernel_size": {"class_name": "__tuple__", "items": [3, 3]}, "strides": {"class_name": "__tuple__", "items": [1, 1]}, "padding": "same", "data_format": "channels_first", "dilation_rate": {"class_name": "__tuple__", "items": [1, 1]}, "groups": 1, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 7}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 8}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 9}, {"class_name": "Flatten", "config": {"name": "flatten", "trainable": true, "dtype": "float32", "data_format": "channels_last"}, "shared_object_id": 10}, {"class_name": "Dense", "config": {"name": "dense", "trainable": true, "dtype": "float32", "units": 128, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 11}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 12}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 13}, {"class_name": "Dense", "config": {"name": "dense_1", "trainable": true, "dtype": "float32", "units": 6, "activation": "linear", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 14}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 15}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, 
"kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 16}]}}, "training_config": {"loss": "mse", "metrics": [[{"class_name": "MeanMetricWrapper", "config": {"name": "accuracy", "dtype": "float32", "fn": "categorical_accuracy"}, "shared_object_id": 19}]], "weighted_metrics": null, "loss_weights": null, "optimizer_config": {"class_name": "Adam", "config": {"name": "Adam", "learning_rate": 0.0001, "decay": 0.0, "beta_1": 0.9, "beta_2": 0.999, "epsilon": 1e-07, "amsgrad": false}}}}2
+
+root.layer_with_weights-0"_tf_keras_layer*
+{"name": "conv2d", "trainable": true, "expects_training_arg": false, "dtype": "float32", "batch_input_shape": {"class_name": "__tuple__", "items": [null, 18, 11, 11]}, "stateful": false, "must_restore_from_config": false, "class_name": "Conv2D", "config": {"name": "conv2d", "trainable": true, "batch_input_shape": {"class_name": "__tuple__", "items": [null, 18, 11, 11]}, "dtype": "float32", "filters": 256, "kernel_size": {"class_name": "__tuple__", "items": [3, 3]}, "strides": {"class_name": "__tuple__", "items": [1, 1]}, "padding": "same", "data_format": "channels_first", "dilation_rate": {"class_name": "__tuple__", "items": [1, 1]}, "groups": 1, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 1}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 2}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 3, "input_spec": {"class_name": "InputSpec", "config": {"dtype": null, "shape": null, "ndim": null, "max_ndim": null, "min_ndim": 4, "axes": {"-3": 18}}, "shared_object_id": 18}, "build_input_shape": {"class_name": "TensorShape", "items": [null, 18, 11, 11]}}2
+ root.layer_with_weights-1"_tf_keras_layer* {"name": "conv2d_1", "trainable": true, "expects_training_arg": false, "dtype": "float32", "batch_input_shape": null, "stateful": false, "must_restore_from_config": false, "class_name": "Conv2D", "config": {"name": "conv2d_1", "trainable": true, "dtype": "float32", "filters": 256, "kernel_size": {"class_name": "__tuple__", "items": [3, 3]}, "strides": {"class_name": "__tuple__", "items": [1, 1]}, "padding": "same", "data_format": "channels_first", "dilation_rate": {"class_name": "__tuple__", "items": [1, 1]}, "groups": 1, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 4}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 5}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 6, "input_spec": {"class_name": "InputSpec", "config": {"dtype": null, "shape": null, "ndim": null, "max_ndim": null, "min_ndim": 4, "axes": {"-3": 256}}, "shared_object_id": 20}, "build_input_shape": {"class_name": "TensorShape", "items": [null, 256, 11, 11]}}2
+ root.layer_with_weights-2"_tf_keras_layer* {"name": "conv2d_2", "trainable": true, "expects_training_arg": false, "dtype": "float32", "batch_input_shape": null, "stateful": false, "must_restore_from_config": false, "class_name": "Conv2D", "config": {"name": "conv2d_2", "trainable": true, "dtype": "float32", "filters": 256, "kernel_size": {"class_name": "__tuple__", "items": [3, 3]}, "strides": {"class_name": "__tuple__", "items": [1, 1]}, "padding": "same", "data_format": "channels_first", "dilation_rate": {"class_name": "__tuple__", "items": [1, 1]}, "groups": 1, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 7}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 8}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 9, "input_spec": {"class_name": "InputSpec", "config": {"dtype": null, "shape": null, "ndim": null, "max_ndim": null, "min_ndim": 4, "axes": {"-3": 256}}, "shared_object_id": 21}, "build_input_shape": {"class_name": "TensorShape", "items": [null, 256, 11, 11]}}2
+root.layer-3"_tf_keras_layer*{"name": "flatten", "trainable": true, "expects_training_arg": false, "dtype": "float32", "batch_input_shape": null, "stateful": false, "must_restore_from_config": false, "class_name": "Flatten", "config": {"name": "flatten", "trainable": true, "dtype": "float32", "data_format": "channels_last"}, "shared_object_id": 10, "input_spec": {"class_name": "InputSpec", "config": {"dtype": null, "shape": null, "ndim": null, "max_ndim": null, "min_ndim": 1, "axes": {}}, "shared_object_id": 22}}2
+root.layer_with_weights-3"_tf_keras_layer*{"name": "dense", "trainable": true, "expects_training_arg": false, "dtype": "float32", "batch_input_shape": null, "stateful": false, "must_restore_from_config": false, "class_name": "Dense", "config": {"name": "dense", "trainable": true, "dtype": "float32", "units": 128, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 11}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 12}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 13, "input_spec": {"class_name": "InputSpec", "config": {"dtype": null, "shape": null, "ndim": null, "max_ndim": null, "min_ndim": 2, "axes": {"-1": 30976}}, "shared_object_id": 23}, "build_input_shape": {"class_name": "TensorShape", "items": [null, 30976]}}2
+root.layer_with_weights-4"_tf_keras_layer*{"name": "dense_1", "trainable": true, "expects_training_arg": false, "dtype": "float32", "batch_input_shape": null, "stateful": false, "must_restore_from_config": false, "class_name": "Dense", "config": {"name": "dense_1", "trainable": true, "dtype": "float32", "units": 6, "activation": "linear", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 14}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 15}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 16, "input_spec": {"class_name": "InputSpec", "config": {"dtype": null, "shape": null, "ndim": null, "max_ndim": null, "min_ndim": 2, "axes": {"-1": 128}}, "shared_object_id": 24}, "build_input_shape": {"class_name": "TensorShape", "items": [null, 128]}}2
+Rroot.keras_api.metrics.0"_tf_keras_metric*{"class_name": "Mean", "name": "loss", "dtype": "float32", "config": {"name": "loss", "dtype": "float32"}, "shared_object_id": 25}2
+Sroot.keras_api.metrics.1"_tf_keras_metric*{"class_name": "MeanMetricWrapper", "name": "accuracy", "dtype": "float32", "config": {"name": "accuracy", "dtype": "float32", "fn": "categorical_accuracy"}, "shared_object_id": 19}2
\ No newline at end of file
diff --git a/Group_C/Agent1/saved_model.pb b/Group_C/Agent1/saved_model.pb
new file mode 100644
index 0000000..59c1b16
Binary files /dev/null and b/Group_C/Agent1/saved_model.pb differ
diff --git a/Group_C/Agent1/variables/variables.data-00000-of-00001 b/Group_C/Agent1/variables/variables.data-00000-of-00001
new file mode 100644
index 0000000..6d0387f
Binary files /dev/null and b/Group_C/Agent1/variables/variables.data-00000-of-00001 differ
diff --git a/Group_C/Agent1/variables/variables.index b/Group_C/Agent1/variables/variables.index
new file mode 100644
index 0000000..3c34d50
Binary files /dev/null and b/Group_C/Agent1/variables/variables.index differ
diff --git a/Group_C/Agent3/keras_metadata.pb b/Group_C/Agent3/keras_metadata.pb
new file mode 100644
index 0000000..30e5627
--- /dev/null
+++ b/Group_C/Agent3/keras_metadata.pb
@@ -0,0 +1,12 @@
+
+Broot"_tf_keras_sequential*B{"name": "sequential", "trainable": true, "expects_training_arg": true, "dtype": "float32", "batch_input_shape": null, "must_restore_from_config": false, "class_name": "Sequential", "config": {"name": "sequential", "layers": [{"class_name": "InputLayer", "config": {"batch_input_shape": {"class_name": "__tuple__", "items": [null, 18, 11, 11]}, "dtype": "float32", "sparse": false, "ragged": false, "name": "conv2d_input"}}, {"class_name": "Conv2D", "config": {"name": "conv2d", "trainable": true, "batch_input_shape": {"class_name": "__tuple__", "items": [null, 18, 11, 11]}, "dtype": "float32", "filters": 256, "kernel_size": {"class_name": "__tuple__", "items": [3, 3]}, "strides": {"class_name": "__tuple__", "items": [1, 1]}, "padding": "same", "data_format": "channels_first", "dilation_rate": {"class_name": "__tuple__", "items": [1, 1]}, "groups": 1, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}, {"class_name": "Conv2D", "config": {"name": "conv2d_1", "trainable": true, "dtype": "float32", "filters": 256, "kernel_size": {"class_name": "__tuple__", "items": [3, 3]}, "strides": {"class_name": "__tuple__", "items": [1, 1]}, "padding": "same", "data_format": "channels_first", "dilation_rate": {"class_name": "__tuple__", "items": [1, 1]}, "groups": 1, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}, {"class_name": "Conv2D", "config": {"name": "conv2d_2", "trainable": true, "dtype": "float32", "filters": 256, 
"kernel_size": {"class_name": "__tuple__", "items": [3, 3]}, "strides": {"class_name": "__tuple__", "items": [1, 1]}, "padding": "same", "data_format": "channels_first", "dilation_rate": {"class_name": "__tuple__", "items": [1, 1]}, "groups": 1, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}, {"class_name": "Flatten", "config": {"name": "flatten", "trainable": true, "dtype": "float32", "data_format": "channels_last"}}, {"class_name": "Dense", "config": {"name": "dense", "trainable": true, "dtype": "float32", "units": 128, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}, {"class_name": "Dense", "config": {"name": "dense_1", "trainable": true, "dtype": "float32", "units": 6, "activation": "linear", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}]}, "shared_object_id": 17, "input_spec": {"class_name": "InputSpec", "config": {"dtype": null, "shape": null, "ndim": null, "max_ndim": null, "min_ndim": 4, "axes": {"-3": 18}}, "shared_object_id": 18}, "build_input_shape": {"class_name": "TensorShape", "items": [null, 18, 11, 11]}, "is_graph_network": true, "full_save_spec": {"class_name": "__tuple__", "items": [[{"class_name": "TypeSpec", "type_spec": "tf.TensorSpec", "serialized": [{"class_name": 
"TensorShape", "items": [null, 18, 11, 11]}, "float32", "conv2d_input"]}], {}]}, "save_spec": {"class_name": "TypeSpec", "type_spec": "tf.TensorSpec", "serialized": [{"class_name": "TensorShape", "items": [null, 18, 11, 11]}, "float32", "conv2d_input"]}, "keras_version": "2.6.0", "backend": "tensorflow", "model_config": {"class_name": "Sequential", "config": {"name": "sequential", "layers": [{"class_name": "InputLayer", "config": {"batch_input_shape": {"class_name": "__tuple__", "items": [null, 18, 11, 11]}, "dtype": "float32", "sparse": false, "ragged": false, "name": "conv2d_input"}, "shared_object_id": 0}, {"class_name": "Conv2D", "config": {"name": "conv2d", "trainable": true, "batch_input_shape": {"class_name": "__tuple__", "items": [null, 18, 11, 11]}, "dtype": "float32", "filters": 256, "kernel_size": {"class_name": "__tuple__", "items": [3, 3]}, "strides": {"class_name": "__tuple__", "items": [1, 1]}, "padding": "same", "data_format": "channels_first", "dilation_rate": {"class_name": "__tuple__", "items": [1, 1]}, "groups": 1, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 1}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 2}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 3}, {"class_name": "Conv2D", "config": {"name": "conv2d_1", "trainable": true, "dtype": "float32", "filters": 256, "kernel_size": {"class_name": "__tuple__", "items": [3, 3]}, "strides": {"class_name": "__tuple__", "items": [1, 1]}, "padding": "same", "data_format": "channels_first", "dilation_rate": {"class_name": "__tuple__", "items": [1, 1]}, "groups": 1, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 4}, "bias_initializer": {"class_name": "Zeros", "config": {}, 
"shared_object_id": 5}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 6}, {"class_name": "Conv2D", "config": {"name": "conv2d_2", "trainable": true, "dtype": "float32", "filters": 256, "kernel_size": {"class_name": "__tuple__", "items": [3, 3]}, "strides": {"class_name": "__tuple__", "items": [1, 1]}, "padding": "same", "data_format": "channels_first", "dilation_rate": {"class_name": "__tuple__", "items": [1, 1]}, "groups": 1, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 7}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 8}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 9}, {"class_name": "Flatten", "config": {"name": "flatten", "trainable": true, "dtype": "float32", "data_format": "channels_last"}, "shared_object_id": 10}, {"class_name": "Dense", "config": {"name": "dense", "trainable": true, "dtype": "float32", "units": 128, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 11}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 12}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 13}, {"class_name": "Dense", "config": {"name": "dense_1", "trainable": true, "dtype": "float32", "units": 6, "activation": "linear", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 14}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 15}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, 
"kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 16}]}}, "training_config": {"loss": "mse", "metrics": [[{"class_name": "MeanMetricWrapper", "config": {"name": "accuracy", "dtype": "float32", "fn": "categorical_accuracy"}, "shared_object_id": 19}]], "weighted_metrics": null, "loss_weights": null, "optimizer_config": {"class_name": "Adam", "config": {"name": "Adam", "learning_rate": 0.0001, "decay": 0.0, "beta_1": 0.9, "beta_2": 0.999, "epsilon": 1e-07, "amsgrad": false}}}}2
+
+root.layer_with_weights-0"_tf_keras_layer*
+{"name": "conv2d", "trainable": true, "expects_training_arg": false, "dtype": "float32", "batch_input_shape": {"class_name": "__tuple__", "items": [null, 18, 11, 11]}, "stateful": false, "must_restore_from_config": false, "class_name": "Conv2D", "config": {"name": "conv2d", "trainable": true, "batch_input_shape": {"class_name": "__tuple__", "items": [null, 18, 11, 11]}, "dtype": "float32", "filters": 256, "kernel_size": {"class_name": "__tuple__", "items": [3, 3]}, "strides": {"class_name": "__tuple__", "items": [1, 1]}, "padding": "same", "data_format": "channels_first", "dilation_rate": {"class_name": "__tuple__", "items": [1, 1]}, "groups": 1, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 1}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 2}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 3, "input_spec": {"class_name": "InputSpec", "config": {"dtype": null, "shape": null, "ndim": null, "max_ndim": null, "min_ndim": 4, "axes": {"-3": 18}}, "shared_object_id": 18}, "build_input_shape": {"class_name": "TensorShape", "items": [null, 18, 11, 11]}}2
+ root.layer_with_weights-1"_tf_keras_layer* {"name": "conv2d_1", "trainable": true, "expects_training_arg": false, "dtype": "float32", "batch_input_shape": null, "stateful": false, "must_restore_from_config": false, "class_name": "Conv2D", "config": {"name": "conv2d_1", "trainable": true, "dtype": "float32", "filters": 256, "kernel_size": {"class_name": "__tuple__", "items": [3, 3]}, "strides": {"class_name": "__tuple__", "items": [1, 1]}, "padding": "same", "data_format": "channels_first", "dilation_rate": {"class_name": "__tuple__", "items": [1, 1]}, "groups": 1, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 4}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 5}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 6, "input_spec": {"class_name": "InputSpec", "config": {"dtype": null, "shape": null, "ndim": null, "max_ndim": null, "min_ndim": 4, "axes": {"-3": 256}}, "shared_object_id": 20}, "build_input_shape": {"class_name": "TensorShape", "items": [null, 256, 11, 11]}}2
+ root.layer_with_weights-2"_tf_keras_layer* {"name": "conv2d_2", "trainable": true, "expects_training_arg": false, "dtype": "float32", "batch_input_shape": null, "stateful": false, "must_restore_from_config": false, "class_name": "Conv2D", "config": {"name": "conv2d_2", "trainable": true, "dtype": "float32", "filters": 256, "kernel_size": {"class_name": "__tuple__", "items": [3, 3]}, "strides": {"class_name": "__tuple__", "items": [1, 1]}, "padding": "same", "data_format": "channels_first", "dilation_rate": {"class_name": "__tuple__", "items": [1, 1]}, "groups": 1, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 7}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 8}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 9, "input_spec": {"class_name": "InputSpec", "config": {"dtype": null, "shape": null, "ndim": null, "max_ndim": null, "min_ndim": 4, "axes": {"-3": 256}}, "shared_object_id": 21}, "build_input_shape": {"class_name": "TensorShape", "items": [null, 256, 11, 11]}}2
+root.layer-3"_tf_keras_layer*{"name": "flatten", "trainable": true, "expects_training_arg": false, "dtype": "float32", "batch_input_shape": null, "stateful": false, "must_restore_from_config": false, "class_name": "Flatten", "config": {"name": "flatten", "trainable": true, "dtype": "float32", "data_format": "channels_last"}, "shared_object_id": 10, "input_spec": {"class_name": "InputSpec", "config": {"dtype": null, "shape": null, "ndim": null, "max_ndim": null, "min_ndim": 1, "axes": {}}, "shared_object_id": 22}}2
+root.layer_with_weights-3"_tf_keras_layer*{"name": "dense", "trainable": true, "expects_training_arg": false, "dtype": "float32", "batch_input_shape": null, "stateful": false, "must_restore_from_config": false, "class_name": "Dense", "config": {"name": "dense", "trainable": true, "dtype": "float32", "units": 128, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 11}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 12}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 13, "input_spec": {"class_name": "InputSpec", "config": {"dtype": null, "shape": null, "ndim": null, "max_ndim": null, "min_ndim": 2, "axes": {"-1": 30976}}, "shared_object_id": 23}, "build_input_shape": {"class_name": "TensorShape", "items": [null, 30976]}}2
+root.layer_with_weights-4"_tf_keras_layer*{"name": "dense_1", "trainable": true, "expects_training_arg": false, "dtype": "float32", "batch_input_shape": null, "stateful": false, "must_restore_from_config": false, "class_name": "Dense", "config": {"name": "dense_1", "trainable": true, "dtype": "float32", "units": 6, "activation": "linear", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 14}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 15}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 16, "input_spec": {"class_name": "InputSpec", "config": {"dtype": null, "shape": null, "ndim": null, "max_ndim": null, "min_ndim": 2, "axes": {"-1": 128}}, "shared_object_id": 24}, "build_input_shape": {"class_name": "TensorShape", "items": [null, 128]}}2
+Rroot.keras_api.metrics.0"_tf_keras_metric*{"class_name": "Mean", "name": "loss", "dtype": "float32", "config": {"name": "loss", "dtype": "float32"}, "shared_object_id": 25}2
+Sroot.keras_api.metrics.1"_tf_keras_metric*{"class_name": "MeanMetricWrapper", "name": "accuracy", "dtype": "float32", "config": {"name": "accuracy", "dtype": "float32", "fn": "categorical_accuracy"}, "shared_object_id": 19}2
\ No newline at end of file
diff --git a/Group_C/Agent3/saved_model.pb b/Group_C/Agent3/saved_model.pb
new file mode 100644
index 0000000..1d8e183
Binary files /dev/null and b/Group_C/Agent3/saved_model.pb differ
diff --git a/Group_C/Agent3/variables/variables.data-00000-of-00001 b/Group_C/Agent3/variables/variables.data-00000-of-00001
new file mode 100644
index 0000000..7ce1c6e
Binary files /dev/null and b/Group_C/Agent3/variables/variables.data-00000-of-00001 differ
diff --git a/Group_C/Agent3/variables/variables.index b/Group_C/Agent3/variables/variables.index
new file mode 100644
index 0000000..7bbe625
Binary files /dev/null and b/Group_C/Agent3/variables/variables.index differ
diff --git a/Group_C/README.md b/Group_C/README.md
new file mode 100644
index 0000000..528dc3c
--- /dev/null
+++ b/Group_C/README.md
@@ -0,0 +1,71 @@
+# Architecture
+
+We were able to implement all the descendants of Rainbow DQN except Categorical DQN
+
+- [x] basic DQN
+- [x] Double DQN
+- [x] Prioritized Experience Replay
+- [x] Dueling Network Architectures
+- [x] Noisy Nets
+- [x] Multi Step Reinforcement Learning
+- [ ] Categorical DQN
+
+# Getting Started with our DQNAgent
+
+# Pre-requisites
+
+* [Python 3.6.0](https://www.python.org/downloads/release/python-360/) or later (including `pip`)
+* [Docker](https://www.docker.com/) (only needed for `DockerAgent`)
+* [tensorflow 2.6.2](https://www.tensorflow.org/hub/installation)
+* [Keras 2.6.0](https://keras.io/getting_started/)
+* Others are all included in [requirements](Group_C/requirements.txt)
+# Installation
+
+* Clone the repository
+```
+$ git clone https://github.com/oxFFFF-Q/Project_AI.git
+```
+
+## Pip
+
+* Install the `pommerman` package. This needs to be done every time the code is updated to get the
+latest modules
+```
+$ cd ~/playground
+$ pip install -U .
+```
+
+## Conda
+
+* Install the `pommerman` environment.
+```
+$ cd ~/playground
+$ conda env create -f env.yml
+$ conda activate pommerman
+```
+
+* To update the environment
+```
+$ conda env update -f env.yml --prune
+```
+
+# Launch the agent
+We have separately trained models for player 1 [Agent1](Group_C/agents/Agent1.py) and player 3 [Agent3](Group_C/agents/Agent3.py). Run [main_test.py](Group_C/main_test.py) to test them playing against two [SimpleAgent](pommerman/agents/simple_agent.py)s.
+
+# Train your agent
+
+## A Simple Example
+
+Run [main_train.py](Group_C/main_train.py) to train our final DQN model for the radio team competition, with two [SimpleAgent](pommerman/agents/simple_agent.py)s as enemies and one [SimpleAgent](pommerman/agents/simple_agent.py) as teammate.
+
+The training will not stop automatically; it needs to be stopped manually, based on the reported rewards. The parameters are recorded every 100 episodes. Run [main_save_model.py](Group_C/main_save_model.py) to save the model; the name of the model is required. The best one is usually among the last few models.
+
+## Use other strategies
+
+Select other names for `strategy` in [main_train.py](Group_C/main_train.py) to try other architectures. Make sure the `strategy` in [main_save_model.py](Group_C/main_save_model.py) is consistent with it.
+
+
+
+# Visualize the experiment results
+
+Our experiment results are all stored in [data](Group_C/result_image/data). Run [make_image.py](Group_C/result_image/make_image.py) to get a visualization of them.
diff --git a/Group_C/agents/Agent1.py b/Group_C/agents/Agent1.py
new file mode 100644
index 0000000..b753897
--- /dev/null
+++ b/Group_C/agents/Agent1.py
@@ -0,0 +1,115 @@
+from pommerman.agents import BaseAgent
+from Group_C.utility.action_filter import action_filter
+from Group_C.utility.communication import message
+import numpy as np
+import tensorflow as tf
+import collections
+import os
+
+
+class DQNAgent(BaseAgent):
+ """DQNAgent for running"""
+
+ def __init__(self, *args, **kwargs):
+ super(DQNAgent, self).__init__(*args, **kwargs)
+
+ self.gpu = os.environ["CUDA_VISIBLE_DEVICES"] = "0"
+ self.DQN_model = tf.keras.models.load_model("./Agent1")
+ self.action_list = collections.deque([0, 0, 0], maxlen=3)
+
+ def action_predict(self, state):
+ state_feature = self.featurize2D(state)
+ state_reshape = tf.reshape(state_feature, (-1, 18, 11, 11))
+ q_table = self.DQN_model.predict(state_reshape)
+ return q_table
+
+ def act(self, observation, action_space):
+ mess, observation = message(observation) # Communication: information sending, receiving processing
+ action = np.argmax(self.action_predict(observation)).tolist()
+ self.action_list.append(action)
+ action = action_filter(observation, self.action_list)
+ self.action_list[-1] = action
+ return [action, mess[0], mess[1]]
+
+ def episode_end(self, reward):
+ pass
+
+ def shutdown(self):
+ pass
+
+ def featurize2D(self, states, partially_obs=True):
+ # 18 channels in total
+ X = states["position"][0]
+ Y = states["position"][1]
+ shape = (11, 11)
+
+ # get limited observation
+ def get_partially_obs(states, X, Y):
+ # board = np.zeros(shape)
+ board = np.full(shape, 5)
+ for x in range(10):
+ for y in range(10):
+ if X - 4 <= x <= X + 4 and Y - 4 <= y <= Y + 4:
+ board[x][y] = states["board"][x][y]
+ states["board"] = board
+ return states
+
+ def get_matrix(board, key):
+ res = board[key]
+ return res.reshape(shape).astype(np.float64)
+
+ def get_map(board, item):
+ map = np.zeros(shape)
+ map[board == item] = 1
+ return map
+
+ if partially_obs:
+ states = get_partially_obs(states, X, Y)
+
+ board = get_matrix(states, "board")
+
+ path = get_map(board, 0)
+ rigid = get_map(board, 1)
+ wood = get_map(board, 2)
+ bomb = get_map(board, 3)
+ flame = get_map(board, 4)
+ fog = np.zeros(shape)
+ agent1 = get_map(board, 10)
+ agent2 = get_map(board, 11)
+ agent3 = get_map(board, 12)
+ agent4 = get_map(board, 13)
+
+ power_up = []
+ for row in board:
+ new_row = []
+ for num in row:
+ if num == 6 or num == 7 or num == 8:
+ new_row.append(1)
+ else:
+ new_row.append(0.0)
+ power_up.append(new_row)
+
+ bomb_blast_strength = get_matrix(states, 'bomb_blast_strength')
+ bomb_life = get_matrix(states, 'bomb_life')
+ bomb_moving_direction = get_matrix(states, 'bomb_moving_direction')
+ flame_life = get_matrix(states, 'flame_life')
+
+ ammo_2D, blast_strength_2D, can_kick_2D = self.rebuild_1D_element(states)
+
+ feature2D = [path, rigid, wood, bomb, flame, fog, power_up, agent1, agent2, agent3, agent4, bomb_blast_strength,
+ bomb_life, bomb_moving_direction, flame_life, ammo_2D, blast_strength_2D, can_kick_2D]
+
+ return np.array(feature2D)
+
+ def rebuild_1D_element(self, states):
+ shape = (11, 11)
+ ammo = states["ammo"]
+ ammo_2D = np.full(shape, ammo)
+
+ blast_strength = states["blast_strength"]
+ blast_strength_2D = np.full(shape, blast_strength)
+
+ can_kick = states["can_kick"]
+ can_kick_2D = np.full(shape, int(can_kick))
+
+ return ammo_2D, blast_strength_2D, can_kick_2D
diff --git a/Group_C/agents/Agent3.py b/Group_C/agents/Agent3.py
new file mode 100644
index 0000000..0413272
--- /dev/null
+++ b/Group_C/agents/Agent3.py
@@ -0,0 +1,115 @@
+from pommerman.agents import BaseAgent
+from Group_C.utility.action_filter import action_filter
+from Group_C.utility.communication import message
+import numpy as np
+import tensorflow as tf
+import collections
+import os
+
+
+class DQNAgent(BaseAgent):
+ """DQNAgent for running"""
+
+ def __init__(self, *args, **kwargs):
+ super(DQNAgent, self).__init__(*args, **kwargs)
+
+ self.gpu = os.environ["CUDA_VISIBLE_DEVICES"] = "0"
+ self.DQN_model = tf.keras.models.load_model("./Agent3")
+ self.action_list = collections.deque([0, 0, 0], maxlen=3)
+
+ def action_predict(self, state):
+ state_feature = self.featurize2D(state)
+ state_reshape = tf.reshape(state_feature, (-1, 18, 11, 11))
+ q_table = self.DQN_model.predict(state_reshape)
+ return q_table
+
+ def act(self, observation, action_space):
+ mess, observation = message(observation) # Communication: information sending, receiving processing
+ action = np.argmax(self.action_predict(observation)).tolist()
+ self.action_list.append(action)
+ action = action_filter(observation, self.action_list)
+ self.action_list[-1] = action
+ return [action, mess[0], mess[1]]
+
+ def episode_end(self, reward):
+ pass
+
+ def shutdown(self):
+ pass
+
+ def featurize2D(self, states, partially_obs=True):
+ # 18 channels in total
+ X = states["position"][0]
+ Y = states["position"][1]
+ shape = (11, 11)
+
+ # get limited observation
+ def get_partially_obs(states, X, Y):
+ # board = np.zeros(shape)
+ board = np.full(shape, 5)
+ for x in range(10):
+ for y in range(10):
+ if X - 4 <= x <= X + 4 and Y - 4 <= y <= Y + 4:
+ board[x][y] = states["board"][x][y]
+ states["board"] = board
+ return states
+
+ def get_matrix(board, key):
+ res = board[key]
+ return res.reshape(shape).astype(np.float64)
+
+ def get_map(board, item):
+ map = np.zeros(shape)
+ map[board == item] = 1
+ return map
+
+ if partially_obs:
+ states = get_partially_obs(states, X, Y)
+
+ board = get_matrix(states, "board")
+
+ path = get_map(board, 0)
+ rigid = get_map(board, 1)
+ wood = get_map(board, 2)
+ bomb = get_map(board, 3)
+ flame = get_map(board, 4)
+ fog = np.zeros(shape)
+ agent1 = get_map(board, 10)
+ agent2 = get_map(board, 11)
+ agent3 = get_map(board, 12)
+ agent4 = get_map(board, 13)
+
+ power_up = []
+ for row in board:
+ new_row = []
+ for num in row:
+ if num == 6 or num == 7 or num == 8:
+ new_row.append(1)
+ else:
+ new_row.append(0.0)
+ power_up.append(new_row)
+
+ bomb_blast_strength = get_matrix(states, 'bomb_blast_strength')
+ bomb_life = get_matrix(states, 'bomb_life')
+ bomb_moving_direction = get_matrix(states, 'bomb_moving_direction')
+ flame_life = get_matrix(states, 'flame_life')
+
+ ammo_2D, blast_strength_2D, can_kick_2D = self.rebuild_1D_element(states)
+
+ feature2D = [path, rigid, wood, bomb, flame, fog, power_up, agent1, agent2, agent3, agent4, bomb_blast_strength,
+ bomb_life, bomb_moving_direction, flame_life, ammo_2D, blast_strength_2D, can_kick_2D]
+
+ return np.array(feature2D)
+
+ def rebuild_1D_element(self, states):
+ shape = (11, 11)
+ ammo = states["ammo"]
+ ammo_2D = np.full(shape, ammo)
+
+ blast_strength = states["blast_strength"]
+ blast_strength_2D = np.full(shape, blast_strength)
+
+ can_kick = states["can_kick"]
+ can_kick_2D = np.full(shape, int(can_kick))
+
+ return ammo_2D, blast_strength_2D, can_kick_2D
diff --git a/Group_C/agents/DQNAgent_basic.py b/Group_C/agents/DQNAgent_basic.py
new file mode 100644
index 0000000..e7855ed
--- /dev/null
+++ b/Group_C/agents/DQNAgent_basic.py
@@ -0,0 +1,203 @@
+from keras.layers import Dense, Flatten, Conv2D
+from keras import Sequential
+from tensorflow.keras.optimizers import Adam
+from pommerman.agents import BaseAgent
+from pommerman.agents import RandomAgent
+from pommerman import characters
+from gym.spaces import Discrete
+from Group_C.utility.replay_memory import replay_Memory
+from Group_C.utility import constants
+import numpy as np
+import tensorflow as tf
+import copy
+
+
+class DQNAgent(BaseAgent):
+ """Basic DQN """
+
+ def __init__(self, character=characters.Bomber):
+ super(DQNAgent, self).__init__(character)
+ self.baseAgent = RandomAgent()
+
+ self.training_model = self.new_model()
+ self.trained_model = self.new_model()
+ self.trained_model.set_weights(self.training_model.get_weights())
+
+ self.epsilon = constants.epsilon
+ self.min_epsilon = constants.MIN_EPSILON
+ self.eps_decay = constants.EPSILON_DECAY
+ self.buffer = replay_Memory(constants.MAX_BUFFER_SIZE)
+ self.update_counter = 0
+
+ def new_model(self):
+
+ model = Sequential()
+ input_shape = (constants.MINIBATCH_SIZE, 18, 11, 11,)
+ model.add(Conv2D(256, 3, (1, 1), input_shape=input_shape[1:], activation="relu", data_format="channels_first",
+ padding="same"))
+ model.add(Conv2D(256, 3, (1, 1), activation="relu", data_format="channels_first", padding="same"))
+ model.add(Conv2D(256, 3, (1, 1), activation="relu", data_format="channels_first", padding="same"))
+ model.add(Flatten())
+ model.add(Dense(128, activation="relu"))
+ model.add(Dense(6, activation='linear'))
+ model.compile(loss="mse", optimizer=Adam(learning_rate=0.0001), metrics=['accuracy'])
+ model.summary()
+ return model
+
+ def action_choose(self, state):
+
+ state_reshape = tf.reshape(state, (-1, 18, 11, 11))
+ q_table = self.training_model.predict(state_reshape)
+ if np.random.random() <= 0.001:
+ print(q_table)
+ return q_table
+
+ def act(self, obs, action_space):
+ return self.baseAgent.act(obs, Discrete(6))
+
+ def save_buffer(self, state_feature, action, reward, next_state_feature, done, Data_processing=False):
+ """if you want rotate observation into other 3 corner, set Data_processing to True"""
+ if Data_processing:
+ self.data_processing(state_feature, action, reward, next_state_feature, done)
+ else:
+ self.buffer.append([state_feature, action, reward, next_state_feature, done])
+
+ def train(self):
+
+ if self.buffer.size() < constants.MIN_REPLAY_MEMORY_SIZE:
+ return
+
+ current_states, action, reward, new_states, done = self.buffer.sample_element(constants.MINIBATCH_SIZE)
+
+ # Take the current_states in the sample, get the Q value from the model
+ current_states_q = self.training_model.predict(current_states)
+
+ # Take next_state in the sample, get the Q value from the old network
+ new_states_q = self.trained_model.predict(new_states)
+
+ # X is the state, Y is the predicted action
+ states = []
+ actions = []
+
+ for index in range(constants.MINIBATCH_SIZE):
+
+ if done[index] is not True:
+ # Update Q value,
+ new_state_q = reward[index] + constants.DISCOUNT * np.max(new_states_q[index])
+ else:
+ new_state_q = reward[index]
+
+ # Update the Q value for the given states
+ current_better_q = current_states_q[index]
+ current_better_q[action[index]] = new_state_q
+
+ # Add training data
+ states.append(current_states[index])
+ actions.append(current_better_q)
+
+ # Start training
+ self.training_model.fit(np.array(states), np.array(actions), batch_size=constants.MINIBATCH_SIZE, verbose=0,
+ shuffle=False)
+
+ # Update network update counters
+ if done:
+ self.update_counter += 1
+
+ # Network update counter reaches upper limit, update network
+ if self.update_counter > constants.UPDATE_EVERY:
+ self.trained_model.set_weights(self.training_model.get_weights())
+ self.update_counter = 0
+
+ # epsilon decay
+ def epsilon_decay(self):
+ self.epsilon = self.epsilon * self.eps_decay if self.epsilon > self.min_epsilon else self.epsilon
+
+ def save_weights(self, numOfEpisode):
+
+ # Archive parameters after training
+ # save weight every "save_weight" episode, change it in constants.py
+ if numOfEpisode % constants.save_weight == 0:
+ self.training_model.save_weights(('./checkpoints/episode{:}/episode{:}'.format(numOfEpisode, numOfEpisode)))
+ print("weights saved!")
+
+ def load_weights(self, weight_name):
+ self.training_model.load_weights('./checkpoints/{:}/{:}'.format(weight_name, weight_name))
+ self.trained_model.load_weights('./checkpoints/{:}/{:}'.format(weight_name, weight_name))
+ print("weights loaded!")
+
+ def save_model(self, model_name):
+ self.training_model.save("./{:}".format(model_name))
+
+ def data_processing(self, state_feature, action, reward, next_state_feature, done):
+ # Convert the top left map to another location
+ def convert_left_bottom(state_feature, next_state_feature, action):
+ state_feature_left_bottom = []
+ for board in state_feature:
+ state = np.rot90(board, k=1)
+ state_feature_left_bottom.append(state)
+ next_state_feature_left_bottom = []
+ for board in next_state_feature:
+ state = np.rot90(board, k=1)
+ next_state_feature_left_bottom.append(state)
+ if action == 1:
+ action = 3
+ elif action == 2:
+ action = 4
+ elif action == 3:
+ action = 2
+ elif action == 4:
+ action = 1
+ return np.array(state_feature_left_bottom), np.array(next_state_feature_left_bottom), action
+
+ def convert_right_bottom(state_feature, next_state_feature, action):
+ state_feature_left_bottom = []
+ for board in state_feature:
+ state = np.rot90(board, k=2)
+ state_feature_left_bottom.append(state)
+ next_state_feature_left_bottom = []
+ for board in next_state_feature:
+ state = np.rot90(board, k=2)
+ next_state_feature_left_bottom.append(state)
+ if action == 1:
+ action = 2
+ elif action == 2:
+ action = 1
+ elif action == 3:
+ action = 4
+ elif action == 4:
+ action = 3
+ return np.array(state_feature_left_bottom), np.array(next_state_feature_left_bottom), action
+
+ def convert_right_top(state_feature, next_state_feature, action):
+ state_feature_left_bottom = []
+ for board in state_feature:
+ state = np.rot90(board, k=3)
+ state_feature_left_bottom.append(state)
+ next_state_feature_left_bottom = []
+ for board in next_state_feature:
+ state = np.rot90(board, k=3)
+ next_state_feature_left_bottom.append(state)
+ if action == 1:
+ action = 4
+ elif action == 2:
+ action = 3
+ elif action == 3:
+ action = 1
+ elif action == 4:
+ action = 2
+ return np.array(state_feature_left_bottom), np.array(next_state_feature_left_bottom), action
+
+ # Rotate
+
+ state_left_bottom, next_state_left_bottom, action_left_bottom = convert_left_bottom(state_feature,
+ next_state_feature,
+ action)
+ state_right_bottom, next_state_right_bottom, action_right_bottom = convert_right_bottom(state_feature,
+ next_state_feature,
+ action)
+ state_right_top, next_state_right_top, action_right_top = convert_right_top(state_feature, next_state_feature,
+ action)
+ self.buffer.append([state_feature, action, reward, next_state_feature, done])
+ self.buffer.append([state_left_bottom, action_left_bottom, reward, next_state_left_bottom, done])
+ self.buffer.append([state_right_bottom, action_right_bottom, reward, next_state_right_bottom, done])
+ self.buffer.append([state_right_top, action_right_top, reward, next_state_right_top, done])
diff --git a/Group_C/agents/DQNAgent_double_dqn.py b/Group_C/agents/DQNAgent_double_dqn.py
new file mode 100644
index 0000000..518dd6e
--- /dev/null
+++ b/Group_C/agents/DQNAgent_double_dqn.py
@@ -0,0 +1,215 @@
+from keras.layers import Dense, Flatten, Conv2D
+from keras import Sequential
+from tensorflow.keras.optimizers import Adam
+from pommerman.agents import BaseAgent
+from pommerman.agents import RandomAgent
+from pommerman import characters
+from gym.spaces import Discrete
+from Group_C.utility.replay_memory import replay_Memory
+from Group_C.utility import constants
+import numpy as np
+import tensorflow as tf
+
+
+
+class DQNAgent(BaseAgent):
+    """Double-DQN Pommerman agent (Keras) with a target network and replay memory."""
+
+    def __init__(self, character=characters.Bomber):
+        """Build online/target networks, replay buffer, and the epsilon schedule."""
+        super(DQNAgent, self).__init__(character)
+        # act() delegates to this RandomAgent (exploration / data collection).
+        self.baseAgent = RandomAgent()
+
+        self.training_model = self.new_model()  # online network (trained every step)
+        self.trained_model = self.new_model()   # target network (periodically synced)
+        self.trained_model.set_weights(self.training_model.get_weights())
+        # self.load_weights()
+
+        # Epsilon-greedy schedule parameters (values come from constants.py).
+        self.epsilon = constants.epsilon
+        self.min_epsilon = constants.MIN_EPSILON
+        self.eps_decay = constants.EPSILON_DECAY
+        self.buffer = replay_Memory(constants.MAX_BUFFER_SIZE)
+        # Counts increments between target-network syncs (see train()).
+        self.update_counter = 0
+
+    def new_model(self):
+        """Return a compiled CNN mapping an (18, 11, 11) observation to 6 action Q-values."""
+        model = Sequential()
+        # Leading MINIBATCH_SIZE entry is sliced off below; per-sample input is (18, 11, 11).
+        input_shape = (constants.MINIBATCH_SIZE, 18, 11, 11,)
+        model.add(Conv2D(256, 3, (1, 1), input_shape=input_shape[1:], activation="relu", data_format="channels_first",
+                         padding="same"))
+        # print(model.output_shape)
+        model.add(Conv2D(256, 3, (1, 1), activation="relu", data_format="channels_first", padding="same"))
+        # print(model.output_shape)
+        model.add(Conv2D(256, 3, (1, 1), activation="relu", data_format="channels_first", padding="same"))
+        # print(model.output_shape)
+
+        model.add(Flatten())
+        model.add(Dense(128, activation="relu"))
+        # Linear output head: one Q-value per discrete Pommerman action.
+        model.add(Dense(6, activation='linear'))
+        model.compile(loss="mse", optimizer=Adam(learning_rate=0.0001), metrics=['accuracy'])
+        return model
+
+    def act(self, obs, action_space):
+        """Delegate action selection to the wrapped RandomAgent over Discrete(6)."""
+        return self.baseAgent.act(obs, Discrete(6))
+
+    def train(self):
+        """Run one Double-DQN fitting step on a minibatch sampled from the replay buffer."""
+        # Wait until the buffer holds enough transitions to sample from.
+        if self.buffer.size() < constants.MIN_REPLAY_MEMORY_SIZE:
+            return
+
+        current_states, action, reward, new_states, done = self.buffer.sample_element(constants.MINIBATCH_SIZE)
+
+        # Take the current_states in the sample, get the Q value from the model
+        current_states_q = self.training_model.predict(current_states)
+        # Online network scores the next states: used only for ACTION SELECTION (Double DQN).
+        double_new_states_q = self.training_model.predict(new_states)
+        # Target network EVALUATES the selected action.
+        new_states_q = self.trained_model.predict(new_states)
+
+        # X is the state, Y is the predicted action
+        states = []
+        actions = []
+
+        for index in range(constants.MINIBATCH_SIZE):
+
+            # NOTE(review): `is not True` is an identity test; if `done` holds numpy bools
+            # this branch is always taken — `if not done[index]:` would be safer. TODO confirm.
+            if done[index] is not True:
+                # Double-DQN target: r + gamma * Q_target(s', argmax_a Q_online(s', a)).
+                target = reward[index] + constants.DISCOUNT * new_states_q[index][np.argmax(double_new_states_q[index])]
+            else:
+                target = reward[index]
+
+            # estimate q-values based on current state
+            q_values = current_states_q[index]
+
+            # Copy the row, then overwrite only the taken action's Q-value with the target.
+            current_better_q = np.array(q_values)
+            current_better_q[action[index]] = target
+
+            # Add training data
+            states.append(current_states[index])
+            actions.append(current_better_q)
+
+        # Use dedicated data api, but slower.
+        # states = tf.reshape(states, (-1, 12, 8, 8))
+        # train_dataset = tf.data.Dataset.from_tensor_slices((states, actions))
+        # self.training_model.fit(train_dataset, verbose=0, shuffle=False)
+
+        # Start training
+        self.training_model.fit(np.array(states), np.array(actions), batch_size=constants.MINIBATCH_SIZE, verbose=0,
+                                shuffle=False)
+
+        # NOTE(review): `done` here is the whole sampled batch, which is truthy whenever the
+        # batch is non-empty — so the counter increments on every train() call, not only at
+        # episode end. Verify this is intended.
+        if done:
+            self.update_counter += 1
+
+        # Sync the target network after UPDATE_EVERY increments, then reset the counter.
+        if self.update_counter > constants.UPDATE_EVERY:
+            self.trained_model.set_weights(self.training_model.get_weights())
+            self.update_counter = 0
+
+    def save_buffer(self, state_feature, action, reward, next_state_feature, done, Data_processing=False):
+        """Store a transition; with Data_processing=True also store three rotated copies."""
+        if Data_processing:
+            self.data_processing(state_feature, action, reward, next_state_feature, done)
+        else:
+            self.buffer.append([state_feature, action, reward, next_state_feature, done])
+
+    def action_choose(self, state):
+        """Return the online network's Q-values for a single (18, 11, 11) state."""
+        state_reshape = tf.reshape(state, (-1, 18, 11, 11))
+        q_table = self.training_model.predict(state_reshape)
+        # Occasionally (p ~= 0.001) print the Q-table for debugging.
+        if np.random.random() <= 0.001:
+            print(q_table)
+        return q_table
+
+    # epsilon decay
+    def epsilon_decay(self):
+        """Multiplicatively decay epsilon until it reaches min_epsilon."""
+        self.epsilon = self.epsilon * self.eps_decay if self.epsilon > self.min_epsilon else self.epsilon
+
+    def save_weights(self, numOfEpisode):
+        """Checkpoint the online network's weights every `constants.save_weight` episodes."""
+        # Archive parameters after training
+        # save weight every "save_weight" episode, change it in constants.py
+        if numOfEpisode % constants.save_weight == 0:
+            self.training_model.save_weights(('./checkpoints/episode{:}/episode{:}'.format(numOfEpisode, numOfEpisode)))
+            print("weights saved!")
+
+    def load_weights(self, weight_name):
+        """Load the named checkpoint into both the online and the target network."""
+        self.training_model.load_weights('./checkpoints/{:}/{:}'.format(weight_name, weight_name))
+        self.trained_model.load_weights('./checkpoints/{:}/{:}'.format(weight_name, weight_name))
+        print("weights loaded!")
+
+    def save_model(self, model_name):
+        """Save the full online model (architecture + weights) under ./<model_name>."""
+        self.training_model.save("./{:}".format(model_name))
+
+    def data_processing(self, state_feature, action, reward, next_state_feature, done):
+        """Augment the replay buffer with the transition plus its 90/180/270-degree rotations.
+
+        Each rotation also remaps the four movement actions (1-4) so the stored action
+        matches the rotated board; actions 0 and 5 are left unchanged.
+        """
+        # Convert the top left map to another location
+        def convert_left_bottom(state_feature, next_state_feature, action):
+            # Rotate every feature plane by 90 degrees (np.rot90, k=1).
+            state_feature_left_bottom = []
+            for board in state_feature:
+                state = np.rot90(board, k=1)
+                state_feature_left_bottom.append(state)
+            next_state_feature_left_bottom = []
+            for board in next_state_feature:
+                state = np.rot90(board, k=1)
+                next_state_feature_left_bottom.append(state)
+            # Remap movement actions to follow the 90-degree rotation.
+            if action == 1:
+                action = 3
+            elif action == 2:
+                action = 4
+            elif action == 3:
+                action = 2
+            elif action == 4:
+                action = 1
+            return np.array(state_feature_left_bottom), np.array(next_state_feature_left_bottom), action
+
+        def convert_right_bottom(state_feature, next_state_feature, action):
+            # Rotate every feature plane by 180 degrees (np.rot90, k=2).
+            state_feature_left_bottom = []
+            for board in state_feature:
+                state = np.rot90(board, k=2)
+                state_feature_left_bottom.append(state)
+            next_state_feature_left_bottom = []
+            for board in next_state_feature:
+                state = np.rot90(board, k=2)
+                next_state_feature_left_bottom.append(state)
+            # A 180-degree rotation swaps opposite movement directions (1<->2, 3<->4).
+            if action == 1:
+                action = 2
+            elif action == 2:
+                action = 1
+            elif action == 3:
+                action = 4
+            elif action == 4:
+                action = 3
+            return np.array(state_feature_left_bottom), np.array(next_state_feature_left_bottom), action
+
+        def convert_right_top(state_feature, next_state_feature, action):
+            # Rotate every feature plane by 270 degrees (np.rot90, k=3).
+            state_feature_left_bottom = []
+            for board in state_feature:
+                state = np.rot90(board, k=3)
+                state_feature_left_bottom.append(state)
+            next_state_feature_left_bottom = []
+            for board in next_state_feature:
+                state = np.rot90(board, k=3)
+                next_state_feature_left_bottom.append(state)
+            # Remap movement actions to follow the 270-degree rotation.
+            if action == 1:
+                action = 4
+            elif action == 2:
+                action = 3
+            elif action == 3:
+                action = 1
+            elif action == 4:
+                action = 2
+            return np.array(state_feature_left_bottom), np.array(next_state_feature_left_bottom), action
+
+        # Rotate
+
+        state_left_bottom, next_state_left_bottom, action_left_bottom = convert_left_bottom(state_feature,
+                                                                                            next_state_feature,
+                                                                                            action)
+        state_right_bottom, next_state_right_bottom, action_right_bottom = convert_right_bottom(state_feature,
+                                                                                                next_state_feature,
+                                                                                                action)
+        state_right_top, next_state_right_top, action_right_top = convert_right_top(state_feature, next_state_feature,
+                                                                                    action)
+        # Store the original transition plus all three rotated variants (4x data).
+        self.buffer.append([state_feature, action, reward, next_state_feature, done])
+        self.buffer.append([state_left_bottom, action_left_bottom, reward, next_state_left_bottom, done])
+        self.buffer.append([state_right_bottom, action_right_bottom, reward, next_state_right_bottom, done])
+        self.buffer.append([state_right_top, action_right_top, reward, next_state_right_top, done])
\ No newline at end of file
diff --git a/Group_C/agents/DQNAgent_dueling_dqn.py b/Group_C/agents/DQNAgent_dueling_dqn.py
new file mode 100644
index 0000000..d40f766
--- /dev/null
+++ b/Group_C/agents/DQNAgent_dueling_dqn.py
@@ -0,0 +1,236 @@
+from tensorflow.keras.optimizers import Adam
+from pommerman.agents import BaseAgent
+from pommerman.agents import RandomAgent
+from pommerman import characters
+from gym.spaces import Discrete
+from Group_C.utility.replay_memory import replay_Memory
+from Group_C.utility import constants
+import numpy as np
+import tensorflow as tf
+import tensorflow.keras as keras
+
+class Dueling_Model(tf.keras.Model):
+    """Dueling-DQN network: shared conv trunk plus value (V) and advantage (A) heads."""
+
+    def __init__(self, ):
+        super(Dueling_Model, self).__init__()
+
+        # Shared convolutional trunk over the (18, 11, 11) channels-first observation.
+        self.c1 = keras.layers.Conv2D(256, 3, (1, 1), input_shape=(constants.MINIBATCH_SIZE, 18, 11, 11,)[1:],
+                                      activation="relu", data_format="channels_first",
+                                      padding="same")
+        self.c2 = keras.layers.Conv2D(256, 3, (1, 1), activation="relu", data_format="channels_first", padding="same")
+
+        self.c3 = keras.layers.Conv2D(256, 3, (1, 1), activation="relu", data_format="channels_first", padding="same")
+        self.flatten = keras.layers.Flatten()
+        self.l1 = keras.layers.Dense(128, activation="relu")
+
+        # Dueling heads: scalar state value V and per-action advantage A (6 actions).
+        self.V = keras.layers.Dense(1, activation=None)
+        self.A = keras.layers.Dense(6, activation=None)
+
+    def call(self, inputs):
+        """Return Q(s, a) = V(s) + A(s, a) - mean_a A(s, a)."""
+        x = self.c1(inputs)
+        x = self.c2(x)
+        x = self.c3(x)
+        x = self.flatten(x)
+        x = self.l1(x)
+
+        V = self.V(x)
+        A = self.A(x)
+        # Subtract the mean advantage so V and A are identifiable (standard dueling trick).
+        mean = tf.math.reduce_mean(A, axis=1, keepdims=True)
+
+        output = V + tf.subtract(A, mean)
+        return output
+
+    def advantage(self, state):
+        """Return only the advantage head's output; its argmax equals the argmax of Q."""
+        x = self.c1(state)
+        x = self.c2(x)
+        x = self.c3(x)
+        x = self.flatten(x)
+        x = self.l1(x)
+        A = self.A(x)
+        return A
+
+
+class DQNAgent(BaseAgent):
+    """Dueling-DQN Pommerman agent built on Dueling_Model with a target network."""
+
+    def __init__(self, character=characters.Bomber):
+        super(DQNAgent, self).__init__(character)
+        # act() delegates to this RandomAgent (exploration / data collection).
+        self.baseAgent = RandomAgent()
+
+        self.training_model = Dueling_Model()  # online network
+        self.trained_model = Dueling_Model()   # target network
+
+        # NOTE(review): subclassed Keras models have no weights until first built by a
+        # call; get_weights() here may return an empty list — TODO confirm this syncs.
+        self.trained_model.set_weights(self.training_model.get_weights())
+        # self.load_weights()
+
+        # Epsilon-greedy schedule parameters (values come from constants.py).
+        self.epsilon = constants.epsilon
+        self.min_epsilon = constants.MIN_EPSILON
+        self.eps_decay = constants.EPSILON_DECAY
+        self.buffer = replay_Memory(constants.MAX_BUFFER_SIZE)
+        self.update_counter = 0
+
+        self.training_model.compile(loss="mse", optimizer=Adam(learning_rate=0.0001), metrics=['accuracy'])
+        self.trained_model.compile(loss="mse", optimizer=Adam(learning_rate=0.0001), metrics=['accuracy'])
+
+    def act(self, obs, action_space):
+        """Delegate action selection to the wrapped RandomAgent over Discrete(6)."""
+        return self.baseAgent.act(obs, Discrete(6))
+
+    def train(self):
+        """Run one DQN fitting step (max over target-network Q) on a sampled minibatch."""
+        if self.buffer.size() < constants.MIN_REPLAY_MEMORY_SIZE:
+            return
+
+        current_states, action, reward, new_states, done = self.buffer.sample_element(constants.MINIBATCH_SIZE)
+
+        # Take the current_states in the sample, get the Q value from the model
+        current_states_q = self.training_model.call(current_states)
+        # double_new_qs = self.training_model.call(new_states)
+
+        # Take next_state in the sample, get the Q value from the old network
+        # new_states_q = tf.math.reduce_max(self.trained_model.call(new_states), axis=1, keepdims=True)
+        new_states_q = self.trained_model.call(new_states)
+
+        # X is the state, Y is the predicted action
+        states = []
+        actions = []
+
+        for index in range(constants.MINIBATCH_SIZE):
+
+            # NOTE(review): `is not True` is an identity test; `if not done[index]:` is
+            # safer if `done` can hold numpy bools. TODO confirm the element type.
+            if done[index] is not True:
+                # Vanilla DQN target: r + gamma * max_a Q_target(s', a).
+                new_state_q = reward[index] + constants.DISCOUNT * np.max(new_states_q[index])
+            else:
+                new_state_q = reward[index]
+
+            # estimate q-values based on current state
+            q_values = current_states_q[index]
+            # Copy the row, then overwrite only the taken action's Q-value with the target.
+            current_better_q = np.array(q_values)
+            current_better_q[action[index]] = new_state_q
+
+            # Add training data
+            states.append(current_states[index])
+            actions.append(current_better_q)
+
+        # Start training
+        self.training_model.fit(np.array(states), np.array(actions), batch_size=constants.MINIBATCH_SIZE, verbose=0,
+                                shuffle=False)
+        # NOTE(review): `done` is the whole sampled array — truthy for any non-empty
+        # batch, so the counter effectively counts train() calls.
+        if done:
+            self.update_counter += 1
+
+        # Network update counter reaches upper limit, update network
+        if self.update_counter > constants.UPDATE_EVERY:
+            self.trained_model.set_weights(self.training_model.get_weights())
+            self.update_counter = 0
+
+    def save_buffer(self, state_feature, action, reward, next_state_feature, done, Data_processing=False):
+        """Store a transition; with Data_processing=True also store three rotated copies.
+
+        NOTE(review): a second `save_buffer` defined later in this class shadows this
+        one, so the Data_processing path is unreachable as written — likely unintended.
+        """
+        if Data_processing:
+            self.data_processing(state_feature, action, reward, next_state_feature, done)
+        else:
+            self.buffer.append([state_feature, action, reward, next_state_feature, done])
+
+    def action_choose(self, state):
+        """Return the advantage head's values for a single (18, 11, 11) state."""
+        state_reshape = tf.reshape(state, (-1, 18, 11, 11))
+        # The advantage head suffices for greedy selection: argmax A equals argmax Q.
+        q_table = self.training_model.advantage(state_reshape)
+        # Occasionally (p ~= 0.001) print the values for debugging.
+        if np.random.random() <= 0.001:
+            print(q_table)
+        return q_table
+
+    # epsilon decay
+    def epsilon_decay(self):
+        """Multiplicatively decay epsilon until it reaches min_epsilon."""
+        self.epsilon = self.epsilon * self.eps_decay if self.epsilon > self.min_epsilon else self.epsilon
+
+    def save_buffer(self, state_feature, action, reward, next_state_feature, done):
+        """Append a transition to the replay buffer.
+
+        NOTE(review): this redefinition shadows the earlier `save_buffer` that
+        supported the Data_processing rotation option — verify which is intended.
+        """
+        self.buffer.append([state_feature, action, reward, next_state_feature, done])
+
+ def save_weights(self, numOfEpisode):
+
+ # Archive parameters after training
+ # save weight every "save_weight" episode, change it in constants.py
+ if numOfEpisode % constants.save_weight == 0:
+ self.training_model.save_weights(('./checkpoints/episode{:}/episode{:}'.format(numOfEpisode, numOfEpisode)))
+ print("weights saved!")
+
+ def load_weights(self, weight_name):
+ self.training_model.load_weights('./checkpoints/{:}/{:}'.format(weight_name, weight_name))
+ self.trained_model.load_weights('./checkpoints/{:}/{:}'.format(weight_name, weight_name))
+ print("weights loaded!")
+
+ def save_model(self, model_name):
+ self.training_model.save("./{:}".format(model_name))
+
+ def data_processing(self, state_feature, action, reward, next_state_feature, done):
+ # Convert the top left map to another location
+ def convert_left_bottom(state_feature, next_state_feature, action):
+ state_feature_left_bottom = []
+ for board in state_feature:
+ state = np.rot90(board, k=1)
+ state_feature_left_bottom.append(state)
+ next_state_feature_left_bottom = []
+ for board in next_state_feature:
+ state = np.rot90(board, k=1)
+ next_state_feature_left_bottom.append(state)
+ if action == 1:
+ action = 3
+ elif action == 2:
+ action = 4
+ elif action == 3:
+ action = 2
+ elif action == 4:
+ action = 1
+ return np.array(state_feature_left_bottom), np.array(next_state_feature_left_bottom), action
+
+ def convert_right_bottom(state_feature, next_state_feature, action):
+ state_feature_left_bottom = []
+ for board in state_feature:
+ state = np.rot90(board, k=2)
+ state_feature_left_bottom.append(state)
+ next_state_feature_left_bottom = []
+ for board in next_state_feature:
+ state = np.rot90(board, k=2)
+ next_state_feature_left_bottom.append(state)
+ if action == 1:
+ action = 2
+ elif action == 2:
+ action = 1
+ elif action == 3:
+ action = 4
+ elif action == 4:
+ action = 3
+ return np.array(state_feature_left_bottom), np.array(next_state_feature_left_bottom), action
+
+ def convert_right_top(state_feature, next_state_feature, action):
+ state_feature_left_bottom = []
+ for board in state_feature:
+ state = np.rot90(board, k=3)
+ state_feature_left_bottom.append(state)
+ next_state_feature_left_bottom = []
+ for board in next_state_feature:
+ state = np.rot90(board, k=3)
+ next_state_feature_left_bottom.append(state)
+ if action == 1:
+ action = 4
+ elif action == 2:
+ action = 3
+ elif action == 3:
+ action = 1
+ elif action == 4:
+ action = 2
+ return np.array(state_feature_left_bottom), np.array(next_state_feature_left_bottom), action
+
+ # Rotate
+
+ state_left_bottom, next_state_left_bottom, action_left_bottom = convert_left_bottom(state_feature,
+ next_state_feature,
+ action)
+ state_right_bottom, next_state_right_bottom, action_right_bottom = convert_right_bottom(state_feature,
+ next_state_feature,
+ action)
+ state_right_top, next_state_right_top, action_right_top = convert_right_top(state_feature, next_state_feature,
+ action)
+ self.buffer.append([state_feature, action, reward, next_state_feature, done])
+ self.buffer.append([state_left_bottom, action_left_bottom, reward, next_state_left_bottom, done])
+ self.buffer.append([state_right_bottom, action_right_bottom, reward, next_state_right_bottom, done])
+ self.buffer.append([state_right_top, action_right_top, reward, next_state_right_top, done])
diff --git a/Group_C/agents/DQNAgent_final.py b/Group_C/agents/DQNAgent_final.py
new file mode 100644
index 0000000..d0feefb
--- /dev/null
+++ b/Group_C/agents/DQNAgent_final.py
@@ -0,0 +1,144 @@
+from keras.layers import Dense, Flatten, Conv2D
+from keras import Sequential
+from tensorflow.keras.optimizers import Adam
+from pommerman.agents import BaseAgent
+from pommerman.agents import RandomAgent
+from pommerman import characters
+from gym.spaces import Discrete
+from Group_C.utility.replay_memory import replay_Memory
+from Group_C.utility import constants
+import numpy as np
+import tensorflow as tf
+
+
+class DQNAgent(BaseAgent):
+    """DQN combine double DQN and priority memory"""
+
+    def __init__(self, character=characters.Bomber):
+        """Build online/target networks, prioritized replay buffer, and epsilon schedule."""
+        super(DQNAgent, self).__init__(character)
+        # act() delegates to this RandomAgent (exploration / data collection).
+        self.baseAgent = RandomAgent()
+
+        self.training_model = self.new_model()  # online network
+        self.trained_model = self.new_model()   # target network
+        self.trained_model.set_weights(self.training_model.get_weights())
+
+        # Epsilon-greedy schedule parameters (values come from constants.py).
+        self.epsilon = constants.epsilon
+        self.min_epsilon = constants.MIN_EPSILON
+        self.eps_decay = constants.EPSILON_DECAY
+        self.buffer = replay_Memory(constants.MAX_BUFFER_SIZE)
+        self.update_counter = 0
+
+ def new_model(self):
+
+ model = Sequential()
+ input_shape = (constants.MINIBATCH_SIZE, 18, 11, 11,)
+ model.add(Conv2D(256, 3, (1, 1), input_shape=input_shape[1:], activation="relu", data_format="channels_first",
+ padding="same"))
+ model.add(Conv2D(256, 3, (1, 1), activation="relu", data_format="channels_first", padding="same"))
+ model.add(Conv2D(256, 3, (1, 1), activation="relu", data_format="channels_first", padding="same"))
+
+ model.add(Flatten())
+ model.add(Dense(128, activation="relu"))
+ model.add(Dense(6, activation='linear'))
+ model.compile(loss="mse", optimizer=Adam(learning_rate=0.0001), metrics=['accuracy'])
+ return model
+
+ def act(self, obs, action_space):
+ return self.baseAgent.act(obs, Discrete(6))
+
+ def train(self):
+
+ if self.buffer.size() < constants.MIN_REPLAY_MEMORY_SIZE:
+ return
+
+ current_states, action, reward, new_states, done = self.buffer.sample_element_pri(constants.MINIBATCH_SIZE)
+
+ # Take the current_states in the sample, get the Q value from the model
+ current_states_q = self.training_model.predict(current_states)
+ double_new_states_q = self.training_model.predict(new_states)
+ # Take next_state in the sample, get the Q value from the old network
+ new_states_q = self.trained_model.predict(new_states)
+
+ # X is the state, Y is the predicted action
+ states = []
+ actions = []
+
+ for index in range(constants.MINIBATCH_SIZE):
+
+ if done[index] is not True:
+ # Update Q value, Double DQN
+ target = reward[index] + constants.DISCOUNT * new_states_q[index][np.argmax(double_new_states_q[index])]
+ else:
+ target = reward[index]
+
+ # estimate q-values based on current state
+ q_values = current_states_q[index]
+
+ # Update the Q value for the given states
+ current_better_q = np.array(q_values)
+ current_better_q[action[index]] = target
+ # current_better_q = tf.convert_to_tensor(current_better_q)
+
+ # Add training data
+ states.append(current_states[index])
+ actions.append(current_better_q)
+
+ # Start training
+ self.training_model.fit(np.array(states), np.array(actions), batch_size=constants.MINIBATCH_SIZE, verbose=0,
+ shuffle=False)
+
+ # Update network update counters
+ if done:
+ self.update_counter += 1
+
+ # Network update counter reaches upper limit, update network
+ if self.update_counter > constants.UPDATE_EVERY:
+ self.trained_model.set_weights(self.training_model.get_weights())
+ self.update_counter = 0
+
+    def calculate_td_error(self, state, action, reward, new_state, done):
+        """Return the TD error (Double-DQN target minus current Q) for one transition.
+
+        The caller uses this as the priority when appending to the prioritized buffer.
+        """
+        state_ = tf.reshape(state, (-1, 18, 11, 11))
+        new_state_ = tf.reshape(new_state, (-1, 18, 11, 11))
+        q_values = self.training_model.predict(state_)[0]
+        q_value = q_values[action]
+        if not done:
+            # Double-DQN target: online net selects the action, target net evaluates it.
+            target = reward + constants.DISCOUNT * \
+                     self.trained_model.predict(new_state_)[0, np.argmax(self.training_model.predict(new_state_))]
+        else:
+            target = reward
+
+        td_error = target - q_value
+        return td_error
+
+ def action_choose(self, state):
+
+ state_reshape = tf.reshape(state, (-1, 18, 11, 11))
+ q_table = self.training_model.predict(state_reshape)
+ if np.random.random() <= 0.001:
+ print(q_table)
+ return q_table
+
+    def save_buffer(self, state_feature, action, reward, next_state_feature, done):
+        """Append a transition with its TD-error priority to the prioritized buffer."""
+        td_error = self.calculate_td_error(state_feature, action, reward, next_state_feature, done)
+        self.buffer.append_pri(state_feature, action, reward, next_state_feature, done, td_error)
+
+ # epsilon decay
+
+ def epsilon_decay(self):
+ self.epsilon = self.epsilon * self.eps_decay if self.epsilon > self.min_epsilon else self.epsilon
+
+ def save_weights(self, numOfEpisode):
+
+ # Archive parameters after training
+ # save weight every "save_weight" episode, change it in constants.py
+ if numOfEpisode % constants.save_weight == 0:
+ self.training_model.save_weights(('./checkpoints/episode{:}/episode{:}'.format(numOfEpisode, numOfEpisode)))
+ print("weights saved!")
+
+ def load_weights(self, weight_name):
+ self.training_model.load_weights('./checkpoints/{:}/{:}'.format(weight_name, weight_name))
+ self.trained_model.load_weights('./checkpoints/{:}/{:}'.format(weight_name, weight_name))
+ print("weights loaded!")
+
+ def save_model(self, model_name):
+ self.training_model.save("./{:}".format(model_name))
diff --git a/Group_C/agents/DQNAgent_multi_steps.py b/Group_C/agents/DQNAgent_multi_steps.py
new file mode 100644
index 0000000..dd2050b
--- /dev/null
+++ b/Group_C/agents/DQNAgent_multi_steps.py
@@ -0,0 +1,215 @@
+from keras.layers import Dense, Flatten, Conv2D
+from keras import Sequential
+from tensorflow.keras.optimizers import Adam
+from pommerman.agents import BaseAgent
+from pommerman.agents import RandomAgent
+from pommerman import characters
+from gym.spaces import Discrete
+from Group_C.utility.replay_memory import replay_Memory
+from Group_C.utility import constants
+import numpy as np
+import tensorflow as tf
+import copy
+
+
+class DQNAgent(BaseAgent):
+ """DQN second try with keras"""
+
+ def __init__(self, character=characters.Bomber):
+ super(DQNAgent, self).__init__(character)
+ self.baseAgent = RandomAgent()
+
+ self.training_model = self.new_model()
+ self.trained_model = self.new_model()
+ self.trained_model.set_weights(self.training_model.get_weights())
+ # self.load_weights()
+
+ self.epsilon = constants.epsilon
+ self.min_epsilon = constants.MIN_EPSILON
+ self.eps_decay = constants.EPSILON_DECAY
+ self.buffer = replay_Memory(constants.MAX_BUFFER_SIZE)
+ self.update_counter = 0
+
+ def new_model(self):
+
+ model = Sequential()
+ input_shape = (constants.MINIBATCH_SIZE, 18, 11, 11,)
+ model.add(Conv2D(256, 3, (1, 1), input_shape=input_shape[1:], activation="relu", data_format="channels_first",
+ padding="same"))
+ # print(model.output_shape)
+ model.add(Conv2D(256, 3, (1, 1), activation="relu", data_format="channels_first", padding="same"))
+ # print(model.output_shape)
+ model.add(Conv2D(256, 3, (1, 1), activation="relu", data_format="channels_first", padding="same"))
+ # print(model.output_shape)
+
+ model.add(Flatten())
+ model.add(Dense(128, activation="relu"))
+ model.add(Dense(6, activation='linear'))
+ model.compile(loss="mse", optimizer=Adam(learning_rate=0.0001), metrics=['accuracy'])
+ model.summary()
+ return model
+
+ def action_choose(self, state):
+
+ state_reshape = tf.reshape(state, (-1, 18, 11, 11))
+ q_table = self.training_model.predict(state_reshape)
+ if np.random.random() <= 0.001:
+ print(q_table)
+ return q_table
+
+ def act(self, obs, action_space):
+ return self.baseAgent.act(obs, Discrete(6))
+
+ def save_buffer(self, state_feature, action, reward, next_state_feature, done, Data_processing=False):
+ """if you want rotate observation into other 3 corner, set Data_processing to True"""
+ if Data_processing:
+ self.data_processing(state_feature, action, reward, next_state_feature, done)
+ else:
+ self.buffer.append([state_feature, action, reward, next_state_feature, done])
+
+    def train(self):
+        """Run one DQN fitting step (max over target-network Q) on a sampled minibatch."""
+        if self.buffer.size() < constants.MIN_REPLAY_MEMORY_SIZE:
+            return
+
+        current_states, action, reward, new_states, done = self.buffer.sample_element(constants.MINIBATCH_SIZE)
+
+        # Take the current_states in the sample, get the Q value from the model
+        current_states_q = self.training_model.predict(current_states)
+
+        # Take next_state in the sample, get the Q value from the old network
+        new_states_q = self.trained_model.predict(new_states)
+
+        # X is the state, Y is the predicted action
+        states = []
+        actions = []
+
+        for index in range(constants.MINIBATCH_SIZE):
+
+            # NOTE(review): `is not True` is an identity test; `if not done[index]:` is
+            # safer if `done` holds numpy bools. TODO confirm the element type.
+            if done[index] is not True:
+                # Vanilla DQN target: r + gamma * max_a Q_target(s', a).
+                new_state_q = reward[index] + constants.DISCOUNT * np.max(new_states_q[index])
+            else:
+                new_state_q = reward[index]
+
+            # NOTE(review): unlike the sibling agents there is no np.array copy here, so
+            # this row is a view and the assignment mutates current_states_q in place.
+            current_better_q = current_states_q[index]
+            current_better_q[action[index]] = new_state_q
+
+            # Add training data
+            states.append(current_states[index])
+            actions.append(current_better_q)
+
+
+        # Use dedicated data api, but slower.
+        # states = tf.reshape(states, (-1, 12, 8, 8))
+        # train_dataset = tf.data.Dataset.from_tensor_slices((states, actions))
+        # self.training_model.fit(train_dataset, verbose=0, shuffle=False)
+
+        # Start training
+        self.training_model.fit(np.array(states), np.array(actions), batch_size=constants.MINIBATCH_SIZE, verbose=0,
+                                shuffle=False)
+
+        # NOTE(review): `done` is the whole sampled array — truthy for any non-empty
+        # batch, so the counter increments on every train() call.
+        if done:
+            self.update_counter += 1
+
+        # Network update counter reaches upper limit, update network
+        if self.update_counter > constants.UPDATE_EVERY:
+            self.trained_model.set_weights(self.training_model.get_weights())
+            self.update_counter = 0
+
+ # epsilon decay
+
+ def epsilon_decay(self):
+ self.epsilon = self.epsilon * self.eps_decay if self.epsilon > self.min_epsilon else self.epsilon
+
+ def save_weights(self, numOfEpisode):
+
+ # Archive parameters after training
+ # save weight every "save_weight" episode, change it in constants.py
+ if numOfEpisode % constants.save_weight == 0:
+ self.training_model.save_weights(('./checkpoints/episode{:}/episode{:}'.format(numOfEpisode, numOfEpisode)))
+ print("weights saved!")
+
+ def load_weights(self, weight_name):
+ self.training_model.load_weights('./checkpoints/{:}/{:}'.format(weight_name, weight_name))
+ self.trained_model.load_weights('./checkpoints/{:}/{:}'.format(weight_name, weight_name))
+ print("weights loaded!")
+
+ def save_model(self, model_name):
+ self.training_model.save("./{:}".format(model_name))
+
+ def data_processing(self, state_feature, action, reward, next_state_feature, done):
+ # Convert the top left map to another location
+ def convert_left_bottom(state_feature, next_state_feature, action):
+ state_feature_left_bottom = []
+ for board in state_feature:
+ state = np.rot90(board, k=1)
+ state_feature_left_bottom.append(state)
+ next_state_feature_left_bottom = []
+ for board in next_state_feature:
+ state = np.rot90(board, k=1)
+ next_state_feature_left_bottom.append(state)
+ if action == 1:
+ action = 3
+ elif action == 2:
+ action = 4
+ elif action == 3:
+ action = 2
+ elif action == 4:
+ action = 1
+ return np.array(state_feature_left_bottom), np.array(next_state_feature_left_bottom), action
+
+ def convert_right_bottom(state_feature, next_state_feature, action):
+ state_feature_left_bottom = []
+ for board in state_feature:
+ state = np.rot90(board, k=2)
+ state_feature_left_bottom.append(state)
+ next_state_feature_left_bottom = []
+ for board in next_state_feature:
+ state = np.rot90(board, k=2)
+ next_state_feature_left_bottom.append(state)
+ if action == 1:
+ action = 2
+ elif action == 2:
+ action = 1
+ elif action == 3:
+ action = 4
+ elif action == 4:
+ action = 3
+ return np.array(state_feature_left_bottom), np.array(next_state_feature_left_bottom), action
+
+ def convert_right_top(state_feature, next_state_feature, action):
+ state_feature_left_bottom = []
+ for board in state_feature:
+ state = np.rot90(board, k=3)
+ state_feature_left_bottom.append(state)
+ next_state_feature_left_bottom = []
+ for board in next_state_feature:
+ state = np.rot90(board, k=3)
+ next_state_feature_left_bottom.append(state)
+ if action == 1:
+ action = 4
+ elif action == 2:
+ action = 3
+ elif action == 3:
+ action = 1
+ elif action == 4:
+ action = 2
+ return np.array(state_feature_left_bottom), np.array(next_state_feature_left_bottom), action
+
+ # Rotate
+
+ state_left_bottom, next_state_left_bottom, action_left_bottom = convert_left_bottom(state_feature,
+ next_state_feature,
+ action)
+ state_right_bottom, next_state_right_bottom, action_right_bottom = convert_right_bottom(state_feature,
+ next_state_feature,
+ action)
+ state_right_top, next_state_right_top, action_right_top = convert_right_top(state_feature, next_state_feature,
+ action)
+ self.buffer.append([state_feature, action, reward, next_state_feature, done])
+ self.buffer.append([state_left_bottom, action_left_bottom, reward, next_state_left_bottom, done])
+ self.buffer.append([state_right_bottom, action_right_bottom, reward, next_state_right_bottom, done])
+ self.buffer.append([state_right_top, action_right_top, reward, next_state_right_top, done])
+
diff --git a/Group_C/agents/DQNAgent_noisy.py b/Group_C/agents/DQNAgent_noisy.py
new file mode 100644
index 0000000..23eaf75
--- /dev/null
+++ b/Group_C/agents/DQNAgent_noisy.py
@@ -0,0 +1,206 @@
+import copy
+import numpy as np
+import tensorflow as tf
+import tensorflow_addons as tfa
+from keras.layers import Dense, Flatten, Conv2D
+from keras import Sequential
+from tensorflow.keras.optimizers import Adam
+from pommerman.agents import BaseAgent
+from pommerman.agents import RandomAgent
+from pommerman import characters
+from gym.spaces import Discrete
+from Group_C.utility.replay_memory import replay_Memory
+from Group_C.utility import constants
+
+
class DQNAgent(BaseAgent):
    """DQN agent whose hidden layer uses NoisyDense for exploration.

    Identical training loop to the basic DQN agent, except the 128-unit
    dense layer is replaced with ``tfa.layers.NoisyDense`` so exploration
    is driven by learned parameter noise on top of epsilon-greedy.
    """

    def __init__(self, character=characters.Bomber):
        super(DQNAgent, self).__init__(character)
        # Fallback policy used by act(); training-time actions are chosen
        # externally via action_choose().
        self.baseAgent = RandomAgent()

        # Online network (updated every train()) and target network
        # (synced to the online weights every UPDATE_EVERY syncs).
        self.training_model = self.new_model()
        self.trained_model = self.new_model()
        self.trained_model.set_weights(self.training_model.get_weights())
        # self.load_weights()

        self.epsilon = constants.epsilon
        self.min_epsilon = constants.MIN_EPSILON
        self.eps_decay = constants.EPSILON_DECAY
        self.buffer = replay_Memory(constants.MAX_BUFFER_SIZE)
        # Counts train() passes since the last target-network sync.
        self.update_counter = 0

    def new_model(self):
        """Build the conv + noisy-dense Q-network.

        Input is an 18x11x11 channels-first feature stack; output is a
        vector of 6 Q-values (one per Pommerman action).
        """
        model = Sequential()
        input_shape = (constants.MINIBATCH_SIZE, 18, 11, 11,)
        model.add(Conv2D(256, 3, (1, 1), input_shape=input_shape[1:], activation="relu", data_format="channels_first",
                         padding="same"))
        model.add(Conv2D(256, 3, (1, 1), activation="relu", data_format="channels_first", padding="same"))
        model.add(Conv2D(256, 3, (1, 1), activation="relu", data_format="channels_first", padding="same"))

        model.add(Flatten())
        # NoisyDense replaces the plain Dense(128) of the basic agent.
        model.add(tfa.layers.NoisyDense(128, activation="relu"))
        model.add(Dense(6, activation='linear'))
        model.compile(loss="mse", optimizer=Adam(learning_rate=0.0001), metrics=['accuracy'])
        return model

    def act(self, obs, action_space):
        """Delegate to the fallback agent when the env queries this agent directly."""
        return self.baseAgent.act(obs, Discrete(6))

    def train(self):
        """Sample a minibatch from the replay buffer and fit the online network.

        Targets are one-step bootstrapped Q-values computed from the
        target network. No-op until the buffer reaches
        MIN_REPLAY_MEMORY_SIZE.
        """
        if self.buffer.size() < constants.MIN_REPLAY_MEMORY_SIZE:
            return

        current_states, action, reward, new_states, done = self.buffer.sample_element(constants.MINIBATCH_SIZE)

        # Q-values for the sampled states from the online network...
        current_states_q = self.training_model.predict(current_states)

        # ...and for the successor states from the target network.
        new_states_q = self.trained_model.predict(new_states)

        # X is the state, Y is the corrected Q-vector used as regression target.
        states = []
        actions = []

        for index in range(constants.MINIBATCH_SIZE):

            # Bootstrap from the target network unless the transition was
            # terminal. (Was `done[index] is not True`, an identity check
            # that is always truthy for numpy bools because np.True_ is
            # not the True singleton — terminals were never detected.)
            if not done[index]:
                new_state_q = reward[index] + constants.DISCOUNT * np.max(new_states_q[index])
            else:
                new_state_q = reward[index]

            # Overwrite only the Q-value of the action actually taken.
            current_better_q = current_states_q[index]
            current_better_q[action[index]] = new_state_q

            states.append(current_states[index])
            actions.append(current_better_q)

        self.training_model.fit(np.array(states), np.array(actions), batch_size=constants.MINIBATCH_SIZE, verbose=0,
                                shuffle=False)

        # NOTE(review): `done` here is the whole sampled batch (a non-empty
        # sequence), so this condition is truthy on every call and the
        # counter advances once per train() call, not per finished episode.
        # Kept as-is to preserve the existing sync cadence — confirm intent.
        if done:
            self.update_counter += 1

        # Sync the target network once the counter passes the threshold.
        if self.update_counter > constants.UPDATE_EVERY:
            self.trained_model.set_weights(self.training_model.get_weights())
            self.update_counter = 0

    def action_choose(self, state):
        """Return the online network's Q-vector for a single state.

        Occasionally (p ~ 0.001) prints the Q-table for debugging.
        """
        state_reshape = tf.reshape(state, (-1, 18, 11, 11))
        q_table = self.training_model.predict(state_reshape)
        if np.random.random() <= 0.001:
            print(q_table)
        return q_table

    def save_buffer(self, state_feature, action, reward, next_state_feature, done, Data_processing=False):
        """if you want rotate observation into other 3 corner, set Data_processing to True"""
        if Data_processing:
            self.data_processing(state_feature, action, reward, next_state_feature, done)
        else:
            self.buffer.append([state_feature, action, reward, next_state_feature, done])

    # epsilon decay
    def epsilon_decay(self):
        """Multiplicatively decay epsilon until it reaches min_epsilon."""
        self.epsilon = self.epsilon * self.eps_decay if self.epsilon > self.min_epsilon else self.epsilon

    def save_weights(self, numOfEpisode):
        """Checkpoint the online network every `save_weight` episodes (see constants.py)."""
        if numOfEpisode % constants.save_weight == 0:
            self.training_model.save_weights(('./checkpoints/episode{:}/episode{:}'.format(numOfEpisode, numOfEpisode)))
            print("weights saved!")

    def load_weights(self, weight_name):
        """Restore both networks from ./checkpoints/<weight_name>/<weight_name>."""
        self.training_model.load_weights('./checkpoints/{:}/{:}'.format(weight_name, weight_name))
        self.trained_model.load_weights('./checkpoints/{:}/{:}'.format(weight_name, weight_name))
        print("weights loaded!")

    def save_model(self, model_name):
        """Export the full online model to ./<model_name>."""
        self.training_model.save("./{:}".format(model_name))

    def data_processing(self, state_feature, action, reward, next_state_feature, done):
        """Augment one transition with its three board rotations.

        Each 90-degree rotation of the (channels-first) board stack is
        stored alongside a correspondingly remapped movement action, so
        one experience yields four buffer entries. Presumably actions
        1-4 are Up/Down/Left/Right — TODO confirm against pommerman's
        action enum.
        """
        def convert_left_bottom(state_feature, next_state_feature, action):
            # Rotate every channel 90 degrees counter-clockwise.
            state_feature_left_bottom = []
            for board in state_feature:
                state = np.rot90(board, k=1)
                state_feature_left_bottom.append(state)
            next_state_feature_left_bottom = []
            for board in next_state_feature:
                state = np.rot90(board, k=1)
                next_state_feature_left_bottom.append(state)
            # Remap the movement action to match the rotated frame.
            if action == 1:
                action = 3
            elif action == 2:
                action = 4
            elif action == 3:
                action = 2
            elif action == 4:
                action = 1
            return np.array(state_feature_left_bottom), np.array(next_state_feature_left_bottom), action

        def convert_right_bottom(state_feature, next_state_feature, action):
            # Rotate every channel 180 degrees.
            state_feature_left_bottom = []
            for board in state_feature:
                state = np.rot90(board, k=2)
                state_feature_left_bottom.append(state)
            next_state_feature_left_bottom = []
            for board in next_state_feature:
                state = np.rot90(board, k=2)
                next_state_feature_left_bottom.append(state)
            # A 180-degree turn swaps each movement with its opposite.
            if action == 1:
                action = 2
            elif action == 2:
                action = 1
            elif action == 3:
                action = 4
            elif action == 4:
                action = 3
            return np.array(state_feature_left_bottom), np.array(next_state_feature_left_bottom), action

        def convert_right_top(state_feature, next_state_feature, action):
            # Rotate every channel 270 degrees (i.e. 90 degrees clockwise).
            state_feature_left_bottom = []
            for board in state_feature:
                state = np.rot90(board, k=3)
                state_feature_left_bottom.append(state)
            next_state_feature_left_bottom = []
            for board in next_state_feature:
                state = np.rot90(board, k=3)
                next_state_feature_left_bottom.append(state)
            if action == 1:
                action = 4
            elif action == 2:
                action = 3
            elif action == 3:
                action = 1
            elif action == 4:
                action = 2
            return np.array(state_feature_left_bottom), np.array(next_state_feature_left_bottom), action

        # Rotate and store the original plus all three rotated copies.
        state_left_bottom, next_state_left_bottom, action_left_bottom = convert_left_bottom(state_feature,
                                                                                           next_state_feature,
                                                                                           action)
        state_right_bottom, next_state_right_bottom, action_right_bottom = convert_right_bottom(state_feature,
                                                                                                next_state_feature,
                                                                                                action)
        state_right_top, next_state_right_top, action_right_top = convert_right_top(state_feature, next_state_feature,
                                                                                    action)
        self.buffer.append([state_feature, action, reward, next_state_feature, done])
        self.buffer.append([state_left_bottom, action_left_bottom, reward, next_state_left_bottom, done])
        self.buffer.append([state_right_bottom, action_right_bottom, reward, next_state_right_bottom, done])
        self.buffer.append([state_right_top, action_right_top, reward, next_state_right_top, done])
+
diff --git a/Group_C/agents/DQNAgent_priority_memory.py b/Group_C/agents/DQNAgent_priority_memory.py
new file mode 100644
index 0000000..2032f60
--- /dev/null
+++ b/Group_C/agents/DQNAgent_priority_memory.py
@@ -0,0 +1,143 @@
+from keras.layers import Dense, Flatten, Conv2D
+from keras import Sequential
+from tensorflow.keras.optimizers import Adam
+from pommerman.agents import BaseAgent
+from pommerman.agents import RandomAgent
+from pommerman import characters
+from gym.spaces import Discrete
+from Group_C.utility.replay_memory import replay_Memory
+from Group_C.utility import constants
+import numpy as np
+import tensorflow as tf
+import copy
+
+
class DQNAgent(BaseAgent):
    """DQN agent with prioritized replay memory.

    Same network as the basic DQN agent, but transitions are stored with
    a TD-error priority (append_pri) and sampled by priority
    (sample_element_pri).
    """

    def __init__(self, character=characters.Bomber):
        super(DQNAgent, self).__init__(character)
        # Fallback policy used by act(); training-time actions are chosen
        # externally via action_choose().
        self.baseAgent = RandomAgent()

        # Online network (updated every train()) and target network
        # (synced to the online weights every UPDATE_EVERY syncs).
        self.training_model = self.new_model()
        self.trained_model = self.new_model()
        self.trained_model.set_weights(self.training_model.get_weights())
        # self.load_weights()

        self.epsilon = constants.epsilon
        self.min_epsilon = constants.MIN_EPSILON
        self.eps_decay = constants.EPSILON_DECAY
        self.buffer = replay_Memory(constants.MAX_BUFFER_SIZE)
        # Counts train() passes since the last target-network sync.
        self.update_counter = 0

    def new_model(self):
        """Build the conv + dense Q-network.

        Input is an 18x11x11 channels-first feature stack; output is a
        vector of 6 Q-values (one per Pommerman action).
        """
        model = Sequential()
        input_shape = (constants.MINIBATCH_SIZE, 18, 11, 11,)
        model.add(Conv2D(256, 3, (1, 1), input_shape=input_shape[1:], activation="relu", data_format="channels_first",
                         padding="same"))
        model.add(Conv2D(256, 3, (1, 1), activation="relu", data_format="channels_first", padding="same"))
        model.add(Conv2D(256, 3, (1, 1), activation="relu", data_format="channels_first", padding="same"))

        model.add(Flatten())
        model.add(Dense(128, activation="relu"))
        model.add(Dense(6, activation='linear'))
        model.compile(loss="mse", optimizer=Adam(learning_rate=0.0001), metrics=['accuracy'])
        return model

    def act(self, obs, action_space):
        """Delegate to the fallback agent when the env queries this agent directly."""
        return self.baseAgent.act(obs, Discrete(6))

    def train(self):
        """Sample a priority-weighted minibatch and fit the online network.

        Targets are one-step bootstrapped Q-values computed from the
        target network. No-op until the buffer reaches
        MIN_REPLAY_MEMORY_SIZE.
        """
        if self.buffer.size() < constants.MIN_REPLAY_MEMORY_SIZE:
            return

        current_states, action, reward, new_states, done = self.buffer.sample_element_pri(constants.MINIBATCH_SIZE)

        # Q-values for the sampled states from the online network...
        current_states_q = self.training_model.predict(current_states)

        # ...and for the successor states from the target network.
        new_states_q = self.trained_model.predict(new_states)

        # X is the state, Y is the corrected Q-vector used as regression target.
        states = []
        actions = []

        for index in range(constants.MINIBATCH_SIZE):

            # Bootstrap from the target network unless the transition was
            # terminal. (Was `done[index] is not True`, an identity check
            # that is always truthy for numpy bools because np.True_ is
            # not the True singleton — terminals were never detected.)
            if not done[index]:
                new_state_q = reward[index] + constants.DISCOUNT * np.max(new_states_q[index])
            else:
                new_state_q = reward[index]

            # Overwrite only the Q-value of the action actually taken.
            current_better_q = current_states_q[index]
            current_better_q[action[index]] = new_state_q

            states.append(current_states[index])
            actions.append(current_better_q)

        self.training_model.fit(np.array(states), np.array(actions), batch_size=constants.MINIBATCH_SIZE, verbose=0,
                                shuffle=False)

        # NOTE(review): `done` here is the whole sampled batch (a non-empty
        # sequence), so this condition is truthy on every call and the
        # counter advances once per train() call, not per finished episode.
        # Kept as-is to preserve the existing sync cadence — confirm intent.
        if done:
            self.update_counter += 1

        # Sync the target network once the counter passes the threshold.
        if self.update_counter > constants.UPDATE_EVERY:
            self.trained_model.set_weights(self.training_model.get_weights())
            self.update_counter = 0

    def calculate_td_error(self, state, action, reward, new_state, done):
        """Compute the TD error used as this transition's replay priority.

        Uses a double-DQN style target: the online network selects the
        argmax action for the next state and the target network evaluates
        it. Returns target - Q(state, action).
        """
        state_ = tf.reshape(state, (-1, 18, 11, 11))
        new_state_ = tf.reshape(new_state, (-1, 18, 11, 11))
        q_values = self.training_model.predict(state_)[0]
        q_value = q_values[action]
        if not done:
            target = reward + constants.DISCOUNT * \
                     self.trained_model.predict(new_state_)[0, np.argmax(self.training_model.predict(new_state_))]
        else:
            target = reward

        td_error = target - q_value
        return td_error

    def action_choose(self, state):
        """Return the online network's Q-vector for a single state.

        Occasionally (p ~ 0.001) prints the Q-table for debugging.
        """
        state_reshape = tf.reshape(state, (-1, 18, 11, 11))
        q_table = self.training_model.predict(state_reshape)
        if np.random.random() <= 0.001:
            print(q_table)
        return q_table

    def save_buffer(self, state_feature, action, reward, next_state_feature, done):
        """Store a transition together with its TD-error priority."""
        td_error = self.calculate_td_error(state_feature, action, reward, next_state_feature, done)
        self.buffer.append_pri(state_feature, action, reward, next_state_feature, done, td_error)

    def epsilon_decay(self):
        """Multiplicatively decay epsilon until it reaches min_epsilon."""
        self.epsilon = self.epsilon * self.eps_decay if self.epsilon > self.min_epsilon else self.epsilon

    def save_weights(self, numOfEpisode):
        """Checkpoint the online network every `save_weight` episodes (see constants.py)."""
        if numOfEpisode % constants.save_weight == 0:
            self.training_model.save_weights(('./checkpoints/episode{:}/episode{:}'.format(numOfEpisode, numOfEpisode)))
            print("weights saved!")

    def load_weights(self, weight_name):
        """Restore both networks from ./checkpoints/<weight_name>/<weight_name>."""
        self.training_model.load_weights('./checkpoints/{:}/{:}'.format(weight_name, weight_name))
        self.trained_model.load_weights('./checkpoints/{:}/{:}'.format(weight_name, weight_name))
        print("weights loaded!")

    def save_model(self, model_name):
        """Export the full online model to ./<model_name>."""
        self.training_model.save("./{:}".format(model_name))
diff --git a/Group_C/main_save_model.py b/Group_C/main_save_model.py
new file mode 100644
index 0000000..a39727d
--- /dev/null
+++ b/Group_C/main_save_model.py
@@ -0,0 +1,32 @@
def main(model_name='new_model', strategy='DQN_basic'):
    """Load checkpoint weights into the chosen DQN variant and export it.

    Args:
        model_name: directory name the Keras model is saved under (./<model_name>).
        strategy: which agent variant to use; one of 'DQN_basic',
            'DQN_double', 'DQN_dueling', 'DQN_priority', 'DQN_noisy',
            'DQN_multi_steps', 'DQN_final'.

    Raises:
        ValueError: if `strategy` is not a known variant.
    """
    if strategy == 'DQN_basic':
        from agents.DQNAgent_basic import DQNAgent
    elif strategy == 'DQN_double':
        from agents.DQNAgent_double_dqn import DQNAgent
    elif strategy == 'DQN_dueling':
        from agents.DQNAgent_dueling_dqn import DQNAgent
    elif strategy == 'DQN_priority':
        from agents.DQNAgent_priority_memory import DQNAgent
    elif strategy == 'DQN_noisy':
        from agents.DQNAgent_noisy import DQNAgent
    elif strategy == 'DQN_multi_steps':
        from agents.DQNAgent_multi_steps import DQNAgent
    elif strategy == 'DQN_final':
        from agents.DQNAgent_final import DQNAgent
    else:
        # Previously an unknown strategy fell through the chain and
        # crashed later with NameError; fail fast with a clear message.
        raise ValueError("unknown strategy: {!r}".format(strategy))

    agent1 = DQNAgent()

    # load saved weights from checkpoint
    weight = "episode100"
    agent1.load_weights(weight)

    # save your model
    agent1.save_model(model_name)


if __name__ == '__main__':
    # rename your model
    main(model_name='new_model', strategy='DQN_basic')
    # strategies: 'DQN_basic', 'DQN_double', 'DQN_dueling', 'DQN_priority', 'DQN_noisy', 'DQN_multi_steps', 'DQN_final'
diff --git a/Group_C/main_test.py b/Group_C/main_test.py
new file mode 100644
index 0000000..ef1b6f4
--- /dev/null
+++ b/Group_C/main_test.py
@@ -0,0 +1,87 @@
+from Group_C.utility import constants
+from agents.Agent1 import DQNAgent as agent_1
+from agents.Agent3 import DQNAgent as agent_3
+from pommerman.agents import SimpleAgent
+
+import pommerman
+
+
def main():
    """Run 100 evaluation games of two trained agents vs two SimpleAgents.

    Tracks wins/draws for the agent in seat 0 and prints a rolling
    win/draw rate every SHOW_EVERY episodes.
    """
    agent1 = agent_1()
    agent2 = SimpleAgent()
    agent3 = agent_3()
    agent4 = SimpleAgent()

    agent_list = [agent1, agent2, agent3, agent4]
    env = pommerman.make("PommeRadioCompetition-v2", agent_list)
    # Record average reward
    # NOTE(review): episode_reward is never accumulated below, so every
    # entry appended here is 0 — confirm whether reward tracking was
    # intended in evaluation.
    episode_rewards = []
    win = 0
    draw = 0
    total_game = 0

    total_numOfSteps = 0
    episode = 0
    # Conduct 100 rounds of testing
    for i in range(100):
        current_state = env.reset()
        # Convert state to 1D array
        episode_reward = 0
        numOfSteps = 0
        episode += 1
        done = False

        while not done:

            numOfSteps += 1
            total_numOfSteps += 1
            actions = env.act(current_state)
            env.render()
            new_state, result, done, info = env.step(actions)

            # End the episode early once our agent (id 10, seat 0) dies.
            if 10 not in new_state[0]["alive"]:
                done = True

            # Update state
            current_state = new_state

            if done:
                break

        # 0 = lose, 1 = draw, 2 = win (set below).
        result = 0

        if done:
            episode_rewards.append(episode_reward)
            total_game += 1
            if 0 in info.get('winners', []):
                win += 1
                result = 2

        # Record wins and losses
        # NOTE(review): assumes a drawn game runs exactly MAX_STEPS + 1
        # steps — verify against the env's episode-length limit.
        if numOfSteps == constants.MAX_STEPS + 1:
            draw += 1
            result = 1

        win_rate = win / total_game
        draw_rate = draw / total_game

        if episode % constants.SHOW_EVERY == 0:
            if result == 1:
                print("{} episodes done, result: {} , steps: {}, win_rate:{:.2f}, draw_rate:{:.2f}".format(episode,
                                                                                                           'draw',
                                                                                                           numOfSteps,
                                                                                                           win_rate,
                                                                                                           draw_rate))
            else:
                print("{} episodes done, result: {} , steps: {}, win_rate:{:.2f}, draw_rate:{:.2f}".format(episode,
                                                                                                           'win' if result == 2 else "lose",
                                                                                                           numOfSteps,
                                                                                                           win_rate,
                                                                                                           draw_rate))

    print("win: ", win, " draw: ", draw)
    env.close()


if __name__ == '__main__':
    main()
diff --git a/Group_C/main_train.py b/Group_C/main_train.py
new file mode 100644
index 0000000..815b839
--- /dev/null
+++ b/Group_C/main_train.py
@@ -0,0 +1,167 @@
+from Group_C.utility import constants
+import pommerman
+import numpy as np
+import pandas as pd
+import random
+
+from pommerman.agents import SimpleAgent
+from Group_C.utility.utility import featurize2D, reward_shaping
+
+
def main(strategy='DQN_basic'):
    """Train the chosen DQN variant against three SimpleAgents.

    Runs episodes indefinitely (stop manually), storing shaped-reward
    transitions in the agent's replay buffer, training every step, and
    checkpointing weights periodically.
    """
    # strategies: 'DQN_basic', 'DQN_double', 'DQN_dueling', 'DQN_priority', 'DQN_noisy', 'DQN_multi_steps', 'DQN_final'

    if strategy == 'DQN_basic':
        from agents.DQNAgent_basic import DQNAgent
    elif strategy == 'DQN_double':
        from agents.DQNAgent_double_dqn import DQNAgent
    elif strategy == 'DQN_dueling':
        from agents.DQNAgent_dueling_dqn import DQNAgent
    elif strategy == 'DQN_priority':
        from agents.DQNAgent_priority_memory import DQNAgent
    elif strategy == 'DQN_noisy':
        from agents.DQNAgent_noisy import DQNAgent
    elif strategy == 'DQN_multi_steps':
        from agents.DQNAgent_multi_steps import DQNAgent
    elif strategy == 'DQN_final':
        from agents.DQNAgent_final import DQNAgent

    # The learner sits in seat 0; the other three seats are scripted.
    agent1 = DQNAgent()
    agent2 = SimpleAgent()
    agent3 = SimpleAgent()
    agent4 = SimpleAgent()

    agent_list = [agent1, agent2, agent3, agent4]

    env = pommerman.make('PommeFFACompetitionFast-v0', agent_list)
    # Record average reward
    episode_rewards = []

    win = 0
    draw = 0
    total_game = 0
    reward_to_csv = []
    result_to_csv = []

    total_numOfSteps = 0
    episode = 0

    """please stop training manually"""
    while True:

        current_state = env.reset()
        # Convert state to 1D array
        episode_reward = 0
        numOfSteps = 0
        episode += 1
        done = False

        while not done:

            # NOTE(review): the pre-step features come from
            # current_state[2] while rewards and next-step features use
            # new_state[0] — seat indices disagree; confirm which seat
            # the learner should observe.
            state_feature = featurize2D(current_state[2])
            numOfSteps += 1
            total_numOfSteps += 1
            # Use random action to collect data
            # NOTE(review): this branch uses the *initial* constants.epsilon,
            # not agent1.epsilon, so epsilon_decay() below has no effect on
            # action selection — confirm intent.
            if constants.epsilon > np.random.random() and total_numOfSteps >= constants.MIN_REPLAY_MEMORY_SIZE:
                # Get Action
                actions = env.act(current_state)
                actions[0] = np.argmax(agent1.action_choose(state_feature)).tolist()
            else:
                # Use random action collects data
                actions = env.act(current_state)
                actions[0] = random.randint(0, 5)

            new_state, result, done, info = env.step(actions)

            # If our agent is dead, the game is stopped and we accelerate training
            if 10 not in new_state[0]["alive"]:
                done = True

            # reward_shaping
            agent1.buffer.append_action(actions[0])
            reward = reward_shaping(current_state[0], new_state[0], actions[0], result[0], agent1.buffer.buffer_action)

            next_state_feature = featurize2D(new_state[0])
            episode_reward += reward

            # Display the game screen for each set number of games
            if constants.SHOW_PREVIEW and not episode % constants.SHOW_GAME:
                env.render()

            # Store memory
            agent1.save_buffer(state_feature, actions[0], reward, next_state_feature, done)
            # Learn
            agent1.train()
            # Update state
            current_state = new_state

            if done:
                break

        # 0 = lose, 1 = draw, 2 = win (set below).
        result = 0

        if done:
            episode_rewards.append(episode_reward)
            total_game += 1
            if 0 in info.get('winners', []):
                win += 1
                result = 2

        # Record win and losses
        # NOTE(review): assumes a drawn game runs exactly MAX_STEPS + 1
        # steps — verify against the env's episode-length limit.
        if numOfSteps == constants.MAX_STEPS + 1:
            draw += 1
            result = 1
        win_rate = win / total_game
        draw_rate = draw / total_game

        # Store reward
        reward_to_csv.append(episode_reward)
        # Store result
        result_to_csv.append(result)

        if episode % constants.SHOW_EVERY == 0:
            if result == 1:
                print("{} episodes done, result: {} , steps: {}".format(episode,
                                                                        'draw',
                                                                        numOfSteps))

                print("Reward {:.2f}, Average Episode Reward: {:.3f}, win_rate:{:.2f}, draw_rate:{:.2f}".format(
                    episode_reward,
                    np.mean(episode_rewards),
                    win_rate,
                    draw_rate))
            else:
                print("{} episodes done, result: {} , steps: {}".format(episode,
                                                                        'win' if result == 2 else "lose",
                                                                        numOfSteps))

                print("Reward {:.3f}, Average Episode Reward: {:.3f}, win_rate:{:.2f}, draw_rate:{:.2f}".format(
                    episode_reward,
                    np.mean(episode_rewards),
                    win_rate,
                    draw_rate))

        agent1.epsilon_decay()
        agent1.save_weights(episode)

        # function for data augmentation
        # agent1.data_processing(numOfSteps, episode_reward, result, episode)

        """If you want to save result and reward as csv, please uncomment the code below"""
        # Record the results and chart them
        # if episode % 50 == 0:
        #     df_reward = pd.DataFrame({"reward": reward_to_csv})
        #     df_reward.to_csv("reward.csv", index=False, mode="a", header=False)
        #     print("successfully saved reward")
        #     reward_to_csv = []
        #     df_result = pd.DataFrame({"result": result_to_csv})
        #     df_result.to_csv("result.csv", index=False, mode="a", header=False)
        #     print("successfully saved result")
        #     result_to_csv = []

    env.close()


if __name__ == '__main__':
    main(strategy='DQN_double')
    # strategies: 'DQN_basic', 'DQN_double', 'DQN_dueling', 'DQN_priority', 'DQN_noisy', 'DQN_multi_steps', 'DQN_final'
diff --git a/Group_C/requirements.txt b/Group_C/requirements.txt
new file mode 100644
index 0000000..e519b44
--- /dev/null
+++ b/Group_C/requirements.txt
@@ -0,0 +1,24 @@
+docker~=3.1
+gym~=0.10.5
+scipy~=1.0
+Pillow~=8.2.0
+ruamel.yaml~=0.15
+Flask~=1.1.2
+requests~=2.18
+jsonmerge~=1.5.1
+astroid>=2
+isort~=4.3.4
+pylint>=2
+websockets~=6.0
+websocket-client~=0.53.0
+python-cli-ui~=0.7.1
+python-rapidjson~=0.6.3
+Click~=7.1.2
+
+numpy~=1.20.1
+pandas~=1.2.4
+setuptools~=52.0.0
+tensorflow~=2.6.0
+keras~=2.6.0
+pyglet~=1.5.21
+matplotlib~=3.5.1
\ No newline at end of file
diff --git a/Group_C/result_image/Reward_rainbow.png b/Group_C/result_image/Reward_rainbow.png
new file mode 100644
index 0000000..9482d67
Binary files /dev/null and b/Group_C/result_image/Reward_rainbow.png differ
diff --git a/Group_C/result_image/data/00reward basic DQN.csv b/Group_C/result_image/data/00reward basic DQN.csv
new file mode 100644
index 0000000..71d7ef0
--- /dev/null
+++ b/Group_C/result_image/data/00reward basic DQN.csv
@@ -0,0 +1,1400 @@
+-1.303
+-1.05
+-1.27
+-1.038
+-1.24
+-2.0350000000000006
+-1.544
+-1.3370000000000002
+-1.6950000000000003
+-1.279
+-1.138
+-1.197
+-1.476
+-1.427
+-1.1620000000000001
+-1.4530000000000003
+-1.314
+-1.189
+-1.217
+-1.0779999999999998
+-1.198
+-1.108
+-1.169
+-1.22
+-1.058
+-1.249
+-1.166
+-1.099
+-1.203
+-1.218
+-1.2610000000000001
+-1.107
+-1.461
+-1.416
+-1.134
+-1.068
+-1.335
+-1.067
+-1.391
+-1.202
+-1.241
+-1.278
+-1.2630000000000001
+-1.344
+-1.186
+-1.151
+-1.195
+-1.2750000000000001
+-1.117
+-1.3
+-1.1400000000000001
+-1.4290000000000003
+-1.8560000000000005
+-1.107
+-1.294
+-1.094
+-1.278
+-1.4680000000000002
+-1.238
+-1.1809999999999998
+-1.159
+-1.143
+-1.3319999999999999
+-1.278
+-1.238
+-1.2890000000000001
+-1.6660000000000004
+-1.2449999999999999
+-1.183
+-1.104
+-1.4760000000000002
+-1.27
+-1.52
+-1.236
+-1.25
+-1.7050000000000003
+-1.7010000000000005
+-1.196
+-1.203
+-2.009000000000001
+-1.0739999999999998
+-1.182
+-1.5150000000000001
+-1.203
+-1.4000000000000001
+-1.409
+-1.7770000000000006
+-1.4180000000000001
+-1.225
+-1.6760000000000004
+-1.38
+-1.304
+-1.282
+-1.179
+-1.2830000000000001
+-1.37
+-1.4729999999999999
+-1.532
+-1.4020000000000001
+-1.2990000000000002
+-1.5540000000000003
+-1.1500000000000001
+-2.3689999999999936
+-1.8090000000000004
+-1.3770000000000002
+-1.2970000000000002
+-1.8270000000000004
+-1.5100000000000002
+-1.314
+-1.45
+-1.15
+-1.158
+-1.298
+-1.3210000000000002
+-1.375
+-1.28
+-1.317
+-0.979
+-1.327
+-1.6310000000000002
+-1.227
+-1.3079999999999998
+-1.4420000000000002
+-1.6210000000000002
+-1.5550000000000002
+-1.5220000000000002
+-1.8610000000000007
+-1.2479999999999998
+-1.389
+-1.367
+-1.9370000000000005
+-1.7350000000000003
+-1.3235000000000001
+-1.036
+-1.076
+-1.5540000000000003
+-1.9790000000000005
+-1.6940000000000004
+-1.34
+-2.317999999999995
+-1.127
+-1.286
+-1.4065
+-1.5900000000000003
+-2.429
+-1.415
+-2.3579999999999997
+-2.1290000000000004
+-1.176
+-1.9340000000000004
+-1.379
+-1.084
+-2.0980000000000008
+-1.292
+-1.201
+-1.111
+-1.4769999999999999
+-1.132
+-1.7630000000000003
+-1.27
+-1.1800000000000002
+-1.197
+-2.0220000000000002
+-1.251
+-1.8930000000000007
+-1.836
+-1.6890000000000003
+-2.572000000000001
+-1.147
+-1.326
+-1.118
+-1.059
+-1.308
+-1.5420000000000003
+-1.3199999999999998
+-1.5750000000000002
+-1.113
+-1.8300000000000005
+-1.548
+-1.6050000000000002
+-1.6540000000000004
+-1.182
+-1.298
+-1.407
+-1.223
+-2.5720000000000005
+-1.3730000000000002
+-1.477
+-1.283
+-1.218
+-1.4730000000000003
+-1.9400000000000006
+-1.2309999999999999
+-1.493
+-1.9530000000000007
+-1.2809999999999997
+-1.4980000000000002
+-1.208
+-1.142
+-1.365
+-1.252
+-1.5390000000000001
+-1.237
+-1.185
+-1.223
+-2.261
+-1.294
+-1.477
+-1.5690000000000002
+-1.177
+-1.6320000000000001
+-1.195
+-1.9070000000000007
+-1.2750000000000001
+-1.223
+-1.265
+-1.328
+-1.119
+-1.5640000000000003
+-1.7450000000000003
+-2.1720000000000006
+-2.1830000000000007
+-1.8040000000000003
+-2.0260000000000007
+-1.7270000000000003
+-1.5570000000000004
+-1.8950000000000005
+-1.234
+-1.5700000000000003
+-1.6490000000000002
+-1.315
+-2.7500000000000013
+-1.5590000000000002
+-1.125
+-1.244
+-1.5160000000000005
+-1.216
+-1.8400000000000005
+-1.512
+-1.5380000000000003
+-1.539999999999993
+-1.104
+-1.8630000000000004
+-1.6820000000000004
+-1.6340000000000003
+-1.345
+-0.8460000000000005
+-1.195
+-1.412
+-1.7625000000000002
+-1.3850000000000002
+-1.197
+-1.14
+-1.8120000000000003
+-1.9590000000000003
+-1.103
+-1.151
+-1.4529999999999998
+-1.6090000000000002
+-1.8380000000000007
+-2.028000000000001
+-1.8210000000000006
+-2.129
+-1.313
+-1.6750000000000003
+-2.1410000000000005
+-1.8410000000000006
+-1.33
+-1.2195
+-1.322
+-1.8040000000000007
+-0.45200000000000023
+-0.5310000000000002
+-0.46700000000000025
+-1.4330000000000003
+-1.597
+-1.4115000000000002
+-1.634999999999997
+-1.4220000000000002
+-1.7040000000000004
+-1.9330000000000003
+-1.2070000000000003
+-1.7340000000000004
+-1.3399999999999999
+-1.6
+-1.5530000000000004
+-1.5690000000000002
+-1.6060000000000003
+0.036
+-2.687999999999992
+-1.372
+-1.9540000000000006
+-1.5150000000000001
+0.02700000000000003
+-1.8855000000000004
+-1.9100000000000006
+-1.234
+-1.6775000000000002
+-1.238
+-1.4460000000000002
+-1.543
+-2.543000000000001
+-1.699
+-1.7460000000000004
+-1.377
+-1.252
+-1.7350000000000003
+-1.413
+-1.354
+-1.3930000000000002
+-2.45
+-2.029000000000001
+-1.3330000000000002
+-0.6100000000000004
+-1.5210000000000004
+-0.09499999999999995
+-1.9970000000000008
+-1.8720000000000006
+-2.4120000000000013
+-1.5860000000000003
+-1.32
+-1.067
+-1.9580000000000006
+-1.2349999999999999
+-0.9180000000000006
+-0.6250000000000002
+-1.483
+-1.162
+-1.3070000000000002
+-2.2285000000000004
+-1.374
+-1.7300000000000004
+-1.234
+-1.7980000000000005
+-1.5650000000000004
+-1.621000000000002
+-0.8290000000000005
+-1.8420000000000005
+-2.0020000000000007
+-1.4100000000000001
+-1.5730000000000004
+-1.331
+-1.2039999999999993
+-1.4960000000000004
+-1.5600000000000005
+-1.327
+-1.4449999999999967
+-0.34700000000000014
+-0.5530000000000004
+-1.1385000000000016
+-1.8000000000000007
+-1.7440000000000007
+-1.9030000000000005
+-1.2730000000000015
+-1.7150000000000005
+-1.479
+-1.6440000000000001
+-1.7080000000000002
+-1.5590000000000002
+-0.4670000000000002
+-1.7340000000000004
+-1.7750000000000004
+-0.9770000000000006
+-1.3190000000000002
+-2.2110000000000003
+-1.4810000000000003
+-0.7870000000000004
+-1.6620000000000004
+-1.4395
+-1.13
+-1.1640000000000001
+-1.1829999999999998
+-1.9010000000000007
+-0.35100000000000026
+-1.5280000000000002
+-1.338
+0.054000000000000194
+-0.6570000000000005
+-0.6470000000000005
+-1.9320000000000006
+-1.7620000000000005
+-1.212
+-1.229
+-1.332
+-1.9520000000000004
+-1.9040000000000006
+-0.9390000000000007
+-0.991
+-0.9080000000000006
+-1.316
+-1.8170000000000004
+-1.8720000000000008
+-1.1680000000000001
+-1.4480000000000002
+-1.4800000000000004
+-1.3730000000000002
+-1.554000000000003
+-0.24900000000000005
+-1.2389999999999999
+-1.2875
+-1.283
+-1.6530000000000005
+-1.5670000000000002
+-1.4360000000000002
+-1.355
+-1.071
+-0.7890000000000006
+-1.399
+-1.0470000000000006
+-0.19100000000000006
+-1.201
+-0.159
+-1.1
+-0.4490000000000002
+-1.123
+-0.7050000000000005
+-1.2510000000000001
+-1.574
+-0.06799999999999984
+-1.6010000000000004
+-1.4580000000000002
+-1.4400000000000002
+-1.6800000000000006
+-1.162
+-1.4180000000000001
+-2.299000000000001
+-0.4890000000000002
+-0.7650000000000005
+-1.4685000000000004
+-1.5970000000000004
+-1.6490000000000005
+-1.1969999999999998
+-1.5930000000000004
+-2.154000000000001
+-1.48
+-1.5400000000000003
+-0.17199999999999996
+-1.6740000000000004
+-1.1469999999999998
+-1.4260000000000002
+0.010000000000000064
+-0.5000000000000003
+-0.36300000000000016
+-1.5150000000000003
+-1.645
+-0.051999999999999935
+-1.7980000000000005
+-0.5235000000000003
+-1.338
+-1.8140000000000005
+-1.6340000000000003
+-1.3070000000000004
+-1.3820000000000001
+-1.148
+-0.5740000000000003
+-1.226
+-1.118
+0.08299999999999991
+-0.12799999999999997
+-1.552
+-0.15199999999999997
+-1.0905000000000007
+0.15200000000000014
+-1.5140000000000002
+-1.6500000000000004
+-0.4870000000000004
+-1.7370000000000003
+-1.419
+0.10600000000000019
+-1.6030000000000002
+-0.5070000000000002
+-1.4200000000000004
+-2.0970000000000004
+-1.115
+-1.124
+-0.22900000000000015
+-1.215
+-1.232
+0.033000000000000085
+0.09800000000000017
+-1.433
+-1.148
+-1.2960000000000003
+-1.292
+-1.087
+-1.0999999999999999
+-1.113
+-1.366
+-1.287
+-0.6460000000000004
+-1.146
+-1.4460000000000002
+-1.287
+0.0050000000000000565
+-1.108
+-1.415
+-1.216
+-1.5585000000000004
+-1.2355
+0.05300000000000011
+-0.06399999999999999
+-0.6740000000000005
+-0.12399999999999997
+-0.6440000000000005
+0.1690000000000002
+-1.4020000000000001
+-1.0619999999999998
+-1.1829999999999998
+-0.6830000000000004
+-0.2550000000000001
+-1.1669999999999998
+-1.146
+-1.5350000000000004
+-1.7390000000000003
+-0.6050000000000003
+-1.4660000000000002
+-0.26700000000000007
+-0.006999999999999912
+-0.6670000000000004
+-1.6635000000000004
+-1.0539999999999998
+-2.469999999999994
+-2.1095000000000006
+-0.3570000000000002
+-0.5400000000000003
+-0.09100000000000004
+-1.017
+-0.12600000000000003
+-1.8270000000000004
+-1.5740000000000003
+-0.20800000000000005
+-0.3845000000000002
+-1.281
+-0.8020000000000005
+-1.2719999999999998
+0.21650000000000016
+-0.06299999999999988
+-1.3280000000000003
+-0.11000000000000001
+-1.4890000000000003
+-0.6680000000000005
+-1.016
+-0.8070000000000006
+-1.4820000000000002
+-1.23
+-1.144
+-0.8260000000000006
+-1.1840000000000002
+-0.05799999999999998
+-1.3449999999999998
+-0.1790000000000001
+-1.1059999999999999
+-1.419
+-1.153
+-0.42200000000000026
+-0.9640000000000005
+-0.2390000000000001
+-0.2720000000000001
+-0.11699999999999999
+-0.8969999999999999
+-1.149
+-0.5210000000000002
+-0.214
+-0.16500000000000004
+-0.6820000000000005
+-0.30100000000000016
+0.009000000000000147
+-1.5810000000000002
+-1.299
+0.08000000000000004
+-1.2049999999999998
+-1.312
+0.017000000000000084
+-1.2694999999999999
+-0.8815000000000006
+-1.2650000000000001
+-0.2050000000000001
+-1.4560000000000004
+-1.5880000000000003
+-0.39600000000000013
+0.05000000000000006
+-0.7679999999999999
+-2.1625000000000005
+-1.214
+-1.508
+0.08400000000000019
+-1.6830000000000005
+-1.4830000000000003
+-1.2510000000000001
+-1.134
+-1.4270000000000003
+-1.1059999999999997
+-0.33400000000000013
+-1.8730000000000007
+-1.3980000000000001
+-1.4450000000000003
+-0.40200000000000025
+-1.4270000000000003
+-0.8280000000000003
+-1.1729999999999998
+-1.358
+-1.3490000000000002
+-1.6030000000000004
+-1.450000000000001
+-0.03299999999999985
+-0.01599999999999993
+-1.1874999999999998
+-1.469
+-0.18699999999999983
+-1.7160000000000004
+-1.523
+-1.359
+-1.286
+-1.407
+-1.337000000000004
+-1.336
+-1.312
+-0.525
+-0.2054999999999999
+-2.0150000000000006
+-1.6080000000000003
+-0.5720000000000004
+-1.136
+-2.681000000000002
+-0.589
+-1.3359999999999999
+0.07600000000000005
+-0.6825000000000004
+-1.2185000000000001
+-0.5170000000000002
+-0.07900000000000001
+-1.8395000000000001
+-1.0619999999999998
+0.13400000000000012
+0.12900000000000017
+-1.9850000000000003
+-0.2615
+0.2490000000000002
+-1.1589999999999998
+-1.5070000000000003
+-1.4529999999999998
+-1.6760000000000004
+-0.962
+0.10200000000000017
+-0.4840000000000004
+-1.4980000000000002
+-1.6460000000000004
+-1.2590000000000001
+0.22300000000000025
+-1.137
+-0.45900000000000035
+-1.4100000000000001
+-1.209
+-0.0679999999999999
+-1.089
+-0.30299999999999994
+-2.276000000000001
+0.1310000000000001
+-1.6300000000000003
+-0.9705000000000007
+0.20400000000000007
+-0.903
+-0.3440000000000002
+-0.7450000000000004
+-0.5730000000000003
+-1.4449999999999998
+-1.9370000000000007
+-1.4330000000000003
+-1.4675
+-1.0260000000000007
+-1.0219999999999998
+-0.5680000000000004
+-1.0160000000000005
+-1.257
+-0.3140000000000002
+-1.3760000000000001
+-0.333
+-0.2620000000000001
+-1.3995
+-1.6560000000000004
+-1.6830000000000003
+-0.6250000000000003
+-0.02699999999999992
+-0.938
+-0.6289999999999998
+0.22900000000000018
+-0.6860000000000005
+-0.35600000000000015
+0.07200000000000005
+-0.9779999999999999
+-1.257
+-0.053999999999999826
+-2.1750000000000007
+-1.1210000000000002
+-1.403
+-1.4620000000000002
+-1.6350000000000002
+-1.178
+0.18900000000000014
+-0.6120000000000003
+-1.2419999999999998
+-1.193
+-1.1109999999999998
+-1.139
+-0.2440000000000001
+-1.0799999999999998
+-1.258
+-1.377
+-1.301
+-0.7520000000000006
+-1.5290000000000004
+-0.2790000000000001
+-1.5680000000000005
+0.25700000000000023
+-0.01599999999999989
+-1.246
+-1.7750000000000006
+-1.2650000000000001
+-1.297
+-0.4620000000000003
+-1.2959999999999998
+-1.2919999999999998
+0.3090000000000002
+-1.1479999999999986
+-1.2029999999999998
+-1.0739999999999998
+-1.7180000000000004
+-1.034
+-1.483
+-1.5790000000000002
+0.1180000000000001
+0.16400000000000012
+-0.12000000000000002
+0.039000000000000215
+-0.6350000000000005
+-0.19699999999999995
+-0.018999999999999885
+-0.9349999999999998
+-1.4330000000000003
+-1.5310000000000001
+-1.26
+-1.001
+-1.0619999999999998
+-1.5630000000000002
+-1.302
+-1.7330000000000005
+-1.2060000000000013
+-0.08199999999999988
+-1.03
+0.04000000000000003
+-1.7590000000000006
+-1.5590000000000002
+-1.189
+-1.167
+-1.308
+-1.4990000000000003
+-1.5510000000000002
+-1.4520000000000002
+-0.04600000000000002
+-0.8820000000000006
+0.045000000000000026
+-1.321
+-1.5810000000000004
+-2.4224999999999994
+-0.7950000000000005
+-0.22099999999999995
+-1.2169999999999999
+-1.5150000000000001
+-0.37150000000000016
+-0.6355000000000001
+-1.183
+-1.333
+-1.2049999999999998
+-1.0739999999999998
+-1.4330000000000003
+-0.9700000000000001
+-1.6930000000000005
+-1.232
+-0.9400000000000005
+-2.1310000000000007
+-0.277
+-1.3210000000000002
+-1.4250000000000003
+-0.6430000000000003
+-1.5210000000000006
+-0.058999999999999886
+-1.379
+-1.4460000000000002
+-1.2634999999999998
+0.2820000000000002
+-0.06499999999999988
+0.19700000000000015
+0.14000000000000015
+-0.991
+-2.1550000000000007
+-0.06299999999999988
+-1.2249999999999999
+0.1880000000000001
+-0.5380000000000003
+-2.2699999999999947
+-1.4930000000000003
+-0.38400000000000023
+-0.009999999999999972
+-0.021999999999999985
+0.09200000000000012
+0.01800000000000003
+-0.9239999999999998
+-1.2055
+-0.06999999999999998
+-1.291
+-0.10900000000000001
+-1.5510000000000004
+-1.4810000000000003
+-1.073
+-1.6610000000000003
+-0.5250000000000001
+-1.3600000000000003
+-1.396
+-0.006999999999999927
+-1.1239999999999999
+-0.12749999999999992
+0.11700000000000015
+-1.168
+-1.35
+-1.2590000000000001
+-1.9640000000000009
+-1.5680000000000003
+-1.193
+-0.2680000000000001
+-1.491
+-1.5180000000000002
+-0.40900000000000014
+-1.015
+0.1820000000000001
+-1.0839999999999999
+-0.0029999999999999792
+-0.997
+0.3220000000000002
+0.25200000000000017
+-1.4920000000000002
+-0.058999999999999955
+-0.3560000000000001
+-1.234
+-1.4320000000000002
+-1.573
+-1.085
+-0.01299999999999997
+-1.4
+-0.272
+0.02300000000000014
+0.0030000000000001466
+-0.1179999999999998
+-1.7690000000000003
+-0.9179999999999999
+-0.7065000000000005
+0.2810000000000002
+0.08600000000000009
+-0.25500000000000006
+-0.6200000000000004
+-1.4900000000000002
+-1.26
+0.08700000000000016
+-0.42000000000000015
+-1.2934999999999999
+-0.2010000000000001
+0.1220000000000001
+-1.5785000000000005
+-1.304
+-1.5034999999999992
+-1.291
+-0.6179999999999999
+-0.4070000000000001
+-1.25
+-1.325
+-1.234
+-0.2670000000000002
+-0.6950000000000004
+-1.5220000000000005
+-1.041
+-0.9079999999999999
+-1.156
+-2.0280000000000005
+-1.381
+-0.5470000000000004
+-0.6730000000000005
+-1.321
+-1.028
+-1.057
+0.24100000000000016
+-1.3210000000000002
+0.003000000000000229
+-1.0859999999999999
+-1.1179999999999999
+-1.376
+-0.9949999999999998
+-0.5895000000000004
+-2.001000000000001
+0.1930000000000002
+-1.287
+-0.43500000000000033
+-1.1329999999999998
+-0.5320000000000001
+-1.152
+0.2280000000000002
+-0.41500000000000037
+-0.5930000000000004
+-0.22800000000000012
+-0.7570000000000005
+0.11200000000000004
+-1.1320000000000001
+-2.468000000000001
+-1.1900000000000042
+-1.1770000000000007
+-1.439
+-1.276
+-1.276
+-1.1039999999999999
+0.2650000000000002
+-0.005999999999999979
+-1.2999999999999998
+-0.9694999999999999
+-1.343
+-1.266
+-1.353
+-0.9889999999999999
+0.24500000000000016
+-1.6350000000000002
+-1.53
+0.037000000000000005
+-0.9639999999999999
+-0.1675
+-1.3610000000000002
+-0.27400000000000013
+-0.04299999999999987
+0.12700000000000017
+-0.1309999999999999
+-1.156
+-0.14799999999999994
+0.05400000000000006
+-1.0080000000000007
+-0.8050000000000004
+-1.8775000000000008
+0.20200000000000012
+-1.5369999999999922
+-0.8939999999999999
+-1.3085
+-1.0959999999999999
+-1.009
+-1.7740000000000005
+-1.9380000000000006
+-0.2595000000000001
+-1.5225
+-1.019
+-1.27
+-1.0819999999999999
+-1.132
+-1.001
+0.24300000000000022
+-0.4320000000000003
+-1.3335
+-0.46800000000000014
+-1.9140000000000006
+-1.6590000000000003
+-1.3920000000000001
+-1.04
+-0.07999999999999996
+-0.907
+-1.073
+-1.17
+-1.1039999999999999
+-1.074
+-1.091
+-0.12600000000000003
+0.010500000000000141
+-0.9829999999999999
+0.08200000000000018
+-0.7035000000000003
+0.2470000000000002
+-1.222
+-1.5385
+-0.06600000000000007
+-0.3870000000000001
+-0.10900000000000006
+-0.14499999999999996
+-1.399
+-0.10499999999999977
+-1.236
+-0.9069999999999999
+0.2820000000000002
+-1.1079999999999999
+-1.5600000000000003
+-0.24699999999999983
+-1.269
+-0.847
+-1.5380000000000003
+0.18400000000000014
+-0.20699999999999996
+-1.9010000000000007
+-0.983
+-1.076
+-0.018999999999999934
+-0.5410000000000003
+-1.139
+-0.7379999999999998
+-1.4210000000000003
+-1.3330000000000006
+0.13100000000000006
+-1.4360000000000002
+-1.325
+-1.169
+0.26000000000000006
+0.04700000000000003
+-1.257
+-1.101
+-1.459
+-1.467
+-1.066
+-1.081
+0.07800000000000015
+-0.856
+-1.446000000000001
+-1.144
+-0.9029999999999998
+-0.845
+0.1660000000000002
+-1.329
+0.18500000000000014
+0.1470000000000001
+0.29300000000000015
+-0.034500000000000024
+0.1390000000000001
+-1.105
+-0.7370000000000004
+-0.36150000000000027
+0.033500000000000175
+-0.26899999999999985
+-1.1289999999999998
+-0.4685000000000003
+-1.071
+-1.5850000000000002
+-1.459
+0.063
+-0.18399999999999997
+-0.3120000000000001
+-1.4740000000000002
+-1.105
+-0.0559999999999999
+-0.9924999999999998
+0.24000000000000019
+-1.093
+-1.1189999999999982
+-1.053
+-1.5010000000000003
+-1.9770000000000008
+-0.7144999999999999
+-0.1209999999999998
+-1.296
+-1.142
+-0.9989999999999999
+-1.3800000000000001
+-0.9270000000000006
+-0.4980000000000002
+0.1440000000000002
+-1.013
+-1.4930000000000003
+-0.03200000000000004
+-1.43
+0.24500000000000016
+0.1590000000000001
+-1.233
+-0.8849999999999998
+-1.059
+-1.5285000000000002
+-0.3380000000000002
+-1.2289999999999999
+-1.1360000000000006
+-0.6160000000000002
+-0.1750000000000001
+0.17300000000000015
+-1.1245
+-1.0639999999999998
+-1.553999999999999
+-1.0230000000000001
+-1.0090000000000001
+-1.23
+0.01800000000000016
+-1.243
+-0.721
+-1.4485000000000001
+-1.248
+-0.13699999999999984
+-0.5640000000000003
+-2.032000000000001
+-0.959
+-1.7000000000000002
+-3.1510000000000007
+0.07100000000000006
+-0.6260000000000003
+-1.229
+0.10000000000000009
+-0.985
+-0.5270000000000004
+-0.047999999999999904
+-0.11099999999999997
+-2.312
+-1.03
+-1.447
+-0.9669999999999997
+-0.10799999999999985
+-1.321
+-0.05400000000000005
+-0.6625000000000004
+-1.192
+-0.41100000000000025
+-0.9469999999999998
+0.12100000000000011
+-1.5035
+0.15100000000000013
+-0.8500000000000006
+0.39600000000000024
+-1.263
+-1.315
+-1.017
+0.24600000000000025
+0.2045000000000001
+-0.002999999999999823
+-1.2269999999999999
+-1.137
+-1.061
+-0.18200000000000002
+-0.9979999999999999
+0.24200000000000016
+0.16000000000000011
+-1.015
+-0.9510000000000005
+-1.3860000000000001
+-0.5310000000000002
+-1.174
+-1.1340000000000001
+0.2680000000000002
+-1.4669999999999987
+0.08900000000000013
+-1.1949999999999998
+-1.0959999999999999
+-1.089
+-0.9715000000000008
+-1.3695
+-0.0699999999999999
+0.14750000000000013
+-0.8640000000000005
+-2.025000000000001
+-0.44700000000000023
+0.1300000000000001
+-1.3459999999999999
+-1.3885
+-1.295
+-1.2369999999999997
+-1.6150000000000002
+-1.248
+-0.9490000000000007
+-1.21
+-1.027
+0.07700000000000014
+0.028999999999999988
+-0.5580000000000003
+0.016000000000000004
+-0.94
+-1.6905000000000006
+0.021000000000000074
+-0.853
+-0.7890000000000006
+0.1710000000000002
+-1.1190000000000009
+-1.4100000000000001
+-1.2500000000000009
+-0.4590000000000003
+-0.09999999999999996
+-1.1704999999999999
+-0.14099999999999996
+-0.9699999999999999
+-0.08699999999999997
+-1.312
+-0.25400000000000006
+-1.8870000000000005
+-1.1139999999999999
+-2.2460000000000004
+0.28600000000000025
+-1.157
+-1.002
+-1.1239999999999999
+-1.044
+-1.4040000000000001
+0.33600000000000024
+-1.4475000000000002
+-1.5580000000000003
+-1.165
+-1.109
+0.06700000000000009
+-0.9279999999999999
+-1.0659999999999998
+-0.0455
+0.1615000000000001
+-1.331
+0.007999999999999993
+0.04600000000000004
+-1.192
+-2.480000000000001
+-1.2240000000000073
+-1.9390000000000005
+-1.6130000000000004
+0.13400000000000012
+0.27000000000000024
+-1.216
+0.21600000000000022
+-1.4520000000000004
+-1.483
+-0.188
+-1.1189999999999998
+0.23800000000000013
+-1.0239999999999998
+-2.677000000000002
+-0.9369999999999999
+-1.1869999999999998
+-0.19499999999999998
+-1.7350000000000005
+-0.024999999999999835
+-0.01899999999999981
+-1.0205
+-1.2429999999999999
+-1.036
+0.09100000000000007
+0.07000000000000002
+-1.028
+-1.0359999999999998
+-0.09499999999999999
+-0.21500000000000014
+0.2550000000000002
+-0.4500000000000002
+0.32100000000000023
+-1.247
+-0.15299999999999989
+-1.0125
+-1.303
+-0.29300000000000004
+-0.028999999999999825
+-0.9204999999999999
+-1.5160000000000002
+-1.2919999999999998
+0.07100000000000004
+-1.9900000000000007
+-0.30500000000000005
+-0.833
+-1.3319999999999999
+-1.252
+-1.214
+0.3010000000000002
+-1.497
+0.11700000000000024
+-1.4680000000000002
+0.25400000000000017
+-1.295
+-1.329
+0.2830000000000002
+-1.8790000000000004
+-0.955
+-0.7040000000000004
+-0.3680000000000001
+-1.0070000000000001
+0.06400000000000017
+-1.229
+-1.012
+-1.0594999999999999
+-1.279
+-1.2870000000000001
+-1.14
+0.2815000000000002
+-0.8990000000000006
+0.12600000000000008
+-2.2685000000000004
+-0.31600000000000017
+0.1530000000000001
+-0.9489999999999998
+-0.9119999999999997
+0.1340000000000001
+-0.13500000000000012
+-1.3639999999999999
+-1.256
+-0.9259999999999999
+-1.12
+-0.33100000000000007
+-0.6130000000000003
+-0.8020000000000005
+0.03900000000000018
+0.20700000000000007
+-1.181
+-0.2149999999999999
+-0.9299999999999999
+0.16200000000000006
+-1.417
+-0.3770000000000003
+-0.17900000000000002
+0.07400000000000016
+-1.236
+-1.2979999999999998
+-1.254
+-2.1705000000000005
+-0.6410000000000003
+-0.052000000000000074
+-0.8069999999999998
+-1.298
+-1.9250000000000007
+-0.1869999999999999
+-0.307
+-0.9259999999999999
+-1.2059999999999997
+0.04700000000000021
+0.02200000000000022
+-0.3820000000000001
+-1.1819999999999997
+-1.1969999999999998
+-0.9039999999999998
+-1.1609999999999998
+0.07700000000000017
+-1.206000000000002
+0.11700000000000009
+-0.06599999999999986
+-1.3750000000000002
+-1.251
+-1.374000000000001
+-0.21499999999999986
+-1.224
+0.2900000000000002
+-0.9419999999999998
+-0.3700000000000002
+-1.25
+-0.9729999999999999
+-0.99
+-0.0449999999999999
+-0.39400000000000024
+-1.1360000000000001
+-0.7430000000000004
+-0.19299999999999998
+-1.3410000000000002
+-1.241
+-0.123
+0.14100000000000024
+-1.624
+-2.0730000000000004
+-0.9789999999999999
+-0.2200000000000001
+-1.266
+-0.20000000000000012
+-1.4730000000000003
+0.18500000000000014
+0.016000000000000035
+-0.07899999999999989
+-0.4070000000000001
+-0.9300000000000007
+0.2110000000000002
+0.2610000000000002
+-0.060499999999999915
+-1.3339999999999999
+-0.8549999999999999
+-1.1490000000000005
+-1.8460000000000005
+-1.4295
+-1.2640000000000031
+-1.201000000000001
+-0.31
+-0.5915000000000004
+0.2990000000000002
+-1.2890000000000001
+-1.4480000000000002
+-0.5015000000000002
+-0.252
+-1.204
+-1.6320000000000006
+-1.3615000000000002
+-1.7920000000000005
+-2.2580000000000013
+-1.1300000000000008
+-1.4180000000000001
+-0.7830000000000005
+-1.4640000000000002
+-1.58
+-2.181000000000001
+-1.345
+-1.384
+-0.161
+-2.128000000000001
+-1.6030000000000002
+-1.8930000000000007
+-0.5360000000000003
+-1.3890000000000102
+-3.9639999999999906
+-1.5190000000000001
+-1.2180000000000009
+-1.8630000000000004
+-2.4160000000000013
+-1.349
diff --git a/Group_C/result_image/data/01reward basic DQN with data augmentation.csv b/Group_C/result_image/data/01reward basic DQN with data augmentation.csv
new file mode 100644
index 0000000..5deee5a
--- /dev/null
+++ b/Group_C/result_image/data/01reward basic DQN with data augmentation.csv
@@ -0,0 +1,1000 @@
+-1.201
+-1.212
+-1.1229999999999998
+-1.211
+-1.1589999999999998
+-1.6020000000000003
+-1.2850000000000001
+-1.11
+-1.4420000000000002
+-1.2890000000000001
+-1.238
+-1.559
+-1.2730000000000001
+-1.268
+-1.075
+-1.207
+-1.071
+-1.208
+-1.059
+-1.549
+-1.318
+-1.2129999999999999
+-1.209
+-1.238
+-1.221
+-1.413
+-1.7220000000000002
+-1.7750000000000004
+-1.203
+-1.08
+-1.121
+-1.402
+-1.238
+-1.177
+-1.249
+-1.159
+-1.862
+-1.181
+-1.282
+-1.218
+-1.199
+-1.589
+-1.109
+-1.207
+-1.1019999999999999
+-1.3010000000000002
+-1.3459999999999999
+-1.2419999999999998
+-1.215
+-1.309
+-1.37
+-1.164
+-1.268
+-1.2349999999999999
+-1.1139999999999999
+-1.3399999999999999
+-1.104
+-1.404
+-1.084
+-1.2069999999999999
+-1.134
+-1.3319999999999999
+-1.026
+-1.389
+-1.128
+-1.162
+-1.102
+-1.5540000000000003
+-1.204
+-1.154
+-1.084
+-1.2349999999999999
+-1.206
+-1.2890000000000001
+-1.359
+-1.3519999999999999
+-1.203
+-1.1079999999999999
+-1.177
+-1.162
+-1.1549999999999998
+-1.161
+-1.3810000000000002
+-1.223
+-1.291
+-1.346
+-1.303
+-1.513
+-1.462
+-1.2530000000000001
+-1.278
+-1.9400000000000004
+-1.27
+-1.17
+-1.082
+-1.318
+-1.3970000000000002
+-1.325
+-1.2810000000000001
+-1.3830000000000002
+-1.37
+-1.443
+-1.27
+-1.354
+-1.093
+-1.166
+-1.266
+-1.076
+-1.3605
+-1.375
+-1.2610000000000001
+-1.4169999999999998
+-1.241
+-1.3970000000000002
+-1.266
+-1.115
+-1.5900000000000003
+-1.46
+-1.07
+-1.061
+-1.427
+-1.3250000000000002
+-1.241
+-1.509
+-1.2040000000000002
+-1.292
+-1.303
+-1.2550000000000001
+-1.411
+-1.109
+-1.1720000000000002
+-1.278
+-1.275
+-1.275
+-1.311
+-1.276
+-1.415
+-1.463
+-1.8860000000000006
+-1.2530000000000001
+-1.5540000000000003
+-1.3935000000000002
+-1.4330000000000003
+-1.6140000000000003
+-1.701
+-1.229
+-1.294
+-1.525
+-1.082
+-1.219
+-1.278
+-0.14800000000000002
+-1.6710000000000003
+-1.2650000000000001
+-1.338
+-1.109
+-1.5790000000000002
+-1.368
+-1.302
+-1.1809999999999998
+-1.233
+-1.2570000000000001
+-1.34
+-1.183
+-1.148
+-1.328
+-1.1480000000000001
+-1.156
+-1.2970000000000002
+-1.3940000000000001
+-1.5260000000000002
+-1.208
+-1.174
+-1.3130000000000002
+-1.311
+-1.4320000000000002
+-1.308
+-1.323
+-1.405
+-1.338
+-1.238
+-1.125
+-1.4780000000000002
+-1.219
+-1.258
+-1.228
+-1.119
+-1.1720000000000002
+-1.247
+-1.31
+-1.0339999999999998
+-1.298
+-1.288
+-1.439
+-1.488
+-1.475
+-1.3450000000000002
+-1.3050000000000002
+-1.323
+-1.1840000000000002
+-1.4220000000000002
+-1.7600000000000002
+-1.467
+-1.456
+-1.1500000000000001
+-1.238
+-1.252
+-1.7160000000000002
+-1.3290000000000002
+-1.3090000000000002
+-1.368
+-1.6840000000000004
+-1.393
+-1.8450000000000004
+-1.419
+-1.1019999999999999
+-1.181
+-1.148
+-1.8380000000000005
+-1.6350000000000002
+-1.233
+-1.2349999999999999
+-1.3370000000000002
+-1.3980000000000001
+-1.246
+-1.7080000000000004
+-1.331
+-1.2890000000000001
+-1.5160000000000002
+-1.208
+-1.5659999999999998
+-1.167
+-1.377
+-1.2690000000000001
+-1.4940000000000002
+-1.181
+-1.449
+-1.4050000000000002
+-1.184
+-1.4780000000000002
+-1.241
+-1.109
+-1.399
+-1.5770000000000004
+-1.218
+-1.2970000000000002
+-1.211
+-1.34
+-1.1855
+-1.4160000000000001
+-1.439
+-1.223
+-1.342
+-1.053
+-1.517
+-1.508
+-1.8480000000000003
+-1.358
+-1.24
+-1.9300000000000006
+-1.294
+-1.187
+-1.2590000000000001
+-1.5430000000000004
+-0.7590000000000005
+-1.35
+-1.4300000000000002
+-1.428
+-2.1500000000000004
+-1.356
+-1.3170000000000002
+-1.6590000000000003
+-1.8900000000000006
+-1.148
+-1.2129999999999999
+-1.8210000000000004
+-1.4900000000000002
+-1.22
+-1.4450000000000003
+-1.259
+-1.5360000000000003
+-1.231
+-1.107
+-1.187
+-1.1889999999999998
+-1.136
+-1.435
+-1.21
+-1.262
+-1.163
+-1.3079999999999983
+-1.23
+-1.5610000000000004
+-1.212
+-1.074
+-2.6420000000000012
+-1.157
+-1.206
+-1.307
+-1.6060000000000003
+-1.6330000000000005
+-2.426999999999994
+-1.368
+-1.0799999999999998
+-2.2120000000000006
+-1.407
+-1.2690000000000001
+-1.218
+-1.308
+-1.6510000000000002
+-1.4540000000000002
+-1.7699999999999991
+-1.413
+-0.24100000000000005
+-1.3820000000000001
+-1.166
+-1.5510000000000002
+-1.188
+-1.8410000000000002
+-1.4949999999999999
+-1.1700000000000002
+-1.681
+-1.6640000000000001
+-1.4100000000000001
+-1.363
+-1.345
+-1.1480000000000001
+-1.7190000000000003
+-1.006
+-1.464
+-1.2469999999999999
+-1.134
+-1.242
+-1.312
+-1.8070000000000004
+-1.4720000000000002
+-1.583
+-1.7610000000000003
+-1.5590000000000004
+-1.499
+-1.6285
+-1.399
+-1.1440000000000001
+-1.4130000000000003
+-1.8330000000000002
+-1.3780000000000001
+-1.064
+-1.258
+-1.368
+-1.8960000000000004
+-1.4900000000000002
+-1.5980000000000003
+-2.2010000000000014
+-1.5540000000000003
+-1.8860000000000003
+-1.9820000000000007
+-1.5290000000000004
+-2.379000000000001
+-1.319
+-1.3730000000000002
+-1.496
+-1.4400000000000002
+-1.7355000000000005
+-1.2489999999999999
+-1.0959999999999999
+-1.5115000000000003
+-1.6510000000000002
+-1.358
+-1.1260000000000001
+-1.573
+-0.4680000000000002
+-1.7915000000000005
+-1.403
+-1.6630000000000003
+-1.4400000000000002
+-2.2590000000000003
+-1.6740000000000004
+-0.8350000000000003
+-1.3159999999999998
+-1.5850000000000004
+-1.28
+-1.5930000000000004
+-1.479
+-3.361999999999996
+-1.2930000000000026
+-1.3010000000000002
+-1.574
+-1.268
+-0.7800000000000002
+-1.3450000000000002
+-1.4140000000000001
+-1.2219999999999955
+-1.9150000000000005
+-1.276
+-2.3410000000000006
+-1.4880000000000002
+-1.1039999999999999
+-1.2610000000000001
+-1.6770000000000003
+-1.076
+-1.307
+-1.7480000000000002
+-1.9630000000000005
+-1.4490000000000003
+-1.328
+-1.4890000000000003
+-2.0400000000000005
+-1.8370000000000002
+-1.3319999999999999
+-1.6340000000000003
+-1.4850000000000003
+-1.6980000000000004
+-0.6780000000000004
+-1.8910000000000005
+-1.491
+-1.229
+-1.239
+-2.2120000000000006
+-1.9110000000000003
+-1.5420000000000003
+-2.0140000000000007
+-2.4879999999999924
+-2.5450000000000093
+-1.6000000000000003
+-1.6950000000000003
+-1.8000000000000005
+-1.677
+-1.5920000000000005
+-1.1320000000000001
+-1.29
+-0.6400000000000001
+-1.7550000000000003
+-1.044
+-1.5490000000000004
+-1.6110000000000002
+-1.5230000000000001
+-1.5610000000000004
+-1.4060000000000001
+-2.197000000000001
+-2.734000000000001
+-0.5605000000000003
+-1.9940000000000007
+-1.7479999999999987
+-1.3510000000000004
+-1.8720000000000006
+-1.188
+-1.8630000000000004
+-1.5570000000000004
+-1.245
+-1.303
+-0.4470000000000002
+-1.5490000000000004
+-1.355
+-1.9220000000000006
+-1.078
+-1.5430000000000001
+-1.278
+-1.493
+-0.5270000000000002
+-1.4760000000000004
+-1.9069999999999974
+-1.9010000000000002
+-1.557
+-2.9975000000000005
+-1.6790000000000003
+-1.7480000000000002
+-1.4130000000000003
+-1.345
+-2.303999999999996
+-1.487
+-1.7100000000000004
+-1.3290000000000002
+-1.025
+-1.7060000000000004
+-1.7240000000000015
+-2.160000000000002
+-1.2485
+-2.225
+-2.907999999999983
+-1.3310000000000002
+-1.4180000000000001
+-1.3170000000000002
+-0.6080000000000003
+-1.143
+-1.5160000000000002
+-2.0460000000000007
+-0.9700000000000006
+-1.3050000000000002
+-1.9370000000000005
+0.002000000000000118
+-1.6270000000000002
+-1.384
+-1.3380000000000003
+-1.217
+-2.9430000000000014
+-1.56
+-1.4980000000000002
+-1.204
+-1.9280000000000006
+-1.5200000000000007
+-1.1340000000000001
+-0.5490000000000002
+-2.0414999999999965
+-0.8470000000000005
+-0.23100000000000004
+-1.5960000000000003
+-1.5735000000000003
+-1.5000000000000002
+-0.9330000000000007
+-1.4890000000000003
+-1.8950000000000005
+-1.6710000000000003
+-1.9250000000000045
+-3.456999999999991
+-1.9140000000000006
+-1.9520000000000002
+-1.3890000000000002
+-1.228
+-1.7130000000000012
+-1.59
+-1.8240000000000003
+-1.8770000000000007
+-1.349
+-1.4920000000000002
+-2.721000000000002
+-2.3760000000000012
+-1.8920000000000006
+-1.9680000000000004
+-1.1980000000000002
+-1.3390000000000004
+-1.257
+-1.059
+-2.232999999999997
+-2.234000000000001
+-1.138
+-1.073
+-1.9910000000000005
+-0.9590000000000004
+-1.469
+-1.3960000000000001
+-2.3639999999999937
+-2.8725000000000014
+-1.7390000000000003
+-1.5150000000000001
+-1.6085000000000003
+-1.4380000000000002
+-0.011999999999999955
+-1.282
+-1.8440000000000007
+-1.8730000000000002
+-2.0280000000000005
+-1.8400000000000003
+-2.3369999999999997
+-1.318
+-1.312
+-1.8370000000000006
+-0.44000000000000034
+-0.178
+-1.6460000000000004
+-1.353
+-0.14699999999999971
+-0.35300000000000015
+-1.015
+-1.646
+-1.226
+-1.4170000000000003
+-1.358
+-2.046000000000001
+-2.875000000000001
+-1.6720000000000006
+-1.26
+-1.459
+-1.4620000000000002
+-1.6830000000000003
+-0.5340000000000003
+-2.136000000000001
+-1.6960000000000002
+-1.5270000000000001
+0.12250000000000004
+-1.069
+-1.3490000000000082
+-1.6390000000000002
+-2.0760000000000005
+-1.303000000000005
+-1.142
+-1.2890000000000001
+-1.1
+-0.24199999999999988
+-1.1000000000000008
+-1.4225000000000003
+-1.304
+-1.5840000000000123
+-2.3275000000000006
+-1.375
+-0.9970000000000008
+-0.8550000000000005
+-1.6410000000000005
+-1.3
+-0.8920000000000005
+-2.1750000000000007
+-1.034
+-1.7590000000000003
+-1.9865000000000004
+-1.25
+-1.9460000000000006
+-1.6810000000000005
+-1.8830000000000007
+-1.359
+-1.8530000000000004
+-2.130000000000001
+-1.568
+-1.5170000000000001
+-1.347
+-1.0190000000000006
+-1.119
+-1.276
+-0.14599999999999988
+-1.667000000000003
+-0.8080000000000006
+-0.9839999999999999
+-1.353
+-1.6180000000000003
+-1.7010000000000003
+-0.9315
+-1.105
+-1.6680000000000004
+-1.121
+-1.042
+-1.059
+-1.6650000000000005
+-1.153
+-1.295
+-0.7684999999999997
+0.05700000000000015
+-1.4560000000000008
+-2.2239999999999975
+-1.5115
+-1.472
+-1.4160000000000001
+-1.7670000000000003
+-1.254
+-1.9650000000000007
+-1.3090000000000002
+-1.0099999999999998
+-2.541000000000001
+-1.13
+-0.979
+-1.5645000000000004
+-2.3900000000000006
+-1.4560000000000002
+-1.4340000000000004
+-1.147
+-1.23
+-1.073
+-1.7040000000000006
+-1.303
+-1.9470000000000005
+-0.24650000000000016
+-0.6270000000000003
+-1.5350000000000004
+-1.0740000000000005
+-1.4000000000000001
+-1.4740000000000002
+-1.086
+-0.018999999999999892
+-1.7260000000000015
+-1.5070000000000001
+-0.6260000000000004
+-1.0000000000000024
+-1.4360000000000004
+-1.168
+-1.204
+-1.8340000000000005
+-1.397500000000002
+-1.113
+-1.8750000000000004
+-1.0685
+-1.7380000000000004
+-1.325
+-2.166000000000001
+-1.202
+-1.4080000000000001
+-1.9860000000000009
+-1.0299999999999998
+-1.298999999999997
+-1.319
+-0.204
+-1.336
+-1.4160000000000001
+-0.3560000000000002
+-0.29800000000000004
+-1.6170000000000002
+-1.086
+-1.5550000000000002
+-1.1785
+-1.0639999999999998
+-1.08
+-2.4369999999999963
+-1.232
+-1.4350000000000005
+-1.9170000000000005
+-0.8870000000000006
+-0.7590000000000006
+-1.6930000000000005
+-1.588
+-1.5480000000000003
+-1.37
+-1.04
+-1.5659999999999998
+-1.149
+-2.4350000000000005
+-1.077
+-1.311
+-0.9199999999999998
+-1.5600000000000003
+-2.069000000000001
+-1.193
+-0.42100000000000004
+-1.4900000000000002
+-1.5150000000000001
+-2.024000000000003
+-1.2269999999999999
+-1.087
+-0.21100000000000013
+-1.5830000000000002
+-1.322
+-1.4980000000000002
+-1.8340000000000005
+-1.265
+-0.08800000000000002
+-1.2149999999999999
+-0.45799999999999996
+-2.318
+-1.2890000000000001
+-1.2389999999999999
+-0.4250000000000003
+-1.4059999999999933
+-0.5665000000000002
+-1.062
+-1.6790000000000003
+-0.22099999999999995
+-0.23900000000000005
+-1.9890000000000008
+-1.5520000000000034
+-0.7245000000000005
+-1.2779999999999998
+-2.1355000000000004
+-0.7450000000000003
+-1.1869999999999998
+-1.353
+-1.490000000000001
+-1.3679999999999999
+-1.2519999999999998
+-1.144
+-0.162
+-1.3300000000000003
+-0.999
+-1.5860000000000003
+-0.248
+-0.07599999999999997
+-1.8200000000000003
+-2.563000000000001
+-3.141000000000001
+-1.2570000000000001
+-1.3889999999999998
+-1.7180000000000004
+-0.09100000000000004
+-1.5910000000000002
+-1.4040000000000001
+-0.9965000000000006
+-1.0305
+-0.4100000000000003
+-1.0859999999999999
+-2.109000000000001
+-0.40000000000000024
+-0.9775000000000006
+-2.263000000000001
+-1.0235
+-0.9179999999999999
+-1.358
+-2.5600000000000005
+-0.12200000000000007
+-1.266
+-1.6080000000000005
+-0.9259999999999999
+-1.715
+-1.294
+-1.5000000000000029
+-0.4720000000000001
+-1.3950000000000002
+-1.1119999999999999
+-1.129
+-0.48100000000000026
+-1.016
+-1.2369999999999999
+-1.373
+-2.0080000000000005
+-1.0605
+-1.5380000000000003
+-1.022
+-1.108
+-1.353
+-0.05399999999999997
+-1.007
+-1.4775
+-1.3170000000000002
+-1.645
+-0.9429999999999998
+-0.9129999999999999
+-1.2579999999999998
+-1.5330000000000004
+-3.714999999999991
+-0.4320000000000002
+-0.5390000000000004
+-1.1295000000000006
+-0.942
+-1.3475000000000001
+-1.42
+0.038000000000000055
+-0.6360000000000002
+-1.7360000000000004
+-0.9289999999999999
+-1.356
+-1.1444999999999999
+-0.23850000000000005
+-1.043
+-0.026999999999999864
+-1.7675000000000005
+-1.2739999999999998
+-2.339000000000001
+-0.23099999999999998
+-1.1710000000000027
+-1.292
+0.15700000000000008
+0.08950000000000005
+-1.366
+-0.9250000000000046
+-1.3624999999999998
+-0.002999999999999973
+-0.7890000000000005
+-1.282
+-0.954
+-1.2185000000000001
+-1.275
+-1.6680000000000004
+-1.0909999999999997
+-1.386000000000001
+-1.094
+-0.020999999999999998
+-1.7010000000000005
+-0.13999999999999999
+-2.382000000000001
+-0.997
+-1.451
+-1.5670000000000002
+-1.5790000000000002
+-1.274
+0.09800000000000011
+-1.6070000000000002
+-1.037
+-1.141
+-1.0130000000000001
+-1.371
+-1.494
+-1.0324999999999998
+-0.9710000000000008
+0.07700000000000007
+-0.4680000000000001
+-1.242
+-0.35600000000000004
+-1.424
+-0.49750000000000033
+-1.389
+-1.023
+-0.3554999999999999
+-1.8250000000000006
+-1.7810000000000004
+-1.137
+-1.6310000000000002
+-1.8860000000000006
+-1.167
+-1.001
+0.09700000000000007
+-1.4460000000000002
+0.19800000000000018
+-1.167
+-1.253
+-0.969
+-0.7350000000000005
+-1.007
+-2.115
+-1.162
+-1.023
+-1.4050000000000002
+-1.161
+-1.003
+-0.5590000000000003
+-0.5740000000000003
+0.3160000000000002
+-1.7085000000000004
+-1.42
+-0.6740000000000005
+-1.138
+-1.165
+-1.236
+-1.105
+-1.085
+-1.06
+-1.7570000000000006
+-1.8180000000000007
+-1.7525000000000004
+-1.182
+-1.3510000000000006
+-0.9709999999999999
+-0.372
+-2.2410000000000005
+-1.5410000000000004
+-1.5130000000000001
+0.3350000000000002
+-0.9349999999999998
+-1.171
+-0.8359999999999999
+-0.6250000000000003
+0.27600000000000025
+-0.3160000000000001
+-1.1849999999999998
+0.13900000000000007
+-1.4760000000000002
+-1.334
+-1.131
+-0.8570000000000005
+-1.2950000000000046
+-1.6170000000000004
+-1.3730000000000002
+-1.097
+-0.33400000000000013
+-0.6320000000000005
+-1.3130000000000002
+-2.8610000000000024
+-1.5620000000000012
+-0.09299999999999986
+-1.427
+-0.5940000000000004
+-1.395
+-1.2510000000000001
+-0.5410000000000004
+-1.7440000000000004
+-1.34
+-2.6705000000000063
+-0.3360000000000002
+-2.3320000000000007
+-1.274
+-1.4725000000000001
+-1.6540000000000006
+-1.164
+-1.0835
+-1.4529999999999998
+-1.467
+-0.96
+-1.5570000000000004
+-1.113
+-1.183
+-2.4560000000000013
+-1.202
+-1.8570000000000007
+-0.22300000000000003
+-1.3940000000000001
+-1.1849999999999998
+0.19000000000000022
+-1.3270000000000002
+-1.9500000000000015
+-0.9885
+-2.203000000000001
+-1.435
+-2.5140000000000002
+-0.7080000000000004
+0.20000000000000015
+-2.300999999999998
+-0.922
+-1.5960000000000005
+-3.053
+-1.6760000000000004
+-1.4490000000000003
+-1.0159999999999998
+-1.5930000000000004
+-1.4770000000000003
+-1.6860000000000004
+-1.0279999999999998
+-1.4860000000000002
+-1.1684999999999999
+-1.02
+-1.8760000000000008
+-1.5260000000000002
+-2.097000000000001
+-1.0715000000000001
+-2.048000000000001
+-1.3420000000000014
+-1.031
+-0.164
+-0.8660000000000005
diff --git a/Group_C/result_image/data/02reward double DQN.csv b/Group_C/result_image/data/02reward double DQN.csv
new file mode 100644
index 0000000..48ee8e4
--- /dev/null
+++ b/Group_C/result_image/data/02reward double DQN.csv
@@ -0,0 +1,1000 @@
+-1.089
+-1.202
+-1.341
+-1.155
+-1.292
+-1.219
+-1.243
+-1.386
+-1.114
+-1.247
+-1.122
+-1.511
+-1.625
+-1.211
+-1.382
+-1.248
+-1.125
+-1.361
+-1.182
+-1.268
+-1.156
+-1.258
+-1.179
+-1.538
+-1.227
+-1.381
+-1.239
+-1.108
+-1.166
+-1.429
+-1.197
+-1.351
+-1.172
+-1.412
+-1.068
+-1.392
+-1.206
+-1.177
+-1.203
+-1.368
+-1.129
+-1.166
+-1.272
+-1.151
+-1.249
+-1.395
+-1.298
+-2.226
+-1.455
+-1.159
+-1.501
+-2.037
+-1.165
+-1.257
+-1.2
+-1.277
+-1.35
+-1.239
+-1.523
+-1.341
+-1.196
+-1.152
+-1.123
+-1.081
+-1.274
+-1.283
+-1.201
+-1.352
+-1.444
+-1.482
+-1.619
+-1.436
+-1.287
+-1.107
+-1.193
+-1.633
+-1.254
+-1.22
+-1.431
+-1.25
+-1.185
+-1.154
+-1.055
+-1.168
+-1.64
+-1.255
+-0.573
+-1.208
+-1.493
+-1.33
+-1.149
+-1.47
+-1.19
+-1.313
+-1.135
+-1.175
+-1.125
+-1.105
+-1.305
+-1.22
+-1.227
+-1.239
+-1.243
+-1.272
+-1.641
+-1.261
+-1.329
+-1.782
+-1.562
+-1.287
+-1.317
+-1.088
+-1.161
+-1.469
+-1.191
+-1.287
+-1.659
+-1.337
+-1.301
+-0.246
+-1.202
+-1.365
+-1.306
+-1.178
+-1.272
+-1.215
+-1.368
+-2.086
+-2.153
+-1.175
+-2.121
+-1.08
+-1.393
+-1.048
+-1.321
+-1.606
+-1.495
+-1.134
+-1.518
+-1.619
+-1.784
+-1.612
+-1.551
+-1.3595
+-1.5635
+-2.229
+-1.125
+-1.204
+-1.591
+-1.573
+-1.08
+-1.03
+-1.277
+-1.191
+-1.929
+-1.423
+-1.626
+-1.511
+-1.422
+-1.108
+-1.209
+-1.8835
+-2.191
+-1.626
+-1.247
+-1.266
+-1.452
+-1.782
+-1.351
+-1.319
+-1.611
+-1.204
+-1.731
+-1.534
+-1.8445
+-1.296
+-1.65
+-1.985
+-1.255
+-1.371
+-1.379
+-1.332
+-1.135
+-1.558
+-1.308
+-1.181
+-0.882
+-1.219
+-2.201
+-1.5935
+-1.515
+-2.1
+-1.301
+-2.023
+-1.672
+-1.371
+-1.677
+-1.737
+-1.872
+-1.025
+-1.407
+-1.286
+-2.449
+-1.827
+-1.214
+-2.1815
+-1.394
+-0.348
+-1.587
+-1.407
+-1.58
+-1.292
+-1.194
+-1.686
+-1.314
+-0.92
+-2.093
+-1.659
+-1.971
+-1.803
+-1.2
+-1.336
+-1.352
+-1.606
+-1.031
+-1.638
+-1.428
+-1.487
+-1.586
+-1.925
+-1.578
+-1.492
+-1.1965
+-2.476
+-1.512
+-2.206
+-1.797
+-1.73
+-0.767
+-0.466
+-2.422
+-1.744
+-1.207
+-1.105
+-1.278
+-1.438
+-1.482
+-2.212
+-1.209
+-1.721
+-1.9345
+-1.145
+-1.133
+-1.617
+-2.458
+-2.4345
+-1.295
+-1.713
+-2.259
+-1.776
+-1.401
+-1.657
+-2.108
+-2.123
+-1.15
+-1.927
+-1.229
+-2.311
+-1.603
+-1.548
+-1.865
+-1.807
+-1.229
+-2.043
+-1.609
+-1.887
+-0.49
+-1.237
+-1.987
+-1.519
+-2.274
+-1.978
+-1.3065
+-2.201
+-1.445
+-1.542
+-1.369
+-1.775
+-2.311
+-2.157
+-1.841
+-1.23
+-2.066
+-1.376
+-1.347
+-1.376
+-1.929
+-2.393
+-0.454
+-0.143
+-1.713
+-1.724
+-1.467
+-1.151
+-2.062
+-0.459
+-0.879
+-1.254
+-1.343
+-1.496
+-2.384
+-1.408
+-2.344
+-2.372
+-1.922
+-1.451
+-1.113
+-2.886
+-3.356
+-1.913
+-1.488
+0.219
+-0.698
+-0.233
+-1.339
+-2.423
+-2.779
+-1.436
+-1.153
+-2.631
+-2.155
+-1.738
+-1.562
+-1.322
+-2.392
+-2.812
+-0.939
+-2.371
+-1.31
+-0.962
+-1.967
+-1.341
+-2.938
+-1.152
+-0.506
+-1.113
+-1.918
+-1.178
+-1.579
+-1.81
+-1.478
+0.078
+-1.564
+-1.394
+-1.842
+-1.291
+-1.739
+-1.842
+-3.51
+-1.7
+-0.267
+-2.4165
+-1.383
+-0.105
+-1.521
+-1.93
+-1.585
+-2.201
+-1.359
+-1.621
+-1.985
+-1.287
+-1.463
+-1.089
+-1.701
+-0.97
+-1.04
+-1.418
+-2.1045
+-0.96
+-2.188
+-1.441
+-1.3725
+-3.19
+-2.532
+-1.143
+-1.613
+-1.875
+-1.349
+-1.274
+-2.238
+-1.633
+-2.687
+-1.839
+-1.913
+-2.613
+-2.317
+-1.144
+-2.003
+-1.9325
+-0.6590000000000004
+-1.8980000000000006
+-1.7870000000000004
+-1.0829999999999977
+-2.1130000000000004
+-1.329
+-1.6320000000000001
+-1.343
+-2.6140000000000017
+-1.327
+-2.1690000000000005
+-1.9130000000000005
+-1.208
+-1.7910000000000004
+-1.2830000000000001
+-1.3899999999999966
+-2.232999999999999
+-2.2330000000000005
+-2.583000000000001
+-1.7775000000000005
+-1.673
+-1.54
+-0.4900000000000003
+-0.988
+-1.6260000000000003
+-2.027
+-1.5360000000000003
+-2.005000000000001
+-1.7020000000000004
+-2.168000000000001
+-1.8640000000000003
+-1.6260000000000003
+-0.8400000000000005
+-1.8500000000000005
+-1.8170000000000006
+-2.193000000000001
+-1.4210000000000003
+-1.6690000000000005
+-2.464999999999997
+-1.2790000000000001
+-1.5060000000000002
+-2.0120000000000005
+-0.272
+-0.8140000000000006
+-2.2910000000000004
+-1.606
+-1.0979999999999972
+-0.2770000000000001
+-1.5280000000000002
+-0.8040000000000005
+-1.8120000000000003
+-1.7730000000000006
+-1.141
+-2.290999999999999
+-1.7050000000000005
+-1.8430000000000004
+-1.9550000000000007
+-0.6830000000000004
+-1.9540000000000006
+-1.5560000000000005
+-0.4030000000000002
+-1.5890000000000004
+-1.3439999999999999
+-1.9360000000000008
+-1.331
+-1.7760000000000005
+-0.052999999999999936
+-0.5890000000000004
+-1.131
+-1.7970000000000006
+-2.0310000000000006
+-0.49700000000000033
+-1.7340000000000004
+-2.628999999999998
+-1.1879999999999997
+-1.9380000000000006
+-0.9780000000000008
+-1.0999999999999972
+-1.391999999999988
+-1.3500000000000076
+-0.8080000000000006
+-0.8940000000000006
+-0.26200000000000007
+-1.3869999999999896
+-1.2610000000000028
+-1.7650000000000006
+-2.354000000000001
+-1.6620000000000004
+-2.2560000000000002
+-0.20800000000000013
+-0.7450000000000004
+-0.6750000000000004
+-0.7940000000000005
+-1.5120000000000005
+-1.5690000000000004
+-1.5
+-1.5270000000000001
+-0.7220000000000004
+-1.3150000000000002
+-1.351
+-1.2269999999999932
+-1.5615000000000003
+-0.15800000000000006
+-2.418
+-1.5320000000000003
+-2.565999999999999
+-0.6260000000000004
+-1.077000000000007
+-2.133000000000001
+-0.8200000000000004
+-1.8780000000000006
+-2.0970000000000004
+-2.4970000000000017
+-2.171
+-1.8660000000000005
+-0.47200000000000036
+-1.5220000000000002
+-1.8240000000000007
+-1.8060000000000005
+-1.6780000000000004
+-1.4480000000000004
+-0.5910000000000004
+-1.4380000000000002
+-0.13899999999999998
+-2.859000000000003
+-1.5410000000000008
+-1.249
+-1.1270000000000016
+-0.6360000000000003
+-1.3810000000000078
+-1.336
+-1.8270000000000008
+-2.4099999999999966
+-1.6450000000000018
+-1.9180000000000006
+-2.8609999999999918
+-0.7290000000000004
+-1.37
+-2.4020000000000015
+-0.8730000000000006
+-1.557
+-1.4530000000000003
+-0.7960000000000006
+-1.068999999999998
+-1.0400000000000018
+-1.1349999999999982
+-1.8490000000000004
+-1.018
+-1.4095
+-1.4729999999999908
+-1.4290000000000003
+-1.4850000000000003
+-1.665
+-0.22900000000000004
+-1.9250000000000007
+0.03200000000000011
+-1.139
+-2.097000000000001
+-1.4950000000000003
+-1.8700000000000059
+-1.6440000000000006
+-1.8030000000000004
+-1.5490000000000037
+-1.020000000000001
+-1.7360000000000004
+-2.6510000000000002
+-1.1980000000000002
+-2.121000000000001
+-0.4560000000000003
+-2.358000000000001
+-0.5410000000000003
+-1.7000000000000004
+-0.35000000000000014
+-1.8500000000000005
+-2.6630000000000003
+-0.6780000000000004
+-1.9160000000000008
+-1.4680000000000002
+-1.1809999999999943
+-0.9370000000000006
+-1.5930000000000002
+-1.293000000000001
+-1.3940000000000001
+-1.9995000000000007
+-1.3900000000000001
+-1.8380000000000005
+-1.2859999999999905
+-1.8020000000000005
+-0.3990000000000003
+-1.2500000000000064
+-2.506000000000002
+-1.9130000000000007
+-1.3770000000000002
+-1.4985000000000004
+-0.2800000000000001
+-0.6000000000000003
+-1.5140000000000002
+-2.0390000000000006
+-1.273
+-0.8380000000000006
+-1.8020000000000005
+-1.7070000000000005
+-1.7960000000000003
+-0.7120000000000004
+-1.9200000000000004
+-0.8370000000000005
+-0.4580000000000002
+-1.5240000000000002
+-1.631000000000002
+-1.395
+-1.6060000000000003
+-1.3850000000000002
+-1.4220000000000002
+0.21100000000000016
+-1.8610000000000033
+-1.3400000000000003
+-1.4440000000000004
+-0.29200000000000015
+-1.6520000000000001
+-1.9900000000000015
+-1.0439999999999998
+-0.4050000000000002
+-1.1189999999999998
+-0.7320000000000005
+-0.9280000000000006
+-1.8530000000000035
+-0.6460000000000005
+-1.105
+-2.3309999999999995
+-1.329
+-1.4980000000000004
+-1.468
+-2.5210000000000017
+-0.5810000000000002
+-0.08699999999999997
+-1.8850000000000007
+-0.11699999999999994
+-0.42400000000000027
+-2.1760000000000006
+-1.8610000000000007
+-1.2180000000000113
+-1.298
+0.0040000000000001215
+-1.4520000000000004
+-1.5020000000000002
+-2.0070000000000023
+-1.4450000000000032
+-1.2559999999999998
+-1.489
+-2.248000000000001
+-1.5500000000000003
+-0.20900000000000013
+-1.9760000000000015
+-0.5260000000000004
+-1.434
+-1.5880000000000003
+-1.5440000000000003
+-1.6110000000000002
+-2.1240000000000006
+-0.4910000000000004
+-0.9720000000000005
+-1.9090000000000007
+-2.012000000000001
+-1.9280000000000006
+-1.2469999999999999
+0.03000000000000004
+-1.5530000000000004
+-0.8080000000000005
+-0.6200000000000004
+-0.24600000000000005
+-1.3980000000000001
+-1.348
+-1.6350000000000002
+-1.188
+-1.3810000000000002
+-1.5230000000000001
+-1.0419999999999998
+-1.7360000000000004
+-0.5960000000000004
+-1.8200000000000005
+-1.0180000000000007
+-1.207
+-1.143
+-0.020999999999999956
+-1.103
+-1.9300000000000006
+-1.3679999999999999
+-0.7450000000000006
+-1.5900000000000003
+-1.0
+-1.9200000000000004
+-0.06599999999999985
+-1.6450000000000005
+-1.4320000000000004
+-0.7120000000000005
+-1.2730000000000001
+-1.3530000000000002
+-1.9460000000000006
+-0.25
+-2.313999999999999
+-0.9310000000000006
+-1.5200000000000002
+-0.3720000000000002
+-1.3810000000000002
+-0.234
+-0.8100000000000006
+-1.3900000000000001
+-1.8400000000000005
+-0.989
+-1.3260000000000007
+-0.3910000000000002
+-1.0929999999999997
+-1.3219999999999954
+-1.4420000000000004
+-1.7610000000000003
+-1.2039999999999993
+-1.6580000000000004
+-1.4350000000000003
+-0.6870000000000004
+-1.9060000000000006
+-1.5300000000000002
+-1.266
+-1.4660000000000002
+-1.1269999999999962
+-1.9579999999999995
+-1.4900000000000002
+-0.22300000000000003
+-0.4140000000000002
+-1.261
+-1.28
+-0.15300000000000002
+-1.077
+-1.6839999999999962
+-0.8230000000000006
+-2.1100000000000008
+-1.2999999999999998
+-0.8480000000000005
+-1.5230000000000001
+-1.023
+-1.8170000000000006
+-1.5590000000000002
+0.2890000000000002
+-1.9780000000000006
+-2.0695000000000006
+-1.5330000000000004
+-1.0050000000000006
+-1.5420000000000003
+-1.6290000000000004
+-1.1649999999999998
+-0.185
+-1.4080000000000004
+-2.297000000000001
+-0.2220000000000001
+-1.5910000000000002
+-2.1000000000000005
+-1.4060000000000004
+-1.1270000000000038
+-1.6320000000000003
+-1.6425000000000005
+0.17000000000000015
+-0.9730000000000006
+-1.9100000000000006
+-1.0700000000000005
+-1.5980000000000003
+-1.3825
+-1.086000000000001
+-1.244
+-1.9370000000000007
+-1.8065000000000004
+-0.5050000000000003
+-2.0460000000000007
+-2.8729999999999976
+-0.13899999999999987
+-0.6750000000000004
+-1.4890000000000003
+-1.6420000000000003
+-1.512
+-0.8720000000000006
+-0.34500000000000003
+-1.6130000000000004
+-1.343
+-1.6090000000000004
+-1.61
+-1.5310000000000001
+-1.4690000000000003
+-0.15799999999999997
+-1.9620000000000006
+-2.0120000000000005
+0.10400000000000009
+0.030000000000000124
+0.1120000000000001
+-1.6710000000000003
+-0.7300000000000004
+-1.431
+-1.144
+-1.1449999999999982
+-1.5340000000000003
+-0.35200000000000015
+-1.8040000000000007
+-1.2459999999999998
+-2.0970000000000004
+-1.3420000000000012
+-1.9340000000000006
+-1.268
+-1.7920000000000003
+0.008000000000000134
+-1.2450000000000068
+0.014000000000000103
+-0.8770000000000007
+-1.233
+-0.37700000000000006
+-1.344
+-1.364
+-0.9060000000000006
+-0.5220000000000002
+-1.4000000000000004
+-1.6530000000000005
+-1.7000000000000002
+-1.364
+-1.7380000000000004
+-1.8640000000000008
+-1.6890000000000005
+-0.40500000000000014
+0.031000000000000017
+-2.112000000000001
+-0.09899999999999992
+-1.251
+-0.6850000000000004
+-1.373
+-1.4120000000000001
+-1.343
+-1.35
+-1.5330000000000004
+-0.9170000000000007
+-1.9330000000000007
+-1.229
+-1.5320000000000003
+-0.5350000000000004
+-0.277
+-0.19499999999999973
+-0.7440000000000004
+-1.552
+-0.2159999999999999
+-1.141
+-1.1170000000000013
+0.24600000000000022
+-0.7640000000000005
+-2.3169999999999984
+-1.9900000000000007
+-1.7360000000000004
+-0.05299999999999986
+-1.6280000000000003
+-1.1225
+-1.5590000000000002
+0.2730000000000002
+-1.197
+-1.7030000000000005
+-1.113
+-0.7110000000000004
+-0.98
+-1.5220000000000002
+-1.125
+-0.6485000000000003
+-1.6650000000000014
+-2.1190000000000007
+-0.3800000000000002
+-0.5700000000000001
+-0.233
+-0.1700000000000001
+-0.6580000000000005
+-1.4439999999999977
+-1.5550000000000002
+-1.837000000000002
+-1.3850000000000002
+-1.4440000000000002
+-2.0730000000000004
+-1.4410000000000143
+-1.6050000000000004
+0.11800000000000022
+-1.3800000000000001
+-1.3129999999999935
+0.09500000000000007
+-0.7960000000000006
+-1.145
+-1.8310000000000004
+-0.13899999999999993
+-2.342000000000001
+-1.1849999999999998
+-1.4290000000000003
+-1.6100000000000003
+-0.32600000000000023
+-1.2049999999999998
+-1.423
+-0.5250000000000004
+-1.3860000000000001
+-1.0064999999999997
+-1.7645000000000006
+-1.2449999999999999
+0.3090000000000002
+-1.212
+-0.43300000000000016
+-1.3080000000000014
+-1.4640000000000002
+-1.366
+-0.9840000000000007
+-1.7269999999999992
+-1.8890000000000007
+-0.20400000000000013
+-0.21200000000000002
+0.19100000000000017
+-0.7870000000000005
+-0.03999999999999987
+-1.435
+-1.0980000000000008
+-1.3350000000000009
+-1.3670000000000002
+-1.6020000000000003
+-1.6700000000000004
+-1.8720000000000006
+-0.9870000000000008
+-0.8420000000000006
+-1.4280000000000002
+-0.14799999999999983
+-1.198
+-1.5900000000000003
+-1.8820000000000006
+-1.6130000000000004
+-1.5940000000000003
+-0.153
+-2.522000000000001
+-1.3699999999999999
+-1.6740000000000004
+-0.2270000000000001
+-0.7140000000000005
+-0.0969999999999999
+-0.7510000000000006
+-1.8120000000000003
+-1.2830000000000001
+-1.9130000000000007
+0.19300000000000014
+-0.8879999999999999
+0.2800000000000002
+-0.6260000000000004
+-1.826000000000007
+-0.9230000000000006
+-0.45500000000000035
+-0.8000000000000004
+-1.7010000000000005
+-1.3780000000000001
+0.18700000000000014
+-1.5600000000000003
+-2.3559999999999954
+-1.4095000000000002
+-0.26
+-1.9240000000000004
+-1.155
+-1.199999999999994
+-0.11599999999999991
+-0.36800000000000005
+0.03900000000000016
+-1.242
+-1.145
+-0.031999999999999945
+-0.7990000000000005
+-0.7290000000000004
+-1.3239999999999998
+-1.4470000000000003
+0.2830000000000002
+-1.3990000000000002
+-0.35400000000000026
+-1.7940000000000005
+-1.2794999999999999
+-1.286
+-0.6770000000000004
+-0.2190000000000001
+-0.012999999999999859
+-0.7520000000000006
+-1.0619999999999992
+-1.0870000000000009
+-1.8120000000000005
+-1.393
+-1.2950000000000002
+-1.286
+-2.352000000000001
+-1.211
+-0.03199999999999987
+-0.2535
+-1.3590000000000002
+-2.0940000000000003
+-1.3439999999999999
+-1.8250000000000006
+-0.4110000000000003
+-1.284
+-1.529
+-2.558999999999999
+-0.10599999999999991
+-1.0699999999999987
+-0.22599999999999992
+0.021000000000000157
+-1.2690000000000001
diff --git a/Group_C/result_image/data/03reward dueling DQN.csv b/Group_C/result_image/data/03reward dueling DQN.csv
new file mode 100644
index 0000000..244e5a4
--- /dev/null
+++ b/Group_C/result_image/data/03reward dueling DQN.csv
@@ -0,0 +1,1000 @@
+-1.178
+-1.228
+-1.371
+-1.411
+-1.182
+-1.025
+-1.496
+-1.204
+-1.044
+-1.158
+-1.166
+-1.658
+-1.197
+-1.286
+-1.573
+-1.123
+-1.7
+-1.159
+-1.219
+-1.435
+-1.126
+-2.093
+-1.14
+-1.113
+-1.154
+-1.22
+-1.37
+-1.279
+-1.391
+-1.303
+-1.267
+-1.34
+-2.002
+-1.726
+-1.229
+-1.643
+-1.393
+-1.405
+-1.105
+-1.242
+-1.247
+-1.636
+-1.669
+-1.114
+-1.106
+-1.308
+-1.231
+-1.143
+-1.274
+-1.416
+-1.221
+-1.247
+-1.364
+-1.227
+-1.584
+-1.238
+-1.511
+-1.194
+-1.617
+-1.346
+-1.721
+-1.135
+-2.004
+-1.197
+-1.388
+-1.069
+-1.126
+-1.238
+-1.169
+-1.252
+-1.525
+-0.696
+-1.465
+-1.194
+-1.264
+-1.347
+-1.348
+-1.663
+-1.385
+-1.399
+-1.449
+-1.552
+-1.404
+-1.454
+-1.147
+-2.171
+-2.588
+-1.436
+-1.328
+-1.053
+-1.419
+-1.275
+-1.535
+-1.246
+-1.502
+-1.798
+-2.105
+-2.191
+-1.302
+-2.4045
+-2.074
+-1.097
+-0.52
+-1.723
+-2.285
+-1.364
+-1.242
+-1.423
+-2.594
+-1.813
+-1.724
+-1.563
+-1.614
+-1.67
+-2.121
+-1.299
+-1.157
+-1.929
+-2.244
+-1.623
+-2.623
+-1.461
+-1.442
+-2.511
+-1.169
+-1.561
+-2.772
+-1.831
+-0.796
+-2.073
+-1.966
+-2.0465
+-1.66
+-1.98
+-0.725
+-1.627
+-1.447
+-2.748
+-1.467
+-2.004
+-1.215
+-2.147
+-1.384
+-1.413
+-1.861
+-2.44
+-2.751
+-1.973
+-1.769
+-2.18
+-0.473
+-1.009
+-1.809
+-1.8405
+-2.361
+-1.442
+-1.885
+-0.688
+-1.444
+-2.059
+-1.027
+-1.612
+-2.248
+-1.636
+-1.702
+-2.564
+-2.2
+-1.676
+-1.664
+-1.805
+-2.191
+-1.35
+-1.484
+-1.563
+-2.431
+-2.365
+-1.535
+-1.846
+-0.53
+-1.374
+-2.685
+-1.151
+-1.714
+-0.715
+-1.994
+-1.408
+-1.414
+-2.535
+-1.392
+-0.766
+-0.726
+-0.534
+-1.633
+-1.818
+-1.176
+-1.796
+-2.197
+-1.8545
+-1.827
+-1.276
+-0.688
+-1.732
+-2.068
+-2.5815
+-1.144
+-1.781
+-1.361
+-2.205
+-1.518
+-2.209
+-0.76
+-2.284
+-1.484
+-0.252
+-2.124
+-0.777
+-2.153
+-1.749
+-1.849
+-2.227
+-2.235
+-1.938
+-1.278
+-1.378
+-1.3725
+-1.4415
+-1.95
+-1.534
+-1.671
+-1.252
+-2.122
+-1.991
+-1.064
+-1.312
+-1.546
+-2.357
+-1.492
+-1.303
+-1.295
+-1.917
+-0.405
+-2.103
+-1.443
+-2.004
+-1.044
+-1.752
+-1.365
+-0.76
+-1.211
+-1.352
+-1.675
+-1.638
+-2.193
+-0.351
+-2.296
+-1.231
+-1.76
+-0.935
+-1.709
+-2.029
+-1.037
+-2.324
+-2.278
+-2.098
+-1.491
+-1.733
+-2.061
+-1.858
+-2.445
+-1.651
+-1.672
+-1.481
+-0.943
+-0.904
+-0.995
+-1.581
+-0.487
+-2.571
+-1.381
+-0.772
+-0.748
+-0.954
+-1.4065
+-1.86
+-2.237
+-0.927
+-2.659
+-2.382
+-1.713
+-1.831
+-1.222
+-2.751
+-0.266
+-1.743
+-0.487
+-1.59
+-1.56
+-1.393
+-0.649
+-0.782
+-1.731
+-0.65
+-1.279
+-1.836
+-1.909
+-1.487
+-2.494
+-1.655
+-1.285
+-0.248
+-1.345
+-1.601
+-0.354
+-1.279
+-2.602
+-2.21
+-1.4
+-0.258
+-0.198
+-0.329
+-0.798
+-1.507
+-1.516
+-1.426
+-1.343
+-1.777
+-1.577
+-0.731
+-2.491
+-0.529
+-1.334
+-1.118
+-1.795
+-1.346
+-1.082
+-1.322
+-2.467
+-0.59
+-2.937
+-1.661
+-1.665
+-1.539
+-1.901
+-0.61
+-2.033
+-0.433
+0.239
+-0.262
+-1.592
+-0.436
+-2.026
+-1.39
+-1.589
+-1.703
+-1.72
+0.224
+-1.819
+-1.715
+-1.435
+0.039
+-1.902
+-1.389
+-1.312
+-2.938
+-0.367
+-1.532
+-0.613
+-1.393
+-2.173
+-1.365
+-1.296
+-1.5495
+-0.489
+-1.345
+-1.3915
+-1.476
+-1.736
+-0.8035
+-2.111
+-1.398
+-1.085
+-1.426
+-0.622
+-0.669
+-1.837
+-2.361
+-1.721
+-1.6805
+-1.357
+-1.47
+-0.341
+-1.999
+-2.169
+-2.3385
+-1.605
+-0.825
+-2.797
+-1.673
+-0.695
+-2.336
+-1.965
+-2.399
+-1.975
+-0.374
+-1.424
+-1.7175
+-1.249
+0.039
+-1.746
+-1.307
+-0.482
+-1.1325
+-0.091
+-2.723
+-1.611
+-1.993
+-2.045
+-2.094
+-1.539
+-1.395
+-0.946
+-1.849
+-1.589
+-1.305
+-2.8695
+-2.406
+-1.883
+-1.848
+-1.639
+-0.199
+-1.67
+-1.854
+-0.777
+-1.707
+-1.39
+-1.401
+-1.241
+-1.326
+-1.081
+-0.21
+-2.399
+-1.406
+-1.4965
+-1.424
+-1.717
+-0.644
+-1.9
+-2.059
+-1.539
+-1.511
+-2.183
+-1.599
+-1.444
+-1.455
+0.279
+-1.721
+-1.297
+-1.2335
+-2.361
+-1.481
+-1.334
+-1.639
+-1.743
+0.286
+0.293
+-1.923
+-0.306
+-1.905
+-0.854
+-0.113
+-1.418
+-1.875
+-0.658
+-2.567
+-1.0365
+-1.33
+-1.3185
+-1.248
+-1.326
+-1.421
+-1.759
+-0.316
+-3.338
+-0.415
+-1.128
+-1.519
+-1.797
+-1.74
+-1.326
+-0.202
+-1.515
+-1.912
+-1.244
+-0.887
+-1.787
+-3.099
+-1.337
+-2.694
+-0.818
+-1.027
+-0.756
+-2.185
+-0.41
+-1.177
+-1.64
+-1.434
+0.297
+-0.299
+-1.748
+-1.454
+-2.584
+-1.72
+-0.979
+-2.165
+-1.379
+-0.288
+-1.848
+-1.549
+-1.43
+-1.776
+-1.818
+-1.444
+-0.063
+-2.315
+-2.041
+-1.481
+-1.065
+-1.9
+-0.996
+-0.117
+-1.746
+-0.722
+-1.528
+-1.257
+-1.404
+-1.579
+-2.0045
+-1.562
+-0.422
+-1.215
+-2.975
+-1.31
+-1.9395
+-0.57
+-1.408
+-1.678
+-1.923
+-1.953
+0.15
+0.045
+-2.737
+-0.762
+-1.7585
+-1.509
+-1.558
+-1.36
+-0.834
+-0.154
+-2.654
+-1.219
+-1.622
+-1.593
+-1.159
+-1.822
+-1.263
+-1.002
+0.115
+-1.608
+-1.541
+-1.341
+-0.08
+-2.248
+-2.091
+-1.477
+-2.103
+-2.635
+-0.882
+-2.441
+-1.034
+-1.634
+-2.181
+-1.375
+-1.679
+-1.317
+-2.085
+-3.175
+-2.507
+-1.874
+-1.894
+-1.534
+-1.466
+-1.427
+-0.6205
+-0.458
+-3.324
+-2.432
+-2.237
+-1.316
+-2.117
+-1.469
+-1.691
+-1.789
+-1.259
+-1.37
+-0.917
+-1.889
+-1.475
+-1.936
+-1.163
+-1.6205
+-1.196
+-2.35
+0.079
+-1.3
+-2.114
+-1.5
+-0.594
+-1.911
+-1.176
+-1.818
+-3.276
+-1.647
+-0.264
+-2.913
+-0.93
+-1.69
+-2.118
+-1.2635
+-2.649
+-2.937
+-0.514
+-1.4355
+-2.0325
+-1.949
+-2.55
+-1.319
+-2.034
+-1.58
+-2.068
+-1.975
+-1.205
+-1.263
+-1.716
+-1.399
+-1.372
+-2.305
+-0.869
+-2.251
+-2.832
+-2.83
+-1.457
+-1.875
+-1.208
+-2.763
+-0.653
+-1.143
+-2.846
+-3.227
+-1.352
+-1.571
+-1.491
+-1.314
+-2.986
+-1.476
+-2.991
+-2.359
+-0.928
+-2.003
+-1.447
+-2.042
+-2.294
+-1.731
+-2.071
+-1.73
+-3.486
+-0.865
+-2.032
+-2.997
+-1.938
+-2.138
+-1.62
+-1.616
+-1.308
+-2.144
+-2.081
+-1.504
+-1.962
+-1.902
+-1.514
+-2.071
+-2.295
+-1.719
+-1.546
+-0.309
+-2.2825
+-1.31
+-1.296
+-1.114
+-2.332
+-1.991
+-1.702
+-1.304
+-1.566
+-1.844
+-2.208
+-2.179
+-2.373
+-1.672
+-2.501
+-2.67
+-2.417
+-1.417
+-1.031
+-1.821
+-1.96
+-1.624
+-2.321
+-1.666
+-2.845
+-2.154
+-1.592
+-1.335
+-2.629
+-1.781
+-2.075
+-0.572
+-1.366
+-1.885
+-2.339
+-1.112
+-1.915
+-1.882
+-2.4355
+-0.124
+-1.678
+-1.525
+-2.018
+-2.27
+-1.791
+-1.736
+-2.528
+-2.3435
+-1.685
+-1.307
+-1.647
+-1.862
+-1.195
+-1.526
+-0.508
+-1.756
+-2.1095
+-1.968
+-1.874
+-1.62
+-2.157
+-1.441
+-2.487
+-1.29
+-2.636
+-1.213
+-2.913
+-1.694
+-0.209
+-0.398
+-1.51
+-1.212
+-1.681
+-2.129
+-1.895
+-1.5545
+-1.904
+-1.266
+-2.324
+-1.768
+-2.829
+-1.824
+-1.866
+-1.056
+-1.18
+-2.0015
+-1.293
+-2.022
+-2.943
+-1.8555
+-1.308
+-1.775
+-0.705
+-1.564
+-1.514
+-1.904
+-1.569
+-1.46
+-1.805
+-0.94
+-1.627
+-2.004
+-2.754
+-1.496
+-1.052
+-1.678
+-1.924
+-1.499
+-1.525
+-1.387
+-1.225
+-1.654
+-1.794
+-1.853
+-1.487
+-1.747
+-1.625
+-1.293
+-1.857
+-1.172
+-1.737
+-1.477
+-1.486
+-1.264
+-1.8275
+-1.622
+-1.45
+-0.972
+-1.77
+-2.16
+-0.95
+-2.721
+-0.628
+-1.426
+-1.5885
+-1.719
+-1.675
+-1.433
+-0.715
+-1.809
+-1.077
+-2.747
+-2.639
+-1.878
+-2.014
+-1.776
+-1.46
+-1.99
+-1.355
+-2.812
+-2.563
+-2.137
+-2.3115
+-1.65
+-3.282
+-2.153
+-0.3
+-1.283
+-2.067
+-1.567
+-2.483
+-1.728
+-2.324
+-2.461
+-2.6405
+-0.568
+-1.571
+-3.626
+-2.208
+-1.148
+-0.797
+-2.296
+-1.233
+-2.148
+-2.0085
+-1.916
+-1.985
+-2.468
+-2.712
+-0.417
+-2.593
+-3.404
+-1.959
+-0.221
+-1.445
+-2.32
+-1.524
+-2.071
+-2.766
+-1.691
+-1.584
+-2.867
+-3.421
+-2.5695
+-0.789
+-2.541
+-2.346
+-0.59
+-1.365
+-3.18
+-2.729
+-2.101
+-2.001
+-1.816
+-1.365
+-1.349
+-1.878
+-2.014
+-1.776
+-1.46
+-1.99
+-1.355
+-2.812
+-2.563
+-1.691
+-1.584
+-2.867
+-3.421
+-2.5695
+-0.789
+-2.541
+-2.346
+-0.59
+-1.266
+-2.324
+-1.768
+-2.829
+-1.824
+-1.866
+-3.421
+-2.5695
+-0.789
+-2.541
+-2.346
+-0.59
+-2.629
+-1.781
+-2.075
+-0.572
+-1.366
+-1.885
+-2.339
+-1.949
+-2.55
+-1.319
+-2.034
+-1.58
+-1.736
+-0.8035
+-2.111
+-1.398
+-1.085
+-1.426
+-0.622
+-0.669
+-1.056
+-1.18
+-2.0015
+-1.293
+-2.022
+-2.943
+-0.644
+-1.9
+-2.059
+-1.539
+-1.511
+-2.183
+-1.599
+-1.444
+-1.455
+-2.639
+-1.878
+-2.014
+-1.776
+-1.46
+-1.99
+-1.691
+-1.584
+-2.867
+-3.421
+-2.5695
+-0.789
+-1.29
+-2.636
+-1.213
+-2.913
+-1.694
+-0.209
+-0.398
+-2.137
+-2.3115
+-1.65
+-3.282
+-2.153
+-0.3
+-1.283
+-2.067
+-1.678
+-1.924
+-1.499
+-1.525
+-1.387
+-1.225
+-1.654
+-1.794
+-1.853
diff --git a/Group_C/result_image/data/04reward noisy network.csv b/Group_C/result_image/data/04reward noisy network.csv
new file mode 100644
index 0000000..282dcc6
--- /dev/null
+++ b/Group_C/result_image/data/04reward noisy network.csv
@@ -0,0 +1,1000 @@
+-1.5130000000000003
+-1.871
+-1.083
+-1.388
+-1.1669999999999998
+-1.151
+-1.272
+-1.3860000000000001
+-1.191
+-1.338
+-1.342
+-1.576
+-1.1239999999999999
+-1.125
+-1.395
+-1.228
+-1.6950000000000003
+-1.059
+-1.18
+-1.16
+-1.215
+-1.234
+-1.362
+-1.223
+-1.189
+-1.344
+-1.1749999999999998
+-1.279
+-1.214
+-1.138
+-1.219
+-1.637
+-1.157
+-1.326
+-1.157
+-1.111
+-1.4060000000000001
+-1.44
+-1.13
+-1.268
+-1.256
+-1.492
+-1.3940000000000001
+-1.157
+-1.11
+-1.394
+-1.1620000000000001
+-1.135
+-1.6260000000000001
+-1.3010000000000002
+-1.23
+-1.201
+-1.226
+-1.119
+-1.3820000000000001
+-1.14
+-1.14
+-1.23
+-1.2510000000000001
+-1.0839999999999999
+-1.043
+-1.4370000000000003
+-1.31
+-1.1280000000000001
+-1.35
+-1.132
+-1.389
+-1.126
+-1.477
+-1.2570000000000001
+-1.165
+-1.139
+-1.46
+-1.6350000000000002
+-1.338
+-1.254
+-1.111
+-1.202
+-1.192
+-1.082
+-1.1520000000000001
+-1.222
+-1.165
+-1.148
+-1.3410000000000002
+-1.7680000000000002
+-1.6020000000000003
+-1.479
+-1.1580000000000001
+-2.544999999999999
+-1.7120000000000002
+-1.356
+-1.209
+-1.288
+-1.091
+-1.372
+-1.33
+-1.348
+-1.3410000000000002
+-1.367
+-1.568
+-1.0399999999999998
+-1.17
+-1.8990000000000005
+-1.124
+-1.179
+-1.2040000000000002
+-1.1760000000000002
+-1.328
+-1.585
+-1.389
+-1.5680000000000003
+-1.749
+-1.735
+-1.6880000000000002
+-1.681
+-1.177
+-1.2389999999999999
+-1.133
+-1.236
+-1.291
+-1.374
+-1.114
+-1.62
+-1.19
+-1.242
+-1.7310000000000003
+-1.6640000000000004
+-2.1830000000000007
+-1.1709999999999998
+-1.2970000000000002
+-1.37
+-1.462
+-1.7830000000000004
+-1.6229999999999998
+-1.8720000000000003
+-1.669
+-1.213
+-2.4170000000000007
+-1.209
+-2.2540000000000004
+-1.276
+-1.5160000000000002
+-1.2970000000000002
+-1.7780000000000002
+-1.6710000000000003
+-1.7750000000000004
+-1.7230000000000003
+-2.4279999999999986
+-1.282
+-1.7550000000000001
+-3.1560000000000015
+-1.335
+-1.8650000000000007
+-1.362
+-1.5430000000000001
+-2.0300000000000007
+-1.173
+-1.7020000000000004
+-1.6260000000000003
+-1.233
+-1.8720000000000003
+-1.218
+-1.204
+-1.6640000000000001
+-1.205
+-1.451
+-1.4180000000000001
+-1.252
+-1.56
+-1.336
+-1.8020000000000005
+-1.295
+-1.5420000000000003
+-1.6500000000000004
+-1.255
+-1.4800000000000002
+-2.037000000000001
+-1.312
+-1.8040000000000005
+-1.9740000000000006
+-1.25
+-1.2970000000000002
+-1.524
+-1.2799999999999998
+-1.116
+-1.4490000000000003
+-1.206
+-1.0985
+-1.111
+-1.166
+-1.8220000000000005
+-1.517
+-1.441
+-1.193
+-1.592
+-1.6075000000000002
+-1.34
+-1.7730000000000004
+-1.6640000000000004
+-1.48
+-1.4770000000000003
+-1.6260000000000003
+-1.5640000000000003
+-1.152
+-1.737
+-1.233
+-1.3319999999999999
+-1.4820000000000002
+-1.6179999999999999
+-2.1740000000000004
+-2.387999999999993
+-1.276
+-1.9995000000000007
+-1.415
+-1.6530000000000005
+-1.304
+-2.3029999999999946
+-1.171
+-1.2429999999999999
+-1.5290000000000001
+-1.8160000000000003
+-1.8020000000000005
+-1.351
+-1.5420000000000011
+-1.072
+-1.2640000000000002
+-1.262
+-1.5550000000000002
+-0.17200000000000001
+-1.213
+-1.264
+-1.581
+-1.336
+-1.443
+-1.741
+-1.1989999999999978
+-1.3330000000000002
+-1.263
+-1.6050000000000004
+-1.8630000000000004
+-1.221
+-1.5300000000000002
+-1.4060000000000001
+-1.4330000000000003
+-1.203
+-2.197000000000001
+-2.4130000000000003
+-2.139999999999999
+-1.8720000000000006
+-1.282
+-1.7790000000000004
+-1.473
+-1.286
+-1.48
+-1.8290000000000004
+-1.6320000000000003
+-1.593
+-1.9550000000000005
+-1.6520000000000004
+-1.5310000000000001
+-1.258
+-1.8890000000000007
+-1.2199999999999966
+-1.395
+-1.6740000000000004
+-1.5070000000000001
+-1.6195000000000004
+-0.8760000000000006
+-1.5500000000000003
+-1.4020000000000001
+-1.447
+-2.3980000000000006
+-1.3719999999999999
+-1.5875000000000004
+-2.2119999999999957
+-1.472
+-1.3970000000000002
+-1.445
+-1.33
+-1.155
+-1.5615000000000003
+-1.9710000000000023
+-1.165
+-1.8920000000000006
+-2.0450000000000004
+-1.522
+-1.233
+-1.354
+-1.31
+-1.148
+-1.3119999999999998
+-1.7900000000000005
+-1.3980000000000001
+-1.5980000000000003
+-1.4840000000000002
+-1.5570000000000004
+-1.5660000000000003
+-1.5280000000000051
+-1.346
+-1.6790000000000003
+-1.5020000000000002
+-1.075
+-0.5930000000000004
+-1.6620000000000004
+-1.036
+-1.2009999999999985
+-1.8240000000000005
+-2.038500000000001
+-1.8330000000000006
+-1.5950000000000002
+-1.5640000000000005
+-1.303
+-2.1620000000000004
+-0.343
+-0.20900000000000013
+-1.1400000000000001
+-1.7065000000000006
+0.07000000000000008
+-2.449500000000001
+-1.4030000000000002
+-1.8630000000000004
+-0.7530000000000004
+-1.5430000000000001
+-1.9749999999999968
+-0.5620000000000002
+-1.242
+-2.498
+-2.594999999999997
+-1.6270000000000002
+-0.5570000000000004
+-0.7550000000000004
+-1.4929999999999992
+-1.4030000000000058
+-1.4865000000000002
+-0.11899999999999991
+-1.365
+-2.690999999999989
+-0.7875000000000005
+-1.3599999999999999
+-0.46900000000000036
+-1.6340000000000003
+-0.5240000000000004
+-1.1615000000000073
+-1.366
+-0.8930000000000006
+-0.9380000000000007
+-0.3065000000000001
+-1.123
+-1.6600000000000001
+-1.255
+-1.471
+-1.2799999999999998
+-1.5840000000000003
+-1.6230000000000002
+-1.9589999999999996
+-1.15
+-1.2389999999999999
+-1.0479999999999996
+-1.6370000000000005
+-1.8260000000000005
+0.17000000000000004
+-1.328
+-1.17
+-1.7120000000000004
+-2.045000000000001
+-1.4530000000000003
+-2.0015000000000014
+-1.335
+-1.5020000000000002
+0.1800000000000002
+-1.6180000000000003
+-1.139
+-1.244
+-1.237
+-1.9380000000000008
+-1.42
+-1.164
+-1.1375
+-0.3300000000000002
+-0.40200000000000025
+-1.6200000000000003
+-1.4540000000000002
+-1.218
+-1.2239999999999955
+-1.6690000000000005
+-2.199999999999999
+-1.8780000000000006
+-1.348
+-2.0430000000000006
+-1.6880000000000004
+-1.5135000000000003
+-1.0200000000000014
+-0.7900000000000005
+-1.9430000000000005
+-1.6659999999999953
+-2.152000000000001
+-1.4960000000000002
+-1.6090000000000002
+-1.7660000000000013
+-2.440000000000002
+-3.1690000000000014
+-2.758000000000002
+-1.5150000000000001
+-2.218000000000001
+-3.0920000000000005
+-1.09
+-0.9680000000000006
+-1.6660000000000004
+-1.4670000000000003
+-1.7300000000000004
+-1.7390000000000003
+-1.463
+-1.35
+-2.650999999999999
+-1.353
+-2.2890000000000006
+-1.0970000000000006
+-0.6370000000000003
+-3.497999999999995
+-1.9220000000000006
+-0.45300000000000024
+-1.4100000000000001
+-0.7980000000000005
+-2.162999999999998
+-1.279
+-2.062000000000001
+-1.9160000000000006
+-0.4770000000000002
+-2.162000000000001
+-1.8070000000000004
+-0.18500000000000003
+-1.3970000000000002
+-1.8690000000000007
+-1.2630000000000001
+-1.7430000000000003
+-1.3600000000000105
+-2.2200000000000006
+-1.6190000000000002
+-1.7200000000000002
+-2.104000000000001
+-1.4730000000000003
+-3.229000000000001
+-2.176000000000001
+-1.2075
+-1.9310000000000005
+-1.3010000000000002
+-1.8780000000000006
+-1.7260000000000004
+-2.254000000000001
+0.035000000000000045
+-1.5960000000000003
+-1.6890000000000005
+-2.1689999999999996
+-1.4960000000000002
+-1.4280000000000002
+-1.8590000000000004
+-1.6025000000000003
+-0.0949999999999998
+-1.5020000000000002
+-2.349999999999997
+-2.1480000000000006
+-1.7630000000000003
+-1.9580000000000006
+-1.6155000000000004
+-1.9905000000000002
+-1.4560000000000004
+-2.7779999999999867
+-0.35300000000000015
+-3.144999999999996
+-1.0679999999999998
+-1.241
+-1.2329999999999992
+-0.2125
+-1.5639999999999885
+-2.187999999999999
+-2.2875000000000005
+-2.2129999999999974
+-1.5755000000000003
+-1.7320000000000013
+-2.308
+-2.3480000000000008
+-2.075999999999998
+-1.5070000000000001
+-1.1610000000000007
+-1.8620000000000005
+-1.045
+-1.0790000000000008
+-1.2564999999999995
+-1.274
+-0.8720000000000007
+-1.7530000000000046
+-2.012000000000001
+-1.5980000000000019
+-2.9079999999999826
+-2.3320000000000007
+-1.7010000000000005
+-1.3640000000000003
+-1.7250000000000005
+-2.0810000000000004
+-1.8310000000000133
+-0.038999999999999875
+0.1420000000000001
+-0.8460000000000005
+-1.2930000000000001
+-1.224000000000013
+-1.308
+0.08150000000000025
+-2.4039999999999973
+-1.4620000000000002
+-1.294
+-0.044999999999999804
+-1.456
+-0.44199999999999995
+-1.6450000000000005
+-2.2970000000000006
+-1.3
+-1.0170000000000097
+-1.5645000000000002
+-1.337
+-1.6090000000000004
+-1.29
+-1.0000000000000007
+-1.1699999999999997
+-1.8480000000000005
+0.007000000000000049
+-2.0650000000000057
+-1.6569999999999951
+-1.352
+-1.2139999999999935
+-2.7600000000000016
+-1.4340000000000002
+-1.4980000000000002
+-0.2940000000000001
+-1.3595000000000002
+-0.9780000000000006
+-1.0234999999999992
+-1.6670000000000003
+-2.2519999999999962
+-2.627999999999989
+-2.0360000000000005
+-0.4630000000000003
+-0.061999999999999805
+-1.6160000000000003
+-1.1110000000000007
+-2.109000000000001
+0.1980000000000002
+-0.2810000000000001
+-1.1649999999999998
+-1.9550000000000016
+-0.7830000000000005
+-1.6830000000000003
+-0.7850000000000005
+-2.408000000000001
+-2.299999999999996
+-1.0239999999999998
+0.0110000000000002
+-0.5560000000000003
+-1.7660000000000005
+-1.9765000000000006
+-1.8614999999999937
+-0.1059999999999999
+-1.8955000000000006
+-1.402500000000003
+-1.4930000000000023
+-1.8260000000000005
+-1.2610000000000001
+-1.067
+-2.3299999999999996
+-0.8370000000000005
+-1.339
+-1.3560000000000003
+-0.5170000000000002
+-1.4100000000000001
+-0.3340000000000001
+-1.8349999999999993
+-1.8170000000000006
+-1.5000000000000002
+-0.7990000000000005
+-1.2269999999999999
+-0.4750000000000002
+-1.9770000000000008
+-1.0269999999999997
+-1.4840000000000002
+-0.5450000000000003
+-1.5960000000000003
+-2.3080000000000007
+-1.163
+-2.1150000000000007
+-0.10599999999999976
+-2.2010000000000005
+-0.31049999999999994
+-1.7730000000000026
+-1.322
+-1.122000000000001
+-1.4240000000000002
+-2.0340000000000007
+-1.476000000000016
+-2.0730000000000004
+-2.146000000000001
+-1.0120000000000005
+0.04300000000000003
+-0.6520000000000004
+-1.8510000000000004
+-1.3329999999999997
+-1.0019999999999998
+-2.236500000000001
+-1.4080000000000001
+-1.8425000000000007
+-0.5060000000000002
+-1.33
+-1.375
+-1.4830000000000003
+-1.4325
+-2.0600000000000005
+-1.5859999999999943
+-1.159
+-1.1540000000000088
+-1.8130000000000086
+-0.8750000000000006
+-2.929500000000001
+0.0590000000000002
+-2.355000000000001
+-1.8980000000000006
+-1.254
+-1.9390000000000007
+-1.4709999999999999
+-1.6550000000000002
+-1.8679999999999983
+-1.5745000000000005
+-1.212
+-2.341000000000001
+-1.5590000000000002
+-0.7490000000000004
+-0.06799999999999991
+-1.8260000000000018
+-1.3114999999999892
+-0.10299999999999991
+-0.8250000000000005
+-0.5190000000000002
+-0.9589999999999997
+-2.0280000000000005
+-1.4390000000000003
+-1.2169999999999936
+-2.176000000000001
+-0.8240000000000005
+-0.48600000000000027
+-1.251
+-2.038000000000004
+-1.308
+-1.7410000000000005
+-1.208
+-1.5850000000000002
+-0.18900000000000006
+-1.8030000000000004
+-0.006999999999999896
+-2.7430000000000057
+-1.1629999999999998
+-1.4820000000000002
+-1.4770000000000003
+-1.341
+-1.5220000000000002
+-1.2970000000000002
+-0.9920000000000027
+-0.9940000000000007
+-1.5740000000000003
+-0.9650000000000006
+-1.1099999999999999
+-2.1929999999999987
+-0.5650000000000003
+-2.089999999999999
+-1.3049999999999997
+-2.413499999999999
+0.12800000000000017
+-0.6750000000000004
+-2.0870000000000006
+-1.2429999999999999
+-2.0350000000000006
+-0.9640000000000006
+-1.8370000000000022
+-2.3610000000000015
+-1.3049999999999997
+-1.5200000000000056
+-1.3950000000000002
+-2.365
+-1.6150000000000002
+-1.7600000000000005
+-1.6730000000000005
+-2.5460000000000016
+-1.8450000000000006
+-3.291000000000004
+-2.99899999999998
+-1.0850000000000002
+-2.6349999999999887
+-2.7939999999999845
+-1.5530000000000004
+-2.472000000000001
+-2.4030000000000014
+-2.2360000000000007
+-3.419999999999998
+-2.495000000000001
+-2.246000000000001
+-1.7750000000000004
+0.26800000000000024
+-2.4970000000000008
+-2.0730000000000004
+-1.9970000000000008
+-2.295000000000001
+-0.7240000000000004
+-2.5530000000000044
+-1.601000000000001
+-2.237000000000001
+-2.387000000000001
+-2.9159999999999817
+-2.057000000000001
+-1.4810000000000003
+-2.9879999999999804
+-0.09899999999999988
+-1.7160000000000004
+-2.168000000000001
+-2.9879999999999804
+-2.2550000000000012
+-1.6770000000000005
+-2.554000000000002
+-1.5350000000000001
+-0.7780000000000005
+-2.2460000000000018
+-2.7490000000000014
+-2.4270000000000014
+-1.9350000000000005
+-1.4830000000000003
+-2.965999999999981
+-1.6710000000000003
+-1.8700000000000006
+-1.6330000000000005
+-1.9570000000000007
+-2.651000000000001
+-2.281000000000001
+-1.166
+-2.9879999999999804
+-1.5010000000000003
+-1.9010000000000007
+-2.5500000000000016
+-0.9500000000000006
+-3.00299999999998
+-1.9644999999999984
+-2.2330000000000076
+-1.354
+-1.687
+-1.343
+-1.9970000000000006
+-0.6110000000000004
+-1.334
+-1.385
+-1.1139999999999999
+-1.83
+-1.359
+-1.409
+-1.3940000000000001
+-1.5300000000000002
+-1.1880000000000002
+-2.1230000000000007
+-1.4180000000000001
+-1.5840000000000003
+-1.432
+-2.1110000000000007
+-1.3000000000000016
+-1.6510000000000002
+-1.532
+-1.487
+-1.439
+-1.7480000000000004
+-1.6840000000000004
+-2.5369999999999924
+-1.6470000000000002
+-2.9520000000000013
+-2.841000000000001
+-2.1670000000000007
+-1.6730000000000003
+-1.323
+-1.512
+-1.364
+-1.288
+-1.9960000000000007
+-1.355
+-1.364
+-1.5619999999999998
+-2.5180000000000007
+-2.783999999999986
+-2.9159999999999817
+-2.3280000000000007
+-2.58199999999999
+-1.025
+-2.9475000000000016
+-3.131000000000001
+-1.173
+-2.5000000000000067
+-3.916999999999982
+-2.9329999999999825
+-2.5280000000000014
+-1.8569999999999984
+0.1090000000000001
+-2.9879999999999804
+-1.6970000000000005
+-2.367000000000001
+-2.9320000000000013
+-2.7960000000000025
+-1.5870000000000002
+-1.5320000000000005
+-1.423
+-3.00799999999998
+-2.0570000000000004
+-1.6710000000000003
+-3.5279999999999943
+-1.8570000000000007
+-2.9970000000000017
+-1.8470000000000004
+-1.6620000000000004
+-1.345
+-1.385
+-1.385
+-1.6950000000000003
+-1.6130000000000004
+-1.428000000000001
+-1.365
+-1.385
+-1.365
+-1.658
+-1.363
+-1.385
+-1.295
+-1.6270000000000002
+-1.365
+-2.808000000000001
+-1.365
+-1.385
+-1.385
+-1.385
+-1.365
+-1.385
+-1.385
+-1.44
+-1.365
+-1.398
+-1.365
+-1.29
+-3.242000000000001
+-1.359
+-1.385
+-1.419
+-1.385
+-1.3490000000000002
+-1.385
+-1.9000000000000006
+-3.00999999999998
+-1.9580000000000006
+-2.8820000000000014
+-1.1020000000000008
+-1.1100000000000008
+-1.5630000000000002
+-2.1240000000000006
+-2.285000000000001
+-1.369
+-2.1240000000000006
+-1.800000000000011
+-1.229
+-1.228
+-3.3339999999999987
+-2.8830000000000013
+-3.5519999999999943
+-2.5764999999999896
+-2.7900000000000014
+-1.613000000000001
+-2.9879999999999804
+-2.0670000000000006
+-1.7960000000000005
+-1.6790000000000003
+-1.6700000000000013
+-2.6340000000000012
+-1.7670000000000017
+-2.723999999999987
+-2.715999999999991
+-2.8779999999999832
+-3.237
+-2.170000000000001
+-2.0155000000000007
+-2.0100000000000007
+-1.9270000000000005
+-2.0270000000000006
+-2.866999999999983
+-3.282000000000001
+-1.119
+-2.5699999999999923
+-2.9879999999999804
+-2.727000000000001
+-2.507000000000001
+-2.381000000000001
+-2.7499999999999987
+-2.9440000000000026
+-2.407999999999994
+-2.307000000000001
+-2.8119999999999856
+-3.147000000000001
+-2.385000000000001
+-2.788000000000001
+-2.247500000000001
+-2.8339999999999836
+-2.745000000000001
+-2.3860000000000063
+-3.051000000000002
+-2.270000000000001
+-2.5200000000000014
+-2.899999999999982
+-3.3769999999999936
+-1.9730000000000008
+-4.089999999999982
+-2.7019999999999884
+-4.16999999999998
+-2.569999999999989
+-2.2200000000000006
+-3.4259999999999975
+-1.9380000000000006
+-0.9310000000000006
+-3.00999999999998
+-1.3450000000000002
+-2.420000000000001
+-3.171999999999997
+-3.427999999999998
+-2.774999999999985
+-0.7490000000000004
+-1.1500000000000008
+-2.9750000000000014
+-1.6454999999999982
+-3.080000000000002
+-2.6900000000000013
+-2.1400000000000006
+-2.4400000000000013
+-3.5499999999999936
+-3.3199999999999976
+-2.9549999999999814
+-2.0500000000000007
+-3.3179999999999996
+-3.086000000000001
+-2.177000000000001
+-2.5270000000000015
+-2.8470000000000013
+-1.7380000000000013
+-2.547000000000001
+-3.6679999999999913
+-2.2270000000000008
+-2.017000000000001
+-2.1800000000000006
+-2.8190000000000013
+-1.7370000000000005
+-3.00999999999998
+-2.8779999999999837
+-3.386999999999997
+-3.067
+-2.8170000000000015
+-2.0360000000000005
+-2.0670000000000006
+-2.8890000000000016
+-3.0539999999999994
+-2.3000000000000007
+-0.8800000000000006
+-1.9140000000000006
+-0.7150000000000004
+-1.7340000000000004
+-2.458000000000001
+-2.019000000000001
+-1.345
+-1.365
+-1.385
+-1.365
+-3.160000000000001
+-3.4869999999999948
+-1.365
+-1.109
+-1.6030000000000002
+-1.365
+-1.385
+-2.248000000000001
+-1.345
+-1.385
+-1.385
+-1.385
+-2.804999999999985
+-1.345
+-1.8980000000000006
+-1.3450000000000002
+-1.365
+-1.365
+-1.385
+-2.446000000000001
+-2.591000000000001
+-2.320000000000001
+-1.9900000000000007
+-3.0690000000000017
+-3.00999999999998
+-1.345
diff --git a/Group_C/result_image/data/05reward priority memory.csv b/Group_C/result_image/data/05reward priority memory.csv
new file mode 100644
index 0000000..88991a6
--- /dev/null
+++ b/Group_C/result_image/data/05reward priority memory.csv
@@ -0,0 +1,800 @@
+-1.248
+-1.12
+-1.113
+-1.481
+-1.444
+-1.366
+-1.357
+-1.198
+-1.191
+-1.522
+-1.155
+-1.136
+-1.111
+-1.098
+-1.178
+-1.506
+-1.303
+-1.197
+-1.316
+-1.327
+-1.278
+-1.45
+-1.352
+-1.251
+-1.406
+-1.379
+-1.167
+-1.535
+-1.181
+-1.202
+-1.19
+-1.11
+-1.117
+-1.562
+-1.12
+-1.256
+-0.013
+-1.487
+-1.225
+-1.094
+-1.228
+-1.454
+-1.136
+-1.281
+-1.129
+-1.098
+-1.305
+-1.327
+-1.379
+-1.245
+-1.291
+-1.186
+-1.222
+-1.253
+-1.273
+-1.32
+-1.16
+-1.053
+-1.233
+-1.229
+-1.138
+-1.063
+-1.472
+-1.269
+-1.844
+-1.325
+-1.179
+-1.359
+-1.206
+-1.115
+-1.141
+-1.326
+-1.111
+-1.449
+-1.156
+-1.181
+-1.179
+-1.483
+-1.155
+-1.201
+-1.422
+-1.506
+-1.652
+-1.676
+-1.222
+-1.247
+-1.454
+-1.363
+-1.778
+-1.153
+-1.141
+-1.369
+-1.161
+-1.288
+-1.338
+-1.236
+-1.268
+-1.151
+-1.634
+-2.17
+-1.459
+-1.436
+-1.756
+-1.108
+-1.307
+-1.56
+-2.063
+-1.289
+-1.928
+-1.191
+-3.171
+-1.488
+-2.781
+-2.459
+-1.653
+-1.2645
+-1.275
+-1.948
+-1.55
+-1.313
+-2.161
+-1.852
+-1.729
+-1.211
+-1.876
+-1.346
+-1.483
+-1.5755
+-1.101
+-1.512
+-2.598
+-1.569
+-1.635
+-1.73
+-1.229
+-3.158
+-1.098
+-1.217
+-1.166
+-2.907
+-1.579
+-1.211
+-1.597
+-2.024
+-1.198
+-2.202
+-1.673
+-1.88
+-1.828
+-1.454
+-1.737
+-2.043
+-1.112
+-1.704
+-1.474
+-2.386
+-1.041
+-1.32
+-1.546
+-1.189
+-1.478
+-1.336
+-1.496
+-1.361
+-1.517
+-1.401
+-1.708
+-1.3775
+-1.493
+-2.217
+-2.151
+-1.57
+0.011
+-1.588
+-1.654
+-1.713
+-1.83
+-1.647
+-1.107
+-0.097
+-1.4755
+-0.261
+-1.474
+-1.454
+-1.612
+-1.738
+-0.312
+-0.513
+-0.1285
+-1.522
+-1.6295
+-1.657
+-1.485
+-1.7745
+-0.439
+-0.297
+-0.174
+-0.052
+-0.069
+-1.813
+-0.175
+-1.228
+-1.453
+-0.029
+-1.533
+-1.457
+-1.512
+-1.487
+0.149
+-0.467
+-0.531
+-1.368
+-1.339
+-1.407
+-1.182
+-1.595
+-1.48
+-1.033
+-1.418
+-2.058
+-1.383
+-1.643
+-1.095
+-1.211
+-0.935
+-1.309
+-0.079
+-1.545
+0.176
+-1.658
+-1.255
+-1.147
+-0.034
+-1.478
+-0.154
+-1.099
+-1.21
+-1.208
+-1.338
+-1.321
+-1.219
+-1.353
+-0.086
+-1.124
+-0.227
+-1.264
+-1.209
+-1.722
+-0.563
+-1.289
+0.086
+-1.292
+-1.322
+-0.287
+-1.261
+0.176
+-0.041
+0.157
+-0.2755
+0.069
+-0.063
+-1.293
+-0.047
+-1.812
+-1.672
+0.05
+-1.327
+-1.035
+-1.0785
+-1.66
+-0.336
+-1.192
+0.045
+-1.676
+0.047
+-1.006
+-0.347
+-0.004
+-1.665
+0.177
+-1.18
+-0.073
+-1.222
+-0.016
+-1.297
+-1.24
+-0.034
+-1.372
+-2.031
+-0.228
+-1.436
+-0.169
+-0.054
+0.223
+-1.453
+-1.762
+0.277
+-1.153
+-0.337
+-0.029
+-0.386
+-1.2
+-1.24
+-1.333
+-0.659
+-1.256
+-1.05
+0.049
+-1.2555
+0.095
+-1.266
+-1.181
+-1.525
+-0.533
+-1.642
+-0.974
+-0.477
+-0.027
+-1.328
+-1.285
+-0.041
+0.096
+-0.423
+-0.492
+-1.226
+-1.327
+-1.667
+-1.555
+-1.287
+-1.417
+-1.053
+-1.378
+-0.324
+-0.166
+-0.684
+-1.259
+-1.271
+-0.036
+-1.3435
+-1.477
+-1.283
+-1.296
+-1.31
+0.2055
+-1.212
+-1.246
+0.259
+-0.295
+0.095
+-1.201
+-1.094
+-1.098
+-1.246
+-1.249
+-1.473
+-0.411
+0.135
+-2.069
+-1.01
+-1.285
+-1.033
+-1.365
+-1.319
+-0.227
+-0.273
+0.23
+-1.091
+-1.654
+0.243
+0.07
+-1.647
+-1.094
+-1.368
+-0.189
+-1.535
+-0.771
+-1.474
+-0.973
+-1.072
+-1.478
+0.15
+-0.014
+-1.248
+-1.329
+-0.115
+-0.237
+-1.28
+-0.087
+0.066
+-1.317
+0.132
+0.06
+0.046
+-1.393
+-0.309
+-0.585
+-1.245
+-0.269
+-1.45
+-1.1245
+-1.3115
+-1.046
+-1.388
+-1.184
+-1.208
+-1.955
+-1.508
+-1.288
+-0.118
+-1.503
+-1.257
+-0.09
+0.033
+-0.094
+-1.556
+0.073
+0.245
+-0.938
+0.023
+-0.031
+-0.971
+-1.7695
+-0.347
+-1.408
+-1.015
+0.176
+-1.298
+-0.596
+-1.209
+-0.198
+-1.65
+2.50E-16
+0.301
+-1.071
+0.081
+-0.244
+-1.223
+-1.066
+-1.331
+-0.951
+-1.26
+-0.0425
+-1.459
+-1.161
+-1.333
+-0.219
+-1.441
+-1.3745
+-1.301
+-1.403
+-1.272
+0.121
+-1.092
+0.067
+-1.339
+-1.304
+-1.784
+-0.192
+-1.7335
+-1.266
+0.054
+-1.047
+0.001
+-1.53
+-1.029
+-1.243
+-2.012
+-1.176
+-1.208
+0.073
+-1.038
+-1.438
+-1.341
+-1.603
+-1.501
+0.006
+-0.646
+-1.206
+-1.249
+0.126
+-1.272
+-1.773
+-1.085
+-0.102
+0.091
+0.09
+0.273
+-1.171
+-1.464
+-0.918
+-1.224
+0.191
+-1.2335
+-1.406
+-1.367
+-1.2035
+-0.109
+-1.747
+-1.307
+-1.029
+-1.25
+-1.238
+-1.197
+-1.059
+-1.317
+-1.232
+-1.173
+-1.042
+-1.085
+-1.184
+-1.457
+-1.266
+0.229
+-1.688
+-1.611
+-1.693
+-0.021
+0.136
+-0.132
+-0.095
+-1.1525
+-2.502
+0.223
+-1.213
+-1.575
+-1.138
+-0.109
+-1.144
+-0.908
+-0.174
+-1.065
+-1.475
+-1.092
+-1.32
+-0.237
+-1.0125
+-1.017
+-1.131
+-1.08
+-0.928
+-2.062
+-2.15
+-1.215
+-0.563
+-1.329
+-1.538
+-1.318
+-1.407
+-1.133
+-1.53
+-1.827
+-2.319
+-1.894
+-1.854
+-3.259
+-2.11
+-2.035
+-1.932
+-1.495
+-1.9
+-2.121
+-2.632
+-2.888
+0.073
+-1.531
+-1.708
+-2.47
+-2.793
+-2.681
+-1.347
+-1.559
+-1.296
+-1.933
+-2.367
+-2.342
+-2.531
+-2.399
+-1.752
+-1.774
+-1.812
+-1.517
+-2.392
+-1.221
+-1.651
+-1.295
+-3.216
+-1.173
+-2.193
+-2.376
+-1.903
+-2.07
+-1.566
+-1.433
+-1.191
+-2.117
+-0.854
+-2.988
+-2.257
+-1.938
+-1.066
+-2.448
+-2.211
+-1.168
+-2.99
+-2.865
+-1.172
+-3.043
+-1.145
+-1.629
+-1.199
+-2.57
+-3.239
+-3.001
+-3.623
+-2.023
+-1.648
+-2.701
+-1.495
+-1.511
+-3.259
+-2.11
+-2.035
+-1.932
+-1.495
+-1.9
+-2.121
+-2.632
+-2.888
+0.073
+-1.531
+-2.07
+-1.566
+-1.433
+-1.191
+-2.117
+-0.854
+-2.988
+-2.257
+-1.629
+-1.199
+-2.57
+-3.239
+-3.001
+-3.623
+-2.023
+-1.648
+0.073
+-1.531
+-2.07
+-1.566
+-1.433
+-1.495
+-1.511
+-3.259
+-2.11
+-2.035
+-2.392
+-1.221
+-1.651
+-1.295
+-3.216
+-1.221
+-2.193
+-2.376
+-1.495
+-1.511
+-3.259
+-2.11
+-2.035
+-1.221
+-1.511
+-0.854
+-3.259
+-2.11
+-2.035
+-2.392
+-1.221
+-2.888
+0.073
+-1.531
+-2.07
+-1.191
+-2.117
+-0.854
+-2.988
+-2.257
+-1.191
+-2.117
+-0.854
+-2.988
+-2.257
+-2.888
+0.073
+-1.531
+-2.07
+-0.854
+-1.531
+-2.888
+0.073
+-1.531
+-2.07
+-2.11
+-2.035
+-2.392
+-1.221
+-2.023
+-1.648
+0.073
+-1.531
+-2.07
+-1.566
+-1.433
+-1.495
+-1.511
+0.073
+-1.531
+-2.023
+-1.648
+0.073
+-1.531
+-2.07
+-1.566
+-1.433
+-1.495
+-1.511
+-3.043
+-2.023
+-1.648
+0.073
+-1.531
+-2.07
+-2.117
+-0.854
+-2.988
+-2.257
+-1.433
+-1.495
+-1.511
+-2.988
+-2.035
+-3.216
+-1.221
+-2.193
+-2.376
+-2.117
+-1.433
+-1.495
+-1.511
+-2.035
+-2.023
+-1.648
+0.073
+-1.531
+-2.07
+-0.095
+-2.988
+-2.023
+-1.433
+-1.495
+-1.511
+-2.023
+-2.988
+-2.023
+-2.023
+-1.648
+0.073
+-1.531
+-2.07
+-3.216
+-3.216
+-3.216
+-1.221
+-2.193
+-2.376
+-2.023
+-0.095
+-2.392
+-1.221
+-1.651
+-1.295
+-2.023
+-1.648
+0.073
+-1.531
+-2.07
+-2.023
+-2.257
+-2.988
+-2.023
+-1.648
+-3.216
+-1.221
+-2.193
+-2.376
+-2.392
+-1.221
+-1.651
+-1.295
+-2.193
+-1.221
diff --git a/Group_C/result_image/data/06reward Multi-step DQN.csv b/Group_C/result_image/data/06reward Multi-step DQN.csv
new file mode 100644
index 0000000..60b0462
--- /dev/null
+++ b/Group_C/result_image/data/06reward Multi-step DQN.csv
@@ -0,0 +1,1650 @@
+-1.0419999999999998
+-1.106
+-1.077
+-1.105
+-1.5140000000000002
+-1.204
+-1.196
+-1.1640000000000001
+-1.8540000000000003
+-1.149
+-1.189
+-1.299
+-1.148
+-1.0719999999999998
+-1.346
+-1.141
+-1.216
+-1.065
+-1.133
+-1.167
+-1.167
+-1.0619999999999998
+-1.264
+-1.118
+-1.09
+-1.048
+-1.332
+-1.184
+-1.224
+-1.176
+-1.203
+-1.107
+-1.226
+-1.14
+-1.101
+-1.233
+-1.272
+-1.053
+-1.219
+-1.092
+-1.242
+-1.599
+-1.208
+-1.167
+-1.179
+-1.061
+-1.22
+-1.2850000000000001
+-1.1320000000000001
+-1.214
+-1.1239999999999999
+-1.235
+-1.127
+-1.2229999999999999
+-1.2510000000000001
+-1.7670000000000003
+-1.5480000000000003
+-1.2069999999999999
+-1.155
+-1.18
+-1.3980000000000001
+-1.408
+-1.127
+-1.275
+-1.217
+-1.22
+-1.4010000000000002
+-1.001
+-1.14
+-1.307
+-1.237
+-1.114
+-1.091
+-1.157
+-1.213
+-1.256
+-1.418
+-1.359
+-1.091
+-1.14
+-1.186
+-1.202
+-1.205
+-1.088
+-1.8220000000000005
+-1.4230000000000003
+-1.075
+-1.4560000000000002
+-1.19
+-1.5260000000000002
+-1.413
+-1.5530000000000004
+-2.3460000000000027
+-1.309
+-1.105
+-2.325000000000001
+-1.7140000000000004
+-0.9410000000000007
+-1.7100000000000004
+-2.2180000000000053
+-1.2880000000000003
+-3.3409999999999984
+-0.7280000000000004
+-1.125
+-2.1400000000000006
+-1.123
+-1.298
+-1.6030000000000002
+-1.263
+-1.221
+-1.5970000000000004
+-1.256
+-1.7570000000000006
+-1.3960000000000001
+-2.2100000000000017
+-1.6900000000000004
+-1.166
+-1.242
+-2.100999999999999
+-1.1
+-0.5830000000000003
+-2.3900000000000015
+-0.7620000000000005
+-2.072000000000001
+-2.4890000000000025
+-1.3019999999999978
+-1.2619999999999973
+-1.3800000000000001
+-1.5460000000000003
+-1.213
+-2.419999999999997
+-1.183
+-1.9030000000000005
+-1.13
+-1.5960000000000005
+-2.107000000000001
+-1.4620000000000002
+-1.113
+-1.7010000000000005
+-1.7960000000000007
+-1.5455000000000005
+-1.3050000000000002
+-1.119
+-1.225
+-2.1140000000000008
+-1.358
+-1.4840000000000013
+-1.7480000000000007
+-1.6820000000000004
+-1.216
+-2.8660000000000014
+-1.8350000000000004
+-2.701999999999997
+-1.9210000000000007
+-1.9450000000000007
+-1.323
+-2.0950000000000006
+-2.048000000000001
+-1.5410000000000004
+-1.246
+-1.141
+-1.6310000000000004
+-2.1430000000000007
+-1.6530000000000005
+-1.1390000000000016
+-2.377
+-1.189
+-1.5180000000000002
+-1.3010000000000002
+-1.319
+-1.6590000000000003
+-0.2680000000000001
+-2.0060000000000007
+-1.185
+-1.6800000000000006
+-2.1849999999999996
+-1.8030000000000004
+-1.182
+-1.5880000000000003
+-2.3590000000000027
+-1.208
+-2.2030000000000007
+-1.8260000000000005
+-2.141
+-1.6440000000000006
+-1.138
+-0.5920000000000003
+-1.64
+-1.8720000000000008
+-2.0135000000000005
+-1.6120000000000003
+-1.3920000000000001
+-1.5640000000000003
+-2.3770000000000007
+-1.4089999999999896
+-1.9500000000000006
+-2.205000000000001
+-1.7420000000000004
+-1.7420000000000004
+-1.279
+-1.3370000000000004
+-2.160000000000001
+-1.9760000000000009
+-1.4200000000000002
+-1.053
+-1.3900000000000001
+-1.8050000000000004
+-1.6780000000000004
+-1.6630000000000003
+-1.5940000000000003
+-1.121
+-1.134
+-1.4920000000000002
+-1.211
+-1.183
+-1.155
+-1.481
+-1.4330000000000003
+-1.3310000000000002
+-1.3600000000000003
+-2.289000000000001
+-1.5150000000000003
+-1.1640000000000001
+-2.085000000000001
+-1.2280000000000009
+-1.242
+-1.7750000000000004
+-2.604
+-2.001000000000001
+-1.7450000000000006
+-1.5869999999999966
+-1.443
+-1.297
+-1.306
+-1.2360000000000002
+-1.3710000000000004
+-0.6120000000000004
+-2.0910000000000006
+-1.5800000000000005
+-1.5970000000000004
+-1.5060000000000002
+-1.6060000000000003
+-2.4960000000000013
+-1.4740000000000002
+-2.2600000000000002
+-1.3230000000000002
+-1.9640000000000009
+-2.257000000000001
+-1.4445000000000003
+-1.35
+-1.5140000000000002
+-1.7030000000000005
+-1.7750000000000006
+-1.4690000000000003
+-1.174
+-1.6510000000000002
+-2.4370000000000003
+-1.7850000000000006
+-1.36
+-1.4610000000000003
+-1.1440000000000001
+-1.236
+-1.7460000000000004
+-1.3770000000000002
+-1.3790000000000004
+-1.4460000000000002
+-1.7340000000000004
+-1.5720000000000005
+-1.247
+-1.5890000000000004
+-1.168
+-1.9460000000000006
+-1.8660000000000005
+-1.175
+-1.3390000000000002
+-1.5330000000000004
+-1.5450000000000004
+-2.576999999999999
+-1.8320000000000005
+-1.0100000000000005
+-1.123
+-1.188
+-1.2420000000000002
+-1.179
+-1.5360000000000003
+-1.6600000000000004
+-1.7770000000000006
+-1.6140000000000003
+-2.735000000000001
+-2.038000000000001
+-1.232
+-0.5290000000000004
+-1.6930000000000005
+-1.211
+-1.8470000000000004
+-1.5460000000000003
+-1.5990000000000002
+-1.8890000000000005
+-1.8670000000000007
+-1.1300000000000001
+-2.001000000000001
+-1.624
+-1.6010000000000004
+-1.7760000000000007
+-1.1080000000000019
+-2.1790000000000007
+-2.1170000000000013
+-1.8540000000000005
+-2.025000000000001
+-0.9720000000000008
+-1.6260000000000003
+-1.5550000000000002
+-2.405000000000004
+-1.143
+-1.251
+-1.264
+-2.475999999999998
+-0.40900000000000025
+-1.263
+-1.255
+-2.017000000000001
+-1.041
+-1.246
+-1.7250000000000005
+-1.7440000000000007
+-0.8060000000000006
+-1.192
+-1.6440000000000006
+-1.134
+-1.6530000000000005
+-2.9700000000000015
+-1.2610000000000015
+-1.205500000000003
+-1.154
+-2.3020000000000014
+-1.3900000000000001
+-1.5410000000000004
+-1.5990000000000002
+-1.25
+-1.4610000000000003
+-1.3090000000000002
+-2.5340000000000016
+-0.9650000000000007
+-1.6700000000000004
+-2.902999999999982
+-2.7200000000000015
+-1.6460000000000004
+-1.4850000000000003
+-1.7730000000000006
+-2.812999999999986
+-2.4730000000000016
+-1.8980000000000008
+-2.4979999999999962
+-1.7890000000000006
+-1.6270000000000002
+-1.6140000000000003
+-1.435
+-1.7790000000000004
+-1.9820000000000009
+-2.1160000000000005
+-1.262
+-1.6510000000000005
+-1.2489999999999999
+-1.6200000000000003
+-2.137000000000001
+-1.8080000000000007
+-0.7100000000000004
+-1.4360000000000002
+-1.8660000000000005
+-2.1020000000000008
+-1.7130000000000005
+-1.3960000000000001
+-1.5010000000000001
+-1.4520000000000004
+-1.5979999999999965
+-2.435000000000001
+-1.085
+-1.7060000000000004
+-2.2920000000000003
+-2.410000000000001
+-1.6450000000000005
+-1.6925000000000003
+-1.242
+-1.4000000000000004
+-1.2249999999999999
+-1.4880000000000004
+-1.6169999999999953
+-0.5090000000000003
+-1.3670000000000002
+-2.0725000000000007
+-1.3545000000000003
+-1.8260000000000005
+-2.3499999999999943
+-2.181000000000001
+-1.2530000000000001
+-1.5450000000000004
+-1.5355000000000003
+-1.6830000000000003
+-1.9420000000000006
+-1.266
+-1.9110000000000007
+-1.6680000000000006
+-1.8620000000000005
+-1.3980000000000001
+-1.3770000000000002
+-2.4110000000000023
+-1.6780000000000004
+-2.0740000000000007
+-1.5530000000000004
+-1.2770000000000001
+-1.423
+-1.415
+-2.0110000000000006
+-1.9410000000000007
+-1.138
+-1.4020000000000001
+-1.3410000000000002
+-2.3910000000000005
+-1.298
+-1.3599999999999999
+-0.9640000000000007
+-1.9340000000000006
+-1.4580000000000002
+-2.1200000000000006
+-1.223
+-1.248
+-2.0260000000000007
+-1.4330000000000003
+-2.0650000000000004
+-1.44
+-1.0889999999999997
+-1.5270000000000001
+-2.0310000000000006
+-1.6575000000000004
+-1.312
+-0.4200000000000002
+-2.2170000000000005
+-0.7890000000000006
+-1.205
+-1.7630000000000003
+-1.5550000000000004
+-3.051000000000001
+-1.7210000000000005
+-1.7510000000000003
+-1.4170000000000003
+-1.8250000000000004
+-2.016000000000001
+-1.7330000000000005
+-1.6430000000000002
+-1.6650000000000005
+-2.0980000000000008
+-1.4040000000000001
+-3.3270000000000004
+-1.7550000000000006
+-1.6240000000000003
+-2.0070000000000006
+-2.519
+-1.9240000000000006
+-1.236
+-0.3080000000000002
+-1.3390000000000002
+-1.8270000000000004
+-1.4590000000000003
+-1.31
+-1.7560000000000004
+-3.0340000000000016
+-1.9270000000000005
+-1.286
+-1.7480000000000004
+-1.178
+-2.195000000000001
+-2.790000000000002
+-1.8840000000000006
+-2.216000000000001
+-1.423
+-1.5230000000000004
+-2.0680000000000005
+-1.294
+-1.7450000000000006
+-1.5110000000000001
+-1.6310000000000002
+-0.7180000000000004
+-1.8100000000000005
+-1.3010000000000002
+-2.7300000000000004
+-1.9020000000000006
+-1.0320000000000007
+-2.4840000000000018
+-1.6040000000000005
+-2.0570000000000004
+-2.0969999999999978
+-1.7120000000000004
+-1.4480000000000004
+-1.7630000000000003
+-1.32
+-2.1060000000000008
+-1.385
+-1.291
+-1.057
+-1.7630000000000006
+-1.0255000000000007
+-1.0299999999999998
+-1.6520000000000006
+-1.4890000000000003
+-1.6450000000000005
+-2.0070000000000006
+-1.7300000000000004
+-1.242
+-1.6290000000000004
+-2.3559999999999945
+-1.7480000000000004
+-1.9210000000000007
+-1.4180000000000001
+-2.0590000000000006
+-1.3690000000000002
+-1.4760000000000004
+-2.1455000000000006
+-2.153000000000001
+-1.8810000000000007
+-2.1610000000000023
+-1.5240000000000005
+-1.4320000000000002
+-1.9575000000000007
+-2.072000000000001
+-1.176
+-1.6500000000000001
+-1.186
+-1.306
+-1.4050000000000002
+-1.6540000000000004
+-1.9540000000000006
+-1.5040000000000002
+-2.2200000000000015
+-1.7220000000000004
+-2.5230000000000024
+-2.285000000000001
+-1.4520000000000004
+-1.5520000000000003
+-1.7530000000000006
+-1.8060000000000005
+-1.7640000000000007
+-1.431
+-2.2270000000000008
+-1.4410000000000003
+-2.3129999999999993
+-2.1979999999999946
+-2.647999999999988
+-2.1670000000000007
+-1.5250000000000004
+-1.2850000000000001
+-2.0060000000000007
+-1.7350000000000003
+-1.7580000000000005
+-1.3950000000000002
+-1.1390000000000007
+-2.1480000000000015
+-1.263
+-1.8890000000000007
+-1.246
+-1.6149999999999964
+-1.409
+-1.5610000000000002
+-1.254
+-1.7700000000000005
+-2.62
+-0.07800000000000001
+-1.1720000000000002
+-2.105500000000001
+-2.8190000000000013
+-1.9220000000000017
+-1.368
+-1.6180000000000003
+-1.8030000000000006
+-1.8190000000000006
+-1.6550000000000002
+-1.8460000000000005
+-2.26
+-1.3900000000000001
+-1.4620000000000002
+-0.8410000000000006
+-1.1320000000000001
+-1.222
+-1.8220000000000005
+-1.8730000000000004
+-3.3160000000000025
+-2.2519999999999922
+-1.8840000000000008
+-1.5110000000000003
+-2.355000000000001
+-1.7310000000000003
+-2.6060000000000016
+-2.146000000000001
+-1.8080000000000005
+-1.7130000000000005
+-1.087
+-1.8120000000000003
+-2.6420000000000012
+-1.5990000000000002
+-2.198000000000001
+-1.5300000000000002
+-2.204500000000001
+-1.7175000000000002
+-1.181
+-2.160000000000001
+-1.3650000000000002
+-1.2590000000000001
+-2.4720000000000018
+-1.8190000000000013
+-1.7720000000000007
+-1.2329999999999999
+-1.874
+-1.485
+-1.204
+-1.6960000000000004
+-1.7810000000000006
+-1.1749999999999998
+-1.4570000000000003
+-1.3489999999999984
+-1.221
+-1.4880000000000004
+-1.2880000000000003
+-2.0430000000000006
+-1.5780000000000003
+-1.5270000000000004
+-1.5080000000000002
+-1.274
+-2.1430000000000007
+-1.194
+-2.0920000000000005
+-1.2530000000000001
+-2.225000000000001
+-1.145
+-1.7100000000000004
+-1.0539999999999987
+-1.358
+-1.6150000000000002
+-1.5780000000000003
+-1.7880000000000005
+-2.181000000000001
+-1.8410000000000006
+-2.341999999999998
+-1.4355000000000002
+-1.8440000000000005
+-1.5720000000000005
+-1.8690000000000007
+-1.9530000000000007
+-1.6100000000000003
+-1.259
+-1.332
+-1.4460000000000002
+-3.4959999999999916
+-1.4410000000000003
+-1.209
+-2.067000000000001
+-1.7270000000000003
+-1.5590000000000002
+-1.3150000000000008
+-0.6120000000000003
+-1.9220000000000008
+-2.0360000000000005
+-1.6250000000000004
+-1.5660000000000003
+-1.4069999999999927
+-1.3650000000000002
+-1.3270000000000002
+-2.8209999999999846
+-1.151
+-1.4710000000000005
+-1.27
+-1.5300000000000002
+-1.8200000000000005
+-1.1460000000000001
+-1.336
+-1.5160000000000002
+-1.123
+-1.4540000000000002
+-1.5400000000000018
+-1.9430000000000005
+-1.103
+-2.1800000000000006
+-1.9040000000000008
+-1.9090000000000007
+-1.9270000000000005
+-1.19
+-1.8780000000000006
+-2.125000000000001
+-1.3640000000000003
+-1.6390000000000005
+-1.6130000000000004
+-3.051
+-1.7030000000000003
+-2.5040000000000013
+-1.5190000000000003
+-2.0580000000000007
+-0.3430000000000002
+-1.3239999999999905
+-2.507999999999999
+-1.3110000000000002
+-2.256000000000001
+-2.357000000000001
+-1.8040000000000005
+-1.169
+-1.9340000000000006
+-1.4380000000000002
+-1.3590000000000002
+-1.5310000000000001
+-1.7640000000000007
+-1.5230000000000001
+-2.130000000000001
+-1.2730000000000001
+-1.029
+-1.461
+-1.7290000000000005
+-1.5200000000000002
+-1.5680000000000005
+-2.0380000000000007
+-1.266
+-1.3470000000000002
+-2.0680000000000005
+-1.092
+-1.3840000000000001
+-0.30700000000000016
+-1.8470000000000004
+-1.7510000000000003
+-2.1499999999999972
+-1.268
+-1.4780000000000002
+-1.337
+-2.474000000000001
+-1.4700000000000002
+-0.47400000000000025
+-2.2269999999999994
+-1.6190000000000002
+-2.3550000000000013
+-1.7920000000000005
+-1.7260000000000004
+-2.311000000000001
+-1.343
+-2.262000000000001
+-2.0945000000000005
+-2.194000000000001
+-1.4170000000000003
+-1.2610000000000001
+-1.7200000000000004
+-1.216
+-1.093
+-2.013000000000001
+-2.1530000000000005
+-1.8370000000000006
+-1.3170000000000002
+-0.8970000000000006
+-1.7270000000000003
+-2.4019999999999992
+-1.6050000000000002
+-1.4970000000000003
+-1.5130000000000003
+-1.9750000000000005
+-0.39500000000000024
+-2.1390000000000007
+-1.6950000000000005
+-0.7830000000000005
+-1.6930000000000005
+-1.4350000000000003
+-1.173
+-1.7010000000000005
+-1.7540000000000004
+-0.214
+-1.7820000000000005
+-2.1399999999999983
+-1.4140000000000001
+-1.185
+-1.33
+-1.7870000000000004
+-2.6515000000000004
+-1.8310000000000006
+-1.9740000000000009
+-1.243
+-1.1119999999999999
+-1.5200000000000005
+-1.6510000000000002
+-1.5230000000000004
+-1.364
+-1.9590000000000005
+-1.8110000000000006
+-1.214
+-1.7840000000000007
+-1.163
+-1.9340000000000006
+-1.8490000000000006
+-1.5420000000000003
+-2.0060000000000007
+-1.154
+-1.3649999999999907
+-1.5890000000000002
+-1.1
+-1.1079999999999999
+-1.4300000000000002
+-1.8200000000000007
+-1.5620000000000003
+-1.091
+-1.5830000000000002
+-1.5510000000000004
+-2.168000000000001
+-1.3880000000000003
+-2.1100000000000008
+-1.7400000000000004
+-1.327
+-1.6900000000000004
+-2.102999999999995
+-1.066
+-2.0200000000000005
+-1.238
+-1.5820000000000003
+-2.220000000000002
+-2.077000000000001
+-2.475999999999998
+-1.222
+-1.264
+-1.4600000000000002
+-2.16
+-1.8650000000000007
+-1.5590000000000002
+-2.144000000000001
+-1.7920000000000005
+-1.4880000000000002
+-1.2280000000000002
+-1.206
+-1.301
+-1.9130000000000007
+-2.499000000000003
+-1.375
+-1.397
+-1.6690000000000005
+-1.5130000000000001
+-1.4740000000000002
+-1.7810000000000006
+-1.207
+-1.9850000000000005
+-2.021
+-2.1770000000000005
+-2.0000000000000004
+-1.4150000000000003
+-2.8810000000000016
+-2.105000000000001
+-1.7390000000000003
+-1.5640000000000003
+-2.169999999999995
+-2.0790000000000006
+-1.9030000000000005
+-2.1375000000000006
+-1.8590000000000004
+-1.9860000000000007
+-1.6350000000000002
+-1.3820000000000001
+-1.9410000000000007
+-1.052
+-1.9550000000000007
+-1.207
+-1.8520000000000008
+-1.3490000000000002
+-2.0820000000000007
+-1.5980000000000003
+-1.6800000000000004
+-1.3860000000000003
+-1.162
+-2.0360000000000005
+-1.3880000000000003
+-2.101000000000001
+-1.5190000000000003
+-3.144
+-1.157
+-1.8680000000000005
+-1.5420000000000003
+-2.0150000000000006
+-1.4370000000000003
+-1.5040000000000002
+-1.7300000000000004
+-1.5950000000000002
+-2.160000000000001
+-2.2519999999999962
+-1.5170000000000003
+-2.0180000000000007
+-1.3960000000000004
+-1.9860000000000007
+-2.037000000000001
+-1.7160000000000002
+-1.5000000000000002
+-2.125000000000001
+-1.5690000000000004
+-1.175
+-2.633000000000001
+-1.7680000000000007
+-2.0310000000000006
+-1.8330000000000006
+-1.6810000000000005
+-1.4480000000000002
+-1.339
+-2.3589999999999973
+-1.262
+-2.415000000000004
+-2.319000000000001
+-1.9510000000000005
+-1.286
+-1.5850000000000004
+-1.7650000000000006
+-2.162000000000004
+-1.7830000000000006
+-1.8515000000000006
+-1.7380000000000004
+-1.117
+-1.4410000000000003
+-1.245
+-1.6260000000000003
+-1.1080000000000005
+-2.4850000000000003
+-1.232
+-1.7410000000000045
+-1.4520000000000002
+-1.5050000000000003
+-1.6230000000000002
+-1.5670000000000002
+-0.8080000000000006
+-3.000000000000001
+-2.750000000000001
+-1.195
+-1.272
+-1.6090000000000004
+-1.8140000000000005
+-1.256
+-1.6080000000000003
+-1.6610000000000005
+-1.179
+-2.575000000000001
+-1.5255000000000003
+-1.6900000000000004
+-1.245
+-1.225
+-1.9460000000000006
+-1.189
+-2.032000000000001
+-1.3090000000000002
+-0.09499999999999997
+-1.6650000000000005
+-1.6200000000000006
+-3.2859999999999987
+-2.271
+-2.271000000000001
+-1.3010000000000002
+-1.5670000000000004
+-2.042000000000002
+-1.9290000000000007
+-1.4740000000000002
+-1.7090000000000005
+-1.17
+-1.7460000000000004
+-1.3150000000000002
+-2.288999999999998
+-1.8900000000000006
+-0.9340000000000006
+-1.3010000000000002
+-1.5470000000000002
+-2.407999999999997
+-1.5890000000000004
+-3.1710000000000003
+-1.4505000000000003
+-1.5950000000000004
+-2.264000000000001
+-1.4310000000000003
+-1.4880000000000002
+-1.2510000000000001
+-1.3780000000000001
+-1.7640000000000005
+-1.8630000000000007
+-1.282
+-1.291
+-2.328000000000001
+-2.197000000000001
+-2.8510000000000004
+-2.5380000000000003
+-1.548
+-0.9160000000000007
+-1.9130000000000007
+-1.5070000000000003
+-1.109
+-1.5920000000000003
+-2.021000000000001
+-2.2850000000000024
+-2.0300000000000007
+-1.4540000000000002
+-2.0580000000000007
+-1.9340000000000006
+-1.248
+-1.5920000000000005
+-3.1350000000000007
+-1.8670000000000007
+-0.6785000000000004
+-1.5380000000000003
+-1.5480000000000005
+-1.5050000000000003
+-1.455
+-1.14
+-1.364
+-1.5330000000000001
+-1.6960000000000004
+-1.375
+-1.7490000000000006
+-1.7060000000000004
+-1.9770000000000008
+-2.751500000000002
+-2.4869999999999974
+-1.1115
+-1.5240000000000005
+-1.101
+-1.4070000000000003
+-1.276
+-1.4020000000000001
+-1.9920000000000007
+-1.6570000000000005
+-1.3110000000000002
+-1.5910000000000002
+-1.346
+-1.6720000000000004
+-1.266
+-1.8790000000000004
+-1.5780000000000003
+-1.28
+-1.6630000000000003
+-1.5880000000000005
+-0.4710000000000002
+-2.8039999999999994
+-1.391
+-1.2630000000000001
+-2.929000000000001
+-1.4680000000000004
+-1.6660000000000004
+-2.0070000000000006
+-1.193
+-1.4180000000000001
+-1.116
+-2.263000000000001
+-1.163
+-2.0470000000000006
+-1.7930000000000006
+-1.8500000000000005
+-1.6190000000000002
+-1.8900000000000006
+-2.0520000000000005
+-1.403
+-1.7590000000000003
+-0.6050000000000003
+-2.1140000000000008
+-1.5310000000000001
+-1.9140000000000066
+-1.9020000000000008
+-0.047
+-1.5870000000000002
+-2.021000000000001
+-1.274
+-1.6530000000000005
+-1.4040000000000004
+-1.3660000000000003
+-1.4610000000000003
+-1.6970000000000005
+-1.2000000000000002
+-1.8510000000000004
+-1.5810000000000004
+-1.244
+-1.7010000000000005
+-1.2810000000000001
+-1.4930000000000003
+-2.1479999999999992
+-1.294
+-1.8700000000000006
+-2.1590000000000007
+-1.2810000000000001
+-1.3719999999999999
+-1.1440000000000001
+-1.8920000000000008
+-1.2570000000000001
+-1.266
+-1.6555000000000004
+-1.5290000000000004
+-2.3930000000000007
+-1.225
+-2.1200000000000006
+-1.151
+-1.7439999999999984
+-1.9220000000000006
+-1.8300000000000005
+-2.131000000000001
+-1.4180000000000001
+-1.8025000000000007
+-1.1159999999999992
+-1.167
+-1.346
+-1.3590000000000002
+-1.109
+-1.7080000000000006
+-1.2810000000000001
+-2.319000000000001
+-2.5030000000000014
+-1.358
+-1.7860000000000005
+-1.2570000000000001
+-1.5060000000000002
+-1.6050000000000004
+-1.5420000000000003
+-1.7045000000000003
+-1.1889999999999998
+-2.204999999999998
+-1.36
+-1.9900000000000007
+-1.8830000000000005
+-1.4670000000000003
+-1.3650000000000002
+-1.147
+-1.5750000000000002
+-1.3880000000000003
+-1.8560000000000008
+-1.311
+-2.301000000000001
+-1.8470000000000004
+-1.4490000000000003
+-1.033
+-1.2770000000000112
+-1.172
+-0.7720000000000006
+-1.22
+-1.5140000000000002
+-2.0710000000000006
+-1.9060000000000006
+-1.6770000000000005
+-1.7310000000000003
+-1.4660000000000002
+-1.8820000000000006
+-1.7520000000000007
+-1.2670000000000001
+-1.123
+-1.312
+-1.4940000000000002
+-1.6620000000000004
+-1.161
+-1.6550000000000002
+-1.6880000000000004
+-1.158
+-1.218
+-1.256
+-1.8720000000000006
+-1.214
+-1.23
+-1.153
+-1.074
+-1.214
+-0.4750000000000002
+-1.6580000000000004
+-2.6270000000000024
+-1.7780000000000005
+-1.083
+-2.249000000000001
+-2.242000000000001
+-1.9610000000000007
+-1.081
+-1.3250000000000002
+-1.5020000000000002
+-1.6330000000000005
+-1.3
+-2.0459999999999994
+-1.324
+-2.302000000000001
+-1.08
+-1.1400000000000001
+-1.8080000000000007
+-1.6880000000000002
+-1.08
+-1.6180000000000003
+-1.304
+-1.2149999999999999
+-1.5420000000000003
+-1.1219999999999999
+-1.132
+-1.2710000000000001
+-1.6340000000000003
+-1.5260000000000002
+-1.275
+-1.4640000000000002
+-1.37
+-1.9980000000000007
+-2.785000000000002
+-1.8130000000000006
+-2.029
+-1.2
+-0.8530000000000005
+-1.6455000000000004
+-1.4819999999999964
+-1.5870000000000002
+-1.286
+-1.593
+-1.9490000000000007
+-3.2530000000000054
+-3.3779999999999992
+-1.364
+-1.2570000000000001
+-1.7130000000000003
+-1.5870000000000004
+-1.4780000000000002
+-1.3480000000000003
+-0.24000000000000005
+-2.25
+-1.403
+-2.332000000000001
+-1.5970000000000004
+-1.3770000000000002
+-1.242
+-1.6820000000000004
+-1.07
+-1.6630000000000003
+-1.9170000000000007
+-1.6560000000000004
+-1.2790000000000001
+-1.034
+-1.8470000000000004
+-1.0100000000000007
+-1.139
+-1.6110000000000002
+-1.7490000000000006
+-2.0340000000000007
+-1.2039999999999997
+-1.2930000000000017
+-1.4220000000000002
+-1.6740000000000004
+-0.5265000000000003
+-1.7410000000000005
+-1.176
+-1.2890000000000001
+-1.8590000000000007
+-0.31500000000000017
+-1.0519999999999983
+-1.7200000000000006
+-2.532000000000001
+-2.204000000000001
+-2.123000000000003
+-1.4850000000000003
+-1.7480000000000002
+-1.5745000000000002
+-1.1480000000000006
+-1.359
+-1.7385000000000004
+-1.5000000000000002
+-1.127
+-1.8770000000000007
+-1.163
+-1.4730000000000003
+-2.272999999999999
+-1.2810000000000001
+-1.294
+-1.2770000000000001
+-2.7940000000000014
+-1.235
+-2.0660000000000007
+-1.4640000000000004
+-2.1110000000000007
+-1.8240000000000005
+-1.2670000000000001
+-2.608000000000001
+-1.083
+-1.2469999999999999
+-1.7760000000000007
+-1.243
+-1.6790000000000003
+-1.5270000000000001
+-1.9050000000000007
+-1.8180000000000005
+-2.1040000000000005
+-1.4209999999999998
+-1.3930000000000002
+-2.0730000000000004
+-1.302
+-1.5830000000000002
+-1.195
+-2.241000000000001
+-1.6510000000000002
+-1.4100000000000001
+-1.6720000000000006
+-1.339
+-1.9920000000000009
+-1.283
+-1.461
+-0.5140000000000003
+-1.4750000000000003
+-1.9830000000000005
+-1.29
+-1.439
+-1.156
+-1.372
+-1.9320000000000006
+-1.6030000000000004
+-1.4840000000000002
+-1.6249999999999905
+-1.3010000000000002
+-1.7020000000000004
+-1.7600000000000005
+-1.24
+-1.6700000000000004
+-2.9700000000000006
+-1.7640000000000007
+-1.177
+-1.133
+-1.6190000000000002
+-1.225
+-1.411
+-1.6030000000000002
+-2.260000000000001
+-1.5240000000000002
+-1.9460000000000006
+-1.8380000000000005
+-1.6980000000000004
+-2.2079999999999984
+-1.314
+-2.009000000000001
+-1.295
+-1.2150000000000005
+-1.7880000000000007
+-1.8170000000000006
+-1.478
+-1.419
+-0.25900000000000006
+-1.228
+-1.5660000000000003
+-1.8800000000000003
+-3.284000000000001
+-1.168
+-1.117
+-1.9900000000000007
+-1.8170000000000006
+-1.9640000000000017
+-1.7540000000000004
+-2.1700000000000004
+-1.6040000000000003
+-1.7570000000000003
+-1.162
+-1.8600000000000005
+-2.0250000000000012
+-1.3410000000000002
+-2.177999999999995
+-1.4440000000000004
+-1.7740000000000005
+-1.4630000000000003
+-1.5710000000000002
+-1.139
+-1.4610000000000003
+-1.5790000000000002
+-1.362
+-1.9390000000000007
+-1.104
+-1.9520000000000004
+-2.1390000000000007
+-1.318
+-1.2409999999999999
+-1.4380000000000002
+-2.5969999999999986
+-1.3210000000000002
+-0.24400000000000005
+-1.391
+-1.155
+-1.7195000000000005
+-1.5930000000000004
+-1.6840000000000004
+-2.208000000000001
+-1.234
+-2.1389999999999993
+-1.9090000000000007
+-1.226
+-1.4330000000000003
+-1.175
+-1.4820000000000002
+-1.15
+-1.484
+-1.3820000000000001
+-1.9980000000000007
+-1.241
+-1.232
+-2.6599999999999993
+-1.5940000000000003
+-1.4580000000000002
+-1.3960000000000004
+-1.438
+-1.325
+-1.6040000000000003
+-2.328000000000001
+-1.2329999999999999
+-1.7600000000000002
+-2.0550000000000006
+-1.6770000000000003
+-1.354
+-1.9100000000000006
+-1.8020000000000005
+-1.291
+-1.7660000000000005
+-1.1239999999999999
+-1.3010000000000002
+-2.740999999999986
+-0.36500000000000016
+-1.291
+-1.2200000000000002
+-1.333
+-1.238
+-1.475
+-1.6280000000000003
+-1.4010000000000002
+-1.023
+-1.4930000000000003
+-1.192
+-1.33
+-1.9770000000000008
+-1.6700000000000004
+-1.181
+-1.335
+-1.351
+-1.8500000000000005
+-1.5535000000000003
+-2.0630000000000006
+-1.7600000000000007
+-1.4290000000000003
+-1.6689999999999974
+-1.173
+-1.8000000000000005
+-1.9790000000000005
+-1.176
+-1.7600000000000005
+-2.165000000000001
+-2.2110000000000007
+-1.7910000000000006
+-1.3
+-2.3284999999999965
+-1.1600000000000001
+-1.5150000000000003
+-1.8920000000000006
+-1.3840000000000001
+-0.7380000000000004
+-1.9150000000000005
+-2.1590000000000007
+-2.1690000000000005
+-1.233
+-1.7690000000000006
+-2.4500000000000015
+-1.513
+-1.19
+-1.4330000000000003
+-1.431
+-1.145
+-1.7610000000000006
+-1.047
+-2.1100000000000008
+-1.413
+-1.284
+-1.4060000000000001
+-1.4390000000000003
+-1.2
+-1.286
+-1.6290000000000004
+-1.2730000000000001
+-1.158
+-1.7150000000000003
+-2.0200000000000005
+-1.4585000000000001
+-1.5160000000000002
+-1.173
+-1.8290000000000006
+-1.1789999999999998
+-1.7160000000000004
+-0.27400000000000013
+-1.2510000000000001
+-1.319
+-2.0340000000000007
+-2.1880000000000006
+-2.63
+-1.0360000000000007
+-1.21
+-1.479
+-3.813999999999993
+-1.5579999999999914
+-0.3190000000000001
+-1.4840000000000004
+-1.4440000000000002
+-0.7540000000000006
+-1.4090000000000003
+-1.7340000000000004
+-1.7850000000000006
+-1.4290000000000003
+-1.7120000000000004
+-1.4690000000000003
+-1.5840000000000003
+-1.9940000000000007
+-2.000000000000001
+-1.8750000000000004
+-1.2349999999999999
+-1.387
+-1.233
+-2.0299999999999994
+-1.1769999999999998
+-1.7710000000000004
+-1.6650000000000005
+-0.9710000000000006
+-1.343
+-1.231
+-1.6240000000000006
+-1.9299999999999982
+-1.8789999999999898
+-1.4130000000000003
+-1.174
+-1.4090000000000003
+-1.7140000000000004
+-1.212
+-1.1600000000000001
+-1.073
+-1.3860000000000001
+-1.365
+-1.6960000000000004
+-2.6140000000000017
+-2.0745000000000005
+-1.199
+-1.2
+-2.574000000000003
+-1.4440000000000004
+-1.391
+-1.894000000000002
+-1.1509999999999998
+-2.0969999999999973
+-1.485
+-1.8110000000000004
+-0.22300000000000006
+-2.7530000000000006
+-1.5819999999999925
+-1.3840000000000003
+-1.4930000000000003
+-2.3090000000000046
+-1.177
+-1.4260000000000002
+-1.1239999999999999
+-1.364
+-1.379
+-1.2530000000000001
+-1.8440000000000007
+-1.6660000000000004
+-0.3930000000000002
+-1.364
+-1.662999999999997
+-1.7580000000000005
+-1.5730000000000004
+-1.5510000000000004
+-1.306
+-2.256000000000002
+-1.106
+-1.278
+-1.3900000000000001
+-1.5650000000000004
+-1.283
+-1.8700000000000006
+-1.254
+-1.266
+-1.7470000000000006
+-1.3840000000000001
+-1.8960000000000008
+-1.5550000000000002
+-1.169
+-1.5970000000000004
+-1.3570000000000002
+-1.1749999999999998
+-2.025000000000001
+-2.0150000000000006
+-1.4170000000000003
+-1.282
+-2.0629999999999993
+-1.7600000000000005
+-1.6730000000000003
+-2.0270000000000006
+-1.7080000000000004
+-0.5810000000000004
+-1.307
+-1.8350000000000006
+-1.9800000000000006
+-1.338
+-1.8620000000000005
+-1.9210000000000007
+-2.2760000000000007
+-2.0880000000000005
+-1.2710000000000001
+-1.366
+-0.7110000000000003
+-1.16
+-1.2309999999999999
+-1.9750000000000005
+-1.5270000000000001
+-1.067
+-2.1930000000000005
+-1.3650000000000002
+-1.33
+-1.7555000000000005
+-1.5080000000000005
+-1.356
+-1.149
+-1.5000000000000004
+-1.6940000000000004
+-2.4950000000000014
+-1.328
+-0.5290000000000002
+-1.5200000000000005
+-1.374
+-2.576000000000001
+-1.9240000000000008
+-1.185
+-2.2140000000000004
+-1.241
+-1.247
+-1.296
+-1.276
+-2.7289999999999863
+-1.6300000000000003
+-1.046
+-1.205
+-2.268
+-1.7060000000000004
+-1.251
+-1.7220000000000004
+-1.8020000000000005
+-1.5905000000000005
+-1.3079999999999998
+-1.8710000000000004
+-1.2570000000000001
+-0.8860000000000006
+-1.258
+-1.6210000000000002
+-0.2980000000000002
+-1.5520000000000003
+-1.8750000000000007
+-1.4380000000000002
+-1.368
+-1.239
+-1.8765000000000007
+-1.9840000000000009
+-1.9360000000000008
+-1.7060000000000004
+-1.327
+-1.3760000000000003
+-1.154
+-1.133
+-1.7150000000000003
+-3.1640000000000006
+-2.5429999999999984
+-1.173
diff --git a/Group_C/result_image/data/07reward double + priority.csv b/Group_C/result_image/data/07reward double + priority.csv
new file mode 100644
index 0000000..d710603
--- /dev/null
+++ b/Group_C/result_image/data/07reward double + priority.csv
@@ -0,0 +1,1000 @@
+-1.345
+-1.475
+-1.686
+-1.361
+-1.221
+-1.3
+-1.325
+-1.219
+-1.212
+-1.465
+-1.217
+-1.164
+-1.12
+-1.417
+-1.158
+-1.244
+-1.607
+-1.142
+-1.426
+-1.297
+-1.074
+-1.734
+-1.332
+-1.462
+-1.08
+-1.259
+-1.08
+-1.268
+-1.137
+-1.24
+-1.236
+-1.413
+-1.152
+-1.413
+-1.171
+-1.292
+-1.281
+-1.853
+-1.489
+-1.58
+-1.515
+-1.261
+-1.082
+-1.21
+-1.223
+-1.276
+-1.16
+-1.199
+-1.257
+-1.201
+-1.17
+-1.383
+-1.331
+-1.288
+-1.7235
+-1.423
+-1.524
+-1.244
+-1.132
+-1.238
+-1.264
+-1.575
+-1.762
+-1.133
+-1.158
+-1.185
+-1.228
+-1.182
+-1.283
+-1.321
+-1.106
+-1.426
+-1.245
+-1.169
+-1.126
+-1.421
+-1.281
+-1.598
+-3.283
+-1.712
+-1.344
+-1.141
+-1.132
+-1.329
+-1.328
+-1.995
+-1.1
+-1.458
+-1.161
+-1.089
+-1.997
+-1.578
+-1.192
+-1.392
+-1.065
+-1.287
+-1.303
+-1.549
+-2.751
+-1.179
+-1.212
+-1.164
+-1.376
+-1.132
+-1.067
+-1.323
+-1.478
+-1.554
+-1.972
+-1.686
+-1.395
+-2.086
+-1.345
+-1.59
+-1.93
+-1.258
+-1.293
+-1.845
+-2.139
+-1.626
+-1.866
+-2.151
+-1.273
+-1.607
+-1.28
+-1.159
+-1.644
+-1.775
+-1.13
+-1.419
+-1.373
+-1.214
+-1.692
+-1.567
+-1.699
+-1.127
+-1.681
+-1.887
+-1.811
+-1.717
+-0.595
+-0.531
+-1.29
+-1.532
+-1.792
+-0.461
+-2.193
+-2.013
+-1.631
+-1.538
+-0.397
+-1.551
+-1.01
+-0.439
+-1.31
+-2.068
+-1.328
+-1.305
+-1.379
+-0.442
+-1.958
+-1.49
+-1.374
+-0.263
+-1.511
+-1.744
+-1.031
+-0.864
+-1.722
+-0.645
+-1.812
+-1.7175
+-2.039
+-0.159
+-0.453
+-0.145
+-1.481
+-1.846
+-0.442
+-1.528
+-1.263
+-1.766
+-1.819
+-1.205
+-0.677
+-0.404
+-0.439
+-1.342
+-1.715
+-0.557
+0.03
+-0.536
+-0.117
+-2.011
+-1.337
+-1.52
+-1.6435
+-1.617
+-1.624
+-1.792
+-1.335
+-0.709
+-1.413
+-0.645
+-0.372
+-1.7785
+-1.164
+-1.09
+-1.576
+-1.91
+-1.43
+-1.2175
+-1.566
+-1.755
+-1.224
+-0.121
+-1.687
+-1.422
+-1.578
+-0.134
+-0.634
+-1.657
+-0.436
+-0.3405
+-1.589
+-1.092
+-1.282
+-0.109
+-0.396
+0.29
+-1.182
+-1.084
+-1.035
+-0.292
+-1.407
+-1.548
+-1.462
+-0.306
+-1.622
+-0.3675
+-1.498
+-1.372
+-1.631
+-1.539
+0.064
+-1.385
+-1.314
+0.071
+-1.32
+-1.525
+-1.386
+-0.887
+0.107
+-0.086
+-0.329
+-0.366
+-1.305
+-0.468
+-1.323
+0.218
+0.14
+-1.8395
+0.022
+-0.925
+-1.444
+-1.595
+-1.42
+-1.28
+-0.078
+-1.462
+-1.398
+-0.978
+0.102
+-1.153
+-1.529
+-0.984
+-1.555
+-0.603
+0.038
+-1.11
+-0.017
+-1.858
+-1.335
+-1.672
+-1.248
+-1.34
+-1.444
+-0.061
+-1.503
+-0.36
+-1.145
+-1.189
+-0.095
+-1.3515
+-1.28
+-1.542
+-1.143
+-0.264
+-1.251
+-1.325
+-0.988
+-0.611
+-1.219
+-1.365
+-0.262
+-1.654
+-0.004
+-1.006
+-0.02
+-1.175
+-1.509
+-1.381
+-0.919
+-1.271
+-2.262
+-1.238
+-2.096
+-1.4695
+-1.641
+-2.249
+-1.532
+-1.296
+-1.643
+-0.0465
+-0.187
+0.108
+-0.109
+-1.492
+-0.641
+-0.583
+-0.04
+-1.282
+3.47E-17
+-0.375
+-1.147
+-1.275
+-1.039
+-0.177
+-0.23
+-0.639
+-1.428
+-1.458
+-0.416
+-1.345
+-1.252
+-1.513
+0.076
+-1.45
+0.077
+-1.5535
+-1.332
+-1.643
+-1.319
+-1.051
+-1.243
+0.114
+-1.3965
+-0.407
+-1.358
+-1.614
+-1.451
+-1.515
+-1.277
+-0.633
+-1.199
+-1.355
+-2.397
+0.183
+-1.349
+-1.332
+-1.337
+-1.292
+-2.194
+-1.327
+-1.014
+-1.067
+-1.123
+0.053
+-0.108
+-1.728
+-0.199
+0.289
+-1.453
+0.063
+-1.138
+-1.1015
+-1.3365
+-1.176
+-0.157
+-1.346
+0.071
+-1.263
+-0.088
+0.061
+-0.76
+-1.328
+-1.247
+-1.195
+-1.668
+0.214
+-0.233
+-0.034
+-1.038
+-1.24
+-1.115
+-0.209
+-0.018
+-2.126
+0.033
+-1.29
+-1.159
+-0.18
+0.257
+-0.68
+0.169
+-1.264
+-0.622
+-0.124
+-1.503
+0.088
+-1.303
+0.044
+-1.183
+-1.068
+-1.6435
+-1.273
+-0.668
+-1.167
+-0.022
+-0.574
+0.067
+2.02E-16
+-1.165
+-0.033
+-1.207
+-1.004
+-1.441
+-1.176
+-1.347
+-1.063
+-1.448
+-0.292
+-1.4345
+0.073
+-1.469
+-1.369
+0.174
+-1.388
+-0.201
+-1.476
+-1.383
+-1.156
+-1.332
+-1.171
+-1.645
+-0.837
+-0.876
+-1.457
+-0.046
+-1.921
+0.069
+-1.218
+-0.979
+-1.397
+-1.26
+0.076
+-1.526
+-0.64
+-0.191
+-1.283
+-0.197
+0.016
+-1.632
+-0.486
+-1.264
+0.176
+-0.158
+0.246
+-1.332
+-1.483
+0.016
+0.107
+-1.084
+-1.147
+-1.411
+-1.254
+-1.182
+-1.992
+-0.129
+-1.16
+-1.393
+-1.293
+-1.109
+-0.248
+-0.374
+-1.41
+0.203
+-0.233
+-1.179
+-1.088
+-0.109
+0.244
+-1.289
+-1.134
+-1.239
+0.073
+-1.267
+-1.068
+0.301
+-1.699
+-1.261
+-0.273
+-1.307
+-1.225
+-1.286
+0.135
+-0.448
+-0.312
+0.021
+-1.427
+0.188
+-1.248
+-1.427
+-0.172
+-0.332
+-0.096
+-0.282
+-1.044
+-1.179
+-1.539
+-1.286
+-1.282
+-0.284
+-0.964
+-1.148
+-0.9
+-1.178
+-1.069
+-1.102
+-0.227
+0.121
+0.249
+0.289
+0.006
+-1.199
+0.18
+-1.43
+-1.967
+-1.281
+-1.547
+-1.124
+0.047
+-1.373
+-1.182
+-1.377
+-1.171
+-1.406
+-0.142
+-0.019
+-1.069
+-1.338
+0.008
+-1.507
+0.293
+-1.17
+-1.633
+-1.417
+-1.514
+-0.167
+-1.207
+-1.076
+-0.116
+-1.127
+-1.792
+-0.551
+-1.996
+-1.099
+0.083
+-1.639
+-1.16
+-1.193
+-1.579
+-1.173
+-0.285
+-1.148
+0.253
+-0.392
+-0.03
+-1.15
+0.077
+-0.415
+0.23
+-0.784
+0.02
+-1.329
+-0.949
+-1.534
+-0.046
+-1.246
+-1.381
+-0.449
+0.128
+-1.48
+-1.225
+-0.934
+-1.101
+-0.461
+0.11
+0.285
+-1.431
+-1.405
+-0.683
+-1.357
+-1.427
+-1.002
+-0.214
+-1.655
+-1.388
+-1.097
+-1.677
+-1.98
+-2.149
+-1.067
+-0.499
+-1.321
+0.262
+-1.397
+0.019
+-1.16
+0.095
+-0.322
+-1.752
+0.152
+-1.564
+-1.053
+0.16
+-1.24
+0.209
+0.293
+-1.191
+-1.392
+-1.174
+-1.7105
+-0.588
+-0.691
+0.032
+-1.516
+-1.779
+-1.6395
+-0.606
+-1.134
+-1.3025
+-0.012
+-1.417
+-1.099
+-1.534
+0.154
+-1.354
+-0.66
+-0.05
+-0.949
+-1.821
+-0.095
+-1.089
+0.035
+-1.728
+-0.97
+-2.32
+-0.042
+-1.466
+-0.099
+-0.697
+-0.698
+-1.555
+-1.606
+-0.346
+-1.187
+0.01
+0.201
+-1.251
+-0.265
+-1.431
+0.212
+-0.848
+0.157
+-0.592
+-1.207
+-1.319
+0.071
+0.301
+0.059
+-1.37
+-0.257
+0.297
+-0.95
+-1.129
+-1.239
+-1.119
+-1.128
+-0.3005
+-1.295
+-1.344
+-1.007
+-1.31
+-1.318
+-1.455
+0.17
+-1.534
+-1.241
+-1.238
+-1.498
+-0.524
+-1.017
+-1.42
+0.066
+-1.252
+-1.181
+0.09
+0.097
+-0.5135
+0.144
+0.068
+-1.398
+-0.019
+-1.117
+0.144
+0.002
+-1.102
+-0.295
+-0.271
+-0.628
+-1.681
+0.131
+-1.348
+-0.005
+-0.038
+-0.979
+-1.017
+-0.003
+0.153
+-0.0815
+-0.469
+0.301
+-0.279
+-0.312
+-0.918
+0.107
+-1.5385
+-1.233
+-1.232
+-1.4145
+-1.384
+-2.202
+-1.026
+-1.615
+-1.142
+-0.303
+-0.143
+0.301
+0.133
+0.147
+-1.633
+0.211
+-1.649
+0.048
+0.205
+0.233
+-0.981
+-0.971
+-1.049
+-0.3295
+0.12
+0.285
+-0.182
+-1.016
+-1.391
+-1.187
+-1.398
+-1.196
+-1.415
+0.29
+-0.17
+-1.044
+0.281
+-1.847
+-0.776
+0.237
+0.242
+-1.486
+-0.166
+-1.711
+0.221
+-1.322
+-0.43
+-0.966
+-0.304
+0.019
+0.274
+-1.88
+-1.513
+-1.7875
+-1.258
+-1.3845
+-0.014
+-1.168
+-1.189
+-0.367
+-0.447
+0.169
+-0.075
+0.254
+-2.629
+0.282
+-0.484
+-0.1075
+-1.035
+0.267
+0.023
+-1.055
+-1.109
+-1.138
+-0.433
+0.209
+0.068
+0.17
+-1.2985
+-0.816
+-1.216
+-0.021
+-0.0535
+-1.24
+-1.02
+-1.179
+-1.333
+-0.036
+-0.965
+-0.105
+-0.4045
+-1.341
+-1.448
+-0.9655
+-1.053
+-0.027
+0.165
+-1.015
+-1.091
+-0.273
+-1.294
+-0.945
+0.133
+-1.197
+-0.234
+-1.265
+-0.035
+-0.444
+0.157
+-1.52
+0.281
+0.003
+-1.88
+0.279
+-0.7425
+-1.292
+-0.3595
+-1.153
+-1.236
+0.179
+-1.082
+-1.433
+-0.4065
+-1.35
+0.1145
+0.152
+-1.224
+-1.378
+-1.13
+-1.131
+0.123
+-1.026
+-0.882
+-0.239
+-1.17
+-0.436
+0.013
+-0.023
+-0.44
+0.08
+-1.406
+0.037
+-0.493
+-0.233
+0.095
+0.293
+0.131
+-1.015
+-1.101
+-1.4885
+0.285
+-1.263
+0.108
+-1.348
+-1.082
+-0.367
+-0.972
+-0.082
+0.048
+-1.392
+-1.191
+-1.052
+-1.296
+-1.075
+-1.539
+0.293
+-0.381
+-1.109
+0.14
+-1.612
+-1.231
+-1.406
+-0.094
+-1.375
+-1.593
+-1.606
+-0.093
+-1.162
+-2.356
+-0.138
+-0.421
+-1.744
+-1.247
+-2.414
+-1.37
+-0.754
+-0.752
+-1.84
+-1.961
+-1.503
+-1.651
+-0.678
+-1.478
+-1.823
+-2.2585
+-1.524
+-2.198
+-1.946
+-1.944
+-2.328
+-0.439
+-2.433
+-1.735
+-1.535
+-0.45
+-2.422
+-2.662
+-1.63
+-1.276
+-2.685
+-1.373
+-4.033
+-1.232
+-3.306
+-1.255
+-1.341
+-1.98
+-1.858
+-2.588
+-1.283
+-1.365
+-1.605
+-2.193
+-1.923
+-3.789
+-2.58
+-3.005
+-1.345
+-1.496
+-2.73
+-1.669
+-1.338
+-1.306
+-1.375
+-1.385
+-1.408
+-1.418
+-1.343
+-1.455
+-2.002
+-2.202
+-1.523
+-1.688
+-2.574
+-1.364
+-1.92
+-2.266
+-2.303
+-1.359
+-2.876
+-1.856
+-1.365
+-1.976
diff --git a/Group_C/result_image/data/08reward rainbow DQN.csv b/Group_C/result_image/data/08reward rainbow DQN.csv
new file mode 100644
index 0000000..1fafa68
--- /dev/null
+++ b/Group_C/result_image/data/08reward rainbow DQN.csv
@@ -0,0 +1,1100 @@
+-0.46299999999999986
+1.4089999999999965
+1.064999999999999
+-0.4149999999999998
+-0.256
+1.0859999999999987
+-0.13
+0.41000000000000014
+-0.473
+-0.832
+2.4049999999999843
+-0.6149999999999999
+-1.125
+-1.366
+-1.322
+-1.247
+-1.324
+-2.1440000000000023
+-1.262
+-1.299
+-1.203
+-1.8820000000000006
+-1.388
+-1.162
+-1.3450000000000002
+-1.243
+-1.5420000000000003
+-1.0759999999999998
+-1.2550000000000001
+-1.7160000000000004
+-1.3310000000000002
+-1.3465000000000003
+-1.9780000000000006
+-1.306
+-1.2690000000000001
+-1.6370000000000005
+-2.147000000000001
+-1.1469999999999998
+-1.463
+-1.295
+-1.243
+-1.4850000000000003
+-1.105
+-1.093
+-1.105
+-1.073
+-1.4230000000000003
+-1.177
+-2.229000000000001
+-1.136
+-1.166
+-1.24
+-1.4360000000000002
+-1.074
+-2.258000000000001
+-1.391
+-1.334
+-1.3359999999999999
+-1.3530000000000049
+-1.284
+-1.274
+-1.3550000000000002
+-2.3660000000000005
+-1.338
+-1.42
+-1.233
+-1.4460000000000002
+-0.7440000000000004
+-1.8180000000000005
+-2.7120000000000037
+-1.4290000000000003
+-2.142000000000001
+-1.234
+-1.2770000000000001
+-1.187
+-1.183
+-1.076
+-1.7840000000000005
+-1.4300000000000002
+-0.9530000000000007
+-1.2650000000000001
+-1.3719999999999999
+-1.9890000000000008
+-1.344
+-2.375000000000001
+-2.243000000000001
+-1.3239999999999998
+-1.473999999999999
+-1.144
+-1.3710000000000002
+-1.6300000000000003
+-1.7655000000000005
+-1.5450000000000004
+-1.3
+-2.8660000000000037
+-1.6830000000000003
+-1.127
+-1.8360000000000005
+-1.2530000000000001
+-1.135
+-1.5360000000000003
+-1.204
+-1.223
+-1.163
+-1.5300000000000002
+-1.6130000000000004
+-1.55750000000001
+-0.5700000000000002
+-1.254
+-1.2269999999999999
+-2.0680000000000005
+-1.9040000000000004
+-1.233
+-1.2750000000000001
+-1.154
+-0.9859999999999999
+-1.5230000000000001
+-1.083
+-2.0790000000000015
+-1.097
+-1.3130000000000002
+-1.292
+-2.072000000000001
+-1.25
+-0.21300000000000002
+-0.9160000000000007
+-1.0519999999999998
+-1.144
+-1.7620000000000005
+-1.5830000000000002
+-1.6630000000000003
+-1.0479999999999998
+-1.18
+-1.296
+-1.32
+-1.6060000000000003
+-1.6590000000000003
+-1.8710000000000007
+-1.111
+-1.196
+-1.185
+-1.087
+-1.144
+-1.4490000000000003
+-1.217
+-1.262
+-1.06
+-1.4219999999999942
+-1.354
+-1.371
+-1.232
+-1.087
+-1.7570000000000003
+-1.8520000000000008
+-1.9300000000000006
+-1.6020000000000003
+-1.9130000000000007
+-1.7470000000000057
+-1.174
+-1.3450000000000002
+-0.9030000000000006
+-1.244
+-1.9040000000000052
+-1.5560000000000003
+-1.232
+-1.0439999999999998
+-1.127
+-1.6950000000000003
+-1.3130000000000002
+-1.4020000000000001
+-1.304
+-1.8670000000000004
+-1.153
+-2.0870000000000006
+-1.9270000000000005
+-1.216
+-1.8860000000000006
+-1.7760000000000005
+-2.641000000000002
+-1.8000000000000005
+-1.7310000000000005
+-1.7090000000000005
+-1.185
+-1.4570000000000003
+-1.2550000000000001
+-1.4780000000000002
+-1.352
+-1.165
+-1.495
+-1.163
+-1.181
+-1.9370000000000007
+-1.266
+-2.0390000000000006
+-1.084
+-1.7390000000000005
+-2.325000000000001
+-1.0099999999999998
+-1.7210000000000005
+-1.307
+-1.0339999999999998
+-1.4180000000000001
+-1.4160000000000001
+-1.3550000000000002
+-1.312
+-1.437
+-1.4610000000000003
+-2.226000000000001
+-1.2890000000000001
+-1.19
+-1.1149999999999969
+-2.0310000000000006
+-1.3730000000000002
+-1.0139999999999998
+-1.3940000000000001
+-1.0550000000000008
+-1.2
+-1.136
+-2.6620000000000017
+-1.3010000000000002
+-1.1750000000000007
+-1.124
+-0.7790000000000005
+-1.8195000000000006
+-1.8290000000000006
+-1.174
+-2.0350000000000006
+-1.5130000000000003
+-1.143
+-2.0450000000000097
+-1.2630000000000001
+-1.093
+-2.138
+-1.061
+-1.057
+-1.6060000000000003
+-1.8410000000000004
+-2.0360000000000005
+-1.3780000000000001
+-2.0420000000000007
+-1.4120000000000004
+-1.5650000000000002
+-1.246
+-1.125
+-1.003
+-1.14
+-1.7970000000000006
+-1.6250000000000004
+-1.115
+-1.254
+-1.5880000000000005
+-1.101
+-1.5050000000000003
+-1.1640000000000001
+-1.168
+-1.195
+-1.1469999999999998
+-1.188
+-1.273
+-1.203
+-1.342
+-1.353
+-1.6340000000000006
+-2.0910000000000006
+-1.2469999999999999
+-1.6370000000000005
+-1.143
+-1.077
+-1.6940000000000004
+-1.8810000000000007
+-1.6040000000000005
+-1.6250000000000002
+-1.289
+-1.063
+-1.6450000000000005
+-1.8400000000000003
+-1.177
+-1.209
+-1.9830000000000005
+-1.5060000000000002
+-1.183
+-1.0779999999999998
+-2.131
+-1.4490000000000003
+-2.1680000000000006
+-1.9530000000000007
+-1.274
+-1.4210000000000003
+-1.278
+-1.0670000000000133
+-1.3245
+-1.7310000000000005
+-1.2180000000000002
+-1.225
+-1.9170000000000007
+-1.165
+-1.226
+-0.7590000000000006
+-1.5140000000000002
+-1.9870000000000008
+-0.748
+1.9879999999999869
+-0.3025
+0.8610000000000002
+0.6849999999999999
+-0.22699999999999987
+0.6839999999999998
+-1.1340000000000003
+1.2459999999999933
+-0.603
+-1.2730000000000001
+-1.237
+-1.105
+-1.3350000000000002
+-0.6860000000000004
+-1.0939999999999999
+-1.0119999999999998
+-1.05
+-1.169
+-1.6495000000000002
+-1.1609999999999998
+-1.1795
+-1.451
+-1.308
+-1.294
+-1.2069999999999999
+-1.054
+-1.213
+-1.2530000000000001
+-1.27
+-1.292
+-1.5530000000000004
+-1.166
+-1.104
+-1.243
+-1.2939999999999998
+-1.2289999999999999
+-1.189
+-1.163
+-1.5310000000000001
+-1.184
+-1.173
+-1.6520000000000004
+-1.071
+-1.066
+-1.185
+-1.368
+-1.076
+-1.068
+-1.5140000000000002
+-1.182
+-1.027
+-1.6230000000000002
+-1.06
+-1.4820000000000002
+-1.6210000000000004
+-1.123
+-1.1640000000000001
+-1.008
+-1.6560000000000001
+-1.117
+-1.2000000000000002
+-1.242
+-1.5040000000000002
+-1.0919999999999999
+-1.1329999999999998
+-1.134
+-1.3920000000000003
+-1.048
+-1.233
+-1.4210000000000003
+-1.178
+-1.336
+-2.608000000000004
+-1.201
+-1.144
+-1.7480000000000007
+-1.2910000000000001
+-1.221
+-1.5830000000000002
+-1.234
+-1.3250000000000002
+-1.5850000000000004
+-1.3090000000000002
+-1.4640000000000002
+-1.223
+-1.8800000000000008
+-1.065
+-1.4160000000000001
+-1.035
+-2.7420000000000027
+-1.17
+-1.176
+-1.187
+-1.284
+-1.193
+-1.043
+-1.6940000000000004
+-1.225
+-1.0239999999999998
+-1.083
+-1.6830000000000003
+-1.154
+-1.258
+-1.385
+-1.334
+-1.8080000000000005
+-1.185
+-1.3
+-1.073
+-1.1059999999999999
+-1.095
+-1.216
+-1.198
+-1.6620000000000004
+-1.3490000000000002
+-1.6865000000000003
+-1.193
+-1.244
+-1.5070000000000001
+-1.244
+-1.315
+-1.083
+-1.1320000000000001
+-1.344
+-1.213
+-1.083
+-1.038
+-1.027
+-1.172
+-1.7140000000000004
+-1.2510000000000001
+-1.3130000000000002
+-1.4220000000000002
+-2.0020000000000007
+-1.163
+-1.142
+-1.221
+-1.4130000000000003
+-1.1159999999999999
+-1.152
+-1.361
+-1.1320000000000001
+-1.212
+-1.1309999999999998
+-1.083
+-1.7970000000000006
+-1.184
+-1.27
+-1.125
+-1.134
+-1.1969999999999998
+-1.128
+-1.063
+-1.4610000000000003
+-1.1059999999999999
+-1.078
+-1.3960000000000001
+-1.1420000000000001
+-1.146
+-1.179
+-1.112
+-1.163
+-1.189
+-1.048
+-1.258
+-1.185
+-1.5270000000000001
+-1.173
+-1.173
+-1.088
+-1.2830000000000001
+-1.3639999999999999
+-1.324
+-1.185
+-1.273
+-1.2810000000000001
+-1.3400000000000003
+-1.1300000000000001
+-1.332
+-1.298
+-1.221
+-1.256
+-1.132
+-1.223
+-1.366
+-1.184
+-1.5530000000000004
+-1.5030000000000001
+-1.1520000000000001
+-1.5140000000000002
+-1.112
+-1.249
+-1.3730000000000002
+-1.2540000000000002
+-1.7430000000000005
+-1.115
+-1.4760000000000002
+-1.3940000000000001
+-1.137
+-1.057
+-1.8100000000000005
+-1.228
+-1.269
+-1.22
+-1.207
+-1.037
+-1.2089999999999999
+-1.06
+-1.0319999999999998
+-1.162
+-1.0399999999999998
+-1.5490000000000004
+-1.499
+-1.6430000000000002
+-1.091
+-1.18
+-1.2630000000000001
+-1.064
+-1.355
+-1.11
+-1.184
+-1.264
+-1.1460000000000001
+-1.243
+-1.6220000000000003
+-1.047
+-1.182
+-1.327
+-1.147
+-1.223
+-1.161
+-1.264
+-1.21
+-1.1420000000000001
+-1.4030000000000002
+-1.1
+-1.247
+-1.056
+-1.235
+-1.2830000000000001
+-1.7240000000000006
+-1.1179999999999999
+-1.4220000000000002
+-1.254
+-1.244
+-1.11
+-1.313
+-1.5650000000000004
+-1.183
+-1.4050000000000002
+-1.133
+-1.8180000000000005
+-1.324
+-1.196
+-1.072
+-1.6980000000000004
+-1.3279999999999998
+-1.4910000000000003
+-1.101
+-1.105
+-1.183
+-1.242
+-1.244
+-1.4929999999999999
+-1.111
+-1.113
+-1.125
+-1.4970000000000003
+-1.134
+-1.065
+-1.105
+-1.144
+-1.337
+-1.176
+-1.182
+-1.163
+-1.5100000000000002
+-1.4320000000000002
+-1.352
+-1.118
+-1.7560000000000004
+-1.2449999999999999
+-1.072
+-1.165
+-1.2730000000000001
+-1.2469999999999999
+-1.337
+-1.157
+-1.202
+-1.163
+-1.5500000000000003
+-1.153
+-1.078
+-1.141
+-1.246
+-1.275
+-1.133
+-2.287000000000001
+-1.202
+-1.6450000000000005
+-1.475
+-1.045
+-1.097
+-1.212
+-1.3730000000000002
+-1.0099999999999998
+-1.012
+-1.8110000000000004
+-1.153
+-1.163
+-1.384
+-1.5290000000000001
+-1.1
+-1.19
+-1.2249999999999999
+-1.262
+-1.202
+-1.175
+-1.2
+-1.306
+-1.2510000000000001
+-1.062
+-1.039
+-1.3610000000000002
+-1.043
+-1.111
+-1.15
+-1.544
+-1.353
+-1.116
+-1.8660000000000005
+-0.9919999999999999
+-1.2
+-1.079
+-1.4300000000000002
+-1.1480000000000001
+-1.094
+-1.7600000000000007
+-1.153
+-1.224
+-1.4340000000000002
+-1.174
+-1.192
+-1.1239999999999999
+-1.187
+-1.3530000000000002
+-1.188
+-1.175
+-1.4
+-1.212
+-1.267
+-1.241
+-1.278
+-1.147
+-1.095
+-1.153
+-1.5020000000000002
+-1.163
+-1.054
+-1.1019999999999999
+-1.2029999999999998
+-1.3730000000000002
+-1.182
+-1.4980000000000002
+-1.171
+-1.115
+-1.1520000000000001
+-1.026
+-1.157
+-1.153
+-1.1880000000000002
+-1.4160000000000001
+-1.6060000000000003
+-1.4940000000000002
+-1.1669999999999998
+-1.8830000000000005
+-1.1269999999999998
+-1.271
+-0.47300000000000025
+-1.6620000000000004
+-1.3250000000000002
+-1.407
+-1.214
+-1.097
+-1.123
+-1.287
+-1.133
+-1.152
+-1.163
+-1.247
+-1.304
+-1.219
+-1.051
+-1.5340000000000003
+-1.2120000000000002
+-1.055
+-1.226
+-1.3010000000000002
+-1.1360000000000001
+-1.4430000000000003
+-1.099
+-1.07
+-1.185
+-1.192
+-1.0659999999999998
+-1.249
+-1.385
+-1.193
+-1.2650000000000001
+-1.0959999999999999
+-1.15
+-1.192
+-1.161
+-1.4210000000000003
+-1.244
+-1.127
+-1.2349999999999999
+-1.212
+-1.234
+-1.4705000000000004
+-1.3820000000000001
+-1.2029999999999998
+-1.4720000000000004
+-1.336
+-1.4320000000000002
+-1.078
+-1.047
+-1.276
+-1.2750000000000001
+-1.0319999999999998
+-1.237
+-1.156
+-1.4520000000000002
+-1.162
+-1.202
+-1.284
+-1.27
+-1.343
+-1.353
+-1.1969999999999998
+-1.4020000000000001
+-1.252
+-1.419
+-1.272
+-1.293
+-1.3059999999999998
+-1.523
+-1.231
+-1.2429999999999999
+-1.105
+-1.4240000000000004
+-1.048
+-1.109
+-1.108
+-1.168
+-1.031
+-0.4090000000000002
+-1.244
+-1.2680000000000002
+-1.284
+-1.085
+-1.184
+-1.141
+-1.117
+-1.1440000000000001
+-1.311
+-1.8030000000000004
+-1.2830000000000001
+-1.247
+-1.365
+-1.082
+-1.168
+-1.067
+-1.163
+-1.184
+-1.11
+-1.042
+-1.306
+-1.073
+-1.185
+-1.137
+-1.242
+-1.1400000000000001
+-1.0999999999999999
+-1.4420000000000002
+-1.244
+-1.073
+-1.183
+-1.155
+-1.138
+-1.083
+-1.3090000000000002
+-1.373
+-1.296
+-1.286
+-1.1720000000000002
+-1.3430000000000002
+-1.342
+-1.475
+-1.327
+-1.163
+-2.212500000000001
+-1.323
+-2.330000000000001
+-1.153
+-1.234
+-1.127
+-1.092
+-1.304
+-1.176
+-1.185
+-1.242
+-1.5080000000000002
+-1.222
+-1.29
+-1.5910000000000002
+-1.165
+-1.108
+-1.113
+-1.243
+-1.1500000000000001
+-1.227
+-1.243
+-1.6630000000000003
+-1.283
+-1.207
+-1.4920000000000002
+-1.143
+-1.4745000000000004
+-1.234
+-1.115
+-1.031
+-1.242
+-1.233
+-1.6670000000000003
+-1.258
+-1.344
+-1.141
+-1.336
+-1.232
+-1.165
+-1.303
+-1.283
+-1.32
+-1.093
+-1.123
+-1.245
+-1.3440000000000003
+-1.179
+-1.211
+-1.7870000000000004
+-1.113
+-1.1400000000000001
+-1.123
+-2.4560000000000013
+-1.109
+-1.102
+-1.262
+-1.1260000000000001
+-1.098
+-0.7585000000000005
+-1.083
+-1.03
+-1.213
+-1.155
+-1.372
+-0.17700000000000002
+-1.6370000000000005
+-1.5180000000000002
+-1.5260000000000002
+-1.2049999999999998
+-1.131
+-1.091
+-1.066
+-1.7570000000000006
+-1.1320000000000001
+-1.099
+-1.37
+-1.135
+-1.07
+-1.6520000000000001
+-1.093
+-1.212
+-1.229
+-1.24
+-1.101
+-1.266
+-1.226
+-1.027
+-1.143
+-1.6020000000000003
+-1.5280000000000002
+-1.146
+-1.114
+-1.333
+-1.224
+-1.3170000000000002
+-1.7610000000000006
+-1.1920000000000002
+-1.222
+-1.5830000000000002
+-1.5990000000000002
+-1.2550000000000001
+-1.174
+-1.305
+-1.282
+-1.308
+-1.3090000000000002
+-1.182
+-1.185
+-1.133
+-1.3930000000000002
+-1.2
+-1.2020000000000002
+-1.264
+-1.141
+-1.6740000000000004
+-1.8200000000000007
+-1.134
+-1.22
+-1.37
+-1.11
+-1.4130000000000003
+-1.8420000000000005
+-1.5980000000000003
+-1.362
+-1.185
+-1.532
+-1.079
+-1.3730000000000002
+-1.133
+-1.1420000000000001
+-1.233
+-1.243
+-1.7240000000000006
+-1.244
+-1.082
+-1.194
+-1.148
+-1.2
+-1.182
+-1.1360000000000001
+-1.163
+-1.176
+-1.219
+-1.301
+-1.182
+-1.083
+-1.184
+-2.6974999999999953
+-0.2820000000000001
+-1.343
+-1.178
+-1.283
+-1.184
+-1.0579999999999998
+-1.241
+-1.26
+-1.187
+-1.312
+-1.3950000000000002
+-1.2930000000000001
+-1.1640000000000001
+-1.069
+-1.184
+-1.2730000000000001
+-1.5680000000000003
+-1.2530000000000001
+-1.411
+-1.014
+-1.0779999999999998
+-1.184
+-1.224
+-1.3690000000000002
+-1.1600000000000001
+-1.083
+-1.116
+-1.209
+-1.237
+-1.1520000000000001
+-1.059
+-1.1509999999999998
+-1.092
+-1.131
+-1.102
+-1.154
+-1.243
+-1.088
+-1.5510000000000002
+-1.055
+-1.285
+-1.096
+-1.308
+-1.142
+-1.105
+-1.274
+-1.222
+-1.241
+-1.168
+-1.4650000000000003
+-1.163
+-1.143
+-1.096
+-1.173
+-1.2650000000000001
+-1.162
+-1.119
+-1.3130000000000002
+-1.4720000000000004
+-1.13
+-1.3760000000000001
+-1.233
+-1.178
+-1.3210000000000002
+-1.5100000000000002
+-1.017
+-1.3330000000000002
+-1.185
+-1.133
+-1.1349999999999998
+-1.2049999999999998
+-1.123
+-1.039
+-1.181
+-1.223
+-1.262
+-1.7640000000000005
+-1.3
+-1.203
+-1.216
+-1.247
+-1.4540000000000002
+-1.157
+-1.186
+-1.343
+-1.5215000000000005
+-1.1280000000000001
+-1.303
+-1.203
+-1.344
+-1.105
+-1.0099999999999998
+-1.112
+-1.075
+-1.184
+-1.103
+-1.303
+-1.482
+-1.158
+-1.2770000000000001
+-1.163
+-1.163
+-1.28
+-1.153
+-1.083
+-1.081
+-1.214
+-1.242
+-1.439
+-1.203
+-1.0499999999999998
+-1.4020000000000001
+-1.271
+-1.151
+-1.5240000000000002
+-1.2730000000000001
+-1.0899999999999999
+-1.184
+-1.2510000000000001
+-1.106
+-1.213
+-1.303
+-1.229
+-1.512
+-1.184
+-1.095
+-1.271
+-1.184
+-1.1139999999999999
+-1.184
+-1.274
+-1.197
+-1.143
+-1.115
+-1.318
+-1.173
+-1.169
+-1.143
+-1.211
+-1.262
+-1.093
+-1.093
+-1.3130000000000002
+-1.083
+-2.1940000000000004
+-1.3250000000000002
+-1.119
+-1.163
+-1.113
diff --git a/Group_C/result_image/make_image.py b/Group_C/result_image/make_image.py
new file mode 100644
index 0000000..d7999d9
--- /dev/null
+++ b/Group_C/result_image/make_image.py
@@ -0,0 +1,95 @@
+# -*- coding: utf-8 -*-
+import matplotlib.pyplot as plt
+import numpy as np
+import os
+import csv
+
+
def plot(name_png, title_graph, list_xy, list_label, range_epi):
    """
    Draw a line chart from every CSV found under ./csv and save it as a PNG.

    Args:
        name_png: file name for the saved diagram (written under ./graphs).
        title_graph: title shown on the diagram.
        list_xy: [x_axis_label, y_axis_label].
        list_label: one legend label per CSV file, format: ['label1', 'label2', ...].
        range_epi: [start, end] slice of episodes to display.
    """
    list_data = load_csv()
    # list_maker = [',', '.', 'o', 'v', '^', '<', '>', '+', 'x', 'p', '*']
    # One marker per curve; ',' (pixel marker) keeps dense curves readable.
    list_maker = [',', ',', ',', ',', ',', ',', ',', ',', ',', 'p', '*']
    if len(list_data) != 0:
        plt.figure(1)
        plt.title(title_graph)
        plt.xlabel(list_xy[0])
        plt.ylabel(list_xy[1])
        for index, list_d in enumerate(list_data):
            s = len(list_d)
            # x axis: 1-based episode numbers for this curve
            x = [i + 1 for i in range(s)]
            x, y = epi_display_range(x, list_d, range_epi)
            plt.plot(x, y, label=list_label[index], marker=list_maker[index],
                     alpha=0.9, linewidth=1)

        # plt.gca().yaxis.set_major_locator(MultipleLocator(int(len(List_data[0]) / 10)))
        # Only show a legend when several curves are plotted
        if len(list_label) > 1:
            plt.legend(loc='upper left', fontsize='xx-small', frameon=False)
        png_path = get_path(name_png, 'graphs')
        plt.savefig(png_path)
        plt.show()
+
+
def load_csv():
    """Load every CSV under ./csv and return one running-average series per file.

    Files are sorted numerically by their leading number (everything but the
    last 11 characters of the file name is used as the sort key).

    Returns:
        list of lists: one averaged data series (via sum_average) per CSV file.
    """
    check_folder(['graphs'])
    path_folder = os.path.join(os.getcwd().replace('\\', '/'), 'csv')
    dirs = os.listdir(path_folder)
    dirs.sort(key=lambda x: int(x[:-11]))
    list_data = []
    for d in dirs:
        path_csv = os.path.join(path_folder, d)
        # BUG FIX: the original opened each file without ever closing it,
        # leaking one handle per CSV; use a context manager instead.
        with open(path_csv) as file:
            data = list(csv.reader(file))
        data = np.array(data).astype(float).tolist()
        average_data = sum_average(data)
        # list_data.append([r[col] for r in data for col in range(len(data[0]))])
        list_data.append(average_data)
    return list_data
+
+
def get_path(name, name_floder):
    """Return '<cwd>/<name_floder>/<name>' with backslashes in cwd normalised."""
    base = os.getcwd().replace('\\', '/')
    return os.path.join(base, name_floder, name)
+
+
def check_folder(list_name):
    """Create each named folder under the current working directory if absent."""
    current_path = os.getcwd().replace('\\', '/')
    # Iterating an empty list is a no-op, so no explicit length guard needed.
    for name in list_name:
        path_folder = os.path.join(current_path, name)
        if not os.path.exists(path_folder):
            os.mkdir(path_folder)
+
+
def sum_average(list_data):
    """Return the running (cumulative) average of the first column of list_data.

    Args:
        list_data: list of rows; only row[0] of each row is used.

    Returns:
        list where element i is the mean of rows[0..i][0]; empty input -> [].
    """
    # FIX: removed the unused local `list_data_ave` from the original.
    average = []
    running_total = 0
    for index, row in enumerate(list_data):
        running_total += row[0]
        average.append(running_total / (index + 1))
    return average
+
+
def epi_display_range(x, y, range_epi):
    """Slice both series to the [start, end) episode window given by range_epi."""
    start, end = range_epi[0], range_epi[1]
    return x[start:end], y[start:end]
+
+
if __name__ == '__main__':
    # plot('name_png', 'title_graph', ['episodes', 'result'], ['result', 'reward'], [100, 800])
    # Plot the average-reward curves of every DQN variant over episodes 50-800;
    # one legend label per CSV file found under ./csv (order must match).
    plot('Reward_rainbow', 'Rainbow DQN', ['Episodes', 'Average reward'],
         ['DQN', 'DQN with data augmentation', 'Double DQN', 'Dueling DQN',
          'DQN + noisy network', 'DQN + priority memory', 'Multi-step DQN',
          'Double DQN + priority', 'Rainbow DQN'], [50, 800])
    # plot('Reward_DQN + priority memory', 'DQN + priority memory', ['episodes', 'reward'], ['Basic DQN'])
diff --git a/Group_C/utility/action_filter.py b/Group_C/utility/action_filter.py
new file mode 100644
index 0000000..24c16be
--- /dev/null
+++ b/Group_C/utility/action_filter.py
@@ -0,0 +1,302 @@
+import random
+
+import numpy as np
+
+
def action_filter(current_state, action_list):
    """Filter the policy's proposed action to improve survival rate.

    Args:
        current_state: Pommerman observation dict (board, position, ...).
        action_list: recent action history; action_list[-1] is the action
            proposed by the policy, action_list[-2] the previous action.

    Returns:
        An int action in [0, 5]: the proposed action when it is safe enough,
        otherwise the lowest-risk alternative found.
    """
    X = current_state["position"][0]
    Y = current_state["position"][1]
    # Record the movable positions around the agent
    move_list = make_move_list(current_state, X, Y)
    # If the agent cannot move, then execute action 0 (waiting to die)
    if move_list is None:
        return 0
    # Positions reachable by each candidate action
    moveable_position = make_move_position(move_list, X, Y, action_list)
    # All bombs in visual range
    bomb_list = make_bomb_list(current_state)
    # Grade the danger level of each candidate position (0 safe ... 3 death)
    moveable_position_score = make_dangerous_list(current_state, moveable_position, bomb_list, action_list)

    # Collect the candidate actions at the lowest occupied risk level.
    # BUG FIX: the original tested `actions is None`, which is never true for
    # a list, so the risk-1/2/3 fallbacks were dead code.
    actions = []
    for risk in range(4):
        actions = [position[2] for position in moveable_position_score if position[3] == risk]
        if actions:
            # Keep the originally proposed action when it sits at an
            # acceptable risk level (0 or 1), matching the original intent.
            if risk <= 1 and action_list[-1] in actions:
                return action_list[-1]
            break

    # No candidate at any risk level: nothing to lose, act randomly.
    if not actions:
        return random.randint(0, 5)

    # If multiple actions are available, prefer a non-stop (non-zero) action.
    moving = [action for action in actions if action > 0]
    if moving:
        # BUG FIX: the original computed the non-zero subset but then sampled
        # from `actions` (which could still include 0).
        return random.sample(moving, 1)[0]
    return 0
+
+
def make_move_list(current_state, X, Y):
    """Return the list of executable (meaningful) actions at board cell (X, Y).

    Actions: 0 stop (always included), 1 up, 2 down, 3 left, 4 right,
    5 lay bomb (only with ammo and when the blast could hit wood or an agent).
    """

    def check_bomb_action(current_state):
        """Check if action 5 (lay bomb) makes sense: the blast would reach
        wood (board value 2) or an agent (10-13) before a rigid wall (1)."""
        X = current_state["position"][0]
        Y = current_state["position"][1]
        blast_strength = current_state["blast_strength"]
        # Scan left, right, up and down from the bomb position.
        # REFACTOR: the original repeated this loop four times, once per
        # direction, with only the index arithmetic changed.
        for dx, dy in ((0, -1), (0, 1), (-1, 0), (1, 0)):
            for strength in range(1, blast_strength):
                nx = X + dx * strength
                ny = Y + dy * strength
                # Stop scanning at the map boundary
                if nx < 0 or nx > 10 or ny < 0 or ny > 10:
                    break
                cell = current_state["board"][nx][ny]
                # A rigid wall blocks the blast in this direction
                if cell == 1:
                    break
                # Wood can be destroyed / an agent can be hit: bomb is useful
                if cell == 2 or cell in [10, 11, 12, 13]:
                    return True
        return False

    def check_moveable(current_state, X, Y):
        """Check whether the agent can move onto board cell (X, Y)."""
        if X < 0 or X > 10 or Y < 0 or Y > 10:
            return False
        cell = current_state["board"][X][Y]
        # Values the original treats as enterable (0, 4, 6, 7, 8 --
        # presumably passage and power-ups; confirm against pommerman constants)
        if cell in [0, 4, 6, 7, 8]:
            return True
        # A bomb cell (3) is enterable only when the agent can kick it
        if cell == 3 and current_state["can_kick"] is True:
            return True
        return False

    move_list = [0]  # stopping in place is always possible
    if check_moveable(current_state, X - 1, Y):
        move_list.append(1)  # up
    if check_moveable(current_state, X + 1, Y):
        move_list.append(2)  # down
    if check_moveable(current_state, X, Y - 1):
        move_list.append(3)  # left
    if check_moveable(current_state, X, Y + 1):
        move_list.append(4)  # right
    # Bomb only with ammo and when the blast could achieve something
    if current_state["ammo"] != 0 and check_bomb_action(current_state):
        move_list.append(5)
    return move_list
+
+
def make_move_position(move_list, X, Y, action_list):
    """Map each executable action to the position it leads to.

    Returns a list of [new_x, new_y, action, risk] rows with risk initialised
    to 0. Action 5 (lay bomb) is skipped when the previous action was also 5.
    """
    offsets = {0: (0, 0), 1: (-1, 0), 2: (1, 0), 3: (0, -1), 4: (0, 1)}
    moveable_position = []
    for action in move_list:
        if action == 5:
            if action_list[-2] != 5:
                moveable_position.append([X, Y, 5, 0])
        elif action in offsets:
            dx, dy = offsets[action]
            moveable_position.append([X + dx, Y + dy, action, 0])
    return moveable_position
+
+
def make_bomb_list(current_state):
    """Return the [x, y] coordinates of every bomb (board value 3) in view."""
    board = current_state["board"]
    return [[x, y] for x in range(11) for y in range(11) if board[x][y] == 3]
+
+
def make_dangerous_list(current_state, moveable_position, bomb_list, action_list):
    """Grade the danger of each candidate position in place.

    Risk values written into position[3]: safe 0, dangerous 1,
    high_dangerous 2, death 3. Returns the same moveable_position list.
    """

    def check_block(current_state, position_agent, position_bomb):
        # Check for obstacles between mines (bombs) and agents: wood (2) or
        # rigid (1) on the shared row/column stops the blast.
        block = False
        if position_agent[0] != position_bomb[0]:
            for index in range(1, abs(position_agent[0] - position_bomb[0])):
                # Check for wood and rigid
                if current_state["board"][min(position_agent[0], position_bomb[0]) + index][position_agent[1]] in [1, 2]:
                    block = True
                    break
        elif position_agent[1] != position_bomb[1]:
            for index in range(1, abs(position_agent[1] - position_bomb[1])):
                # Check for wood and rigid
                if current_state["board"][position_agent[0]][min(position_agent[1], position_bomb[1]) + index] in [1, 2]:
                    block = True
                    break

        return block

    # Check dead ends: right after laying a bomb (action_list[-2] == 5),
    # stepping into a cell whose three far sides are blocked by rigid/wood/bomb
    # (board values 1, 2, 3) leaves no escape, so it is marked risk 3.
    def check_dead_end(current_state, moveable_position, action_list):
        for position in moveable_position:
            if action_list[-2] == 5 and position[2] == 1:
                # Moved up: check left, right and the cell further up
                if (position[1] - 1 < 0 or current_state["board"][position[0]][position[1] - 1] in [1, 2, 3]) and \
                        (position[1] + 1 > 10 or current_state["board"][position[0]][position[1] + 1] in [1, 2, 3]) and \
                        (position[0] - 1 < 0 or current_state["board"][position[0] - 1][position[1]] in [1, 2, 3]):
                    position[-1] = 3

            elif action_list[-2] == 5 and position[2] == 2:
                # Moved down: check left, right and the cell further down
                if (position[1] - 1 < 0 or current_state["board"][position[0]][position[1] - 1] in [1, 2, 3]) and \
                        (position[1] + 1 > 10 or current_state["board"][position[0]][position[1] + 1] in [1, 2, 3]) and \
                        (position[0] + 1 > 10 or current_state["board"][position[0] + 1][position[1]] in [1, 2, 3]):
                    position[-1] = 3

            elif action_list[-2] == 5 and position[2] == 3:
                # Moved left: check up, down and the cell further left
                if (position[0] - 1 < 0 or current_state["board"][position[0] - 1][position[1]] in [1, 2, 3]) and \
                        (position[1] - 1 < 0 or current_state["board"][position[0]][position[1] - 1] in [1, 2, 3]) and \
                        (position[0] + 1 > 10 or current_state["board"][position[0] + 1][position[1]] in [1, 2, 3]):
                    position[-1] = 3

            elif action_list[-2] == 5 and position[2] == 4:
                # Moved right: check up, down and the cell further right
                if (position[0] - 1 < 0 or current_state["board"][position[0] - 1][position[1]] in [1, 2, 3]) and \
                        (position[1] + 1 > 10 or current_state["board"][position[0]][position[1] + 1] in [1, 2, 3]) and \
                        (position[0] + 1 > 10 or current_state["board"][position[0] + 1][position[1]] in [1, 2, 3]):
                    position[-1] = 3

        return moveable_position

    # Check the flame: standing where flame_life is non-zero is certain death
    def check_flame(current_state, moveable_position):
        for position in moveable_position:
            if current_state["flame_life"][position[0]][position[1]] != 0:
                position[-1] = 3
        return moveable_position

    # bomb_location_all = np.where(np.array(current_state["board"]) == 3)
    # if len(bomb_location_all[0]) == 0:
    #     return moveable_position
    moveable_position = check_dead_end(current_state, moveable_position, action_list)

    # Check for flames, even if there is no bomb in sight
    if len(bomb_list) == 0:
        moveable_position = check_flame(current_state, moveable_position)
        return moveable_position
    '''
    Moveable position is classified according to the danger level.
    when agent is threatened by multiple bombs or flames in the same location,
    the highest danger level is recorded first.
    '''
    for agent_position in moveable_position:
        for bomb_position in bomb_list:
            # Only bombs sharing a row or a column can threaten the position
            if bomb_position[0] == agent_position[0] or bomb_position[1] == agent_position[1]:
                # Threatened when nothing blocks the blast and the position is
                # within the bomb's blast radius (Manhattan distance on the
                # shared axis vs blast strength - 1)
                if check_block(current_state, agent_position, bomb_position) is False and abs(
                        agent_position[0] - bomb_position[0]) + abs(agent_position[1] - bomb_position[1]) <= \
                        (current_state["bomb_blast_strength"][bomb_position[0]][bomb_position[1]] - 1):
                    # Flame already present or bomb about to explode: death
                    if current_state["flame_life"][agent_position[0]][agent_position[1]] >= 1 or \
                            current_state["bomb_life"][bomb_position[0]][bomb_position[1]] == 1:
                        agent_position[3] = 3
                    # Bomb explodes within 2-3 steps: high danger
                    elif 1 < current_state["bomb_life"][bomb_position[0]][bomb_position[1]] <= 3:
                        if agent_position[3] < 2:
                            agent_position[3] = 2
                    # Bomb explodes in 4+ steps: mild danger
                    elif 4 <= current_state["bomb_life"][bomb_position[0]][bomb_position[1]]:
                        if agent_position[3] < 1:
                            agent_position[3] = 1

    return moveable_position
diff --git a/Group_C/utility/communication.py b/Group_C/utility/communication.py
new file mode 100644
index 0000000..206062c
--- /dev/null
+++ b/Group_C/utility/communication.py
@@ -0,0 +1,67 @@
+import numpy as np
+
+
def message(current_state):
    """
    Build the 2-value message to broadcast and fold the received message into
    the observation.

    FFA: message = coordinates of the bomb farthest from the agent.
    Radio: message = a bomb 4-7 cells away from the teammate,
    or the bomb with the maximum distance from the teammate,
    or the bomb with the maximum distance from the agent.

    Args:
        current_state: observation dict (board, position, teammate, message).

    Returns:
        ([x, y] message, current_state updated with the received message)
    """
    position_agent = current_state["position"]
    board = current_state["board"]
    teammate = current_state["teammate"].value
    position_teammate = np.argwhere(board == teammate).tolist()
    r_mess = current_state['message']

    # Swap argwhere's (row, col) pairs into (x, y) order and drop bombs on
    # the board edge.
    # BUG FIX: the original popped from the list while enumerating it, which
    # skips the element following every removal; build a filtered copy instead.
    list_position_bomb = []
    for row, col in np.argwhere(board == 3).tolist():
        if 1 <= row <= 9 and 1 <= col <= 9:
            list_position_bomb.append([col, row])

    # Add the received bomb position to the current state (coordinates were
    # sent with a -1 offset, so shift them back by +1)
    if r_mess != (0, 0):
        current_state["board"][r_mess[0] + 1, r_mess[1] + 1] = 3

    if teammate == 9 or len(position_teammate) == 0:
        # No teammate (FFA) or teammate not visible: report relative to self
        if len(list_position_bomb) != 0:
            return position_max_dis(position_agent, list_position_bomb), current_state
        else:
            return [0, 0], current_state
    else:
        if len(list_position_bomb) != 0:
            return position_max_dis_limit(position_teammate[0], list_position_bomb), current_state
        else:
            return [0, 0], current_state
+
+
def distance(x, y):
    """Euclidean distance between points x and y."""
    return np.linalg.norm(np.subtract(x, y))
+
+
def position_max_dis(x, list_y):
    """Return [px - 1, py - 1] for the element of list_y farthest from x.

    The -1 offset converts board coordinates into the message encoding.
    """
    # Euclidean distance to each candidate (inlined from the distance helper)
    distances = [np.linalg.norm(np.array(x) - np.array(y)) for y in list_y]
    farthest = list_y[int(np.argmax(np.array(distances)))]
    return [farthest[0] - 1, farthest[1] - 1]
+
+
def position_max_dis_limit(x, list_y):
    """Return the farthest position from x whose distance lies in (4, 7).

    Args:
        x: reference point.
        list_y: candidate [x, y] positions.

    Returns:
        [px - 1, py - 1] of the best candidate (message encoding offset),
        or [0, 0] when no candidate falls inside the distance window.
    """
    limit = [4, 7]
    # BUG FIX: the original took argmax over the *filtered* distance list but
    # indexed the *unfiltered* position list, returning the wrong position
    # whenever any candidate was filtered out. Keep each distance paired with
    # its position instead. (distance() inlined so the pairing is explicit.)
    candidates = [(float(np.linalg.norm(np.array(x) - np.array(y))), y) for y in list_y]
    in_range = [(d, y) for d, y in candidates if limit[0] < d < limit[1]]
    if in_range:
        best = max(in_range, key=lambda item: item[0])[1]
        return [best[0] - 1, best[1] - 1]
    return [0, 0]
\ No newline at end of file
diff --git a/Group_C/utility/constants.py b/Group_C/utility/constants.py
new file mode 100644
index 0000000..4b25f60
--- /dev/null
+++ b/Group_C/utility/constants.py
@@ -0,0 +1,26 @@
+from pommerman import constants
+
# Replay-memory / training hyper-parameters
MIN_REPLAY_MEMORY_SIZE = 1000  # Minimum number of steps in a memory to start training
MINIBATCH_SIZE = 128  # How many steps (samples) to use for training
UPDATE_EVERY = 10  # Terminal states (end of episodes)
MAX_BUFFER_SIZE = 100_000  # Replay buffer capacity
MAX_BUFFER_SIZE_PRE = 1_000_000  # Larger buffer capacity (presumably for pre-training -- confirm)
DISCOUNT = 0.95  # Reward discount factor (gamma)
MAX_STEPS = constants.MAX_STEPS  # Per-episode step limit, taken from pommerman
n_step = 5  # Horizon for multi-step (n-step) DQN transitions

# Reward thresholds (NOTE(review): units/usage not visible here -- check trainer)
Threshold_min = -0.40 / 100
Threshold_max = -0.20 / 300

# Environment settings
EPISODES = 100000
SHOW_EVERY = 1

# Exploration settings
epsilon = 1  # not a constant, going to be decayed
EPSILON_DECAY = 0.99975
MIN_EPSILON = 0.95

SHOW_PREVIEW = True
SHOW_GAME = 100  # presumably render every N episodes -- confirm against trainer
save_weight = 100  # presumably save weights every N episodes -- confirm against trainer
diff --git a/Group_C/utility/replay_memory.py b/Group_C/utility/replay_memory.py
new file mode 100644
index 0000000..852244a
--- /dev/null
+++ b/Group_C/utility/replay_memory.py
@@ -0,0 +1,116 @@
+import random
+import collections
+import numpy as np
+from Group_C.utility import constants
+
+
class replay_Memory():
    """Experience replay buffer shared by the DQN variants.

    Supports plain uniform replay, n-step transitions (multi-step DQN),
    prioritized sampling by TD error, and per-episode staging buffers used
    for data augmentation / post-processing.
    """

    def __init__(self, MAX_BUFFER_SIZE):
        self.n_step = constants.n_step
        # Main storage of (state, action, reward, next_state, done) tuples
        self.buffer = collections.deque(maxlen=MAX_BUFFER_SIZE)
        # Sliding window used to assemble n-step transitions
        self.n_step_buffer = collections.deque(maxlen=self.n_step)
        # Per-episode staging buffers
        self.buffer_episode = collections.deque()
        self.buffer_processing = collections.deque()
        # Last four actions taken, pre-filled with 0
        self.buffer_action = collections.deque([0, 0, 0, 0], maxlen=4)
        # TD errors aligned index-for-index with self.buffer
        self.buffer_td = collections.deque(maxlen=MAX_BUFFER_SIZE)
        self.alpha = 0.6  # fraction of each batch taken greedily by TD error
        self.gamma = 0.9  # affinity for long term reward

    def append(self, transition):
        """Store one (state, action, reward, next_state, done) transition."""
        self.buffer.append(transition)

    def append_n_step(self, state, action, reward, next_state, done):
        """Multi-step DQN: store an n-step transition once the window is full."""
        self.n_step_buffer.append((state, action, reward, next_state, done))
        if len(self.n_step_buffer) < self.n_step:
            return
        l_reward, l_next_state, l_done = self.n_step_buffer[-1][-3:]
        # Walk backwards, discounting; an episode end (d truthy) truncates
        # the accumulated tail at that step
        for transition in reversed(list(self.n_step_buffer)[:-1]):
            r, n_s, d = transition[-3:]
            l_reward = r + self.gamma * l_reward * (1 - d)
            l_next_state, l_done = (n_s, d) if d else (l_next_state, l_done)
        l_state, l_action = self.n_step_buffer[0][:2]
        self.buffer.append((l_state, l_action, l_reward, l_next_state, l_done))

    def append_action(self, action):
        """Record an executed action in the rolling 4-action history."""
        self.buffer_action.append(action)

    def append_augmentation(self, transition):
        """Stage a transition for data augmentation at episode end."""
        self.buffer_episode.append(transition)

    def append_processing(self, transition):
        """Stage a processed transition before merging into the main buffer."""
        self.buffer_processing.append(transition)

    def append_pri(self, state, action, reward, next_state, done, td_error):
        """Prioritized DQN: store a transition together with its TD error."""
        self.append_td(td_error)
        self.buffer.append((state, action, reward, next_state, done))

    def append_td(self, td_error):
        """Record the TD error paired with the most recent transition."""
        self.buffer_td.append(td_error)

    def clear(self):
        """Drop both per-episode staging buffers."""
        self.buffer_episode.clear()
        self.buffer_processing.clear()

    def merge(self):
        """Move every staged transition into the main buffer."""
        self.buffer.extend(self.buffer_processing)

    def merge_negative(self):
        """Move only staged transitions with a negative reward into the buffer."""
        for element in self.buffer_processing:
            if element[2] < 0:
                self.buffer.append(element)

    def sample(self, batch):
        """Return a uniform random sample of `batch` transitions."""
        return random.sample(self.buffer, batch)

    def sample_element(self, batch):
        """Uniformly sample `batch` transitions, unzipped into parallel lists.

        Returns:
            (states ndarray, actions, rewards, next_states ndarray, dones)
        """
        current_state, action, reward, new_states, done = [], [], [], [], []
        for curr_state, act, r, new_state, d in random.sample(self.buffer, batch):
            current_state.append(curr_state)
            action.append(act)
            reward.append(r)
            new_states.append(new_state)
            done.append(d)
        return np.array(current_state), action, reward, np.array(new_states), done

    def sample_element_pri(self, batch_size):
        """Prioritized sampling: the first alpha*batch_size elements are the
        transitions with the largest TD errors; the rest are uniform random.

        Returns the same tuple layout as sample_element, except action,
        reward and done entries are wrapped in single-element lists.
        """
        # Indices ordering the stored TD errors from small to large
        index = np.argsort(np.array(self.buffer_td).flatten()).tolist()
        prioritization = int(batch_size * self.alpha)  # self.alpha = 0.6
        batch_prioritized = []
        if len(index) != 0 and len(self.buffer) != 0:
            # BUG FIX: the original aliased `buffer_sort = self.buffer` and then
            # overwrote entries in place while reading them, corrupting both
            # the replay buffer and the sort. Sort into a separate list.
            buffer_sort = [self.buffer[i] for i in index]
            for i in range(prioritization):
                # Take from the tail: largest TD error first
                batch_prioritized.append(buffer_sort[-i - 1])
        # Training batch = prioritized head (TD error, large to small)
        # + uniformly sampled remainder
        mini_batch = random.sample(self.buffer, batch_size - prioritization)
        batch = batch_prioritized + mini_batch

        current_state, action, reward, new_states, done = [], [], [], [], []
        for curr_state, act, r, new_state, d in batch:
            current_state.append(curr_state)
            action.append([act])
            reward.append([r])
            new_states.append(new_state)
            done.append([d])
        return np.array(current_state), action, reward, np.array(new_states), done

    def size(self):
        """Number of transitions currently stored in the main buffer."""
        return len(self.buffer)
diff --git a/Group_C/utility/utility.py b/Group_C/utility/utility.py
new file mode 100644
index 0000000..b2958e6
--- /dev/null
+++ b/Group_C/utility/utility.py
@@ -0,0 +1,550 @@
+import numpy as np
+
+
def reward_shaping(current_state, new_state, action, result, action_list):
    """Compute a shaped reward for one (state, action, new_state) transition.

    Parameters
    ----------
    current_state, new_state : observation dicts before and after the step.
    action : int — 0 = stop, 1-4 = move, 5 = lay bomb.
    result : unused; kept for interface compatibility.
    action_list : the agent's last four actions.

    Returns the shaped reward (float).
    """
    # Hand-tuned reward / penalty constants.
    r_lose = -1

    r_wood = 0.01
    r_powerup = 0.05
    r_kick = 0.02

    r_lay_bomb = -0.005
    r_lay_bomb_near_enemy = 0.2
    r_attack_teammate = -0.1
    r_get_away_from_bomb = 0.005
    r_get_close_to_bomb = -0.01

    r_avoid = 0.001
    r_move = 0.001
    r_stay = -0.003
    r_move_towards_wood = -0.01
    r_move_loop = -0.005
    r_dead_end = -0.1
    r_ignore_penalty = -0.0015

    reward = 0

    X = current_state["position"][0]
    Y = current_state["position"][1]
    new_X = new_state["position"][0]
    new_Y = new_state["position"][1]

    # Board ids of enemies and the teammate when training from the upper
    # left corner.
    enemies = [11, 12, 13]
    teammate = [10]

    # Contents of the four cells directly adjacent to the agent (bounds-checked).
    current_grids = []
    if X - 1 >= 0:
        current_grids.append(current_state["board"][X - 1][Y])
    if X + 1 <= 10:
        current_grids.append(current_state["board"][X + 1][Y])
    if Y - 1 >= 0:
        current_grids.append(current_state["board"][X][Y - 1])
    if Y + 1 <= 10:
        current_grids.append(current_state["board"][X][Y + 1])

    # NOTE(review): a kick is detected by can_kick flipping True -> False
    # between steps — confirm this matches the environment's semantics.
    if current_state["can_kick"] is True and new_state["can_kick"] is False:
        reward += r_kick

    if action == 0:
        # Standing still: reward only when it dodges flames or bombs,
        # penalise idling otherwise.
        reward = check_avoid_flame(reward, r_avoid, current_grids)
        reward = check_corner_bomb(current_state, X, Y, reward, r_avoid, r_stay, current_grids)
        reward = check_in_flame(current_state, new_state, reward, r_lose, X, Y, new_X, new_Y)
        reward = check_and_away_from_bomb(current_state, X, Y, new_X, new_Y, reward, r_get_away_from_bomb,
                                          r_get_close_to_bomb, action_list)
        return reward

    if action in (1, 2, 3, 4):
        # All four move directions share identical shaping (the original
        # code repeated this block verbatim once per direction).
        if current_state["position"] == new_state["position"]:
            # Position unchanged: the agent walked into an obstacle.
            reward += r_move_towards_wood
        else:
            reward += r_move
        reward = check_dead_end(new_state, new_X, new_Y, action_list, reward, r_dead_end)
        reward = check_ignore_powerup(current_state, new_X, new_Y, action_list, current_grids, reward,
                                      r_ignore_penalty)
        reward = check_move_loop(action_list, reward, r_move_loop)
        reward = check_power_up(new_X, new_Y, current_state, reward, r_powerup)
        reward = check_in_flame(current_state, new_state, reward, r_lose, X, Y, new_X, new_Y)
        reward = check_and_away_from_bomb(current_state, X, Y, new_X, new_Y, reward, r_get_away_from_bomb,
                                          r_get_close_to_bomb, action_list)
        return reward

    if action == 5:
        reward = check_in_flame(current_state, new_state, reward, r_lose, X, Y, new_X, new_Y)
        reward = check_and_away_from_bomb(current_state, X, Y, new_X, new_Y, reward, r_get_away_from_bomb,
                                          r_get_close_to_bomb, action_list)
        if current_state["ammo"] != 0:
            reward += r_lay_bomb
            reward = check_bomb_reward(current_state, X, Y, reward, r_wood, r_lay_bomb_near_enemy,
                                       r_attack_teammate, enemies, teammate)
        else:
            # Laying a bomb with no ammo is a wasted turn; penalise it
            # twice as hard as bumping into a wall.
            reward += (2 * r_move_towards_wood)

    return reward
+
+
def check_bomb_reward(current_state, X, Y, reward, r_wood, r_lay_bomb_near_enemy, r_attack_teammate, enemies, teammate):
    """Reward for laying a bomb at (X, Y), based on what the blast reaches.

    Scans the four blast rays up to ``blast_strength - 1`` cells each:
    wood (2) adds ``r_wood`` once and stops the ray; a rigid wall (1)
    stops the ray; every enemy in range adds ``r_lay_bomb_near_enemy``;
    every teammate in range adds ``r_attack_teammate`` (a penalty).
    The original code repeated this loop once per direction.
    """
    board = current_state["board"]
    blast_strength = current_state["blast_strength"]
    # Unit vectors for the four blast directions: left, right, up, down.
    for dx, dy in ((0, -1), (0, 1), (-1, 0), (1, 0)):
        for strength in range(1, blast_strength):
            x, y = X + dx * strength, Y + dy * strength
            # Stop at the board boundary (fixed 11x11 board).
            if not (0 <= x <= 10 and 0 <= y <= 10):
                break
            cell = board[x][y]
            if cell == 2:  # wood absorbs the blast
                reward += r_wood
                break
            if cell == 1:  # rigid wall absorbs the blast
                break
            if cell in enemies:
                reward += r_lay_bomb_near_enemy
            elif cell in teammate:
                reward += r_attack_teammate
    return reward
+
+
def check_in_flame(current_state, new_state, reward, r_lose, X, Y, new_X, new_Y):
    """Apply the losing penalty when the agent steps into a live flame."""
    was_safe = current_state["flame_life"][X][Y] == 0
    now_in_flame = new_state["flame_life"][new_X][new_Y] != 0
    if was_safe and now_in_flame:
        reward += r_lose
    return reward
+
+
def check_power_up(new_X, new_Y, current_state, reward, r_power_up):
    """Reward the agent for stepping onto a power-up item (board ids 6-8)."""
    destination = current_state["board"][new_X][new_Y]
    if destination in (6, 7, 8):
        reward += r_power_up
    return reward
+
+
def check_corner_bomb(current_state, X, Y, reward, r_avoid, r_stay, current_grids):
    """Reward a stop action (0) used to keep distance from diagonal bombs.

    Each bomb (board id 3) found on one of the checked diagonal cells adds
    ``r_avoid``. If no diagonal bomb is found and no bomb sits on a directly
    adjacent cell either, standing still is penalised with ``r_stay``.
    """
    board = current_state["board"]
    # Checked offsets around the agent (the original spelled these out as
    # eight copy-pasted if-statements).
    # NOTE(review): the lower-left pair uses (+2, -1) while every other
    # corner follows a (+/-1, +/-2) pattern — kept as-is from the original,
    # but the asymmetry looks unintentional; confirm with the authors.
    diagonal_offsets = (
        (-1, -1), (-1, -2),  # upper left
        (-1, 1), (-1, 2),    # upper right
        (1, -1), (2, -1),    # lower left
        (1, 1), (1, 2),      # lower right
    )
    find_bomb = False
    for dx, dy in diagonal_offsets:
        x, y = X + dx, Y + dy
        if 0 <= x <= 10 and 0 <= y <= 10 and board[x][y] == 3:
            reward += r_avoid
            find_bomb = True
    if not find_bomb and 3 not in current_grids:
        reward += r_stay
    return reward
+
+
def check_and_away_from_bomb(current_state, X, Y, new_X, new_Y, reward, r_get_away_from_bomb, r_get_close_to_bomb,
                             action_list):
    """Reward moving away from nearby bombs, penalise moving toward them.

    Straight-line bombs up to three cells away are considered in each of
    the four directions (rigid walls / wood between agent and bomb shield
    the agent); diagonal bombs one cell away are handled separately.
    The original wrote every direction/distance case out by hand; this is
    the same logic folded into loops.
    """
    board = current_state["board"]

    # After laying a bomb (previous-but-one action == 5), any move off the
    # spot earns a small reward; staying put is penalised twice as hard.
    if action_list[2] == 5 and (X != new_X or Y != new_Y):
        reward += r_get_away_from_bomb
    elif action_list[2] == 5 and (X == new_X and Y == new_Y):
        reward += 2 * r_get_close_to_bomb

    # Straight-line bombs: up, down, left, right; distances 1-3.
    for dx, dy in ((-1, 0), (1, 0), (0, -1), (0, 1)):
        for d in (1, 2, 3):
            bx, by = X + dx * d, Y + dy * d
            if not (0 <= bx <= 10 and 0 <= by <= 10):
                break  # farther cells in this direction are off-board too
            if board[bx][by] != 3:
                continue
            # Rigid (1) or wood (2) between agent and bomb shields the agent.
            if any(board[X + dx * k][Y + dy * k] in (1, 2) for k in range(1, d)):
                continue
            dist = abs(bx - new_X) + abs(by - new_Y)
            if d == 1:
                if dist > 1:
                    reward += r_get_away_from_bomb
                elif dist == 1:
                    reward += 2 * r_get_close_to_bomb
            elif dist > d:
                reward += r_get_away_from_bomb
            else:
                reward += r_get_close_to_bomb

    # Diagonal bombs one cell away: Manhattan distance > 2 after the move
    # counts as escaping, < 2 as approaching (== 2 is neutral).
    for dx, dy in ((-1, -1), (1, -1), (-1, 1), (1, 1)):
        bx, by = X + dx, Y + dy
        if not (0 <= bx <= 10 and 0 <= by <= 10):
            continue
        if board[bx][by] != 3:
            continue
        dist = abs(bx - new_X) + abs(by - new_Y)
        if dist > 2:
            reward += r_get_away_from_bomb
        elif dist < 2:
            # BUG FIX: the original lower-right branch computed
            # abs((Y - 1) + new_Y) instead of abs((Y + 1) - new_Y), so
            # approaching a lower-right bomb was never penalised.
            reward += r_get_close_to_bomb

    return reward
+
+
def check_move_loop(action_list, reward, r_move_loop):
    """Penalise four-step movement patterns that go nowhere."""
    # Back-and-forth pairs and 2x2 square walks.
    loops = (
        [1, 2, 1, 2], [2, 1, 2, 1],
        [3, 4, 3, 4], [4, 3, 4, 3],
        [1, 2, 3, 4], [2, 1, 3, 4],
        [3, 4, 1, 2], [3, 4, 2, 1],
        [1, 2, 4, 3], [2, 1, 4, 3],
        [4, 3, 1, 2], [4, 3, 2, 1],
    )
    if action_list in loops:
        reward += r_move_loop
    return reward
+
+
def check_dead_end(new_state, new_X, new_Y, action_list, reward, r_dead_end):
    """Penalise walking into a dead end right after laying a bomb.

    Fires only when the previous-but-one action was laying a bomb (5) and
    the last action was a move (1-4). For each move direction, the three
    neighbouring cells that are not behind the agent are inspected; if all
    of them are blocked (off the board, rigid, wood or a bomb), the new
    position is a dead end. The original wrote the four move cases out as
    copy-pasted branches.
    """
    if action_list[2] != 5:
        return reward

    # Offsets to inspect per move action, relative to the new position:
    # everything except the cell the agent came from.
    up, down, left, right = (-1, 0), (1, 0), (0, -1), (0, 1)
    checks = {
        1: (left, right, up),
        2: (left, right, down),
        3: (up, left, down),
        4: (up, right, down),
    }
    offsets = checks.get(action_list[3])
    if offsets is None:
        return reward

    board = new_state["board"]

    def _blocked(dx, dy):
        # A cell blocks escape if it is off the 11x11 board or holds a
        # rigid wall (1), wood (2) or a bomb (3).
        x, y = new_X + dx, new_Y + dy
        return not (0 <= x <= 10 and 0 <= y <= 10) or board[x][y] in (1, 2, 3)

    if all(_blocked(dx, dy) for dx, dy in offsets):
        reward += r_dead_end
    return reward
+
+
def check_avoid_flame(reward, r_avoid, current_grids):
    """Reward standing still when a flame blocks movement and every
    neighbouring cell is impassable (rigid, wood, bomb or flame)."""
    surrounded = all(grid in (1, 2, 3, 4) for grid in current_grids)
    if 4 in current_grids and surrounded:
        reward += r_avoid
    return reward
+
+
def check_ignore_powerup(current_state, new_X, new_Y, action_list, current_grids, reward, r_ignore_penalty):
    """Penalise walking past an adjacent power-up without collecting it.

    BUG FIX: the original condition ``(6 or 7 or 8) in current_grids``
    evaluates to ``6 in current_grids`` (since ``6 or 7 or 8 == 6``), so
    power-ups with board ids 7 and 8 were never detected here.
    """
    powerup_nearby = any(grid in (6, 7, 8) for grid in current_grids)
    moved_onto_powerup = current_state["board"][new_X][new_Y] in (6, 7, 8)
    if powerup_nearby and not moved_onto_powerup and 5 not in action_list \
            and current_state["ammo"] != 0:
        reward += r_ignore_penalty

    return reward
+
+
def featurize2D(states, partially_obs=True):
    """Convert one observation dict into the (18, 11, 11) network input.

    Planes: path, rigid, wood, bomb, flame, fog (all-zero), power_up,
    agents 1-4, bomb_blast_strength, bomb_life, bomb_moving_direction,
    flame_life, and the broadcast scalars ammo / blast_strength / can_kick.

    Note: when ``partially_obs`` is true, ``states["board"]`` is replaced
    in-place with the fogged board (side effect on the caller's dict).
    """
    X = states["position"][0]
    Y = states["position"][1]
    shape = (11, 11)

    def get_partially_obs(states, X, Y):
        """Mask the board outside a 9x9 window centred on the agent with fog (5)."""
        board = np.full(shape, 5)
        # BUG FIX: iterate the full 11x11 board; the original used
        # range(10) and therefore always left row 10 and column 10 fogged,
        # even when they were inside the agent's view.
        for x in range(11):
            for y in range(11):
                if X - 4 <= x <= X + 4 and Y - 4 <= y <= Y + 4:
                    board[x][y] = states["board"][x][y]
        states["board"] = board
        return states

    def get_matrix(board, key):
        """Fetch an 11x11 float matrix from the observation dict."""
        res = board[key]
        return res.reshape(shape).astype(np.float64)

    def get_map(board, item):
        """One-hot plane: 1 where the board holds `item`, else 0."""
        plane = np.zeros(shape)
        plane[board == item] = 1
        return plane

    if partially_obs:
        states = get_partially_obs(states, X, Y)

    board = get_matrix(states, "board")

    path = get_map(board, 0)
    rigid = get_map(board, 1)
    wood = get_map(board, 2)
    bomb = get_map(board, 3)
    flame = get_map(board, 4)
    # Fog plane intentionally all-zero (the masked board already encodes
    # fog as value 5).
    fog = np.zeros(shape)
    agent1 = get_map(board, 10)
    agent2 = get_map(board, 11)
    agent3 = get_map(board, 12)
    agent4 = get_map(board, 13)

    # Single plane marking any power-up (board ids 6, 7, 8).
    power_up = np.where(np.isin(board, (6, 7, 8)), 1.0, 0.0)

    bomb_blast_strength = get_matrix(states, 'bomb_blast_strength')
    bomb_life = get_matrix(states, 'bomb_life')
    bomb_moving_direction = get_matrix(states, 'bomb_moving_direction')
    flame_life = get_matrix(states, 'flame_life')

    ammo_2D, blast_strength_2D, can_kick_2D = rebuild_1D_element(states)

    feature2D = [path, rigid, wood, bomb, flame, fog, power_up, agent1, agent2, agent3, agent4,
                 bomb_blast_strength, bomb_life, bomb_moving_direction, flame_life,
                 ammo_2D, blast_strength_2D, can_kick_2D]

    return np.array(feature2D)
+
+
def rebuild_1D_element(states):
    """Broadcast the scalar observation fields to 11x11 planes.

    Returns ``(ammo_2D, blast_strength_2D, can_kick_2D)`` where each plane
    is filled with the corresponding scalar (can_kick is cast to int).
    """
    shape = (11, 11)
    ammo_2D = np.full(shape, states["ammo"])
    blast_strength_2D = np.full(shape, states["blast_strength"])
    can_kick_2D = np.full(shape, int(states["can_kick"]))
    return ammo_2D, blast_strength_2D, can_kick_2D
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..528dc3c
--- /dev/null
+++ b/README.md
@@ -0,0 +1,71 @@
+# Architecture
+
+We were able to implement all the descendants of Rainbow DQN except Categorical DQN
+
+- [x] basic DQN
+- [x] Double DQN
+- [x] Prioritized Experience Replay
+- [x] Dueling Network Architectures
+- [x] Noisy Nets
+- [x] Multi Step Reinforcement Learning
+- [ ] Categorical DQN
+
+# Getting Started with our DQNAgent
+
+# Pre-requisites
+
+* [Python 3.6.0](https://www.python.org/downloads/release/python-360/)+ (including `pip`)
+* [Docker](https://www.docker.com/) (only needed for `DockerAgent`)
+* [tensorflow 2.6.2](https://www.tensorflow.org/hub/installation)
+* [Keras 2.6.0](https://keras.io/getting_started/)
+* Others are all included in [requirements](Group_C/requirements.txt)
+# Installation
+
+* Clone the repository
+```
+$ git clone https://github.com/oxFFFF-Q/Project_AI.git
+```
+
+## Pip
+
+* Install the `pommerman` package. This needs to be done every time the code is updated to get the
+latest modules
+```
+$ cd ~/playground
+$ pip install -U .
+```
+
+## Conda
+
+* Install the `pommerman` environment.
+```
+$ cd ~/playground
+$ conda env create -f env.yml
+$ conda activate pommerman
+```
+
+* To update the environment
+```
+$ conda env update -f env.yml --prune
+```
+
+# Launch the agent
+We have separately trained models for player 1 [Agent1](Group_C/agents/Agent1.py) and player 3 [Agent3](Group_C/agents/Agent3.py). Run [main_test.py](Group_C/main_test.py) to test them playing against two [SimpleAgent](pommerman/agents/simple_agent.py)s.
+
+# Train your agent
+
+## A Simple Example
+
+Run [main_train.py](Group_C/main_train.py) to train our final DQN model for radio team competition of two [SimpleAgent](pommerman/agents/simple_agent.py) as enemies and a [SimpleAgent](pommerman/agents/simple_agent.py) as teammate.
+
+The training will not stop automatically; it must be stopped manually, based on the reported rewards. The parameters are recorded every 100 episodes. Run [main_save_model.py](Group_C/main_save_model.py) to save the model. The name of the model is required. The best one is usually among the last few models.
+
+## Use other strategies
+
+Select other names for `strategy` in [main_train.py](Group_C/main_train.py) to try other architectures. Make sure the `strategy` in [main_save_model.py](Group_C/main_save_model.py) is consistent.
+
+
+
+# Visualize the experiment results
+
+Our experiment results are all stored in [data](Group_C/result_image/data). Run [make_image.py](Group_C/result_image/make_image.py) to get a visualization of them.
diff --git a/docs/README.md b/docs/README.md
index 126cd78..528dc3c 100755
--- a/docs/README.md
+++ b/docs/README.md
@@ -1,30 +1,33 @@
-# Getting Started
+# Architecture
+
+We were able to implement all the descendants of Rainbow DQN except Categorical DQN
+
+- [x] basic DQN
+- [x] Double DQN
+- [x] Prioritized Experience Replay
+- [x] Dueling Network Architectures
+- [x] Noisy Nets
+- [x] Multi Step Reinforcement Learning
+- [ ] Categorical DQN
+
+# Getting Started with our DQNAgent
# Pre-requisites
* [Python 3.6.0](https://www.python.org/downloads/release/python-360/)+ (including `pip`)
* [Docker](https://www.docker.com/) (only needed for `DockerAgent`)
-* [virtualenv](https://virtualenv.pypa.io/en/stable/) (optional, for isolated Python environment)
-
+* [tensorflow 2.6.2](https://www.tensorflow.org/hub/installation)
+* [Keras 2.6.0](https://keras.io/getting_started/)
+* Others are all included in [requirements](Group_C/requirements.txt)
# Installation
* Clone the repository
```
-$ git clone https://github.com/MultiAgentLearning/playground ~/playground
+$ git clone https://github.com/oxFFFF-Q/Project_AI.git
```
## Pip
-* **OPTIONAL**: Setup an isolated virtual Python environment by running the following commands
-```
-$ virtualenv ~/venv
-```
-This environment needs to be activated for usage. Any package installations will now persist
-in this virtual environment folder only.
-```
-source ~/venv/bin/activate
-```
-
* Install the `pommerman` package. This needs to be done every time the code is updated to get the
latest modules
```
@@ -46,83 +49,23 @@ $ conda activate pommerman
$ conda env update -f env.yml --prune
```
-# Examples
-
-## A Simple Example
-
-The [simple_ffa_run.py](../examples/simple_ffa_run.py) runs a sample Free-For-All game with two
-[SimpleAgent](../pommerman/agents/simple_agent.py)s and two [RandomAgent](../pommerman/agents/random_agent.py)s
-on the board.
-
-## Using A Docker Agent
-
-The above example can be extended to use [DockerAgent](../pommerman/agents/docker_agent.py) instead of a
-[RandomAgent](../pommerman/agents/random_agent.py). [examples/docker-agent](../examples/docker-agent) contains
-the code to wrap a [SimpleAgent](../pommerman/agents/simple_agent.py) inside Docker.
-
-
-* We will build a docker image with the name "pommerman/simple-agent" using the `Dockerfile` provided.
-```
-$ cd ~/playground
-$ docker build -t pommerman/simple-agent -f examples/docker-agent/Dockerfile .
-```
-
-* The agent list seen in the previous example can now be updated. Note that a `port` argument (of an unoccupied port) is
-needed to expose the HTTP server.
-```python
-agent_list = [
- agents.SimpleAgent(),
- agents.RandomAgent(),
- agents.SimpleAgent(),
- agents.DockerAgent("pommerman/simple-agent", port=12345)
-]
-```
-
-## Playing an interactive game
-
-You can also play the game! See below for an example where one [PlayerAgent](../pommerman/agents/player_agent.py)
-controls with the `arrow` keys and the other with the `wasd` keys.
+# Launch the agent
+We have separately trained models for player 1 [Agent1](Group_C/agents/Agent1.py) and player 3 [Agent3](Group_C/agents/Agent3.py). Run [main_test.py](Group_C/main_test.py) to test them playing against two [SimpleAgent](pommerman/agents/simple_agent.py)s.
+# Train your agent
-```python
-agent_list = [
- agents.SimpleAgent(),
- agents.PlayerAgent(agent_control="arrows"), # arrows to move, space to lay bomb
- agents.SimpleAgent(),
- agents.PlayerAgent(agent_control="wasd"), # W,A,S,D to move, E to lay bomb
-]
-```
-
-## Submitting an Agent.
-
-In order to submit an agent, you need to create an account at
-[pommerman.com](https://pommerman.com). You can do this by registering with your
-email address or logging in with your Github account.
-
-Once you have created an account, login and navigate to your profile -
-[Pommerman profile](https://pommerman.com/me). To submit an agent, fill in the
-form with your agent's name, an ssh git url, and the path to your agent's Docker
-file from the github repository's top level directory. Please make sure that
-your docker file builds properly beforehand.
+## A Simple Example
-Next, you will need to add an ssh deploy key to your account so we can access
-your agent's repo. This is provided to you along with instructions after
-registering the agent.
+Run [main_train.py](Group_C/main_train.py) to train our final DQN model for radio team competition of two [SimpleAgent](pommerman/agents/simple_agent.py) as enemies and a [SimpleAgent](pommerman/agents/simple_agent.py) as teammate.
-Before doing all of this, note that we use Docker to run the agents. The best example for making a Docker agent is in the repo in the examples/docker-agent directory. This *must* work in order to properly enter an agent, and we suggest using the accompanying pom_battle cli command (or equivalently run_battle.py) to test out your Docker implementation. If you are having trouble still, feel free to ask questions on our Discord channel.
+The training will not stop automatically; it must be stopped manually, based on the reported rewards. The parameters are recorded every 100 episodes. Run [main_save_model.py](Group_C/main_save_model.py) to save the model. The name of the model is required. The best one is usually among the last few models.
-## NIPS Competition Information:
+## Use other strategies
-Each competitor will submit two agents that will be teamed together. These agents can be the same one and can be in the same repository even, but we expect there to be two submissions for each entrant. We additionally expect there to be notable differences among the submissions. Similarly to the June 3rd competition, we will examine the code before running it on our servers and collusion will not be tolerated.
+Select other names for `strategy` in [main_train.py](Group_C/main_train.py) to try other architectures. Make sure the `strategy` in [main_save_model.py](Group_C/main_save_model.py) is consistent.
-The competition will be held live at NIPS 2018 in Montreal. We would prefer it if serious entrants were there, but that is not a requirement.
-## Actually Getting Started
-Here is some information that may help you more quickly develop successful agents:
+# Visualize the experiment results
-1. Two agents cannot move to the same cell. They will bounce back to their prior places if they try. The same applies to bombs. If an agent and a bomb both try to move to the same space, then the agent will succeed but the bomb will bounce back.
-2. If an agent with the can_kick ability moves to a cell with a bomb, then the bomb is kicked in the direction from which the agent came. The ensuing motion will persist until the bomb hits a wall, another agent, or the edge of the grid.
-3. When a bomb explodes, it immediately reaches its full blast radius. If there is an agent or a wall in the way, then it prematurely ends and destroys that agent or wall.
-4. If a bomb is in the vicinity of an explosion, then it will also go off. In this way, bombs can chain together.
-5. The SimpleAgent is very useful as a barometer for your own efforts. Four SimpleAgents playing against each other have a win rate of ~18% each with the remaining ~28% of the time being a tie. Keep in mind that it _can_ destroy itself. That can skew your own results if not properly understood.
+Our experiment results are all stored in [data](Group_C/result_image/data). Run [make_image.py](Group_C/result_image/make_image.py) to get a visualization of them.
diff --git a/examples/simple_ffa_run.py b/examples/simple_ffa_run.py
index b364a1f..239d7d9 100755
--- a/examples/simple_ffa_run.py
+++ b/examples/simple_ffa_run.py
@@ -1,16 +1,21 @@
'''An example to show how to set up an pommerman game programmatically'''
import pommerman
+import random
+import numpy as np
+import tensorflow as tf
from pommerman import agents
def main():
'''Simple function to bootstrap a game.
-
+
Use this as an example to set up your training env.
'''
# Print all possible environments in the Pommerman registry
print(pommerman.REGISTRY)
-
+ random.seed(1)
+ #np.random.seed(1)
+ #tf.random.set_seed(1)
# Create a set of agents (exactly four)
agent_list = [
agents.SimpleAgent(),
diff --git a/pommerman/__init__.py b/pommerman/__init__.py
index 7c6ea33..fb7ed75 100755
--- a/pommerman/__init__.py
+++ b/pommerman/__init__.py
@@ -7,7 +7,7 @@
from . import forward_model
from . import helpers
from . import utility
-from . import network
+#from . import network
gym.logger.set_level(40)
REGISTRY = None
diff --git a/pommerman/constants.py b/pommerman/constants.py
index 27b1ef4..4ca8570 100755
--- a/pommerman/constants.py
+++ b/pommerman/constants.py
@@ -26,7 +26,7 @@
117)]
# If using collapsing boards, the step at which the board starts to collapse.
FIRST_COLLAPSE = 500
-MAX_STEPS = 800
+MAX_STEPS = 300
RADIO_VOCAB_SIZE = 8
RADIO_NUM_WORDS = 2
diff --git a/pommerman/envs/v2.py b/pommerman/envs/v2.py
index a2a3985..947a56a 100755
--- a/pommerman/envs/v2.py
+++ b/pommerman/envs/v2.py
@@ -79,6 +79,7 @@ def get_observations(self):
self.observations = observations
return observations
+ # TODO radio
def step(self, actions):
personal_actions = []
radio_actions = []
diff --git a/pommerman/graphics.py b/pommerman/graphics.py
index 18e57d0..874c45a 100755
--- a/pommerman/graphics.py
+++ b/pommerman/graphics.py
@@ -9,17 +9,18 @@
from random import randint
from time import strftime
-from gym.utils import reraise
+#from gym.utils import reraise
import numpy as np
+import pyglet
from PIL import Image
-try:
- import pyglet
-except ImportError as error:
- reraise(
- suffix="Install pyglet with 'pip install pyglet'. If you want to just "
- "install all Gym dependencies, run 'pip install -e .[all]' or "
- "'pip install gym[all]'.")
+# try:
+#
+# except ImportError as error:
+# reraise(
+# suffix="Install pyglet with 'pip install pyglet'. If you want to just "
+# "install all Gym dependencies, run 'pip install -e .[all]' or "
+# "'pip install gym[all]'.")
try:
from pyglet.gl import *
diff --git a/test/DQN2Agent.py b/test/DQN2Agent.py
index 7507e47..953ac56 100644
--- a/test/DQN2Agent.py
+++ b/test/DQN2Agent.py
@@ -1,4 +1,5 @@
from pommerman.agents.simple_agent import SimpleAgent
+from pommerman.agents.random_agent import RandomAgent
import torch
import torch.nn as nn
import torch.optim as optim
@@ -9,6 +10,7 @@
import gym
import numpy as np
from utils import featurize, featurize2
+import os
from pommerman.agents import BaseAgent
from replay_buffer import ReplayBuffer, ReplayBuffer2
@@ -22,6 +24,7 @@ def __init__(self, env, args, character=characters.Bomber):
super(DQN2Agent, self).__init__(character)
self.obs_n = env.observation_space.shape[0] # output_dim
self.action_n = 6 # input_dim
+ self.action_space = [0,1,2,3,4,5]
self.env = env
self.epsilon = args.epsilon
@@ -59,38 +62,220 @@ def forward(self, state):
return qvals
def act(self, obs, action_space):
- return self.baseAgent.act(obs,self.action_n)
+ return random.randrange(0,6,1)
+ #return self.baseAgent.act(obs,self.action_n)
def dqnact(self, obs):
- action = self.eval_net.forward(obs)[0]
+ #action = self.eval_net.forward(obs)[0]
+ #result = (torch.max(action, 0)[1]).numpy()
+ lx = obs['local']
+ ax = obs['additional']
+ action = self.eval_net.forward1(lx,ax)[0]
result = (torch.max(action, 0)[1]).numpy()
return result
+ def reward(self, featurel, featurea, action, sl, sa, rewards):
+ # set up reward
+ r_wood = 0.05
+ r_powerup = 0.2
+ r_put_bomb = 0.08
+ r_win = 0.5
+ r_fail = -1
+ r_kick = 0.2
+ r_hit = -0.2
+ r_stay_in_danger = -0.2
+ r_escape = 0.2
+
+ rigid = featurel[0].numpy()
+ wood = featurel[1].numpy()
+ bomb = featurel[2].numpy()
+ power_up = featurel[3]
+ fog = featurel[4]
+ agent1 = featurel[5].numpy()
+ sagent1 = sl[5].numpy()
+ agent2 = featurel[6].numpy()
+ agent3 = featurel[7].numpy()
+ agent4 = featurel[8].numpy()
+ flame = featurel[9]
+ position0 = int(featurea[0].item())
+ position1 = int(featurea[1].item())
+ p0 = int(sa[0].item())
+ p1 = int(sa[1].item())
+ ammo = int(sa[2].item())
+ blast_strength = int(featurea[3].item())
+ can_kick = int(featurea[4].item())
+ teammate = int(featurea[5].item())
+ enemies = int(featurea[6].item())
+ message = int(featurea[7].item())
+ rewards = rewards.numpy()
+ reward = 0
+ #sagents = sl[4]
+ sbomb = sl[2].numpy()
+ action = int(action.item())
+
+
+
+ # print(agent1)
+ # print(p0, p1)
+ # print(position0, position1)
+ # print('len:', len(agent1))
+ # print('shape:', agent1.shape)
+ # print('-------------------')
+ # # reward_done
+ # # print(rewards)
+
+ if rewards == 1:
+ reward += r_win
+ if rewards == -1:
+ reward += r_fail
+
+ # reward_powerup
+ # sammo = int(sa[2].item())
+ # if ammo > 1 and ammo > sammo:
+ # reward += r_powerup
+ # sstrength = int(sa[3].item())
+ # if blast_strength > sstrength:
+ # reward += r_powerup
+ # skick = int(sa[4].item())
+ # if can_kick and not skick:
+ # reward += r_powerup
+
+ # reward_wood
+ if ammo > 0 and action == 5:
+ bomb_flame = self.build_flame(position0, position1, rigid, blast_strength)
+ num_wood = np.count_nonzero(wood*bomb_flame == 1)
+ reward += num_wood*r_wood
+
+ # reward_kick
+ # if sbomb[position0, position1] == 1 and rewards != -1:
+ # reward += r_kick
+ # reward_hit_wood
+ if action>0 and action<5:
+ if (p0,p1) == (position0,position1):
+ reward += r_hit
+
+ # reward_escape
+ bomb_list = []
+ for i in range(11):
+ for j in range(11):
+ if sbomb[i,j] == 1:
+ bomb_list.append((i,j))
+
+ for b in bomb_list:
+ bomb_flame1 = self.build_flame(b[0], b[1], rigid, 8)
+ if bomb_flame1[position0, position1] == 1:
+ reward += r_stay_in_danger
+ #print(bomb_flame1)
+
+
+
+ # reward escape from bomb
+
+ """
+ exist_bomb = []
+ for row, rowbomb in enumerate(bomb):
+ for col, _ in enumerate(rowbomb):
+ if bomb[row, col] == 1:
+ exist_bomb.append((row, col))
+ #print(bomb)
+ #print(exist_bomb)
+
+ if exist_bomb:
+ for ebomb in exist_bomb:
+ bomb_flame1 = self.build_flame(ebomb[0], ebomb[1], rigid, blast_strength)
+ if bomb_flame1[position0, position1] == 1:
+ reward -= 0.5
+ #print(bomb_flame1)
+ """
+ return reward
+
+ def build_flame(self, position0, position1, rigid, blast_strength):
+
+ position_bomb = np.array([position0,position1])
+ m = position_bomb[0]
+ n = position_bomb[1]
+ l = blast_strength
+ f = [l,l,l,l] # Scope of flame: up down left right
+ bomb_flame = np.zeros_like(rigid)
+
+ # 判断实体墙或边界是否阻断火焰
+ flame_up = np.zeros_like(bomb_flame)
+ flame_down = np.zeros_like(bomb_flame)
+ flame_left = np.zeros_like(bomb_flame)
+ flame_right = np.zeros_like(bomb_flame)
+ if m - f[0] < 0: # 上边界
+ f[0] = m
+ flame_up[m - f[0]:m, n] = 1
+ if m + f[1] > bomb_flame.shape[0] - 1: # 下边界
+ f[1] = bomb_flame.shape[0] - 1 - m
+ flame_down[m + 1:m + f[1] + 1, n] = 1
+ if n - f[2] < 0: # 左边界
+ f[2] = n
+ flame_left[m, n - f[2]:n] = 1
+ if n + f[3] > bomb_flame.shape[0] - 1: # 右边界
+ f[3] = bomb_flame.shape[0] - 1 - n
+ flame_right[m, n + 1:n + f[3] + 1] = 1
+
+ rigid_0 = flame_up * rigid
+ rigid_1 = flame_down * rigid
+ rigid_2 = flame_left * rigid
+ rigid_3 = flame_right * rigid
+ if np.argwhere(rigid_0==1).size != 0: # 上实体墙
+ rigid_up = np.max(np.argwhere(rigid_0==1)[:,0][0])
+ if rigid_up >= m-f[0]:
+ f[0] = m - rigid_up - 1
+ if np.argwhere(rigid_1==1).size != 0: # 下实体墙
+ rigid_down = np.min(np.argwhere(rigid_1 == 1)[:, 0][0])
+ if rigid_down <= m+f[1]:
+ f[1] = rigid_down - m - 1
+ if np.argwhere(rigid_2==1).size != 0: # 左实体墙
+ rigid_left = np.max(np.argwhere(rigid_2 == 1)[0, :][1])
+ if rigid_left >= n-f[2]:
+ f[2] = n - rigid_left - 1
+ if np.argwhere(rigid_3==1).size != 0: # 右实体墙
+ rigid_right = np.min(np.argwhere(rigid_3 == 1)[0, :][1])
+ if rigid_right <= n+f[3]:
+ f[3] = rigid_right - n - 1
+ bomb_flame[m-f[0]:m+f[1]+1, n] = 1
+ bomb_flame[m, n-f[2]:n+f[3]+1] = 1
+
+ return bomb_flame
+
def update(self, gamma, batch_size):
if self.learn_step_counter % 10 == 0:
self.target_net.load_state_dict(self.eval_net.state_dict())
self.learn_step_counter += 1
- states, actions, rewards, next_states, done = self.buffer.sample2(batch_size)
- action_index = actions.squeeze(-2)[:,0].unsqueeze(1)
- curr_Q_batch = self.eval_net(states)[:,0]
- curr_Q = curr_Q_batch.gather(1, action_index).squeeze(-1)
- #print(curr_Q)
+ statesl, statesa, actions, rewards, next_statesl, next_statesa, done = self.buffer.sample(batch_size)
+ #print(rewards)
+ #print(actions)
+ action_index = actions.squeeze(-2)
+ #print(action_index)
+ curr_Q_batch = self.eval_net(statesl, statesa)#[:,0]
+ curr_Q = curr_Q_batch.gather(1, action_index.type(torch.int64)).squeeze(-1)
- next_batch = self.target_net(next_states)[:,0]
+ next_batch = self.target_net(next_statesl, next_statesa)#[:,0]
next_Q = torch.max(next_batch,1)[0]
- #print(next_Q)
+ #计算reward
+ computed_reward = []
+ for l, a, action, sl, sa, re in zip(next_statesl, next_statesa, actions, statesl, statesa, rewards):
+ computed_reward.append(self.reward(l, a, action, sl, sa, re))
+
- rewards_batch = rewards.squeeze(-2)[:,0]
+ #这是得到的reward
+ computed_reward = torch.tensor(computed_reward)
+
+ rewards_batch = computed_reward
#print(rewards_batch)
+
+
# expected_Q = rewards + self.gamma * torch.max(next_Q, 1)
- expected_Q = gamma * next_Q + rewards_batch
-
+ expected_Q = (gamma * next_Q + rewards_batch) * ~done + done * rewards_batch
# max_q_prime = next_Q.max(1)[0].unsqueeze(1)
# expected_Q = done * (rewards + gamma * max_q_prime) + (1 - done) * 1 / (1 - gamma) * rewards
# expected_Q = done * (rewards + gamma * max_q_prime) + 1 / (1 - gamma) * rewards
- loss = self.MSE_loss(curr_Q, expected_Q) # TODO: try Huber Loss later too
-
+ loss = self.MSE_loss(curr_Q, expected_Q[0]) # TODO: try Huber Loss later too
+ # print('loss:', loss)
self.optim.zero_grad()
loss.backward()
self.optim.step()
@@ -98,19 +283,58 @@ def update(self, gamma, batch_size):
def epsdecay(self):
self.epsilon = self.epsilon * self.eps_decay if self.epsilon > self.min_eps else self.epsilon
+ # def lrdecay(self,epi):
+ # epi = self.epi
+ # self.lr = self.lr * self.lr_decay ** (epi) if s
+
+ def save_model(self):
+ torch.save({'dqn2Net': self.eval_net.state_dict(),'optimizer2_state_dict': self.optim.state_dict()}, 'model_dqn2.pt')
+
+ def load_model(self):
+ if os.path.exists('model_dqn2.pt'):
+ state_dict = torch.load('model_dqn2.pt')
+ self.eval_net.load_state_dict(state_dict['dqn2Net'])
+ self.optim.load_state_dict(state_dict['optimizer2_state_dict'])
+ self.target_net.load_state_dict(self.eval_net.state_dict())
class Net2(nn.Module):
def __init__(self,env):
super(Net2,self).__init__()
- self.obs_n = env.observation_space.shape[0]
- self.fc1 = nn.Linear(374,128)
- self.fc1.weight.data.normal_(0, 0.1)
- self.out = nn.Linear(128,6)
- self.out.weight.data.normal_(0, 0.1)
-
- def forward(self, x):
- x = torch.FloatTensor(x)
- x = self.fc1(x)
- x = F.relu(x)
- qvals = self.out(x)
- return qvals
\ No newline at end of file
+ self.features = nn.Sequential(
+ nn.Conv2d(14,32,2,stride=1,padding=1),
+ nn.ReLU(),
+ nn.Conv2d(32,64,3,stride=1,padding=1),
+ nn.ReLU(),
+ nn.Conv2d(64,64,3,stride=1,padding=1),
+ nn.AdaptiveAvgPool2d(1)
+ )
+ self.fc = nn.Sequential(
+ nn.Linear(75, 32),
+ nn.ReLU(),
+ nn.Linear(32, 16),
+ nn.ReLU(),
+ nn.Linear(16, 6)
+ )
+
+ def forward(self, lx, ax):
+ lx = torch.FloatTensor(lx)
+ ax = torch.FloatTensor(ax)
+ #lx = lx.unsqueeze(3)
+ #print(lx[0])
+ #x = torch.unsqueeze(x, dim=0).float()
+ lx = self.features(lx)
+ lx = lx.view(lx.size(0), -1)
+ outx = torch.cat((lx, ax), 1)
+ out = self.fc(outx)
+ return out
+
+ def forward1(self, lx, ax):
+ lx = torch.FloatTensor(lx)
+ ax = torch.FloatTensor(ax)
+ lx = lx.unsqueeze(0)
+ lx = self.features(lx)
+ lx = lx.view(lx.size(0), -1)
+ ax = ax.unsqueeze(0)
+ outx = torch.cat((lx, ax), 1)
+ out = self.fc(outx)
+ return out
\ No newline at end of file
diff --git a/test/DQNAgent.py b/test/DQNAgent.py
index 73a9f62..c7381a4 100644
--- a/test/DQNAgent.py
+++ b/test/DQNAgent.py
@@ -1,4 +1,4 @@
-from pommerman.agents.simple_agent import SimpleAgent
+from pommerman.agents.simple_agent import SimpleAgent #random_agent
import torch
import torch.nn as nn
import torch.optim as optim
@@ -8,9 +8,11 @@
import random
import numpy as np
from utils import featurize
+import os
from pommerman.agents import BaseAgent
from replay_buffer import ReplayBuffer, ReplayBuffer2
+from plot import plot_reward
class DQNAgent(BaseAgent):
@@ -67,121 +69,167 @@ def dqnact(self, obs):
result = (torch.max(action, 0)[1]).numpy()
return result
- def reward(self, featurel, featurea, action, sl, sa, epistep):
+ def reward(self, featurel, featurea, action, sl, sa, rewards):
# set up reward
- r_wood = 0.1
+ r_wood = 0.05
+ r_powerup = 0.2
+ r_put_bomb = 0.08
+ r_win = 0.5
+ r_fail = -1
+ r_kick = 0.2
+ r_hit = -0.2
rigid = featurel[0].numpy()
wood = featurel[1].numpy()
- bomb = featurel[2]
- agents = featurel[4]
+ bomb = featurel[2].numpy()
+ agents = featurel[4].numpy()
power_up = featurel[3]
+ flame = featurel[9]
position0 = int(featurea[0].item())
position1 = int(featurea[1].item())
+ p0 = int(sa[0].item())
+ p1 = int(sa[1].item())
ammo = int(featurea[2].item())
blast_strength = int(featurea[3].item())
can_kick = int(featurea[4].item())
teammate = int(featurea[5].item())
enemies = int(featurea[6].item())
+ rewards = rewards.numpy()
reward = 0
- sagents = sl[4]
- sbombs = sl[2]
- es = epistep.tolist()
- es[1] += 2
+ #sagents = sl[4]
+ sbomb = sl[2].numpy()
+ action = int(action.item())
+
+ # reward_done
+ #print(rewards)
+ if rewards == 1:
+ reward += r_win
+ if rewards == -1:
+ reward += r_fail
+
+ # reward_powerup
+ sammo = int(sa[2].item())
+ if ammo > 1 and ammo > sammo:
+ reward += r_powerup
+ sstrength = int(sa[3].item())
+ if blast_strength > sstrength:
+ reward += r_powerup
+ skick = int(sa[4].item())
+ if can_kick and not skick:
+ reward += r_powerup
+ #print(action)
# reward_wood
- if int(action[0].item()) == 5:
- position_bomb = np.array([position0,position1])
- m = position_bomb[0]
- n = position_bomb[1]
- l = blast_strength
- f = [l,l,l,l] # Scope of flame: up down left right
- bomb_flame = np.zeros_like(bomb.numpy())
-
- # 判断实体墙或边界是否阻断火焰
- flame_up = np.zeros_like(bomb_flame)
- flame_down = np.zeros_like(bomb_flame)
- flame_left = np.zeros_like(bomb_flame)
- flame_right = np.zeros_like(bomb_flame)
- if m - f[0] < 0: # 上边界
- f[0] = m
- flame_up[m - f[0]:m, n] = 1
- if m + f[1] > bomb_flame.shape[0] - 1: # 下边界
- f[1] = bomb_flame.shape[0] - 1 - m
- flame_down[m + 1:m + f[1] + 1, n] = 1
- if n - f[2] < 0: # 左边界
- f[2] = n
- flame_left[m, n - f[2]:n] = 1
- if n + f[3] > bomb_flame.shape[0] - 1: # 右边界
- f[3] = bomb_flame.shape[0] - 1 - n
- flame_right[m, n + 1:n + f[3] + 1] = 1
-
- rigid_0 = flame_up * rigid
- rigid_1 = flame_down * rigid
- rigid_2 = flame_left * rigid
- rigid_3 = flame_right * rigid
- if np.argwhere(rigid_0==1).size != 0: # 上实体墙
- rigid_up = np.max(np.argwhere(rigid_0==1)[:,0][0])
- if rigid_up >= m-f[0]:
- f[0] = m - rigid_up - 1
- if np.argwhere(rigid_1==1).size != 0: # 下实体墙
- rigid_down = np.min(np.argwhere(rigid_1 == 1)[:, 0][0])
- if rigid_down <= m+f[1]:
- f[1] = rigid_down - m - 1
- if np.argwhere(rigid_2==1).size != 0: # 左实体墙
- rigid_left = np.max(np.argwhere(rigid_2 == 1)[0, :][1])
- if rigid_left >= n-f[2]:
- f[2] = n - rigid_left - 1
- if np.argwhere(rigid_3==1).size != 0: # 右实体墙
- rigid_right = np.min(np.argwhere(rigid_3 == 1)[0, :][1])
- if rigid_right <= n+f[3]:
- f[3] = rigid_right - n - 1
- bomb_flame[m-f[0]:m+f[1]+1, n] = 1
- bomb_flame[m, n-f[2]:n+f[3]+1] = 1
+ if action == 5:
+ reward += r_put_bomb
+ bomb_flame = self.build_flame(position0, position1, rigid, blast_strength)
num_wood = np.count_nonzero(wood*bomb_flame == 1)
reward += num_wood*r_wood
- '''
- # test
- print('rigid')
- print(rigid)
- print('position_bomb')
- print(position_bomb)
- print('f')
- print(f)
- print('l')
- print(l)
- print('bomb_flame')
- print(bomb_flame)
- print('num_wood')
- print(num_wood)
- print('-------------------------------------')
- '''
+
+ # reward_kick
+ if sbomb[position0, position1] == 1 and rewards != -1:
+ reward += r_kick
+ # reward_hit_wood
+ if action>0 or action<5:
+ if (p0,p1) == (position0,position1):
+ reward += r_hit
+ """
+ exist_bomb = []
+ for row, rowbomb in enumerate(bomb):
+ for col, _ in enumerate(rowbomb):
+ if bomb[row, col] == 1:
+ exist_bomb.append((row, col))
+ #print(bomb)
+ #print(exist_bomb)
+
+ if exist_bomb:
+ for ebomb in exist_bomb:
+ bomb_flame1 = self.build_flame(ebomb[0], ebomb[1], rigid, blast_strength)
+ if bomb_flame1[position0, position1] == 1:
+ reward -= 0.5
+ #print(bomb_flame1)
+ """
+
+
return reward
- def update(self, gamma, batch_size,episode, step):
- #每走十步学习一次
+ def build_flame(self, position0, position1, rigid, blast_strength):
+
+ position_bomb = np.array([position0,position1])
+ m = position_bomb[0]
+ n = position_bomb[1]
+ l = blast_strength - 1
+ f = [l,l,l,l] # Scope of flame: up down left right
+ bomb_flame = np.zeros_like(rigid)
+
+ # 判断实体墙或边界是否阻断火焰
+ flame_up = np.zeros_like(bomb_flame)
+ flame_down = np.zeros_like(bomb_flame)
+ flame_left = np.zeros_like(bomb_flame)
+ flame_right = np.zeros_like(bomb_flame)
+ if m - f[0] < 0: # 上边界
+ f[0] = m
+ flame_up[m - f[0]:m, n] = 1
+ if m + f[1] > bomb_flame.shape[0] - 1: # 下边界
+ f[1] = bomb_flame.shape[0] - 1 - m
+ flame_down[m + 1:m + f[1] + 1, n] = 1
+ if n - f[2] < 0: # 左边界
+ f[2] = n
+ flame_left[m, n - f[2]:n] = 1
+ if n + f[3] > bomb_flame.shape[0] - 1: # 右边界
+ f[3] = bomb_flame.shape[0] - 1 - n
+ flame_right[m, n + 1:n + f[3] + 1] = 1
+
+ rigid_0 = flame_up * rigid
+ rigid_1 = flame_down * rigid
+ rigid_2 = flame_left * rigid
+ rigid_3 = flame_right * rigid
+ if np.argwhere(rigid_0==1).size != 0: # 上实体墙
+ rigid_up = np.max(np.argwhere(rigid_0==1)[:,0][0])
+ if rigid_up >= m-f[0]:
+ f[0] = m - rigid_up - 1
+ if np.argwhere(rigid_1==1).size != 0: # 下实体墙
+ rigid_down = np.min(np.argwhere(rigid_1 == 1)[:, 0][0])
+ if rigid_down <= m+f[1]:
+ f[1] = rigid_down - m - 1
+ if np.argwhere(rigid_2==1).size != 0: # 左实体墙
+ rigid_left = np.max(np.argwhere(rigid_2 == 1)[0, :][1])
+ if rigid_left >= n-f[2]:
+ f[2] = n - rigid_left - 1
+ if np.argwhere(rigid_3==1).size != 0: # 右实体墙
+ rigid_right = np.min(np.argwhere(rigid_3 == 1)[0, :][1])
+ if rigid_right <= n+f[3]:
+ f[3] = rigid_right - n - 1
+ bomb_flame[m-f[0]:m+f[1]+1, n] = 1
+ bomb_flame[m, n-f[2]:n+f[3]+1] = 1
+
+ return bomb_flame
+
+ def update(self, gamma, batch_size):
if self.learn_step_counter % 10 == 0:
self.target_net.load_state_dict(self.eval_net.state_dict())
self.learn_step_counter += 1
- statesl, statesa, actions, rewards, next_statesl, next_statesa, done, epistep = self.buffer.sample(batch_size)
+ statesl, statesa, actions, rewards, next_statesl, next_statesa, done = self.buffer.sample(batch_size)
#print(epistep)
-
#计算reward
- computed_reward = []
- for l, a, action, sl, sa, es in zip(next_statesl, next_statesa, actions, statesl, statesa, epistep):
- computed_reward.append(self.reward(l, a, action[0], sl, sa, es))
+ compute_reward = []
+ for l, a, action, sl, sa, re in zip(next_statesl, next_statesa, actions, statesl, statesa, rewards):
+ compute_reward.append(self.reward(l, a, action, sl, sa, re))
#这是得到的reward
- computed_reward = torch.tensor(computed_reward)
- action_index = actions.squeeze(-2)[:,0].unsqueeze(1)
+ computed_reward = torch.tensor(compute_reward)
+ #print(actions)
+
+ action_index = actions.squeeze(-2)#.unsqueeze(1)
curr_Q_batch = self.eval_net(statesl,statesa)#[:,0]
#print(curr_Q_batch)
curr_Q = curr_Q_batch.gather(1, action_index.type(torch.int64)).squeeze(-1)
-
+
next_batch = self.target_net(next_statesl, next_statesa)#[:,0]
next_Q = torch.max(next_batch,1)[0]
- rewards_batch = rewards.squeeze(-2)[:,0]
+ #rewards_batch = rewards.squeeze(-2)[:,0]
+ rewards_batch = computed_reward
#print(rewards_batch)
# expected_Q = rewards + self.gamma * torch.max(next_Q, 1)
#需要把done计算进去
@@ -191,40 +239,47 @@ def update(self, gamma, batch_size,episode, step):
# expected_Q = done * (rewards + gamma * max_q_prime) + (1 - done) * 1 / (1 - gamma) * rewards
# expected_Q = done * (rewards + gamma * max_q_prime) + 1 / (1 - gamma) * rewards
loss = self.MSE_loss(curr_Q, expected_Q[0]) # TODO: try Huber Loss later too
-
self.optim.zero_grad()
loss.backward()
self.optim.step()
+
def epsdecay(self):
self.epsilon = self.epsilon * self.eps_decay if self.epsilon > self.min_eps else self.epsilon
- def compute_reward(self, local, additional, epistep):
- m = self.buffer.get(tuple(epistep.tolist()))
-
- return 0
-
+ def save_model(self):
+ torch.save({'dqnNet': self.eval_net.state_dict(),'optimizer_state_dict': self.optim.state_dict()}, 'model_dqn.pt')
+
+ def load_model(self):
+ if os.path.exists('model_dqn.pt'):
+ state_dict = torch.load('model_dqn.pt')
+ self.eval_net.load_state_dict(state_dict['dqnNet'])
+ self.optim.load_state_dict(state_dict['optimizer_state_dict'])
+ self.target_net.load_state_dict(self.eval_net.state_dict())
class Net1(nn.Module):
def __init__(self):
+ # 初始化数组
super(Net,self).__init__()
+ # 此步骤是官方要求
"""
self.conv1=nn.Conv2d(199,16,2,stride=1,padding=1)
self.conv2=nn.Conv2d(16,32,3,stride=1,padding=1)
"""
- self.fc1 = nn.Linear(199,128)
+ self.fc1 = nn.Linear(199,128) #设置输入层到隐藏层的函数
self.fc1.weight.data.normal_(0, 0.1)
- self.out = nn.Linear(128,6)
+ self.out = nn.Linear(128,6) #设置隐藏层到输出层的函数
self.out.weight.data.normal_(0, 0.1)
def forward(self, x):
+ # 定义向前传播函数
x = torch.FloatTensor(x)
#x = torch.unsqueeze(x, dim=0).float()
#x=self.conv1(x)
#x=self.conv2(x)
x = self.fc1(x)
- x = F.relu(x)
- out = self.out(x)
+ x = F.relu(x) #给x加权成为a,用激励函数将a变成特征b
+ out = self.out(x) #给b加权,预测最终结果
return out
@@ -233,7 +288,7 @@ class Net(nn.Module):
def __init__(self):
super(Net,self).__init__()
self.features = nn.Sequential(
- nn.Conv2d(9,32,2,stride=1,padding=1),
+ nn.Conv2d(14,32,2,stride=1,padding=1),
nn.ReLU(),
nn.Conv2d(32,64,3,stride=1,padding=1),
nn.ReLU(),
@@ -274,3 +329,4 @@ def forward1(self, lx, ax):
out = self.fc(outx)
return out
+
diff --git a/test/__pycache__/DQN2Agent.cpython-36.pyc b/test/__pycache__/DQN2Agent.cpython-36.pyc
index a41f4f4..6ec74e9 100644
Binary files a/test/__pycache__/DQN2Agent.cpython-36.pyc and b/test/__pycache__/DQN2Agent.cpython-36.pyc differ
diff --git a/test/__pycache__/DQNAgent.cpython-36.pyc b/test/__pycache__/DQNAgent.cpython-36.pyc
index c056579..2ac0a33 100644
Binary files a/test/__pycache__/DQNAgent.cpython-36.pyc and b/test/__pycache__/DQNAgent.cpython-36.pyc differ
diff --git a/test/__pycache__/plot.cpython-36.pyc b/test/__pycache__/plot.cpython-36.pyc
new file mode 100644
index 0000000..b28f4dd
Binary files /dev/null and b/test/__pycache__/plot.cpython-36.pyc differ
diff --git a/test/__pycache__/replay_buffer.cpython-36.pyc b/test/__pycache__/replay_buffer.cpython-36.pyc
index fd53247..cbd41d7 100644
Binary files a/test/__pycache__/replay_buffer.cpython-36.pyc and b/test/__pycache__/replay_buffer.cpython-36.pyc differ
diff --git a/test/__pycache__/utils.cpython-36.pyc b/test/__pycache__/utils.cpython-36.pyc
index ce756e8..90f47b7 100644
Binary files a/test/__pycache__/utils.cpython-36.pyc and b/test/__pycache__/utils.cpython-36.pyc differ
diff --git a/test/img/winrate.png b/test/img/winrate.png
new file mode 100644
index 0000000..a6d6848
Binary files /dev/null and b/test/img/winrate.png differ
diff --git a/test/main.py b/test/main.py
index e98e818..284a7d5 100644
--- a/test/main.py
+++ b/test/main.py
@@ -4,22 +4,26 @@
import argparse
import random
import numpy as np
+import collections
from pommerman import agents
from pommerman.configs import one_vs_one_env
from DQNAgent import DQNAgent
-from utils import featurize, CustomEnvWrapper
+from DQN2Agent import DQN2Agent
+from utils import featurize
+import os
+from plot import plot_win_rate
def main():
"""解析参数"""
parser = argparse.ArgumentParser(description='DQN pommerman MARL')
- parser.add_argument('--episodes', type=int, default=3000, help='episodes')
+ parser.add_argument('--episodes', type=int, default=5000, help='episodes')
parser.add_argument('--maxsteps', type=int, default=200, help='maximum steps')
parser.add_argument('--showevery', type=int, default=1, help='report loss every n episodes')
- parser.add_argument('--epsilon', type=float, default=0.05, help='parameter for epsilon greedy')
+ parser.add_argument('--epsilon', type=float, default=0.9, help='parameter for epsilon greedy')
parser.add_argument('--eps_decay', type=float, default=0.995, help='epsilon decay rate')
parser.add_argument('--min_eps', type=float, default=0.05, help='minimum epsilon for decaying')
parser.add_argument('--gamma', type=float, default=0.95, help='gamma')
@@ -27,9 +31,10 @@ def main():
parser.add_argument('--capacity', type=int, default=100000, help='capacity for replay buffer')
parser.add_argument('--batch', type=int, default=201, help='batch size for replay buffer')
- parser.add_argument('--tryepi', type=int, default=5, help='episode for agent to gain experience')
+ parser.add_argument('--tryepi', type=int, default=50, help='episode for agent to gain experience')
parser.add_argument('--gpu', type=str, default='0', help='gpu number')
-
+ parser.add_argument('--win_in_epi', type=int, default='500', help='calculate win in epi..')
+ parser.add_argument('--ranepi', type=int, default='2000', help='agent go random action in epi..')
args = parser.parse_args()
# GPU
@@ -39,65 +44,96 @@ def main():
agent_list = [agents.SimpleAgent(), agents.SimpleAgent()] # placeholder
env = pommerman.make('OneVsOne-v0', agent_list)
- agent1 = DQNAgent(env, args) # TODO: assertionerror; not agents.BaseAgent??
+ agent1 = DQN2Agent(env, args) # TODO: assertionerror; not agents.BaseAgent??
agent2 = agents.SimpleAgent()
agent_list = [agent1, agent2]
env = pommerman.make('OneVsOne-v0', agent_list)
- episode_rewards = []
- action_n = env.action_space.n
+ # plot
+ list_win = []
+
+ #episode_rewards = []
+ #action_n = env.action_space.n
- win = 0
+ # 加载模型
+ #agent1.load_model()
+ # collect win times
+ if os.path.exists('model_dqn.pt'):
+ args.tryepi = 0
+ args.ranepi = 0
+ args.epsilon = 0.1
+ win_buffer = collections.deque(maxlen=args.win_in_epi)
for episode in range(args.episodes):
states = env.reset()
done = False
- episode_reward = 0
+ #episode_reward = 0
+ step = 0
for step in range(args.maxsteps):
- state_feature = featurize(env, states)
+ state_feature = featurize(env, states[0])
# 刷新环境
- if episode > (args.episodes - 10):
+ if episode % 100 == 0 and episode != 0:
env.render()
# 选择action
- if (episode <= args.tryepi) or (args.epsilon > random.random()):
- actions = env.act(states)
- else:
- actions = env.act(states)
- dqn_action = agent1.dqnact(state_feature)
- actions[0] = int(np.int64(dqn_action))
+
+ actions = env.act(states)
+ dqn_action = agent1.dqnact(state_feature)
+ actions[0] = int(np.int64(dqn_action))
+ #print(actions[0])
+
next_state, reward, done, info = env.step(actions) # n-array with action for each agent
- next_state_feature = featurize(env, next_state)
- episode_reward += reward[0]
+ next_state_feature = featurize(env, next_state[0])
+ #episode_reward += reward[0]
# 存储记忆
- agent1.buffer.append([state_feature, actions, reward, next_state_feature, done], episode, step)
+ agent1.buffer.append([state_feature, actions[0], reward[0], next_state_feature, done])
# 先走batch步之后再开始学习
- if episode >= args.tryepi:
- agent1.update(args.gamma, args.batch,episode, step)
+ if episode > args.tryepi and agent1.buffer.size() >= args.batch:
+ agent1.update(args.gamma, args.batch)
# 更新state
states = next_state
if done:
break
-
+ '''
if done:
episode_rewards.append(episode_reward)
+ '''
if episode % args.showevery == 0:
- print(f"Episode: {episode + 1:2d} finished, result: {'Win' if 0 in info.get('winners', []) else 'Lose'}")
+ if 0 in info.get('winners', []):
+ print(f"Episode: {episode + 1:2d} finished, result: Win")
+ elif not done:
+ print(f"Episode: {episode + 1:2d} finished, result: Not finish")
+ else:
+ print(f"Episode: {episode + 1:2d} finished, result: Lose")
#print(f"Avg Episode Reward: {np.mean(episode_rewards)}")
- if 0 in info.get('winners', []) and episode > 500:
- win += 1
-
- if episode > 500:
- winrate = win / (episode - 500 + 1)
- print(f"current winrate: {winrate}")
-
- agent1.epsdecay()
+
+
+ if episode > args.tryepi:
+ agent1.epsdecay()
+ if 0 in info.get('winners', []):
+ win_buffer.append(1)
+ elif 1 in info.get('winners', []):
+ win_buffer.append(0)
+ if len(win_buffer) == args.win_in_epi:
+ avg = sum(win_buffer) / len(win_buffer)
+ print(f"current winrate: {avg}")
+ list_win.append(avg)
+ if len(list_win)%1000 == 0:
+ plot_win_rate(list_win)
+
+
+
+
+
+ print('epsilon:',agent1.epsilon)
+
+ agent1.save_model() #保存模型
env.close()
diff --git a/test/model_dqn.pt b/test/model_dqn.pt
new file mode 100644
index 0000000..d49fe00
Binary files /dev/null and b/test/model_dqn.pt differ
diff --git a/test/model_dqn2.pt b/test/model_dqn2.pt
new file mode 100644
index 0000000..c3152c5
Binary files /dev/null and b/test/model_dqn2.pt differ
diff --git a/test/multi_main.py b/test/multi_main.py
index 5a99863..076ec0e 100644
--- a/test/multi_main.py
+++ b/test/multi_main.py
@@ -1,35 +1,40 @@
+import collections
import pommerman
import gym
import torch
import argparse
import random
import numpy as np
-
+import os
from pommerman import agents
from pommerman.configs import radio_v2_env, team_v0_fast_env, radio_competition_env
from DQN2Agent import DQN2Agent
from utils import featurize, CustomEnvWrapper, featurize2
+from plot import plot_win_rate
def main():
"""解析参数"""
parser = argparse.ArgumentParser(description='DQN pommerman MARL')
- parser.add_argument('--episodes', type=int, default=3000, help='episodes')
- parser.add_argument('--maxsteps', type=int, default=200, help='maximum steps')
+ parser.add_argument('--episodes', type=int, default=100, help='episodes')
+ parser.add_argument('--maxsteps', type=int, default=500, help='maximum steps')
parser.add_argument('--showevery', type=int, default=1, help='report loss every n episodes')
- parser.add_argument('--epsilon', type=float, default=0.05, help='parameter for epsilon greedy')
- parser.add_argument('--eps_decay', type=float, default=0.995, help='epsilon decay rate')
+ parser.add_argument('--epsilon', type=float, default=0.5, help='parameter for epsilon greedy')
+ parser.add_argument('--eps_decay', type=float, default=0.99, help='epsilon decay rate')
parser.add_argument('--min_eps', type=float, default=0.05, help='minimum epsilon for decaying')
parser.add_argument('--gamma', type=float, default=0.95, help='gamma')
parser.add_argument('--lr', type=float, default=0.01, help='learning rate')
+ # parser.add_argument('--lr_decay', type=float, default=0.99, help='learning rate decay rate')
+ # parser.add_argument('--lr_decay_s', type=float, default=100, help='learning rate decay rate setp size')
parser.add_argument('--capacity', type=int, default=100000, help='capacity for replay buffer')
- parser.add_argument('--batch', type=int, default=201, help='batch size for replay buffer')
- parser.add_argument('--tryepi', type=int, default=5, help='episode for agent to gain experience')
+ parser.add_argument('--batch', type=int, default=256, help='batch size for replay buffer')
+ parser.add_argument('--tryepi', type=int, default=0, help='episode for agent to gain experience')
parser.add_argument('--gpu', type=str, default='0', help='gpu number')
-
+ parser.add_argument('--win_in_epi', type=int, default=200, help='calculate win in epi..')
+ parser.add_argument('--ranepi', type=int, default=50, help='agent go random action in epi..')
args = parser.parse_args()
# GPU
@@ -41,72 +46,146 @@ def main():
agent1 = DQN2Agent(env, args) # TODO: assertionerror; not agents.BaseAgent??
agent2 = agents.SimpleAgent()
- agent3 = DQN2Agent(env, args)
+ # agent3 = DQN2Agent(env, args)
+ agent3 = agents.SimpleAgent()
agent4 = agents.SimpleAgent()
agent_list = [agent1, agent2, agent3, agent4]
env = pommerman.make('PommeRadioCompetition-v2', agent_list)
+ # plot
+ list_win = []
+
episode_rewards = []
action_n = 6
- win = 0
+ # 加载模型
+ #agent1.load_model()
+ # agent3.load_model()
+ # collect win times
+ # if os.path.exists('model_dqn2.pt'):
+ # args.epsilon = 0.1
+ # args.eps_decay = 0.98
+ # args.tryepi = 0
+ # args.ranepi = 0
+
+
+ win_buffer = collections.deque(maxlen=args.win_in_epi)
for episode in range(args.episodes):
- states = env.reset()
+ # 固定地图
+ random.seed(2)
+ np.random.seed(2)
- state_feature1 = featurize2(env, states, 0)
- state_feature3 = featurize2(env, states, 2)
+ states = env.reset()
+ # print('epi:', episode)
done = False
episode_reward = 0
+ die = 0
for step in range(args.maxsteps):
+ state_feature1 = featurize2(env, states[0])
+ #state_feature3 = featurize2(env, states[2])
+ random.seed()
+ seed = random.random()
+ # print(seed)
+ # state_feature3 = featurize2(env, states[2])
# 刷新环境
- if episode > (args.episodes - 10):
- env.render()
-
+ # if episode % 100 == 0 and episode != 0:
+ # env.render()
+ #env.render()
# 选择action
- if (args.epsilon > random.random()) or (episode <= args.tryepi):
+ if episode < args.tryepi: # epi < 4
+ # print("simple try")
actions = env.act(states)
else:
+ if args.epsilon > seed:
+ actions = env.act(states)
+ else:
+ actions = env.act(states)
+ dqn_action1 = agent1.dqnact(state_feature1)
+ actions[0] = int(np.int64(dqn_action1))
+
+
+ """
+ elif episode < args.ranepi: # epi < 3000
+ # print("simple try")
+ actions = env.act(states)
+ elif episode >= args.ranepi and args.epsilon > seed:
+ # elif episode >= args.ranepi: # epi >= 3000 a
+ #print("random try")
+ actions = env.act(states)
+ actions[0] = random.randrange(0,6,1)
+ #actions[2] = random.randrange(0,6,1)
+ elif episode >= args.ranepi and args.epsilon <= seed: # epi >= 3000 and eps <= random.random
+ #print("dqn select")
actions = env.act(states)
dqn_action1 = agent1.dqnact(state_feature1)
- dqn_action3 = agent3.dqnact(state_feature3)
+ # dqn_action3 = agent3.dqnact(state_feature3)
actions[0] = int(np.int64(dqn_action1))
- actions[2] = int(np.int64(dqn_action3))
+ #actions[2] = int(np.int64(dqn_action3))
+ """
+ # actions = env.act(states)
+
next_state, reward, done, info = env.step(actions) # n-array with action for each agent
- next_state_feature1 = featurize2(env, next_state, 0)
- next_state_feature3 = featurize2(env, next_state, 2)
- episode_reward += reward[0]
+ if 10 not in next_state[0]['alive']:
+ info['winners'] = [1, 3]
+ reward = [-1, 1, -1, 1]
+ next_state_feature1 = featurize2(env, next_state[0])
+ # next_state_feature3 = featurize2(env, next_state[2])
+ #episode_reward += reward[0]
# 存储记忆
- agent1.buffer.append([state_feature1, actions, reward, next_state_feature1, done])
- agent3.buffer.append([state_feature3, actions, reward, next_state_feature3, done])
+ agent1.buffer.append([state_feature1, actions[0], reward[0], next_state_feature1, done])
+ # agent3.buffer.append([state_feature3, actions[2], reward[2], next_state_feature3, done])
# 先走batch步之后再开始学习
- if agent1.buffer.size() > args.batch:
+ if agent1.buffer.size() >= args.batch:
+ # if agent1.buffer.size() >= args.batch:
agent1.update(args.gamma, args.batch)
- if agent3.buffer.size() > args.batch:
- agent3.update(args.gamma, args.batch)
+ # if episode > args.tryepi and agent1.buffer.size() >= args.batch:
+ # agent3.update(args.gamma, args.batch)
# 更新state
states = next_state
-
+
+ #print('alive:', next_state[0]['alive'])
+
if done:
break
- if done:
- episode_rewards.append(episode_reward)
+ # agent1 die -> game over
+ if 10 not in next_state[0]['alive']:
+ break
+
+ #if done:
+ # episode_rewards.append(episode_reward)
if episode % args.showevery == 0:
- print(f"Episode: {episode + 1:2d} finished, result: {'Win' if 0 in info.get('winners', []) else 'Lose'}")
- print(f"Avg Episode Reward: {np.mean(episode_rewards)}")
- if 0 in info.get('winners', []) and episode > 500:
- win += 1
-
- if episode > 500:
- winrate = win / (episode + 1)
- print(f"current winrate: {winrate}")
+ if 0 in info.get('winners', []):
+ print(f"Episode: {episode + 1:2d} finished, result: Win")
+ elif 1 in info.get('winners', []):
+ print(f"Episode: {episode + 1:2d} finished, result: Lose")
+ else:
+ print(f"Episode: {episode + 1:2d} finished, result: Not finish")
+ #print(f"Avg Episode Reward: {np.mean(episode_rewards)}")
+
+ if episode > args.win_in_epi:
+ agent1.epsdecay()
+ if 0 in info.get('winners', []):
+ win_buffer.append(1)
+ elif 1 in info.get('winners', []):
+ win_buffer.append(0)
+ if len(win_buffer) == args.win_in_epi:
+ avg = sum(win_buffer) / len(win_buffer)
+ print(f"current winrate: {avg}")
+ list_win.append(avg)
+ # if len(list_win) % 500 == 0:
+ # plot_win_rate(list_win)
+
+
agent1.epsdecay()
- agent3.epsdecay()
+ # agent3.epsdecay()
+ agent1.save_model()
+ # agent3.save_model()
env.close()
# TODO: Implement Target Network
diff --git a/test/plot.py b/test/plot.py
new file mode 100644
index 0000000..1117eb7
--- /dev/null
+++ b/test/plot.py
@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+import matplotlib.pyplot as plt
+import numpy as np
+import os
+
+
+def get_png_path(png_name):
+ folder_path = os.getcwd().replace('\\', '/')
+ png_path = os.path.join(folder_path,'img',png_name)
+ return png_path
+
+def plot(list,title,png_name):
+ s = len(list)
+ x = [i + 1 for i in range(s)]
+ y = list
+
+ plt.figure(1)
+ # plt.axis([1, s, 0, 1])
+ plt.title(title)
+ plt.plot(x, y)
+
+ png_path = get_png_path(png_name)
+ plt.savefig(png_path)
+ plt.show()
+
+def plot_win_rate(list_win_rate):
+ return plot(list_win_rate, 'Win rate', 'winrate.png')
+
+def plot_reward(list_reward):
+ return plot(list_reward, 'Q-value', 'Q_value.png')
+
+
+if __name__ == '__main__':
+
+ list_win_rate = [i*0.001/5 for i in range(500)]
+ print(list_win_rate)
+ plot_win_rate(list_win_rate)
+ print('------------------------')
+
+ list_reward = [i*0.001/5 for i in range(500)]
+ print(list_reward)
+ plot_reward(list_reward)
+
+
+
+
diff --git a/test/replay_buffer.py b/test/replay_buffer.py
index 6c7953c..b361287 100644
--- a/test/replay_buffer.py
+++ b/test/replay_buffer.py
@@ -48,30 +48,31 @@ def sample2(self, batch_size):
state, action, reward, next_state, done = zip(*random.sample(self.buffer, batch_size))
return np.concatenate(state), action, reward, np.concatenate(next_state), done
+
class ReplayBuffer2():
def __init__(self, buffer_limit):
- #self.buffer = collections.deque(maxlen=buffer_limit)
- self.limit = buffer_limit
- self.memory = {}
+ self.buffer = collections.deque(maxlen=buffer_limit)
+ # self.limit = buffer_limit
+ # self.memory = {}
- def append(self, transition, episode, step):
- #self.buffer.append(transition)
- key = (episode, step)
- self.memory[key] = transition
- if len(self.memory) > self.limit:
- key = self.memory.keys()[0]
- self.memory.pop(key)
+ def append(self, transition):
+ self.buffer.append(transition)
+ # key = (episode, step)
+ # self.memory[key] = transition
+ # if len(self.memory) > self.limit:
+ # key = self.memory.keys()[0]
+ # self.memory.pop(key)
- def get(self, key):
- return self.memory.get(key)
+ # def get(self, key):
+ # return self.memory.get(key)
def sample(self, n):
- #mini_batch = random.sample(self.buffer, n)
- mini_batch = random.sample(self.memory.keys(), n)
+ mini_batch = random.sample(self.buffer, n)
+
sl_lst, sa_lst, a_lst, r_lst, sl_prime_list, sa_prime_list, done_mask_list = [], [], [], [], [], [], []
- epistep = []
- for key in mini_batch: #transition: tuple
- transition = self.memory.get(key)
+
+ for transition in mini_batch: # transition: tuple
+ # transition = self.memory.get(key)
s, a, r, s_prime, done_mask = transition
sl_lst.append(s['local'])
sa_lst.append(s['additional'])
@@ -80,7 +81,6 @@ def sample(self, n):
sl_prime_list.append(s_prime['local'])
sa_prime_list.append(s_prime['additional'])
done_mask_list.append([done_mask])
- epistep.append(key)
"""
sl_lst = np.array(sl_lst)
sa_lst = np.array(sa_lst)
@@ -95,13 +95,13 @@ def sample(self, n):
torch.tensor(a_lst), torch.tensor(r_lst),
torch.tensor(sl_prime_list, dtype=torch.float),
torch.tensor(sa_prime_list, dtype=torch.float),
- torch.tensor(done_mask_list), torch.tensor(epistep))
-
+ torch.tensor(done_mask_list))
+
def sample2(self, n):
mini_batch = random.sample(self.buffer, n)
s_lst, a_lst, r_lst, s_prime_list, done_mask_list = [], [], [], [], []
- for transition in mini_batch: #transition: tuple
+ for transition in mini_batch: # transition: tuple
s, a, r, s_prime, done_mask = transition
s_lst.append([s])
a_lst.append([a])
@@ -115,4 +115,4 @@ def sample2(self, n):
torch.tensor(done_mask_list))
def size(self):
- return len(self.buffer)
\ No newline at end of file
+ return len(self.buffer)
diff --git a/test/utils.py b/test/utils.py
index bf29d49..e390adb 100644
--- a/test/utils.py
+++ b/test/utils.py
@@ -87,7 +87,7 @@ def _preprocessing(self, obs: List[Dict], **kwargs) -> List[Dict]:
pos = np.array(d.get('position'))
view_range = 2 * self._agent_view_size + 1
v = v[pos[0]:pos[0] + view_range,
- pos[1]:pos[1] + view_range]
+ pos[1]:pos[1] + view_range]
locational.append(v)
@@ -106,6 +106,115 @@ def _preprocessing(self, obs: List[Dict], **kwargs) -> List[Dict]:
return out
+def rebuild_board2(board):
+ # 将board中数据分离,2D化
+ rigid = []
+ for row in board:
+ new_row = []
+ for num in row:
+ if num == 1:
+ new_row.append(1.0)
+ else:
+ new_row.append(0.0)
+ rigid.append(new_row)
+
+ wood = []
+ for row in board:
+ new_row = []
+ for num in row:
+ if num == 2:
+ new_row.append(1.0)
+ else:
+ new_row.append(0.0)
+ wood.append(new_row)
+
+ bomb = []
+ for row in board:
+ new_row = []
+ for num in row:
+ if num == 3:
+ new_row.append(1.0)
+ else:
+ new_row.append(0.0)
+ bomb.append(new_row)
+
+ flame = []
+ for row in board:
+ new_row = []
+ for num in row:
+ if num == 4:
+ new_row.append(1.0)
+ else:
+ new_row.append(0.0)
+ flame.append(new_row)
+
+ # 暂时用不到fog
+ fog = []
+ for row in board:
+ new_row = []
+ for num in row:
+ if num == 4:
+ new_row.append(1.0)
+ else:
+ new_row.append(0.0)
+ fog.append(new_row)
+
+ power_up = []
+ for row in board:
+ new_row = []
+ for num in row:
+ if num == 6 or num == 7 or num == 8:
+ new_row.append(1.0)
+ else:
+ new_row.append(0.0)
+ power_up.append(new_row)
+
+ agent1 = []
+ # 如果是10为此处为agent,则取1.0
+ for row in board:
+ new_row = []
+ for num in row:
+ if num == 10:
+ new_row.append(1.0)
+ else:
+ new_row.append(0.0)
+ agent1.append(new_row)
+
+ agent2 = []
+ # 如果是11为此处为agent,则取1.0
+ for row in board:
+ new_row = []
+ for num in row:
+ if num == 11:
+ new_row.append(1.0)
+ else:
+ new_row.append(0.0)
+ agent2.append(new_row)
+
+ agent3 = []
+ # 如果是12为此处为agent,则取1.0
+ for row in board:
+ new_row = []
+ for num in row:
+ if num == 12:
+ new_row.append(1.0)
+ else:
+ new_row.append(0.0)
+ agent3.append(new_row)
+
+ agent4 = []
+ # 如果是13为此处为agent,则取1.0
+ for row in board:
+ new_row = []
+ for num in row:
+ if num == 13:
+ new_row.append(1.0)
+ else:
+ new_row.append(0.0)
+ agent4.append(new_row)
+
+ return rigid, wood, bomb, power_up, fog, agent1, agent2, agent3, agent4, flame
+
def featurize(env, states):
'''
@@ -117,11 +226,11 @@ def featurize(env, states):
Output:
- feature: list[num_agents, 372]
'''
-
- #length = len(env.featurize(states[0]).tolist())
- #list = env.featurize(states[0]).tolist()
+
+ # length = len(env.featurize(states[0]).tolist())
+ # list = env.featurize(states[0]).tolist()
+ # states = states[0]
local = featurize2D(states)
- states = states[0]
"""
board = states["board"].reshape(-1).astype(np.float32)
bomb_blast_strength = states["bomb_blast_strength"].reshape(-1).astype(np.float32)
@@ -135,15 +244,14 @@ def featurize(env, states):
local.append(flame_life.tolist())
"""
feature = {'local': local}
-
additional = []
position = utility.make_np_float(states["position"])
- ammo = utility.make_np_float([states["ammo"]]) #fff
+ ammo = utility.make_np_float([states["ammo"]]) # fff
blast_strength = utility.make_np_float([states["blast_strength"]])
can_kick = utility.make_np_float([states["can_kick"]])
teammate = utility.make_np_float([states["teammate"].value])
enemies = utility.make_np_float([e.value for e in states["enemies"]])
- #print(position, ammo, blast_strength, can_kick, teammate, enemies)
+ # print(position, ammo, blast_strength, can_kick, teammate, enemies)
"""
additional.append(position.tolist())
additional.append(ammo.tolist())
@@ -152,28 +260,30 @@ def featurize(env, states):
additional.append(teammate.tolist())
additional.append(enemies.tolist())
"""
- #print(additional)
- #position占两个数,所以你要取ammo的话就要取additional[2]
+ # print(additional)
+ # position占两个数,所以你要取ammo的话就要取additional[2]
additional = np.concatenate(
- (position, ammo,
- blast_strength, can_kick, teammate, enemies))
+ (position, ammo,
+ blast_strength, can_kick, teammate, enemies))
feature['additional'] = additional.tolist()
return feature
+
def featurize2D(states):
feature2D = []
- # 共9个矩阵
- for board in rebuild_board(states[0]["board"]):
+ # 共10个矩阵
+ for board in rebuild_board2(states["board"]):
feature2D.append(board)
- feature2D.append(states[0]["bomb_blast_strength"].tolist())
- feature2D.append(states[0]["bomb_life"].tolist())
- feature2D.append(states[0]["bomb_moving_direction"].tolist())
- feature2D.append(states[0]["flame_life"].tolist())
+ feature2D.append(states["bomb_blast_strength"].tolist())
+ feature2D.append(states["bomb_life"].tolist())
+ feature2D.append(states["bomb_moving_direction"].tolist())
+ feature2D.append(states["flame_life"].tolist())
return feature2D
+
def rebuild_board(board):
# 将board中数据分离,2D化
rigid = []
@@ -215,9 +325,8 @@ def rebuild_board(board):
else:
new_row.append(0.0)
flame.append(new_row)
- """
- 暂时用不到fog
- fog =[]
+
+ fog = []
for row in board:
new_row = []
for num in row:
@@ -226,7 +335,7 @@ def rebuild_board(board):
else:
new_row.append(0.0)
fog.append(new_row)
- """
+
power_up = []
for row in board:
new_row = []
@@ -248,17 +357,18 @@ def rebuild_board(board):
new_row.append(0.0)
agents.append(new_row)
- return rigid, wood, bomb, power_up, agents
+ return rigid, wood, bomb, power_up, fog, agents, flame
-def featurize2(env, states, aid):
+def featurize2(env, states):
"""
feature = []
for state in states:
feature.append((env.featurize(state)).tolist())
"""
local = []
- states = states[aid]
+ # print(states)
+ '''
board = states["board"].reshape(-1).astype(np.float32)
bomb_blast_strength = states["bomb_blast_strength"].reshape(-1).astype(np.float32)
bomb_life = states["bomb_life"].reshape(-1).astype(np.float32)
@@ -270,7 +380,9 @@ def featurize2(env, states, aid):
local.append(bomb_moving_direction.tolist())
local.append(flame_life.tolist())
feature = []
-
+ '''
+ local = featurize2D(states)
+ feature = {'local': local}
additional = []
position = utility.make_np_float(states["position"])
ammo = utility.make_np_float([states["ammo"]])
@@ -278,9 +390,13 @@ def featurize2(env, states, aid):
can_kick = utility.make_np_float([states["can_kick"]])
teammate = utility.make_np_float([states["teammate"].value])
enemies = utility.make_np_float([e.value for e in states["enemies"]])
- additional = np.concatenate((position, ammo, blast_strength, can_kick, teammate, enemies))
message = states['message']
message = utility.make_np_float(message)
- return np.concatenate(
- (board, bomb_blast_strength, bomb_life, position, ammo,
- blast_strength, can_kick, teammate, enemies, message)).tolist()
+ additional = np.concatenate(
+ (position, ammo,
+ blast_strength, can_kick, teammate, enemies, message))
+
+ feature['additional'] = additional.tolist()
+
+ # feature['alive'] = states['alive']
+ return feature