Commit 27cde7c7 authored by Volkan Kayatas's avatar Volkan Kayatas
Browse files

Recreate DeepWalk embeddings, update README.md, and add a shell-script generator for graph embeddings

parent 00f28b6b
import datetime
import itertools
import random
from collections import deque
import sys
import sys, os, time
import argparse
import numpy as np
#import paho.mqtt.client as mqtt
from keras.layers import *
from keras.models import Sequential
from gensim.models import Word2Vec
from gensim.models import KeyedVectors
from gensim.scripts.glove2word2vec import glove2word2vec
from gensim.test.utils import datapath, get_tmpfile
from matplotlib import pyplot as plt
from variable import *
import os
# os.environ['CUDA_VISIBLE_DEVICES'] = '-1'
# Reproducibility: pin NumPy's global RNG so runs are repeatable.
np.random.seed(1)
# Canvas / rendering defaults for the maze display.
DEFAULT_SIZE_ROW = 10
DEFAULT_SIZE_COLUMN = 10
MAX_SIZE = 10
GRID_SIZE = 30
PADDING = 5
# Run params
MAX_EPISODES = 6 # paper uses 60
EMBTOGGLE = 2  # NOTE(review): embedding-mode toggle — confirm semantics against Environment
#DIMENSION = 30
#TARGET_LOC = 99
# Folder layout for embeddings and saved result arrays.
EMBEDFOLDER = "./Maze/Embedding/"
RESULTFOLDER = "./Result/"
VALSFOLDER = RESULTFOLDER + "Vals/"
STPSFOLDER = RESULTFOLDER + "Steps/"
REWFOLDER = RESULTFOLDER + "Reward/"
GRID = 10 # dimension of maze grid (GRID x GRID states)
DEBUG = False
EPSILON_REDUCE = True  # decay epsilon over time (see EPSILON_DECAY)
RANDOM_MODE = False
# Hyperparameter
EPSILON = 1.0  # initial exploration rate
EPSILON_MIN = 0.01  # floor for the decayed exploration rate
EPSILON_DECAY = 0.995  # multiplicative epsilon decay factor
MEMORY_LEN = 1000  # replay-memory capacity
DISCOUNT_RATE = 0.95  # discount factor (gamma)
BATCH_SIZE = 50  # replay minibatch size
# Global constants
# Obstacle cell indices for the active maze (flat index = row * GRID + col).
obstacles_loc = []
obstacles_loc_1 = [12, 15, 16, 17, 27, 37, 30, 42, 43,
......@@ -53,40 +65,13 @@ obstacles_loc_7 = [22, 25, 26, 31, 41, 70, 71, 85, 90, 131, 132, 142, 167, 170,
212, 215, 226, 224, 248, 261, 271, 282, 286, 298, 301, 308, 311, 316, 351, 332, 366, 369, 384, 385,
318, 391]
# NOTE(review): from here down, several constants defined above are
# re-defined with different values (this file looks like old/new diff
# halves flattened together); at import time these later assignments
# win — confirm which set of values is intended.
canvas_list = []  # presumably canvas widget handles for rendering — verify
storage_value = []  # per-cell value map; filled and reshaped to (GRID, GRID) in __main__
# Cell encodings written into storage_value.
NOT_USE = -1.0
OBSTACLE = 0.0
EMPTY = 1.0
TARGET = 0.75
START = 0.5
# Run params
START_STATE = 1  # flat index of the agent's start cell
MAX_EPISODES = 60
EMBTOGGLE = 2
DIMENSION = 20  # embedding vector dimensionality
TARGET_LOC = 399  # flat index of the target cell (399 = 20*20 - 1, last cell of a 20x20 grid)
# Old folder layout (superseded by the *FOLDER constants earlier in the file).
EMBEDPATH = "./Old/Old_Embeddings/"
RESULTPATH = "./Old/Old_Results/LM2/"
VALSPATH = "./Old/Old_Results/Vals/"
STPSPATH = "./Old/Old_Results/STP2/"
REWPATH = "./Old/Old_Results/RW2/"
GRID = 20
row_num = GRID  # number of maze rows
col_num = GRID  # number of maze columns
DEBUG = False
EPSILON_REDUCE = True
RANDOM_MODE = False
# constant action info
# Discrete action encoding used when stepping the environment.
ACTION_LEFT = 0
ACTION_UP = 1
ACTION_RIGHT = 2
ACTION_DOWN = 3
# constant state info
# Game-status strings returned while stepping the environment.
STATE_START = 'start'
STATE_WIN = 'win'
STATE_LOSE = 'lose'
......@@ -94,21 +79,23 @@ STATE_BLOCKED = 'blocked'
# Game-status strings for move validation.
STATE_VALID = 'valid'
STATE_INVALID = 'invalid'
# Hyperparameter
# NOTE(review): duplicates of the hyperparameters defined earlier in the
# file (same values) — likely residue of a flattened diff.
EPSILON = 1.0
EPSILON_MIN = 0.01
EPSILON_DECAY = 0.995
MEMORY_LEN = 1000
DISCOUNT_RATE = 0.95
BATCH_SIZE = 50
# constant state mapping
# Cell encodings written into storage_value (duplicated earlier as well).
NOT_USE = -1.0
OBSTACLE = 0.0
EMPTY = 1.0
TARGET = 0.75
START = 0.5
# Save params
# constant save params
storage_value = []  # re-initialised here; filled and reshaped in __main__
state_index = -1
rew_arr = []
rewardAxis = np.zeros((10, 60))  # per-(iteration, episode) rewards — assumes 10 runs x 60 episodes; confirm
stepsAxis = np.zeros((10, 60))   # per-(iteration, episode) step counts
partRew = [[] for i in range(10)]  # partial reward traces, one list per iteration
globalTotSteps = 0  # running step counter across episodes (used in deepQLearning)
row_num = GRID  # grid rows (GRID is re-defined earlier; the last assignment wins)
col_num = GRID  # grid columns
class DQNAgent:
......@@ -214,8 +201,8 @@ class Environment:
self.adj_dir = []
self.num_states = row_x * col_y + 1
self.dimension = DIMENSION
self.cs = 0
emb_path = EMBEDPATH + str(args.embedpath)
self.cs = 0 #current node in maze
emb_path = EMBEDFOLDER + str(args.embedpath)
self.model1 = KeyedVectors.load_word2vec_format(
emb_path, binary=False)
# self.model2 = KeyedVectors.load_word2vec_format(
......@@ -324,12 +311,6 @@ class Environment:
self.current_state = (nrow, ncol, nmode)
self.cs = nrow * self.col_number + ncol
# Action define:
# 0: LEFT
# 1: UP
# 2: RIGHT
# 3: DOWN
def act(self, act):
self.update_state(act)
reward = self.get_reward()
......@@ -414,10 +395,10 @@ def deepQLearning(model, env, state, args, randomMode=False, **opt):
totStps = 0
globalTotSteps = 0
totRew = 0
for episode in range(episodes):
# print("\nEpisode: ",episode)
loss = 0.0
env.reset()
game_over = False
......@@ -458,9 +439,7 @@ def deepQLearning(model, env, state, args, randomMode=False, **opt):
totRew += reward
ns = (env.generate_embedding()).reshape((1, -1))
# print(env.cs,reward,end=' -> ')
# print(env.cs,end=' ')
if game_status == STATE_WIN:
x, y, _ = env.current_state
storage_value[x, y] = TARGET
......@@ -533,7 +512,7 @@ def getRowCol(obs):
def create_environment(start_row, start_col, args):
global obstacles_loc
# init obstacle locations
for obstacle in obstacles_loc:
(row, col) = getRowCol(obstacle)
row = obstacle // GRID
......@@ -563,19 +542,7 @@ def create_environment(start_row, start_col, args):
env.adj_list.append([])
env.adj_dir.append([])
# for state in range(GRID*GRID):
# if state in obstacles_loc:
# continue
# if(state%GRID != 0):
# env.adj_list[state].append(state-1)
# if((state+1)%GRID != 0):
# env.adj_list[state].append(state+1)
# if(state>=GRID):
# env.adj_list[state].append(state-GRID)
# if(state+GRID<=GRID*GRID):
# env.adj_list[state].append(state+GRID)
# print(env.adj_list[0])
# load maze from edgelist (edges per state)
with open(args.edgelist) as f:
for line in f:
line = line.rstrip().split(' ')
......@@ -584,8 +551,7 @@ def create_environment(start_row, start_col, args):
if (int(line[0]) not in env.adj_list[int(line[1])]):
env.adj_list[int(line[1])].append(int(line[0]))
# print(env.adj_list[0])
# init possible actions per state
for state in range(GRID * GRID):
for next_state in env.adj_list[state]:
if (next_state == state - 1):
......@@ -612,25 +578,6 @@ def create_environment(start_row, start_col, args):
return env
def printEdgelist(args):
    """Write the maze grid's 4-neighbour edgelist to ``args.edgelist``.

    For every non-obstacle cell (flat index ``row * col_num + col``), one
    directed ``"src dst"`` line is written per in-grid neighbour (left,
    right, up, down).  Obstacle cells are skipped as edge *sources*;
    edges pointing into obstacles are still written by their neighbours,
    matching the original output.

    NOTE(review): the column-edge tests use ``row_num`` as the modulus,
    which is only correct because ``row_num == col_num == GRID`` here.
    """
    # 'with' guarantees the file handle is closed even if a write fails
    # (the original opened the file and never closed it).
    with open(args.edgelist, 'w') as f:
        for row in range(row_num):
            for col in range(col_num):
                state = col_num * row + col
                if state in obstacles_loc:
                    continue
                # Left neighbour exists unless we are in column 0.
                if state % row_num != 0:
                    f.write('{} {}\n'.format(state, state - 1))
                # Right neighbour exists unless we are in the last column.
                if (state + 1) % row_num != 0:
                    f.write('{} {}\n'.format(state, state + 1))
                # Up neighbour exists for every row after the first.
                # BUG FIX: the original tested 'state > row_num', which
                # silently dropped the up-edge for state == row_num
                # (row 1, column 0).
                if state >= row_num:
                    f.write('{} {}\n'.format(state, state - row_num))
                # Down neighbour exists unless we are in the last row.
                if state + row_num < GRID * GRID:
                    f.write('{} {}\n'.format(state, state + row_num))
def trainDQN(args):
# update state, valid actions, set collision
......@@ -657,12 +604,18 @@ def trainDQN(args):
env.reset()
deepQLearning(model, env, state, args)
# termination of process
if (_ == int(args.iterations) - 1):
partRew = np.array(partRew)
res_path = RESULTPATH + str(args.savepath)
vals_path = VALSPATH + str(args.savepath)
steps_path = STPSPATH + str(args.savepath)
partrew_path = REWPATH + str(args.savepath)
# define saving path
prefix = str(args.savepath).replace(".npy","")
res_path = RESULTFOLDER + prefix + "_result.npy"
vals_path = VALSFOLDER + prefix + "_vals.npy"
steps_path = STPSFOLDER + prefix + "_steps.npy"
partrew_path = REWFOLDER + prefix + "_reward.npy"
# save run results
np.save(res_path, rewardAxis)
np.save(steps_path, stepsAxis)
np.save(partrew_path, partRew)
......@@ -682,12 +635,12 @@ def trainDQN(args):
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("-ep", "--embedpath", help="Path for embeddings")
parser.add_argument("-sp", "--savepath", help="Save path for npy array")
parser.add_argument("-emb", "--embedpath", help="Path for embeddings")
parser.add_argument("-save", "--savepath", help="Save path for npy array")
parser.add_argument("-maze", "--maze", help="Which maze to run")
parser.add_argument("-iter", "--iterations", help="Number of iterations")
parser.add_argument("-target", "--target", help="Location of target")
parser.add_argument("-el", "--edgelist", help="edgelist of the maze")
parser.add_argument("-end", "--target", help="Location of target node")
parser.add_argument("-edgel", "--edgelist", help="edgelist of the maze")
parser.add_argument("-dim", "--dimension", help="Dimension")
args = parser.parse_args()
......@@ -711,5 +664,6 @@ if __name__ == "__main__":
storage_value.append(NOT_USE)
storage_value = np.array(storage_value, dtype=np.float).reshape(GRID, GRID)
# printEdgelist(args)
trainDQN(args)
\ No newline at end of file
start = time.time()
trainDQN(args)
print("End: {:.1f} seconds.".format(time.time()-start))
\ No newline at end of file
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
55 30
82 -0.7629231 0.3725208 0.0902338 -0.16709633 1.0553325 0.34894246 -1.0674796 -0.11119717 -0.05416084 -0.14964221 0.18577644 1.0306027 0.039653532 -0.11293112 -0.17375602 1.3367811 0.16708007 -0.6711103 0.36315858 0.5779631 -0.09654516 0.12539464 1.2548213 -0.40444028 0.65717477 -0.25953418 0.021687016 -0.095586985 0.88508767 0.13926116
3 -1.1648716 -0.43139774 0.55715615 -0.43627694 -1.0147913 0.11710168 0.12462152 0.026017684 0.60101193 -0.3351013 1.038531 1.1483611 -0.096845396 -0.31872308 -0.35375205 0.56249464 -0.31682304 0.35385042 -0.64637774 0.2353118 -1.1140867 0.42409962 -0.3124476 -0.46467292 -0.19819808 -0.8704626 0.23504767 0.33712104 0.3648968 -1.1333557
5 -1.0995964 -0.5687432 0.71115714 -0.8754525 -0.7205454 0.20301186 -0.19688329 0.53338116 0.87346166 -0.16458145 0.9004967 0.3432371 -0.22761223 -0.29527262 -0.29174712 0.45267335 -0.45380282 0.3096566 -1.2308452 -0.055799887 -0.6954908 0.5631044 -0.29779735 -1.0446422 -0.604683 -0.47173077 0.0689198 0.030181352 -0.23653671 -1.3531967
80 -1.0531573 0.5754791 0.59443283 -0.20834507 0.992297 0.63824 -0.7261191 0.040598743 0.13363749 -0.56915545 -0.3217007 0.6868431 -0.018165195 -0.3501438 0.3838414 1.1133441 0.1700484 -0.73915327 -0.20340377 0.27535996 0.017879855 0.09415103 1.0169197 -0.2294711 0.41482294 -0.44355264 0.4710587 0.06176129 0.46900943 0.06439951
30 -1.707454 -0.6240825 -0.2814964 0.57055414 -1.3954134 0.32298088 0.0442634 -0.5956906 0.8893017 0.6414895 -0.31499726 0.5551591 -0.24184054 -0.52765524 -1.0367706 -0.017222315 0.31911838 0.5222713 -0.099531084 -0.4787211 0.08031754 0.09649392 0.7443055 -0.8792158 0.18136767 -1.1268141 0.8744631 0.34035856 -0.33904752 -0.032086644
98 -1.5478727 -1.1230788 -0.6673988 0.41361743 0.9699696 -0.80749494 0.09439479 0.5391005 0.21636109 0.36135405 0.50458336 1.1512188 0.18413933 -0.2602448 -1.735745 0.8796053 0.59754956 -0.36464795 1.0829953 -0.4232386 0.92235124 -0.036084462 0.074077 -0.51520956 0.5649605 0.06809587 0.65524966 0.22111623 -0.6802417 0.34982082
33 -1.0406572 0.18574044 -0.25856054 -1.1149133 0.08117516 0.3920259 -0.7124043 0.121832825 0.9523532 -1.1062366 -0.19201046 -0.0028093355 0.25933328 0.1179003 -0.45160505 0.31541315 0.19578479 0.50990283 -1.3794731 -0.2075091 0.53860545 0.78788453 -0.042696062 -0.874986 -0.9341318 -0.3647567 -0.6142681 -0.41349977 -0.8133363 -0.44660214
73 -0.082053736 -0.5052214 -0.5182162 -0.57341355 0.8400568 -0.13516411 -1.4849995 -0.6157021 0.25004998 -0.57105935 1.247592 -0.15613346 0.73053485 -0.2118363 -0.6020456 0.65029395 0.171955 -0.86973774 -0.41682863 0.3214001 0.085694596 0.5141828 0.3918567 -0.2569997 0.93989956 -0.41062543 0.17691496 -0.012411959 -0.10034377 0.21683069
76 -0.7768387 -1.1561924 -0.46892837 -0.07167079 0.8107931 -0.59322184 -0.41386604 0.17201905 0.35739034 -0.02489321 1.3245059 -0.07114365 0.72088844 -0.5283422 -1.1868136 0.64331055 0.09854193 -0.7053044 0.12966284 -0.23709081 0.63636386 0.4361023 -0.27212262 0.10823308 0.95719105 -0.50311995 0.6837021 0.37320015 -0.58853847 0.2717719
70 -1.2182684 0.56748897 0.9158189 -0.4151771 0.69116104 0.92468077 -0.7536568 -0.05892466 0.27402887 -0.6074024 -0.6741074 0.43198732 -0.031167563 -0.3817103 0.43536967 0.995327 0.2804815 -0.7025151 -0.7435812 0.25372964 0.15095425 0.25275344 1.1020174 -0.41937003 0.1312505 -0.35086048 0.54816306 -0.04867312 0.24698839 0.1813538
60 -1.4214041 0.47330114 1.1291025 -0.4573625 0.75162876 0.8346518 -0.57291365 -0.024088426 0.121112056 -0.8688673 -1.0377259 0.63460463 0.23739152 -0.21836615 0.24115835 1.1902875 0.36469084 -0.5356178 -0.810015 0.15353915 0.52600354 0.696579 0.6145021 -0.15398775 -0.26226786 -0.41070366 0.3494259 0.0125887245 0.21464765 0.40877038
92 -0.8621289 0.2964609 0.044076283 -0.07799018 1.1032625 0.45397225 -1.1054341 -0.2427829 -0.089884154 0.06427722 0.1921989 0.9626477 -0.11461692 -0.29286745 -0.3794941 1.5957855 0.17901559 -0.77700526 0.5529617 0.6702225 0.12651712 0.04209102 1.4313853 -0.5671493 0.7664649 -0.19185564 0.014337062 -0.1074049 0.9171374 0.3593871
81 -0.84651786 0.4889755 0.24546196 -0.19806886 1.0009177 0.5312268 -0.9531669 0.07893806 0.111226425 -0.19045815 -0.23043935 0.7398843 -0.044365402 -0.14088804 0.06981132 1.1193732 0.16665725 -0.61263514 0.08185103 0.35644618 0.028592702 0.06359419 1.2317799 -0.44842216 0.43100145 -0.29536635 0.15445474 -0.089795284 0.6011562 0.07728725
63 -0.05085657 -0.46260563 -0.46626115 -0.42533556 0.81432396 -0.24138227 -1.5274165 -0.37033975 0.041389067 -0.28506204 1.1173438 0.40167257 0.7179269 0.039398007 -0.6806736 0.7730589 0.062424112 -0.77958137 0.04449286 0.4416437 -0.16208056 0.5721207 0.5099625 -0.19724374 0.88361144 -0.43093815 -0.050188754 -0.06714075 0.41549706 0.08370593
2 -1.1251369 -0.6075135 0.28261113 -0.30753896 -1.3980881 0.07025652 0.19814588 -0.41401288 0.5349351 -0.08748232 0.8687374 1.3090371 -0.1176307 -0.3160204 -0.6432422 0.43569645 -0.044715904 0.5357467 -0.5156357 0.1722678 -1.0425729 0.29501933 -0.24236399 -0.64256394 -0.13193186 -0.65654624 0.280598 0.22607885 0.31440383 -0.8953062
72 -0.45286644 0.057014454 -0.12215444 -0.14727326 0.9377139 0.16856155 -1.2534801 -0.28198653 -0.18944556 -0.08606128 0.21672751 0.9951999 0.31450254 0.04819573 -0.42251158 1.1855906 0.1746942 -0.6404037 0.4573548 0.48355082 -0.052897178 0.3292813 0.9704435 -0.279935 0.70094365 -0.34971878 -0.083421536 -0.1481578 0.8693719 0.2945228
88 -1.4962858 -0.9841715 -0.56069714 0.4140138 0.9359163 -0.790713 0.25863323 0.44368944 0.22257508 0.10234077 0.6029714 0.9904145 0.23996547 -0.41472214 -1.5230463 0.82877207 0.5147302 -0.3534426 0.9070974 -0.45087227 0.8740462 -0.002608983 -0.14250252 -0.231375 0.6076451 -0.038794782 0.7360874 0.30027357 -0.7182838 0.36250004
4 -1.1889205 -0.40305513 0.49446446 -0.64784557 -0.7560966 0.06320113 0.1566441 0.31871888 0.7510235 -0.38834244 1.0746353 0.88888025 -0.23466174 -0.30777922 -0.307911 0.49180755 -0.31700975 0.34732914 -0.8766406 0.069268405 -0.9544789 0.3049565 -0.36941212 -0.7434692 -0.37241772 -0.5446024 0.16438584 0.18127072 0.012170743 -1.3127973
90 -1.4627165 0.77931947 0.8596529 -0.14701247 1.2216492 0.73474336 -0.20555598 -0.10066444 0.18974286 -0.9607098 -0.19650139 0.62780666 -0.35683933 -0.85996634 0.6803497 1.3764436 0.22032152 -0.91084796 -0.36902937 0.22961038 0.13449608 -0.17726156 0.7714368 -0.27276912 0.45425457 -0.38260403 0.7753314 0.28404275 0.17201403 0.05647777
87 -1.2824459 -1.1379254 -0.6581957 0.16339666 0.8498484 -0.8775306 0.09979857 0.47009653 0.25378883 0.05664203 0.9604026 0.8050377 0.44910756 -0.35419703 -1.5858103 0.7708392 0.34118423 -0.35451046 0.7323627 -0.38078716 0.78749114 0.19761823 -0.3560919 -0.16067374 0.62286776 -0.11932314 0.5643305 0.26045486 -0.67561424 0.23652051
62 -0.24426022 -0.30162844 -0.41128945 -0.27810383 0.9019459 -0.18195403 -1.473143 -0.20087603 -0.018137481 -0.23553933 0.9232303 0.7499916 0.6908721 0.07024733 -0.6450273 0.9125364 0.037922684 -0.7683793 0.2942208 0.40791503 -0.21379563 0.5216961 0.6700645 -0.04952345 0.92069346 -0.6090035 6.276238e-05 -0.040145118 0.67457634 0.06658786
84 -0.033713978 -0.29182148 -0.53833073 -0.8461244 0.670146 -0.0059950566 -1.2298105 -1.6413282 0.059373833 -1.0461714 1.8287072 -0.12006141 0.0985764 -0.7420917 -0.51405746 0.95978975 0.43119836 -0.8474644 -0.73065025 0.7279842 -0.033398043 0.076373935 0.25567093 -0.9437386 0.9780777 0.2657075 0.014092776 -0.15478078 -0.22464432 0.26236635
1 -1.1574348 -0.525973 0.09668279 -0.19193569 -1.4509878 0.1164583 0.1125643 -0.64537865 0.49692482 0.024830569 0.72869265 1.4567631 -0.16679311 -0.28727973 -0.82214713 0.49713928 0.09767412 0.5424326 -0.38291013 0.22026262 -0.98840237 0.18455628 0.044942852 -0.7998607 -0.08505475 -0.6014965 0.272516 0.10215592 0.40580595 -0.73416877
77 -0.96603405 -1.1265727 -0.6327285 0.038479097 0.8694938 -0.8164085 -0.15944986 0.4114159 0.37459707 -0.013518977 1.2974219 0.2397378 0.5847165 -0.45595273 -1.3530335 0.5898835 0.14710774 -0.49786353 0.3832195 -0.3660442 0.672392 0.27806893 -0.3456542 -0.00014166502 0.8207218 -0.30584738 0.63697225 0.3015094 -0.7697709 0.12680225
74 -0.2367566 -0.7297932 -0.5512091 -0.44271234 0.7590706 -0.42393968 -1.1502047 -0.38732132 0.40416327 -0.5143743 1.2465839 -0.24314034 0.9007559 -0.16193554 -0.66514754 0.3635032 0.2393724 -0.71903396 -0.4064292 0.09975865 0.21824133 0.50040907 0.07460458 0.023480034 0.9009093 -0.4148882 0.36808643 0.19771504 -0.36264384 0.2870969
34 -0.9636728 -0.0080684 -0.19323616 -1.0369824 -0.055777777 0.2635567 -0.6436982 0.22799392 1.0450346 -0.8995564 0.04262846 -0.18671937 0.15595715 0.023032786 -0.37700596 0.16224718 0.08038745 0.47633696 -1.4481629 -0.32738033 0.3737216 0.66500586 -0.03297711 -0.95501566 -0.8721158 -0.32221764 -0.4246342 -0.34361675 -0.9154046 -0.59821326
86 -0.9656789 -1.181086 -0.45430717 -0.068483695 0.6205848 -0.6448481 -0.22809331 0.22850299 0.63137776 -0.16487883 1.5868618 -0.08702294 0.70158345 -0.6722227 -1.1082944 0.54199046 0.011792543 -0.5375062 -0.085210174 -0.33042583 0.43680733 0.4305268 -0.49275166 0.17957196 0.92935646 -0.7839198 0.7920942 0.53780013 -0.68198305 0.04194635
20 -1.3863599 -0.5569661 -0.35647866 0.37277228 -1.5084383 0.2498528 0.060661998 -1.0231786 0.74327457 0.41375262 0.07122705 0.8699902 -0.36295524 -0.493031 -0.9396348 0.007879613 0.46053243 0.531163 -0.19520853 -0.17868654 -0.33140877 -0.11809286 0.65898657 -1.0095812 0.20500171 -0.7174876 0.71176445 0.1783861 -0.16827951 -0.08520322
75 -0.4809901 -0.81999093 -0.601848 -0.309209 0.8411981 -0.54077417 -0.73397315 -0.071513474 0.4248197 -0.44305047 1.3108305 -0.1315366 0.8154329 -0.31693155 -0.8575728 0.47406748 0.18922353 -0.6486898 -0.20593359 -0.06806317 0.4003738 0.42833024 -0.14873925 0.11102807 0.8769278 -0.4374944 0.4752395 0.25104636 -0.5238528 0.20917168
24 -0.85481644 -0.19194707 -0.06080132 -1.1437984 -0.27260926 0.36392906 -0.655086 0.3060772 1.0001264 -0.5802457 0.37933332 -0.32962793 0.118812405 -0.1871417 -0.46751615 0.24470598 -0.1493399 0.38076738 -1.4675353 -0.22354846 0.20693798 0.696675 -0.05456343 -1.0962518 -0.7963015 -0.36548927 -0.38420564 -0.3713415 -0.8355279 -0.8719413
15 -0.9371756 -0.35123506 0.404451 -1.015721 -0.5936346 0.3139342 -0.38286608 0.41356277 1.0440915 -0.29476568 0.9442597 -0.11845907 -0.24974975 -0.46822703 -0.15456507 0.25040224 -0.39198068 0.22848369 -1.4950668 -0.09895133 -0.51229364 0.41521436 -0.07410068 -1.2242057 -0.52561826 -0.40823054 0.07346029 -0.104162686 -0.578672 -1.3506036
25 -0.9042254 -0.36507073 0.20860995 -1.0970913 -0.37376225 0.31667525 -0.60260177 0.41678604 1.1226743 -0.46170145 0.699083 -0.41640183 0.008152592 -0.32305744 -0.30848837 0.20110221 -0.2951708 0.2365306 -1.618845 -0.255571 -0.098575026 0.6456531 -0.06321746 -1.124849 -0.6453528 -0.45908612 -0.08264571 -0.19803514 -0.8258041 -1.0615513
10 -1.2784816 -0.7152079 -0.27774465 0.26237687 -1.7153908 0.16698612 0.028560193 -1.0093794 0.7223252 0.4464785 0.21252961 0.9950189 -0.26995054 -0.39269376 -1.0350683 0.045385372 0.40827683 0.61250395 -0.24386935 -0.12836765 -0.5008511 0.0069275186 0.46642417 -1.0444553 0.11823512 -0.69268537 0.61246413 0.17695881 -0.04202837 -0.20527655
83 -0.010851439 -0.5047385 -0.6313787 -0.7333902 0.6826183 -0.2861094 -1.1987377 -1.0522041 0.10680481 -0.84167737 1.7009319 -0.05221841 0.49556223 -0.40006787 -0.68910295 0.69687617 0.26099953 -0.7517951 -0.48403287 0.5089001 -0.018007867 0.33043846 0.036998432 -0.5189296 0.91317075 -0.018118877 0.010064827 -0.05044107 -0.20211534 0.16803132
32 -1.1367077 0.34356135 -0.19320148 -0.9084751 0.06109155 0.28002816 -0.5507295 -0.22848298 0.86490047 -1.3636045 -0.49163458 0.28438428 0.26908228 0.21080688 -0.26781756 0.25693774 0.48304653 0.58819306 -1.3947791 -0.24147439 0.47224694 0.69336385 -0.023869922 -0.7042411 -0.91889644 -0.30750975 -0.44947267 -0.31924477 -0.7300094 -0.13910316
61 -1.3585988 0.5983378 0.840193 -0.4356968 0.50026286 0.7571651 -0.6039777 -0.3138449 0.28282753 -0.9257782 -1.199678 0.6284003 0.115913354 -0.0657659 0.22828905 0.9168293 0.5289202 -0.17333528 -0.9246527 0.016633729 0.49977353 0.5993142 0.65981084 -0.40402 -0.45639792 -0.37364107 0.15002666 -0.07570682 0.095819734 0.4182153
51 -1.3026322 0.58912987 0.385411 -0.56603235 0.452299 0.6493865 -0.6599539 -0.28474447 0.52217954 -0.93153614 -1.2489569 0.68395627 0.051073182 0.18547359 -0.0121345045 0.8112262 0.5892363 0.28095102 -0.8922341 -0.06261242 0.5531326 0.586637 0.6707092 -0.7355254 -0.7465654 -0.28365362 -0.22102973 -0.23820475 -0.08407758 0.3414116
31 -1.5971413 -0.68229413 -0.42772475 0.64554065 -1.2547711 0.27505073 -0.099893965 -0.6616151 0.90958524 0.6619048 -0.4359847 0.52206385 -0.38058567 -0.44141388 -1.0854976 -0.03929871 0.5002059 0.52570015 -0.0226867 -0.45881212 0.20443593 -0.02339149 0.71240675 -1.0368713 0.10323404 -0.8703759 0.78337467 0.3063976 -0.47752205 0.15416744
21 -1.7437681 -0.7254248 -0.34479585 0.66528 -1.1185234 0.3969617 -0.25774845 -0.55202574 0.9263506 0.7644581 -0.71931463 0.493802 -0.36949062 -0.45560032 -1.2029546 0.08852769 0.6058808 0.5012375 0.052287787 -0.5197438 0.45170623 0.09543122 0.8226553 -1.1429276 0.022157606 -0.9128334 0.75828296 0.26166588 -0.5127739 0.30637157
41 -1.1993077 0.4855467 0.15086631 -0.5904317 0.1536472 0.44995838 -0.6454485 -0.42679256 0.67760855 -1.0368241 -1.0486686 0.5254324 0.1430499 0.2485691 -0.119318426 0.5119683 0.6155097 0.4984197 -1.086376 -0.17467582 0.51057667 0.6654766 0.37156823 -0.7120868 -0.84660995 -0.38122883 -0.3005989 -0.21995719 -0.29785904 0.23354356
0 -0.9766628 -0.8224613 -0.112667836 -0.04152196 -1.7522762 0.20424315 -0.058139328 -0.94723445 0.47784352 0.36372617 0.583875 1.0368786 -0.15349405 -0.3354406 -1.0913731 0.23392367 0.18615398 0.5541285 -0.29558885 0.11404751 -0.71688366 0.22750314 0.15945418 -0.95624334 0.041856978 -0.63110816 0.29122204 0.080480896 0.16141479 -0.40999687
42 -1.2233332 0.45068333 -0.11550346 -0.80790013 0.060821682 0.4496697 -0.63165414 -0.19962771 0.8530611 -1.0909369 -0.8423632 0.5031225 0.13966028 0.3190529 -0.24801892 0.4204382 0.51691866 0.7057689 -1.2080182 -0.19454424 0.44793686 0.6797073 0.29316095 -0.86465466 -0.98019034 -0.37441644 -0.5075014 -0.31792167 -0.4301241 -0.060959786
13 -1.1419472 -0.40987274 0.45861074 -0.43102255 -0.8749249 0.04606861 0.25358018 0.080897555 0.58709407 -0.40571737 0.9542891 1.1098038 -0.16690402 -0.27513367 -0.3644401 0.45357013 -0.21383147 0.37787956 -0.66235036 0.164385 -0.97977227 0.27862942 -0.41530836 -0.49869847 -0.24816279 -0.6361519 0.22835366 0.28880075 0.15661676 -1.0786171
91 -1.5140032 0.67118806 0.93867075 -0.12755914 1.1445394 0.77626 -0.24329785 0.051431827 0.17327845 -0.76763594 -0.18495725 0.6144486 -0.26107067 -0.8877198 0.5661854 1.4449633 0.09323911 -0.9682655 -0.25849476 0.24214512 0.12391422 -0.051814627 0.8211954 -0.1887379 0.5193269 -0.5786555 0.8348205 0.32066017 0.29402092 -0.025635391
94 -0.002427511 -0.37738162 -0.39674312 -0.93557084 0.51134753 0.114157565 -1.2905548 -1.6345314 -0.03481207 -0.9326738 2.027867 -0.12786658 0.09648441 -0.90326846 -0.62917495 1.1399579 0.27325666 -0.94989926 -0.7235193 0.8316377 -0.111222774 0.1713098 0.19628823 -0.9915116 1.051945 0.16821414 0.0012642777 -0.18225536 -0.07567134 0.11963124
93 -0.8616029 0.36093214 -0.047116455 0.0026044087 0.947997 0.29303184 -0.9408825 -0.5439051 -0.13605872 -0.08831196 0.29225662 1.2719519 -0.21999447 -0.22854042 -0.38934392 1.5333027 0.34407592 -0.60930294 0.5963242 0.7522499 -0.03548465 -0.060388446 1.358128 -0.6263315 0.7455542 -0.06567146 -0.028952425 -0.09522565 1.001701 0.37650216
97 -1.4864519 -0.9252356 -0.5564122 0.41685283 0.8414117 -0.77079886 0.30208117 0.48095033 0.18443803 0.2177 0.45209828 1.1401116 0.057987124 -0.30788937 -1.449738 0.7588793 0.5354427 -0.2995708 0.95105845 -0.4197973 0.7457344 -0.13026768 -0.009422422 -0.46703583 0.46114126 0.12837239 0.6776422 0.22745758 -0.6197237 0.22018112
40 -1.6890047 -0.567907 -0.40292957 0.58334255 -1.1966026 0.21496613 0.11225667 -0.62317014 0.9519087 0.43006027 -0.3632715 0.5147439 -0.31140527 -0.5014983 -0.94053864 -0.12572819 0.47555408 0.5421132 -0.1795821 -0.5250928 0.20165178 -0.06355296 0.6348135 -0.90409964 0.13684827 -0.89936626 0.8843013 0.31711593 -0.5775788 0.070499055
96 -0.88643163 -1.2084938 -0.24993831 -0.17298527 0.6428824 -0.5930257 -0.14885376 0.22715217 0.49324164 -0.24626179 1.4978831 -0.14668025 0.78371316 -0.684834 -1.0734477 0.6029572 0.013049377 -0.59452087 -0.18258108 -0.29789403 0.5379652 0.5399972 -0.6793437 0.30785295 0.86281794 -0.6519982 0.75141484 0.5157266 -0.692091 0.13920343
6 -1.1289624 -0.36714244 0.5872365 -0.7930078 -0.477925 0.14691988 -0.0076468885 0.49956456 0.8903407 -0.31656817 0.99562734 0.34848157 -0.35026035 -0.4092984 -0.06834243 0.42182076 -0.41202894 0.21878804 -1.1438894 -0.041942105 -0.7488316 0.27403286 -0.26111728 -0.99381125 -0.43840376 -0.34623876 0.19769624 0.06664647 -0.2927009 -1.3702141
50 -1.4392267 0.58007663 1.1702312 -0.5119186 0.6879347 0.7896147 -0.5561853 -0.05070661 0.16802213 -1.0626175 -0.96291023 0.789295 0.22082262 -0.16248204 0.3737868 1.0963056 0.4155258 -0.5028226 -0.9633579 0.13497794 0.36196354 0.69384617 0.61512935 -0.1778599 -0.31239894 -0.3923321 0.2793363 -0.0006150583 0.23591723 0.32065153
71 -1.2707413 0.6724431 0.90813786 -0.3494936 0.74172145 0.8210419 -0.655624 -0.01931834 0.21129942 -0.67441165 -0.6985235 0.65732354 -0.0996307 -0.3085806 0.49253032 1.049729 0.2905977 -0.65243024 -0.5999838 0.2978435 0.044727553 0.19590916 1.0598812 -0.374657 0.113128625 -0.31795776 0.52530843 0.013648447 0.34876955 0.10312018
43 -1.0547308 0.19151874 -0.5348739 -1.1167026 0.23646323 0.17534852 -0.6398799 0.11602897 1.0245107 -1.2468245 -0.12712373 0.12773225 0.2683048 0.21225527 -0.48917302 0.2460574 0.32438123 0.6200245 -1.3350862 -0.28173536 0.55283755 0.63318074 -0.10461258 -0.8805453 -0.93807036 -0.16414775 -0.62872833 -0.42493826 -0.9385609 -0.37411317
11 -1.7432708 -0.751267 -0.49312583 0.68381655 -1.0744693 0.43491974 -0.40427107 -0.5693705 1.0232407 0.7857204 -0.8435384 0.33916253 -0.34387112 -0.4308648 -1.2621448 0.08035328 0.6340925 0.5130021 0.0029673744 -0.6068901 0.6063888 0.1566977 0.8739845 -1.1944765 -0.01486431 -0.9535703 0.6901443 0.25709778 -0.6361248 0.40392026
99 -1.5992862 -0.85081315 -0.5431014 0.47425553 0.86088777 -0.76430297 0.37400338 0.5182211 0.16403006 0.18949968 0.3556071 1.3811848 0.051925983 -0.27105847 -1.4676903 0.8661972 0.5485334 -0.26566765 1.0471338 -0.38673797 0.7104196 -0.17134662 0.05487773 -0.41400698 0.46261865 0.08906333 0.6727973 0.24970984 -0.5215999 0.22440979
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
0 10
10 0
0 10
10 11
11 10
10 11
11 10
10 0
0 10
10 0
0 10
10 20
20 10
10 20
20 21
21 20
20 10
10 11
11 10
10 11
11 10
10 0
0 10
10 0
0 1
1 11
11 21
21 11
11 21
21 11
11 10
10 11
11 10
10 0
0 1
1 2
2 12
12 2
2 12
12 11
11 1
1 0
0 1
1 2
2 3
3 4
4 5
5 6
6 5
5 15
15 5
5 4
4 5
5 4
4 3
3 2
2 12
12 22
22 23
23 24
24 14
14 15
15 5
5 4
4 5
5 15
15 25
25 15
15 25
25 24
24 14
14 15
15 25
25 35
35 34
34 24
24 34
34 44
44 54
54 53
53 54
54 44
44 34
34 44
44 54
54 53
53 52
52 53
53 43
43 33
33 34
34 44
44 54
54 44
44 54
54 55
55 65
65 66
66 67
67 77
77 87
87 88
88 78
78 79
79 78
78 79
79 78
78 79
79 89
89 79
79 89
89 88
88 98
98 97
97 96
96 97
97 96
96 86
86 87
87 88
88 89
89 88
88 87
87 77
77 78
78 68
68 67
67 77
77 76
76 66
66 76
76 86
86 76
76 75
75 74
74 64
64 65
65 55
55 54
54 55
55 65
65 66
66 65
65 66
66 76
76 77
77 67
67 77
77 67
67 66
66 56
56 46
46 56
56 55
55 56
56 66
66 67
67 57
57 47
47 46
46 56
56 57
57 56
56 66
66 76
76 86
86 76
76 66
66 56
56 55
55 65
65 75
75 85
85 86
86 87
87 86
86 76
76 75
75 65
65 66
66 67
67 66
66 67
67 68
68 67
67 66
66 76
76 86
86 87
87 97
97 98
98 88
88 87
87 97
97 87
87 77
77 67
67 66
66 76
76 66
66 76
76 75
75 65
65 55
55 65
65 75
75 76
76 75
75 76
76 75
75 74
74 75
75 76
76 75
75 65
65 55
55 56
56 55
55 56
56 57
57 47
47 37
37 36
36 26
26 16
16 26
26 36
36 26
26 25
25 35
35 25
25 24
24 23
23 13
13 14
14 24
24 25
25 24
24 23
23 24
24 14
14 4
4 3
3 2
2 3
3 4
4 14
14 24
24 14
14 4
4 5
5 15
15 14
14 4
4 14
14 4
4 5
5 15
15 5
5 15