diff --git a/MobileNet_and_DenseNet/CA_D.py b/MobileNet_and_DenseNet/CA_D.py new file mode 100644 index 0000000..ea1e7b1 --- /dev/null +++ b/MobileNet_and_DenseNet/CA_D.py @@ -0,0 +1,73 @@ +import numpy as np +import sys +import logging +import csv +import os +os.environ['TF_CPP_MIN_LOG_LEVEL']='2' +from TYY_utils import mk_dir, load_data_npz +from TYY_model import TYY_DenseNet_reg + +def MAE(a,b): + mae = np.sum(np.absolute(a-b)) + mae/=len(b) + return mae + +''''''''''''''''''''''''''''''''''''''''''''' + file name +''''''''''''''''''''''''''''''''''''''''''''' +test_file = sys.argv[1] +netType = int(sys.argv[2]) + +logging.debug("Loading testing data...") +image2, age2, image_size = load_data_npz(test_file) + +if netType == 3: + N_densenet = 3 + depth_densenet = 3*N_densenet+4 + model_file = 'megaage_models/DenseNet/batch_size_50/densenet_reg_%d_64/densenet_reg_%d_64.h5'%(depth_densenet, depth_densenet) + model = TYY_DenseNet_reg(image_size,depth_densenet)() + mk_dir('Results_csv') + save_name = 'Results_csv/densenet_reg_%d_%d.csv' % (depth_densenet, image_size) + +elif netType == 4: + N_densenet = 5 + depth_densenet = 3*N_densenet+4 + model_file = 'megaage_models/DenseNet/batch_size_50/densenet_reg_%d_64/densenet_reg_%d_64.h5'%(depth_densenet, depth_densenet) + model = TYY_DenseNet_reg(image_size,depth_densenet)() + mk_dir('Results_csv') + save_name = 'Results_csv/densenet_reg_%d_%d.csv' % (depth_densenet, image_size) + + +''''''''''''''''''''''''''''''''''''''''''''' + load data +''''''''''''''''''''''''''''''''''''''''''''' +logging.debug("Loading model file...") +model.load_weights(model_file) + +age_p=model.predict(image2) + +''''''''''''''''''''''''''''''''''''''''''''' + prediction +''''''''''''''''''''''''''''''''''''''''''''' +pred=[['MAE'],[str(MAE(age2,age_p[:,0]))],['CA3','CA5'],['0','0'],['ID','age','age_p','error']] +CA3=0 +CA5=0 +for i in range(0,len(image2)): + error=np.absolute(age2[i]-age_p[i,0]) + if error<=3: + CA3+=1 + if error<=5: + CA5+=1 + temp = [str(i), str(age2[i]), str(age_p[i,0]), str(error)] + pred.append(temp) + +CA3/=len(image2) +CA5/=len(image2) +pred[3]=[str(CA3),str(CA5)] + +print('CA3: ',CA3,'\nCA5: ',CA5) + +f=open(save_name,'w') +w=csv.writer(f) +w.writerows(pred) +f.close diff --git a/MobileNet_and_DenseNet/CA_M.py b/MobileNet_and_DenseNet/CA_M.py new file mode 100644 index 0000000..c0f3282 --- /dev/null +++ b/MobileNet_and_DenseNet/CA_M.py @@ -0,0 +1,74 @@ +import numpy as np +import sys +import logging +import csv +import os +os.environ['TF_CPP_MIN_LOG_LEVEL']='2' + +from TYY_utils import mk_dir, load_data_npz +from TYY_model import TYY_MobileNet_reg + +def MAE(a,b): + mae = np.sum(np.absolute(a-b)) + mae/=len(b) + return mae + +''''''''''''''''''''''''''''''''''''''''''''' + file name +''''''''''''''''''''''''''''''''''''''''''''' +test_file = sys.argv[1] +netType = int(sys.argv[2]) + +logging.debug("Loading testing data...") +image2, age2, image_size = load_data_npz(test_file) + +if netType == 1: + alpha = 0.25 + model_file = 'megaage_models/MobileNet/batch_size_50/mobilenet_reg_0.25_64/mobilenet_reg_0.25_64.h5' + model = TYY_MobileNet_reg(image_size,alpha)() + mk_dir('Results_csv') + save_name = 'Results_csv/mobilenet_reg_%s_%d.csv' % (alpha, image_size) + +elif netType == 2: + alpha = 0.5 + model_file = 'megaage_models/MobileNet/batch_size_50/mobilenet_reg_0.5_64/mobilenet_reg_0.5_64.h5' + model = TYY_MobileNet_reg(image_size,alpha)() + mk_dir('Results_csv') + save_name = 'Results_csv/mobilenet_reg_%s_%d.csv' % (alpha, image_size) + + 
+''''''''''''''''''''''''''''''''''''''''''''' + load data +''''''''''''''''''''''''''''''''''''''''''''' +logging.debug("Loading model file...") +model.load_weights(model_file) + +age_p=model.predict(image2,batch_size=len(image2)) + +''''''''''''''''''''''''''''''''''''''''''''' + prediction +''''''''''''''''''''''''''''''''''''''''''''' +pred=[['MAE'],[str(MAE(age2,age_p[:,0]))],['ID','age','age_p','error']] +pred.append(['CA3','CA5']) +pred.append(['0','0']) +CA3=0 +CA5=0 +for i in range(0,len(image2)): + error=np.absolute(age2[i]-age_p[i,0]) + if error<=3: + CA3+=1 + if error<=5: + CA5+=1 + temp = [str(i), str(age2[i]), str(age_p[i,0]), str(error)] + pred.append(temp) + +CA3/=len(image2) +CA5/=len(image2) +pred[4]=[str(CA3),str(CA5)] + +print('CA3: ',CA3,'\nCA5: ',CA5) + +f=open(save_name,'w') +w=csv.writer(f) +w.writerows(pred) +f.close diff --git a/MobileNet_and_DenseNet/TYY_callbacks.py b/MobileNet_and_DenseNet/TYY_callbacks.py new file mode 100644 index 0000000..075c70e --- /dev/null +++ b/MobileNet_and_DenseNet/TYY_callbacks.py @@ -0,0 +1,36 @@ +import keras +from sklearn.metrics import roc_auc_score +import sys +import matplotlib.pyplot as plt +from keras.models import Model +import numpy as np +from keras import backend as K + + +class DecayLearningRate(keras.callbacks.Callback): + def __init__(self, startEpoch): + self.startEpoch = startEpoch + + def on_train_begin(self, logs={}): + return + def on_train_end(self, logs={}): + return + + def on_epoch_begin(self, epoch, logs={}): + if epoch in self.startEpoch: + if epoch == 0: + ratio = 1 + else: + ratio = 0.1 + LR = K.get_value(self.model.optimizer.lr) + K.set_value(self.model.optimizer.lr,LR*ratio) + return + + def on_epoch_end(self, epoch, logs={}): + return + + def on_batch_begin(self, batch, logs={}): + return + + def on_batch_end(self, batch, logs={}): + return diff --git a/MobileNet_and_DenseNet/TYY_generators.py b/MobileNet_and_DenseNet/TYY_generators.py new file mode 100644 index 0000000..8f7e132 --- /dev/null +++ b/MobileNet_and_DenseNet/TYY_generators.py @@ -0,0 +1,102 @@ +import keras +import numpy as np +import sys +from scipy import misc +import tensorflow as tf + + +def random_crop(x,dn): + dx = np.random.randint(dn,size=1)[0] + dy = np.random.randint(dn,size=1)[0] + w = x.shape[0] + h = x.shape[1] + out = x[0+dx:w-(dn-dx),0+dy:h-(dn-dy),:] + out = misc.imresize(out, (w,h), interp='nearest') + return out + +def augment_data(images): + for i in range(0,images.shape[0]): + + if np.random.random() > 0.5: + images[i] = images[i][:,::-1] + """ + if np.random.random() > 0.5: + images[i] = random_crop(images[i],4) + """ + if np.random.random() > 0.75: + images[i] = tf.contrib.keras.preprocessing.image.random_rotation(images[i], 20, row_axis=0, col_axis=1, channel_axis=2) + if np.random.random() > 0.75: + images[i] = tf.contrib.keras.preprocessing.image.random_shear(images[i], 0.2, row_axis=0, col_axis=1, channel_axis=2) + if np.random.random() > 0.75: + images[i] = tf.contrib.keras.preprocessing.image.random_shift(images[i], 0.2, 0.2, row_axis=0, col_axis=1, channel_axis=2) + if np.random.random() > 0.75: + images[i] = tf.contrib.keras.preprocessing.image.random_zoom(images[i], [0.8,1.2], row_axis=0, col_axis=1, channel_axis=2) + + return images + + +def data_generator_reg(X,Y,batch_size): + + while True: + idxs = np.random.permutation(len(X)) + X = X[idxs] + Y = Y[idxs] + p,q = [],[] + for i in range(len(X)): + p.append(X[i]) + q.append(Y[i]) + if len(p) == batch_size: + yield augment_data(np.array(p)),np.array(q) + p,q 
= [],[] + if p: + yield augment_data(np.array(p)),np.array(q) + p,q = [],[] + +def data_generator_dex(X,Y,batch_size): + + Y1 = Y[0] + Y2 = Y[1] + + while True: + idxs = np.random.permutation(len(X)) + X = X[idxs] + Y1 = Y1[idxs] + Y2 = Y2[idxs] + p,q1,q2 = [],[],[] + for i in range(len(X)): + p.append(X[i]) + q1.append(Y1[i]) + q2.append(Y2[i]) + if len(p) == batch_size: + yield augment_data(np.array(p)),[np.array(q1),np.array(q2)] + p,q1,q2 = [],[],[] + if p: + yield augment_data(np.array(p)),[np.array(q1),np.array(q2)] + p,q1,q2 = [],[],[] + +def data_generator_dex_centerloss(X,Y,batch_size): + X1 = X[0] + X2 = X[1] + Y1 = Y[0] + Y2 = Y[1] + Y3 = Y[2] + while True: + idxs = np.random.permutation(len(X1)) + X1 = X1[idxs] #images + X2 = X2[idxs] #labels for center loss + Y1 = Y1[idxs] + Y2 = Y2[idxs] + Y3 = Y3[idxs] + p1,p2,q1,q2,q3 = [],[],[],[],[] + for i in range(len(X1)): + p1.append(X1[i]) + p2.append(X2[i]) + q1.append(Y1[i]) + q2.append(Y2[i]) + q3.append(Y3[i]) + if len(p1) == batch_size: + yield [augment_data(np.array(p1)),np.array(p2)],[np.array(q1),np.array(q2),np.array(q3)] + p1,p2,q1,q2,q3 = [],[],[],[],[] + if p1: + yield [augment_data(np.array(p1)),np.array(p2)],[np.array(q1),np.array(q2),np.array(q3)] + p1,p2,q1,q2,q3 = [],[],[],[],[] \ No newline at end of file diff --git a/MobileNet_and_DenseNet/TYY_model.py b/MobileNet_and_DenseNet/TYY_model.py new file mode 100644 index 0000000..770d57c --- /dev/null +++ b/MobileNet_and_DenseNet/TYY_model.py @@ -0,0 +1,86 @@ +# This code is imported from the following project: https://github.com/asmith26/wide_resnets_keras + +import logging +import sys +import numpy as np +from keras.models import Model +from keras.layers import Input, Activation, add, Dense, Flatten, Dropout, Multiply, Embedding, Lambda, Add, Concatenate, Activation +from keras.layers.convolutional import Conv2D, AveragePooling2D, MaxPooling2D +from keras.layers.normalization import BatchNormalization +from keras.regularizers import l2 +from keras import backend as K +from keras.optimizers import SGD,Adam +from keras.applications.mobilenet import MobileNet +from densenet import * +from keras.utils import plot_model + +sys.setrecursionlimit(2 ** 20) +np.random.seed(2 ** 10) + + +class TYY_MobileNet_reg: + def __init__(self, image_size, alpha): + + + if K.image_dim_ordering() == "th": + logging.debug("image_dim_ordering = 'th'") + self._channel_axis = 1 + self._input_shape = (3, image_size, image_size) + else: + logging.debug("image_dim_ordering = 'tf'") + self._channel_axis = -1 + self._input_shape = (image_size, image_size, 3) + self.alpha = alpha + +# def create_model(self): + def __call__(self): + logging.debug("Creating model...") + + inputs = Input(shape=self._input_shape) + model_mobilenet = MobileNet(input_shape=self._input_shape, alpha=self.alpha, depth_multiplier=1, dropout=1e-3, include_top=False, weights=None, input_tensor=None, pooling=None) + x = model_mobilenet(inputs) + #flatten = Flatten()(x) + + feat_a = Conv2D(20,(1,1),activation='relu')(x) + feat_a = Flatten()(feat_a) + feat_a = Dropout(0.2)(feat_a) + feat_a = Dense(32,activation='relu',name='feat_a')(feat_a) + + pred_a = Dense(1,name='pred_a')(feat_a) + model = Model(inputs=inputs, outputs=[pred_a]) + + + return model + + +class TYY_DenseNet_reg: + def __init__(self, image_size, depth): + + if K.image_dim_ordering() == "th": + logging.debug("image_dim_ordering = 'th'") + self._channel_axis = 1 + self._input_shape = (3, image_size, image_size) + else: + logging.debug("image_dim_ordering = 'tf'") + 
self._channel_axis = -1 + self._input_shape = (image_size, image_size, 3) + self.depth = depth + +# def create_model(self): + def __call__(self): + logging.debug("Creating model...") + + inputs = Input(shape=self._input_shape) + model_densenet = DenseNet(input_shape=self._input_shape, depth=self.depth, include_top=False, weights=None, input_tensor=None) + flatten = model_densenet(inputs) + + feat_a = Dense(128,activation='relu')(flatten) + feat_a = Dropout(0.2)(feat_a) + feat_a = Dense(32,activation='relu',name='feat_a')(feat_a) + + pred_a = Dense(1,name='pred_a')(feat_a) + model = Model(inputs=inputs, outputs=[pred_a]) + + return model + + diff --git a/MobileNet_and_DenseNet/TYY_train_others.py b/MobileNet_and_DenseNet/TYY_train_others.py new file mode 100644 index 0000000..80f24f1 --- /dev/null +++ b/MobileNet_and_DenseNet/TYY_train_others.py @@ -0,0 +1,156 @@ +import pandas as pd +import logging +import argparse +import os +from keras.callbacks import ModelCheckpoint +from keras.optimizers import Adam +from TYY_model import TYY_MobileNet_reg, TYY_DenseNet_reg +from TYY_utils import mk_dir, load_data_npz +import sys +import numpy as np +from keras.preprocessing.image import ImageDataGenerator +from keras.applications.mobilenet import MobileNet +import TYY_callbacks +from keras.preprocessing.image import ImageDataGenerator +'''from mixup_generator import MixupGenerator''' +'''from random_eraser import get_random_eraser''' +from TYY_generators import * +from keras.utils import plot_model +from moviepy.editor import * + +logging.basicConfig(level=logging.DEBUG) + + + +def get_args(): + parser = argparse.ArgumentParser(description="This script trains the CNN model for age and gender estimation.", + formatter_class=argparse.ArgumentDefaultsHelpFormatter) + parser.add_argument("--input1", "-i1", type=str, required=True, + help="path to input database npz file") + parser.add_argument("--input2", "-i2", type=str, required=True, + help="path to input database npz file") + parser.add_argument("--db", type=str, required=True, + help="database name") + parser.add_argument("--netType", type=int, required=True, + help="network type") + parser.add_argument("--batch_size", type=int, default=128, + help="batch size") + parser.add_argument("--nb_epochs", type=int, default=90, + help="number of epochs") + + args = parser.parse_args() + return args + + + +def main(): + args = get_args() + input_path1 = args.input1 + input_path2 = args.input2 + db_name = args.db + batch_size = args.batch_size + nb_epochs = args.nb_epochs + netType = args.netType + + logging.debug("Loading training data...") + image1, age1, image_size = load_data_npz(input_path1) + logging.debug("Loading testing data...") + image2, age2, image_size = load_data_npz(input_path2) + + + start_decay_epoch = [30,60] + + optMethod = Adam() + + if netType == 1: + model_type = 'MobileNet' + alpha = 0.25 + model = TYY_MobileNet_reg(image_size,alpha)() + save_name = 'mobilenet_reg_%s_%d' % (alpha, image_size) + model.compile(optimizer=optMethod, loss=["mae"], metrics={'pred_a':'mae'}) + + elif netType == 2: + model_type = 'MobileNet' + alpha = 0.5 + model = TYY_MobileNet_reg(image_size,alpha)() + save_name = 'mobilenet_reg_%s_%d' % (alpha, image_size) + model.compile(optimizer=optMethod, loss=["mae"], metrics={'pred_a':'mae'}) + + elif netType == 3: + model_type = 'DenseNet' + N_densenet = 3 + depth_densenet = 3*N_densenet+4 + model = TYY_DenseNet_reg(image_size,depth_densenet)() + save_name = 'densenet_reg_%d_%d' % (depth_densenet, image_size) + 
model.compile(optimizer=optMethod, loss=["mae"], metrics={'pred_a':'mae'}) + + elif netType == 4: + model_type = 'DenseNet' + N_densenet = 5 + depth_densenet = 3*N_densenet+4 + model = TYY_DenseNet_reg(image_size,depth_densenet)() + save_name = 'densenet_reg_%d_%d' % (depth_densenet, image_size) + model.compile(optimizer=optMethod, loss=["mae"], metrics={'pred_a':'mae'}) + + + + if db_name == "meagaage": + weight_file = "../pre-trained/wiki/"+save_name+"/"+save_name+".h5" + model.load_weights(weight_file) + + + logging.debug("Model summary...") + model.count_params() + model.summary() + + logging.debug("Saving model...") + + mk_dir(db_name+"_models") + mk_dir(db_name+"_models/"+model_type+"/") + mk_dir(db_name+"_models/"+model_type+"/batch_size_%d/"%(batch_size)) + mk_dir(db_name+"_models/"+model_type+"/batch_size_%d/"%(batch_size)+save_name) + mk_dir(db_name+"_checkpoints") + mk_dir(db_name+"_checkpoints/"+model_type) + mk_dir(db_name+"_checkpoints/"+model_type+"/batch_size_%d/"%(batch_size)) + plot_model(model, to_file=db_name+"_models/"+model_type+"/batch_size_%d/"%(batch_size)+save_name+"/"+save_name+".png") + + with open(os.path.join(db_name+"_models/"+model_type+"/batch_size_%d/"%(batch_size)+save_name, save_name+'.json'), "w") as f: + f.write(model.to_json()) + + decaylearningrate = TYY_callbacks.DecayLearningRate(start_decay_epoch) + + callbacks = [ModelCheckpoint(db_name+"_checkpoints/"+model_type+"/batch_size_%d/"%(batch_size)+"weights.{epoch:02d}-{val_loss:.2f}.hdf5", + monitor="val_loss", + verbose=1, + save_best_only=True, + mode="auto"), decaylearningrate + ] + + logging.debug("Running training...") + + + data_num = len(image1)+len(image2) + indexes1 = np.arange(len(image1)) + indexes2 = np.arange(len(image2)) + np.random.shuffle(indexes1) + np.random.shuffle(indexes2) + x_train = image1[indexes1] + x_test = image2[indexes2] + y_train_a = age1[indexes1] + y_test_a = age2[indexes2] + train_num = len(image1) + + + hist = model.fit_generator(generator=data_generator_reg(X=x_train, Y=y_train_a, batch_size=batch_size), + steps_per_epoch=train_num // batch_size, + validation_data=(x_test, [y_test_a]), + epochs=nb_epochs, verbose=1, + callbacks=callbacks) + + logging.debug("Saving weights...") + model.save_weights(os.path.join(db_name+"_models/"+model_type+"/batch_size_%d/"%(batch_size)+save_name, save_name+'.h5'), overwrite=True) + pd.DataFrame(hist.history).to_hdf(os.path.join(db_name+"_models/"+model_type+"/batch_size_%d/"%(batch_size)+save_name, 'history_'+save_name+'.h5'), "history") + + +if __name__ == '__main__': + main() diff --git a/MobileNet_and_DenseNet/TYY_utils.py b/MobileNet_and_DenseNet/TYY_utils.py new file mode 100644 index 0000000..9329178 --- /dev/null +++ b/MobileNet_and_DenseNet/TYY_utils.py @@ -0,0 +1,51 @@ +# modifided from https://github.com/yu4u/age-gender-estimation + +from scipy.io import loadmat +from datetime import datetime +import os +import numpy as np + +def calc_age(taken, dob): + birth = datetime.fromordinal(max(int(dob) - 366, 1)) + + # assume the photo was taken in the middle of the year + if birth.month < 7: + return taken - birth.year + else: + return taken - birth.year - 1 + + +def get_meta(mat_path, db): + meta = loadmat(mat_path) + full_path = meta[db][0, 0]["full_path"][0] + dob = meta[db][0, 0]["dob"][0] # Matlab serial date number + gender = meta[db][0, 0]["gender"][0] + photo_taken = meta[db][0, 0]["photo_taken"][0] # year + face_score = meta[db][0, 0]["face_score"][0] + second_face_score = meta[db][0, 0]["second_face_score"][0] + age = 
[calc_age(photo_taken[i], dob[i]) for i in range(len(dob))] + + return full_path, dob, gender, photo_taken, face_score, second_face_score, age + + +def load_data(mat_path): + d = loadmat(mat_path) + + return d["image"], d["gender"][0], d["age"][0], d["db"][0], d["img_size"][0, 0], d["min_score"][0, 0] + +''' +def load_MORPH_data_npz(npz_path): + d = np.load(npz_path) + + return d["image"], d["gender"], d["age"], d["img_size"] +''' +def load_data_npz(npz_path): + d = np.load(npz_path) + + return d["image"], d["age"], d["img_size"] + +def mk_dir(dir): + try: + os.mkdir( dir ) + except OSError: + pass diff --git a/MobileNet_and_DenseNet/densenet.py b/MobileNet_and_DenseNet/densenet.py new file mode 100644 index 0000000..19101bb --- /dev/null +++ b/MobileNet_and_DenseNet/densenet.py @@ -0,0 +1,785 @@ +#https://github.com/titu1994/DenseNet +'''DenseNet models for Keras. +# Reference +- [Densely Connected Convolutional Networks](https://arxiv.org/pdf/1608.06993.pdf) +- [The One Hundred Layers Tiramisu: Fully Convolutional DenseNets for Semantic Segmentation](https://arxiv.org/pdf/1611.09326.pdf) +''' +from __future__ import print_function +from __future__ import absolute_import +from __future__ import division + +import warnings + +from keras.models import Model +from keras.layers.core import Dense, Dropout, Activation, Reshape +from keras.layers.convolutional import Conv2D, Conv2DTranspose, UpSampling2D +from keras.layers.pooling import AveragePooling2D, MaxPooling2D +from keras.layers.pooling import GlobalAveragePooling2D +from keras.layers import Input +from keras.layers.merge import concatenate +from keras.layers.normalization import BatchNormalization +from keras.regularizers import l2 +from keras.utils.layer_utils import convert_all_kernels_in_model, convert_dense_weights_data_format +from keras.utils.data_utils import get_file +from keras.engine.topology import get_source_inputs +from keras.applications.imagenet_utils import _obtain_input_shape +from keras.applications.imagenet_utils import decode_predictions +import keras.backend as K + +from subpixel import SubPixelUpscaling + +DENSENET_121_WEIGHTS_PATH = r'https://github.com/titu1994/DenseNet/releases/download/v3.0/DenseNet-BC-121-32.h5' +DENSENET_161_WEIGHTS_PATH = r'https://github.com/titu1994/DenseNet/releases/download/v3.0/DenseNet-BC-161-48.h5' +DENSENET_169_WEIGHTS_PATH = r'https://github.com/titu1994/DenseNet/releases/download/v3.0/DenseNet-BC-169-32.h5' +DENSENET_121_WEIGHTS_PATH_NO_TOP = r'https://github.com/titu1994/DenseNet/releases/download/v3.0/DenseNet-BC-121-32-no-top.h5' +DENSENET_161_WEIGHTS_PATH_NO_TOP = r'https://github.com/titu1994/DenseNet/releases/download/v3.0/DenseNet-BC-161-48-no-top.h5' +DENSENET_169_WEIGHTS_PATH_NO_TOP = r'https://github.com/titu1994/DenseNet/releases/download/v3.0/DenseNet-BC-169-32-no-top.h5' + +def preprocess_input(x, data_format=None): + """Preprocesses a tensor encoding a batch of images. + + # Arguments + x: input Numpy tensor, 4D. + data_format: data format of the image tensor. + + # Returns + Preprocessed tensor. + """ + if data_format is None: + data_format = K.image_data_format() + assert data_format in {'channels_last', 'channels_first'} + + if data_format == 'channels_first': + if x.ndim == 3: + # 'RGB'->'BGR' + x = x[::-1, ...] + # Zero-center by mean pixel + x[0, :, :] -= 103.939 + x[1, :, :] -= 116.779 + x[2, :, :] -= 123.68 + else: + x = x[:, ::-1, ...] 
+ x[:, 0, :, :] -= 103.939 + x[:, 1, :, :] -= 116.779 + x[:, 2, :, :] -= 123.68 + else: + # 'RGB'->'BGR' + x = x[..., ::-1] + # Zero-center by mean pixel + x[..., 0] -= 103.939 + x[..., 1] -= 116.779 + x[..., 2] -= 123.68 + + x *= 0.017 # scale values + + return x + + +def DenseNet(input_shape=None, depth=40, nb_dense_block=3, growth_rate=12, nb_filter=-1, nb_layers_per_block=-1, + bottleneck=False, reduction=0.0, dropout_rate=0.0, weight_decay=1e-4, subsample_initial_block=False, + include_top=True, weights=None, input_tensor=None, + classes=10, activation='softmax'): + '''Instantiate the DenseNet architecture, + optionally loading weights pre-trained + on CIFAR-10. Note that when using TensorFlow, + for best performance you should set + `image_data_format='channels_last'` in your Keras config + at ~/.keras/keras.json. + The model and the weights are compatible with both + TensorFlow and Theano. The dimension ordering + convention used by the model is the one + specified in your Keras config file. + # Arguments + input_shape: optional shape tuple, only to be specified + if `include_top` is False (otherwise the input shape + has to be `(32, 32, 3)` (with `channels_last` dim ordering) + or `(3, 32, 32)` (with `channels_first` dim ordering). + It should have exactly 3 inputs channels, + and width and height should be no smaller than 8. + E.g. `(200, 200, 3)` would be one valid value. + depth: number or layers in the DenseNet + nb_dense_block: number of dense blocks to add to end (generally = 3) + growth_rate: number of filters to add per dense block + nb_filter: initial number of filters. -1 indicates initial + number of filters is 2 * growth_rate + nb_layers_per_block: number of layers in each dense block. + Can be a -1, positive integer or a list. + If -1, calculates nb_layer_per_block from the network depth. + If positive integer, a set number of layers per dense block. + If list, nb_layer is used as provided. Note that list size must + be (nb_dense_block + 1) + bottleneck: flag to add bottleneck blocks in between dense blocks + reduction: reduction factor of transition blocks. + Note : reduction value is inverted to compute compression. + dropout_rate: dropout rate + weight_decay: weight decay rate + subsample_initial_block: Set to True to subsample the initial convolution and + add a MaxPool2D before the dense blocks are added. + include_top: whether to include the fully-connected + layer at the top of the network. + weights: one of `None` (random initialization) or + 'imagenet' (pre-training on ImageNet).. + input_tensor: optional Keras tensor (i.e. output of `layers.Input()`) + to use as image input for the model. + classes: optional number of classes to classify images + into, only to be specified if `include_top` is True, and + if no `weights` argument is specified. + activation: Type of activation at the top layer. Can be one of 'softmax' or 'sigmoid'. + Note that if sigmoid is used, classes must be 1. + # Returns + A Keras model instance. 
+ ''' + + if weights not in {'imagenet', None}: + raise ValueError('The `weights` argument should be either ' + '`None` (random initialization) or `cifar10` ' + '(pre-training on CIFAR-10).') + + if weights == 'imagenet' and include_top and classes != 1000: + raise ValueError('If using `weights` as ImageNet with `include_top`' + ' as true, `classes` should be 1000') + + if activation not in ['softmax', 'sigmoid']: + raise ValueError('activation must be one of "softmax" or "sigmoid"') + + if activation == 'sigmoid' and classes != 1: + raise ValueError('sigmoid activation can only be used when classes = 1') + """ + # Determine proper input shape + input_shape = _obtain_input_shape(input_shape, + default_size=32, + min_size=8, + data_format=K.image_data_format(), + require_flatten=include_top) + """ + if input_tensor is None: + img_input = Input(shape=input_shape) + else: + if not K.is_keras_tensor(input_tensor): + img_input = Input(tensor=input_tensor, shape=input_shape) + else: + img_input = input_tensor + + x = __create_dense_net(classes, img_input, include_top, depth, nb_dense_block, + growth_rate, nb_filter, nb_layers_per_block, bottleneck, reduction, + dropout_rate, weight_decay, subsample_initial_block, activation) + + # Ensure that the model takes into account + # any potential predecessors of `input_tensor`. + if input_tensor is not None: + inputs = get_source_inputs(input_tensor) + else: + inputs = img_input + # Create model. + model = Model(inputs, x, name='densenet') + + # load weights + if weights == 'imagenet': + weights_loaded = False + + if (depth == 121) and (nb_dense_block == 4) and (growth_rate == 32) and (nb_filter == 64) and \ + (bottleneck is True) and (reduction == 0.5) and (dropout_rate == 0.0) and (subsample_initial_block): + if include_top: + weights_path = get_file('DenseNet-BC-121-32.h5', + DENSENET_121_WEIGHTS_PATH, + cache_subdir='models', + md5_hash='a439dd41aa672aef6daba4ee1fd54abd') + else: + weights_path = get_file('DenseNet-BC-121-32-no-top.h5', + DENSENET_121_WEIGHTS_PATH_NO_TOP, + cache_subdir='models', + md5_hash='55e62a6358af8a0af0eedf399b5aea99') + model.load_weights(weights_path) + weights_loaded = True + + if (depth == 161) and (nb_dense_block == 4) and (growth_rate == 48) and (nb_filter == 96) and \ + (bottleneck is True) and (reduction == 0.5) and (dropout_rate == 0.0) and (subsample_initial_block): + if include_top: + weights_path = get_file('DenseNet-BC-161-48.h5', + DENSENET_161_WEIGHTS_PATH, + cache_subdir='models', + md5_hash='6c326cf4fbdb57d31eff04333a23fcca') + else: + weights_path = get_file('DenseNet-BC-161-48-no-top.h5', + DENSENET_161_WEIGHTS_PATH_NO_TOP, + cache_subdir='models', + md5_hash='1a9476b79f6b7673acaa2769e6427b92') + model.load_weights(weights_path) + weights_loaded = True + + if (depth == 169) and (nb_dense_block == 4) and (growth_rate == 32) and (nb_filter == 64) and \ + (bottleneck is True) and (reduction == 0.5) and (dropout_rate == 0.0) and (subsample_initial_block): + if include_top: + weights_path = get_file('DenseNet-BC-169-32.h5', + DENSENET_169_WEIGHTS_PATH, + cache_subdir='models', + md5_hash='914869c361303d2e39dec640b4e606a6') + else: + weights_path = get_file('DenseNet-BC-169-32-no-top.h5', + DENSENET_169_WEIGHTS_PATH_NO_TOP, + cache_subdir='models', + md5_hash='89c19e8276cfd10585d5fadc1df6859e') + model.load_weights(weights_path) + weights_loaded = True + + if weights_loaded: + if K.backend() == 'theano': + convert_all_kernels_in_model(model) + + if K.image_data_format() == 'channels_first' and K.backend() == 
'tensorflow': + warnings.warn('You are using the TensorFlow backend, yet you ' + 'are using the Theano ' + 'image data format convention ' + '(`image_data_format="channels_first"`). ' + 'For best performance, set ' + '`image_data_format="channels_last"` in ' + 'your Keras config ' + 'at ~/.keras/keras.json.') + + print("Weights for the model were loaded successfully") + + return model + + +def DenseNetFCN(input_shape, nb_dense_block=5, growth_rate=16, nb_layers_per_block=4, + reduction=0.0, dropout_rate=0.0, weight_decay=1e-4, init_conv_filters=48, + include_top=True, weights=None, input_tensor=None, classes=1, activation='softmax', + upsampling_conv=128, upsampling_type='deconv'): + '''Instantiate the DenseNet FCN architecture. + Note that when using TensorFlow, + for best performance you should set + `image_data_format='channels_last'` in your Keras config + at ~/.keras/keras.json. + # Arguments + nb_dense_block: number of dense blocks to add to end (generally = 3) + growth_rate: number of filters to add per dense block + nb_layers_per_block: number of layers in each dense block. + Can be a positive integer or a list. + If positive integer, a set number of layers per dense block. + If list, nb_layer is used as provided. Note that list size must + be (nb_dense_block + 1) + reduction: reduction factor of transition blocks. + Note : reduction value is inverted to compute compression. + dropout_rate: dropout rate + init_conv_filters: number of layers in the initial convolution layer + include_top: whether to include the fully-connected + layer at the top of the network. + weights: one of `None` (random initialization) or + 'cifar10' (pre-training on CIFAR-10).. + input_tensor: optional Keras tensor (i.e. output of `layers.Input()`) + to use as image input for the model. + input_shape: optional shape tuple, only to be specified + if `include_top` is False (otherwise the input shape + has to be `(32, 32, 3)` (with `channels_last` dim ordering) + or `(3, 32, 32)` (with `channels_first` dim ordering). + It should have exactly 3 inputs channels, + and width and height should be no smaller than 8. + E.g. `(200, 200, 3)` would be one valid value. + classes: optional number of classes to classify images + into, only to be specified if `include_top` is True, and + if no `weights` argument is specified. + activation: Type of activation at the top layer. Can be one of 'softmax' or 'sigmoid'. + Note that if sigmoid is used, classes must be 1. + upsampling_conv: number of convolutional layers in upsampling via subpixel convolution + upsampling_type: Can be one of 'upsampling', 'deconv' and + 'subpixel'. Defines type of upsampling algorithm used. + batchsize: Fixed batch size. This is a temporary requirement for + computation of output shape in the case of Deconvolution2D layers. + Parameter will be removed in next iteration of Keras, which infers + output shape of deconvolution layers automatically. + # Returns + A Keras model instance. 
+ ''' + + if weights not in {None}: + raise ValueError('The `weights` argument should be ' + '`None` (random initialization) as no ' + 'model weights are provided.') + + upsampling_type = upsampling_type.lower() + + if upsampling_type not in ['upsampling', 'deconv', 'subpixel']: + raise ValueError('Parameter "upsampling_type" must be one of "upsampling", ' + '"deconv" or "subpixel".') + + if input_shape is None: + raise ValueError('For fully convolutional models, input shape must be supplied.') + + if type(nb_layers_per_block) is not list and nb_dense_block < 1: + raise ValueError('Number of dense layers per block must be greater than 1. Argument ' + 'value was %d.' % (nb_layers_per_block)) + + if activation not in ['softmax', 'sigmoid']: + raise ValueError('activation must be one of "softmax" or "sigmoid"') + + if activation == 'sigmoid' and classes != 1: + raise ValueError('sigmoid activation can only be used when classes = 1') + + # Determine proper input shape + min_size = 2 ** nb_dense_block + + if K.image_data_format() == 'channels_first': + if input_shape is not None: + if ((input_shape[1] is not None and input_shape[1] < min_size) or + (input_shape[2] is not None and input_shape[2] < min_size)): + raise ValueError('Input size must be at least ' + + str(min_size) + 'x' + str(min_size) + ', got ' + '`input_shape=' + str(input_shape) + '`') + else: + input_shape = (classes, None, None) + else: + if input_shape is not None: + if ((input_shape[0] is not None and input_shape[0] < min_size) or + (input_shape[1] is not None and input_shape[1] < min_size)): + raise ValueError('Input size must be at least ' + + str(min_size) + 'x' + str(min_size) + ', got ' + '`input_shape=' + str(input_shape) + '`') + else: + input_shape = (None, None, classes) + + if input_tensor is None: + img_input = Input(shape=input_shape) + else: + if not K.is_keras_tensor(input_tensor): + img_input = Input(tensor=input_tensor, shape=input_shape) + else: + img_input = input_tensor + + x = __create_fcn_dense_net(classes, img_input, include_top, nb_dense_block, + growth_rate, reduction, dropout_rate, weight_decay, + nb_layers_per_block, upsampling_conv, upsampling_type, + init_conv_filters, input_shape, activation) + + # Ensure that the model takes into account + # any potential predecessors of `input_tensor`. + if input_tensor is not None: + inputs = get_source_inputs(input_tensor) + else: + inputs = img_input + # Create model. 
+ model = Model(inputs, x, name='fcn-densenet') + + return model + + +def DenseNetImageNet121(input_shape=None, + bottleneck=True, + reduction=0.5, + dropout_rate=0.0, + weight_decay=1e-4, + include_top=True, + weights='imagenet', + input_tensor=None, + classes=1000, + activation='softmax'): + return DenseNet(input_shape, depth=121, nb_dense_block=4, growth_rate=32, nb_filter=64, + nb_layers_per_block=[6, 12, 24, 16], bottleneck=bottleneck, reduction=reduction, + dropout_rate=dropout_rate, weight_decay=weight_decay, subsample_initial_block=True, + include_top=include_top, weights=weights, input_tensor=input_tensor, + classes=classes, activation=activation) + + +def DenseNetImageNet169(input_shape=None, + bottleneck=True, + reduction=0.5, + dropout_rate=0.0, + weight_decay=1e-4, + include_top=True, + weights='imagenet', + input_tensor=None, + classes=1000, + activation='softmax'): + return DenseNet(input_shape, depth=169, nb_dense_block=4, growth_rate=32, nb_filter=64, + nb_layers_per_block=[6, 12, 32, 32], bottleneck=bottleneck, reduction=reduction, + dropout_rate=dropout_rate, weight_decay=weight_decay, subsample_initial_block=True, + include_top=include_top, weights=weights, input_tensor=input_tensor, + classes=classes, activation=activation) + + +def DenseNetImageNet201(input_shape=None, + bottleneck=True, + reduction=0.5, + dropout_rate=0.0, + weight_decay=1e-4, + include_top=True, + weights=None, + input_tensor=None, + classes=1000, + activation='softmax'): + return DenseNet(input_shape, depth=201, nb_dense_block=4, growth_rate=32, nb_filter=64, + nb_layers_per_block=[6, 12, 48, 32], bottleneck=bottleneck, reduction=reduction, + dropout_rate=dropout_rate, weight_decay=weight_decay, subsample_initial_block=True, + include_top=include_top, weights=weights, input_tensor=input_tensor, + classes=classes, activation=activation) + + +def DenseNetImageNet264(input_shape=None, + bottleneck=True, + reduction=0.5, + dropout_rate=0.0, + weight_decay=1e-4, + include_top=True, + weights=None, + input_tensor=None, + classes=1000, + activation='softmax'): + return DenseNet(input_shape, depth=201, nb_dense_block=4, growth_rate=32, nb_filter=64, + nb_layers_per_block=[6, 12, 64, 48], bottleneck=bottleneck, reduction=reduction, + dropout_rate=dropout_rate, weight_decay=weight_decay, subsample_initial_block=True, + include_top=include_top, weights=weights, input_tensor=input_tensor, + classes=classes, activation=activation) + + +def DenseNetImageNet161(input_shape=None, + bottleneck=True, + reduction=0.5, + dropout_rate=0.0, + weight_decay=1e-4, + include_top=True, + weights='imagenet', + input_tensor=None, + classes=1000, + activation='softmax'): + return DenseNet(input_shape, depth=161, nb_dense_block=4, growth_rate=48, nb_filter=96, + nb_layers_per_block=[6, 12, 36, 24], bottleneck=bottleneck, reduction=reduction, + dropout_rate=dropout_rate, weight_decay=weight_decay, subsample_initial_block=True, + include_top=include_top, weights=weights, input_tensor=input_tensor, + classes=classes, activation=activation) + + +def __conv_block(ip, nb_filter, bottleneck=False, dropout_rate=None, weight_decay=1e-4): + ''' Apply BatchNorm, Relu, 3x3 Conv2D, optional bottleneck block and dropout + Args: + ip: Input keras tensor + nb_filter: number of filters + bottleneck: add bottleneck block + dropout_rate: dropout rate + weight_decay: weight decay factor + Returns: keras tensor with batch_norm, relu and convolution2d added (optional bottleneck) + ''' + concat_axis = 1 if K.image_data_format() == 'channels_first' 
else -1 + + x = BatchNormalization(axis=concat_axis, epsilon=1.1e-5)(ip) + x = Activation('relu')(x) + + if bottleneck: + inter_channel = nb_filter * 4 # Obtained from https://github.com/liuzhuang13/DenseNet/blob/master/densenet.lua + + x = Conv2D(inter_channel, (1, 1), kernel_initializer='he_normal', padding='same', use_bias=False, + kernel_regularizer=l2(weight_decay))(x) + x = BatchNormalization(axis=concat_axis, epsilon=1.1e-5)(x) + x = Activation('relu')(x) + + x = Conv2D(nb_filter, (3, 3), kernel_initializer='he_normal', padding='same', use_bias=False)(x) + if dropout_rate: + x = Dropout(dropout_rate)(x) + + return x + + +def __dense_block(x, nb_layers, nb_filter, growth_rate, bottleneck=False, dropout_rate=None, weight_decay=1e-4, + grow_nb_filters=True, return_concat_list=False): + ''' Build a dense_block where the output of each conv_block is fed to subsequent ones + Args: + x: keras tensor + nb_layers: the number of layers of conv_block to append to the model. + nb_filter: number of filters + growth_rate: growth rate + bottleneck: bottleneck block + dropout_rate: dropout rate + weight_decay: weight decay factor + grow_nb_filters: flag to decide to allow number of filters to grow + return_concat_list: return the list of feature maps along with the actual output + Returns: keras tensor with nb_layers of conv_block appended + ''' + concat_axis = 1 if K.image_data_format() == 'channels_first' else -1 + + x_list = [x] + + for i in range(nb_layers): + cb = __conv_block(x, growth_rate, bottleneck, dropout_rate, weight_decay) + x_list.append(cb) + + x = concatenate([x, cb], axis=concat_axis) + + if grow_nb_filters: + nb_filter += growth_rate + + if return_concat_list: + return x, nb_filter, x_list + else: + return x, nb_filter + + +def __transition_block(ip, nb_filter, compression=1.0, weight_decay=1e-4): + ''' Apply BatchNorm, Relu 1x1, Conv2D, optional compression, dropout and Maxpooling2D + Args: + ip: keras tensor + nb_filter: number of filters + compression: calculated as 1 - reduction. Reduces the number of feature maps + in the transition block. + dropout_rate: dropout rate + weight_decay: weight decay factor + Returns: keras tensor, after applying batch_norm, relu-conv, dropout, maxpool + ''' + concat_axis = 1 if K.image_data_format() == 'channels_first' else -1 + + x = BatchNormalization(axis=concat_axis, epsilon=1.1e-5)(ip) + x = Activation('relu')(x) + x = Conv2D(int(nb_filter * compression), (1, 1), kernel_initializer='he_normal', padding='same', use_bias=False, + kernel_regularizer=l2(weight_decay))(x) + x = AveragePooling2D((2, 2), strides=(2, 2))(x) + + return x + + +def __transition_up_block(ip, nb_filters, type='deconv', weight_decay=1E-4): + ''' SubpixelConvolutional Upscaling (factor = 2) + Args: + ip: keras tensor + nb_filters: number of layers + type: can be 'upsampling', 'subpixel', 'deconv'. Determines type of upsampling performed + weight_decay: weight decay factor + Returns: keras tensor, after applying upsampling operation. 
+ ''' + + if type == 'upsampling': + x = UpSampling2D()(ip) + elif type == 'subpixel': + x = Conv2D(nb_filters, (3, 3), activation='relu', padding='same', kernel_regularizer=l2(weight_decay), + use_bias=False, kernel_initializer='he_normal')(ip) + x = SubPixelUpscaling(scale_factor=2)(x) + x = Conv2D(nb_filters, (3, 3), activation='relu', padding='same', kernel_regularizer=l2(weight_decay), + use_bias=False, kernel_initializer='he_normal')(x) + else: + x = Conv2DTranspose(nb_filters, (3, 3), activation='relu', padding='same', strides=(2, 2), + kernel_initializer='he_normal', kernel_regularizer=l2(weight_decay))(ip) + + return x + + +def __create_dense_net(nb_classes, img_input, include_top, depth=40, nb_dense_block=3, growth_rate=12, nb_filter=-1, + nb_layers_per_block=-1, bottleneck=False, reduction=0.0, dropout_rate=None, weight_decay=1e-4, + subsample_initial_block=False, activation='softmax'): + ''' Build the DenseNet model + Args: + nb_classes: number of classes + img_input: tuple of shape (channels, rows, columns) or (rows, columns, channels) + include_top: flag to include the final Dense layer + depth: number or layers + nb_dense_block: number of dense blocks to add to end (generally = 3) + growth_rate: number of filters to add per dense block + nb_filter: initial number of filters. Default -1 indicates initial number of filters is 2 * growth_rate + nb_layers_per_block: number of layers in each dense block. + Can be a -1, positive integer or a list. + If -1, calculates nb_layer_per_block from the depth of the network. + If positive integer, a set number of layers per dense block. + If list, nb_layer is used as provided. Note that list size must + be (nb_dense_block + 1) + bottleneck: add bottleneck blocks + reduction: reduction factor of transition blocks. Note : reduction value is inverted to compute compression + dropout_rate: dropout rate + weight_decay: weight decay rate + subsample_initial_block: Set to True to subsample the initial convolution and + add a MaxPool2D before the dense blocks are added. + subsample_initial: + activation: Type of activation at the top layer. Can be one of 'softmax' or 'sigmoid'. + Note that if sigmoid is used, classes must be 1. + Returns: keras tensor with nb_layers of conv_block appended + ''' + + concat_axis = 1 if K.image_data_format() == 'channels_first' else -1 + + if reduction != 0.0: + assert reduction <= 1.0 and reduction > 0.0, 'reduction value must lie between 0.0 and 1.0' + + # layers in each dense block + if type(nb_layers_per_block) is list or type(nb_layers_per_block) is tuple: + nb_layers = list(nb_layers_per_block) # Convert tuple to list + + assert len(nb_layers) == (nb_dense_block), 'If list, nb_layer is used as provided. 
' \ + 'Note that list size must be (nb_dense_block)' + final_nb_layer = nb_layers[-1] + nb_layers = nb_layers[:-1] + else: + if nb_layers_per_block == -1: + assert (depth - 4) % 3 == 0, 'Depth must be 3 N + 4 if nb_layers_per_block == -1' + count = int((depth - 4) / 3) + nb_layers = [count for _ in range(nb_dense_block)] + final_nb_layer = count + else: + final_nb_layer = nb_layers_per_block + nb_layers = [nb_layers_per_block] * nb_dense_block + + # compute initial nb_filter if -1, else accept users initial nb_filter + if nb_filter <= 0: + nb_filter = 2 * growth_rate + + # compute compression factor + compression = 1.0 - reduction + + # Initial convolution + if subsample_initial_block: + initial_kernel = (7, 7) + initial_strides = (2, 2) + else: + initial_kernel = (3, 3) + initial_strides = (1, 1) + + x = Conv2D(nb_filter, initial_kernel, kernel_initializer='he_normal', padding='same', + strides=initial_strides, use_bias=False, kernel_regularizer=l2(weight_decay))(img_input) + + if subsample_initial_block: + x = BatchNormalization(axis=concat_axis, epsilon=1.1e-5)(x) + x = Activation('relu')(x) + x = MaxPooling2D((3, 3), strides=(2, 2), padding='same')(x) + + # Add dense blocks + for block_idx in range(nb_dense_block - 1): + x, nb_filter = __dense_block(x, nb_layers[block_idx], nb_filter, growth_rate, bottleneck=bottleneck, + dropout_rate=dropout_rate, weight_decay=weight_decay) + # add transition_block + x = __transition_block(x, nb_filter, compression=compression, weight_decay=weight_decay) + nb_filter = int(nb_filter * compression) + + # The last dense_block does not have a transition_block + x, nb_filter = __dense_block(x, final_nb_layer, nb_filter, growth_rate, bottleneck=bottleneck, + dropout_rate=dropout_rate, weight_decay=weight_decay) + + x = BatchNormalization(axis=concat_axis, epsilon=1.1e-5)(x) + x = Activation('relu')(x) + x = GlobalAveragePooling2D()(x) + + if include_top: + x = Dense(nb_classes, activation=activation)(x) + + return x + + +def __create_fcn_dense_net(nb_classes, img_input, include_top, nb_dense_block=5, growth_rate=12, + reduction=0.0, dropout_rate=None, weight_decay=1e-4, + nb_layers_per_block=4, nb_upsampling_conv=128, upsampling_type='upsampling', + init_conv_filters=48, input_shape=None, activation='deconv'): + ''' Build the DenseNet model + Args: + nb_classes: number of classes + img_input: tuple of shape (channels, rows, columns) or (rows, columns, channels) + include_top: flag to include the final Dense layer + nb_dense_block: number of dense blocks to add to end (generally = 3) + growth_rate: number of filters to add per dense block + reduction: reduction factor of transition blocks. Note : reduction value is inverted to compute compression + dropout_rate: dropout rate + weight_decay: weight decay + nb_layers_per_block: number of layers in each dense block. + Can be a positive integer or a list. + If positive integer, a set number of layers per dense block. + If list, nb_layer is used as provided. Note that list size must + be (nb_dense_block + 1) + nb_upsampling_conv: number of convolutional layers in upsampling via subpixel convolution + upsampling_type: Can be one of 'upsampling', 'deconv' and 'subpixel'. Defines + type of upsampling algorithm used. + input_shape: Only used for shape inference in fully convolutional networks. + activation: Type of activation at the top layer. Can be one of 'softmax' or 'sigmoid'. + Note that if sigmoid is used, classes must be 1. 
+ Returns: keras tensor with nb_layers of conv_block appended + ''' + + concat_axis = 1 if K.image_data_format() == 'channels_first' else -1 + + if concat_axis == 1: # channels_first dim ordering + _, rows, cols = input_shape + else: + rows, cols, _ = input_shape + + if reduction != 0.0: + assert reduction <= 1.0 and reduction > 0.0, 'reduction value must lie between 0.0 and 1.0' + + # check if upsampling_conv has minimum number of filters + # minimum is set to 12, as at least 3 color channels are needed for correct upsampling + assert nb_upsampling_conv > 12 and nb_upsampling_conv % 4 == 0, 'Parameter `upsampling_conv` number of channels must ' \ + 'be a positive number divisible by 4 and greater ' \ + 'than 12' + + # layers in each dense block + if type(nb_layers_per_block) is list or type(nb_layers_per_block) is tuple: + nb_layers = list(nb_layers_per_block) # Convert tuple to list + + assert len(nb_layers) == (nb_dense_block + 1), 'If list, nb_layer is used as provided. ' \ + 'Note that list size must be (nb_dense_block + 1)' + + bottleneck_nb_layers = nb_layers[-1] + rev_layers = nb_layers[::-1] + nb_layers.extend(rev_layers[1:]) + else: + bottleneck_nb_layers = nb_layers_per_block + nb_layers = [nb_layers_per_block] * (2 * nb_dense_block + 1) + + # compute compression factor + compression = 1.0 - reduction + + # Initial convolution + x = Conv2D(init_conv_filters, (7, 7), kernel_initializer='he_normal', padding='same', name='initial_conv2D', + use_bias=False, kernel_regularizer=l2(weight_decay))(img_input) + x = BatchNormalization(axis=concat_axis, epsilon=1.1e-5)(x) + x = Activation('relu')(x) + + nb_filter = init_conv_filters + + skip_list = [] + + # Add dense blocks and transition down block + for block_idx in range(nb_dense_block): + x, nb_filter = __dense_block(x, nb_layers[block_idx], nb_filter, growth_rate, dropout_rate=dropout_rate, + weight_decay=weight_decay) + + # Skip connection + skip_list.append(x) + + # add transition_block + x = __transition_block(x, nb_filter, compression=compression, weight_decay=weight_decay) + + nb_filter = int(nb_filter * compression) # this is calculated inside transition_down_block + + # The last dense_block does not have a transition_down_block + # return the concatenated feature maps without the concatenation of the input + _, nb_filter, concat_list = __dense_block(x, bottleneck_nb_layers, nb_filter, growth_rate, + dropout_rate=dropout_rate, weight_decay=weight_decay, + return_concat_list=True) + + skip_list = skip_list[::-1] # reverse the skip list + + # Add dense blocks and transition up block + for block_idx in range(nb_dense_block): + n_filters_keep = growth_rate * nb_layers[nb_dense_block + block_idx] + + # upsampling block must upsample only the feature maps (concat_list[1:]), + # not the concatenation of the input with the feature maps (concat_list[0]. 
+ l = concatenate(concat_list[1:], axis=concat_axis) + + t = __transition_up_block(l, nb_filters=n_filters_keep, type=upsampling_type, weight_decay=weight_decay) + + # concatenate the skip connection with the transition block + x = concatenate([t, skip_list[block_idx]], axis=concat_axis) + + # Dont allow the feature map size to grow in upsampling dense blocks + x_up, nb_filter, concat_list = __dense_block(x, nb_layers[nb_dense_block + block_idx + 1], nb_filter=growth_rate, + growth_rate=growth_rate, dropout_rate=dropout_rate, + weight_decay=weight_decay, return_concat_list=True, + grow_nb_filters=False) + + if include_top: + x = Conv2D(nb_classes, (1, 1), activation='linear', padding='same', use_bias=False)(x_up) + + if K.image_data_format() == 'channels_first': + channel, row, col = input_shape + else: + row, col, channel = input_shape + + x = Reshape((row * col, nb_classes))(x) + x = Activation(activation)(x) + x = Reshape((row, col, nb_classes))(x) + else: + x = x_up + + return x + diff --git a/MobileNet_and_DenseNet/subpixel.py b/MobileNet_and_DenseNet/subpixel.py new file mode 100644 index 0000000..3057157 --- /dev/null +++ b/MobileNet_and_DenseNet/subpixel.py @@ -0,0 +1,81 @@ +#https://github.com/titu1994/DenseNet +from __future__ import absolute_import + +from keras import backend as K +from keras.engine import Layer +from keras.utils.generic_utils import get_custom_objects +from keras.utils.conv_utils import normalize_data_format + +if K.backend() == 'theano': + import theano_backend as K_BACKEND +else: + import tensorflow_backend as K_BACKEND + +class SubPixelUpscaling(Layer): + """ Sub-pixel convolutional upscaling layer based on the paper "Real-Time Single Image + and Video Super-Resolution Using an Efficient Sub-Pixel Convolutional Neural Network" + (https://arxiv.org/abs/1609.05158). + This layer requires a Convolution2D prior to it, having output filters computed according to + the formula : + filters = k * (scale_factor * scale_factor) + where k = a user defined number of filters (generally larger than 32) + scale_factor = the upscaling factor (generally 2) + This layer performs the depth to space operation on the convolution filters, and returns a + tensor with the size as defined below. + # Example : + ```python + # A standard subpixel upscaling block + x = Convolution2D(256, 3, 3, padding='same', activation='relu')(...) + u = SubPixelUpscaling(scale_factor=2)(x) + [Optional] + x = Convolution2D(256, 3, 3, padding='same', activation='relu')(u) + ``` + In practice, it is useful to have a second convolution layer after the + SubPixelUpscaling layer to speed up the learning process. + However, if you are stacking multiple SubPixelUpscaling blocks, it may increase + the number of parameters greatly, so the Convolution layer after SubPixelUpscaling + layer can be removed. + # Arguments + scale_factor: Upscaling factor. + data_format: Can be None, 'channels_first' or 'channels_last'. + # Input shape + 4D tensor with shape: + `(samples, k * (scale_factor * scale_factor) channels, rows, cols)` if data_format='channels_first' + or 4D tensor with shape: + `(samples, rows, cols, k * (scale_factor * scale_factor) channels)` if data_format='channels_last'. + # Output shape + 4D tensor with shape: + `(samples, k channels, rows * scale_factor, cols * scale_factor))` if data_format='channels_first' + or 4D tensor with shape: + `(samples, rows * scale_factor, cols * scale_factor, k channels)` if data_format='channels_last'. 
+ """ + + def __init__(self, scale_factor=2, data_format=None, **kwargs): + super(SubPixelUpscaling, self).__init__(**kwargs) + + self.scale_factor = scale_factor + self.data_format = normalize_data_format(data_format) + + def build(self, input_shape): + pass + + def call(self, x, mask=None): + y = K_BACKEND.depth_to_space(x, self.scale_factor, self.data_format) + return y + + def compute_output_shape(self, input_shape): + if self.data_format == 'channels_first': + b, k, r, c = input_shape + return (b, k // (self.scale_factor ** 2), r * self.scale_factor, c * self.scale_factor) + else: + b, r, c, k = input_shape + return (b, r * self.scale_factor, c * self.scale_factor, k // (self.scale_factor ** 2)) + + def get_config(self): + config = {'scale_factor': self.scale_factor, + 'data_format': self.data_format} + base_config = super(SubPixelUpscaling, self).get_config() + return dict(list(base_config.items()) + list(config.items())) + + +get_custom_objects().update({'SubPixelUpscaling': SubPixelUpscaling}) diff --git a/MobileNet_and_DenseNet/tensorflow_backend.py b/MobileNet_and_DenseNet/tensorflow_backend.py new file mode 100644 index 0000000..a878af9 --- /dev/null +++ b/MobileNet_and_DenseNet/tensorflow_backend.py @@ -0,0 +1,54 @@ +#https://github.com/titu1994/DenseNet +import tensorflow as tf + +from keras.backend import tensorflow_backend as KTF +from keras.backend.common import image_data_format + +py_all = all + + +def _preprocess_conv2d_input(x, data_format): + """Transpose and cast the input before the conv2d. + # Arguments + x: input tensor. + data_format: string, `"channels_last"` or `"channels_first"`. + # Returns + A tensor. + """ + if dtype(x) == 'float64': + x = tf.cast(x, 'float32') + if data_format == 'channels_first': + # TF uses the last dimension as channel dimension, + # instead of the 2nd one. + # TH input shape: (samples, input_depth, rows, cols) + # TF input shape: (samples, rows, cols, input_depth) + x = tf.transpose(x, (0, 2, 3, 1)) + return x + + +def _postprocess_conv2d_output(x, data_format): + """Transpose and cast the output from conv2d if needed. + # Arguments + x: A tensor. + data_format: string, `"channels_last"` or `"channels_first"`. + # Returns + A tensor. 
+ """ + + if data_format == 'channels_first': + x = tf.transpose(x, (0, 3, 1, 2)) + + if floatx() == 'float64': + x = tf.cast(x, 'float64') + return x + + +def depth_to_space(input, scale, data_format=None): + ''' Uses phase shift algorithm to convert channels/depth for spatial resolution ''' + if data_format is None: + data_format = image_data_format() + data_format = data_format.lower() + input = _preprocess_conv2d_input(input, data_format) + out = tf.depth_to_space(input, scale) + out = _postprocess_conv2d_output(out, data_format) + return out diff --git a/Results_csv/densenet_reg_19_64.csv b/Results_csv/densenet_reg_19_64.csv new file mode 100644 index 0000000..67089ee --- /dev/null +++ b/Results_csv/densenet_reg_19_64.csv @@ -0,0 +1,3950 @@ +MAE +3.966643238913726 +CA3,CA5 +0.5171102661596958,0.69404309252218 +ID,age,age_p,error +0,1,2.6055956,1.605595588684082 +1,1,1.2175224,0.21752238273620605 +2,2,1.0605788,0.9394211769104004 +3,1,1.3244247,0.32442474365234375 +4,1,0.7847681,0.21523189544677734 +5,1,1.6721985,0.6721985340118408 +6,1,2.0162969,1.0162968635559082 +7,1,1.0586331,0.05863308906555176 +8,1,0.9567673,0.04323267936706543 +9,1,0.803051,0.19694900512695312 +10,1,0.8745012,0.12549877166748047 +11,1,1.9344196,0.9344196319580078 +12,1,1.8203099,0.8203098773956299 +13,1,1.0574627,0.05746269226074219 +14,1,1.0286379,0.02863788604736328 +15,1,1.5636857,0.5636856555938721 +16,1,0.85970473,0.14029526710510254 +17,1,1.3183599,0.3183598518371582 +18,1,0.8934481,0.1065518856048584 +19,1,1.1629138,0.16291379928588867 +20,1,3.7226598,2.7226598262786865 +21,1,1.1147821,0.11478209495544434 +22,1,1.0850253,0.08502531051635742 +23,1,0.7328634,0.2671365737915039 +24,2,1.3174274,0.682572603225708 +25,1,3.3253584,2.3253583908081055 +26,1,2.0556984,1.0556983947753906 +27,1,1.0754395,0.075439453125 +28,1,0.74746037,0.25253963470458984 +29,1,1.4702358,0.47023582458496094 +30,1,1.3702765,0.37027645111083984 +31,1,1.9380567,0.9380567073822021 +32,1,0.86960936,0.13039064407348633 +33,1,1.0484362,0.04843616485595703 +34,1,1.9359727,0.9359726905822754 +35,1,1.538913,0.5389130115509033 +36,1,1.393621,0.39362096786499023 +37,1,1.4288201,0.4288201332092285 +38,1,2.1421297,1.14212965965271 +39,1,1.47017,0.4701700210571289 +40,1,2.3259144,1.3259143829345703 +41,1,1.8363125,0.8363125324249268 +42,1,1.8173254,0.8173253536224365 +43,1,0.90232587,0.09767413139343262 +44,1,1.2380087,0.23800873756408691 +45,1,1.2096322,0.20963215827941895 +46,1,1.2135875,0.21358752250671387 +47,1,0.7067838,0.2932162284851074 +48,2,2.153961,0.1539609432220459 +49,1,0.6654718,0.33452820777893066 +50,1,1.5254943,0.5254943370819092 +51,1,1.921294,0.9212939739227295 +52,1,1.1415224,0.14152240753173828 +53,1,1.1366956,0.13669562339782715 +54,1,1.0851622,0.08516216278076172 +55,1,0.7001238,0.29987621307373047 +56,1,0.98791814,0.01208186149597168 +57,2,1.691493,0.30850696563720703 +58,1,0.92982674,0.07017326354980469 +59,1,1.3090732,0.30907320976257324 +60,1,2.2700043,1.2700042724609375 +61,1,0.89310217,0.10689783096313477 +62,1,1.4005003,0.4005002975463867 +63,1,1.294884,0.29488396644592285 +64,1,0.56987333,0.4301266670227051 +65,1,1.3081183,0.3081183433532715 +66,2,1.5936289,0.4063711166381836 +67,1,1.0977278,0.09772777557373047 +68,1,1.8748858,0.8748857975006104 +69,1,1.3769736,0.37697362899780273 +70,2,1.1498456,0.8501543998718262 +71,1,0.6387329,0.36126708984375 +72,1,1.3365719,0.33657193183898926 +73,1,1.7131381,0.7131381034851074 +74,1,0.55123806,0.4487619400024414 +75,1,1.5276337,0.5276336669921875 
+76,1,1.162152,0.16215205192565918 +77,3,2.4503427,0.5496573448181152 +78,1,1.2190018,0.21900177001953125 +79,1,2.3345523,1.33455228805542 +80,4,2.0267155,1.9732844829559326 +81,3,0.9359405,2.0640594959259033 +82,1,0.9542701,0.045729875564575195 +83,1,0.38899517,0.6110048294067383 +84,1,1.5339057,0.5339057445526123 +85,1,1.2074437,0.2074437141418457 +86,1,1.1485019,0.14850187301635742 +87,2,5.0950994,3.095099449157715 +88,3,4.421505,1.4215049743652344 +89,1,1.3627152,0.3627152442932129 +90,2,1.5667484,0.43325161933898926 +91,1,1.7791226,0.7791225910186768 +92,1,1.126646,0.1266460418701172 +93,1,1.4527247,0.4527246952056885 +94,1,0.8218467,0.17815327644348145 +95,3,3.295145,0.29514503479003906 +96,4,2.2998753,1.700124740600586 +97,1,1.1464291,0.14642906188964844 +98,1,1.6272933,0.6272933483123779 +99,1,1.5494957,0.5494956970214844 +100,1,1.7257698,0.7257697582244873 +101,1,1.7131822,0.7131822109222412 +102,2,1.5209143,0.47908568382263184 +103,2,1.061449,0.9385509490966797 +104,1,1.7212975,0.7212975025177002 +105,1,1.659473,0.6594729423522949 +106,2,1.7464361,0.25356388092041016 +107,2,1.2889929,0.7110071182250977 +108,2,1.178864,0.8211359977722168 +109,1,1.5797186,0.5797185897827148 +110,1,1.5899622,0.5899622440338135 +111,3,1.7373099,1.2626900672912598 +112,2,2.0964663,0.0964663028717041 +113,2,1.0381012,0.9618988037109375 +114,1,2.0090945,1.009094476699829 +115,1,2.4633634,1.4633634090423584 +116,1,1.3865075,0.386507511138916 +117,1,0.9924979,0.007502079010009766 +118,1,0.96983814,0.03016185760498047 +119,1,1.1354177,0.13541769981384277 +120,1,1.0018928,0.0018928050994873047 +121,1,1.5182419,0.5182418823242188 +122,2,1.035362,0.9646379947662354 +123,1,1.6822493,0.6822493076324463 +124,2,3.9800837,1.980083703994751 +125,2,1.1896992,0.8103008270263672 +126,2,1.8462231,0.1537768840789795 +127,3,1.9253635,1.074636459350586 +128,1,6.250957,5.250957012176514 +129,1,2.6043212,1.6043212413787842 +130,3,3.416573,0.41657304763793945 +131,1,1.153266,0.15326595306396484 +132,1,1.0589046,0.05890464782714844 +133,1,1.2264059,0.22640585899353027 +134,1,2.1536527,1.1536526679992676 +135,2,3.1622834,1.1622834205627441 +136,1,1.7586625,0.7586624622344971 +137,1,1.476583,0.47658300399780273 +138,1,1.2795534,0.2795534133911133 +139,1,1.182797,0.18279695510864258 +140,1,1.2360702,0.2360701560974121 +141,1,1.0357075,0.03570747375488281 +142,2,1.7397821,0.26021790504455566 +143,1,3.350204,2.3502039909362793 +144,11,9.623562,1.3764381408691406 +145,10,7.847778,2.152222156524658 +146,9,7.979319,1.0206809043884277 +147,6,6.7795796,0.7795796394348145 +148,5,6.652866,1.6528658866882324 +149,11,6.88859,4.111410140991211 +150,9,3.4902017,5.509798288345337 +151,11,6.7175045,4.282495498657227 +152,6,3.184235,2.815764904022217 +153,6,4.248952,1.7510480880737305 +154,10,6.528645,3.4713549613952637 +155,10,5.056134,4.943865776062012 +156,9,6.252826,2.74717378616333 +157,7,7.606343,0.6063427925109863 +158,6,6.796232,0.7962322235107422 +159,9,10.017269,1.0172691345214844 +160,9,2.7734792,6.226520776748657 +161,10,6.0296073,3.9703927040100098 +162,6,5.4698896,0.5301103591918945 +163,8,4.462594,3.5374059677124023 +164,12,9.99432,2.0056800842285156 +165,9,4.922907,4.077093124389648 +166,6,4.3009014,1.6990985870361328 +167,9,4.57692,4.423079967498779 +168,9,3.363205,5.6367950439453125 +169,6,16.260557,10.260557174682617 +170,9,2.6402998,6.3597002029418945 +171,9,6.809921,2.1900792121887207 +172,6,3.2232356,2.776764392852783 +173,13,16.411173,3.411172866821289 +174,13,11.281721,1.7182788848876953 +175,8,9.961508,1.961507797241211 
+176,10,8.149006,1.8509941101074219 +177,9,6.9284625,2.071537494659424 +178,7,6.0676107,0.9323892593383789 +179,6,3.6321712,2.367828845977783 +180,9,6.519019,2.48098087310791 +181,6,8.314437,2.314436912536621 +182,9,7.1548257,1.8451743125915527 +183,6,3.0345383,2.9654617309570312 +184,6,8.876335,2.8763351440429688 +185,7,11.698328,4.698328018188477 +186,7,5.9644413,1.0355587005615234 +187,11,14.047384,3.047384262084961 +188,6,4.6881247,1.311875343322754 +189,7,8.201723,1.2017230987548828 +190,8,4.7134995,3.2865004539489746 +191,6,13.619032,7.61903190612793 +192,6,4.8622437,1.13775634765625 +193,8,3.7640605,4.2359395027160645 +194,7,12.062239,5.062238693237305 +195,6,5.1659613,0.8340387344360352 +196,13,8.273275,4.726724624633789 +197,10,3.4834068,6.5165932178497314 +198,9,7.630691,1.3693089485168457 +199,6,23.939066,17.93906593322754 +200,11,11.214067,0.2140674591064453 +201,6,1.8449521,4.15504789352417 +202,10,9.70892,0.2910804748535156 +203,9,8.96009,0.039910316467285156 +204,6,5.139991,0.8600091934204102 +205,9,10.270222,1.2702217102050781 +206,6,4.8311515,1.1688485145568848 +207,8,7.343377,0.6566228866577148 +208,12,10.4874935,1.5125064849853516 +209,13,9.77058,3.229419708251953 +210,10,7.327907,2.672092914581299 +211,10,23.383678,13.383678436279297 +212,11,13.370848,2.370847702026367 +213,7,11.017092,4.017091751098633 +214,13,17.149607,4.149606704711914 +215,11,13.37347,2.3734703063964844 +216,9,12.91987,3.919870376586914 +217,11,9.14199,1.8580102920532227 +218,14,13.930082,0.06991767883300781 +219,7,12.162687,5.162687301635742 +220,7,6.1107054,0.8892946243286133 +221,7,4.889123,2.11087703704834 +222,9,8.264275,0.7357254028320312 +223,11,7.2432055,3.7567944526672363 +224,11,14.81221,3.8122100830078125 +225,7,8.142014,1.1420135498046875 +226,12,9.122965,2.877035140991211 +227,7,5.235977,1.7640228271484375 +228,9,4.026974,4.973025798797607 +229,10,4.638117,5.361883163452148 +230,11,17.09955,6.099550247192383 +231,7,6.672736,0.32726383209228516 +232,11,9.335054,1.6649456024169922 +233,7,5.4845896,1.5154104232788086 +234,11,7.8040543,3.1959457397460938 +235,10,5.156418,4.8435821533203125 +236,10,12.525911,2.525911331176758 +237,12,6.2225924,5.777407646179199 +238,9,7.9909797,1.0090203285217285 +239,7,3.9004135,3.0995864868164062 +240,14,4.783495,9.21650505065918 +241,11,4.950219,6.04978084564209 +242,7,9.448202,2.448202133178711 +243,11,9.787411,1.2125892639160156 +244,12,10.531916,1.4680843353271484 +245,7,9.00976,2.0097599029541016 +246,8,7.8115206,0.18847942352294922 +247,14,10.730244,3.269756317138672 +248,8,3.554586,4.445414066314697 +249,13,3.2732072,9.726792812347412 +250,7,7.2817597,0.28175973892211914 +251,8,2.2340312,5.7659687995910645 +252,7,9.927675,2.927675247192383 +253,7,5.560819,1.439180850982666 +254,8,5.8552155,2.144784450531006 +255,13,15.086096,2.0860958099365234 +256,7,6.417728,0.5822720527648926 +257,11,15.610144,4.610143661499023 +258,11,15.61779,4.617790222167969 +259,10,8.023031,1.976968765258789 +260,7,5.0296803,1.9703197479248047 +261,10,5.9531198,4.04688024520874 +262,7,8.384264,1.3842639923095703 +263,7,7.1673455,0.16734552383422852 +264,7,4.8493423,2.1506576538085938 +265,9,16.114796,7.114795684814453 +266,10,13.810593,3.8105926513671875 +267,14,12.471687,1.5283126831054688 +268,7,5.5210133,1.4789867401123047 +269,8,9.5102215,1.5102214813232422 +270,8,9.94495,1.9449501037597656 +271,10,8.646022,1.353978157043457 +272,8,11.221441,3.2214412689208984 +273,13,13.419949,0.4199485778808594 +274,8,9.129619,1.1296186447143555 +275,8,7.774742,0.22525787353515625 
+276,10,14.917198,4.917198181152344 +277,8,8.772755,0.7727546691894531 +278,8,10.292614,2.292613983154297 +279,9,5.388245,3.611754894256592 +280,8,7.5690055,0.43099451065063477 +281,10,18.298267,8.298267364501953 +282,8,7.217237,0.7827630043029785 +283,8,5.65065,2.3493499755859375 +284,10,8.629137,1.3708629608154297 +285,13,10.644987,2.355012893676758 +286,8,13.457018,5.45701789855957 +287,15,11.567289,3.432710647583008 +288,8,5.760423,2.239576816558838 +289,9,11.556854,2.556854248046875 +290,9,5.121583,3.8784170150756836 +291,8,7.6456265,0.3543734550476074 +292,9,12.363714,3.3637142181396484 +293,8,10.682217,2.6822166442871094 +294,10,5.91912,4.080880165100098 +295,8,15.917252,7.9172515869140625 +296,8,7.114563,0.88543701171875 +297,8,8.262756,0.26275634765625 +298,10,4.632182,5.3678178787231445 +299,11,9.082978,1.9170217514038086 +300,8,9.16976,1.169759750366211 +301,8,13.810593,5.8105926513671875 +302,12,12.564449,0.5644493103027344 +303,8,9.182367,1.1823673248291016 +304,8,6.7342854,1.2657146453857422 +305,9,12.084017,3.084016799926758 +306,8,9.569046,1.5690460205078125 +307,13,13.82464,0.8246402740478516 +308,8,6.819151,1.1808490753173828 +309,8,10.520039,2.520038604736328 +310,8,7.2800665,0.7199335098266602 +311,8,21.812141,13.812141418457031 +312,11,15.414675,4.414674758911133 +313,12,8.215797,3.7842025756835938 +314,12,6.3560963,5.643903732299805 +315,9,9.54406,0.5440597534179688 +316,12,10.018251,1.9817485809326172 +317,12,7.1784835,4.821516513824463 +318,12,14.322138,2.3221378326416016 +319,8,8.154411,0.15441131591796875 +320,12,14.149675,2.1496753692626953 +321,12,7.0324354,4.967564582824707 +322,13,11.927368,1.0726318359375 +323,12,6.3532696,5.646730422973633 +324,12,8.295771,3.7042293548583984 +325,13,16.870571,3.8705711364746094 +326,7,14.022469,7.022468566894531 +327,10,14.780527,4.780527114868164 +328,12,7.4458346,4.554165363311768 +329,12,15.204166,3.2041664123535156 +330,12,16.857443,4.857442855834961 +331,10,7.024827,2.975172996520996 +332,8,4.151926,3.848073959350586 +333,8,4.4107323,3.5892677307128906 +334,8,12.205503,4.205503463745117 +335,11,13.305445,2.3054447174072266 +336,12,12.039333,0.039333343505859375 +337,12,7.589577,4.4104228019714355 +338,12,8.62393,3.376070022583008 +339,12,9.766583,2.2334165573120117 +340,12,3.5259943,8.474005699157715 +341,12,12.806414,0.8064136505126953 +342,14,8.805004,5.194995880126953 +343,8,3.029296,4.970704078674316 +344,8,9.449649,1.4496488571166992 +345,8,11.136408,3.1364078521728516 +346,9,7.0940995,1.905900478363037 +347,10,7.5450807,2.4549193382263184 +348,12,5.6804943,6.31950569152832 +349,9,10.55938,1.5593795776367188 +350,14,4.8836837,9.116316318511963 +351,8,7.2182097,0.7817902565002441 +352,12,10.293118,1.7068824768066406 +353,13,13.111664,0.111663818359375 +354,8,11.579388,3.579387664794922 +355,12,9.7594795,2.240520477294922 +356,8,9.763469,1.7634687423706055 +357,12,8.216856,3.783143997192383 +358,9,10.273863,1.2738628387451172 +359,12,9.026035,2.9739646911621094 +360,12,11.33931,0.6606903076171875 +361,12,5.28614,6.713860034942627 +362,16,14.104815,1.8951854705810547 +363,13,12.859276,0.14072418212890625 +364,12,14.845503,2.8455028533935547 +365,12,15.476343,3.4763431549072266 +366,12,15.215597,3.215597152709961 +367,9,12.261555,3.261554718017578 +368,11,15.096647,4.096647262573242 +369,13,17.604502,4.604501724243164 +370,11,11.954315,0.954315185546875 +371,12,9.382672,2.6173276901245117 +372,12,11.653852,0.3461475372314453 +373,12,15.498459,3.4984588623046875 +374,9,11.483753,2.483753204345703 
+375,12,17.20453,5.204530715942383 +376,9,11.358803,2.3588027954101562 +377,9,12.029549,3.0295486450195312 +378,12,4.0514364,7.948563575744629 +379,12,9.404722,2.595277786254883 +380,12,13.619848,1.6198482513427734 +381,9,8.461498,0.5385017395019531 +382,12,12.577513,0.5775127410888672 +383,12,12.7054405,0.7054405212402344 +384,12,13.890308,1.8903083801269531 +385,12,14.444307,2.444307327270508 +386,12,8.996901,3.0030994415283203 +387,12,12.71736,0.7173595428466797 +388,12,18.867567,6.86756706237793 +389,12,6.8545356,5.1454644203186035 +390,12,28.786118,16.786117553710938 +391,12,18.791073,6.791072845458984 +392,12,9.529564,2.4704360961914062 +393,12,7.9642854,4.035714626312256 +394,12,6.131857,5.868143081665039 +395,12,15.912779,3.912778854370117 +396,12,15.256975,3.2569751739501953 +397,12,11.319769,0.6802310943603516 +398,12,17.496294,5.496294021606445 +399,12,8.049953,3.9500465393066406 +400,12,13.662603,1.6626033782958984 +401,12,12.058563,0.058563232421875 +402,12,9.934343,2.0656566619873047 +403,12,13.558132,1.5581321716308594 +404,12,9.132784,2.867216110229492 +405,12,11.600794,0.39920616149902344 +406,12,14.381176,2.381175994873047 +407,8,3.7750974,4.224902629852295 +408,12,16.29074,4.290739059448242 +409,12,8.573496,3.426504135131836 +410,13,16.430391,3.430391311645508 +411,12,10.579357,1.4206428527832031 +412,12,8.446535,3.553464889526367 +413,10,8.404643,1.5953569412231445 +414,8,6.864161,1.1358389854431152 +415,8,7.1942453,0.8057546615600586 +416,12,14.378887,2.378887176513672 +417,8,10.226345,2.2263450622558594 +418,11,11.355183,0.3551826477050781 +419,14,9.394882,4.6051177978515625 +420,10,7.004409,2.995591163635254 +421,11,9.178381,1.8216190338134766 +422,12,13.282043,1.28204345703125 +423,14,13.989542,0.010457992553710938 +424,13,14.388523,1.3885231018066406 +425,13,6.848335,6.151665210723877 +426,13,19.410074,6.410074234008789 +427,8,8.412336,0.4123363494873047 +428,16,8.395571,7.604429244995117 +429,11,8.134094,2.86590576171875 +430,15,9.950392,5.04960823059082 +431,13,15.987137,2.9871368408203125 +432,9,19.466742,10.466741561889648 +433,9,6.947095,2.0529050827026367 +434,15,10.897804,4.102195739746094 +435,14,15.515236,1.5152359008789062 +436,9,12.066931,3.0669307708740234 +437,12,16.742344,4.742343902587891 +438,13,17.752007,4.752006530761719 +439,12,14.112318,2.1123180389404297 +440,14,12.780188,1.2198123931884766 +441,14,12.9949665,1.0050334930419922 +442,16,20.05658,4.05657958984375 +443,14,15.884264,1.8842639923095703 +444,12,10.9562,1.0438003540039062 +445,13,15.146379,2.1463794708251953 +446,11,12.528044,1.5280437469482422 +447,15,12.908203,2.091796875 +448,14,17.97008,3.9700794219970703 +449,9,8.262756,0.73724365234375 +450,14,13.248756,0.7512435913085938 +451,13,14.698843,1.698843002319336 +452,13,14.207157,1.2071571350097656 +453,14,5.780798,8.219202041625977 +454,13,5.20349,7.796510219573975 +455,13,8.458357,4.541643142700195 +456,12,10.804201,1.1957988739013672 +457,13,14.882683,1.8826828002929688 +458,12,13.338291,1.3382911682128906 +459,12,15.3192215,3.3192214965820312 +460,13,12.842541,0.15745925903320312 +461,13,12.313946,0.6860542297363281 +462,13,14.420259,1.4202594757080078 +463,14,13.73583,0.26416969299316406 +464,13,14.428669,1.4286689758300781 +465,14,11.140318,2.859682083129883 +466,12,5.9143577,6.085642337799072 +467,16,21.237167,5.2371673583984375 +468,13,12.805538,0.19446182250976562 +469,13,8.650536,4.349464416503906 +470,13,12.632313,0.3676872253417969 +471,13,16.430391,3.430391311645508 +472,12,8.729084,3.270915985107422 
+473,14,15.317503,1.3175029754638672 +474,16,14.110725,1.8892745971679688 +475,13,15.264814,2.2648143768310547 +476,14,13.639601,0.3603992462158203 +477,16,12.946026,3.053974151611328 +478,10,15.834656,5.83465576171875 +479,9,5.4614515,3.538548469543457 +480,14,11.341475,2.6585254669189453 +481,14,15.812618,1.8126182556152344 +482,9,7.917256,1.0827441215515137 +483,12,5.3351717,6.664828300476074 +484,9,6.6996565,2.3003435134887695 +485,14,14.060539,0.06053924560546875 +486,12,11.730673,0.26932716369628906 +487,10,8.884529,1.1154708862304688 +488,14,11.345499,2.654500961303711 +489,9,8.798412,0.20158767700195312 +490,9,10.879641,1.8796405792236328 +491,14,15.319645,1.3196449279785156 +492,14,16.775179,2.775178909301758 +493,16,13.7601185,2.2398815155029297 +494,13,6.344268,6.655732154846191 +495,11,17.309566,6.309566497802734 +496,10,4.9222245,5.077775478363037 +497,10,12.0382595,2.038259506225586 +498,13,9.590868,3.4091320037841797 +499,13,12.775135,0.22486495971679688 +500,17,15.429575,1.570425033569336 +501,10,7.8654704,2.1345295906066895 +502,14,11.319769,2.6802310943603516 +503,14,11.500029,2.499971389770508 +504,10,14.989378,4.989377975463867 +505,17,15.52994,1.4700603485107422 +506,14,13.155079,0.8449211120605469 +507,13,14.130146,1.1301460266113281 +508,13,12.004402,0.9955978393554688 +509,16,11.113041,4.886959075927734 +510,13,12.079023,0.9209766387939453 +511,12,11.607706,0.39229393005371094 +512,10,3.308822,6.6911780834198 +513,10,11.090851,1.090850830078125 +514,11,11.538815,0.5388145446777344 +515,11,13.131979,2.131978988647461 +516,13,15.922163,2.9221630096435547 +517,13,13.275572,0.2755718231201172 +518,13,15.162136,2.1621360778808594 +519,13,12.801296,0.19870376586914062 +520,10,11.459503,1.459503173828125 +521,13,9.02206,3.9779396057128906 +522,14,15.113146,1.1131458282470703 +523,10,8.463024,1.5369758605957031 +524,13,14.20841,1.2084102630615234 +525,13,17.1663,4.166299819946289 +526,10,10.141966,0.1419658660888672 +527,10,9.435492,0.5645084381103516 +528,10,13.668285,3.668285369873047 +529,10,14.140352,4.140352249145508 +530,15,15.653946,0.6539459228515625 +531,10,14.51837,4.518369674682617 +532,14,15.033562,1.0335617065429688 +533,13,16.888536,3.8885364532470703 +534,17,15.988556,1.011444091796875 +535,14,5.960042,8.039958000183105 +536,10,21.048334,11.048334121704102 +537,16,13.035429,2.964570999145508 +538,13,10.567909,2.4320907592773438 +539,12,12.516352,0.5163516998291016 +540,10,12.710228,2.7102279663085938 +541,13,15.338032,2.338031768798828 +542,10,19.914234,9.914234161376953 +543,12,12.625605,0.6256046295166016 +544,15,14.47999,0.5200099945068359 +545,15,16.29823,1.2982292175292969 +546,15,16.702822,1.7028217315673828 +547,16,16.825563,0.8255634307861328 +548,16,14.199114,1.8008861541748047 +549,15,15.638287,0.6382865905761719 +550,15,13.342579,1.6574211120605469 +551,15,14.411028,0.5889720916748047 +552,15,14.213255,0.7867450714111328 +553,15,18.16134,3.1613407135009766 +554,18,13.770155,4.22984504699707 +555,15,19.060148,4.060148239135742 +556,19,15.223253,3.7767467498779297 +557,15,19.181484,4.181484222412109 +558,16,15.997271,0.0027294158935546875 +559,15,15.089695,0.08969497680664062 +560,15,17.784266,2.7842655181884766 +561,11,21.066595,10.066595077514648 +562,14,15.2355,1.2355003356933594 +563,14,15.436758,1.436758041381836 +564,15,16.475912,1.475912094116211 +565,15,13.732216,1.2677841186523438 +566,15,14.532715,0.46728515625 +567,11,14.779278,3.779277801513672 +568,15,21.58806,6.58806037902832 +569,15,13.209734,1.7902660369873047 
+570,14,14.31875,0.31875038146972656 +571,15,17.091616,2.091615676879883 +572,15,14.890152,0.1098480224609375 +573,15,16.24771,1.2477092742919922 +574,15,18.869106,3.8691062927246094 +575,19,20.538906,1.5389060974121094 +576,16,7.0956616,8.904338359832764 +577,15,18.697306,3.697305679321289 +578,11,13.462826,2.4628257751464844 +579,19,16.86173,2.1382694244384766 +580,15,9.084972,5.915027618408203 +581,15,12.943087,2.056913375854492 +582,16,12.415211,3.584789276123047 +583,13,17.403349,4.403348922729492 +584,15,12.8172245,2.1827754974365234 +585,14,13.213514,0.7864856719970703 +586,14,17.655624,3.6556243896484375 +587,14,12.190662,1.8093376159667969 +588,14,12.0472145,1.9527854919433594 +589,16,13.950285,2.049715042114258 +590,15,16.13295,1.1329498291015625 +591,16,15.507637,0.49236297607421875 +592,16,16.059849,0.059848785400390625 +593,11,9.394882,1.6051177978515625 +594,15,13.867811,1.1321887969970703 +595,15,13.814802,1.1851978302001953 +596,15,17.315342,2.3153419494628906 +597,15,14.132879,0.8671207427978516 +598,11,17.971706,6.971706390380859 +599,15,8.08975,6.910249710083008 +600,15,13.59799,1.4020099639892578 +601,15,19.17187,4.171869277954102 +602,11,14.731106,3.7311058044433594 +603,16,16.815353,0.8153533935546875 +604,17,16.143095,0.8569049835205078 +605,15,15.883633,0.8836326599121094 +606,16,14.105146,1.8948535919189453 +607,16,10.012114,5.987886428833008 +608,15,15.657019,0.6570186614990234 +609,16,17.773901,1.7739009857177734 +610,15,18.420626,3.420625686645508 +611,17,16.01823,0.9817695617675781 +612,15,14.603079,0.39692115783691406 +613,15,16.734108,1.7341079711914062 +614,19,18.884832,0.11516761779785156 +615,15,11.325247,3.674753189086914 +616,16,16.293285,0.2932853698730469 +617,15,12.775135,2.224864959716797 +618,18,12.535721,5.4642791748046875 +619,16,17.03362,1.033620834350586 +620,18,15.388456,2.611543655395508 +621,15,23.222927,8.22292709350586 +622,15,14.253828,0.7461719512939453 +623,16,10.85306,5.146940231323242 +624,15,16.038374,1.0383739471435547 +625,15,15.268852,0.26885223388671875 +626,15,13.851824,1.1481761932373047 +627,17,16.020737,0.9792633056640625 +628,19,18.912262,0.087738037109375 +629,16,14.007795,1.9922046661376953 +630,16,18.938189,2.9381885528564453 +631,12,14.988541,2.9885406494140625 +632,16,14.157328,1.842672348022461 +633,19,14.095001,4.904998779296875 +634,16,15.962736,0.03726387023925781 +635,16,14.687998,1.312002182006836 +636,19,18.671604,0.3283958435058594 +637,16,13.717884,2.282115936279297 +638,16,15.8213005,0.17869949340820312 +639,17,14.9573555,2.042644500732422 +640,17,14.32971,2.670289993286133 +641,19,15.0171795,3.982820510864258 +642,19,15.592314,3.407686233520508 +643,16,18.267317,2.2673168182373047 +644,16,15.657337,0.3426628112792969 +645,17,13.384178,3.6158218383789062 +646,16,17.892693,1.8926925659179688 +647,17,16.809484,0.19051551818847656 +648,17,18.062674,1.062673568725586 +649,16,9.610257,6.389742851257324 +650,20,13.706938,6.293062210083008 +651,16,12.376635,3.6233654022216797 +652,16,12.790178,3.2098217010498047 +653,16,16.426786,0.4267864227294922 +654,16,14.673676,1.326324462890625 +655,16,15.58675,0.4132499694824219 +656,16,16.623024,0.6230239868164062 +657,16,19.582083,3.582082748413086 +658,16,13.890581,2.1094188690185547 +659,16,17.345102,1.345102310180664 +660,16,20.02265,4.022649765014648 +661,16,15.627838,0.372161865234375 +662,20,18.967419,1.0325813293457031 +663,16,15.255657,0.7443428039550781 +664,16,15.965372,0.03462791442871094 +665,16,11.493853,4.506147384643555 +666,16,17.159445,1.159444808959961 
+667,16,10.394423,5.60557746887207 +668,19,16.179232,2.820768356323242 +669,16,15.941217,0.05878257751464844 +670,17,14.964565,2.0354347229003906 +671,17,14.499649,2.5003509521484375 +672,12,14.289455,2.2894554138183594 +673,17,14.554613,2.4453868865966797 +674,12,16.083927,4.083927154541016 +675,13,17.81594,4.815940856933594 +676,18,10.877563,7.1224365234375 +677,13,13.185354,0.18535423278808594 +678,17,17.12675,0.12675094604492188 +679,16,13.985439,2.0145606994628906 +680,15,12.588465,2.4115352630615234 +681,16,13.789547,2.2104530334472656 +682,16,19.906303,3.9063034057617188 +683,12,13.118271,1.1182708740234375 +684,15,18.314312,3.314311981201172 +685,12,19.99147,7.9914703369140625 +686,15,8.568678,6.43132209777832 +687,15,13.438446,1.561553955078125 +688,12,15.460468,3.460468292236328 +689,17,17.083641,0.08364105224609375 +690,13,15.518576,2.518575668334961 +691,16,15.228783,0.7712173461914062 +692,15,15.913193,0.9131927490234375 +693,18,20.30947,2.309469223022461 +694,17,15.909744,1.0902557373046875 +695,15,18.241957,3.2419567108154297 +696,14,11.094875,2.9051246643066406 +697,18,17.958517,0.04148292541503906 +698,15,16.14341,1.1434097290039062 +699,15,4.1033416,10.896658420562744 +700,15,15.409519,0.4095191955566406 +701,13,12.3953,0.6047000885009766 +702,17,18.227833,1.2278327941894531 +703,16,16.879847,0.8798465728759766 +704,20,23.611046,3.6110458374023438 +705,17,13.710581,3.289419174194336 +706,12,18.593964,6.593963623046875 +707,16,14.531799,1.46820068359375 +708,17,15.836163,1.1638374328613281 +709,18,19.535395,1.5353946685791016 +710,18,17.671726,0.3282737731933594 +711,16,12.973072,3.026927947998047 +712,12,17.149418,5.149417877197266 +713,18,13.723789,4.276210784912109 +714,15,10.299421,4.700578689575195 +715,17,14.702042,2.2979583740234375 +716,15,16.988789,1.9887886047363281 +717,16,15.454945,0.5450553894042969 +718,15,20.179823,5.17982292175293 +719,13,19.894707,6.894706726074219 +720,16,18.565342,2.5653419494628906 +721,12,15.299442,3.2994422912597656 +722,15,16.460505,1.4605045318603516 +723,17,12.632219,4.367780685424805 +724,15,16.848314,1.8483142852783203 +725,16,15.617537,0.3824634552001953 +726,17,16.887135,0.11286544799804688 +727,13,12.045279,0.9547214508056641 +728,16,14.61392,1.3860797882080078 +729,16,18.257359,2.2573585510253906 +730,16,16.033401,0.0334014892578125 +731,16,18.242455,2.2424545288085938 +732,19,19.441425,0.4414253234863281 +733,17,14.817974,2.182025909423828 +734,17,18.166626,1.1666259765625 +735,16,16.98024,0.9802398681640625 +736,20,17.743183,2.256816864013672 +737,16,17.363525,1.363525390625 +738,16,10.919199,5.080801010131836 +739,16,15.920355,0.07964515686035156 +740,16,17.52826,1.52825927734375 +741,17,14.083353,2.916646957397461 +742,16,17.721777,1.7217769622802734 +743,15,14.014877,0.9851226806640625 +744,16,14.628992,1.3710079193115234 +745,16,23.325483,7.325483322143555 +746,17,15.9814205,1.0185794830322266 +747,17,16.590223,0.4097766876220703 +748,15,11.351719,3.6482810974121094 +749,16,17.383675,1.3836746215820312 +750,16,12.721327,3.2786731719970703 +751,15,15.215963,0.21596336364746094 +752,16,16.815275,0.8152751922607422 +753,17,16.265059,0.7349414825439453 +754,13,14.464733,1.4647331237792969 +755,16,13.856178,2.1438217163085938 +756,17,17.008741,0.008741378784179688 +757,16,19.36488,3.364879608154297 +758,17,14.353781,2.646219253540039 +759,17,10.706511,6.293489456176758 +760,15,13.33769,1.6623096466064453 +761,17,13.019682,3.980318069458008 +762,17,19.918335,2.9183349609375 +763,20,19.328274,0.6717262268066406 
+764,16,18.870428,2.8704280853271484 +765,17,15.486242,1.5137577056884766 +766,17,13.417761,3.5822391510009766 +767,20,18.323706,1.6762943267822266 +768,19,13.817425,5.182575225830078 +769,15,16.147844,1.1478443145751953 +770,17,12.888233,4.111766815185547 +771,21,15.340221,5.659778594970703 +772,17,16.956778,0.04322242736816406 +773,18,16.07544,1.924560546875 +774,20,17.591202,2.4087982177734375 +775,15,15.74333,0.7433300018310547 +776,20,20.015047,0.015047073364257812 +777,19,20.005182,1.0051822662353516 +778,15,15.055058,0.055057525634765625 +779,18,15.730249,2.2697505950927734 +780,17,12.348167,4.651832580566406 +781,16,16.748592,0.7485923767089844 +782,20,16.780008,3.219991683959961 +783,20,21.530655,1.5306549072265625 +784,20,13.668386,6.331613540649414 +785,17,13.637587,3.3624134063720703 +786,17,13.855188,3.1448116302490234 +787,19,24.994974,5.994974136352539 +788,18,16.477398,1.5226020812988281 +789,18,14.989077,3.010923385620117 +790,16,16.146328,0.14632797241210938 +791,17,18.360092,1.3600921630859375 +792,13,12.568888,0.43111228942871094 +793,20,15.53092,4.469079971313477 +794,19,13.668112,5.331888198852539 +795,14,6.539258,7.460741996765137 +796,18,14.928631,3.071369171142578 +797,18,17.192652,0.8073482513427734 +798,20,17.11476,2.8852405548095703 +799,18,17.798374,0.20162582397460938 +800,20,17.763325,2.236675262451172 +801,16,12.778278,3.221721649169922 +802,21,17.210386,3.789613723754883 +803,17,14.455795,2.5442047119140625 +804,20,17.903105,2.096895217895508 +805,18,15.0375595,2.9624404907226562 +806,18,17.243103,0.75689697265625 +807,20,13.813372,6.186628341674805 +808,20,20.946333,0.9463329315185547 +809,18,21.473225,3.473224639892578 +810,17,17.564554,0.5645542144775391 +811,20,16.384014,3.615985870361328 +812,16,7.5910573,8.408942699432373 +813,20,20.395208,0.39520835876464844 +814,21,9.159401,11.840599060058594 +815,17,13.856739,3.143260955810547 +816,13,5.973149,7.026851177215576 +817,21,21.009901,0.009901046752929688 +818,13,15.404465,2.4044647216796875 +819,17,19.904568,2.9045677185058594 +820,17,17.22834,0.22834014892578125 +821,17,18.920317,1.9203166961669922 +822,13,13.043957,0.043956756591796875 +823,17,16.632885,0.3671150207519531 +824,17,14.679071,2.3209285736083984 +825,21,21.490995,0.4909954071044922 +826,17,18.720879,1.7208786010742188 +827,17,17.357718,0.35771751403808594 +828,21,17.89916,3.100839614868164 +829,21,16.811888,4.188112258911133 +830,17,14.830048,2.169952392578125 +831,21,16.86181,4.138189315795898 +832,17,13.321272,3.6787281036376953 +833,17,16.991444,0.008556365966796875 +834,17,14.149977,2.8500232696533203 +835,21,12.629875,8.370124816894531 +836,17,16.475647,0.52435302734375 +837,18,5.6750693,12.324930667877197 +838,18,19.666336,1.6663360595703125 +839,21,25.791645,4.791645050048828 +840,18,13.8604145,4.139585494995117 +841,22,27.654654,5.654653549194336 +842,22,17.986235,4.013765335083008 +843,22,20.609592,1.3904075622558594 +844,14,18.99434,4.994340896606445 +845,18,21.630625,3.630624771118164 +846,18,24.151285,6.151285171508789 +847,18,18.415302,0.4153022766113281 +848,14,18.078594,4.078594207763672 +849,18,14.2953,3.704700469970703 +850,18,14.925438,3.0745620727539062 +851,22,16.763294,5.236705780029297 +852,18,21.461697,3.4616966247558594 +853,18,20.610922,2.610921859741211 +854,18,18.15197,0.15196990966796875 +855,22,18.88994,3.1100597381591797 +856,18,20.326887,2.3268871307373047 +857,18,16.940613,1.05938720703125 +858,19,20.117672,1.1176719665527344 +859,21,13.98226,7.017740249633789 +860,22,22.516294,0.5162944793701172 
+861,21,18.29391,2.7060909271240234 +862,18,15.221378,2.7786216735839844 +863,22,21.24546,0.7545394897460938 +864,18,20.285122,2.2851219177246094 +865,17,16.001179,0.9988212585449219 +866,18,15.85232,2.1476802825927734 +867,19,15.767012,3.2329883575439453 +868,14,15.626066,1.6260662078857422 +869,14,17.827318,3.8273181915283203 +870,19,17.616404,1.383596420288086 +871,22,20.017376,1.9826240539550781 +872,18,18.865835,0.8658351898193359 +873,18,14.498665,3.5013351440429688 +874,22,15.014692,6.985307693481445 +875,22,14.382643,7.61735725402832 +876,19,17.541887,1.4581127166748047 +877,18,14.170614,3.829385757446289 +878,18,14.827337,3.1726627349853516 +879,14,23.008535,9.008535385131836 +880,14,20.00805,6.008050918579102 +881,18,20.686096,2.68609619140625 +882,18,17.987698,0.012302398681640625 +883,18,17.23274,0.7672595977783203 +884,14,15.294735,1.2947349548339844 +885,18,18.90471,0.9047107696533203 +886,22,19.03796,2.9620399475097656 +887,22,22.593304,0.5933036804199219 +888,14,19.312906,5.312906265258789 +889,18,19.160645,1.16064453125 +890,15,21.693111,6.693111419677734 +891,21,19.783915,1.216085433959961 +892,20,19.270752,0.729248046875 +893,21,15.724806,5.27519416809082 +894,18,19.265915,1.2659149169921875 +895,18,22.286959,4.286958694458008 +896,16,11.866524,4.133476257324219 +897,18,17.03131,0.9686908721923828 +898,16,17.039867,1.0398674011230469 +899,14,15.581291,1.5812911987304688 +900,21,16.620754,4.379245758056641 +901,14,19.965311,5.965311050415039 +902,17,16.84655,0.15345001220703125 +903,18,17.709095,0.2909049987792969 +904,16,15.80905,0.1909503936767578 +905,15,18.4346,3.434600830078125 +906,16,18.538853,2.5388526916503906 +907,15,16.8723,1.8722991943359375 +908,18,18.624285,0.6242847442626953 +909,19,21.325413,2.3254127502441406 +910,20,16.931498,3.068502426147461 +911,16,13.776966,2.223033905029297 +912,16,11.740097,4.2599029541015625 +913,14,20.15473,6.154729843139648 +914,21,10.645586,10.354413986206055 +915,19,15.361189,3.6388111114501953 +916,18,20.087297,2.0872974395751953 +917,19,19.760477,0.7604770660400391 +918,18,15.79204,2.2079601287841797 +919,14,14.82263,0.8226299285888672 +920,21,12.1971035,8.802896499633789 +921,16,14.988104,1.0118961334228516 +922,19,20.658598,1.6585979461669922 +923,18,20.551332,2.551332473754883 +924,1,2.916849,1.91684889793396 +925,1,10.593979,9.593978881835938 +926,1,2.3861153,1.386115312576294 +927,1,2.4582283,1.458228349685669 +928,1,1.5348873,0.5348873138427734 +929,2,1.6917617,0.30823826789855957 +930,1,1.7392328,0.7392327785491943 +931,1,3.6209767,2.620976686477661 +932,1,1.7634106,0.7634105682373047 +933,1,1.3757758,0.3757758140563965 +934,1,4.6449313,3.6449313163757324 +935,1,1.2837927,0.28379273414611816 +936,1,1.2618845,0.2618844509124756 +937,1,1.7326987,0.7326986789703369 +938,1,1.5962868,0.5962867736816406 +939,1,1.7570369,0.7570369243621826 +940,1,1.6867948,0.6867947578430176 +941,1,2.0275145,1.0275144577026367 +942,1,1.7671702,0.7671701908111572 +943,1,1.2981179,0.29811787605285645 +944,1,2.2019238,1.2019238471984863 +945,1,2.1042724,1.1042723655700684 +946,2,3.388183,1.3881831169128418 +947,1,0.60580635,0.3941936492919922 +948,1,1.3005288,0.30052876472473145 +949,1,2.1519165,1.15191650390625 +950,1,1.5601058,0.5601058006286621 +951,1,1.2216372,0.22163724899291992 +952,1,0.6917119,0.3082880973815918 +953,1,1.3914647,0.3914647102355957 +954,1,5.0032063,4.003206253051758 +955,1,1.6044285,0.6044285297393799 +956,1,1.8216782,0.8216781616210938 +957,1,1.8465343,0.846534252166748 +958,1,3.0920908,2.0920908451080322 
+959,1,1.0249515,0.024951457977294922 +960,1,1.6607656,0.6607656478881836 +961,1,2.5112998,1.5112998485565186 +962,4,3.048446,0.9515540599822998 +963,1,1.7964761,0.7964761257171631 +964,1,4.2434773,3.2434773445129395 +965,1,1.4376998,0.4376997947692871 +966,1,1.8853879,0.8853878974914551 +967,1,1.382406,0.38240599632263184 +968,1,1.1965053,0.19650530815124512 +969,1,1.2005298,0.2005298137664795 +970,1,1.077563,0.07756304740905762 +971,1,3.6177907,2.617790699005127 +972,2,2.2738304,0.2738304138183594 +973,1,2.131583,1.1315829753875732 +974,2,3.7190378,1.7190377712249756 +975,2,1.3229673,0.6770327091217041 +976,2,4.0049267,2.0049266815185547 +977,1,2.7550948,1.7550947666168213 +978,1,1.9686258,0.9686257839202881 +979,1,3.1499708,2.149970769882202 +980,1,1.5897865,0.5897865295410156 +981,1,0.47446632,0.5255336761474609 +982,1,22.26915,21.269149780273438 +983,2,1.2204893,0.7795107364654541 +984,2,1.698566,0.3014340400695801 +985,3,2.0785701,0.9214298725128174 +986,1,2.106966,1.1069660186767578 +987,1,1.4609623,0.46096229553222656 +988,1,3.0268567,2.0268566608428955 +989,4,1.4221435,2.5778565406799316 +990,1,1.3689318,0.36893177032470703 +991,3,2.2609518,0.7390482425689697 +992,2,1.2299027,0.7700972557067871 +993,1,1.4289589,0.4289588928222656 +994,2,1.2248363,0.7751636505126953 +995,3,1.4856188,1.5143811702728271 +996,1,0.93259764,0.06740236282348633 +997,3,2.9453335,0.05466651916503906 +998,4,2.0981567,1.9018433094024658 +999,4,2.90468,1.0953199863433838 +1000,3,2.1729243,0.827075719833374 +1001,1,1.7897642,0.7897641658782959 +1002,2,1.6578388,0.3421611785888672 +1003,1,1.5908554,0.5908553600311279 +1004,2,2.5704732,0.5704731941223145 +1005,1,0.4870782,0.5129218101501465 +1006,1,1.1981792,0.1981792449951172 +1007,2,2.373512,0.37351202964782715 +1008,1,1.6579199,0.6579198837280273 +1009,1,0.8514261,0.1485738754272461 +1010,3,2.0267742,0.9732258319854736 +1011,2,1.5413396,0.458660364151001 +1012,2,3.6471362,1.6471362113952637 +1013,1,1.6497796,0.6497795581817627 +1014,2,1.1876287,0.8123712539672852 +1015,3,2.6690435,0.33095645904541016 +1016,1,0.66511726,0.3348827362060547 +1017,2,2.725855,0.7258551120758057 +1018,3,1.4653945,1.5346055030822754 +1019,1,2.5318468,1.5318467617034912 +1020,4,5.2636356,1.2636356353759766 +1021,1,1.6157906,0.615790605545044 +1022,2,3.8335059,1.8335058689117432 +1023,2,2.8369572,0.8369572162628174 +1024,2,2.8256893,0.8256893157958984 +1025,1,1.2766621,0.27666211128234863 +1026,3,1.9582338,1.0417661666870117 +1027,1,2.5856433,1.5856432914733887 +1028,4,4.375699,0.3756990432739258 +1029,1,2.0389807,1.0389807224273682 +1030,3,2.26111,0.7388899326324463 +1031,3,2.1142988,0.8857011795043945 +1032,2,1.0589046,0.9410953521728516 +1033,3,1.4199078,1.580092191696167 +1034,1,1.1493359,0.1493358612060547 +1035,1,3.173648,2.1736481189727783 +1036,4,4.5568132,0.5568132400512695 +1037,1,1.2106543,0.21065425872802734 +1038,1,1.2694731,0.2694730758666992 +1039,2,1.6223693,0.37763071060180664 +1040,3,2.2836747,0.7163252830505371 +1041,1,1.2869718,0.2869718074798584 +1042,1,1.1092215,0.1092214584350586 +1043,1,1.8284733,0.8284733295440674 +1044,1,1.2284462,0.22844624519348145 +1045,1,1.0974793,0.09747934341430664 +1046,1,1.5894217,0.5894217491149902 +1047,1,2.8301961,1.8301961421966553 +1048,1,2.3110087,1.3110086917877197 +1049,1,3.2370157,2.237015724182129 +1050,4,2.8604844,1.1395156383514404 +1051,1,1.3610067,0.3610067367553711 +1052,4,2.1202698,1.879730224609375 +1053,4,2.1388714,1.861128568649292 +1054,1,1.5393746,0.539374589920044 +1055,1,1.9047196,0.9047195911407471 
+1056,1,1.472899,0.4728989601135254 +1057,1,1.9162874,0.9162874221801758 +1058,2,2.3110917,0.31109166145324707 +1059,1,2.978873,1.9788730144500732 +1060,1,2.151778,1.151777982711792 +1061,1,0.83821034,0.1617896556854248 +1062,19,15.365957,3.634042739868164 +1063,20,19.47697,0.5230293273925781 +1064,22,6.375929,15.62407112121582 +1065,20,18.92198,1.0780200958251953 +1066,15,19.886484,4.886484146118164 +1067,19,16.241076,2.7589244842529297 +1068,22,16.155226,5.84477424621582 +1069,21,14.873209,6.126791000366211 +1070,20,13.835142,6.164857864379883 +1071,21,17.42059,3.5794105529785156 +1072,21,19.510126,1.4898738861083984 +1073,20,19.730017,0.26998329162597656 +1074,15,13.671234,1.328765869140625 +1075,20,13.098417,6.901582717895508 +1076,18,16.209305,1.7906951904296875 +1077,17,20.782452,3.782451629638672 +1078,17,18.798056,1.798055648803711 +1079,21,19.713694,1.286306381225586 +1080,17,19.28267,2.2826690673828125 +1081,21,20.239367,0.7606334686279297 +1082,19,19.015617,0.01561737060546875 +1083,20,21.159346,1.1593456268310547 +1084,20,18.280998,1.7190017700195312 +1085,20,17.837746,2.1622543334960938 +1086,18,19.432947,1.4329471588134766 +1087,19,16.386354,2.613645553588867 +1088,20,20.685125,0.6851253509521484 +1089,22,22.521786,0.5217857360839844 +1090,22,18.9029,3.0970993041992188 +1091,19,15.730846,3.269153594970703 +1092,17,17.948303,0.94830322265625 +1093,17,24.212854,7.212854385375977 +1094,22,16.340185,5.659814834594727 +1095,19,22.439552,3.4395523071289062 +1096,20,16.652426,3.347574234008789 +1097,22,15.153057,6.846942901611328 +1098,15,14.428827,0.5711727142333984 +1099,17,13.992371,3.00762939453125 +1100,21,16.999006,4.000993728637695 +1101,19,15.107611,3.8923892974853516 +1102,20,18.50453,1.4954700469970703 +1103,21,20.53343,0.4665699005126953 +1104,19,19.22873,0.228729248046875 +1105,20,17.19227,2.807729721069336 +1106,21,19.284235,1.7157649993896484 +1107,17,19.178268,2.1782684326171875 +1108,22,18.519619,3.4803810119628906 +1109,19,14.739395,4.2606048583984375 +1110,17,20.179577,3.179576873779297 +1111,17,13.742889,3.257110595703125 +1112,21,19.060816,1.9391841888427734 +1113,21,17.465576,3.534423828125 +1114,15,15.885582,0.8855819702148438 +1115,22,21.092775,0.9072246551513672 +1116,22,15.647581,6.352418899536133 +1117,17,13.525719,3.4742813110351562 +1118,19,17.546352,1.4536476135253906 +1119,20,20.18873,0.18873023986816406 +1120,15,18.73419,3.734189987182617 +1121,22,17.775492,4.224508285522461 +1122,21,21.66273,0.6627292633056641 +1123,16,13.872852,2.127147674560547 +1124,17,16.886438,0.11356163024902344 +1125,21,15.867149,5.132850646972656 +1126,21,16.04356,4.956439971923828 +1127,22,18.59965,3.400350570678711 +1128,23,21.256313,1.7436866760253906 +1129,20,15.325874,4.674125671386719 +1130,21,18.390455,2.6095447540283203 +1131,23,19.730072,3.269927978515625 +1132,17,16.462921,0.537078857421875 +1133,16,14.356283,1.643716812133789 +1134,22,17.69437,4.305629730224609 +1135,17,18.488226,1.4882259368896484 +1136,22,20.947565,1.0524349212646484 +1137,20,19.102198,0.8978023529052734 +1138,16,16.81353,0.8135299682617188 +1139,23,20.786901,2.2130985260009766 +1140,18,18.247677,0.24767684936523438 +1141,20,15.821753,4.178247451782227 +1142,19,15.236477,3.7635231018066406 +1143,23,16.015274,6.9847259521484375 +1144,19,14.799557,4.200443267822266 +1145,23,17.715881,5.28411865234375 +1146,23,19.834906,3.1650943756103516 +1147,23,21.575333,1.4246673583984375 +1148,16,15.446056,0.5539436340332031 +1149,21,20.958315,0.04168510437011719 +1150,20,16.544664,3.4553356170654297 
+1151,16,20.475708,4.4757080078125 +1152,23,17.536991,5.463008880615234 +1153,21,17.010458,3.989542007446289 +1154,22,23.30701,1.3070106506347656 +1155,23,19.639662,3.3603382110595703 +1156,20,13.778669,6.221330642700195 +1157,20,18.497486,1.5025138854980469 +1158,21,23.007702,2.007701873779297 +1159,21,13.415911,7.584089279174805 +1160,20,21.503773,1.5037727355957031 +1161,16,17.137447,1.1374473571777344 +1162,21,20.119942,0.8800582885742188 +1163,23,18.344597,4.655403137207031 +1164,20,18.766747,1.2332534790039062 +1165,23,22.79091,0.2090892791748047 +1166,21,16.342262,4.657737731933594 +1167,23,16.89896,6.101039886474609 +1168,21,15.95912,5.04088020324707 +1169,19,13.573629,5.426370620727539 +1170,23,14.008928,8.991071701049805 +1171,18,19.850224,1.8502235412597656 +1172,23,18.233232,4.766767501831055 +1173,19,18.355177,0.6448230743408203 +1174,23,20.613121,2.3868789672851562 +1175,18,19.310064,1.3100643157958984 +1176,23,17.406517,5.593482971191406 +1177,20,20.791157,0.7911567687988281 +1178,23,18.795458,4.20454216003418 +1179,21,19.75169,1.2483100891113281 +1180,21,19.682512,1.3174877166748047 +1181,23,20.42611,2.5738906860351562 +1182,22,19.052958,2.947042465209961 +1183,20,19.6588,0.3411998748779297 +1184,23,15.930117,7.069883346557617 +1185,22,15.477535,6.522464752197266 +1186,22,15.338541,6.661458969116211 +1187,17,15.838808,1.1611919403076172 +1188,23,14.081476,8.918523788452148 +1189,21,19.314941,1.68505859375 +1190,20,14.584162,5.415838241577148 +1191,18,17.258211,0.7417888641357422 +1192,22,19.430702,2.5692977905273438 +1193,21,16.07579,4.9242095947265625 +1194,21,17.442598,3.557401657104492 +1195,23,21.870205,1.1297950744628906 +1196,23,19.91525,3.084749221801758 +1197,21,18.641582,2.358417510986328 +1198,19,18.880259,0.11974143981933594 +1199,24,16.049355,7.950645446777344 +1200,23,20.598534,2.4014663696289062 +1201,24,23.257544,0.7424564361572266 +1202,24,18.436247,5.563753128051758 +1203,20,16.004625,3.9953746795654297 +1204,24,16.72258,7.2774200439453125 +1205,21,17.63362,3.3663806915283203 +1206,24,21.74594,2.254060745239258 +1207,19,15.550322,3.449678421020508 +1208,24,19.77089,4.2291107177734375 +1209,18,14.904194,3.095806121826172 +1210,18,20.279545,2.2795448303222656 +1211,19,18.421364,0.5786361694335938 +1212,18,16.72241,1.2775897979736328 +1213,19,18.001352,0.9986476898193359 +1214,18,22.58622,4.586219787597656 +1215,22,21.440094,0.559906005859375 +1216,21,15.314564,5.685436248779297 +1217,21,27.090216,6.090215682983398 +1218,25,16.707037,8.292963027954102 +1219,24,18.578444,5.42155647277832 +1220,22,19.845877,2.154123306274414 +1221,17,19.845972,2.8459720611572266 +1222,17,15.495428,1.5045719146728516 +1223,22,18.089758,3.9102420806884766 +1224,22,18.56296,3.437040328979492 +1225,19,22.459444,3.459444046020508 +1226,21,19.69324,1.3067607879638672 +1227,22,26.25976,4.259759902954102 +1228,21,19.84475,1.1552505493164062 +1229,25,22.245825,2.7541751861572266 +1230,17,20.232128,3.232128143310547 +1231,22,20.542442,1.4575576782226562 +1232,22,22.325373,0.32537269592285156 +1233,21,18.957418,2.042581558227539 +1234,21,18.52548,2.474519729614258 +1235,21,21.746052,0.7460517883300781 +1236,21,26.088423,5.088422775268555 +1237,20,13.605482,6.39451789855957 +1238,24,21.03775,2.962249755859375 +1239,22,14.491354,7.508646011352539 +1240,21,23.343744,2.3437442779541016 +1241,21,17.291422,3.708578109741211 +1242,24,20.277435,3.722564697265625 +1243,21,17.167826,3.832174301147461 +1244,24,9.739557,14.260442733764648 +1245,21,17.571745,3.428255081176758 
+1246,20,20.971767,0.9717674255371094 +1247,21,21.749428,0.7494277954101562 +1248,21,23.785173,2.7851734161376953 +1249,22,27.993464,5.993463516235352 +1250,22,18.257214,3.742786407470703 +1251,23,16.963646,6.036354064941406 +1252,26,19.275352,6.724647521972656 +1253,23,18.028074,4.971925735473633 +1254,20,21.082235,1.082235336303711 +1255,23,19.844776,3.155223846435547 +1256,19,25.278233,6.278232574462891 +1257,23,14.901049,8.09895133972168 +1258,23,19.504026,3.495973587036133 +1259,25,15.151638,9.84836196899414 +1260,22,13.549919,8.450080871582031 +1261,19,21.54552,2.545520782470703 +1262,22,24.236353,2.2363529205322266 +1263,23,18.363705,4.636295318603516 +1264,23,28.303461,5.303461074829102 +1265,24,21.037151,2.962848663330078 +1266,21,19.910976,1.0890235900878906 +1267,22,22.930906,0.9309062957763672 +1268,24,24.619078,0.6190776824951172 +1269,21,19.404016,1.5959835052490234 +1270,20,19.748428,0.2515716552734375 +1271,25,19.33,5.670000076293945 +1272,22,16.910997,5.08900260925293 +1273,22,24.759901,2.7599010467529297 +1274,22,25.55271,3.5527095794677734 +1275,21,17.633974,3.366025924682617 +1276,24,20.294456,3.7055435180664062 +1277,23,26.770597,3.770597457885742 +1278,24,21.88433,2.1156692504882812 +1279,20,24.291338,4.291337966918945 +1280,19,18.98665,0.013349533081054688 +1281,24,34.32972,10.329719543457031 +1282,25,22.992756,2.007244110107422 +1283,20,20.262316,0.2623157501220703 +1284,25,21.019947,3.980052947998047 +1285,22,15.534977,6.465023040771484 +1286,21,17.872545,3.1274547576904297 +1287,24,18.957905,5.042095184326172 +1288,20,21.901802,1.9018020629882812 +1289,18,18.157146,0.15714645385742188 +1290,22,20.209217,1.7907829284667969 +1291,18,22.457634,4.457633972167969 +1292,22,19.716145,2.283855438232422 +1293,18,22.759266,4.759265899658203 +1294,22,20.943521,1.056478500366211 +1295,24,17.888086,6.111913681030273 +1296,22,17.246275,4.753725051879883 +1297,25,27.05464,2.0546398162841797 +1298,22,10.554573,11.445426940917969 +1299,25,16.688358,8.311641693115234 +1300,24,22.15788,1.8421192169189453 +1301,23,17.58161,5.418390274047852 +1302,25,18.706385,6.293615341186523 +1303,26,15.649519,10.350481033325195 +1304,25,17.758247,7.241752624511719 +1305,25,15.405518,9.594482421875 +1306,24,15.12587,8.874130249023438 +1307,25,19.6422,5.357799530029297 +1308,22,16.846977,5.153022766113281 +1309,22,21.477644,0.5223560333251953 +1310,22,18.417719,3.5822811126708984 +1311,18,22.406715,4.406715393066406 +1312,22,22.939814,0.9398136138916016 +1313,22,16.951965,5.04803466796875 +1314,22,16.355211,5.64478874206543 +1315,20,20.20788,0.20788002014160156 +1316,21,20.525682,0.4743175506591797 +1317,24,17.31406,6.685939788818359 +1318,25,17.928791,7.071208953857422 +1319,23,20.063639,2.936361312866211 +1320,22,18.639654,3.3603458404541016 +1321,22,22.094603,0.09460258483886719 +1322,22,20.992886,1.0071144104003906 +1323,18,19.251005,1.2510051727294922 +1324,23,17.688576,5.311424255371094 +1325,22,17.500824,4.499176025390625 +1326,22,17.56003,4.439970016479492 +1327,24,21.399492,2.6005077362060547 +1328,21,20.321686,0.678314208984375 +1329,26,20.183943,5.816057205200195 +1330,22,17.658463,4.3415374755859375 +1331,22,21.318047,0.6819534301757812 +1332,22,21.820301,0.17969894409179688 +1333,26,23.435179,2.564821243286133 +1334,22,18.51678,3.4832191467285156 +1335,21,26.499254,5.49925422668457 +1336,23,22.33546,0.6645393371582031 +1337,23,31.019352,8.019351959228516 +1338,23,19.339859,3.6601409912109375 +1339,23,22.88505,0.11495018005371094 +1340,26,17.724173,8.275827407836914 
+1341,24,23.243359,0.7566413879394531 +1342,20,20.847364,0.8473644256591797 +1343,26,21.720242,4.279758453369141 +1344,20,21.11417,1.1141700744628906 +1345,22,21.401617,0.5983829498291016 +1346,20,21.820955,1.8209552764892578 +1347,20,16.680819,3.319181442260742 +1348,23,22.949339,0.05066108703613281 +1349,21,20.777195,0.22280502319335938 +1350,22,18.936506,3.0634937286376953 +1351,20,25.059834,5.059833526611328 +1352,26,18.876657,7.123342514038086 +1353,21,17.483877,3.516122817993164 +1354,26,28.925436,2.925436019897461 +1355,23,20.219967,2.7800331115722656 +1356,21,21.076525,0.07652473449707031 +1357,22,19.078524,2.921476364135742 +1358,23,23.83301,0.8330097198486328 +1359,22,18.230278,3.7697219848632812 +1360,23,20.406607,2.593393325805664 +1361,21,22.362465,1.3624649047851562 +1362,19,25.469545,6.469545364379883 +1363,22,24.554583,2.5545825958251953 +1364,23,22.497284,0.502716064453125 +1365,25,17.744074,7.255926132202148 +1366,20,18.544666,1.4553337097167969 +1367,23,21.761862,1.238138198852539 +1368,23,19.063948,3.9360523223876953 +1369,22,26.972315,4.972314834594727 +1370,26,20.988575,5.011425018310547 +1371,22,18.441246,3.5587539672851562 +1372,19,24.277046,5.277046203613281 +1373,21,21.920994,0.9209938049316406 +1374,23,19.960365,3.0396347045898438 +1375,25,19.48961,5.51038932800293 +1376,21,20.071688,0.9283123016357422 +1377,22,22.285564,0.2855644226074219 +1378,25,20.513088,4.486911773681641 +1379,20,15.841347,4.158653259277344 +1380,21,18.586744,2.4132556915283203 +1381,23,18.840893,4.159107208251953 +1382,20,15.998529,4.001470565795898 +1383,26,24.102303,1.8976974487304688 +1384,19,20.959867,1.9598674774169922 +1385,23,19.926884,3.0731163024902344 +1386,27,20.968508,6.031492233276367 +1387,21,19.639563,1.3604373931884766 +1388,21,22.413864,1.4138641357421875 +1389,24,18.888182,5.111818313598633 +1390,26,19.562407,6.437593460083008 +1391,19,18.250856,0.7491436004638672 +1392,23,20.333359,2.6666412353515625 +1393,23,20.014946,2.9850540161132812 +1394,20,20.951244,0.9512443542480469 +1395,23,16.086506,6.913494110107422 +1396,23,20.084444,2.915555953979492 +1397,26,23.797482,2.2025184631347656 +1398,24,23.49928,0.5007190704345703 +1399,23,21.372963,1.6270370483398438 +1400,19,18.881891,0.11810874938964844 +1401,23,25.900984,2.9009838104248047 +1402,19,17.740831,1.2591686248779297 +1403,21,20.86002,0.13998031616210938 +1404,24,20.62517,3.3748302459716797 +1405,20,15.133251,4.866748809814453 +1406,23,21.35912,1.6408805847167969 +1407,23,20.96273,2.0372695922851562 +1408,25,21.092676,3.9073238372802734 +1409,22,20.207268,1.7927322387695312 +1410,23,19.529768,3.4702320098876953 +1411,24,19.898973,4.10102653503418 +1412,24,24.321192,0.32119178771972656 +1413,27,23.383795,3.6162052154541016 +1414,26,19.34504,6.654960632324219 +1415,22,19.809044,2.1909561157226562 +1416,23,25.053476,2.053476333618164 +1417,26,22.312899,3.687101364135742 +1418,23,16.947823,6.052177429199219 +1419,23,22.832712,0.16728782653808594 +1420,27,23.611204,3.388795852661133 +1421,24,21.706148,2.293851852416992 +1422,23,18.175974,4.824026107788086 +1423,23,22.453411,0.5465888977050781 +1424,25,22.930962,2.0690383911132812 +1425,23,22.940817,0.05918312072753906 +1426,23,23.32715,0.3271503448486328 +1427,23,21.484991,1.5150089263916016 +1428,26,29.33167,3.3316707611083984 +1429,24,19.209318,4.790681838989258 +1430,23,19.204874,3.795125961303711 +1431,22,21.935835,0.06416511535644531 +1432,24,27.820818,3.8208179473876953 +1433,24,21.312126,2.6878738403320312 +1434,23,22.257153,0.7428474426269531 
+1435,23,20.65661,2.3433895111083984 +1436,23,23.302704,0.302703857421875 +1437,23,23.087446,0.08744621276855469 +1438,23,21.31031,1.6896896362304688 +1439,22,21.94674,0.05326080322265625 +1440,22,21.740398,0.2596015930175781 +1441,24,25.237917,1.2379169464111328 +1442,23,21.95766,1.0423393249511719 +1443,21,16.393742,4.606258392333984 +1444,27,25.55005,1.4499492645263672 +1445,26,26.246218,0.2462177276611328 +1446,25,25.96387,0.9638690948486328 +1447,20,14.127605,5.872394561767578 +1448,26,26.493784,0.49378395080566406 +1449,24,22.435425,1.5645751953125 +1450,23,16.796644,6.20335578918457 +1451,20,18.25655,1.7434501647949219 +1452,24,18.732271,5.267728805541992 +1453,23,23.162691,0.1626911163330078 +1454,24,22.177559,1.8224411010742188 +1455,24,31.291857,7.29185676574707 +1456,24,20.637308,3.362691879272461 +1457,21,20.950975,0.04902458190917969 +1458,25,25.716673,0.7166728973388672 +1459,21,29.302961,8.302961349487305 +1460,26,25.65218,0.3478202819824219 +1461,21,19.50277,1.4972305297851562 +1462,22,23.268173,1.2681732177734375 +1463,28,22.565601,5.434398651123047 +1464,24,26.885323,2.8853225708007812 +1465,20,18.523382,1.4766178131103516 +1466,23,18.101599,4.898401260375977 +1467,20,23.384497,3.3844966888427734 +1468,24,21.729683,2.2703170776367188 +1469,28,30.103584,2.1035842895507812 +1470,24,18.067497,5.932502746582031 +1471,22,19.534863,2.465137481689453 +1472,24,22.369379,1.6306209564208984 +1473,24,25.605637,1.6056365966796875 +1474,27,29.112814,2.112813949584961 +1475,22,18.453684,3.546316146850586 +1476,20,22.1233,2.123300552368164 +1477,23,22.82639,0.17361068725585938 +1478,21,19.562462,1.4375381469726562 +1479,24,21.861746,2.138254165649414 +1480,24,21.60746,2.3925399780273438 +1481,21,16.329466,4.670534133911133 +1482,23,19.164854,3.835145950317383 +1483,24,19.360119,4.639881134033203 +1484,28,18.992552,9.007448196411133 +1485,24,18.96147,5.038530349731445 +1486,20,18.714724,1.2852764129638672 +1487,25,24.431553,0.5684471130371094 +1488,21,20.895372,0.10462760925292969 +1489,23,18.250084,4.749916076660156 +1490,20,18.126926,1.8730735778808594 +1491,27,18.858559,8.141441345214844 +1492,24,19.303432,4.696567535400391 +1493,24,30.711723,6.711723327636719 +1494,25,24.07134,0.9286594390869141 +1495,28,16.536484,11.463516235351562 +1496,20,17.625097,2.3749027252197266 +1497,23,16.189999,6.810001373291016 +1498,23,19.335365,3.6646347045898438 +1499,24,20.858578,3.1414222717285156 +1500,20,21.7397,1.7397003173828125 +1501,26,23.745146,2.254854202270508 +1502,21,18.841887,2.158113479614258 +1503,24,21.264343,2.73565673828125 +1504,23,21.302778,1.6972217559814453 +1505,24,24.319918,0.3199176788330078 +1506,24,21.233171,2.7668285369873047 +1507,24,22.61719,1.3828105926513672 +1508,25,31.201097,6.20109748840332 +1509,27,18.718006,8.281993865966797 +1510,24,23.450945,0.5490550994873047 +1511,20,19.214268,0.7857322692871094 +1512,25,24.2484,0.7516002655029297 +1513,25,23.658932,1.3410682678222656 +1514,23,19.55864,3.4413604736328125 +1515,24,20.2237,3.7763004302978516 +1516,21,21.762732,0.7627315521240234 +1517,24,18.806074,5.193925857543945 +1518,25,19.573544,5.426456451416016 +1519,24,21.372795,2.6272048950195312 +1520,24,21.745226,2.2547740936279297 +1521,22,16.805634,5.194366455078125 +1522,27,21.220175,5.779825210571289 +1523,26,23.322426,2.6775741577148438 +1524,24,17.726805,6.273195266723633 +1525,24,16.692995,7.307004928588867 +1526,23,20.665976,2.334024429321289 +1527,23,16.28115,6.718849182128906 +1528,22,20.269768,1.7302322387695312 +1529,24,25.806301,1.8063011169433594 
[... roughly 2,000 added rows of per-sample prediction results (test IDs 1530-3538) elided for readability; each row follows the Results_csv format ID,age,age_p,error — sample index, ground-truth age, predicted age, and absolute prediction error ...]
+3539,5,5.237456,0.2374558448791504 +3540,2,2.655738,0.655738115310669 +3541,2,1.83444,0.16556000709533691 +3542,2,2.7009454,0.7009453773498535 +3543,4,5.020499,1.0204992294311523 +3544,4,7.9592485,3.9592485427856445 +3545,2,3.6709251,1.6709251403808594 +3546,5,2.0928152,2.9071848392486572 +3547,2,5.784724,3.784724235534668 +3548,2,2.7733636,0.7733635902404785 +3549,2,7.9187326,5.918732643127441 +3550,4,4.8461356,0.8461356163024902 +3551,2,2.8748255,0.8748254776000977 +3552,2,1.7794273,0.22057271003723145 +3553,2,2.561163,0.5611629486083984 +3554,2,1.0977278,0.9022722244262695 +3555,5,3.1334066,1.866593360900879 +3556,3,2.1123648,0.8876352310180664 +3557,3,9.948509,6.948509216308594 +3558,2,3.7333379,1.7333378791809082 +3559,2,4.056609,2.0566091537475586 +3560,6,8.098988,2.098987579345703 +3561,4,1.7273936,2.272606372833252 +3562,2,7.4775214,5.4775214195251465 +3563,2,1.3389015,0.6610984802246094 +3564,5,3.9986868,1.0013132095336914 +3565,3,3.964331,0.9643309116363525 +3566,3,1.5440564,1.4559435844421387 +3567,5,6.71028,1.710279941558838 +3568,4,3.896165,0.1038351058959961 +3569,4,7.9174685,3.917468547821045 +3570,1,0.9867594,0.013240575790405273 +3571,2,1.9482906,0.05170941352844238 +3572,3,5.9133587,2.913358688354492 +3573,1,1.5533099,0.5533099174499512 +3574,3,4.2348948,1.2348947525024414 +3575,4,7.644334,3.644333839416504 +3576,4,3.4452674,0.5547325611114502 +3577,9,6.077421,2.922578811645508 +3578,3,4.281892,1.2818918228149414 +3579,3,8.811365,5.811365127563477 +3580,5,4.5017147,0.49828529357910156 +3581,7,5.836928,1.163072109222412 +3582,2,4.163692,2.163691997528076 +3583,2,2.9129825,0.9129824638366699 +3584,2,3.6362388,1.6362388134002686 +3585,1,5.7855377,4.7855377197265625 +3586,1,1.7704606,0.7704606056213379 +3587,4,4.0170507,0.017050743103027344 +3588,9,4.8007526,4.199247360229492 +3589,2,2.096829,0.09682893753051758 +3590,8,2.6042364,5.395763635635376 +3591,4,2.5900075,1.4099924564361572 +3592,3,2.6148155,0.38518452644348145 +3593,1,2.0943701,1.0943701267242432 +3594,3,2.361518,0.6384820938110352 +3595,2,2.9531634,0.9531633853912354 +3596,4,2.8756762,1.124323844909668 +3597,3,5.0296507,2.0296506881713867 +3598,2,1.1531668,0.8468332290649414 +3599,2,1.7566464,0.24335360527038574 +3600,2,2.185268,0.18526792526245117 +3601,4,2.658557,1.3414430618286133 +3602,1,1.659473,0.6594729423522949 +3603,3,2.3461955,0.6538045406341553 +3604,3,2.7349079,0.265092134475708 +3605,2,3.0802708,1.080270767211914 +3606,4,1.8710206,2.128979444503784 +3607,4,6.3713603,2.3713603019714355 +3608,6,2.328918,3.671082019805908 +3609,4,5.4968796,1.4968795776367188 +3610,6,4.8312116,1.1687884330749512 +3611,2,6.7476363,4.747636318206787 +3612,1,1.1280789,0.12807893753051758 +3613,1,4.5665507,3.5665507316589355 +3614,56,46.0533,9.946701049804688 +3615,63,47.000893,15.999107360839844 +3616,59,46.925346,12.074653625488281 +3617,58,50.4983,7.501701354980469 +3618,59,48.65104,10.348960876464844 +3619,61,46.79762,14.202381134033203 +3620,56,49.88268,6.117321014404297 +3621,58,50.44403,7.55596923828125 +3622,56,50.38599,5.614009857177734 +3623,62,49.489677,12.510322570800781 +3624,58,50.731792,7.268207550048828 +3625,60,50.847874,9.15212631225586 +3626,64,50.93973,13.06026840209961 +3627,65,56.4995,8.500499725341797 +3628,64,49.982292,14.017707824707031 +3629,64,51.086555,12.913444519042969 +3630,60,52.87579,7.124210357666016 +3631,66,54.40844,11.591560363769531 +3632,63,50.608215,12.39178466796875 +3633,66,47.705048,18.294952392578125 +3634,59,52.47036,6.529640197753906 +3635,64,39.21364,24.786361694335938 
+3636,64,45.397354,18.602645874023438 +3637,65,56.136215,8.863784790039062 +3638,64,55.00158,8.998420715332031 +3639,62,47.592575,14.407424926757812 +3640,67,51.94516,15.054840087890625 +3641,66,56.314144,9.685855865478516 +3642,60,52.46044,7.539558410644531 +3643,66,55.47146,10.528541564941406 +3644,67,54.583252,12.416748046875 +3645,60,47.559044,12.440956115722656 +3646,64,55.50202,8.497978210449219 +3647,61,49.771637,11.228363037109375 +3648,66,47.579338,18.42066192626953 +3649,64,58.432213,5.567787170410156 +3650,68,54.65313,13.346870422363281 +3651,67,50.4538,16.546199798583984 +3652,64,50.98584,13.01416015625 +3653,68,56.722614,11.277385711669922 +3654,64,53.463017,10.536983489990234 +3655,66,57.621193,8.378807067871094 +3656,68,54.67124,13.328758239746094 +3657,68,52.543995,15.456005096435547 +3658,62,53.77939,8.220611572265625 +3659,64,56.88195,7.118049621582031 +3660,67,52.396095,14.603904724121094 +3661,69,54.352707,14.647293090820312 +3662,67,49.411423,17.588577270507812 +3663,67,53.585815,13.4141845703125 +3664,69,56.011436,12.988563537597656 +3665,69,52.78103,16.218971252441406 +3666,66,55.13195,10.868049621582031 +3667,68,52.912903,15.08709716796875 +3668,69,56.017067,12.982933044433594 +3669,67,47.670044,19.3299560546875 +3670,67,51.65554,15.344459533691406 +3671,67,54.22257,12.77743148803711 +3672,69,51.538235,17.46176528930664 +3673,67,54.207542,12.792457580566406 +3674,69,51.554848,17.445152282714844 +3675,68,58.01603,9.983970642089844 +3676,69,59.42305,9.576950073242188 +3677,68,53.63556,14.36444091796875 +3678,4,5.025779,1.0257787704467773 +3679,2,6.893418,4.893417835235596 +3680,3,7.0000873,4.000087261199951 +3681,2,4.2521152,2.252115249633789 +3682,3,7.852811,4.852810859680176 +3683,2,5.9071956,3.907195568084717 +3684,4,2.4418542,1.5581457614898682 +3685,2,1.7695854,0.23041462898254395 +3686,2,2.5112998,0.5112998485565186 +3687,2,2.69672,0.6967198848724365 +3688,2,3.8896644,1.8896644115447998 +3689,3,2.5215752,0.4784247875213623 +3690,2,7.3602347,5.36023473739624 +3691,7,5.717965,1.2820348739624023 +3692,2,1.9717727,0.02822732925415039 +3693,3,5.1659613,2.165961265563965 +3694,3,6.677802,3.677802085876465 +3695,2,1.4852996,0.5147004127502441 +3696,2,1.1498456,0.8501543998718262 +3697,2,2.701341,0.701340913772583 +3698,2,6.8387694,4.838769435882568 +3699,7,2.9811985,4.01880145072937 +3700,7,3.824662,3.17533802986145 +3701,7,5.4570513,1.5429487228393555 +3702,2,2.2399147,0.2399146556854248 +3703,2,0.89441276,1.1055872440338135 +3704,7,1.8070757,5.19292426109314 +3705,5,3.1244159,1.8755841255187988 +3706,2,2.0771532,0.07715320587158203 +3707,7,3.3019314,3.698068618774414 +3708,5,6.8748765,1.8748764991760254 +3709,5,3.2161877,1.7838122844696045 +3710,3,2.2462113,0.7537887096405029 +3711,5,2.3987126,2.601287364959717 +3712,5,4.7415433,0.2584567070007324 +3713,7,1.4806218,5.519378185272217 +3714,4,3.3533654,0.646634578704834 +3715,7,6.9782686,0.02173137664794922 +3716,3,1.5885854,1.411414623260498 +3717,7,9.195072,2.1950721740722656 +3718,7,6.7676945,0.23230552673339844 +3719,3,2.0964663,0.9035336971282959 +3720,7,6.268402,0.731597900390625 +3721,5,1.0758142,3.9241857528686523 +3722,5,2.7092457,2.2907543182373047 +3723,2,1.40608,0.5939199924468994 +3724,5,2.2236722,2.7763278484344482 +3725,5,12.493132,7.493131637573242 +3726,5,11.084965,6.084964752197266 +3727,7,2.603761,4.3962390422821045 +3728,3,3.5858414,0.5858414173126221 +3729,7,2.0521739,4.947826147079468 +3730,5,1.9767454,3.023254632949829 +3731,5,5.8358836,0.835883617401123 +3732,5,2.730812,2.2691879272460938 
+3733,2,2.911892,0.9118919372558594 +3734,5,6.217339,1.217339038848877 +3735,5,3.3394964,1.660503625869751 +3736,3,5.4698896,2.4698896408081055 +3737,5,5.5706654,0.5706653594970703 +3738,3,4.7837195,1.783719539642334 +3739,2,3.1333263,1.1333262920379639 +3740,5,8.325302,3.3253021240234375 +3741,3,3.990978,0.9909780025482178 +3742,3,8.353437,5.353437423706055 +3743,5,2.5293393,2.47066068649292 +3744,5,7.366946,2.366946220397949 +3745,7,3.9087312,3.09126877784729 +3746,4,10.835218,6.83521842956543 +3747,5,3.1958115,1.8041884899139404 +3748,4,2.0338328,1.9661672115325928 +3749,4,4.146912,0.1469120979309082 +3750,4,3.9315236,0.06847643852233887 +3751,4,3.1120133,0.8879866600036621 +3752,4,3.6296468,0.37035322189331055 +3753,4,3.640293,0.3597071170806885 +3754,2,4.4463882,2.4463882446289062 +3755,5,6.5337963,1.5337963104248047 +3756,4,9.25618,5.2561798095703125 +3757,7,6.753537,0.24646282196044922 +3758,5,5.5416603,0.5416603088378906 +3759,7,9.109118,2.1091184616088867 +3760,3,4.3697987,1.3697986602783203 +3761,3,4.890438,1.8904380798339844 +3762,4,12.397547,8.397546768188477 +3763,5,5.419028,0.41902780532836914 +3764,69,60.326183,8.673816680908203 +3765,69,54.5883,14.411701202392578 +3766,64,56.9666,7.0334014892578125 +3767,69,29.631258,39.36874198913574 +3768,67,64.96131,2.0386886596679688 +3769,67,52.777374,14.222625732421875 +3770,64,59.37629,4.623710632324219 +3771,65,54.655388,10.344612121582031 +3772,67,54.513046,12.486953735351562 +3773,69,56.50905,12.490951538085938 +3774,69,60.071705,8.928295135498047 +3775,64,59.000427,4.99957275390625 +3776,69,50.774834,18.22516632080078 +3777,69,56.697033,12.302967071533203 +3778,66,48.75232,17.2476806640625 +3779,68,56.815086,11.184913635253906 +3780,64,58.29313,5.706871032714844 +3781,66,48.022858,17.977142333984375 +3782,69,58.197453,10.802547454833984 +3783,68,56.156517,11.843482971191406 +3784,69,55.465897,13.534103393554688 +3785,67,54.709545,12.290454864501953 +3786,64,54.767345,9.232654571533203 +3787,69,49.280064,19.71993637084961 +3788,69,55.04148,13.958518981933594 +3789,68,52.487396,15.512603759765625 +3790,68,56.189377,11.810623168945312 +3791,68,56.150642,11.849357604980469 +3792,66,57.594707,8.405292510986328 +3793,67,47.977722,19.02227783203125 +3794,5,2.0718365,2.928163528442383 +3795,5,5.051605,0.051605224609375 +3796,4,8.65304,4.653039932250977 +3797,5,4.186427,0.813572883605957 +3798,3,7.074856,4.074855804443359 +3799,5,3.7686903,1.2313096523284912 +3800,5,3.3686874,1.631312608718872 +3801,7,5.863865,1.1361351013183594 +3802,3,8.329561,5.329561233520508 +3803,6,2.6413064,3.3586935997009277 +3804,5,4.6550016,0.3449983596801758 +3805,5,3.1012344,1.8987655639648438 +3806,8,4.8461356,3.1538643836975098 +3807,5,8.303434,3.303434371948242 +3808,3,6.1130633,3.113063335418701 +3809,6,3.6306853,2.369314670562744 +3810,3,2.1789498,0.8210501670837402 +3811,6,8.67233,2.672329902648926 +3812,7,5.6071405,1.3928594589233398 +3813,7,13.2732315,6.273231506347656 +3814,3,10.748322,7.748321533203125 +3815,5,3.3394964,1.660503625869751 +3816,3,7.499039,4.499039173126221 +3817,6,6.0845604,0.08456039428710938 +3818,3,6.127146,3.127145767211914 +3819,6,7.8454537,1.8454537391662598 +3820,5,5.4698896,0.46988964080810547 +3821,3,5.3372097,2.337209701538086 +3822,3,5.375784,2.375783920288086 +3823,5,10.609528,5.609527587890625 +3824,6,3.7732944,2.226705551147461 +3825,6,2.0323563,3.9676437377929688 +3826,5,5.211626,0.2116260528564453 +3827,6,5.022396,0.9776039123535156 +3828,6,11.15317,5.153169631958008 +3829,4,4.8608055,0.8608055114746094 
+3830,6,2.8426454,3.1573545932769775 +3831,5,2.795689,2.204310894012451 +3832,5,2.7942264,2.2057735919952393 +3833,6,5.65065,0.3493499755859375 +3834,6,1.40608,4.593919992446899 +3835,6,5.208519,0.7914810180664062 +3836,5,4.511263,0.4887371063232422 +3837,8,6.6586018,1.3413982391357422 +3838,5,3.7595253,1.2404747009277344 +3839,5,4.8119106,0.18808937072753906 +3840,5,11.65152,6.651519775390625 +3841,5,7.9864526,2.986452579498291 +3842,8,5.466472,2.5335278511047363 +3843,7,9.801737,2.801736831665039 +3844,5,3.7839975,1.2160024642944336 +3845,3,7.472134,4.472134113311768 +3846,5,10.702053,5.702053070068359 +3847,5,4.7878294,0.21217060089111328 +3848,8,6.315519,1.684481143951416 +3849,4,2.265724,1.7342760562896729 +3850,3,4.139098,1.1390981674194336 +3851,3,6.6521645,3.6521644592285156 +3852,3,3.1826012,0.1826012134552002 +3853,6,4.4554796,1.544520378112793 +3854,3,5.9474006,2.9474005699157715 +3855,5,8.125221,3.1252212524414062 +3856,6,2.951228,3.048772096633911 +3857,5,8.401361,3.4013614654541016 +3858,5,6.449641,1.449641227722168 +3859,6,2.242941,3.757059097290039 +3860,5,6.5918355,1.5918354988098145 +3861,4,7.042506,3.042506217956543 +3862,5,3.3782105,1.6217894554138184 +3863,6,5.9161325,0.08386754989624023 +3864,5,5.5197253,0.5197253227233887 +3865,5,6.6344094,1.6344094276428223 +3866,6,5.2874765,0.7125234603881836 +3867,6,4.022771,1.977229118347168 +3868,6,7.797446,1.7974457740783691 +3869,8,7.78274,0.21725988388061523 +3870,4,5.713083,1.7130827903747559 +3871,7,8.65304,1.6530399322509766 +3872,6,11.829628,5.829627990722656 +3873,11,6.717129,4.282870769500732 +3874,8,5.65065,2.3493499755859375 +3875,6,6.449641,0.44964122772216797 +3876,6,6.5567555,0.556755542755127 +3877,4,10.660202,6.6602020263671875 +3878,6,4.782646,1.2173538208007812 +3879,4,4.6044636,0.6044635772705078 +3880,8,12.021336,4.021335601806641 +3881,4,4.535268,0.5352678298950195 +3882,4,10.460077,6.460077285766602 +3883,6,5.3569565,0.6430435180664062 +3884,6,2.8315132,3.1684868335723877 +3885,4,2.391223,1.6087770462036133 +3886,11,8.83824,2.1617603302001953 +3887,4,2.982097,1.0179030895233154 +3888,10,7.0990186,2.9009814262390137 +3889,6,6.4311705,0.4311704635620117 +3890,6,3.7810035,2.218996524810791 +3891,7,8.998015,1.9980154037475586 +3892,4,2.9691417,1.0308582782745361 +3893,11,10.429935,0.5700645446777344 +3894,6,5.138689,0.8613109588623047 +3895,5,3.5800827,1.419917345046997 +3896,6,7.339591,1.3395910263061523 +3897,10,7.0107603,2.9892396926879883 +3898,8,6.3320394,1.6679606437683105 +3899,10,6.575711,3.4242892265319824 +3900,6,5.4796534,0.5203466415405273 +3901,9,2.2770853,6.722914695739746 +3902,6,5.791568,0.20843219757080078 +3903,6,8.673573,2.673572540283203 +3904,12,8.033688,3.9663124084472656 +3905,11,7.8151364,3.184863567352295 +3906,9,11.289015,2.2890148162841797 +3907,6,7.035822,1.0358219146728516 +3908,6,12.5577545,6.5577545166015625 +3909,6,5.5197253,0.48027467727661133 +3910,6,3.905952,2.094048023223877 +3911,9,6.4765334,2.5234665870666504 +3912,6,5.8226366,0.17736339569091797 +3913,12,6.4968367,5.5031633377075195 +3914,4,3.1362615,0.8637385368347168 +3915,6,3.7438502,2.2561497688293457 +3916,6,16.241852,10.241851806640625 +3917,6,2.1938982,3.8061017990112305 +3918,6,7.903492,1.9034919738769531 +3919,11,16.009193,5.009193420410156 +3920,8,7.932736,0.06726408004760742 +3921,6,10.867939,4.867938995361328 +3922,8,5.548444,2.4515562057495117 +3923,5,8.295173,3.295172691345215 +3924,4,3.110958,0.8890419006347656 +3925,7,6.4916883,0.5083117485046387 +3926,6,2.7877767,3.2122232913970947 
+3927,7,3.7595253,3.2404747009277344 +3928,5,8.105534,3.1055335998535156 +3929,6,5.118636,0.8813638687133789 +3930,4,8.276907,4.276906967163086 +3931,8,9.437778,1.4377784729003906 +3932,7,5.5886126,1.4113874435424805 +3933,4,5.55519,1.555190086364746 +3934,5,6.7501802,1.7501802444458008 +3935,10,10.406631,0.4066314697265625 +3936,7,5.792611,1.2073888778686523 +3937,5,5.284482,0.2844820022583008 +3938,5,3.5709763,1.4290237426757812 +3939,5,3.74478,1.2552199363708496 +3940,5,6.433955,1.433955192565918 +3941,5,2.599742,2.4002580642700195 +3942,7,5.707199,1.2928009033203125 +3943,8,3.0030148,4.996985197067261 +3944,8,2.775358,5.224642038345337 diff --git a/Results_csv/mobilenet_reg_0.25_64.csv b/Results_csv/mobilenet_reg_0.25_64.csv new file mode 100644 index 0000000..e43c03a --- /dev/null +++ b/Results_csv/mobilenet_reg_0.25_64.csv @@ -0,0 +1,3950 @@ +MAE +5.5619609352904735 +ID,age,age_p,error +CA3,CA5 +0.4223067173637516,0.5870722433460076 +0,1,1.0574734,0.05747342109680176 +1,1,1.0228395,0.02283954620361328 +2,2,1.0257843,0.9742157459259033 +3,1,1.015407,0.015406966209411621 +4,1,1.0571415,0.05714154243469238 +5,1,1.0230433,0.023043274879455566 +6,1,1.045865,0.04586505889892578 +7,1,1.0155408,0.015540838241577148 +8,1,1.0155114,0.015511393547058105 +9,1,1.0179118,0.017911791801452637 +10,1,1.0383286,0.03832864761352539 +11,1,1.1290938,0.12909376621246338 +12,1,1.6235142,0.6235141754150391 +13,1,0.99609953,0.003900468349456787 +14,1,1.0358881,0.035888075828552246 +15,1,1.3923742,0.3923741579055786 +16,1,1.0353817,0.03538167476654053 +17,1,1.0289707,0.028970718383789062 +18,1,1.0160282,0.016028165817260742 +19,1,0.9948546,0.005145370960235596 +20,1,4.5221195,3.5221195220947266 +21,1,1.0032904,0.003290414810180664 +22,1,1.0183448,0.018344759941101074 +23,1,1.0182555,0.01825547218322754 +24,2,1.0390537,0.9609463214874268 +25,1,1.0382601,0.03826010227203369 +26,1,1.0390172,0.0390172004699707 +27,1,1.0225358,0.02253580093383789 +28,1,1.0247701,0.024770140647888184 +29,1,1.0254936,0.025493621826171875 +30,1,1.0506215,0.05062150955200195 +31,1,1.0368159,0.03681588172912598 +32,1,1.0116518,0.011651754379272461 +33,1,1.0091649,0.009164929389953613 +34,1,2.2240493,1.2240493297576904 +35,1,16.621592,15.621591567993164 +36,1,2.3995826,1.3995826244354248 +37,1,1.5641155,0.5641155242919922 +38,1,2.6699307,1.6699306964874268 +39,1,0.9911951,0.008804917335510254 +40,1,1.0148274,0.014827370643615723 +41,1,1.03478,0.034780025482177734 +42,1,1.0413035,0.04130351543426514 +43,1,0.99843895,0.0015610456466674805 +44,1,1.0172987,0.01729869842529297 +45,1,1.0352795,0.03527951240539551 +46,1,1.0168064,0.016806364059448242 +47,1,1.0107863,0.010786294937133789 +48,2,1.0534576,0.9465423822402954 +49,1,1.0311974,0.031197428703308105 +50,1,1.9166632,0.9166631698608398 +51,1,1.0365765,0.03657650947570801 +52,1,1.2871562,0.28715622425079346 +53,1,1.0191013,0.019101262092590332 +54,1,1.0079634,0.007963418960571289 +55,1,1.0198768,0.019876837730407715 +56,1,1.0179636,0.017963647842407227 +57,2,1.0440024,0.9559975862503052 +58,1,1.005183,0.005182981491088867 +59,1,1.0270584,0.02705836296081543 +60,1,1.0689365,0.06893646717071533 +61,1,1.02307,0.02306997776031494 +62,1,1.0427718,0.04277181625366211 +63,1,1.0172436,0.017243623733520508 +64,1,1.0127978,0.012797832489013672 +65,1,1.0170665,0.017066478729248047 +66,2,1.0554003,0.9445997476577759 +67,1,1.0504271,0.05042707920074463 +68,1,1.0081972,0.008197188377380371 +69,1,1.0265926,0.026592612266540527 +70,2,1.0600599,0.9399400949478149 +71,1,1.0165379,0.016537904739379883 
+72,1,1.0212598,0.021259784698486328 +73,1,1.0652337,0.06523370742797852 +74,1,1.0059496,0.005949616432189941 +75,1,1.0331441,0.03314411640167236 +76,1,1.04608,0.04607999324798584 +77,3,1.3348839,1.6651160717010498 +78,1,1.0188063,0.0188063383102417 +79,1,1.0327841,0.032784104347229004 +80,4,1.0469139,2.9530861377716064 +81,3,3.5308483,0.5308482646942139 +82,1,1.0004416,0.00044155120849609375 +83,1,1.0238411,0.023841142654418945 +84,1,1.002464,0.0024640560150146484 +85,1,1.0145594,0.014559388160705566 +86,1,1.0133315,0.01333153247833252 +87,2,7.5325007,5.532500743865967 +88,3,9.941149,6.94114875793457 +89,1,1.0359231,0.035923123359680176 +90,2,1.045141,0.9548590183258057 +91,1,1.0416535,0.04165351390838623 +92,1,1.0120167,0.012016654014587402 +93,1,8.055551,7.055550575256348 +94,1,1.0198593,0.01985931396484375 +95,3,3.8882954,0.8882954120635986 +96,4,3.2044544,0.7955455780029297 +97,1,2.8024323,1.8024322986602783 +98,1,2.665388,1.6653881072998047 +99,1,1.0379233,0.037923336029052734 +100,1,1.0272517,0.027251720428466797 +101,1,1.0097595,0.00975954532623291 +102,2,1.0124037,0.9875962734222412 +103,2,1.03061,0.9693900346755981 +104,1,1.0122551,0.012255072593688965 +105,1,4.34832,3.3483200073242188 +106,2,1.0517358,0.9482642412185669 +107,2,1.012271,0.9877289533615112 +108,2,1.0096837,0.9903162717819214 +109,1,1.7009661,0.7009661197662354 +110,1,3.7296603,2.7296602725982666 +111,3,2.7058923,0.29410767555236816 +112,2,4.996105,2.996105194091797 +113,2,1.0203872,0.9796128273010254 +114,1,2.3058207,1.3058207035064697 +115,1,3.5233016,2.523301601409912 +116,1,1.0131384,0.013138413429260254 +117,1,1.0281262,0.028126239776611328 +118,1,1.0223131,0.02231311798095703 +119,1,2.1270947,1.1270947456359863 +120,1,1.0141934,0.014193415641784668 +121,1,0.9960594,0.003940582275390625 +122,2,1.0096521,0.9903478622436523 +123,1,1.0060306,0.006030559539794922 +124,2,1.0526903,0.9473097324371338 +125,2,1.0403444,0.9596556425094604 +126,2,1.7815071,0.21849286556243896 +127,3,2.7462475,0.2537524700164795 +128,1,7.4233894,6.423389434814453 +129,1,1.0150564,0.015056371688842773 +130,3,4.890033,1.8900327682495117 +131,1,1.0256909,0.025690913200378418 +132,1,1.0071834,0.007183432579040527 +133,1,1.014059,0.014058947563171387 +134,1,2.6491988,1.6491987705230713 +135,2,4.40493,2.4049301147460938 +136,1,1.0413866,0.04138660430908203 +137,1,1.0158236,0.0158236026763916 +138,1,1.0376718,0.037671804428100586 +139,1,1.0648131,0.06481313705444336 +140,1,1.0108905,0.010890483856201172 +141,1,1.0224863,0.022486329078674316 +142,2,2.3317692,0.3317692279815674 +143,1,1.7632163,0.7632162570953369 +144,11,7.6372733,3.3627266883850098 +145,10,5.432809,4.567191123962402 +146,9,5.149155,3.8508448600769043 +147,6,20.754057,14.754056930541992 +148,5,5.416149,0.4161491394042969 +149,11,9.316697,1.683302879333496 +150,9,4.2529097,4.7470903396606445 +151,11,5.5792537,5.420746326446533 +152,6,1.3746576,4.62534236907959 +153,6,2.8738499,3.126150131225586 +154,10,4.412225,5.587775230407715 +155,10,4.4286833,5.571316719055176 +156,9,7.8876915,1.1123085021972656 +157,7,8.560462,1.5604619979858398 +158,6,4.979572,1.02042818069458 +159,9,4.115085,4.884914875030518 +160,9,8.977649,0.02235126495361328 +161,10,4.057902,5.942098140716553 +162,6,4.329431,1.6705689430236816 +163,8,3.6296976,4.370302438735962 +164,12,7.0539384,4.946061611175537 +165,9,3.640867,5.359133005142212 +166,6,10.570568,4.570568084716797 +167,9,5.0323806,3.9676194190979004 +168,9,4.2527347,4.747265338897705 +169,6,5.3885603,0.6114397048950195 +170,9,3.1059778,5.894022226333618 
+171,9,3.672362,5.3276379108428955 +172,6,1.5691179,4.430882096290588 +173,13,4.4582596,8.541740417480469 +174,13,8.971048,4.028951644897461 +175,8,19.968071,11.968070983886719 +176,10,4.3473854,5.652614593505859 +177,9,16.871668,7.871667861938477 +178,7,4.1891093,2.8108906745910645 +179,6,4.2475457,1.7524542808532715 +180,9,18.34234,9.342340469360352 +181,6,7.084636,1.0846362113952637 +182,9,6.8539987,2.1460013389587402 +183,6,2.364977,3.6350231170654297 +184,6,3.123179,2.8768210411071777 +185,7,2.5553439,4.444656133651733 +186,7,7.2465224,0.2465224266052246 +187,11,5.4296756,5.570324420928955 +188,6,5.4322286,0.5677714347839355 +189,7,20.393068,13.393068313598633 +190,8,3.688471,4.311528921127319 +191,6,19.415894,13.4158935546875 +192,6,3.7361183,2.2638816833496094 +193,8,3.3570907,4.642909288406372 +194,7,15.157098,8.157097816467285 +195,6,3.1844513,2.8155486583709717 +196,13,7.0457063,5.954293727874756 +197,10,9.749531,0.2504692077636719 +198,9,24.18646,15.186460494995117 +199,6,6.6818643,0.6818642616271973 +200,11,6.417065,4.582934856414795 +201,6,1.1024914,4.89750862121582 +202,10,20.971985,10.97198486328125 +203,9,10.203571,1.2035713195800781 +204,6,3.8229647,2.177035331726074 +205,9,4.1816864,4.8183135986328125 +206,6,4.7925377,1.2074623107910156 +207,8,8.028452,0.028451919555664062 +208,12,16.187912,4.1879119873046875 +209,13,12.193344,0.8066558837890625 +210,10,7.1639056,2.836094379425049 +211,10,5.306763,4.693236827850342 +212,11,22.23461,11.234609603881836 +213,7,5.007859,1.992140769958496 +214,13,17.585724,4.585723876953125 +215,11,9.891712,1.1082878112792969 +216,9,8.513023,0.48697662353515625 +217,11,12.431544,1.431544303894043 +218,14,19.656502,5.656501770019531 +219,7,16.49445,9.494449615478516 +220,7,5.470714,1.5292859077453613 +221,7,2.9212048,4.0787951946258545 +222,9,8.084655,0.9153451919555664 +223,11,33.387848,22.387847900390625 +224,11,4.714187,6.285812854766846 +225,7,7.56016,0.5601601600646973 +226,12,20.001928,8.001928329467773 +227,7,4.596615,2.4033851623535156 +228,9,6.3498325,2.650167465209961 +229,10,4.180448,5.819551944732666 +230,11,17.922369,6.922369003295898 +231,7,5.989139,1.0108609199523926 +232,11,6.567956,4.43204402923584 +233,7,7.1542554,0.15425539016723633 +234,11,5.9069557,5.093044281005859 +235,10,25.319965,15.319965362548828 +236,10,10.288989,0.2889890670776367 +237,12,5.8295965,6.170403480529785 +238,9,14.736896,5.736895561218262 +239,7,3.5124753,3.4875247478485107 +240,14,4.035537,9.964463233947754 +241,11,3.811322,7.188678026199341 +242,7,6.888412,0.1115880012512207 +243,11,8.669068,2.3309316635131836 +244,12,7.0976434,4.9023566246032715 +245,7,4.5566325,2.4433674812316895 +246,8,9.571823,1.5718231201171875 +247,14,2.8031945,11.196805477142334 +248,8,4.2445107,3.7554893493652344 +249,13,6.312205,6.687795162200928 +250,7,6.2140255,0.7859745025634766 +251,8,3.2413993,4.75860071182251 +252,7,14.607788,7.6077880859375 +253,7,6.5011177,0.4988822937011719 +254,8,5.454858,2.54514217376709 +255,13,10.270961,2.729039192199707 +256,7,1.0824218,5.917578220367432 +257,11,8.100364,2.8996362686157227 +258,11,20.319403,9.319402694702148 +259,10,21.788715,11.788715362548828 +260,7,4.4747043,2.5252957344055176 +261,10,5.954308,4.045691967010498 +262,7,1.0473309,5.952669143676758 +263,7,5.9330983,1.066901683807373 +264,7,3.9632132,3.0367867946624756 +265,9,23.129786,14.129785537719727 +266,10,15.275263,5.275262832641602 +267,14,12.287155,1.7128448486328125 +268,7,1.3171847,5.682815313339233 +269,8,9.861557,1.8615570068359375 +270,8,17.079378,9.079378128051758 
+271,10,7.7395535,2.260446548461914 +272,8,5.6824017,2.317598342895508 +273,13,17.97008,4.97007942199707 +274,8,16.14848,8.148479461669922 +275,8,4.512113,3.487886905670166 +276,10,15.464609,5.464609146118164 +277,8,6.3054757,1.6945242881774902 +278,8,18.41697,10.416969299316406 +279,9,3.07861,5.921390056610107 +280,8,15.021332,7.021331787109375 +281,10,18.560957,8.560956954956055 +282,8,7.9566607,0.04333925247192383 +283,8,4.5831585,3.416841506958008 +284,10,11.670486,1.6704864501953125 +285,13,8.435861,4.564139366149902 +286,8,21.37257,13.372570037841797 +287,15,10.794701,4.205299377441406 +288,8,7.1400394,0.8599605560302734 +289,9,8.015329,0.9846706390380859 +290,9,6.3363104,2.663689613342285 +291,8,7.845479,0.15452098846435547 +292,9,20.542822,11.542821884155273 +293,8,18.594826,10.594825744628906 +294,10,16.376125,6.376125335693359 +295,8,18.858534,10.85853385925293 +296,8,18.469007,10.46900749206543 +297,8,5.026855,2.973145008087158 +298,10,7.162497,2.837502956390381 +299,11,6.3148794,4.685120582580566 +300,8,3.1536324,4.846367597579956 +301,8,15.275263,7.275262832641602 +302,12,9.522245,2.477754592895508 +303,8,8.730816,0.7308158874511719 +304,8,5.8603773,2.139622688293457 +305,9,8.845929,0.15407085418701172 +306,8,5.235579,2.764420986175537 +307,13,11.172974,1.8270263671875 +308,8,19.851568,11.851568222045898 +309,8,8.211738,0.21173763275146484 +310,8,3.284488,4.715512037277222 +311,8,6.3194165,1.6805834770202637 +312,11,13.769662,2.7696619033813477 +313,12,15.446627,3.446626663208008 +314,12,7.9111433,4.0888566970825195 +315,9,7.770525,1.2294750213623047 +316,12,10.178125,1.8218746185302734 +317,12,5.9653134,6.03468656539917 +318,12,4.9879217,7.012078285217285 +319,8,13.103453,5.103452682495117 +320,12,12.364962,0.3649616241455078 +321,12,3.2759755,8.724024534225464 +322,13,15.309613,2.3096132278442383 +323,12,8.515273,3.484726905822754 +324,12,17.961946,5.961946487426758 +325,13,21.466047,8.466047286987305 +326,7,13.372579,6.3725786209106445 +327,10,15.731046,5.731045722961426 +328,12,6.7690353,5.230964660644531 +329,12,15.159907,3.159907341003418 +330,12,19.967896,7.9678955078125 +331,10,16.98043,6.980430603027344 +332,8,4.403118,3.596881866455078 +333,8,11.740501,3.7405014038085938 +334,8,9.421979,1.4219789505004883 +335,11,22.940767,11.940767288208008 +336,12,16.083166,4.083166122436523 +337,12,5.4183607,6.581639289855957 +338,12,28.784348,16.784347534179688 +339,12,14.330063,2.3300628662109375 +340,12,3.6274927,8.372507333755493 +341,12,19.575876,7.575876235961914 +342,14,23.959827,9.959827423095703 +343,8,2.845364,5.154635906219482 +344,8,15.103538,7.103537559509277 +345,8,5.4778376,2.522162437438965 +346,9,4.3971295,4.602870464324951 +347,10,8.399678,1.6003217697143555 +348,12,10.579035,1.4209651947021484 +349,9,6.236003,2.7639970779418945 +350,14,5.0734844,8.926515579223633 +351,8,3.13338,4.866620063781738 +352,12,14.8234625,2.82346248626709 +353,13,11.123947,1.8760528564453125 +354,8,9.451033,1.4510326385498047 +355,12,15.616201,3.616201400756836 +356,8,12.138833,4.138833045959473 +357,12,7.207966,4.792034149169922 +358,9,14.436793,5.436793327331543 +359,12,1.1192753,10.880724668502808 +360,12,8.619799,3.3802013397216797 +361,12,6.789571,5.2104291915893555 +362,16,17.223656,1.2236557006835938 +363,13,8.2480955,4.751904487609863 +364,12,12.408684,0.40868377685546875 +365,12,9.297605,2.702395439147949 +366,12,11.507979,0.4920206069946289 +367,9,13.3614,4.3613996505737305 +368,11,15.051008,4.051008224487305 +369,13,18.731121,5.731121063232422 
+370,11,7.6109266,3.389073371887207 +371,12,8.964285,3.035715103149414 +372,12,9.5335045,2.4664955139160156 +373,12,14.318531,2.318531036376953 +374,9,15.140278,6.140277862548828 +375,12,10.448417,1.5515832901000977 +376,9,6.6796823,2.3203177452087402 +377,9,8.802192,0.19780826568603516 +378,12,2.7296824,9.270317554473877 +379,12,6.2567325,5.74326753616333 +380,12,13.745963,1.7459630966186523 +381,9,7.819087,1.180912971496582 +382,12,21.097893,9.097892761230469 +383,12,12.591581,0.5915813446044922 +384,12,7.353564,4.6464362144470215 +385,12,16.650364,4.650363922119141 +386,12,16.822596,4.822595596313477 +387,12,22.57283,10.572830200195312 +388,12,19.552113,7.552112579345703 +389,12,4.68899,7.311009883880615 +390,12,36.239655,24.239654541015625 +391,12,21.033327,9.033327102661133 +392,12,13.881718,1.8817176818847656 +393,12,7.389944,4.610055923461914 +394,12,8.908657,3.0913429260253906 +395,12,15.517591,3.5175914764404297 +396,12,12.226048,0.22604846954345703 +397,12,11.688643,0.3113565444946289 +398,12,17.5557,5.555700302124023 +399,12,1.9051745,10.094825506210327 +400,12,16.11893,4.118930816650391 +401,12,19.77025,7.77025032043457 +402,12,7.3161077,4.683892250061035 +403,12,15.244863,3.2448625564575195 +404,12,8.488281,3.51171875 +405,12,11.265999,0.7340011596679688 +406,12,7.5048556,4.495144367218018 +407,8,6.736227,1.263772964477539 +408,12,10.412568,1.5874319076538086 +409,12,3.4885426,8.511457443237305 +410,13,14.176837,1.1768369674682617 +411,12,8.016606,3.983393669128418 +412,12,3.972517,8.027482986450195 +413,10,5.5925617,4.407438278198242 +414,8,4.7479587,3.2520413398742676 +415,8,8.891339,0.8913393020629883 +416,12,15.885563,3.8855628967285156 +417,8,6.922762,1.0772380828857422 +418,11,14.669012,3.6690120697021484 +419,14,7.64009,6.359910011291504 +420,10,15.405665,5.405665397644043 +421,11,8.315703,2.6842966079711914 +422,12,12.113834,0.11383438110351562 +423,14,9.509228,4.490772247314453 +424,13,14.871458,1.8714580535888672 +425,13,12.765505,0.2344951629638672 +426,13,8.105146,4.894853591918945 +427,8,12.233273,4.233272552490234 +428,16,7.046943,8.953056812286377 +429,11,5.6008463,5.399153709411621 +430,15,2.8944821,12.105517864227295 +431,13,16.921503,3.9215030670166016 +432,9,17.881615,8.881614685058594 +433,9,5.008936,3.9910640716552734 +434,15,22.651905,7.651905059814453 +435,14,14.432986,0.4329862594604492 +436,9,11.015181,2.0151805877685547 +437,12,14.146857,2.146857261657715 +438,13,21.176733,8.176733016967773 +439,12,18.574661,6.5746612548828125 +440,14,12.335717,1.6642827987670898 +441,14,18.31625,4.316249847412109 +442,16,14.944589,1.0554113388061523 +443,14,20.776022,6.776021957397461 +444,12,20.249895,8.249895095825195 +445,13,14.166269,1.166269302368164 +446,11,12.278592,1.2785921096801758 +447,15,6.555168,8.444831848144531 +448,14,21.96615,7.966150283813477 +449,9,5.026855,3.973145008087158 +450,14,12.221982,1.7780179977416992 +451,13,14.440808,1.4408082962036133 +452,13,10.959745,2.040254592895508 +453,14,3.5922081,10.40779185295105 +454,13,4.9470153,8.052984714508057 +455,13,13.766206,0.7662057876586914 +456,12,14.880742,2.880742073059082 +457,13,15.472895,2.4728946685791016 +458,12,17.333307,5.333307266235352 +459,12,9.793097,2.2069034576416016 +460,13,13.254067,0.25406742095947266 +461,13,10.775151,2.224848747253418 +462,13,13.451336,0.4513359069824219 +463,14,15.225782,1.2257823944091797 +464,13,12.712654,0.28734588623046875 +465,14,17.902773,3.902772903442383 +466,12,5.5572815,6.442718505859375 +467,16,22.70777,6.707769393920898 
+468,13,16.343126,3.3431262969970703 +469,13,6.3142633,6.685736656188965 +470,13,9.100352,3.8996477127075195 +471,13,14.176837,1.1768369674682617 +472,12,5.2708116,6.729188442230225 +473,14,13.671494,0.3285064697265625 +474,16,12.51411,3.4858903884887695 +475,13,18.884142,5.88414192199707 +476,14,18.56932,4.5693206787109375 +477,16,12.196234,3.8037662506103516 +478,10,1.6761478,8.323852181434631 +479,9,1.3725492,7.6274508237838745 +480,14,10.365625,3.6343746185302734 +481,14,16.583773,2.583772659301758 +482,9,6.3728986,2.627101421356201 +483,12,8.281083,3.718916893005371 +484,9,4.2469497,4.753050327301025 +485,14,19.510395,5.510395050048828 +486,12,18.521204,6.521203994750977 +487,10,4.111445,5.88855504989624 +488,14,11.532244,2.4677562713623047 +489,9,14.300793,5.300792694091797 +490,9,12.349976,3.3499755859375 +491,14,18.399258,4.399257659912109 +492,14,15.40416,1.4041595458984375 +493,16,11.963654,4.036346435546875 +494,13,4.179412,8.820588111877441 +495,11,7.9181232,3.081876754760742 +496,10,5.168856,4.831143856048584 +497,10,12.508415,2.5084152221679688 +498,13,10.865401,2.134598731994629 +499,13,4.44108,8.558919906616211 +500,17,21.822815,4.82281494140625 +501,10,6.9732037,3.026796340942383 +502,14,11.688643,2.311356544494629 +503,14,1.0266948,12.973305225372314 +504,10,16.684942,6.684942245483398 +505,17,18.692707,1.6927070617675781 +506,14,9.60413,4.395870208740234 +507,13,10.52516,2.4748401641845703 +508,13,10.424884,2.5751161575317383 +509,16,10.176513,5.823487281799316 +510,13,10.435347,2.5646533966064453 +511,12,5.99542,6.004580020904541 +512,10,22.511507,12.511507034301758 +513,10,15.058144,5.058143615722656 +514,11,9.995745,1.0042552947998047 +515,11,9.95079,1.0492095947265625 +516,13,11.863951,1.1360492706298828 +517,13,14.108257,1.1082572937011719 +518,13,15.000975,2.000974655151367 +519,13,11.485936,1.514063835144043 +520,10,13.636594,3.636593818664551 +521,13,9.3071575,3.692842483520508 +522,14,15.585099,1.585099220275879 +523,10,13.2545595,3.2545595169067383 +524,13,14.19618,1.1961803436279297 +525,13,16.935032,3.9350318908691406 +526,10,15.599445,5.599445343017578 +527,10,18.229733,8.229732513427734 +528,10,12.681454,2.6814537048339844 +529,10,12.3968935,2.3968935012817383 +530,15,16.988836,1.9888362884521484 +531,10,19.468628,9.4686279296875 +532,14,12.705616,1.2943840026855469 +533,13,6.671293,6.328707218170166 +534,17,15.942222,1.0577783584594727 +535,14,20.672272,6.672271728515625 +536,10,22.167583,12.167583465576172 +537,16,10.637865,5.36213493347168 +538,13,12.40074,0.5992603302001953 +539,12,7.4937005,4.5062994956970215 +540,10,11.631475,1.6314754486083984 +541,13,16.94505,3.945049285888672 +542,10,22.64581,12.645809173583984 +543,12,8.899196,3.100804328918457 +544,15,17.534002,2.5340023040771484 +545,15,12.844731,2.155268669128418 +546,15,8.586929,6.4130706787109375 +547,16,18.8944,2.894399642944336 +548,16,15.127814,0.8721857070922852 +549,15,13.564155,1.4358453750610352 +550,15,15.464261,0.4642610549926758 +551,15,15.2336855,0.23368549346923828 +552,15,8.301831,6.698168754577637 +553,15,19.43676,4.436759948730469 +554,18,13.276923,4.723076820373535 +555,15,17.776754,2.776754379272461 +556,19,13.109161,5.890838623046875 +557,15,22.484972,7.48497200012207 +558,16,17.134993,1.1349925994873047 +559,15,13.478486,1.5215139389038086 +560,15,11.131551,3.8684492111206055 +561,11,18.054333,7.054332733154297 +562,14,12.77994,1.2200603485107422 +563,14,19.94531,5.945310592651367 +564,15,17.306274,2.3062744140625 +565,15,16.394176,1.3941764831542969 
+566,15,17.267744,2.2677440643310547 +567,11,12.843443,1.8434429168701172 +568,15,20.876581,5.876581192016602 +569,15,15.2633295,0.26332950592041016 +570,14,13.247402,0.7525978088378906 +571,15,14.541576,0.4584236145019531 +572,15,32.412792,17.412792205810547 +573,15,15.367193,0.36719322204589844 +574,15,14.902544,0.09745597839355469 +575,19,17.652975,1.347024917602539 +576,16,7.1884995,8.811500549316406 +577,15,17.980598,2.9805984497070312 +578,11,13.029885,2.0298852920532227 +579,19,13.194753,5.8052473068237305 +580,15,7.5254784,7.474521636962891 +581,15,22.054598,7.054597854614258 +582,16,11.050812,4.949188232421875 +583,13,23.82922,10.829219818115234 +584,15,16.52918,1.5291805267333984 +585,14,13.281459,0.718541145324707 +586,14,18.315895,4.315895080566406 +587,14,9.831899,4.1681013107299805 +588,14,9.768939,4.231060981750488 +589,16,15.759693,0.24030685424804688 +590,15,16.027166,1.0271663665771484 +591,16,13.664382,2.335618019104004 +592,16,16.595015,0.5950145721435547 +593,11,7.64009,3.359910011291504 +594,15,16.001547,1.001546859741211 +595,15,14.256318,0.7436819076538086 +596,15,15.256526,0.25652599334716797 +597,15,12.047092,2.9529075622558594 +598,11,18.122356,7.122356414794922 +599,15,4.0135016,10.986498355865479 +600,15,13.096909,1.9030914306640625 +601,15,22.947048,7.947048187255859 +602,11,13.0452795,2.0452795028686523 +603,16,22.759811,6.7598114013671875 +604,17,20.814192,3.8141918182373047 +605,15,14.111548,0.8884515762329102 +606,16,17.475328,1.4753284454345703 +607,16,17.345219,1.3452186584472656 +608,15,10.822994,4.177005767822266 +609,16,8.997739,7.002261161804199 +610,15,18.138287,3.138286590576172 +611,17,19.370235,2.3702354431152344 +612,15,12.742582,2.257417678833008 +613,15,18.625093,3.625093460083008 +614,19,19.239094,0.23909378051757812 +615,15,12.882362,2.1176376342773438 +616,16,16.050894,0.05089378356933594 +617,15,4.44108,10.558919906616211 +618,18,9.88086,8.119139671325684 +619,16,13.557699,2.442300796508789 +620,18,16.262102,1.7378978729248047 +621,15,19.614721,4.614721298217773 +622,15,13.617628,1.3823719024658203 +623,16,11.371495,4.628504753112793 +624,15,12.53807,2.461930274963379 +625,15,12.868737,2.13126277923584 +626,15,8.512047,6.487953186035156 +627,17,17.786781,0.7867813110351562 +628,19,10.852643,8.147356986999512 +629,16,22.694853,6.694852828979492 +630,16,16.637304,0.6373043060302734 +631,12,14.006787,2.0067873001098633 +632,16,11.730714,4.269286155700684 +633,19,13.494185,5.505814552307129 +634,16,16.082466,0.08246612548828125 +635,16,16.795328,0.7953281402587891 +636,19,20.136179,1.136178970336914 +637,16,13.923131,2.076869010925293 +638,16,14.4865675,1.513432502746582 +639,17,13.045138,3.954861640930176 +640,17,13.329573,3.6704273223876953 +641,19,12.83906,6.160940170288086 +642,19,12.679471,6.320528984069824 +643,16,19.099546,3.099546432495117 +644,16,15.848753,0.1512470245361328 +645,17,12.788132,4.2118682861328125 +646,16,17.916061,1.9160614013671875 +647,17,16.103931,0.8960685729980469 +648,17,17.183195,0.1831951141357422 +649,16,17.248245,1.2482452392578125 +650,20,11.914706,8.085293769836426 +651,16,14.068234,1.9317655563354492 +652,16,16.175123,0.1751232147216797 +653,16,15.080916,0.9190835952758789 +654,16,10.468655,5.531345367431641 +655,16,13.545972,2.4540281295776367 +656,16,18.050272,2.050271987915039 +657,16,21.73861,5.738609313964844 +658,16,14.665398,1.3346023559570312 +659,16,23.298864,7.298864364624023 +660,16,20.356722,4.356721878051758 +661,16,10.166944,5.833056449890137 +662,20,21.301247,1.3012466430664062 
+663,16,22.048872,6.048871994018555 +664,16,18.642635,2.6426353454589844 +665,16,11.985187,4.014813423156738 +666,16,8.108763,7.891237258911133 +667,16,17.540823,1.540822982788086 +668,19,17.309626,1.6903743743896484 +669,16,14.836614,1.163386344909668 +670,17,14.412282,2.5877180099487305 +671,17,12.7448225,4.2551774978637695 +672,12,15.528263,3.5282630920410156 +673,17,18.825918,1.825918197631836 +674,12,16.913624,4.913623809814453 +675,13,18.75303,5.753030776977539 +676,18,16.334728,1.6652717590332031 +677,13,16.828474,3.8284740447998047 +678,17,14.035495,2.964505195617676 +679,16,16.996798,0.9967975616455078 +680,15,10.931421,4.068578720092773 +681,16,16.234215,0.23421478271484375 +682,16,20.184402,4.1844024658203125 +683,12,13.722222,1.7222223281860352 +684,15,14.700347,0.2996530532836914 +685,12,22.038414,10.038414001464844 +686,15,6.0433855,8.95661449432373 +687,15,13.270133,1.7298669815063477 +688,12,16.10936,4.1093597412109375 +689,17,14.425206,2.574793815612793 +690,13,15.568486,2.568486213684082 +691,16,14.563329,1.436671257019043 +692,15,18.136803,3.1368026733398438 +693,18,20.329327,2.329326629638672 +694,17,8.74256,8.257439613342285 +695,15,21.194973,6.194972991943359 +696,14,6.5963774,7.403622627258301 +697,18,15.638109,2.3618907928466797 +698,15,11.889241,3.1107587814331055 +699,15,1.0611119,13.93888807296753 +700,15,19.618265,4.618265151977539 +701,13,15.956065,2.9560651779174805 +702,17,16.54445,0.4555492401123047 +703,16,17.587221,1.5872211456298828 +704,20,16.098446,3.9015541076660156 +705,17,14.71162,2.288379669189453 +706,12,16.70665,4.7066497802734375 +707,16,12.233302,3.766697883605957 +708,17,18.463497,1.4634971618652344 +709,18,11.513241,6.486759185791016 +710,18,17.487593,0.5124073028564453 +711,16,14.844773,1.155226707458496 +712,12,19.747824,7.747823715209961 +713,18,21.425081,3.425081253051758 +714,15,9.860469,5.139531135559082 +715,17,16.823154,0.17684555053710938 +716,15,19.91386,4.913860321044922 +717,16,16.480045,0.4800453186035156 +718,15,19.790792,4.790792465209961 +719,13,12.452674,0.5473260879516602 +720,16,19.68991,3.689910888671875 +721,12,8.621256,3.378744125366211 +722,15,6.87261,8.127389907836914 +723,17,12.490599,4.509401321411133 +724,15,18.097439,3.0974388122558594 +725,16,16.464851,0.46485137939453125 +726,17,23.729574,6.729574203491211 +727,13,10.353418,2.6465816497802734 +728,16,18.920387,2.9203872680664062 +729,16,16.177769,0.17776870727539062 +730,16,15.338332,0.6616678237915039 +731,16,16.541922,0.5419216156005859 +732,19,10.003157,8.996843338012695 +733,17,15.936093,1.0639066696166992 +734,17,17.620157,0.6201572418212891 +735,16,13.680089,2.319911003112793 +736,20,10.9625635,9.037436485290527 +737,16,18.061792,2.0617923736572266 +738,16,17.437254,1.4372539520263672 +739,16,16.670477,0.6704769134521484 +740,16,19.51624,3.5162391662597656 +741,17,9.709115,7.290884971618652 +742,16,20.701124,4.70112419128418 +743,15,14.161747,0.8382530212402344 +744,16,12.910822,3.0891780853271484 +745,16,23.051197,7.051197052001953 +746,17,17.291998,0.29199790954589844 +747,17,19.566948,2.5669479370117188 +748,15,9.094873,5.905126571655273 +749,16,15.411461,0.5885391235351562 +750,16,17.507956,1.507955551147461 +751,15,12.576714,2.4232864379882812 +752,16,18.095293,2.0952930450439453 +753,17,18.699192,1.6991920471191406 +754,13,15.349022,2.3490219116210938 +755,16,11.37588,4.624119758605957 +756,17,12.508532,4.49146842956543 +757,16,20.031698,4.031698226928711 +758,17,17.335352,0.33535194396972656 +759,17,17.396008,0.3960075378417969 
+760,15,10.313771,4.6862287521362305 +761,17,20.89133,3.8913307189941406 +762,17,20.262115,3.262115478515625 +763,20,17.400337,2.5996627807617188 +764,16,21.685326,5.685325622558594 +765,17,16.473774,0.5262260437011719 +766,17,18.178644,1.1786441802978516 +767,20,20.413975,0.4139747619628906 +768,19,15.067735,3.932265281677246 +769,15,16.245743,1.2457427978515625 +770,17,17.139412,0.13941192626953125 +771,21,11.149748,9.850252151489258 +772,17,17.239883,0.2398834228515625 +773,18,12.11272,5.887280464172363 +774,20,20.858395,0.8583946228027344 +775,15,19.514423,4.514423370361328 +776,20,20.499676,0.4996757507324219 +777,19,7.516402,11.483598232269287 +778,15,17.180162,2.1801624298095703 +779,18,13.539413,4.4605865478515625 +780,17,18.104218,1.104217529296875 +781,16,15.717411,0.2825889587402344 +782,20,15.996971,4.003028869628906 +783,20,24.7802,4.78019905090332 +784,20,14.947727,5.052272796630859 +785,17,13.405008,3.594991683959961 +786,17,14.449267,2.5507326126098633 +787,19,10.400273,8.599726676940918 +788,18,17.816256,0.1837444305419922 +789,18,6.8822713,11.11772871017456 +790,16,12.661802,3.338197708129883 +791,17,15.65761,1.3423900604248047 +792,13,20.050722,7.050722122192383 +793,20,15.924785,4.0752153396606445 +794,19,15.541115,3.4588851928710938 +795,14,11.42213,2.5778703689575195 +796,18,16.89943,1.1005706787109375 +797,18,18.723316,0.7233161926269531 +798,20,18.0368,1.9631996154785156 +799,18,17.499414,0.5005855560302734 +800,20,23.093311,3.093311309814453 +801,16,11.146724,4.853276252746582 +802,21,17.167196,3.832803726196289 +803,17,10.590212,6.409788131713867 +804,20,21.424831,1.4248313903808594 +805,18,13.616549,4.383451461791992 +806,18,16.25482,1.7451801300048828 +807,20,18.927855,1.0721454620361328 +808,20,21.729834,1.7298336029052734 +809,18,23.215357,5.215356826782227 +810,17,19.901136,2.9011363983154297 +811,20,14.155639,5.844361305236816 +812,16,10.205401,5.794598579406738 +813,20,18.060383,1.9396171569824219 +814,21,13.1001,7.899900436401367 +815,17,10.888924,6.111076354980469 +816,13,1.6640632,11.335936784744263 +817,21,17.988575,3.011425018310547 +818,13,19.44714,6.447139739990234 +819,17,12.635679,4.364320755004883 +820,17,16.452213,0.5477867126464844 +821,17,18.236118,1.2361183166503906 +822,13,10.240747,2.7592525482177734 +823,17,12.834776,4.165224075317383 +824,17,17.405056,0.4050559997558594 +825,21,22.67148,1.6714801788330078 +826,17,15.354126,1.6458740234375 +827,17,13.734064,3.2659358978271484 +828,21,17.89333,3.106670379638672 +829,21,17.473654,3.526346206665039 +830,17,14.643227,2.3567733764648438 +831,21,21.3878,0.3878002166748047 +832,17,10.691155,6.308844566345215 +833,17,17.269045,0.2690448760986328 +834,17,19.873142,2.8731422424316406 +835,21,13.871129,7.128870964050293 +836,17,9.834212,7.165787696838379 +837,18,13.318033,4.681966781616211 +838,18,14.238127,3.761873245239258 +839,21,11.337804,9.662196159362793 +840,18,14.581423,3.418577194213867 +841,22,22.802467,0.8024673461914062 +842,22,19.280434,2.7195663452148438 +843,22,13.9410715,8.058928489685059 +844,14,20.695442,6.695442199707031 +845,18,22.707647,4.707647323608398 +846,18,15.084483,2.9155168533325195 +847,18,19.94741,1.9474105834960938 +848,14,17.44784,3.4478397369384766 +849,18,15.737882,2.262118339538574 +850,18,13.490132,4.5098676681518555 +851,22,15.880692,6.1193084716796875 +852,18,21.473022,3.4730224609375 +853,18,20.093336,2.0933361053466797 +854,18,18.630276,0.6302757263183594 +855,22,16.843441,5.156558990478516 +856,18,18.48309,0.4830894470214844 +857,18,16.48215,1.5178508758544922 
+858,19,21.612398,2.612398147583008 +859,21,18.763792,2.236207962036133 +860,22,24.160826,2.160825729370117 +861,21,9.584193,11.415806770324707 +862,18,17.864922,0.13507843017578125 +863,22,22.179914,0.1799144744873047 +864,18,21.636642,3.6366424560546875 +865,17,12.997567,4.002432823181152 +866,18,12.9915085,5.008491516113281 +867,19,19.119148,0.11914825439453125 +868,14,18.540194,4.540193557739258 +869,14,19.781784,5.7817840576171875 +870,19,16.57438,2.425619125366211 +871,22,24.188068,2.188068389892578 +872,18,14.793223,3.2067766189575195 +873,18,17.563187,0.4368133544921875 +874,22,18.293436,3.706563949584961 +875,22,14.438796,7.561203956604004 +876,19,15.678484,3.3215160369873047 +877,18,12.342934,5.657066345214844 +878,18,13.410714,4.589285850524902 +879,14,15.359278,1.3592777252197266 +880,14,14.361047,0.36104679107666016 +881,18,15.537906,2.462094306945801 +882,18,17.147646,0.8523540496826172 +883,18,16.55866,1.4413394927978516 +884,14,9.050895,4.949105262756348 +885,18,19.174253,1.1742534637451172 +886,22,18.687748,3.3122520446777344 +887,22,13.215557,8.784442901611328 +888,14,17.929352,3.929351806640625 +889,18,20.016964,2.0169639587402344 +890,15,21.431671,6.431671142578125 +891,21,14.659851,6.34014892578125 +892,20,14.63538,5.364620208740234 +893,21,12.89069,8.109310150146484 +894,18,17.956055,0.0439453125 +895,18,13.711801,4.288199424743652 +896,16,12.231891,3.7681093215942383 +897,18,28.3377,10.337699890136719 +898,16,18.49793,2.4979305267333984 +899,14,12.86012,1.1398801803588867 +900,21,19.163822,1.8361778259277344 +901,14,20.556215,6.556215286254883 +902,17,19.159912,2.159912109375 +903,18,21.698961,3.6989612579345703 +904,16,17.955473,1.9554729461669922 +905,15,18.335247,3.335247039794922 +906,16,8.631976,7.368023872375488 +907,15,20.488457,5.488456726074219 +908,18,21.282518,3.2825183868408203 +909,19,19.610748,0.610748291015625 +910,20,21.581743,1.5817432403564453 +911,16,15.921384,0.07861614227294922 +912,16,18.035572,2.035572052001953 +913,14,20.63039,6.630390167236328 +914,21,22.041504,1.04150390625 +915,19,18.72261,0.2773895263671875 +916,18,16.95473,1.0452709197998047 +917,19,18.487103,0.5128974914550781 +918,18,17.052946,0.9470539093017578 +919,14,19.013052,5.013051986694336 +920,21,17.539488,3.460512161254883 +921,16,16.323275,0.3232746124267578 +922,19,19.390026,0.3900260925292969 +923,18,23.157608,5.1576080322265625 +924,1,1.017395,0.01739501953125 +925,1,10.637269,9.637269020080566 +926,1,3.1770127,2.1770126819610596 +927,1,1.0472401,0.04724013805389404 +928,1,2.8767896,1.8767895698547363 +929,2,2.248455,0.24845504760742188 +930,1,1.830714,0.8307139873504639 +931,1,1.0724885,0.07248854637145996 +932,1,1.0204474,0.020447373390197754 +933,1,1.0291426,0.02914261817932129 +934,1,4.1147385,3.1147384643554688 +935,1,1.0121648,0.012164831161499023 +936,1,1.0604393,0.060439348220825195 +937,1,3.4890804,2.4890804290771484 +938,1,2.397093,1.3970930576324463 +939,1,1.0307676,0.03076756000518799 +940,1,1.0483855,0.04838550090789795 +941,1,2.4356992,1.435699224472046 +942,1,1.0210472,0.021047234535217285 +943,1,1.0242442,0.024244189262390137 +944,1,3.9913173,2.9913172721862793 +945,1,4.0591745,3.0591745376586914 +946,2,1.164777,0.8352229595184326 +947,1,1.9760478,0.9760477542877197 +948,1,1.0506247,0.050624728202819824 +949,1,1.0552491,0.05524909496307373 +950,1,1.0231835,0.023183465003967285 +951,1,1.0134804,0.013480424880981445 +952,1,1.0278189,0.027818918228149414 +953,1,1.013865,0.013864994049072266 +954,1,6.401195,5.401195049285889 
+955,1,1.0464609,0.046460866928100586 +956,1,1.0293528,0.029352784156799316 +957,1,1.0465096,0.046509623527526855 +958,1,1.0691178,0.06911778450012207 +959,1,1.0413712,0.04137122631072998 +960,1,2.5498693,1.5498692989349365 +961,1,1.1006949,0.10069489479064941 +962,4,20.158726,16.15872573852539 +963,1,1.069311,0.06931102275848389 +964,1,6.060406,5.06040620803833 +965,1,1.0677173,0.06771731376647949 +966,1,1.0190954,0.019095420837402344 +967,1,1.0123383,0.01233828067779541 +968,1,1.0199192,0.019919157028198242 +969,1,1.0176804,0.01768040657043457 +970,1,1.0224704,0.02247035503387451 +971,1,1.0681622,0.06816220283508301 +972,2,2.3727264,0.3727264404296875 +973,1,1.0995392,0.09953916072845459 +974,2,2.657047,0.6570470333099365 +975,2,1.0270535,0.9729465246200562 +976,2,3.8421042,1.842104196548462 +977,1,2.275586,1.2755858898162842 +978,1,1.1471676,0.14716756343841553 +979,1,3.3883328,2.3883328437805176 +980,1,1.0323755,0.032375454902648926 +981,1,1.0265905,0.026590466499328613 +982,1,22.076454,21.076454162597656 +983,2,2.2822194,0.28221940994262695 +984,2,1.0280422,0.971957802772522 +985,3,3.7440445,0.7440445423126221 +986,1,1.0460103,0.04601025581359863 +987,1,1.0422935,0.042293548583984375 +988,1,7.0215383,6.021538257598877 +989,4,1.037888,2.9621119499206543 +990,1,1.044902,0.04490196704864502 +991,3,6.20584,3.2058401107788086 +992,2,2.248913,0.24891304969787598 +993,1,5.2019176,4.20191764831543 +994,2,1.0450107,0.9549893140792847 +995,3,1.316661,1.68333899974823 +996,1,1.0389397,0.038939714431762695 +997,3,3.5238118,0.5238118171691895 +998,4,2.251729,1.7482709884643555 +999,4,1.0321732,2.9678268432617188 +1000,3,2.6885216,0.3114783763885498 +1001,1,2.87295,1.8729500770568848 +1002,2,1.0530717,0.9469282627105713 +1003,1,1.0242841,0.02428412437438965 +1004,2,1.0247147,0.9752852916717529 +1005,1,1.0216798,0.02167975902557373 +1006,1,1.5630438,0.5630438327789307 +1007,2,1.9290433,0.07095670700073242 +1008,1,1.2273833,0.22738325595855713 +1009,1,1.0161262,0.016126155853271484 +1010,3,1.1606015,1.8393985033035278 +1011,2,1.0242063,0.975793719291687 +1012,2,2.6614227,0.6614227294921875 +1013,1,1.0307231,0.030723094940185547 +1014,2,1.0221027,0.9778972864151001 +1015,3,3.7222173,0.722217321395874 +1016,1,1.0063796,0.006379604339599609 +1017,2,1.0570843,0.942915678024292 +1018,3,1.0275563,1.972443699836731 +1019,1,2.1271133,1.1271133422851562 +1020,4,3.1636832,0.8363168239593506 +1021,1,3.84367,2.843669891357422 +1022,2,19.110874,17.11087417602539 +1023,2,6.3089066,4.308906555175781 +1024,2,1.272002,0.7279980182647705 +1025,1,1.0176798,0.017679810523986816 +1026,3,1.0275571,1.972442865371704 +1027,1,5.2841277,4.284127712249756 +1028,4,2.4502654,1.5497345924377441 +1029,1,1.0289439,0.028943896293640137 +1030,3,2.656909,0.3430910110473633 +1031,3,3.141203,0.1412029266357422 +1032,2,1.0071834,0.9928165674209595 +1033,3,1.0005355,1.999464511871338 +1034,1,1.0227402,0.02274024486541748 +1035,1,1.0119913,0.011991262435913086 +1036,4,4.050383,0.05038309097290039 +1037,1,1.0147609,0.014760851860046387 +1038,1,1.0250032,0.02500319480895996 +1039,2,1.0328773,0.9671226739883423 +1040,3,8.54538,5.545379638671875 +1041,1,1.0291071,0.029107093811035156 +1042,1,1.0093576,0.009357571601867676 +1043,1,3.4739673,2.4739673137664795 +1044,1,1.0258346,0.02583456039428711 +1045,1,1.0221066,0.022106647491455078 +1046,1,1.3614991,0.3614990711212158 +1047,1,5.254864,4.25486421585083 +1048,1,1.0591424,0.059142351150512695 +1049,1,20.424643,19.42464256286621 +1050,4,3.3162045,0.6837954521179199 
+1051,1,1.0279832,0.02798318862915039 +1052,4,1.0600421,2.939957857131958 +1053,4,2.2041607,1.7958393096923828 +1054,1,1.0439036,0.04390358924865723 +1055,1,1.0417937,0.04179370403289795 +1056,1,1.0173889,0.01738893985748291 +1057,1,1.0440832,0.044083237648010254 +1058,2,1.0414927,0.9585072994232178 +1059,1,1.0967779,0.09677791595458984 +1060,1,2.0166707,1.0166707038879395 +1061,1,1.0132326,0.013232588768005371 +1062,19,13.287012,5.712987899780273 +1063,20,29.74006,9.740060806274414 +1064,22,16.788973,5.211027145385742 +1065,20,23.588438,3.588438034057617 +1066,15,17.799381,2.7993812561035156 +1067,19,21.034119,2.03411865234375 +1068,22,20.517126,1.4828739166259766 +1069,21,20.948929,0.0510711669921875 +1070,20,19.149286,0.8507137298583984 +1071,21,14.383991,6.616008758544922 +1072,21,20.07803,0.9219703674316406 +1073,20,3.5943327,16.405667304992676 +1074,15,15.571832,0.5718317031860352 +1075,20,16.456675,3.543325424194336 +1076,18,19.45648,1.4564800262451172 +1077,17,20.812792,3.8127918243408203 +1078,17,18.296534,1.2965335845947266 +1079,21,14.921828,6.078171730041504 +1080,17,11.938647,5.061352729797363 +1081,21,17.191715,3.8082847595214844 +1082,19,15.667587,3.3324127197265625 +1083,20,17.961475,2.038524627685547 +1084,20,22.070446,2.070446014404297 +1085,20,19.203485,0.7965145111083984 +1086,18,21.345058,3.3450584411621094 +1087,19,18.215317,0.7846832275390625 +1088,20,21.008059,1.0080585479736328 +1089,22,27.899593,5.899593353271484 +1090,22,18.054943,3.945056915283203 +1091,19,18.245115,0.7548847198486328 +1092,17,15.527865,1.4721345901489258 +1093,17,1.0209497,15.979050278663635 +1094,22,16.470726,5.529273986816406 +1095,19,21.786583,2.7865829467773438 +1096,20,17.40037,2.599630355834961 +1097,22,16.369205,5.630794525146484 +1098,15,13.881462,1.1185379028320312 +1099,17,18.279963,1.2799625396728516 +1100,21,16.319853,4.680147171020508 +1101,19,14.315687,4.68431282043457 +1102,20,18.289198,1.7108020782470703 +1103,21,25.902971,4.902971267700195 +1104,19,20.20818,1.2081794738769531 +1105,20,16.153212,3.8467884063720703 +1106,21,18.6939,2.306100845336914 +1107,17,9.5897875,7.410212516784668 +1108,22,13.248366,8.751633644104004 +1109,19,17.154758,1.8452415466308594 +1110,17,16.306479,0.6935214996337891 +1111,17,10.731491,6.2685089111328125 +1112,21,21.179594,0.1795940399169922 +1113,21,18.725618,2.274381637573242 +1114,15,17.872868,2.8728675842285156 +1115,22,17.315744,4.684255599975586 +1116,22,18.382055,3.6179447174072266 +1117,17,11.763603,5.236396789550781 +1118,19,5.178836,13.82116413116455 +1119,20,18.244646,1.7553539276123047 +1120,15,22.858692,7.858692169189453 +1121,22,19.688585,2.3114147186279297 +1122,21,19.436844,1.5631561279296875 +1123,16,18.500542,2.5005416870117188 +1124,17,20.921713,3.921712875366211 +1125,21,16.32071,4.679290771484375 +1126,21,11.378484,9.621516227722168 +1127,22,19.837332,2.162668228149414 +1128,23,20.21019,2.7898101806640625 +1129,20,13.7504635,6.249536514282227 +1130,21,17.678265,3.321735382080078 +1131,23,17.55954,5.440460205078125 +1132,17,18.236258,1.236257553100586 +1133,16,19.786285,3.786285400390625 +1134,22,21.408312,0.5916881561279297 +1135,17,20.883862,3.883861541748047 +1136,22,20.808668,1.1913318634033203 +1137,20,17.929787,2.0702133178710938 +1138,16,20.809052,4.809051513671875 +1139,23,19.869917,3.1300830841064453 +1140,18,10.3400545,7.659945487976074 +1141,20,15.025959,4.974040985107422 +1142,19,12.883324,6.116676330566406 +1143,23,19.85257,3.147430419921875 +1144,19,14.339415,4.660585403442383 +1145,23,19.413504,3.586496353149414 
+1146,23,19.957754,3.042245864868164 +1147,23,20.661692,2.338308334350586 +1148,16,18.718815,2.7188148498535156 +1149,21,21.214111,0.214111328125 +1150,20,16.559181,3.4408187866210938 +1151,16,21.5943,5.59429931640625 +1152,23,17.464071,5.535928726196289 +1153,21,16.946924,4.053075790405273 +1154,22,23.411812,1.4118118286132812 +1155,23,21.612421,1.3875789642333984 +1156,20,10.684456,9.315544128417969 +1157,20,18.202566,1.797433853149414 +1158,21,19.38969,1.6103096008300781 +1159,21,14.073641,6.926359176635742 +1160,20,16.520515,3.4794845581054688 +1161,16,15.925873,0.074127197265625 +1162,21,23.0315,2.0314998626708984 +1163,23,18.538113,4.461887359619141 +1164,20,22.037468,2.0374679565429688 +1165,23,23.74777,0.7477703094482422 +1166,21,14.047587,6.9524126052856445 +1167,23,21.00628,1.9937191009521484 +1168,21,13.512497,7.4875030517578125 +1169,19,19.923946,0.9239463806152344 +1170,23,17.315199,5.68480110168457 +1171,18,19.387707,1.3877067565917969 +1172,23,23.309849,0.3098487854003906 +1173,19,6.777262,12.222737789154053 +1174,23,14.822365,8.177635192871094 +1175,18,9.500952,8.499048233032227 +1176,23,20.16172,2.8382797241210938 +1177,20,17.0283,2.971700668334961 +1178,23,18.894192,4.105808258056641 +1179,21,21.87653,0.8765296936035156 +1180,21,19.744041,1.2559585571289062 +1181,23,34.06571,11.06570816040039 +1182,22,19.991064,2.0089359283447266 +1183,20,15.128324,4.871676445007324 +1184,23,16.52721,6.472789764404297 +1185,22,13.927475,8.072525024414062 +1186,22,10.015439,11.9845609664917 +1187,17,17.648132,0.64813232421875 +1188,23,16.747227,6.252773284912109 +1189,21,17.161112,3.838888168334961 +1190,20,19.920538,0.07946205139160156 +1191,18,15.691693,2.3083066940307617 +1192,22,16.586765,5.413234710693359 +1193,21,13.548172,7.4518280029296875 +1194,21,17.35495,3.645050048828125 +1195,23,20.622011,2.377988815307617 +1196,23,19.936121,3.0638790130615234 +1197,21,17.500954,3.4990463256835938 +1198,19,16.083342,2.916658401489258 +1199,24,19.557068,4.44293212890625 +1200,23,13.356878,9.643121719360352 +1201,24,23.443068,0.5569324493408203 +1202,24,17.76751,6.232490539550781 +1203,20,10.912973,9.087026596069336 +1204,24,18.532896,5.467103958129883 +1205,21,17.599497,3.400503158569336 +1206,24,17.905025,6.094974517822266 +1207,19,12.526435,6.473565101623535 +1208,24,18.82385,5.176149368286133 +1209,18,20.593893,2.593893051147461 +1210,18,18.735266,0.7352657318115234 +1211,19,18.566433,0.4335670471191406 +1212,18,19.609406,1.609405517578125 +1213,19,17.932138,1.067861557006836 +1214,18,22.197596,4.197595596313477 +1215,22,19.464897,2.5351028442382812 +1216,21,16.004856,4.995143890380859 +1217,21,21.57573,0.5757293701171875 +1218,25,17.883945,7.116054534912109 +1219,24,23.417803,0.5821971893310547 +1220,22,16.741447,5.258552551269531 +1221,17,19.383314,2.3833141326904297 +1222,17,25.85019,8.850189208984375 +1223,22,18.888369,3.111631393432617 +1224,22,21.792044,0.20795631408691406 +1225,19,18.140802,0.8591976165771484 +1226,21,20.188238,0.8117618560791016 +1227,22,21.826908,0.17309188842773438 +1228,21,7.3537455,13.646254539489746 +1229,25,21.992426,3.0075740814208984 +1230,17,18.293703,1.2937030792236328 +1231,22,18.256447,3.7435531616210938 +1232,22,21.045666,0.9543342590332031 +1233,21,17.26854,3.7314605712890625 +1234,21,23.848303,2.8483028411865234 +1235,21,15.660497,5.339503288269043 +1236,21,35.173466,14.173465728759766 +1237,20,25.438757,5.438756942749023 +1238,24,23.60841,0.3915901184082031 +1239,22,15.675277,6.324723243713379 +1240,21,21.17606,0.17605972290039062 
+1241,21,17.317556,3.682443618774414 +1242,24,20.932976,3.0670242309570312 +1243,21,15.75202,5.247980117797852 +1244,24,19.042929,4.957071304321289 +1245,21,18.357336,2.6426639556884766 +1246,20,18.32202,1.6779804229736328 +1247,21,18.423279,2.57672119140625 +1248,21,19.299347,1.700653076171875 +1249,22,22.514534,0.5145339965820312 +1250,22,16.505499,5.494501113891602 +1251,23,18.306307,4.693693161010742 +1252,26,20.101133,5.898866653442383 +1253,23,18.38514,4.614860534667969 +1254,20,20.853838,0.8538379669189453 +1255,23,18.546186,4.453813552856445 +1256,19,22.946266,3.9462661743164062 +1257,23,16.679522,6.320478439331055 +1258,23,21.123661,1.8763389587402344 +1259,25,12.817652,12.182348251342773 +1260,22,18.067736,3.9322643280029297 +1261,19,23.540228,4.540227890014648 +1262,22,22.839348,0.8393478393554688 +1263,23,17.619911,5.380088806152344 +1264,23,26.02097,3.0209693908691406 +1265,24,18.50004,5.499959945678711 +1266,21,18.838192,2.1618080139160156 +1267,22,15.321735,6.678264617919922 +1268,24,21.440178,2.5598220825195312 +1269,21,16.632736,4.3672637939453125 +1270,20,21.790373,1.7903728485107422 +1271,25,22.717398,2.282602310180664 +1272,22,7.633589,14.366411209106445 +1273,22,16.757183,5.242816925048828 +1274,22,9.828626,12.171374320983887 +1275,21,16.493952,4.506048202514648 +1276,24,20.475601,3.5243988037109375 +1277,23,16.688698,6.311302185058594 +1278,24,20.291912,3.708087921142578 +1279,20,19.089237,0.9107627868652344 +1280,19,18.189453,0.810546875 +1281,24,32.19183,8.191829681396484 +1282,25,22.74984,2.2501602172851562 +1283,20,17.990837,2.0091629028320312 +1284,25,19.10519,5.894809722900391 +1285,22,16.668882,5.331117630004883 +1286,21,11.933496,9.066503524780273 +1287,24,18.502636,5.497364044189453 +1288,20,16.049706,3.9502944946289062 +1289,18,21.348272,3.3482723236083984 +1290,22,23.369583,1.3695831298828125 +1291,18,19.981121,1.9811210632324219 +1292,22,21.022434,0.9775657653808594 +1293,18,24.191559,6.191558837890625 +1294,22,26.871014,4.871013641357422 +1295,24,21.400583,2.599416732788086 +1296,22,5.95129,16.048709869384766 +1297,25,29.11262,4.112619400024414 +1298,22,9.193417,12.806583404541016 +1299,25,17.811188,7.188812255859375 +1300,24,20.61693,3.3830699920654297 +1301,23,10.847131,12.15286922454834 +1302,25,16.900736,8.099264144897461 +1303,26,17.296053,8.703947067260742 +1304,25,22.573814,2.4261856079101562 +1305,25,18.367615,6.63238525390625 +1306,24,11.248272,12.751728057861328 +1307,25,18.515263,6.484737396240234 +1308,22,13.338298,8.661702156066895 +1309,22,20.490816,1.5091838836669922 +1310,22,13.431893,8.568106651306152 +1311,18,19.684847,1.6848468780517578 +1312,22,20.549706,1.4502944946289062 +1313,22,5.51576,16.48424005508423 +1314,22,19.022064,2.977935791015625 +1315,20,16.479317,3.5206832885742188 +1316,21,19.198566,1.8014335632324219 +1317,24,14.402776,9.597224235534668 +1318,25,20.434687,4.565313339233398 +1319,23,18.38173,4.6182708740234375 +1320,22,20.929523,1.0704765319824219 +1321,22,11.386292,10.613707542419434 +1322,22,24.754341,2.7543411254882812 +1323,18,19.159727,1.1597270965576172 +1324,23,14.399423,8.600577354431152 +1325,22,17.86049,4.139509201049805 +1326,22,18.750496,3.2495040893554688 +1327,24,19.816772,4.1832275390625 +1328,21,19.127537,1.8724632263183594 +1329,26,19.060905,6.939094543457031 +1330,22,18.303873,3.696126937866211 +1331,22,21.141418,0.85858154296875 +1332,22,26.018276,4.018276214599609 +1333,26,24.360384,1.6396160125732422 +1334,22,18.048342,3.951658248901367 +1335,21,19.32152,1.6784801483154297 
+1336,23,24.459307,1.4593067169189453 +1337,23,23.439934,0.43993377685546875 +1338,23,19.506433,3.4935665130615234 +1339,23,22.003973,0.9960269927978516 +1340,26,17.226784,8.773216247558594 +1341,24,20.707304,3.292695999145508 +1342,20,18.654753,1.3452472686767578 +1343,26,22.593924,3.406076431274414 +1344,20,23.8254,3.825399398803711 +1345,22,22.446775,0.4467754364013672 +1346,20,23.078918,3.07891845703125 +1347,20,13.072753,6.927247047424316 +1348,23,20.155966,2.844034194946289 +1349,21,14.805112,6.194888114929199 +1350,22,19.532917,2.467082977294922 +1351,20,22.65651,2.6565093994140625 +1352,26,18.216423,7.783576965332031 +1353,21,20.242552,0.7574481964111328 +1354,26,26.522472,0.5224723815917969 +1355,23,19.023142,3.976858139038086 +1356,21,23.066666,2.0666656494140625 +1357,22,17.758099,4.241901397705078 +1358,23,23.06968,0.06967926025390625 +1359,22,15.561159,6.438840866088867 +1360,23,22.507662,0.4923381805419922 +1361,21,21.637339,0.6373386383056641 +1362,19,22.791832,3.7918319702148438 +1363,22,17.561123,4.438877105712891 +1364,23,21.731411,1.2685890197753906 +1365,25,20.762794,4.237205505371094 +1366,20,19.586027,0.4139728546142578 +1367,23,21.202322,1.797677993774414 +1368,23,14.862358,8.137641906738281 +1369,22,4.9623494,17.03765058517456 +1370,26,20.542747,5.457252502441406 +1371,22,20.131763,1.8682365417480469 +1372,19,28.362621,9.362621307373047 +1373,21,22.869843,1.869842529296875 +1374,23,20.301357,2.6986427307128906 +1375,25,16.573969,8.426031112670898 +1376,21,18.661238,2.3387622833251953 +1377,22,19.437216,2.562784194946289 +1378,25,30.117363,5.117362976074219 +1379,20,18.216133,1.7838668823242188 +1380,21,17.977612,3.022388458251953 +1381,23,21.649385,1.3506145477294922 +1382,20,19.832317,0.16768264770507812 +1383,26,20.056395,5.943605422973633 +1384,19,20.624712,1.6247119903564453 +1385,23,18.557177,4.44282341003418 +1386,27,20.923445,6.076555252075195 +1387,21,18.682762,2.3172378540039062 +1388,21,22.99311,1.9931106567382812 +1389,24,18.037422,5.962577819824219 +1390,26,21.753986,4.246013641357422 +1391,19,15.81475,3.1852502822875977 +1392,23,16.176105,6.823894500732422 +1393,23,21.944952,1.0550479888916016 +1394,20,20.055101,0.05510139465332031 +1395,23,17.207008,5.792991638183594 +1396,23,16.725792,6.274208068847656 +1397,26,26.50981,0.5098094940185547 +1398,24,24.82409,0.8240890502929688 +1399,23,19.112547,3.887453079223633 +1400,19,18.571402,0.42859840393066406 +1401,23,22.98831,0.011690139770507812 +1402,19,12.80032,6.199680328369141 +1403,21,28.061716,7.061716079711914 +1404,24,22.479733,1.5202674865722656 +1405,20,14.551512,5.448488235473633 +1406,23,21.410257,1.589742660522461 +1407,23,19.683533,3.31646728515625 +1408,25,20.145283,4.854717254638672 +1409,22,22.82565,0.8256492614746094 +1410,23,18.605484,4.3945159912109375 +1411,24,21.250837,2.7491626739501953 +1412,24,22.53854,1.4614601135253906 +1413,27,20.183357,6.816642761230469 +1414,26,23.754726,2.2452735900878906 +1415,22,16.054579,5.94542121887207 +1416,23,19.745686,3.254314422607422 +1417,26,21.9932,4.006799697875977 +1418,23,14.377949,8.622051239013672 +1419,23,20.983027,2.0169734954833984 +1420,27,27.224571,0.22457122802734375 +1421,24,23.32649,0.6735095977783203 +1422,23,18.442022,4.557977676391602 +1423,23,20.188879,2.8111209869384766 +1424,25,22.797724,2.2022762298583984 +1425,23,20.793856,2.206144332885742 +1426,23,33.72575,10.725749969482422 +1427,23,18.844608,4.155391693115234 +1428,26,17.939503,8.060497283935547 +1429,24,17.544714,6.455286026000977 +1430,23,17.756924,5.243076324462891 
+1431,22,18.519318,3.480682373046875 +1432,24,21.107275,2.8927249908447266 +1433,24,23.831743,0.1682567596435547 +1434,23,21.49562,1.5043792724609375 +1435,23,20.250643,2.749357223510742 +1436,23,16.908018,6.091981887817383 +1437,23,22.254934,0.7450656890869141 +1438,23,19.58684,3.4131603240966797 +1439,22,21.60614,0.39385986328125 +1440,22,22.10618,0.10618019104003906 +1441,24,23.762756,0.23724365234375 +1442,23,24.481625,1.4816246032714844 +1443,21,14.10384,6.896160125732422 +1444,27,22.199696,4.800304412841797 +1445,26,22.179403,3.820596694946289 +1446,25,24.438923,0.5610771179199219 +1447,20,13.814353,6.185647010803223 +1448,26,20.73253,5.26746940612793 +1449,24,22.343903,1.656097412109375 +1450,23,19.35467,3.6453304290771484 +1451,20,18.287605,1.7123947143554688 +1452,24,19.763577,4.236423492431641 +1453,23,14.885368,8.114631652832031 +1454,24,23.528893,0.47110748291015625 +1455,24,20.533474,3.4665260314941406 +1456,24,21.391958,2.608041763305664 +1457,21,19.42992,1.5700798034667969 +1458,25,22.353853,2.646146774291992 +1459,21,27.933535,6.933534622192383 +1460,26,20.20238,5.79762077331543 +1461,21,14.16324,6.836759567260742 +1462,22,15.948377,6.051623344421387 +1463,28,22.572958,5.427042007446289 +1464,24,17.28345,6.716550827026367 +1465,20,17.457188,2.5428123474121094 +1466,23,17.744991,5.255008697509766 +1467,20,23.023676,3.0236759185791016 +1468,24,21.443653,2.556346893310547 +1469,28,10.888648,17.11135196685791 +1470,24,20.312174,3.687826156616211 +1471,22,21.579525,0.4204750061035156 +1472,24,21.431732,2.568267822265625 +1473,24,22.123642,1.8763580322265625 +1474,27,26.020403,0.9795970916748047 +1475,22,14.075,7.925000190734863 +1476,20,19.019411,0.9805889129638672 +1477,23,21.565248,1.4347515106201172 +1478,21,20.19806,0.80194091796875 +1479,24,16.678587,7.321413040161133 +1480,24,19.054676,4.945323944091797 +1481,21,19.56389,1.4361095428466797 +1482,23,18.839577,4.160423278808594 +1483,24,20.3635,3.6364994049072266 +1484,28,17.793865,10.206134796142578 +1485,24,17.38897,6.611030578613281 +1486,20,17.893337,2.1066627502441406 +1487,25,21.070639,3.929361343383789 +1488,21,20.129276,0.8707237243652344 +1489,23,21.780653,1.2193470001220703 +1490,20,17.641434,2.3585662841796875 +1491,27,13.96216,13.037839889526367 +1492,24,18.145126,5.8548736572265625 +1493,24,43.84469,19.844688415527344 +1494,25,22.487564,2.5124359130859375 +1495,28,19.90208,8.097919464111328 +1496,20,15.624388,4.375612258911133 +1497,23,21.427485,1.5725154876708984 +1498,23,17.658802,5.341197967529297 +1499,24,15.867513,8.132487297058105 +1500,20,23.996782,3.9967823028564453 +1501,26,19.291239,6.708761215209961 +1502,21,17.55543,3.444570541381836 +1503,24,26.010248,2.0102481842041016 +1504,23,18.196264,4.803735733032227 +1505,24,23.68588,0.3141193389892578 +1506,24,19.40779,4.59221076965332 +1507,24,22.602453,1.3975467681884766 +1508,25,21.33739,3.662609100341797 +1509,27,20.504389,6.495611190795898 +1510,24,24.559532,0.5595321655273438 +1511,20,23.992113,3.9921131134033203 +1512,25,19.861202,5.138797760009766 +1513,25,22.619596,2.380403518676758 +1514,23,21.08506,1.9149398803710938 +1515,24,18.79022,5.209779739379883 +1516,21,25.343935,4.343935012817383 +1517,24,20.398363,3.6016368865966797 +1518,25,18.728687,6.271312713623047 +1519,24,20.653042,3.3469581604003906 +1520,24,27.319223,3.319223403930664 +1521,22,8.390736,13.609264373779297 +1522,27,20.864786,6.135213851928711 +1523,26,21.295801,4.704198837280273 +1524,24,19.397316,4.602684020996094 +1525,24,19.003916,4.996084213256836 
+1526,23,21.058792,1.9412078857421875 +1527,23,20.092472,2.9075279235839844 +1528,22,21.18126,0.8187408447265625 +1529,24,19.097424,4.902576446533203 +1530,25,22.866447,2.1335525512695312 +1531,23,18.7802,4.21980094909668 +1532,24,17.259035,6.740964889526367 +1533,24,23.561838,0.43816184997558594 +1534,23,21.954758,1.0452423095703125 +1535,24,18.995764,5.004236221313477 +1536,23,17.690302,5.309698104858398 +1537,25,21.478004,3.5219955444335938 +1538,25,26.993595,1.9935951232910156 +1539,24,18.332758,5.667242050170898 +1540,25,18.247513,6.7524871826171875 +1541,28,17.70651,10.293489456176758 +1542,24,21.643337,2.3566627502441406 +1543,24,23.228077,0.7719230651855469 +1544,20,21.455244,1.4552440643310547 +1545,22,21.217144,0.7828559875488281 +1546,25,24.885057,0.11494255065917969 +1547,21,20.886377,0.11362266540527344 +1548,24,19.690323,4.3096771240234375 +1549,24,22.525375,1.4746246337890625 +1550,24,17.708149,6.291851043701172 +1551,27,14.690596,12.309404373168945 +1552,23,21.042162,1.9578380584716797 +1553,25,19.402952,5.597047805786133 +1554,24,24.668137,0.6681365966796875 +1555,24,19.414083,4.585916519165039 +1556,24,21.02888,2.971120834350586 +1557,24,16.82401,7.175989151000977 +1558,26,16.537146,9.462854385375977 +1559,22,5.678746,16.321253776550293 +1560,25,27.024065,2.0240650177001953 +1561,24,18.346956,5.653043746948242 +1562,23,23.108831,0.10883140563964844 +1563,25,18.969814,6.030185699462891 +1564,26,24.001974,1.998025894165039 +1565,24,24.10093,0.10092926025390625 +1566,24,19.75144,4.248559951782227 +1567,24,17.777327,6.222673416137695 +1568,24,22.73262,1.2673797607421875 +1569,20,5.179882,14.820117950439453 +1570,24,21.695457,2.3045425415039062 +1571,24,16.42059,7.579410552978516 +1572,26,25.501015,0.49898529052734375 +1573,24,25.431799,1.4317989349365234 +1574,24,18.952734,5.047266006469727 +1575,28,22.35204,5.647960662841797 +1576,28,22.332954,5.667045593261719 +1577,23,21.049147,1.9508533477783203 +1578,24,24.511486,0.5114860534667969 +1579,24,17.726973,6.273027420043945 +1580,24,18.57849,5.421510696411133 +1581,24,10.68768,13.3123197555542 +1582,20,16.545137,3.454862594604492 +1583,24,19.020967,4.979032516479492 +1584,27,25.173702,1.8262977600097656 +1585,20,17.522411,2.477588653564453 +1586,24,17.896236,6.103763580322266 +1587,22,23.89584,1.8958396911621094 +1588,20,21.56278,1.5627803802490234 +1589,24,27.24909,3.2490901947021484 +1590,28,17.249271,10.750728607177734 +1591,20,21.50635,1.5063495635986328 +1592,24,20.059965,3.940034866333008 +1593,20,24.717167,4.717166900634766 +1594,28,23.200745,4.79925537109375 +1595,28,21.306568,6.693431854248047 +1596,20,20.471117,0.4711170196533203 +1597,26,37.365627,11.36562728881836 +1598,24,19.59109,4.408910751342773 +1599,24,20.632957,3.3670425415039062 +1600,21,20.654018,0.3459815979003906 +1601,21,21.88295,0.8829498291015625 +1602,25,18.731749,6.268251419067383 +1603,25,26.944796,1.9447956085205078 +1604,26,15.732632,10.26736831665039 +1605,25,20.43105,4.568950653076172 +1606,21,23.731682,2.7316818237304688 +1607,29,20.179,8.820999145507812 +1608,25,28.357592,3.3575916290283203 +1609,29,21.561766,7.438234329223633 +1610,25,21.859343,3.140657424926758 +1611,25,21.017353,3.982646942138672 +1612,23,20.814997,2.1850032806396484 +1613,25,22.764124,2.2358760833740234 +1614,25,22.866175,2.1338253021240234 +1615,25,22.909307,2.0906925201416016 +1616,26,26.7502,0.7502002716064453 +1617,25,23.075645,1.9243545532226562 +1618,29,24.207476,4.792524337768555 +1619,25,21.347052,3.6529483795166016 +1620,25,18.634024,6.365976333618164 
+1621,21,20.657562,0.342437744140625 +1622,29,18.510107,10.489892959594727 +1623,25,14.629149,10.370850563049316 +1624,25,14.274899,10.725101470947266 +1625,29,17.70788,11.292119979858398 +1626,21,18.89279,2.107210159301758 +1627,25,20.889078,4.110921859741211 +1628,26,21.354889,4.645111083984375 +1629,25,24.058441,0.941558837890625 +1630,29,26.905453,2.0945472717285156 +1631,22,22.139887,0.13988685607910156 +1632,28,11.996509,16.003491401672363 +1633,21,23.787546,2.787546157836914 +1634,25,19.509556,5.490444183349609 +1635,28,26.796463,1.2035369873046875 +1636,21,16.566528,4.4334716796875 +1637,21,21.903849,0.9038486480712891 +1638,26,19.235546,6.764453887939453 +1639,28,25.764778,2.2352218627929688 +1640,25,19.32294,5.677059173583984 +1641,26,28.078518,2.0785179138183594 +1642,25,20.16534,4.834659576416016 +1643,25,30.92065,5.920650482177734 +1644,25,18.489042,6.510957717895508 +1645,29,29.960678,0.9606781005859375 +1646,25,30.060982,5.060981750488281 +1647,23,23.879923,0.8799228668212891 +1648,23,22.800375,0.19962501525878906 +1649,25,26.096449,1.0964488983154297 +1650,25,31.2484,6.24839973449707 +1651,25,23.499176,1.500823974609375 +1652,21,19.29601,1.7039909362792969 +1653,26,20.755241,5.244758605957031 +1654,21,17.34081,3.659189224243164 +1655,25,16.984055,8.015945434570312 +1656,25,21.739185,3.260814666748047 +1657,25,22.894432,2.1055679321289062 +1658,24,22.54529,1.4547100067138672 +1659,22,23.461252,1.461252212524414 +1660,26,22.90145,3.098550796508789 +1661,25,14.893528,10.10647201538086 +1662,25,22.073093,2.9269065856933594 +1663,21,19.598616,1.4013843536376953 +1664,24,20.057781,3.942218780517578 +1665,24,22.076311,1.9236888885498047 +1666,25,20.615564,4.384435653686523 +1667,21,30.38957,9.389570236206055 +1668,21,22.329319,1.3293190002441406 +1669,25,21.70526,3.294740676879883 +1670,21,22.966198,1.9661979675292969 +1671,24,23.116022,0.8839778900146484 +1672,21,22.301323,1.3013229370117188 +1673,24,22.286371,1.7136287689208984 +1674,23,14.550065,8.449934959411621 +1675,25,21.289139,3.7108612060546875 +1676,21,12.503082,8.496917724609375 +1677,25,20.121796,4.878204345703125 +1678,21,13.520898,7.47910213470459 +1679,23,21.555124,1.444875717163086 +1680,25,22.993896,2.006103515625 +1681,24,17.457645,6.542354583740234 +1682,25,22.797333,2.202667236328125 +1683,24,24.616873,0.6168727874755859 +1684,25,19.37482,5.625179290771484 +1685,28,50.191967,22.191967010498047 +1686,25,29.212933,4.212932586669922 +1687,21,29.518932,8.518932342529297 +1688,25,17.030783,7.969217300415039 +1689,23,7.9206867,15.079313278198242 +1690,24,18.413944,5.586055755615234 +1691,25,18.387125,6.612874984741211 +1692,25,29.134249,4.134248733520508 +1693,24,24.298761,0.29876136779785156 +1694,25,22.549082,2.450918197631836 +1695,24,20.753576,3.2464237213134766 +1696,25,20.106586,4.893413543701172 +1697,25,28.425766,3.4257659912109375 +1698,25,21.761007,3.238992691040039 +1699,25,20.12556,4.874439239501953 +1700,23,23.120598,0.12059783935546875 +1701,21,23.096937,2.0969371795654297 +1702,25,18.264898,6.735101699829102 +1703,25,24.16522,0.8347797393798828 +1704,23,31.02124,8.021240234375 +1705,21,15.8059,5.194100379943848 +1706,23,22.16966,0.8303394317626953 +1707,25,24.022669,0.9773311614990234 +1708,25,19.528994,5.471006393432617 +1709,21,28.994251,7.994251251220703 +1710,29,18.06558,10.934419631958008 +1711,23,26.486515,3.4865150451660156 +1712,23,23.470211,0.4702110290527344 +1713,21,17.16731,3.8326892852783203 +1714,25,20.8888,4.111200332641602 +1715,23,24.833363,1.8333625793457031 
+1716,21,17.065285,3.9347152709960938 +1717,23,27.554714,4.554714202880859 +1718,29,21.388298,7.611701965332031 +1719,21,21.386599,0.3865985870361328 +1720,21,22.673115,1.6731147766113281 +1721,25,31.47908,6.4790802001953125 +1722,21,22.26192,1.2619209289550781 +1723,23,21.505116,1.4948844909667969 +1724,21,23.301956,2.3019561767578125 +1725,21,20.155684,0.8443164825439453 +1726,25,8.574541,16.425458908081055 +1727,21,23.224625,2.2246246337890625 +1728,25,19.595608,5.404392242431641 +1729,25,16.734444,8.265556335449219 +1730,21,20.797802,0.20219802856445312 +1731,22,23.671516,1.6715164184570312 +1732,21,21.941757,0.9417572021484375 +1733,21,21.508776,0.5087757110595703 +1734,25,23.544434,1.45556640625 +1735,23,28.50238,5.50238037109375 +1736,22,5.0693216,16.930678367614746 +1737,24,27.758976,3.7589759826660156 +1738,22,23.926594,1.9265937805175781 +1739,22,22.916346,0.9163455963134766 +1740,22,22.42626,0.42625999450683594 +1741,24,19.282948,4.717052459716797 +1742,26,23.882774,2.1172256469726562 +1743,22,20.694946,1.3050537109375 +1744,22,19.029148,2.9708518981933594 +1745,24,23.344517,0.6554832458496094 +1746,22,17.312927,4.68707275390625 +1747,24,22.296883,1.7031173706054688 +1748,24,17.582932,6.4170684814453125 +1749,22,22.907871,0.9078712463378906 +1750,25,18.914688,6.0853118896484375 +1751,30,24.441837,5.558162689208984 +1752,24,19.278461,4.721538543701172 +1753,26,17.017899,8.982101440429688 +1754,22,19.409351,2.590648651123047 +1755,30,14.02168,15.978320121765137 +1756,25,21.275154,3.7248458862304688 +1757,30,20.68723,9.31277084350586 +1758,26,33.038506,7.038505554199219 +1759,25,13.484128,11.51587200164795 +1760,22,22.875647,0.8756465911865234 +1761,26,12.279628,13.720372200012207 +1762,22,19.359774,2.640226364135742 +1763,24,24.520409,0.5204086303710938 +1764,22,17.461658,4.538341522216797 +1765,25,21.24611,3.7538890838623047 +1766,30,28.465233,1.5347671508789062 +1767,22,12.79125,9.208749771118164 +1768,26,16.8417,9.158300399780273 +1769,26,20.130127,5.869873046875 +1770,22,32.313038,10.313037872314453 +1771,22,21.56227,0.4377307891845703 +1772,26,19.978973,6.021026611328125 +1773,27,21.894197,5.105802536010742 +1774,22,19.879255,2.1207447052001953 +1775,22,18.256437,3.743562698364258 +1776,30,20.34676,9.653240203857422 +1777,26,22.698202,3.301797866821289 +1778,29,19.308048,9.691951751708984 +1779,22,30.882488,8.882488250732422 +1780,24,19.711823,4.288177490234375 +1781,26,19.100353,6.899646759033203 +1782,23,21.16495,1.8350505828857422 +1783,27,21.575577,5.4244232177734375 +1784,30,25.051712,4.9482879638671875 +1785,27,27.310846,0.31084632873535156 +1786,22,38.005234,16.005233764648438 +1787,22,24.66682,2.666820526123047 +1788,26,19.728596,6.271404266357422 +1789,24,6.595437,17.404562950134277 +1790,30,21.99111,8.008890151977539 +1791,26,22.194223,3.805776596069336 +1792,26,22.353125,3.6468753814697266 +1793,26,27.709011,1.7090110778808594 +1794,26,21.464998,4.535001754760742 +1795,26,21.030054,4.969945907592773 +1796,22,32.860336,10.860336303710938 +1797,22,19.517138,2.4828624725341797 +1798,30,18.90829,11.09170913696289 +1799,30,23.63227,6.367729187011719 +1800,22,22.838932,0.8389320373535156 +1801,26,21.800615,4.199384689331055 +1802,27,24.30507,2.6949291229248047 +1803,22,25.407331,3.4073314666748047 +1804,27,22.05159,4.9484100341796875 +1805,30,18.12153,11.878469467163086 +1806,30,20.119207,9.880792617797852 +1807,30,19.999912,10.00008773803711 +1808,30,27.096132,2.903867721557617 +1809,30,25.006212,4.99378776550293 +1810,22,22.512854,0.5128536224365234 
+1811,26,31.304163,5.304162979125977 +1812,29,29.562428,0.5624275207519531 +1813,25,24.282269,0.7177314758300781 +1814,28,32.14294,4.142940521240234 +1815,24,22.60743,1.3925704956054688 +1816,26,16.181128,9.818872451782227 +1817,30,22.01968,7.980319976806641 +1818,29,17.02607,11.973930358886719 +1819,26,19.39951,6.600490570068359 +1820,27,31.29531,4.295309066772461 +1821,30,23.328268,6.671731948852539 +1822,22,21.456108,0.5438919067382812 +1823,26,22.063614,3.9363861083984375 +1824,30,23.365694,6.634305953979492 +1825,26,21.077173,4.922826766967773 +1826,30,21.734081,8.265918731689453 +1827,23,18.198044,4.8019561767578125 +1828,27,19.523357,7.476642608642578 +1829,26,21.974392,4.025608062744141 +1830,28,23.028053,4.971946716308594 +1831,24,25.742395,1.7423954010009766 +1832,31,32.1111,1.1110992431640625 +1833,31,23.692137,7.307863235473633 +1834,23,18.24669,4.75330924987793 +1835,27,21.07002,5.92997932434082 +1836,23,16.648336,6.351663589477539 +1837,28,23.413927,4.58607292175293 +1838,23,23.557764,0.5577640533447266 +1839,23,28.283098,5.283098220825195 +1840,27,31.25733,4.257329940795898 +1841,24,37.203053,13.203052520751953 +1842,25,21.342127,3.6578731536865234 +1843,23,28.462494,5.462493896484375 +1844,23,23.546217,0.5462169647216797 +1845,27,17.386398,9.613601684570312 +1846,23,28.516083,5.516082763671875 +1847,27,23.846699,3.153301239013672 +1848,23,27.671734,4.671733856201172 +1849,26,25.408115,0.5918846130371094 +1850,30,19.453678,10.546321868896484 +1851,31,27.345863,3.6541366577148438 +1852,31,25.647663,5.352336883544922 +1853,30,16.8905,13.109500885009766 +1854,30,25.864666,4.135334014892578 +1855,30,32.022274,2.0222740173339844 +1856,27,23.413406,3.5865936279296875 +1857,26,19.331875,6.668125152587891 +1858,28,31.411556,3.4115562438964844 +1859,27,23.902191,3.097808837890625 +1860,30,23.724213,6.275787353515625 +1861,28,23.425417,4.574583053588867 +1862,27,22.954718,4.045282363891602 +1863,28,24.248007,3.751993179321289 +1864,27,14.816343,12.183656692504883 +1865,28,24.615501,3.3844985961914062 +1866,24,23.851095,0.14890480041503906 +1867,28,34.245014,6.245014190673828 +1868,26,12.748175,13.251825332641602 +1869,27,25.778505,1.2214946746826172 +1870,27,16.816277,10.183723449707031 +1871,27,25.526695,1.4733047485351562 +1872,28,30.277859,2.2778587341308594 +1873,28,23.128458,4.871541976928711 +1874,27,21.594412,5.405588150024414 +1875,28,30.12561,2.1256103515625 +1876,26,23.501345,2.498655319213867 +1877,26,14.553191,11.446808815002441 +1878,27,22.228539,4.771461486816406 +1879,28,26.893911,1.106088638305664 +1880,26,29.981756,3.9817562103271484 +1881,26,21.373428,4.6265716552734375 +1882,27,15.46812,11.531880378723145 +1883,27,20.38157,6.618429183959961 +1884,24,20.14618,3.85382080078125 +1885,24,21.079956,2.9200439453125 +1886,31,21.706938,9.293062210083008 +1887,27,19.35177,7.648229598999023 +1888,31,28.064434,2.935565948486328 +1889,30,32.794598,2.794597625732422 +1890,28,21.399797,6.600202560424805 +1891,25,23.25072,1.7492809295654297 +1892,28,22.182405,5.817594528198242 +1893,28,19.54891,8.451089859008789 +1894,30,24.025942,5.974058151245117 +1895,26,19.186182,6.813817977905273 +1896,29,23.139154,5.860845565795898 +1897,28,21.6844,6.31559944152832 +1898,24,22.169695,1.8303050994873047 +1899,25,24.933573,0.06642723083496094 +1900,31,21.51799,9.482009887695312 +1901,30,17.87886,12.121139526367188 +1902,27,19.535805,7.464195251464844 +1903,30,16.716694,13.283306121826172 +1904,28,25.722906,2.2770938873291016 +1905,26,16.047138,9.952861785888672 
+1906,25,25.915369,0.9153690338134766 +1907,28,29.203548,1.2035484313964844 +1908,28,23.117466,4.882534027099609 +1909,26,28.30939,2.309389114379883 +1910,30,23.402094,6.597906112670898 +1911,24,17.31251,6.687490463256836 +1912,29,17.181652,11.818347930908203 +1913,28,24.044497,3.955503463745117 +1914,28,21.598604,6.401395797729492 +1915,27,17.721651,9.278348922729492 +1916,24,22.927841,1.0721588134765625 +1917,30,20.827572,9.172428131103516 +1918,30,25.50966,4.490339279174805 +1919,30,38.26336,8.263359069824219 +1920,30,28.960045,1.0399551391601562 +1921,30,24.541143,5.458856582641602 +1922,30,22.620632,7.379367828369141 +1923,28,18.517984,9.482015609741211 +1924,28,26.139225,1.8607749938964844 +1925,29,22.677994,6.3220062255859375 +1926,28,30.513725,2.5137252807617188 +1927,30,22.302914,7.697086334228516 +1928,27,23.168142,3.831857681274414 +1929,28,13.86745,14.132550239562988 +1930,27,23.769178,3.2308216094970703 +1931,27,19.927624,7.072376251220703 +1932,28,20.094778,7.905221939086914 +1933,30,26.17015,3.8298492431640625 +1934,30,15.494795,14.505205154418945 +1935,27,20.214573,6.785427093505859 +1936,24,18.721266,5.27873420715332 +1937,30,27.965572,2.0344276428222656 +1938,28,13.077323,14.922677040100098 +1939,28,21.929829,6.070171356201172 +1940,26,24.011127,1.9888725280761719 +1941,30,21.378624,8.621376037597656 +1942,32,26.541195,5.458805084228516 +1943,28,32.38094,4.380939483642578 +1944,25,22.651215,2.348785400390625 +1945,28,20.487803,7.512197494506836 +1946,29,17.728308,11.271692276000977 +1947,27,20.873032,6.1269683837890625 +1948,27,23.355387,3.644613265991211 +1949,28,27.877192,0.12280845642089844 +1950,29,17.15337,11.846630096435547 +1951,31,31.293585,0.29358482360839844 +1952,25,22.014254,2.985746383666992 +1953,25,21.5906,3.4094009399414062 +1954,29,28.013168,0.9868316650390625 +1955,28,21.897058,6.102941513061523 +1956,27,11.575291,15.42470932006836 +1957,25,21.23054,3.769460678100586 +1958,27,24.19218,2.807819366455078 +1959,28,23.280916,4.719083786010742 +1960,31,24.33692,6.663080215454102 +1961,31,25.78528,5.214719772338867 +1962,31,22.841219,8.158781051635742 +1963,28,18.365875,9.634124755859375 +1964,28,22.126644,5.873355865478516 +1965,25,20.042427,4.957572937011719 +1966,29,27.6352,1.3647994995117188 +1967,28,19.277138,8.722862243652344 +1968,27,24.075064,2.924936294555664 +1969,28,19.433092,8.56690788269043 +1970,31,33.25059,2.250591278076172 +1971,28,23.888018,4.111982345581055 +1972,28,25.129375,2.870624542236328 +1973,28,20.728132,7.271867752075195 +1974,31,23.601112,7.398887634277344 +1975,28,16.643059,11.356941223144531 +1976,31,28.87047,2.1295299530029297 +1977,29,22.838,6.16200065612793 +1978,28,21.589994,6.410005569458008 +1979,24,20.845419,3.154581069946289 +1980,28,20.489773,7.510227203369141 +1981,27,21.440971,5.559028625488281 +1982,26,32.563206,6.563205718994141 +1983,29,24.57654,4.423460006713867 +1984,24,24.48807,0.4880695343017578 +1985,28,24.475891,3.52410888671875 +1986,24,22.740942,1.2590579986572266 +1987,26,18.677006,7.322994232177734 +1988,24,19.210476,4.789524078369141 +1989,26,19.947756,6.052244186401367 +1990,26,26.120861,0.12086105346679688 +1991,24,18.335665,5.664335250854492 +1992,26,22.588276,3.411724090576172 +1993,24,21.72313,2.2768707275390625 +1994,26,17.693182,8.306818008422852 +1995,24,29.730688,5.730688095092773 +1996,26,20.90035,5.099649429321289 +1997,24,18.213804,5.786195755004883 +1998,26,25.443995,0.5560054779052734 +1999,26,23.776262,2.2237377166748047 +2000,30,20.289509,9.710491180419922 
+2001,24,17.674862,6.325138092041016 +2002,24,31.34888,7.348880767822266 +2003,24,22.222094,1.7779064178466797 +2004,26,23.069359,2.9306411743164062 +2005,24,17.690006,6.309993743896484 +2006,26,22.466875,3.5331249237060547 +2007,26,23.603971,2.396028518676758 +2008,24,27.482391,3.482391357421875 +2009,32,18.958914,13.041086196899414 +2010,24,18.789139,5.2108612060546875 +2011,32,37.102913,5.102912902832031 +2012,24,22.557053,1.4429473876953125 +2013,24,24.753952,0.7539520263671875 +2014,26,23.165781,2.834218978881836 +2015,24,18.241383,5.758617401123047 +2016,24,8.771546,15.228453636169434 +2017,27,22.485209,4.514791488647461 +2018,28,21.524717,6.475282669067383 +2019,26,25.283741,0.7162590026855469 +2020,29,18.228739,10.771261215209961 +2021,28,21.085098,6.9149017333984375 +2022,1,2.2482653,1.248265266418457 +2023,1,2.8191767,1.8191766738891602 +2024,2,5.089783,3.089783191680908 +2025,1,1.0626817,0.06268167495727539 +2026,1,2.0702941,1.0702941417694092 +2027,1,10.069636,9.069636344909668 +2028,1,1.0424695,0.04246950149536133 +2029,1,1.0131786,0.013178586959838867 +2030,1,8.687974,7.687973976135254 +2031,1,2.4632576,1.4632575511932373 +2032,1,1.0471636,0.04716360569000244 +2033,1,1.7964444,0.7964444160461426 +2034,1,1.4997244,0.4997243881225586 +2035,1,3.3595364,2.3595364093780518 +2036,1,1.0133315,0.01333153247833252 +2037,1,1.3482883,0.34828829765319824 +2038,1,5.7162547,4.716254711151123 +2039,1,1.0533147,0.0533146858215332 +2040,1,1.0842781,0.08427810668945312 +2041,1,1.0300721,0.03007209300994873 +2042,1,1.0414467,0.041446685791015625 +2043,1,1.9794374,0.9794373512268066 +2044,1,6.1319094,5.131909370422363 +2045,1,2.8151882,1.8151881694793701 +2046,1,5.072344,4.072343826293945 +2047,1,11.101553,10.101552963256836 +2048,1,1.0035425,0.0035425424575805664 +2049,1,1.0435789,0.0435788631439209 +2050,1,1.0431552,0.04315519332885742 +2051,1,1.1649897,0.16498970985412598 +2052,1,2.5641787,1.564178705215454 +2053,1,1.0133536,0.013353586196899414 +2054,1,4.4166594,3.416659355163574 +2055,1,2.1899052,1.1899051666259766 +2056,1,1.0600599,0.06005990505218506 +2057,1,1.9145037,0.9145036935806274 +2058,1,2.3018463,1.3018462657928467 +2059,1,1.0181673,0.01816725730895996 +2060,1,2.2256613,1.225661277770996 +2061,1,1.2049303,0.20493030548095703 +2062,1,1.0133041,0.01330411434173584 +2063,1,2.6240768,1.6240768432617188 +2064,1,2.7556763,1.75567626953125 +2065,1,1.0306283,0.030628323554992676 +2066,1,1.5470924,0.5470924377441406 +2067,1,4.34832,3.3483200073242188 +2068,1,1.0216765,0.02167654037475586 +2069,1,2.3239396,1.323939561843872 +2070,1,1.0061071,0.0061070919036865234 +2071,1,4.557315,3.557314872741699 +2072,1,1.0449045,0.044904470443725586 +2073,1,1.0586811,0.05868113040924072 +2074,1,5.71912,4.719120025634766 +2075,1,3.0215948,2.021594762802124 +2076,1,1.1339686,0.13396859169006348 +2077,1,7.8198714,6.819871425628662 +2078,1,1.7485371,0.7485370635986328 +2079,1,4.2417946,3.2417945861816406 +2080,1,5.414357,4.4143571853637695 +2081,1,2.1223443,1.1223442554473877 +2082,1,3.159328,2.159327983856201 +2083,1,5.5826983,4.582698345184326 +2084,1,1.0484107,0.048410654067993164 +2085,1,9.823101,8.823101043701172 +2086,1,1.0771939,0.07719385623931885 +2087,1,3.4339638,2.4339637756347656 +2088,1,6.1543927,5.154392719268799 +2089,1,1.0693712,0.06937122344970703 +2090,1,2.497738,1.4977378845214844 +2091,1,1.0086128,0.008612751960754395 +2092,1,4.2219296,3.2219295501708984 +2093,1,12.497629,11.497629165649414 +2094,1,1.020972,0.020972013473510742 +2095,1,3.5104945,2.5104944705963135 
+2096,1,9.56249,8.562490463256836 +2097,1,1.0130868,0.013086795806884766 +2098,2,1.0350628,0.9649372100830078 +2099,1,3.9590056,2.95900559425354 +2100,1,1.0405196,0.0405195951461792 +2101,1,1.8476894,0.8476893901824951 +2102,2,7.356225,5.35622501373291 +2103,1,2.3785896,1.3785896301269531 +2104,1,4.3206205,3.320620536804199 +2105,1,1.0686234,0.06862342357635498 +2106,3,5.140204,2.1402039527893066 +2107,1,1.039221,0.03922104835510254 +2108,2,1.0427444,0.9572556018829346 +2109,2,6.824191,4.824191093444824 +2110,1,2.8895464,1.8895463943481445 +2111,1,14.436861,13.436861038208008 +2112,1,2.1444116,1.144411563873291 +2113,1,1.0082945,0.008294463157653809 +2114,1,1.0457914,0.0457913875579834 +2115,5,4.191244,0.8087558746337891 +2116,1,5.077204,4.07720422744751 +2117,1,2.7835386,1.783538579940796 +2118,1,1.3227715,0.3227715492248535 +2119,2,2.2487311,0.24873113632202148 +2120,1,3.0457838,2.045783758163452 +2121,1,1.0440512,0.044051170349121094 +2122,1,2.6890738,1.6890738010406494 +2123,1,1.0304816,0.030481576919555664 +2124,2,4.2026954,2.202695369720459 +2125,1,1.0222954,0.022295355796813965 +2126,2,1.7686796,0.23132038116455078 +2127,1,1.0181192,0.018119215965270996 +2128,1,6.3294477,5.3294477462768555 +2129,2,4.4798107,2.4798107147216797 +2130,1,1.0100232,0.010023236274719238 +2131,1,2.493809,1.4938089847564697 +2132,1,1.4446359,0.44463586807250977 +2133,1,3.1996129,2.199612855911255 +2134,1,1.0513309,0.05133092403411865 +2135,1,1.0244051,0.02440512180328369 +2136,1,2.1265268,1.1265268325805664 +2137,1,1.0233817,0.023381710052490234 +2138,1,1.0279578,0.027957797050476074 +2139,1,8.371334,7.371334075927734 +2140,1,1.056693,0.05669295787811279 +2141,1,1.0190601,0.019060134887695312 +2142,1,1.4467814,0.4467813968658447 +2143,1,2.9523518,1.9523518085479736 +2144,1,1.0286639,0.02866387367248535 +2145,1,4.488729,3.4887290000915527 +2146,1,12.408431,11.408431053161621 +2147,1,1.036402,0.036401987075805664 +2148,1,2.0804417,1.0804417133331299 +2149,1,1.4134504,0.41345036029815674 +2150,1,1.675776,0.6757760047912598 +2151,1,3.2096095,2.2096095085144043 +2152,1,1.8914311,0.8914310932159424 +2153,1,2.685558,1.6855580806732178 +2154,1,1.0665383,0.06653833389282227 +2155,1,1.0301847,0.03018474578857422 +2156,2,1.0087476,0.9912524223327637 +2157,1,1.0710709,0.07107090950012207 +2158,1,1.5227742,0.5227742195129395 +2159,1,1.0237948,0.02379477024078369 +2160,1,1.0481981,0.04819810390472412 +2161,1,1.0208611,0.020861148834228516 +2162,1,1.9413083,0.9413082599639893 +2163,1,3.8580163,2.8580162525177 +2164,1,1.037976,0.03797602653503418 +2165,1,2.6821184,1.6821184158325195 +2166,1,1.0674728,0.06747281551361084 +2167,1,1.0264323,0.026432275772094727 +2168,1,3.106177,2.1061770915985107 +2169,1,4.334956,3.334956169128418 +2170,1,1.0450883,0.04508829116821289 +2171,1,2.4887276,1.4887275695800781 +2172,1,2.765675,1.7656750679016113 +2173,1,1.0190322,0.01903223991394043 +2174,1,1.0253416,0.02534162998199463 +2175,1,1.0333594,0.033359408378601074 +2176,1,2.1795254,1.179525375366211 +2177,1,21.835794,20.83579444885254 +2178,1,4.295617,3.29561710357666 +2179,2,2.449887,0.4498870372772217 +2180,1,6.1891165,5.189116477966309 +2181,1,1.0215552,0.021555185317993164 +2182,1,1.0380664,0.03806638717651367 +2183,1,17.05344,16.05344009399414 +2184,1,1.0392209,0.03922092914581299 +2185,1,1.0234873,0.023487329483032227 +2186,33,23.15248,9.84752082824707 +2187,25,23.699415,1.3005847930908203 +2188,29,18.873102,10.126897811889648 +2189,25,21.674692,3.325307846069336 +2190,29,21.040071,7.959928512573242 
+2191,28,26.58352,1.4164791107177734 +2192,33,28.2377,4.762300491333008 +2193,29,19.744854,9.255146026611328 +2194,25,23.821978,1.1780223846435547 +2195,30,19.760782,10.239217758178711 +2196,29,23.533283,5.466716766357422 +2197,26,20.64324,5.356760025024414 +2198,33,23.012623,9.987377166748047 +2199,30,21.753962,8.246038436889648 +2200,29,24.962196,4.037803649902344 +2201,30,20.639387,9.360612869262695 +2202,27,21.979364,5.020635604858398 +2203,29,26.325333,2.6746673583984375 +2204,33,30.075903,2.9240970611572266 +2205,33,23.399323,9.600677490234375 +2206,33,19.423166,13.576833724975586 +2207,33,24.139074,8.860925674438477 +2208,25,21.46734,3.5326595306396484 +2209,25,16.606865,8.393135070800781 +2210,29,22.242378,6.757621765136719 +2211,27,18.840332,8.15966796875 +2212,32,25.699905,6.3000946044921875 +2213,33,25.728811,7.271188735961914 +2214,30,24.797966,5.202033996582031 +2215,31,29.203548,1.7964515686035156 +2216,26,26.257511,0.2575111389160156 +2217,29,27.241653,1.7583465576171875 +2218,33,25.418032,7.581968307495117 +2219,33,21.474482,11.525518417358398 +2220,25,18.98326,6.016740798950195 +2221,33,19.91922,13.080780029296875 +2222,26,36.44968,10.44968032836914 +2223,29,18.961391,10.03860855102539 +2224,29,20.366789,8.633211135864258 +2225,29,15.638892,13.36110782623291 +2226,29,38.03728,9.037281036376953 +2227,30,22.828182,7.171817779541016 +2228,33,24.175426,8.824573516845703 +2229,30,22.197916,7.802083969116211 +2230,32,24.844597,7.155403137207031 +2231,29,19.146465,9.853534698486328 +2232,29,23.648745,5.351255416870117 +2233,29,22.667137,6.332862854003906 +2234,26,22.49637,3.503629684448242 +2235,25,16.1961,8.803899765014648 +2236,31,30.879038,0.12096214294433594 +2237,33,25.351557,7.648443222045898 +2238,25,19.36016,5.639839172363281 +2239,31,18.095142,12.904857635498047 +2240,33,18.261663,14.738336563110352 +2241,32,26.905973,5.094026565551758 +2242,30,22.537386,7.462614059448242 +2243,29,22.145014,6.854986190795898 +2244,29,29.702703,0.7027034759521484 +2245,33,21.08175,11.918249130249023 +2246,29,16.725643,12.274356842041016 +2247,33,22.562592,10.437408447265625 +2248,30,21.585974,8.414026260375977 +2249,33,26.011457,6.988542556762695 +2250,33,26.405054,6.594945907592773 +2251,29,20.40382,8.596179962158203 +2252,29,19.673683,9.326316833496094 +2253,30,21.747229,8.252771377563477 +2254,29,24.09535,4.90464973449707 +2255,25,22.754389,2.2456111907958984 +2256,33,21.944723,11.055276870727539 +2257,33,22.850708,10.1492919921875 +2258,30,22.013681,7.986318588256836 +2259,29,19.202791,9.797208786010742 +2260,28,18.992937,9.007062911987305 +2261,33,21.975922,11.024078369140625 +2262,30,22.896952,7.103048324584961 +2263,29,20.090736,8.909263610839844 +2264,33,16.710398,16.289602279663086 +2265,30,38.253788,8.253787994384766 +2266,33,28.93989,4.060110092163086 +2267,28,22.286934,5.713066101074219 +2268,30,23.217388,6.782611846923828 +2269,33,22.382353,10.617647171020508 +2270,28,19.68568,8.314319610595703 +2271,33,20.939434,12.060565948486328 +2272,31,22.90351,8.096490859985352 +2273,33,15.268266,17.73173427581787 +2274,33,23.335657,9.664342880249023 +2275,30,18.54719,11.452810287475586 +2276,32,24.501709,7.498291015625 +2277,30,23.054014,6.945985794067383 +2278,29,24.821812,4.178188323974609 +2279,29,23.134655,5.865345001220703 +2280,28,17.842508,10.157491683959961 +2281,33,19.520147,13.479852676391602 +2282,26,23.649885,2.3501148223876953 +2283,27,21.641325,5.358675003051758 +2284,29,22.813412,6.186588287353516 +2285,25,24.016743,0.9832572937011719 
+2286,25,12.376741,12.623258590698242 +2287,31,22.9776,8.02239990234375 +2288,29,30.292278,1.2922782897949219 +2289,27,27.181051,0.18105125427246094 +2290,31,29.752924,1.2470760345458984 +2291,25,28.513159,3.5131587982177734 +2292,25,23.179073,1.8209266662597656 +2293,25,22.027029,2.972970962524414 +2294,30,27.709011,2.2909889221191406 +2295,33,27.497496,5.502504348754883 +2296,28,23.824965,4.17503547668457 +2297,28,35.67067,7.6706695556640625 +2298,29,21.875412,7.1245880126953125 +2299,25,20.764694,4.2353057861328125 +2300,25,18.801184,6.198816299438477 +2301,29,18.17443,10.825569152832031 +2302,29,21.777578,7.222421646118164 +2303,25,22.585175,2.414825439453125 +2304,30,16.849573,13.150426864624023 +2305,29,14.907389,14.092611312866211 +2306,29,27.160046,1.8399543762207031 +2307,28,24.263313,3.7366867065429688 +2308,28,19.715971,8.284029006958008 +2309,25,18.912785,6.087215423583984 +2310,25,14.657802,10.342198371887207 +2311,29,20.439545,8.560455322265625 +2312,29,29.229847,0.22984695434570312 +2313,32,20.501074,11.498926162719727 +2314,29,28.829975,0.17002487182617188 +2315,29,19.74689,9.253110885620117 +2316,30,20.48484,9.515159606933594 +2317,25,22.283998,2.716001510620117 +2318,29,27.74281,1.2571907043457031 +2319,32,30.028717,1.971282958984375 +2320,30,20.561733,9.43826675415039 +2321,27,22.907772,4.092227935791016 +2322,25,14.430824,10.569175720214844 +2323,29,19.149414,9.8505859375 +2324,29,20.373434,8.626565933227539 +2325,27,20.61742,6.382579803466797 +2326,27,23.251034,3.7489662170410156 +2327,33,29.799124,3.200876235961914 +2328,29,18.112143,10.887857437133789 +2329,33,18.491377,14.508623123168945 +2330,33,24.572546,8.427453994750977 +2331,33,33.893826,0.8938255310058594 +2332,29,20.600784,8.399215698242188 +2333,25,19.736853,5.263147354125977 +2334,26,23.413927,2.5860729217529297 +2335,32,21.693558,10.306442260742188 +2336,34,22.554497,11.445503234863281 +2337,33,20.44912,12.550880432128906 +2338,30,28.039297,1.960702896118164 +2339,30,20.335445,9.664554595947266 +2340,26,27.499035,1.4990348815917969 +2341,30,21.30332,8.696680068969727 +2342,30,22.589344,7.410655975341797 +2343,30,28.110577,1.8894233703613281 +2344,28,26.747648,1.2523517608642578 +2345,31,24.16041,6.839590072631836 +2346,28,21.359642,6.640357971191406 +2347,33,29.409502,3.5904979705810547 +2348,30,21.926334,8.073665618896484 +2349,32,29.187128,2.8128719329833984 +2350,33,22.6634,10.336599349975586 +2351,31,28.520554,2.4794464111328125 +2352,34,20.196936,13.803064346313477 +2353,32,25.041727,6.958272933959961 +2354,33,27.044592,5.955408096313477 +2355,34,28.487423,5.512577056884766 +2356,32,25.576847,6.423152923583984 +2357,28,17.941393,10.05860710144043 +2358,28,16.159542,11.840457916259766 +2359,28,16.405127,11.594873428344727 +2360,28,25.14853,2.851470947265625 +2361,28,20.75617,7.243829727172852 +2362,26,15.581577,10.41842269897461 +2363,30,22.082397,7.9176025390625 +2364,26,24.546541,1.4534587860107422 +2365,28,18.993582,9.006418228149414 +2366,29,21.491426,7.508573532104492 +2367,28,29.489687,1.4896869659423828 +2368,32,7.5531135,24.44688653945923 +2369,32,21.730024,10.269975662231445 +2370,28,19.168375,8.831624984741211 +2371,32,24.63994,7.36005973815918 +2372,28,28.796486,0.7964859008789062 +2373,26,21.429579,4.57042121887207 +2374,34,30.935043,3.0649566650390625 +2375,28,30.744778,2.7447776794433594 +2376,28,22.13154,5.868459701538086 +2377,28,32.28799,4.287990570068359 +2378,32,23.088476,8.911523818969727 +2379,28,22.487658,5.51234245300293 +2380,27,24.252535,2.747465133666992 
+2381,30,30.930244,0.9302444458007812 +2382,28,29.906906,1.9069061279296875 +2383,30,23.1323,6.867700576782227 +2384,26,19.065718,6.934282302856445 +2385,28,24.032486,3.9675140380859375 +2386,30,24.19185,5.808149337768555 +2387,28,27.257833,0.7421665191650391 +2388,30,33.048374,3.0483741760253906 +2389,28,22.26156,5.738439559936523 +2390,30,30.935286,0.9352855682373047 +2391,26,26.78163,0.7816295623779297 +2392,26,17.704454,8.29554557800293 +2393,30,24.905638,5.094362258911133 +2394,28,20.222866,7.777133941650391 +2395,32,27.806576,4.193424224853516 +2396,30,26.543823,3.4561767578125 +2397,26,25.147459,0.8525409698486328 +2398,28,19.410673,8.589326858520508 +2399,28,32.318665,4.31866455078125 +2400,29,28.438812,0.561187744140625 +2401,29,21.393106,7.606893539428711 +2402,35,29.45296,5.547040939331055 +2403,28,24.59803,3.4019699096679688 +2404,27,21.748817,5.251182556152344 +2405,28,24.95413,3.045869827270508 +2406,29,20.369223,8.630777359008789 +2407,32,17.633121,14.366878509521484 +2408,27,19.973137,7.026863098144531 +2409,32,11.757053,20.24294662475586 +2410,35,28.729397,6.270603179931641 +2411,31,20.988668,10.011331558227539 +2412,27,21.17348,5.826520919799805 +2413,27,20.276379,6.723621368408203 +2414,27,22.258936,4.741064071655273 +2415,34,18.433094,15.566905975341797 +2416,27,25.32079,1.6792106628417969 +2417,27,20.051569,6.948431015014648 +2418,35,26.90087,8.099130630493164 +2419,27,23.70607,3.2939300537109375 +2420,27,22.189003,4.810997009277344 +2421,32,21.620201,10.379798889160156 +2422,34,22.98997,11.010030746459961 +2423,34,25.477098,8.52290153503418 +2424,28,29.355757,1.3557567596435547 +2425,32,26.619738,5.38026237487793 +2426,32,21.28621,10.713790893554688 +2427,30,20.545107,9.454893112182617 +2428,27,27.85742,0.8574199676513672 +2429,32,32.287735,0.2877349853515625 +2430,32,22.429749,9.57025146484375 +2431,32,27.657976,4.342023849487305 +2432,33,32.7826,0.21739959716796875 +2433,31,34.081783,3.0817832946777344 +2434,34,29.002686,4.997314453125 +2435,34,21.042786,12.95721435546875 +2436,31,20.159664,10.840335845947266 +2437,34,22.902369,11.097631454467773 +2438,31,33.01591,2.015911102294922 +2439,31,30.22427,0.7757301330566406 +2440,32,25.41232,6.587680816650391 +2441,28,33.9633,5.963298797607422 +2442,32,29.434536,2.5654640197753906 +2443,27,29.454496,2.454496383666992 +2444,32,23.51235,8.487649917602539 +2445,34,35.444824,1.44482421875 +2446,31,25.14931,5.850690841674805 +2447,32,24.764622,7.235378265380859 +2448,33,24.54476,8.455240249633789 +2449,27,22.781176,4.21882438659668 +2450,34,32.40649,1.5935096740722656 +2451,32,20.52294,11.477060317993164 +2452,32,20.52521,11.474790573120117 +2453,34,22.050741,11.949258804321289 +2454,34,27.545084,6.454916000366211 +2455,31,26.020342,4.979658126831055 +2456,33,25.422619,7.577381134033203 +2457,31,19.043793,11.956207275390625 +2458,28,17.795275,10.20472526550293 +2459,30,29.804361,0.19563865661621094 +2460,32,24.64292,7.357080459594727 +2461,32,25.585114,6.414886474609375 +2462,34,26.663795,7.336204528808594 +2463,29,25.181213,3.81878662109375 +2464,34,22.986282,11.013717651367188 +2465,31,29.005074,1.9949264526367188 +2466,34,14.07452,19.925479888916016 +2467,33,25.421661,7.578338623046875 +2468,31,22.48627,8.513729095458984 +2469,32,20.671625,11.328374862670898 +2470,32,23.392101,8.607898712158203 +2471,34,30.822908,3.177091598510742 +2472,34,22.163626,11.836374282836914 +2473,31,21.335249,9.664751052856445 +2474,32,31.97469,0.025310516357421875 +2475,31,27.861702,3.1382980346679688 +2476,31,22.397188,8.602811813354492 
+2477,30,21.425642,8.574357986450195 +2478,32,19.702425,12.297574996948242 +2479,27,29.985497,2.9854965209960938 +2480,33,23.249523,9.750476837158203 +2481,31,17.276999,13.723001480102539 +2482,32,22.10562,9.894380569458008 +2483,31,22.79969,8.200309753417969 +2484,29,24.181648,4.818351745605469 +2485,29,22.754719,6.245281219482422 +2486,33,20.313156,12.686843872070312 +2487,34,25.031961,8.968038558959961 +2488,32,26.76658,5.233419418334961 +2489,34,21.974695,12.025304794311523 +2490,34,23.66264,10.337360382080078 +2491,29,26.797798,2.2022018432617188 +2492,28,21.134493,6.865507125854492 +2493,35,25.890615,9.109384536743164 +2494,27,17.82315,9.176849365234375 +2495,31,24.180756,6.819244384765625 +2496,33,24.475256,8.524744033813477 +2497,35,25.138374,9.861625671386719 +2498,31,25.341877,5.658123016357422 +2499,35,20.286448,14.713552474975586 +2500,33,23.92089,9.079109191894531 +2501,32,24.359339,7.640661239624023 +2502,29,28.693716,0.30628395080566406 +2503,31,22.006565,8.99343490600586 +2504,32,26.87448,5.125520706176758 +2505,32,22.640528,9.359472274780273 +2506,32,32.063107,0.06310653686523438 +2507,30,19.098577,10.901422500610352 +2508,35,22.161308,12.838691711425781 +2509,35,37.138805,2.138805389404297 +2510,32,22.09831,9.901689529418945 +2511,33,23.399794,9.60020637512207 +2512,35,25.126024,9.87397575378418 +2513,35,29.504435,5.495565414428711 +2514,31,22.899406,8.100593566894531 +2515,34,24.115717,9.884283065795898 +2516,35,12.257456,22.742544174194336 +2517,33,20.23701,12.762990951538086 +2518,31,26.315842,4.6841583251953125 +2519,32,30.0413,1.958700180053711 +2520,31,23.232246,7.767753601074219 +2521,33,23.29607,9.703929901123047 +2522,33,28.296051,4.703948974609375 +2523,31,23.512335,7.487665176391602 +2524,33,23.648485,9.35151481628418 +2525,31,21.801811,9.198188781738281 +2526,32,20.896132,11.10386848449707 +2527,35,21.20904,13.790960311889648 +2528,30,19.52913,10.470869064331055 +2529,32,14.227916,17.77208423614502 +2530,31,23.111076,7.888923645019531 +2531,35,26.838911,8.161088943481445 +2532,33,29.90886,3.0911407470703125 +2533,34,25.294983,8.70501708984375 +2534,31,27.729248,3.270751953125 +2535,30,20.88461,9.11539077758789 +2536,33,24.794685,8.205314636230469 +2537,31,21.99749,9.002510070800781 +2538,35,25.530306,9.469694137573242 +2539,35,34.11175,0.8882484436035156 +2540,35,29.935036,5.064964294433594 +2541,34,24.271936,9.728063583374023 +2542,31,24.581581,6.418418884277344 +2543,32,28.482777,3.517223358154297 +2544,31,34.879642,3.8796424865722656 +2545,30,27.949144,2.0508556365966797 +2546,32,22.998243,9.00175666809082 +2547,35,22.09551,12.904489517211914 +2548,32,33.525433,1.5254325866699219 +2549,32,30.209942,1.7900581359863281 +2550,32,22.743338,9.256662368774414 +2551,30,24.210386,5.789613723754883 +2552,31,24.313292,6.686708450317383 +2553,35,29.055954,5.9440460205078125 +2554,31,31.648386,0.6483860015869141 +2555,34,26.908155,7.09184455871582 +2556,33,29.162863,3.837137222290039 +2557,30,22.026636,7.973363876342773 +2558,33,28.132645,4.8673553466796875 +2559,32,29.991673,2.0083274841308594 +2560,33,14.772255,18.227745056152344 +2561,32,18.465439,13.534561157226562 +2562,32,12.320248,19.679752349853516 +2563,34,21.768879,12.231121063232422 +2564,32,23.392101,8.607898712158203 +2565,35,25.257944,9.742055892944336 +2566,35,23.637478,11.36252212524414 +2567,30,26.64144,3.358560562133789 +2568,32,29.817316,2.1826839447021484 +2569,35,29.00948,5.990520477294922 +2570,32,23.663548,8.33645248413086 +2571,33,22.32844,10.671560287475586 +2572,36,23.17881,12.821189880371094 
+2573,31,28.865547,2.1344528198242188 +2574,31,23.522974,7.477025985717773 +2575,28,21.866476,6.133523941040039 +2576,31,21.340654,9.659345626831055 +2577,32,28.698597,3.301403045654297 +2578,32,21.714346,10.285654067993164 +2579,32,23.038752,8.961248397827148 +2580,32,30.480656,1.5193443298339844 +2581,32,37.545765,5.545764923095703 +2582,28,19.451693,8.548307418823242 +2583,30,29.063046,0.9369544982910156 +2584,28,29.340254,1.3402538299560547 +2585,33,20.321407,12.678592681884766 +2586,32,30.880611,1.1193885803222656 +2587,32,20.359148,11.640851974487305 +2588,32,31.673729,0.32627105712890625 +2589,32,26.107887,5.892112731933594 +2590,28,21.431664,6.568336486816406 +2591,32,13.856435,18.14356517791748 +2592,32,22.484724,9.515275955200195 +2593,36,23.736809,12.263191223144531 +2594,32,23.697124,8.302875518798828 +2595,28,29.72401,1.7240104675292969 +2596,32,21.46832,10.531679153442383 +2597,32,35.29083,3.2908287048339844 +2598,36,33.3394,2.6605987548828125 +2599,32,26.570114,5.4298858642578125 +2600,32,20.965038,11.034961700439453 +2601,28,42.149654,14.149654388427734 +2602,28,26.031874,1.9681262969970703 +2603,32,29.963766,2.036233901977539 +2604,32,20.13942,11.860580444335938 +2605,28,21.971018,6.028982162475586 +2606,34,24.654215,9.345785140991211 +2607,31,21.791533,9.208467483520508 +2608,34,30.41322,3.586780548095703 +2609,33,28.531385,4.46861457824707 +2610,32,27.088821,4.9111785888671875 +2611,35,24.311897,10.688102722167969 +2612,31,16.586414,14.413585662841797 +2613,32,26.342783,5.657217025756836 +2614,33,18.588266,14.411733627319336 +2615,32,27.80864,4.1913604736328125 +2616,28,20.351572,7.648427963256836 +2617,33,23.282084,9.717916488647461 +2618,32,22.42605,9.573949813842773 +2619,34,31.773962,2.2260379791259766 +2620,32,17.492012,14.507987976074219 +2621,32,30.661774,1.338226318359375 +2622,32,23.872267,8.12773323059082 +2623,28,19.340723,8.659276962280273 +2624,32,27.27564,4.724359512329102 +2625,32,25.623377,6.376623153686523 +2626,37,35.966595,1.0334053039550781 +2627,29,23.945677,5.054323196411133 +2628,33,28.24404,4.755960464477539 +2629,33,41.550663,8.550662994384766 +2630,29,31.285688,2.2856884002685547 +2631,29,23.633587,5.366413116455078 +2632,29,28.924809,0.07519149780273438 +2633,33,28.357998,4.642002105712891 +2634,33,31.940712,1.0592880249023438 +2635,33,27.61777,5.382230758666992 +2636,33,28.743872,4.256128311157227 +2637,29,31.334835,2.3348350524902344 +2638,29,33.01019,4.010189056396484 +2639,33,20.3019,12.698099136352539 +2640,33,25.00643,7.993570327758789 +2641,33,20.652699,12.347301483154297 +2642,33,24.097637,8.902362823486328 +2643,37,22.892693,14.107307434082031 +2644,33,24.57898,8.4210205078125 +2645,33,30.951004,2.0489959716796875 +2646,29,21.62814,7.371860504150391 +2647,33,21.344074,11.655925750732422 +2648,29,31.726166,2.726165771484375 +2649,29,22.69316,6.306840896606445 +2650,33,20.772411,12.227588653564453 +2651,29,24.000875,4.999124526977539 +2652,29,27.189472,1.8105278015136719 +2653,33,25.3877,7.612300872802734 +2654,37,31.585072,5.414928436279297 +2655,29,23.892193,5.107807159423828 +2656,33,22.376987,10.62301254272461 +2657,37,30.488583,6.511417388916016 +2658,33,25.954168,7.045831680297852 +2659,33,18.003334,14.996665954589844 +2660,29,24.397594,4.602405548095703 +2661,37,26.511301,10.488698959350586 +2662,33,28.686747,4.313253402709961 +2663,37,34.890762,2.1092376708984375 +2664,33,27.107634,5.892366409301758 +2665,33,28.931097,4.068902969360352 +2666,34,30.939503,3.060497283935547 +2667,31,29.175781,1.82421875 
+2668,29,20.840475,8.159524917602539 +2669,33,21.595592,11.404407501220703 +2670,29,22.017132,6.982868194580078 +2671,33,31.699707,1.30029296875 +2672,29,23.342876,5.657123565673828 +2673,29,25.143768,3.856231689453125 +2674,33,20.296335,12.703664779663086 +2675,33,20.917547,12.082452774047852 +2676,29,19.049656,9.95034408569336 +2677,33,23.260448,9.739551544189453 +2678,33,24.830914,8.169086456298828 +2679,32,28.75533,3.244670867919922 +2680,33,23.667831,9.332168579101562 +2681,33,31.588646,1.4113540649414062 +2682,33,25.390081,7.609918594360352 +2683,29,8.712451,20.287549018859863 +2684,33,44.092106,11.092105865478516 +2685,33,9.407497,23.59250259399414 +2686,37,24.647383,12.352617263793945 +2687,31,24.778776,6.221223831176758 +2688,33,24.579962,8.420038223266602 +2689,33,16.586823,16.413177490234375 +2690,33,21.805355,11.194644927978516 +2691,29,23.307632,5.6923675537109375 +2692,33,24.224874,8.775125503540039 +2693,34,29.237055,4.762945175170898 +2694,29,29.059317,0.05931663513183594 +2695,34,30.427862,3.5721378326416016 +2696,33,25.377615,7.622385025024414 +2697,29,24.022799,4.977201461791992 +2698,34,30.020767,3.9792327880859375 +2699,33,21.921553,11.078447341918945 +2700,29,25.604836,3.3951644897460938 +2701,38,31.934948,6.065052032470703 +2702,35,28.96521,6.0347900390625 +2703,36,25.93798,10.062019348144531 +2704,34,15.726004,18.273996353149414 +2705,34,31.985971,2.014028549194336 +2706,34,30.027914,3.972085952758789 +2707,35,25.268435,9.731565475463867 +2708,31,26.599836,4.400163650512695 +2709,34,26.929052,7.070947647094727 +2710,34,28.063541,5.936458587646484 +2711,30,24.44975,5.550249099731445 +2712,30,4.590934,25.409066200256348 +2713,34,19.845009,14.154991149902344 +2714,36,24.684126,11.315874099731445 +2715,34,28.647972,5.352027893066406 +2716,38,24.684126,13.315874099731445 +2717,38,25.063084,12.93691635131836 +2718,30,22.806543,7.193456649780273 +2719,30,22.2027,7.797300338745117 +2720,34,22.35539,11.644609451293945 +2721,34,27.760805,6.239194869995117 +2722,34,26.050348,7.949651718139648 +2723,34,24.060938,9.939062118530273 +2724,30,23.035719,6.96428108215332 +2725,38,26.49224,11.507759094238281 +2726,34,26.837614,7.162385940551758 +2727,34,31.888538,2.111461639404297 +2728,34,20.641819,13.35818099975586 +2729,31,33.818043,2.818042755126953 +2730,34,20.442884,13.55711555480957 +2731,30,32.162014,2.1620140075683594 +2732,35,29.901989,5.098011016845703 +2733,37,23.679731,13.320268630981445 +2734,33,18.955849,14.044151306152344 +2735,32,26.368555,5.631444931030273 +2736,31,21.819225,9.180774688720703 +2737,32,22.49792,9.502080917358398 +2738,33,38.11953,5.119529724121094 +2739,34,21.822655,12.177345275878906 +2740,31,27.489656,3.510343551635742 +2741,33,29.078066,3.921934127807617 +2742,37,27.562702,9.437297821044922 +2743,31,24.592892,6.407108306884766 +2744,36,31.410263,4.5897369384765625 +2745,30,22.308678,7.691322326660156 +2746,34,24.883446,9.116554260253906 +2747,32,30.557621,1.4423789978027344 +2748,30,30.531778,0.5317783355712891 +2749,35,23.537212,11.462787628173828 +2750,30,20.100008,9.899991989135742 +2751,30,21.08208,8.917919158935547 +2752,32,27.506403,4.493597030639648 +2753,30,27.669434,2.33056640625 +2754,35,29.947426,5.052574157714844 +2755,32,30.07884,1.9211597442626953 +2756,30,24.432127,5.567873001098633 +2757,37,30.76978,6.230220794677734 +2758,32,23.500736,8.499263763427734 +2759,30,21.490934,8.509065628051758 +2760,38,32.128986,5.871013641357422 +2761,30,24.063723,5.936277389526367 +2762,32,30.003489,1.996511459350586 
+2763,31,29.548424,1.4515762329101562 +2764,31,31.676613,0.6766128540039062 +2765,39,22.200119,16.799880981445312 +2766,32,23.399927,8.600072860717773 +2767,39,30.230833,8.769166946411133 +2768,39,21.189928,17.81007194519043 +2769,32,33.67773,1.6777305603027344 +2770,32,23.021591,8.978408813476562 +2771,35,23.90958,11.09041976928711 +2772,33,19.69494,13.305059432983398 +2773,32,29.587605,2.412395477294922 +2774,37,27.604038,9.39596176147461 +2775,32,31.743385,0.25661468505859375 +2776,36,28.74395,7.256050109863281 +2777,32,26.388918,5.611082077026367 +2778,31,31.258308,0.25830841064453125 +2779,36,33.071835,2.9281654357910156 +2780,32,23.846966,8.153034210205078 +2781,32,24.763655,7.236345291137695 +2782,32,23.603962,8.396038055419922 +2783,31,30.068218,0.9317817687988281 +2784,31,22.999104,8.000896453857422 +2785,34,28.110449,5.889551162719727 +2786,39,30.57309,8.426910400390625 +2787,31,24.431618,6.568382263183594 +2788,33,21.310526,11.689474105834961 +2789,32,23.807508,8.19249153137207 +2790,33,30.5917,2.4083003997802734 +2791,40,50.41109,10.411090850830078 +2792,40,33.620987,6.3790130615234375 +2793,34,20.888454,13.11154556274414 +2794,36,30.575642,5.424358367919922 +2795,32,18.429665,13.570335388183594 +2796,32,21.34727,10.652730941772461 +2797,34,30.825851,3.1741485595703125 +2798,32,30.100348,1.8996524810791016 +2799,32,25.344751,6.655248641967773 +2800,34,25.153692,8.846307754516602 +2801,34,34.742657,0.7426567077636719 +2802,32,24.896477,7.103523254394531 +2803,34,35.216034,1.216033935546875 +2804,34,43.254444,9.254444122314453 +2805,39,41.479195,2.4791946411132812 +2806,32,24.985071,7.014928817749023 +2807,34,23.016947,10.983053207397461 +2808,34,23.728329,10.271671295166016 +2809,32,25.051725,6.948274612426758 +2810,40,29.942211,10.057788848876953 +2811,34,24.475756,9.52424430847168 +2812,35,24.56405,10.435949325561523 +2813,32,22.582182,9.417818069458008 +2814,34,24.537512,9.462488174438477 +2815,32,37.283577,5.283576965332031 +2816,32,16.661354,15.338645935058594 +2817,32,31.760254,0.23974609375 +2818,40,38.509335,1.4906654357910156 +2819,40,31.190897,8.809103012084961 +2820,40,36.48811,3.511890411376953 +2821,40,17.608543,22.391456604003906 +2822,35,24.44975,10.550249099731445 +2823,35,25.430285,9.56971549987793 +2824,33,28.407347,4.592653274536133 +2825,33,21.695793,11.304206848144531 +2826,37,31.578821,5.421178817749023 +2827,35,29.254076,5.74592399597168 +2828,34,28.155565,5.84443473815918 +2829,39,29.270641,9.729358673095703 +2830,41,27.412317,13.587682723999023 +2831,37,20.438826,16.561174392700195 +2832,35,26.506319,8.493680953979492 +2833,37,27.15365,9.846349716186523 +2834,37,25.822924,11.17707633972168 +2835,36,30.83419,5.165809631347656 +2836,33,6.044373,26.955626964569092 +2837,35,31.172436,3.827564239501953 +2838,35,28.810911,6.189088821411133 +2839,38,31.82109,6.1789093017578125 +2840,37,23.646788,13.353212356567383 +2841,36,28.961077,7.038923263549805 +2842,35,27.19575,7.804250717163086 +2843,33,25.638042,7.361957550048828 +2844,33,29.697865,3.302135467529297 +2845,36,27.064653,8.935346603393555 +2846,36,21.421593,14.578407287597656 +2847,33,33.29053,0.2905311584472656 +2848,37,29.974972,7.025028228759766 +2849,33,29.28455,3.7154502868652344 +2850,38,31.246439,6.753561019897461 +2851,33,29.762823,3.2371768951416016 +2852,33,22.48787,10.512130737304688 +2853,40,28.781267,11.218732833862305 +2854,37,25.950832,11.04916763305664 +2855,37,28.941013,8.05898666381836 +2856,41,23.354385,17.645614624023438 +2857,33,27.443829,5.556171417236328 
+2858,33,29.647955,3.3520450592041016 +2859,38,20.291124,17.70887565612793 +2860,33,30.64445,2.355550765991211 +2861,33,26.754736,6.245264053344727 +2862,38,23.948084,14.051916122436523 +2863,36,29.273764,6.726236343383789 +2864,34,27.586851,6.413148880004883 +2865,35,27.216835,7.783164978027344 +2866,37,34.48114,2.51885986328125 +2867,38,34.62989,3.3701095581054688 +2868,38,33.74086,4.2591400146484375 +2869,38,32.24349,5.756511688232422 +2870,40,25.094168,14.905832290649414 +2871,41,28.640871,12.359128952026367 +2872,38,31.016382,6.983617782592773 +2873,35,30.97957,4.020429611206055 +2874,38,30.139084,7.8609161376953125 +2875,39,31.769812,7.230188369750977 +2876,39,24.23111,14.768890380859375 +2877,39,24.430439,14.569561004638672 +2878,36,23.570074,12.429925918579102 +2879,41,25.841614,15.15838623046875 +2880,38,30.534767,7.465232849121094 +2881,41,30.755005,10.2449951171875 +2882,37,24.852333,12.147666931152344 +2883,41,37.66753,3.332469940185547 +2884,37,25.333193,11.666807174682617 +2885,41,23.681728,17.31827163696289 +2886,40,25.854778,14.145221710205078 +2887,41,30.645168,10.35483169555664 +2888,41,23.705164,17.294836044311523 +2889,41,30.601364,10.398635864257812 +2890,41,34.09772,6.902278900146484 +2891,39,29.41094,9.589059829711914 +2892,38,35.54715,2.452850341796875 +2893,41,22.149502,18.85049819946289 +2894,41,27.178488,13.821512222290039 +2895,38,26.141056,11.858943939208984 +2896,41,18.628283,22.37171745300293 +2897,38,24.163223,13.836776733398438 +2898,38,31.246004,6.753995895385742 +2899,38,24.03903,13.960969924926758 +2900,38,29.128857,8.871143341064453 +2901,36,22.220068,13.779932022094727 +2902,41,30.230745,10.769254684448242 +2903,36,40.51386,4.513858795166016 +2904,41,22.208864,18.791135787963867 +2905,41,22.337845,18.662155151367188 +2906,37,18.691662,18.308338165283203 +2907,41,7.202185,33.79781484603882 +2908,39,27.026361,11.973638534545898 +2909,41,11.928689,29.071310997009277 +2910,40,26.589643,13.410356521606445 +2911,40,29.580606,10.419393539428711 +2912,38,27.590487,10.409513473510742 +2913,41,16.860756,24.139244079589844 +2914,37,34.603237,2.3967628479003906 +2915,41,31.450396,9.549604415893555 +2916,38,27.240793,10.759206771850586 +2917,36,20.111858,15.888141632080078 +2918,35,24.35226,10.64773941040039 +2919,38,23.423187,14.576812744140625 +2920,34,29.670385,4.329614639282227 +2921,38,25.955824,12.04417610168457 +2922,39,43.92814,4.928138732910156 +2923,40,28.31962,11.680379867553711 +2924,35,29.80758,5.192419052124023 +2925,36,32.715275,3.2847251892089844 +2926,41,31.005064,9.994935989379883 +2927,38,30.286469,7.713531494140625 +2928,35,42.597713,7.597713470458984 +2929,41,28.25841,12.74159049987793 +2930,38,25.8822,12.117799758911133 +2931,38,25.607546,12.392454147338867 +2932,38,35.769478,2.2305221557617188 +2933,37,21.020054,15.97994613647461 +2934,37,29.459108,7.540891647338867 +2935,36,21.084686,14.915313720703125 +2936,40,25.268587,14.731412887573242 +2937,39,30.445469,8.55453109741211 +2938,40,29.08822,10.911779403686523 +2939,39,30.952877,8.047122955322266 +2940,38,29.771248,8.228752136230469 +2941,41,27.963797,13.036203384399414 +2942,40,30.555603,9.44439697265625 +2943,38,23.075703,14.924297332763672 +2944,37,30.31886,6.681140899658203 +2945,37,36.082798,0.9172019958496094 +2946,38,39.879818,1.8798179626464844 +2947,38,23.034573,14.96542739868164 +2948,39,24.566929,14.43307113647461 +2949,40,37.11712,2.8828811645507812 +2950,38,27.553196,10.44680404663086 +2951,38,19.932356,18.067644119262695 +2952,35,31.152483,3.8475170135498047 
+2953,36,4.7645154,31.23548460006714 +2954,5,2.6903815,2.3096184730529785 +2955,3,2.6890738,0.3109261989593506 +2956,3,1.0470338,1.9529662132263184 +2957,2,1.1297003,0.8702996969223022 +2958,2,1.3316431,0.6683568954467773 +2959,2,3.3651178,1.3651177883148193 +2960,1,1.3488824,0.34888243675231934 +2961,1,1.057286,0.05728602409362793 +2962,5,1.0799916,3.920008420944214 +2963,5,3.7398322,1.2601678371429443 +2964,5,3.0951054,1.9048945903778076 +2965,1,2.0412598,1.041259765625 +2966,1,2.6695132,1.66951322555542 +2967,4,2.93748,1.0625200271606445 +2968,1,4.798447,3.7984471321105957 +2969,5,1.0561831,3.943816900253296 +2970,5,1.0501292,3.9498708248138428 +2971,1,1.0243049,0.024304866790771484 +2972,4,10.002987,6.002986907958984 +2973,5,2.8517094,2.1482906341552734 +2974,5,4.396523,0.6034770011901855 +2975,2,6.0732994,4.073299407958984 +2976,1,1.5903618,0.5903618335723877 +2977,1,1.675776,0.6757760047912598 +2978,2,1.0546658,0.9453341960906982 +2979,1,1.1014065,0.10140645503997803 +2980,5,22.99483,17.994829177856445 +2981,5,5.491244,0.491243839263916 +2982,5,3.158222,1.841778039932251 +2983,5,5.5326996,0.5326995849609375 +2984,1,3.1800294,2.1800293922424316 +2985,1,1.0221066,0.022106647491455078 +2986,3,2.9640353,0.0359647274017334 +2987,1,1.0039692,0.0039691925048828125 +2988,1,1.0478977,0.04789769649505615 +2989,1,1.1530273,0.15302729606628418 +2990,1,1.0090494,0.009049415588378906 +2991,1,2.9651532,1.9651532173156738 +2992,1,11.89702,10.89702033996582 +2993,1,1.0440164,0.044016361236572266 +2994,1,1.2556546,0.25565457344055176 +2995,2,1.9182434,0.081756591796875 +2996,2,1.0582473,0.9417526721954346 +2997,2,1.0436372,0.9563628435134888 +2998,2,5.503934,3.503933906555176 +2999,1,3.3939257,2.393925666809082 +3000,1,6.971263,5.9712629318237305 +3001,1,1.0392442,0.03924417495727539 +3002,4,5.42823,1.428229808807373 +3003,3,4.191244,1.191244125366211 +3004,5,1.0517261,3.9482738971710205 +3005,1,4.3206205,3.320620536804199 +3006,1,2.680906,1.680906057357788 +3007,3,3.449151,0.44915103912353516 +3008,1,3.915953,2.9159529209136963 +3009,3,3.0602694,0.06026935577392578 +3010,1,2.2822194,1.282219409942627 +3011,1,4.3606896,3.360689640045166 +3012,1,6.1787724,5.178772449493408 +3013,3,17.864235,14.864234924316406 +3014,1,3.2185423,2.2185423374176025 +3015,3,3.67403,0.674030065536499 +3016,2,1.0501444,0.9498555660247803 +3017,1,1.0059181,0.005918145179748535 +3018,2,5.2654076,3.2654075622558594 +3019,1,2.5630162,1.5630161762237549 +3020,1,4.1288114,3.1288113594055176 +3021,1,2.8025892,1.8025891780853271 +3022,2,1.7964444,0.20355558395385742 +3023,2,1.0357671,0.9642329216003418 +3024,1,1.0545659,0.0545659065246582 +3025,1,2.5367875,1.536787509918213 +3026,2,1.8361546,0.16384541988372803 +3027,1,1.1636918,0.16369175910949707 +3028,2,2.0414815,0.04148149490356445 +3029,2,1.2126228,0.7873772382736206 +3030,1,1.947077,0.9470770359039307 +3031,1,1.4222579,0.4222579002380371 +3032,1,5.0141563,4.014156341552734 +3033,2,9.713681,7.713681221008301 +3034,2,18.33408,16.33407974243164 +3035,3,3.512091,0.5120909214019775 +3036,3,3.7728603,0.7728602886199951 +3037,2,1.869313,0.13068699836730957 +3038,5,5.224321,0.2243208885192871 +3039,2,1.0345596,0.9654403924942017 +3040,2,1.0846837,0.9153163433074951 +3041,1,2.2222757,1.222275733947754 +3042,2,2.692629,0.6926290988922119 +3043,3,1.2816424,1.7183575630187988 +3044,2,2.5257885,0.5257885456085205 +3045,4,2.1868165,1.8131835460662842 +3046,2,1.6683424,0.33165764808654785 +3047,1,1.6300559,0.6300559043884277 +3048,2,8.27425,6.274250030517578 
+3049,1,1.015298,0.015298008918762207 +3050,2,3.6124175,1.612417459487915 +3051,3,5.0830693,2.083069324493408 +3052,2,1.1596481,0.840351939201355 +3053,2,2.6638854,0.6638853549957275 +3054,2,12.29633,10.296330451965332 +3055,3,4.2469497,1.2469496726989746 +3056,2,2.3959978,0.3959977626800537 +3057,2,1.0227118,0.9772882461547852 +3058,1,1.0356741,0.035674095153808594 +3059,2,1.0537766,0.9462233781814575 +3060,3,2.2297544,0.7702455520629883 +3061,1,1.0361463,0.03614628314971924 +3062,3,5.4814363,2.481436252593994 +3063,2,1.0143492,0.9856507778167725 +3064,2,1.039143,0.9608570337295532 +3065,1,1.0129966,0.012996554374694824 +3066,3,3.3304365,0.33043646812438965 +3067,3,1.0403173,1.9596827030181885 +3068,1,1.0565271,0.056527137756347656 +3069,3,5.855298,2.8552980422973633 +3070,1,39.476048,38.47604751586914 +3071,3,4.6775594,1.6775593757629395 +3072,1,2.6710546,1.6710546016693115 +3073,1,1.0205789,0.020578861236572266 +3074,2,1.0244865,0.9755134582519531 +3075,2,1.4142723,0.5857276916503906 +3076,1,1.0582048,0.0582047700881958 +3077,1,3.003965,2.003964900970459 +3078,2,3.1880107,1.1880106925964355 +3079,37,31.259382,5.740617752075195 +3080,36,24.263998,11.736001968383789 +3081,39,25.48623,13.513769149780273 +3082,41,25.002802,15.997198104858398 +3083,36,37.643864,1.6438636779785156 +3084,41,29.343906,11.65609359741211 +3085,42,31.440777,10.559223175048828 +3086,35,31.281025,3.718975067138672 +3087,39,27.36216,11.637840270996094 +3088,39,32.6381,6.361900329589844 +3089,39,34.516434,4.4835662841796875 +3090,35,33.221687,1.7783126831054688 +3091,39,25.903416,13.09658432006836 +3092,40,26.85133,13.148670196533203 +3093,37,32.83947,4.160530090332031 +3094,35,26.141403,8.858596801757812 +3095,38,40.173985,2.1739845275878906 +3096,39,31.991985,7.008014678955078 +3097,42,35.760094,6.239906311035156 +3098,39,28.087105,10.912895202636719 +3099,39,31.021193,7.97880744934082 +3100,39,30.043234,8.956766128540039 +3101,39,30.675373,8.324626922607422 +3102,39,37.463627,1.5363731384277344 +3103,39,30.299156,8.700843811035156 +3104,39,31.43708,7.562919616699219 +3105,37,31.359533,5.640466690063477 +3106,35,35.28338,0.28337860107421875 +3107,40,25.892405,14.107595443725586 +3108,36,29.662964,6.3370361328125 +3109,37,24.424599,12.575401306152344 +3110,36,24.048582,11.951417922973633 +3111,43,28.86334,14.136659622192383 +3112,42,35.575886,6.424114227294922 +3113,38,33.582382,4.4176177978515625 +3114,38,30.7418,7.258199691772461 +3115,40,37.490047,2.5099525451660156 +3116,39,34.956596,4.043403625488281 +3117,35,12.100532,22.899468421936035 +3118,38,28.062721,9.937278747558594 +3119,41,39.600098,1.39990234375 +3120,37,32.75435,4.2456512451171875 +3121,36,28.975504,7.024496078491211 +3122,39,24.256643,14.743356704711914 +3123,38,28.462883,9.537117004394531 +3124,37,30.134607,6.865392684936523 +3125,35,25.942644,9.057355880737305 +3126,37,20.016945,16.983055114746094 +3127,39,40.146027,1.146026611328125 +3128,39,32.668583,6.331417083740234 +3129,36,30.665813,5.334186553955078 +3130,35,30.638119,4.361881256103516 +3131,43,52.57374,9.573738098144531 +3132,39,15.093218,23.906782150268555 +3133,35,38.21394,3.213939666748047 +3134,39,35.922394,3.077606201171875 +3135,39,31.293606,7.706394195556641 +3136,35,46.59622,11.59621810913086 +3137,36,23.409292,12.590707778930664 +3138,36,27.376753,8.623247146606445 +3139,43,33.553116,9.446884155273438 +3140,38,30.594315,7.4056854248046875 +3141,39,21.920347,17.079652786254883 +3142,39,24.287792,14.712207794189453 +3143,35,26.319456,8.680543899536133 
+3144,42,26.553259,15.446741104125977 +3145,37,23.27779,13.722209930419922 +3146,36,25.041777,10.958223342895508 +3147,35,7.181227,27.818772792816162 +3148,38,35.456036,2.543964385986328 +3149,43,35.976856,7.023143768310547 +3150,39,19.796978,19.203022003173828 +3151,39,35.755238,3.244762420654297 +3152,35,20.406635,14.593364715576172 +3153,39,30.705563,8.294437408447266 +3154,40,31.70259,8.297409057617188 +3155,43,20.857672,22.1423282623291 +3156,38,24.306456,13.693544387817383 +3157,35,15.77489,19.225110054016113 +3158,40,33.470234,6.529766082763672 +3159,36,28.842766,7.157234191894531 +3160,38,31.057476,6.942523956298828 +3161,36,21.737925,14.262075424194336 +3162,40,22.19635,17.80364990234375 +3163,43,28.462883,14.537117004394531 +3164,43,22.937824,20.062175750732422 +3165,39,31.083359,7.9166412353515625 +3166,43,22.141733,20.858266830444336 +3167,42,34.25238,7.74761962890625 +3168,41,29.153334,11.84666633605957 +3169,38,33.306187,4.693813323974609 +3170,36,30.135406,5.864593505859375 +3171,36,25.69868,10.301319122314453 +3172,36,24.097637,11.902362823486328 +3173,36,28.188084,7.811916351318359 +3174,45,25.18949,19.810510635375977 +3175,40,25.997007,14.002992630004883 +3176,46,43.242153,2.7578468322753906 +3177,39,36.262085,2.7379150390625 +3178,44,29.42476,14.575239181518555 +3179,38,33.809128,4.1908721923828125 +3180,40,39.001614,0.9983863830566406 +3181,38,34.839695,3.1603050231933594 +3182,38,25.58069,12.419309616088867 +3183,38,20.852356,17.14764404296875 +3184,42,33.851154,8.148845672607422 +3185,38,22.518612,15.481388092041016 +3186,38,22.694382,15.305618286132812 +3187,42,30.613588,11.386411666870117 +3188,45,40.101063,4.898937225341797 +3189,45,35.822803,9.177196502685547 +3190,47,23.213518,23.786481857299805 +3191,47,29.4412,17.558799743652344 +3192,47,24.81298,22.18701934814453 +3193,43,24.598951,18.40104866027832 +3194,46,31.11982,14.880180358886719 +3195,39,19.816236,19.18376350402832 +3196,39,23.198915,15.801084518432617 +3197,43,37.26407,5.735931396484375 +3198,43,30.187168,12.81283187866211 +3199,45,27.743303,17.256696701049805 +3200,43,32.194595,10.805404663085938 +3201,46,32.385708,13.61429214477539 +3202,41,36.831795,4.168205261230469 +3203,46,26.921432,19.078567504882812 +3204,39,35.957706,3.0422935485839844 +3205,46,30.481533,15.51846694946289 +3206,46,41.31735,4.682651519775391 +3207,48,35.056087,12.943912506103516 +3208,40,27.256802,12.74319839477539 +3209,46,32.95249,13.047508239746094 +3210,42,34.42203,7.577968597412109 +3211,48,28.444109,19.555891036987305 +3212,46,24.259995,21.740005493164062 +3213,46,34.557495,11.4425048828125 +3214,46,26.916056,19.08394432067871 +3215,48,39.77263,8.227371215820312 +3216,42,31.220814,10.779186248779297 +3217,44,26.896908,17.103092193603516 +3218,41,39.42501,1.5749893188476562 +3219,42,32.869843,9.130157470703125 +3220,40,32.73417,7.265830993652344 +3221,46,28.888422,17.1115779876709 +3222,40,32.03237,7.967628479003906 +3223,46,32.699646,13.30035400390625 +3224,48,29.853424,18.146575927734375 +3225,40,28.448492,11.551507949829102 +3226,40,34.08644,5.9135589599609375 +3227,40,36.52077,3.479228973388672 +3228,40,33.88737,6.112628936767578 +3229,44,27.660828,16.33917236328125 +3230,46,23.965014,22.03498649597168 +3231,48,31.78514,16.214860916137695 +3232,40,32.940247,7.05975341796875 +3233,41,26.914713,14.085287094116211 +3234,42,22.585632,19.41436767578125 +3235,41,37.82734,3.1726608276367188 +3236,45,34.188522,10.811477661132812 +3237,44,27.048513,16.951486587524414 +3238,45,30.546013,14.453987121582031 
+3239,49,29.203493,19.796506881713867 +3240,46,29.950993,16.049007415771484 +3241,48,12.614221,35.38577938079834 +3242,50,35.18426,14.815738677978516 +3243,42,39.10478,2.895221710205078 +3244,46,23.31748,22.682519912719727 +3245,48,37.41308,10.586921691894531 +3246,46,32.16384,13.836158752441406 +3247,48,34.950638,13.049362182617188 +3248,46,31.26206,14.737939834594727 +3249,46,31.334929,14.665071487426758 +3250,46,34.202103,11.797897338867188 +3251,42,31.233301,10.766698837280273 +3252,49,33.567795,15.432205200195312 +3253,45,32.233543,12.766456604003906 +3254,46,22.605959,23.394041061401367 +3255,50,27.031322,22.968677520751953 +3256,43,36.614708,6.385292053222656 +3257,47,38.68969,8.310310363769531 +3258,48,40.822422,7.177577972412109 +3259,43,37.64568,5.354320526123047 +3260,43,35.953396,7.046604156494141 +3261,43,35.561134,7.438865661621094 +3262,43,25.091974,17.90802574157715 +3263,46,38.49471,7.505290985107422 +3264,45,32.082462,12.917537689208984 +3265,47,35.470917,11.529083251953125 +3266,44,37.49913,6.5008697509765625 +3267,43,24.19876,18.801240921020508 +3268,46,33.45606,12.543941497802734 +3269,46,39.62578,6.3742218017578125 +3270,46,33.6652,12.334800720214844 +3271,45,33.77292,11.227081298828125 +3272,46,41.486134,4.513866424560547 +3273,46,34.701645,11.298355102539062 +3274,43,20.696226,22.303773880004883 +3275,46,31.600601,14.399398803710938 +3276,43,34.392902,8.607097625732422 +3277,50,22.8448,27.15519905090332 +3278,51,35.281116,15.718883514404297 +3279,49,31.142899,17.857101440429688 +3280,51,29.559376,21.440624237060547 +3281,43,31.78376,11.216239929199219 +3282,43,34.189056,8.810943603515625 +3283,51,44.211975,6.78802490234375 +3284,44,35.220444,8.779556274414062 +3285,44,35.727467,8.272533416748047 +3286,50,31.41314,18.58686065673828 +3287,44,33.41373,10.58626937866211 +3288,50,30.851738,19.14826202392578 +3289,46,50.938553,4.9385528564453125 +3290,44,36.258976,7.741024017333984 +3291,52,30.920359,21.079641342163086 +3292,49,31.397755,17.602245330810547 +3293,44,24.757296,19.242704391479492 +3294,44,36.2853,7.714698791503906 +3295,1,2.300073,1.3000729084014893 +3296,4,1.8176229,2.1823770999908447 +3297,2,6.9317684,4.931768417358398 +3298,2,1.1596481,0.840351939201355 +3299,2,3.1858063,1.1858062744140625 +3300,1,2.1423464,1.1423463821411133 +3301,1,18.56858,17.568580627441406 +3302,4,4.2811546,0.2811546325683594 +3303,3,1.0348542,1.9651458263397217 +3304,1,3.368992,2.3689920902252197 +3305,2,6.1681066,4.168106555938721 +3306,6,5.648501,0.351499080657959 +3307,2,4.974704,2.974703788757324 +3308,3,2.2035909,0.7964091300964355 +3309,3,1.0577989,1.9422011375427246 +3310,2,3.8477287,1.8477287292480469 +3311,4,8.687974,4.687973976135254 +3312,6,6.194981,0.19498109817504883 +3313,6,10.44004,4.44003963470459 +3314,3,12.481235,9.481234550476074 +3315,2,1.2620351,0.7379648685455322 +3316,1,1.3586998,0.3586997985839844 +3317,1,7.094123,6.094122886657715 +3318,3,6.167334,3.1673340797424316 +3319,2,11.4334,9.43340015411377 +3320,3,13.91732,10.917320251464844 +3321,1,14.2369175,13.236917495727539 +3322,3,3.1112201,0.11122012138366699 +3323,3,2.4400718,0.5599281787872314 +3324,3,14.876417,11.87641716003418 +3325,3,9.403268,6.403267860412598 +3326,2,3.6341524,1.6341524124145508 +3327,1,4.1264834,3.12648344039917 +3328,2,4.5515046,2.551504611968994 +3329,2,2.0453205,0.04532051086425781 +3330,2,1.7284741,0.27152585983276367 +3331,3,1.5057783,1.4942216873168945 +3332,1,1.0582967,0.05829668045043945 +3333,1,2.389881,1.389880895614624 +3334,1,1.3419766,0.3419766426086426 
+3335,2,3.6379383,1.6379382610321045 +3336,4,2.388378,1.6116220951080322 +3337,2,13.903149,11.903148651123047 +3338,2,1.0339229,0.9660770893096924 +3339,3,22.514996,19.514995574951172 +3340,4,5.842982,1.8429818153381348 +3341,2,2.2387302,0.23873019218444824 +3342,2,2.4132485,0.41324853897094727 +3343,1,1.0432801,0.04328012466430664 +3344,3,2.2018938,0.7981061935424805 +3345,4,12.166504,8.16650390625 +3346,2,4.044957,2.044957160949707 +3347,3,8.031484,5.0314836502075195 +3348,3,2.2742364,0.7257635593414307 +3349,1,11.64695,10.646949768066406 +3350,2,10.895822,8.895821571350098 +3351,3,2.0480921,0.9519078731536865 +3352,2,3.3885806,1.388580560684204 +3353,1,1.0461038,0.046103835105895996 +3354,1,1.0497975,0.04979753494262695 +3355,2,3.7844825,1.784482479095459 +3356,2,3.187687,1.1876869201660156 +3357,1,1.0422432,0.042243242263793945 +3358,2,9.105177,7.10517692565918 +3359,2,21.399675,19.399675369262695 +3360,3,24.060791,21.060791015625 +3361,2,4.50514,2.5051398277282715 +3362,4,1.032056,2.967944025993347 +3363,3,4.6241517,1.6241517066955566 +3364,1,1.0121813,0.012181282043457031 +3365,3,1.0073996,1.992600440979004 +3366,3,17.96424,14.964239120483398 +3367,1,1.0302393,0.030239343643188477 +3368,6,5.253,0.7470002174377441 +3369,1,1.0387856,0.038785576820373535 +3370,3,13.181431,10.18143081665039 +3371,2,1.3822432,0.6177568435668945 +3372,3,1.0120404,1.987959623336792 +3373,3,2.6940374,0.30596256256103516 +3374,3,2.0171852,0.9828147888183594 +3375,1,3.621679,2.6216790676116943 +3376,1,3.5272064,2.5272064208984375 +3377,4,3.9451656,0.05483436584472656 +3378,3,2.853609,0.1463909149169922 +3379,2,12.55273,10.552729606628418 +3380,2,3.1239412,1.12394118309021 +3381,3,2.028595,0.971405029296875 +3382,2,3.2686248,1.2686247825622559 +3383,1,1.0365777,0.036577701568603516 +3384,2,3.6558633,1.6558632850646973 +3385,2,3.9528377,1.9528377056121826 +3386,1,1.632712,0.6327120065689087 +3387,3,1.5957829,1.4042171239852905 +3388,1,1.0233817,0.023381710052490234 +3389,4,12.350682,8.350682258605957 +3390,3,2.8329222,0.16707777976989746 +3391,1,1.4160421,0.4160420894622803 +3392,1,6.469661,5.469661235809326 +3393,2,18.773624,16.773624420166016 +3394,1,3.8194005,2.8194005489349365 +3395,2,2.1457434,0.14574337005615234 +3396,3,16.25266,13.252660751342773 +3397,2,14.259491,12.259490966796875 +3398,2,5.3128166,3.312816619873047 +3399,1,1.034719,0.034718990325927734 +3400,1,3.6258771,2.6258771419525146 +3401,2,21.306162,19.306161880493164 +3402,2,1.7885106,0.21148943901062012 +3403,1,3.6951723,2.6951723098754883 +3404,3,8.426289,5.426288604736328 +3405,2,3.7626054,1.7626054286956787 +3406,2,3.3276708,1.3276708126068115 +3407,1,14.159721,13.159721374511719 +3408,3,1.0296407,1.9703593254089355 +3409,51,30.237436,20.762563705444336 +3410,49,34.060463,14.939537048339844 +3411,51,30.86954,20.130460739135742 +3412,47,32.45585,14.544151306152344 +3413,51,37.478924,13.521076202392578 +3414,51,40.93899,10.06100845336914 +3415,48,25.869904,22.130096435546875 +3416,50,28.697483,21.30251693725586 +3417,51,39.15555,11.844451904296875 +3418,49,11.878904,37.12109565734863 +3419,49,35.83975,13.16025161743164 +3420,53,31.971603,21.028396606445312 +3421,52,33.575283,18.42471694946289 +3422,51,42.556873,8.443126678466797 +3423,48,41.134544,6.865455627441406 +3424,52,36.883564,15.116436004638672 +3425,46,40.427425,5.572574615478516 +3426,49,35.632946,13.367053985595703 +3427,53,36.11252,16.887481689453125 +3428,51,41.091663,9.908336639404297 +3429,51,24.90389,26.09610939025879 +3430,51,46.27513,4.7248687744140625 
+3431,52,35.13989,16.860111236572266 +3432,53,26.497162,26.502838134765625 +3433,46,29.879768,16.12023162841797 +3434,53,30.840315,22.159685134887695 +3435,53,38.444588,14.555412292480469 +3436,53,32.057594,20.942405700683594 +3437,49,37.914284,11.085716247558594 +3438,54,30.917295,23.082704544067383 +3439,53,29.958908,23.041091918945312 +3440,49,43.177666,5.822334289550781 +3441,47,37.77599,9.224010467529297 +3442,53,30.337404,22.662595748901367 +3443,54,31.192604,22.807395935058594 +3444,52,42.02276,9.977241516113281 +3445,50,44.399647,5.600353240966797 +3446,52,46.717712,5.28228759765625 +3447,52,32.225807,19.774192810058594 +3448,54,24.436949,29.563051223754883 +3449,52,28.758568,23.241432189941406 +3450,53,39.023296,13.976703643798828 +3451,51,35.450153,15.549846649169922 +3452,54,34.324764,19.675235748291016 +3453,54,27.7706,26.229400634765625 +3454,54,28.735567,25.264432907104492 +3455,53,22.01058,30.98941993713379 +3456,55,24.436949,30.563051223754883 +3457,53,35.33209,17.667911529541016 +3458,55,43.03506,11.96493911743164 +3459,54,38.900757,15.0992431640625 +3460,54,39.494167,14.50583267211914 +3461,53,24.139776,28.8602237701416 +3462,51,31.42398,19.576019287109375 +3463,53,25.174257,27.825742721557617 +3464,48,33.261044,14.738956451416016 +3465,51,41.025276,9.974723815917969 +3466,47,42.54474,4.45526123046875 +3467,50,34.685432,15.314567565917969 +3468,51,37.497707,13.50229263305664 +3469,48,26.849207,21.150793075561523 +3470,49,36.178135,12.82186508178711 +3471,51,29.413208,21.5867919921875 +3472,49,41.21946,7.780540466308594 +3473,52,45.267666,6.732334136962891 +3474,51,43.016792,7.983207702636719 +3475,51,45.16958,5.830421447753906 +3476,51,49.495598,1.5044021606445312 +3477,52,30.960012,21.039987564086914 +3478,51,34.786053,16.213947296142578 +3479,51,36.347805,14.65219497680664 +3480,51,39.44406,11.555938720703125 +3481,50,41.45028,8.549720764160156 +3482,54,46.625988,7.374011993408203 +3483,56,27.23322,28.766780853271484 +3484,50,42.91619,7.083808898925781 +3485,57,29.292633,27.707366943359375 +3486,55,39.28225,15.717750549316406 +3487,55,37.49529,17.504711151123047 +3488,58,48.96864,9.031360626220703 +3489,57,27.913885,29.08611488342285 +3490,59,30.30122,28.698780059814453 +3491,57,49.323887,7.676113128662109 +3492,57,29.141888,27.858112335205078 +3493,57,29.141888,27.858112335205078 +3494,53,35.466763,17.53323745727539 +3495,60,32.191628,27.808372497558594 +3496,56,49.576447,6.423553466796875 +3497,59,32.45478,26.54521942138672 +3498,61,37.39192,23.608081817626953 +3499,59,45.744534,13.25546646118164 +3500,60,45.174793,14.825206756591797 +3501,57,32.15991,24.840091705322266 +3502,6,9.650234,3.6502342224121094 +3503,4,1.8006591,2.1993409395217896 +3504,6,3.5727446,2.427255392074585 +3505,2,1.6783304,0.3216695785522461 +3506,2,3.2096095,1.2096095085144043 +3507,5,4.6865873,0.3134126663208008 +3508,2,3.3999572,1.3999571800231934 +3509,6,2.9260137,3.073986291885376 +3510,5,2.4007897,2.599210262298584 +3511,5,3.6051984,1.3948016166687012 +3512,2,7.8198714,5.819871425628662 +3513,1,1.0732841,0.07328414916992188 +3514,4,18.871716,14.871715545654297 +3515,5,1.0398502,3.9601497650146484 +3516,2,1.676048,0.32395195960998535 +3517,6,5.92633,0.0736699104309082 +3518,4,6.1873064,2.1873064041137695 +3519,6,3.9803157,2.019684314727783 +3520,6,1.0336431,4.966356873512268 +3521,4,1.6866667,2.31333327293396 +3522,1,1.0238683,0.023868322372436523 +3523,4,6.7768264,2.7768263816833496 +3524,3,11.4147,8.41469955444336 +3525,6,4.3267655,1.673234462738037 +3526,3,2.798565,0.20143508911132812 
+3527,5,4.2485914,0.751408576965332 +3528,4,8.066319,4.066319465637207 +3529,2,1.9179623,0.08203768730163574 +3530,5,1.0278916,3.9721083641052246 +3531,2,3.4888122,1.4888122081756592 +3532,4,19.826927,15.826927185058594 +3533,6,8.994944,2.994943618774414 +3534,2,1.0317229,0.968277096748352 +3535,2,3.815426,1.8154261112213135 +3536,4,1.2122953,2.7877047061920166 +3537,6,2.9422588,3.057741165161133 +3538,2,4.012743,2.0127429962158203 +3539,5,5.4022303,0.40223026275634766 +3540,2,5.004697,3.004696846008301 +3541,2,7.5726066,5.572606563568115 +3542,2,5.267737,3.2677369117736816 +3543,4,9.169266,5.1692657470703125 +3544,4,4.916609,0.9166088104248047 +3545,2,1.6359909,0.36400914192199707 +3546,5,3.088241,1.9117588996887207 +3547,2,8.010434,6.010434150695801 +3548,2,1.307303,0.6926970481872559 +3549,2,2.0161014,0.016101360321044922 +3550,4,8.186368,4.186367988586426 +3551,2,2.9255052,0.9255051612854004 +3552,2,2.3154404,0.31544041633605957 +3553,2,1.0549589,0.9450410604476929 +3554,2,1.0504271,0.9495729207992554 +3555,5,4.456858,0.5431418418884277 +3556,3,2.7573516,0.24264836311340332 +3557,3,8.785646,5.785646438598633 +3558,2,10.537106,8.537105560302734 +3559,2,3.9350417,1.9350416660308838 +3560,6,4.147432,1.8525681495666504 +3561,4,2.2334802,1.7665197849273682 +3562,2,5.7109594,3.7109594345092773 +3563,2,1.0225195,0.9774805307388306 +3564,5,3.682119,1.3178811073303223 +3565,3,3.2639012,0.2639012336730957 +3566,3,1.0558486,1.9441514015197754 +3567,5,4.418185,0.5818147659301758 +3568,4,1.9233348,2.076665163040161 +3569,4,43.35733,39.357330322265625 +3570,1,1.0182431,0.018243074417114258 +3571,2,3.466768,1.4667680263519287 +3572,3,3.2776914,0.2776913642883301 +3573,1,1.4853532,0.4853532314300537 +3574,3,3.868279,0.868278980255127 +3575,4,20.112963,16.11296272277832 +3576,4,2.842694,1.1573059558868408 +3577,9,3.1539104,5.846089601516724 +3578,3,4.1202583,1.1202583312988281 +3579,3,19.189835,16.189834594726562 +3580,5,2.6079688,2.392031192779541 +3581,7,5.012667,1.987332820892334 +3582,2,4.879419,2.8794188499450684 +3583,2,2.0290735,0.029073476791381836 +3584,2,1.034882,0.9651180505752563 +3585,1,2.670252,1.6702520847320557 +3586,1,1.0371408,0.037140846252441406 +3587,4,4.3200316,0.32003164291381836 +3588,9,4.7006483,4.299351692199707 +3589,2,2.4106622,0.4106621742248535 +3590,8,1.0288458,6.97115421295166 +3591,4,2.5855403,1.4144597053527832 +3592,3,2.544802,0.4551980495452881 +3593,1,2.830726,1.830725908279419 +3594,3,7.4597874,4.459787368774414 +3595,2,5.0100775,3.010077476501465 +3596,4,5.1608467,1.1608467102050781 +3597,3,5.096129,2.0961289405822754 +3598,2,1.0265936,0.9734064340591431 +3599,2,1.0136728,0.9863271713256836 +3600,2,5.3732986,3.3732986450195312 +3601,4,2.7942574,1.205742597579956 +3602,1,4.34832,3.3483200073242188 +3603,3,4.724573,1.7245731353759766 +3604,3,1.4589152,1.5410847663879395 +3605,2,2.9924328,0.9924328327178955 +3606,4,1.2203001,2.779699921607971 +3607,4,4.2177305,0.21773052215576172 +3608,6,3.4952219,2.5047781467437744 +3609,4,6.9165087,2.916508674621582 +3610,6,8.0447645,2.044764518737793 +3611,2,7.941873,5.941873073577881 +3612,1,1.0153615,0.015361547470092773 +3613,1,2.8177116,1.817711591720581 +3614,56,42.238373,13.761627197265625 +3615,63,32.733097,30.266902923583984 +3616,59,45.867634,13.132366180419922 +3617,58,31.031143,26.968856811523438 +3618,59,51.98165,7.018348693847656 +3619,61,35.331436,25.668563842773438 +3620,56,16.650885,39.3491153717041 +3621,58,30.936806,27.063194274902344 +3622,56,45.33775,10.662250518798828 +3623,62,50.526493,11.473506927490234 
+3624,58,43.832355,14.167644500732422 +3625,60,52.154022,7.845977783203125 +3626,64,37.811768,26.188232421875 +3627,65,53.417187,11.582813262939453 +3628,64,37.398117,26.601882934570312 +3629,64,39.24376,24.756240844726562 +3630,60,49.07049,10.929508209228516 +3631,66,50.126686,15.873313903808594 +3632,63,50.85438,12.145618438720703 +3633,66,47.288166,18.711833953857422 +3634,59,43.03908,15.960918426513672 +3635,64,34.267826,29.732173919677734 +3636,64,39.925858,24.074142456054688 +3637,65,30.910023,34.0899772644043 +3638,64,44.034843,19.96515655517578 +3639,62,41.75491,20.24509048461914 +3640,67,49.666996,17.333003997802734 +3641,66,45.05486,20.945140838623047 +3642,60,43.781456,16.218544006347656 +3643,66,38.41588,27.584121704101562 +3644,67,49.19108,17.808921813964844 +3645,60,41.654152,18.345848083496094 +3646,64,45.610462,18.389537811279297 +3647,61,45.53315,15.466850280761719 +3648,66,47.434254,18.565746307373047 +3649,64,53.04477,10.955230712890625 +3650,68,38.5525,29.447498321533203 +3651,67,44.70591,22.294090270996094 +3652,64,42.658436,21.341564178466797 +3653,68,39.328255,28.67174530029297 +3654,64,41.321217,22.678783416748047 +3655,66,47.301098,18.698902130126953 +3656,68,40.95217,27.047828674316406 +3657,68,50.84852,17.15148162841797 +3658,62,42.180813,19.81918716430664 +3659,64,43.628914,20.37108612060547 +3660,67,40.249054,26.750946044921875 +3661,69,52.180397,16.819602966308594 +3662,67,50.314335,16.685665130615234 +3663,67,44.45369,22.546310424804688 +3664,69,47.81811,21.181888580322266 +3665,69,35.733288,33.2667121887207 +3666,66,46.3194,19.680599212646484 +3667,68,46.38284,21.61716079711914 +3668,69,46.068665,22.93133544921875 +3669,67,32.6084,34.3916015625 +3670,67,50.520306,16.479694366455078 +3671,67,51.021595,15.978404998779297 +3672,69,52.319706,16.680294036865234 +3673,67,51.1356,15.864398956298828 +3674,69,41.803288,27.196712493896484 +3675,68,48.09791,19.902088165283203 +3676,69,52.001163,16.998836517333984 +3677,68,51.673637,16.32636260986328 +3678,4,16.397366,12.39736557006836 +3679,2,8.026671,6.026671409606934 +3680,3,2.9354367,0.06456327438354492 +3681,2,11.679747,9.679746627807617 +3682,3,6.4906955,3.4906954765319824 +3683,2,5.5619173,3.561917304992676 +3684,4,1.0362394,2.9637606143951416 +3685,2,1.0339229,0.9660770893096924 +3686,2,1.1006949,0.8993051052093506 +3687,2,25.731789,23.731788635253906 +3688,2,3.890225,1.8902249336242676 +3689,3,3.2404015,0.2404015064239502 +3690,2,4.0823736,2.08237361907959 +3691,7,6.8881803,0.11181974411010742 +3692,2,4.8961153,2.896115303039551 +3693,3,3.1844513,0.18445134162902832 +3694,3,4.586708,1.5867080688476562 +3695,2,1.0349401,0.9650598764419556 +3696,2,1.0600599,0.9399400949478149 +3697,2,6.460628,4.460628032684326 +3698,2,3.1167781,1.1167781352996826 +3699,7,3.5260887,3.4739112854003906 +3700,7,5.053573,1.9464268684387207 +3701,7,4.6100245,2.3899755477905273 +3702,2,1.0352547,0.964745283126831 +3703,2,1.0119348,0.988065242767334 +3704,7,3.37243,3.6275699138641357 +3705,5,13.738824,8.738823890686035 +3706,2,3.856963,1.8569629192352295 +3707,7,3.8366148,3.1633851528167725 +3708,5,3.2092698,1.7907302379608154 +3709,5,3.574531,1.425468921661377 +3710,3,2.384958,0.615041971206665 +3711,5,3.492017,1.5079829692840576 +3712,5,2.8713965,2.128603458404541 +3713,7,1.0582359,5.9417641162872314 +3714,4,1.2810395,2.7189605236053467 +3715,7,4.3268094,2.6731905937194824 +3716,3,1.0357671,1.9642329216003418 +3717,7,19.170351,12.170351028442383 +3718,7,8.119296,1.1192960739135742 +3719,3,4.996105,1.9961051940917969 
+3720,7,15.5016,8.50160026550293 +3721,5,1.0094166,3.9905834197998047 +3722,5,5.145568,0.1455678939819336 +3723,2,1.5304122,0.4695878028869629 +3724,5,3.5160968,1.483903169631958 +3725,5,8.836219,3.83621883392334 +3726,5,11.260514,6.260514259338379 +3727,7,2.2854726,4.714527368545532 +3728,3,4.600726,1.6007261276245117 +3729,7,1.0599786,5.940021395683289 +3730,5,1.0543778,3.945622205734253 +3731,5,7.009769,2.0097689628601074 +3732,5,1.0362792,3.9637207984924316 +3733,2,2.587038,0.5870380401611328 +3734,5,11.868471,6.868471145629883 +3735,5,3.2783973,1.7216026782989502 +3736,3,4.329431,1.3294310569763184 +3737,5,3.9128518,1.0871481895446777 +3738,3,3.0993464,0.09934639930725098 +3739,2,3.4458349,1.4458348751068115 +3740,5,4.503312,0.4966878890991211 +3741,3,6.279644,3.279644012451172 +3742,3,5.302531,2.3025307655334473 +3743,5,6.983165,1.9831647872924805 +3744,5,8.866959,3.8669586181640625 +3745,7,6.242182,0.7578182220458984 +3746,4,17.087511,13.08751106262207 +3747,5,3.8378098,1.1621901988983154 +3748,4,5.2484465,1.2484464645385742 +3749,4,4.9400373,0.9400372505187988 +3750,4,33.6879,29.68790054321289 +3751,4,5.4828277,1.4828276634216309 +3752,4,5.330337,1.3303370475769043 +3753,4,3.2530005,0.7469995021820068 +3754,2,4.612788,2.612788200378418 +3755,5,6.848125,1.8481249809265137 +3756,4,5.262489,1.262488842010498 +3757,7,5.6409984,1.359001636505127 +3758,5,4.0994186,0.9005813598632812 +3759,7,4.9864297,2.0135703086853027 +3760,3,8.120438,5.1204376220703125 +3761,3,1.1419702,1.858029842376709 +3762,4,13.407319,9.407319068908691 +3763,5,1.3051547,3.6948453187942505 +3764,69,47.182613,21.817386627197266 +3765,69,50.751305,18.248695373535156 +3766,64,50.139847,13.860153198242188 +3767,69,16.180439,52.81956100463867 +3768,67,23.953074,43.046926498413086 +3769,67,38.044765,28.95523452758789 +3770,64,52.081158,11.918842315673828 +3771,65,50.556904,14.443096160888672 +3772,67,52.420666,14.579334259033203 +3773,69,46.89113,22.108871459960938 +3774,69,52.143017,16.856983184814453 +3775,64,52.535305,11.46469497680664 +3776,69,45.31654,23.683460235595703 +3777,69,48.5557,20.44430160522461 +3778,66,41.710155,24.289844512939453 +3779,68,53.0269,14.973098754882812 +3780,64,52.287804,11.712196350097656 +3781,66,42.34426,23.655738830566406 +3782,69,53.15029,15.849708557128906 +3783,68,53.587383,14.412616729736328 +3784,69,52.6896,16.31039810180664 +3785,67,51.37184,15.628158569335938 +3786,64,53.7594,10.2406005859375 +3787,69,34.38642,34.61357879638672 +3788,69,53.62348,15.376518249511719 +3789,68,42.67202,25.327980041503906 +3790,68,51.42424,16.575759887695312 +3791,68,51.563766,16.436233520507812 +3792,66,52.66477,13.33523178100586 +3793,67,50.483555,16.51644515991211 +3794,5,1.0229388,3.977061152458191 +3795,5,3.7244463,1.2755537033081055 +3796,4,10.707744,6.7077436447143555 +3797,5,2.2923715,2.7076284885406494 +3798,3,3.5280154,0.5280153751373291 +3799,5,4.959059,0.04094123840332031 +3800,5,1.0273857,3.972614288330078 +3801,7,3.023526,3.9764740467071533 +3802,3,6.864611,3.8646111488342285 +3803,6,1.0247859,4.975214123725891 +3804,5,4.0380983,0.9619016647338867 +3805,5,2.3959978,2.6040022373199463 +3806,8,8.186368,0.18636798858642578 +3807,5,6.048459,1.0484590530395508 +3808,3,4.4722204,1.4722204208374023 +3809,6,5.6855493,0.314450740814209 +3810,3,4.419713,1.419713020324707 +3811,6,5.394221,0.6057791709899902 +3812,7,6.841419,0.15858078002929688 +3813,7,15.892066,8.89206600189209 +3814,3,10.848952,7.848952293395996 +3815,5,3.2783973,1.7216026782989502 +3816,3,6.118752,3.1187520027160645 
+3817,6,4.3892784,1.6107215881347656 +3818,3,4.055078,1.0550780296325684 +3819,6,12.832355,6.832354545593262 +3820,5,4.329431,0.6705689430236816 +3821,3,9.321561,6.321560859680176 +3822,3,5.0474787,2.047478675842285 +3823,5,18.627386,13.627386093139648 +3824,6,4.803574,1.1964259147644043 +3825,6,12.977978,6.977977752685547 +3826,5,4.1904545,0.8095455169677734 +3827,6,19.327528,13.32752799987793 +3828,6,22.168013,16.168012619018555 +3829,4,3.8665712,0.13342881202697754 +3830,6,2.8400972,3.159902811050415 +3831,5,3.5587413,1.4412586688995361 +3832,5,7.4026723,2.402672290802002 +3833,6,4.5831585,1.4168415069580078 +3834,6,1.5304122,4.469587802886963 +3835,6,2.9768035,3.0231964588165283 +3836,5,4.0255747,0.9744253158569336 +3837,8,3.8117688,4.1882312297821045 +3838,5,4.123262,0.8767380714416504 +3839,5,2.4524393,2.547560691833496 +3840,5,19.324003,14.324003219604492 +3841,5,3.6023798,1.3976202011108398 +3842,8,23.28656,15.28656005859375 +3843,7,2.8479338,4.152066230773926 +3844,5,4.897793,0.10220718383789062 +3845,3,8.310809,5.310809135437012 +3846,5,5.53619,0.5361900329589844 +3847,5,9.755815,4.755814552307129 +3848,8,6.881132,1.1188678741455078 +3849,4,1.3609245,2.639075517654419 +3850,3,6.070059,3.070058822631836 +3851,3,12.429754,9.429754257202148 +3852,3,3.7242484,0.7242484092712402 +3853,6,4.3149753,1.6850247383117676 +3854,3,4.2554917,1.2554917335510254 +3855,5,5.0002847,0.0002846717834472656 +3856,6,9.521329,3.521328926086426 +3857,5,7.054127,2.0541272163391113 +3858,5,6.593218,1.5932178497314453 +3859,6,14.228955,8.228955268859863 +3860,5,3.7006352,1.2993648052215576 +3861,4,4.457371,0.45737123489379883 +3862,5,1.4775572,3.5224428176879883 +3863,6,3.137854,2.8621459007263184 +3864,5,8.650242,3.6502418518066406 +3865,5,4.8798857,0.12011432647705078 +3866,6,16.493639,10.49363899230957 +3867,6,1.0658191,4.934180855751038 +3868,6,6.143956,0.14395618438720703 +3869,8,19.184593,11.184593200683594 +3870,4,1.1717045,2.8282954692840576 +3871,7,10.707744,3.7077436447143555 +3872,6,12.728765,6.728764533996582 +3873,11,8.292416,2.7075843811035156 +3874,8,4.5831585,3.416841506958008 +3875,6,6.593218,0.5932178497314453 +3876,6,4.925259,1.0747408866882324 +3877,4,13.913656,9.913656234741211 +3878,6,3.703617,2.2963829040527344 +3879,4,9.833182,5.833182334899902 +3880,8,18.462423,10.462423324584961 +3881,4,12.262369,8.262369155883789 +3882,4,5.7768035,1.7768034934997559 +3883,6,4.4097548,1.590245246887207 +3884,6,1.8720896,4.127910375595093 +3885,4,2.7001595,1.2998404502868652 +3886,11,20.378344,9.37834358215332 +3887,4,1.1537464,2.8462536334991455 +3888,10,8.477486,1.5225143432617188 +3889,6,5.5263963,0.4736037254333496 +3890,6,1.6027896,4.397210359573364 +3891,7,6.502849,0.49715089797973633 +3892,4,1.2271918,2.7728081941604614 +3893,11,4.100559,6.899440765380859 +3894,6,2.1528463,3.847153663635254 +3895,5,4.67685,0.3231501579284668 +3896,6,19.46984,13.469839096069336 +3897,10,1.0964093,8.903590679168701 +3898,8,6.824486,1.1755142211914062 +3899,10,4.5884194,5.411580562591553 +3900,6,3.8067355,2.1932644844055176 +3901,9,1.1596844,7.840315580368042 +3902,6,14.6594095,8.659409523010254 +3903,6,10.7240505,4.724050521850586 +3904,12,10.061082,1.938918113708496 +3905,11,5.676362,5.323637962341309 +3906,9,7.146841,1.853158950805664 +3907,6,2.7434535,3.2565464973449707 +3908,6,19.065952,13.06595230102539 +3909,6,8.650242,2.6502418518066406 +3910,6,3.4044986,2.59550142288208 +3911,9,10.06583,1.0658302307128906 +3912,6,4.8229423,1.1770577430725098 +3913,12,6.7409453,5.259054660797119 
+3914,4,1.0407073,2.9592926502227783 +3915,6,6.61343,0.6134300231933594 +3916,6,24.174744,18.17474365234375 +3917,6,3.9948375,2.005162477493286 +3918,6,3.1639547,2.836045265197754 +3919,11,15.961732,4.961731910705566 +3920,8,9.594368,1.5943679809570312 +3921,6,6.4076347,0.4076347351074219 +3922,8,2.7424135,5.257586479187012 +3923,5,7.0700364,2.0700364112854004 +3924,4,2.7444408,1.2555592060089111 +3925,7,8.522014,1.5220136642456055 +3926,6,1.5393119,4.46068811416626 +3927,7,4.123262,2.8767380714416504 +3928,5,8.747437,3.7474365234375 +3929,6,5.7486577,0.2513422966003418 +3930,4,5.453764,1.4537639617919922 +3931,8,14.878918,6.878917694091797 +3932,7,3.7126534,3.287346601486206 +3933,4,5.6577506,1.6577506065368652 +3934,5,3.7969086,1.2030913829803467 +3935,10,1.4294869,8.570513129234314 +3936,7,1.1798214,5.8201786279678345 +3937,5,2.9238982,2.076101779937744 +3938,5,6.084516,1.0845160484313965 +3939,5,2.7939055,2.20609450340271 +3940,5,8.818473,3.8184728622436523 +3941,5,2.715652,2.2843480110168457 +3942,7,1.039901,5.9600989818573 +3943,8,3.160632,4.839368104934692 +3944,8,3.744891,4.255109071731567 diff --git a/Results_csv/ssrnet_3_3_3_64_1.0_1.0_age.csv b/Results_csv/ssrnet_3_3_3_64_1.0_1.0_age.csv new file mode 100644 index 0000000..a245777 --- /dev/null +++ b/Results_csv/ssrnet_3_3_3_64_1.0_1.0_age.csv @@ -0,0 +1,3950 @@ +MAE +64583.84712170714 +CA3,CA5 +0.5574144486692015,0.7460076045627376 +ID,age,age_p,error +0,1,[3.250012],[2.250012] +1,1,[3.133043],[2.133043] +2,2,[3.2257693],[1.2257693] +3,1,[3.0933855],[2.0933855] +4,1,[3.2378817],[2.2378817] +5,1,[3.1294224],[2.1294224] +6,1,[3.124327],[2.124327] +7,1,[3.2097778],[2.2097778] +8,1,[3.1660235],[2.1660235] +9,1,[3.0907397],[2.0907397] +10,1,[3.104156],[2.104156] +11,1,[3.1398046],[2.1398046] +12,1,[3.1311517],[2.1311517] +13,1,[3.1998014],[2.1998014] +14,1,[3.1524587],[2.1524587] +15,1,[3.0985982],[2.0985982] +16,1,[3.120456],[2.120456] +17,1,[3.143211],[2.143211] +18,1,[3.1446545],[2.1446545] +19,1,[3.0996459],[2.0996459] +20,1,[3.3487759],[2.3487759] +21,1,[3.145572],[2.145572] +22,1,[3.1263678],[2.1263678] +23,1,[3.1256766],[2.1256766] +24,2,[3.1529768],[1.1529768] +25,1,[3.3084493],[2.3084493] +26,1,[3.1379738],[2.1379738] +27,1,[3.1017032],[2.1017032] +28,1,[3.2809024],[2.2809024] +29,1,[3.2661386],[2.2661386] +30,1,[3.1743963],[2.1743963] +31,1,[3.2573142],[2.2573142] +32,1,[3.146411],[2.146411] +33,1,[3.2108836],[2.2108836] +34,1,[3.1209211],[2.1209211] +35,1,[3.2797608],[2.2797608] +36,1,[3.2371376],[2.2371376] +37,1,[3.2048967],[2.2048967] +38,1,[3.1374407],[2.1374407] +39,1,[3.1689866],[2.1689866] +40,1,[3.3059773],[2.3059773] +41,1,[3.1675057],[2.1675057] +42,1,[3.4124405],[2.4124405] +43,1,[3.1768007],[2.1768007] +44,1,[3.1514359],[2.1514359] +45,1,[3.1153858],[2.1153858] +46,1,[3.127385],[2.127385] +47,1,[3.1853743],[2.1853743] +48,2,[3.1666586],[1.1666586] +49,1,[3.1260204],[2.1260204] +50,1,[3.357195],[2.357195] +51,1,[3.1510472],[2.1510472] +52,1,[3.1847749],[2.1847749] +53,1,[3.083091],[2.083091] +54,1,[3.1834211],[2.1834211] +55,1,[3.2304752],[2.2304752] +56,1,[3.1912327],[2.1912327] +57,2,[3.233625],[1.2336249] +58,1,[3.151132],[2.151132] +59,1,[3.128165],[2.128165] +60,1,[3.1794112],[2.1794112] +61,1,[3.1506593],[2.1506593] +62,1,[3.414938],[2.414938] +63,1,[3.1767502],[2.1767502] +64,1,[3.2200217],[2.2200217] +65,1,[3.1212103],[2.1212103] +66,2,[3.107779],[1.107779] +67,1,[3.19124],[2.19124] +68,1,[3.1335583],[2.1335583] +69,1,[3.152432],[2.152432] +70,2,[3.1575732],[1.1575732] +71,1,[3.3823683],[2.3823683] 
+72,1,[3.237425],[2.237425] +73,1,[3.1633604],[2.1633604] +74,1,[3.230582],[2.230582] +75,1,[3.216795],[2.216795] +76,1,[3.1018682],[2.1018682] +77,3,[3.1510038],[0.15100384] +78,1,[3.1572065],[2.1572065] +79,1,[3.5745776],[2.5745776] +80,4,[3.2226183],[0.77738166] +81,3,[3.26286],[0.26286006] +82,1,[3.1844096],[2.1844096] +83,1,[3.2331312],[2.2331312] +84,1,[3.1947682],[2.1947682] +85,1,[3.3180172],[2.3180172] +86,1,[3.1475859],[2.1475859] +87,2,[4.3187156],[2.3187156] +88,3,[3.8470128],[0.84701276] +89,1,[3.243755],[2.243755] +90,2,[3.267778],[1.2677779] +91,1,[3.1092556],[2.1092556] +92,1,[3.1511948],[2.1511948] +93,1,[3.3660908],[2.3660908] +94,1,[3.0895598],[2.0895598] +95,3,[3.385168],[0.38516808] +96,4,[3.2276397],[0.7723603] +97,1,[3.417971],[2.417971] +98,1,[3.313327],[2.313327] +99,1,[3.304586],[2.304586] +100,1,[3.1942363],[2.1942363] +101,1,[3.1742055],[2.1742055] +102,2,[3.3272347],[1.3272347] +103,2,[3.1159227],[1.1159227] +104,1,[3.2071147],[2.2071147] +105,1,[3.2013726],[2.2013726] +106,2,[3.2064168],[1.2064168] +107,2,[3.2532682],[1.2532682] +108,2,[3.159625],[1.159625] +109,1,[3.2445614],[2.2445614] +110,1,[5.9715743],[4.9715743] +111,3,[3.1576402],[0.15764022] +112,2,[3.1335013],[1.1335013] +113,2,[3.2428172],[1.2428172] +114,1,[3.1551218],[2.1551218] +115,1,[3.4528136],[2.4528136] +116,1,[3.1049106],[2.1049106] +117,1,[3.124335],[2.124335] +118,1,[3.2143192],[2.2143192] +119,1,[3.0859122],[2.0859122] +120,1,[3.1246822],[2.1246822] +121,1,[3.2219293],[2.2219293] +122,2,[3.1025085],[1.1025085] +123,1,[3.2976382],[2.2976382] +124,2,[3.297976],[1.297976] +125,2,[3.094386],[1.0943861] +126,2,[3.2065113],[1.2065113] +127,3,[3.1182585],[0.11825848] +128,1,[3.2048883],[2.2048883] +129,1,[4.7608075],[3.7608075] +130,3,[3.2224865],[0.2224865] +131,1,[3.1846926],[2.1846926] +132,1,[3.152249],[2.152249] +133,1,[3.3195736],[2.3195736] +134,1,[3.900116],[2.900116] +135,2,[4.643022],[2.643022] +136,1,[3.2312698],[2.2312698] +137,1,[3.118032],[2.118032] +138,1,[3.185764],[2.185764] +139,1,[3.1327279],[2.1327279] +140,1,[3.1932614],[2.1932614] +141,1,[3.1828678],[2.1828678] +142,2,[3.1882267],[1.1882267] +143,1,[3.2696953],[2.2696953] +144,11,[14.261623],[3.2616234] +145,10,[3.153248],[6.846752] +146,9,[4.0543613],[4.9456387] +147,6,[3.572538],[2.427462] +148,5,[11.51342],[6.51342] +149,11,[11.003207],[0.00320721] +150,9,[6.6441517],[2.3558483] +151,11,[5.290982],[5.709018] +152,6,[3.1730776],[2.8269224] +153,6,[3.5739255],[2.4260745] +154,10,[9.402787],[0.5972128] +155,10,[3.2671812],[6.7328186] +156,9,[5.2959085],[3.7040915] +157,7,[6.508396],[0.49160385] +158,6,[3.92308],[2.07692] +159,9,[4.2710733],[4.7289267] +160,9,[3.521167],[5.478833] +161,10,[3.476306],[6.523694] +162,6,[3.4737418],[2.5262582] +163,8,[3.2113612],[4.788639] +164,12,[7.126312],[4.873688] +165,9,[5.073382],[3.926618] +166,6,[3.7740905],[2.2259095] +167,9,[6.7328854],[2.2671146] +168,9,[6.8995757],[2.1004243] +169,6,[15.483642],[9.483642] +170,9,[3.1967196],[5.8032804] +171,9,[3.492829],[5.5071707] +172,6,[3.1364493],[2.8635507] +173,13,[27.57613],[14.57613] +174,13,[9.174787],[3.8252134] +175,8,[7.095441],[0.90455914] +176,10,[6.5626717],[3.4373283] +177,9,[5.8527946],[3.1472054] +178,7,[7.87631],[0.8763099] +179,6,[7.8471637],[1.8471637] +180,9,[12.548334],[3.5483341] +181,6,[8.853576],[2.8535757] +182,9,[5.838269],[3.1617308] +183,6,[6.2000227],[0.2000227] +184,6,[23.639236],[17.639236] +185,7,[6.5021353],[0.49786472] +186,7,[4.919899],[2.080101] +187,11,[10.837285],[0.16271496] +188,6,[3.9254444],[2.0745556] 
+189,7,[10.718024],[3.7180243] +190,8,[4.1499014],[3.8500986] +191,6,[13.379616],[7.379616] +192,6,[3.2292953],[2.7707047] +193,8,[3.239493],[4.760507] +194,7,[16.751928],[9.751928] +195,6,[3.3409398],[2.6590602] +196,13,[5.977535],[7.022465] +197,10,[4.0723286],[5.9276714] +198,9,[23.529593],[14.5295925] +199,6,[21.868034],[15.868034] +200,11,[14.669025],[3.6690254] +201,6,[3.5062966],[2.4937034] +202,10,[23.51805],[13.518049] +203,9,[9.545233],[0.5452328] +204,6,[6.805388],[0.805388] +205,9,[6.890399],[2.109601] +206,6,[5.8475714],[0.15242863] +207,8,[4.0905437],[3.9094563] +208,12,[8.104032],[3.8959684] +209,13,[10.0367],[2.9632998] +210,10,[5.9352407],[4.0647593] +211,10,[16.856897],[6.8568974] +212,11,[22.395456],[11.395456] +213,7,[16.544582],[9.544582] +214,13,[20.438671],[7.438671] +215,11,[13.508799],[2.5087986] +216,9,[17.092766],[8.092766] +217,11,[6.642543],[4.357457] +218,14,[21.089005],[7.0890045] +219,7,[20.144602],[13.144602] +220,7,[3.3980217],[3.6019783] +221,7,[6.1503005],[0.8496995] +222,9,[4.971789],[4.028211] +223,11,[28.534525],[17.534525] +224,11,[18.47307],[7.47307] +225,7,[3.1754458],[3.8245542] +226,12,[14.07027],[2.0702696] +227,7,[4.7484818],[2.2515182] +228,9,[11.651657],[2.651657] +229,10,[3.2762764],[6.7237234] +230,11,[18.07539],[7.07539] +231,7,[6.4599648],[0.54003525] +232,11,[8.942006],[2.057994] +233,7,[12.030698],[5.030698] +234,11,[21.874027],[10.874027] +235,10,[21.496021],[11.496021] +236,10,[13.0650425],[3.0650425] +237,12,[7.196018],[4.803982] +238,9,[15.86677],[6.86677] +239,7,[7.7056465],[0.7056465] +240,14,[3.2746644],[10.725336] +241,11,[3.3319912],[7.668009] +242,7,[4.6255937],[2.3744063] +243,11,[24.101212],[13.101212] +244,12,[5.2344117],[6.7655883] +245,7,[9.146702],[2.1467018] +246,8,[3.252511],[4.747489] +247,14,[14.164238],[0.16423798] +248,8,[4.1670065],[3.8329935] +249,13,[5.3542027],[7.6457973] +250,7,[3.3340414],[3.6659586] +251,8,[3.2598133],[4.7401867] +252,7,[17.363491],[10.363491] +253,7,[4.30069],[2.6993098] +254,8,[3.3662536],[4.633746] +255,13,[19.239565],[6.239565] +256,7,[5.4457183],[1.5542817] +257,11,[11.698555],[0.698555] +258,11,[19.061928],[8.061928] +259,10,[10.327328],[0.32732773] +260,7,[4.0716243],[2.9283757] +261,10,[6.9139185],[3.0860815] +262,7,[4.9696827],[2.0303173] +263,7,[3.3985517],[3.6014483] +264,7,[3.3406053],[3.6593947] +265,9,[22.506695],[13.506695] +266,10,[16.562021],[6.5620213] +267,14,[10.008026],[3.9919739] +268,7,[3.1796136],[3.8203864] +269,8,[11.061977],[3.0619774] +270,8,[13.464768],[5.4647684] +271,10,[9.971591],[0.028409] +272,8,[19.545177],[11.545177] +273,13,[19.892479],[6.892479] +274,8,[6.3622665],[1.6377335] +275,8,[12.808085],[4.8080854] +276,10,[23.215067],[13.215067] +277,8,[3.968636],[4.031364] +278,8,[15.695298],[7.695298] +279,9,[3.437766],[5.562234] +280,8,[12.5078945],[4.5078945] +281,10,[11.109586],[1.1095858] +282,8,[4.549953],[3.450047] +283,8,[5.491795],[2.508205] +284,10,[4.8633757],[5.1366243] +285,13,[13.32118],[0.32118034] +286,8,[13.895215],[5.895215] +287,15,[15.531944],[0.5319443] +288,8,[8.198943],[0.19894314] +289,9,[14.268209],[5.2682095] +290,9,[3.3781874],[5.621813] +291,8,[8.238433],[0.23843288] +292,9,[13.470463],[4.470463] +293,8,[18.01532],[10.01532] +294,10,[15.658522],[5.6585217] +295,8,[8.262044],[0.26204395] +296,8,[10.305582],[2.305582] +297,8,[8.621055],[0.62105465] +298,10,[12.051524],[2.0515242] +299,11,[13.40831],[2.40831] +300,8,[5.8179536],[2.1820464] +301,8,[16.562021],[8.562021] +302,12,[10.912355],[1.0876446] +303,8,[6.8161335],[1.1838665] 
+304,8,[4.970975],[3.029025] +305,9,[4.7677417],[4.2322583] +306,8,[7.083919],[0.91608095] +307,13,[14.912143],[1.9121428] +308,8,[4.3529725],[3.6470275] +309,8,[10.3202305],[2.3202305] +310,8,[4.1827292],[3.8172708] +311,8,[12.656062],[4.656062] +312,11,[13.87383],[2.8738298] +313,12,[14.662959],[2.662959] +314,12,[6.575183],[5.424817] +315,9,[7.9988885],[1.0011115] +316,12,[15.8808775],[3.8808775] +317,12,[8.936044],[3.0639563] +318,12,[10.296654],[1.7033463] +319,8,[15.377787],[7.3777866] +320,12,[16.334438],[4.3344383] +321,12,[9.050781],[2.9492188] +322,13,[14.903021],[1.9030209] +323,12,[12.670935],[0.6709347] +324,12,[5.5913672],[6.4086328] +325,13,[19.868904],[6.868904] +326,7,[17.60554],[10.605539] +327,10,[21.853779],[11.853779] +328,12,[4.137136],[7.862864] +329,12,[19.994766],[7.994766] +330,12,[23.769375],[11.769375] +331,10,[14.7303505],[4.7303505] +332,8,[3.517739],[4.4822607] +333,8,[6.5181026],[1.4818974] +334,8,[6.195528],[1.804472] +335,11,[15.111639],[4.111639] +336,12,[12.478244],[0.47824383] +337,12,[6.4019065],[5.5980935] +338,12,[7.3329945],[4.6670055] +339,12,[7.046439],[4.953561] +340,12,[5.296892],[6.703108] +341,12,[13.748291],[1.748291] +342,14,[19.164787],[5.1647873] +343,8,[6.9409194],[1.0590806] +344,8,[13.914047],[5.9140472] +345,8,[10.91421],[2.9142103] +346,9,[8.02409],[0.9759102] +347,10,[9.331245],[0.6687546] +348,12,[14.652533],[2.6525326] +349,9,[15.583753],[6.5837526] +350,14,[3.5488856],[10.451115] +351,8,[3.8975978],[4.102402] +352,12,[19.156292],[7.156292] +353,13,[9.105677],[3.8943233] +354,8,[11.682309],[3.6823092] +355,12,[12.887665],[0.8876648] +356,8,[11.319478],[3.319478] +357,12,[4.8228335],[7.1771665] +358,9,[8.6714945],[0.32850552] +359,12,[10.40418],[1.5958204] +360,12,[15.60103],[3.6010303] +361,12,[4.3685246],[7.6314754] +362,16,[16.120216],[0.12021637] +363,13,[9.220436],[3.779564] +364,12,[14.337321],[2.3373213] +365,12,[16.981512],[4.981512] +366,12,[19.476625],[7.4766254] +367,9,[14.05994],[5.0599403] +368,11,[20.199139],[9.199139] +369,13,[20.85393],[7.8539295] +370,11,[20.50534],[9.505341] +371,12,[10.523701],[1.4762993] +372,12,[12.453393],[0.45339298] +373,12,[7.5270653],[4.4729347] +374,9,[13.814764],[4.814764] +375,12,[9.9443],[2.0557003] +376,9,[9.438089],[0.43808937] +377,9,[15.259994],[6.2599936] +378,12,[3.356732],[8.643269] +379,12,[12.561132],[0.56113243] +380,12,[15.946029],[3.9460287] +381,9,[5.108666],[3.891334] +382,12,[18.78419],[6.784189] +383,12,[7.856471],[4.143529] +384,12,[21.223675],[9.223675] +385,12,[18.813812],[6.8138123] +386,12,[12.853104],[0.85310364] +387,12,[12.12745],[0.12744999] +388,12,[25.766819],[13.766819] +389,12,[3.2599761],[8.740024] +390,12,[40.437737],[28.437737] +391,12,[12.69906],[0.69906044] +392,12,[7.6706223],[4.3293777] +393,12,[15.2541],[3.2540998] +394,12,[4.82055],[7.17945] +395,12,[16.220623],[4.220623] +396,12,[20.016901],[8.016901] +397,12,[12.54893],[0.54893017] +398,12,[23.529167],[11.529167] +399,12,[5.242799],[6.757201] +400,12,[10.557004],[1.442996] +401,12,[21.722282],[9.722282] +402,12,[5.871041],[6.128959] +403,12,[17.049282],[5.049282] +404,12,[5.891615],[6.108385] +405,12,[11.345163],[0.65483665] +406,12,[16.43964],[4.43964] +407,8,[8.62194],[0.62193966] +408,12,[3.9223654],[8.077635] +409,12,[8.587641],[3.4123592] +410,13,[19.265762],[6.2657623] +411,12,[11.167123],[0.83287716] +412,12,[7.1341853],[4.8658147] +413,10,[6.0755734],[3.9244266] +414,8,[3.924174],[4.0758257] +415,8,[3.6249218],[4.375078] +416,12,[20.179482],[8.1794815] +417,8,[12.346504],[4.346504] 
+418,11,[10.411201],[0.5887995] +419,14,[11.393169],[2.6068306] +420,10,[19.796753],[9.796753] +421,11,[12.46265],[1.4626503] +422,12,[7.41397],[4.58603] +423,14,[12.696254],[1.3037462] +424,13,[10.415334],[2.5846663] +425,13,[8.585601],[4.414399] +426,13,[24.493979],[11.4939785] +427,8,[5.056488],[2.943512] +428,16,[3.2845795],[12.715421] +429,11,[8.502881],[2.497119] +430,15,[16.890038],[1.8900375] +431,13,[13.174999],[0.17499924] +432,9,[20.467445],[11.467445] +433,9,[10.008963],[1.0089626] +434,15,[24.897522],[9.897522] +435,14,[10.52644],[3.4735603] +436,9,[6.0710225],[2.9289775] +437,12,[19.239342],[7.2393417] +438,13,[19.882841],[6.882841] +439,12,[19.33447],[7.3344707] +440,14,[16.57518],[2.57518] +441,14,[15.014593],[1.0145931] +442,16,[23.147503],[7.147503] +443,14,[24.294016],[10.294016] +444,12,[13.020791],[1.020791] +445,13,[16.837826],[3.8378258] +446,11,[17.024574],[6.0245743] +447,15,[10.730416],[4.2695837] +448,14,[23.980305],[9.980305] +449,9,[8.621055],[0.37894535] +450,14,[10.445616],[3.5543842] +451,13,[14.541171],[1.5411711] +452,13,[15.1334095],[2.1334095] +453,14,[7.5176463],[6.4823537] +454,13,[3.5414934],[9.458507] +455,13,[7.875254],[5.124746] +456,12,[11.91903],[0.08096981] +457,13,[17.327234],[4.3272343] +458,12,[9.607012],[2.3929882] +459,12,[14.049308],[2.0493078] +460,13,[15.619029],[2.619029] +461,13,[13.179037],[0.1790371] +462,13,[16.170506],[3.1705055] +463,14,[16.763165],[2.7631645] +464,13,[14.602214],[1.6022139] +465,14,[10.168254],[3.831746] +466,12,[7.1188364],[4.8811636] +467,16,[28.601511],[12.601511] +468,13,[12.074177],[0.9258232] +469,13,[4.7497587],[8.250241] +470,13,[16.490538],[3.4905376] +471,13,[19.265762],[6.2657623] +472,12,[3.5915942],[8.408405] +473,14,[15.81489],[1.8148899] +474,16,[13.619274],[2.3807259] +475,13,[19.773582],[6.7735825] +476,14,[8.276931],[5.723069] +477,16,[14.554143],[1.445857] +478,10,[12.912743],[2.9127426] +479,9,[6.728683],[2.271317] +480,14,[8.996612],[5.0033884] +481,14,[22.59826],[8.598261] +482,9,[4.860921],[4.139079] +483,12,[14.414412],[2.4144115] +484,9,[6.413683],[2.586317] +485,14,[7.829446],[6.170554] +486,12,[5.377971],[6.622029] +487,10,[9.775912],[0.22408772] +488,14,[14.579209],[0.5792093] +489,9,[11.261538],[2.2615376] +490,9,[11.874314],[2.8743143] +491,14,[17.994616],[3.9946156] +492,14,[18.166698],[4.1666985] +493,16,[13.729334],[2.2706661] +494,13,[8.247671],[4.752329] +495,11,[21.178318],[10.178318] +496,10,[4.185556],[5.814444] +497,10,[18.428778],[8.428778] +498,13,[7.426213],[5.573787] +499,13,[9.204838],[3.7951622] +500,17,[23.024658],[6.024658] +501,10,[3.5879138],[6.4120865] +502,14,[12.54893],[1.4510698] +503,14,[4.481912],[9.518087] +504,10,[20.736881],[10.736881] +505,17,[18.239729],[1.2397289] +506,14,[15.93527],[1.9352703] +507,13,[15.045154],[2.0451536] +508,13,[11.54908],[1.4509201] +509,16,[10.30392],[5.69608] +510,13,[17.649035],[4.6490345] +511,12,[8.536363],[3.4636374] +512,10,[9.157952],[0.8420477] +513,10,[16.557768],[6.557768] +514,11,[9.645671],[1.3543291] +515,11,[8.79978],[2.20022] +516,13,[23.042173],[10.042173] +517,13,[14.477938],[1.4779377] +518,13,[14.074687],[1.074687] +519,13,[4.8933754],[8.106625] +520,10,[10.190734],[0.19073391] +521,13,[9.911397],[3.088603] +522,14,[17.072084],[3.0720844] +523,10,[8.048629],[1.9513712] +524,13,[16.805332],[3.8053322] +525,13,[20.697536],[7.6975365] +526,10,[22.28114],[12.281139] +527,10,[24.804415],[14.804415] +528,10,[6.908568],[3.091432] +529,10,[8.904072],[1.0959282] +530,15,[17.21083],[2.2108307] 
+531,10,[10.305791],[0.3057909] +532,14,[11.351513],[2.648487] +533,13,[14.118167],[1.1181669] +534,17,[17.495623],[0.49562263] +535,14,[18.947206],[4.9472065] +536,10,[19.754707],[9.754707] +537,16,[13.245993],[2.7540073] +538,13,[6.695867],[6.304133] +539,12,[13.620653],[1.6206532] +540,10,[17.674414],[7.6744137] +541,13,[20.153725],[7.1537247] +542,10,[19.156317],[9.156317] +543,12,[12.181551],[0.18155098] +544,15,[18.867393],[3.8673935] +545,15,[16.334415],[1.3344154] +546,15,[14.588945],[0.4110546] +547,16,[19.08173],[3.08173] +548,16,[14.529723],[1.4702768] +549,15,[18.204514],[3.2045135] +550,15,[10.773112],[4.2268877] +551,15,[16.20973],[1.2097301] +552,15,[18.611374],[3.611374] +553,15,[18.076593],[3.0765934] +554,18,[13.834379],[4.165621] +555,15,[21.200722],[6.2007217] +556,19,[19.604717],[0.60471725] +557,15,[24.534483],[9.534483] +558,16,[16.415424],[0.41542435] +559,15,[17.398329],[2.3983288] +560,15,[19.38371],[4.383711] +561,11,[20.421373],[9.421373] +562,14,[15.630062],[1.6300621] +563,14,[16.342043],[2.342043] +564,15,[13.531853],[1.4681473] +565,15,[18.219227],[3.2192268] +566,15,[21.984348],[6.9843483] +567,11,[12.103326],[1.1033258] +568,15,[22.394432],[7.394432] +569,15,[15.714797],[0.714797] +570,14,[15.158169],[1.1581688] +571,15,[18.243649],[3.2436485] +572,15,[16.452496],[1.4524956] +573,15,[19.268528],[4.268528] +574,15,[25.609838],[10.6098385] +575,19,[21.01855],[2.0185509] +576,16,[6.6885242],[9.311476] +577,15,[18.803099],[3.8030987] +578,11,[14.977843],[3.9778433] +579,19,[21.62557],[2.6255703] +580,15,[20.102896],[5.1028957] +581,15,[18.509361],[3.5093613] +582,16,[16.91628],[0.91628075] +583,13,[21.264578],[8.264578] +584,15,[14.280096],[0.71990395] +585,14,[16.47457],[2.4745693] +586,14,[20.376804],[6.3768044] +587,14,[15.778166],[1.7781658] +588,14,[12.618322],[1.3816776] +589,16,[15.810083],[0.18991661] +590,15,[21.36012],[6.360121] +591,16,[17.37077],[1.3707695] +592,16,[18.519754],[2.5197544] +593,11,[11.393169],[0.3931694] +594,15,[16.574432],[1.5744324] +595,15,[15.438463],[0.4384632] +596,15,[20.43676],[5.43676] +597,15,[11.827959],[3.172041] +598,11,[15.682948],[4.682948] +599,15,[4.9658604],[10.03414] +600,15,[17.821428],[2.8214283] +601,15,[22.125854],[7.1258545] +602,11,[13.341111],[2.3411112] +603,16,[12.618947],[3.381053] +604,17,[21.513294],[4.513294] +605,15,[14.111001],[0.888999] +606,16,[17.417646],[1.4176464] +607,16,[10.481712],[5.5182877] +608,15,[14.5457],[0.45429993] +609,16,[17.030926],[1.0309258] +610,15,[18.129805],[3.1298046] +611,17,[19.926287],[2.9262867] +612,15,[18.504559],[3.5045586] +613,15,[20.198397],[5.1983967] +614,19,[16.257223],[2.7427769] +615,15,[18.343912],[3.3439121] +616,16,[17.721756],[1.721756] +617,15,[9.204838],[5.795162] +618,18,[10.4692135],[7.5307865] +619,16,[17.84048],[1.8404808] +620,18,[18.73526],[0.73526] +621,15,[22.235474],[7.2354736] +622,15,[17.226562],[2.2265625] +623,16,[12.3315735],[3.6684265] +624,15,[19.136675],[4.136675] +625,15,[18.811308],[3.811308] +626,15,[15.673748],[0.673748] +627,17,[23.01673],[6.0167294] +628,19,[17.796398],[1.2036018] +629,16,[16.415516],[0.4155159] +630,16,[18.319525],[2.3195248] +631,12,[15.636558],[3.6365576] +632,16,[17.827496],[1.8274956] +633,19,[17.030058],[1.9699421] +634,16,[18.518774],[2.518774] +635,16,[19.908031],[3.9080315] +636,19,[22.25975],[3.2597504] +637,16,[15.919398],[0.08060169] +638,16,[15.781006],[0.21899414] +639,17,[15.0959015],[1.9040985] +640,17,[11.348638],[5.6513624] +641,19,[17.645588],[1.3544121] +642,19,[17.201445],[1.7985554] 
+643,16,[21.526438],[5.5264378] +644,16,[16.267797],[0.26779747] +645,17,[16.111128],[0.88887215] +646,16,[20.028126],[4.028126] +647,17,[19.82443],[2.8244305] +648,17,[18.851606],[1.8516064] +649,16,[22.154629],[6.1546288] +650,20,[17.470411],[2.5295887] +651,16,[17.60071],[1.6007099] +652,16,[15.736263],[0.26373672] +653,16,[16.979807],[0.9798069] +654,16,[14.208396],[1.791604] +655,16,[16.18352],[0.18351936] +656,16,[20.875908],[4.875908] +657,16,[23.974003],[7.974003] +658,16,[16.638626],[0.6386261] +659,16,[24.196165],[8.196165] +660,16,[22.196426],[6.1964264] +661,16,[6.605018],[9.394981] +662,20,[22.012487],[2.0124874] +663,16,[18.103806],[2.1038055] +664,16,[17.066755],[1.0667553] +665,16,[13.218939],[2.7810612] +666,16,[13.96213],[2.0378704] +667,16,[14.487949],[1.5120506] +668,19,[18.246096],[0.75390434] +669,16,[18.229464],[2.2294636] +670,17,[12.227523],[4.772477] +671,17,[13.026058],[3.9739418] +672,12,[19.159111],[7.159111] +673,17,[18.244644],[1.2446442] +674,12,[20.038996],[8.038996] +675,13,[19.567415],[6.567415] +676,18,[13.317358],[4.682642] +677,13,[19.125416],[6.125416] +678,17,[25.152706],[8.152706] +679,16,[14.211236],[1.788764] +680,15,[16.596113],[1.5961132] +681,16,[13.57795],[2.4220505] +682,16,[19.266085],[3.2660847] +683,12,[22.73778],[10.73778] +684,15,[20.59506],[5.5950603] +685,12,[20.95025],[8.950251] +686,15,[6.868067],[8.131933] +687,15,[13.683827],[1.3161726] +688,12,[17.472271],[5.472271] +689,17,[16.502193],[0.49780655] +690,13,[19.048027],[6.048027] +691,16,[18.252417],[2.2524166] +692,15,[23.938856],[8.938856] +693,18,[20.115616],[2.1156158] +694,17,[12.430319],[4.569681] +695,15,[23.114204],[8.114204] +696,14,[12.903905],[1.0960951] +697,18,[20.873423],[2.8734226] +698,15,[15.490959],[0.49095917] +699,15,[12.68608],[2.31392] +700,15,[21.185213],[6.185213] +701,13,[14.940141],[1.9401407] +702,17,[16.249922],[0.7500782] +703,16,[21.917336],[5.9173355] +704,20,[19.108978],[0.8910217] +705,17,[16.010298],[0.9897022] +706,12,[22.678663],[10.678663] +707,16,[16.453138],[0.45313835] +708,17,[17.369366],[0.3693657] +709,18,[14.566332],[3.4336681] +710,18,[22.159029],[4.159029] +711,16,[17.706833],[1.7068329] +712,12,[18.394527],[6.3945274] +713,18,[17.585506],[0.41449356] +714,15,[11.423375],[3.5766249] +715,17,[19.52814],[2.528139] +716,15,[18.397612],[3.3976116] +717,16,[18.973837],[2.973837] +718,15,[21.10586],[6.1058598] +719,13,[25.312801],[12.312801] +720,16,[25.168182],[9.168182] +721,12,[19.395103],[7.3951035] +722,15,[20.08067],[5.0806694] +723,17,[12.500023],[4.499977] +724,15,[19.293205],[4.2932053] +725,16,[17.491053],[1.4910526] +726,17,[23.327839],[6.327839] +727,13,[9.856709],[3.1432915] +728,16,[19.594936],[3.5949364] +729,16,[23.98595],[7.9859505] +730,16,[17.272932],[1.272932] +731,16,[21.965593],[5.9655933] +732,19,[16.53893],[2.461069] +733,17,[17.757206],[0.75720596] +734,17,[13.550467],[3.4495335] +735,16,[21.410782],[5.410782] +736,20,[14.29055],[5.70945] +737,16,[18.20648],[2.20648] +738,16,[20.422083],[4.422083] +739,16,[20.366295],[4.366295] +740,16,[18.907385],[2.9073849] +741,17,[17.441185],[0.441185] +742,16,[19.8929],[3.8929005] +743,15,[17.098137],[2.098137] +744,16,[17.761745],[1.7617455] +745,16,[26.946049],[10.946049] +746,17,[17.029789],[0.02978897] +747,17,[15.598219],[1.4017811] +748,15,[11.2143545],[3.7856455] +749,16,[19.090229],[3.090229] +750,16,[13.518613],[2.4813871] +751,15,[16.856417],[1.8564167] +752,16,[20.16931],[4.1693096] +753,17,[20.582962],[3.582962] +754,13,[19.612371],[6.6123714] 
+755,16,[18.501144],[2.5011444] +756,17,[19.185305],[2.1853046] +757,16,[18.24441],[2.2444096] +758,17,[18.414053],[1.414053] +759,17,[13.7287],[3.2713003] +760,15,[13.336446],[1.6635542] +761,17,[18.206543],[1.206543] +762,17,[22.780533],[5.780533] +763,20,[20.993061],[0.99306107] +764,16,[22.85617],[6.8561707] +765,17,[19.93001],[2.9300098] +766,17,[19.000206],[2.000206] +767,20,[19.762629],[0.23737144] +768,19,[20.112457],[1.1124573] +769,15,[18.17174],[3.1717396] +770,17,[16.305975],[0.69402504] +771,21,[14.993038],[6.006962] +772,17,[17.457748],[0.4577484] +773,18,[21.807106],[3.807106] +774,20,[23.406746],[3.406746] +775,15,[19.654585],[4.654585] +776,20,[20.945633],[0.94563293] +777,19,[19.3598],[0.35980034] +778,15,[23.677841],[8.677841] +779,18,[17.796873],[0.2031269] +780,17,[15.183697],[1.8163033] +781,16,[22.158758],[6.158758] +782,20,[20.387056],[0.38705635] +783,20,[29.32348],[9.323481] +784,20,[18.403242],[1.5967579] +785,17,[16.665068],[0.33493233] +786,17,[12.693086],[4.3069143] +787,19,[31.995108],[12.995108] +788,18,[19.579294],[1.5792942] +789,18,[17.696117],[0.3038826] +790,16,[17.670372],[1.670372] +791,17,[21.940094],[4.940094] +792,13,[18.780764],[5.7807636] +793,20,[21.265087],[1.2650871] +794,19,[14.283069],[4.7169313] +795,14,[9.0237875],[4.9762125] +796,18,[17.955723],[0.04427719] +797,18,[18.994766],[0.99476624] +798,20,[18.818323],[1.1816769] +799,18,[22.595644],[4.595644] +800,20,[20.231174],[0.23117447] +801,16,[14.825906],[1.1740942] +802,21,[22.238018],[1.238018] +803,17,[15.144775],[1.8552246] +804,20,[22.645037],[2.6450367] +805,18,[16.376932],[1.6230679] +806,18,[16.075317],[1.9246826] +807,20,[19.343746],[0.6562538] +808,20,[24.831486],[4.8314857] +809,18,[22.739584],[4.739584] +810,17,[20.777748],[3.777748] +811,20,[18.13094],[1.8690605] +812,16,[16.75112],[0.7511196] +813,20,[25.737572],[5.7375717] +814,21,[12.614193],[8.385807] +815,17,[13.634562],[3.3654385] +816,13,[5.56242],[7.43758] +817,21,[22.45647],[1.4564705] +818,13,[25.903744],[12.903744] +819,17,[23.920452],[6.920452] +820,17,[16.83664],[0.1633606] +821,17,[22.668468],[5.6684685] +822,13,[15.174165],[2.1741648] +823,17,[16.596592],[0.40340805] +824,17,[21.17882],[4.1788197] +825,21,[21.430622],[0.4306221] +826,17,[21.831507],[4.8315067] +827,17,[18.682644],[1.6826439] +828,21,[20.756783],[0.24321747] +829,21,[16.51922],[4.4807796] +830,17,[17.531443],[0.53144264] +831,21,[19.91731],[1.0826893] +832,17,[17.991503],[0.99150276] +833,17,[21.33816],[4.3381596] +834,17,[17.00587],[0.00587082] +835,21,[17.093414],[3.9065857] +836,17,[17.098387],[0.09838676] +837,18,[3.6362011],[14.363799] +838,18,[21.114939],[3.1149387] +839,21,[19.980196],[1.019804] +840,18,[9.03684],[8.96316] +841,22,[30.497793],[8.497793] +842,22,[21.928106],[0.07189369] +843,22,[17.833591],[4.1664085] +844,14,[19.833511],[5.8335114] +845,18,[27.513649],[9.513649] +846,18,[23.346071],[5.3460712] +847,18,[17.40046],[0.5995407] +848,14,[18.649057],[4.6490574] +849,18,[21.109356],[3.109356] +850,18,[17.542923],[0.45707703] +851,22,[21.049038],[0.95096207] +852,18,[20.456081],[2.4560814] +853,18,[24.208565],[6.2085648] +854,18,[19.328915],[1.3289146] +855,22,[20.30154],[1.6984596] +856,18,[25.231585],[7.2315845] +857,18,[17.883577],[0.11642265] +858,19,[28.063528],[9.063528] +859,21,[17.936722],[3.0632782] +860,22,[28.142212],[6.142212] +861,21,[18.938574],[2.0614262] +862,18,[19.332043],[1.3320427] +863,22,[27.218927],[5.2189274] +864,18,[21.138823],[3.1388226] +865,17,[18.06142],[1.0614204] +866,18,[17.716963],[0.2830372] 
+867,19,[18.189869],[0.8101311] +868,14,[20.4954],[6.4953995] +869,14,[23.38043],[9.38043] +870,19,[22.492601],[3.4926014] +871,22,[25.091791],[3.0917912] +872,18,[19.726954],[1.7269535] +873,18,[11.845735],[6.1542654] +874,22,[18.233711],[3.7662888] +875,22,[20.124022],[1.8759785] +876,19,[24.67014],[5.6701393] +877,18,[19.277075],[1.2770748] +878,18,[18.830313],[0.8303127] +879,14,[12.888053],[1.1119471] +880,14,[20.810389],[6.8103886] +881,18,[29.64039],[11.64039] +882,18,[19.96708],[1.9670792] +883,18,[18.905554],[0.9055538] +884,14,[18.836637],[4.8366375] +885,18,[21.657583],[3.6575832] +886,22,[23.80264],[1.8026409] +887,22,[26.094122],[4.094122] +888,14,[21.511847],[7.5118465] +889,18,[21.06986],[3.0698605] +890,15,[22.414532],[7.4145317] +891,21,[26.62345],[5.6234493] +892,20,[23.247725],[3.2477245] +893,21,[29.23694],[8.23694] +894,18,[20.0102],[2.0102005] +895,18,[22.929234],[4.9292336] +896,16,[12.075066],[3.9249344] +897,18,[20.686659],[2.6866589] +898,16,[20.021614],[4.021614] +899,14,[21.90112],[7.901119] +900,21,[18.510517],[2.4894829] +901,14,[21.965525],[7.9655247] +902,17,[18.575348],[1.5753479] +903,18,[22.224144],[4.224144] +904,16,[19.05904],[3.05904] +905,15,[19.703472],[4.703472] +906,16,[18.119312],[2.1193123] +907,15,[25.724129],[10.724129] +908,18,[20.298223],[2.2982235] +909,19,[20.161894],[1.1618938] +910,20,[14.6377125],[5.3622875] +911,16,[17.696503],[1.6965027] +912,16,[19.869228],[3.8692284] +913,14,[26.69892],[12.698919] +914,21,[21.154753],[0.15475273] +915,19,[18.4745],[0.52549934] +916,18,[20.850357],[2.850357] +917,19,[26.166317],[7.166317] +918,18,[20.510185],[2.5101852] +919,14,[14.655975],[0.65597534] +920,21,[27.451056],[6.4510555] +921,16,[16.580296],[0.58029556] +922,19,[23.346624],[4.3466244] +923,18,[23.466087],[5.4660873] +924,1,[3.4865806],[2.4865806] +925,1,[9.503841],[8.503841] +926,1,[3.2804143],[2.2804143] +927,1,[3.3672304],[2.3672304] +928,1,[3.3723445],[2.3723445] +929,2,[3.5664384],[1.5664384] +930,1,[3.1856065],[2.1856065] +931,1,[3.1319265],[2.1319265] +932,1,[3.1875396],[2.1875396] +933,1,[3.17764],[2.17764] +934,1,[3.2880974],[2.2880974] +935,1,[3.17834],[2.17834] +936,1,[3.1100957],[2.1100957] +937,1,[3.2048445],[2.2048445] +938,1,[3.105921],[2.105921] +939,1,[3.1389296],[2.1389296] +940,1,[3.2704282],[2.2704282] +941,1,[3.114975],[2.114975] +942,1,[3.2585735],[2.2585735] +943,1,[3.1835077],[2.1835077] +944,1,[3.7282724],[2.7282724] +945,1,[5.501566],[4.501566] +946,2,[3.25487],[1.2548699] +947,1,[3.3266478],[2.3266478] +948,1,[3.167726],[2.167726] +949,1,[3.162304],[2.162304] +950,1,[3.1344802],[2.1344802] +951,1,[3.1487594],[2.1487594] +952,1,[3.1039717],[2.1039717] +953,1,[3.1621406],[2.1621406] +954,1,[8.2763],[7.2763004] +955,1,[3.1088443],[2.1088443] +956,1,[3.343637],[2.343637] +957,1,[3.3590126],[2.3590126] +958,1,[3.151715],[2.151715] +959,1,[3.1485407],[2.1485407] +960,1,[3.7030296],[2.7030296] +961,1,[3.2619495],[2.2619495] +962,4,[3.4762666],[0.5237334] +963,1,[3.475402],[2.475402] +964,1,[6.924973],[5.924973] +965,1,[3.1554012],[2.1554012] +966,1,[3.1264486],[2.1264486] +967,1,[3.1675358],[2.1675358] +968,1,[3.1045237],[2.1045237] +969,1,[3.172187],[2.172187] +970,1,[3.1106756],[2.1106756] +971,1,[4.9572906],[3.9572906] +972,2,[3.1228],[1.1228001] +973,1,[3.0851562],[2.0851562] +974,2,[3.1545804],[1.1545804] +975,2,[3.1753256],[1.1753256] +976,2,[5.0595064],[3.0595064] +977,1,[3.2913325],[2.2913325] +978,1,[3.2469296],[2.2469296] +979,1,[3.19781],[2.19781] +980,1,[3.1707637],[2.1707637] +981,1,[3.1726017],[2.1726017] 
+982,1,[12.970222],[11.970222] +983,2,[3.3452241],[1.3452241] +984,2,[3.160201],[1.1602011] +985,3,[3.2400534],[0.24005342] +986,1,[3.1186645],[2.1186645] +987,1,[3.1572332],[2.1572332] +988,1,[4.0706315],[3.0706315] +989,4,[3.1832623],[0.81673765] +990,1,[3.1288857],[2.1288857] +991,3,[3.168745],[0.16874504] +992,2,[3.156115],[1.156115] +993,1,[3.2863863],[2.2863863] +994,2,[3.1564226],[1.1564226] +995,3,[3.3418458],[0.34184575] +996,1,[3.246773],[2.246773] +997,3,[3.518927],[0.5189271] +998,4,[3.24142],[0.75857997] +999,4,[3.7289336],[0.27106643] +1000,3,[8.2343235],[5.2343235] +1001,1,[3.2522452],[2.2522452] +1002,2,[3.1166658],[1.1166658] +1003,1,[3.200237],[2.200237] +1004,2,[3.16824],[1.1682401] +1005,1,[3.2373698],[2.2373698] +1006,1,[3.343216],[2.343216] +1007,2,[3.1444592],[1.1444592] +1008,1,[3.1308184],[2.1308184] +1009,1,[3.1451716],[2.1451716] +1010,3,[3.0975192],[0.09751916] +1011,2,[3.216838],[1.2168379] +1012,2,[3.4234266],[1.4234266] +1013,1,[3.2994807],[2.2994807] +1014,2,[3.1136875],[1.1136875] +1015,3,[4.0062046],[1.0062046] +1016,1,[3.1745574],[2.1745574] +1017,2,[4.8435364],[2.8435364] +1018,3,[3.369738],[0.3697381] +1019,1,[3.2178898],[2.2178898] +1020,4,[3.535927],[0.46407294] +1021,1,[3.1844501],[2.1844501] +1022,2,[3.1703649],[1.1703649] +1023,2,[3.5916],[1.5916] +1024,2,[3.184064],[1.1840639] +1025,1,[3.145949],[2.145949] +1026,3,[3.140741],[0.14074111] +1027,1,[5.4480705],[4.4480705] +1028,4,[3.1100142],[0.8899858] +1029,1,[3.1867905],[2.1867905] +1030,3,[3.3179615],[0.31796145] +1031,3,[4.0305815],[1.0305815] +1032,2,[3.152249],[1.1522491] +1033,3,[3.245735],[0.24573493] +1034,1,[3.1323922],[2.1323922] +1035,1,[3.257076],[2.257076] +1036,4,[3.419042],[0.5809579] +1037,1,[3.1874468],[2.1874468] +1038,1,[3.242365],[2.242365] +1039,2,[3.2233136],[1.2233136] +1040,3,[3.4697335],[0.46973348] +1041,1,[3.1611717],[2.1611717] +1042,1,[3.1992183],[2.1992183] +1043,1,[3.2667491],[2.2667491] +1044,1,[3.0997858],[2.0997858] +1045,1,[3.1374273],[2.1374273] +1046,1,[3.1908665],[2.1908665] +1047,1,[3.5413609],[2.5413609] +1048,1,[3.2988892],[2.2988892] +1049,1,[3.3513594],[2.3513594] +1050,4,[3.2285936],[0.7714064] +1051,1,[3.14452],[2.14452] +1052,4,[3.1431596],[0.8568404] +1053,4,[3.118294],[0.881706] +1054,1,[3.129295],[2.129295] +1055,1,[3.224228],[2.224228] +1056,1,[3.3095036],[2.3095036] +1057,1,[3.4266508],[2.4266508] +1058,2,[3.1898842],[1.1898842] +1059,1,[3.1104448],[2.1104448] +1060,1,[3.1804469],[2.1804469] +1061,1,[3.1222541],[2.1222541] +1062,19,[15.5388975],[3.4611025] +1063,20,[22.179237],[2.1792374] +1064,22,[10.450364],[11.549636] +1065,20,[24.448717],[4.448717] +1066,15,[21.61987],[6.619869] +1067,19,[22.094051],[3.0940514] +1068,22,[23.102507],[1.1025066] +1069,21,[16.882895],[4.1171055] +1070,20,[11.877104],[8.122896] +1071,21,[21.70795],[0.7079506] +1072,21,[19.746094],[1.2539062] +1073,20,[27.215137],[7.2151375] +1074,15,[19.368698],[4.368698] +1075,20,[15.104443],[4.8955574] +1076,18,[18.460894],[0.46089363] +1077,17,[28.234188],[11.234188] +1078,17,[23.175749],[6.175749] +1079,21,[23.632223],[2.6322231] +1080,17,[21.544107],[4.5441074] +1081,21,[21.345716],[0.34571648] +1082,19,[21.00892],[2.0089207] +1083,20,[24.450518],[4.4505177] +1084,20,[17.164766],[2.8352337] +1085,20,[20.342718],[0.34271812] +1086,18,[26.552658],[8.552658] +1087,19,[16.355978],[2.644022] +1088,20,[23.612938],[3.612938] +1089,22,[29.040314],[7.0403137] +1090,22,[18.119604],[3.880396] +1091,19,[19.09084],[0.09083939] +1092,17,[22.638317],[5.638317] +1093,17,[20.47369],[3.47369] 
+1094,22,[17.25998],[4.7400208] +1095,19,[27.488934],[8.488934] +1096,20,[17.529873],[2.470127] +1097,22,[19.082392],[2.9176083] +1098,15,[17.037642],[2.0376415] +1099,17,[17.252693],[0.25269318] +1100,21,[18.985304],[2.0146961] +1101,19,[18.181696],[0.81830406] +1102,20,[20.975687],[0.975687] +1103,21,[20.806454],[0.1935463] +1104,19,[21.115637],[2.1156368] +1105,20,[16.556429],[3.443571] +1106,21,[24.6815],[3.6814995] +1107,17,[19.565466],[2.565466] +1108,22,[22.562656],[0.5626564] +1109,19,[14.324776],[4.6752243] +1110,17,[19.441229],[2.4412289] +1111,17,[15.953923],[1.0460768] +1112,21,[18.581043],[2.4189568] +1113,21,[20.350315],[0.6496849] +1114,15,[21.280254],[6.2802544] +1115,22,[20.400244],[1.5997562] +1116,22,[24.347878],[2.3478775] +1117,17,[18.835133],[1.8351326] +1118,19,[18.142487],[0.8575134] +1119,20,[22.261845],[2.2618446] +1120,15,[17.542467],[2.542467] +1121,22,[20.517443],[1.4825573] +1122,21,[24.415785],[3.4157848] +1123,16,[24.784279],[8.784279] +1124,17,[20.312834],[3.3128338] +1125,21,[19.750315],[1.2496853] +1126,21,[9.809784],[11.190216] +1127,22,[21.721622],[0.27837753] +1128,23,[26.150856],[3.150856] +1129,20,[22.160328],[2.160328] +1130,21,[24.41674],[3.4167404] +1131,23,[20.961937],[2.038063] +1132,17,[21.104795],[4.1047955] +1133,16,[17.920982],[1.9209824] +1134,22,[20.532383],[1.467617] +1135,17,[23.077417],[6.0774174] +1136,22,[29.443123],[7.443123] +1137,20,[22.415312],[2.4153118] +1138,16,[21.254673],[5.254673] +1139,23,[24.477194],[1.4771938] +1140,18,[19.710312],[1.7103119] +1141,20,[22.46466],[2.4646606] +1142,19,[17.877821],[1.122179] +1143,23,[19.064692],[3.9353085] +1144,19,[16.402872],[2.597128] +1145,23,[24.459423],[1.4594231] +1146,23,[22.913269],[0.08673096] +1147,23,[22.858559],[0.14144135] +1148,16,[18.32716],[2.32716] +1149,21,[22.379898],[1.3798981] +1150,20,[21.698614],[1.6986141] +1151,16,[27.614965],[11.614965] +1152,23,[19.947142],[3.0528584] +1153,21,[21.494366],[0.4943657] +1154,22,[24.766823],[2.7668228] +1155,23,[22.835901],[0.16409874] +1156,20,[21.864424],[1.8644238] +1157,20,[19.164333],[0.83566666] +1158,21,[24.995787],[3.9957867] +1159,21,[16.455194],[4.5448055] +1160,20,[15.144408],[4.855592] +1161,16,[19.912716],[3.912716] +1162,21,[18.70637],[2.2936306] +1163,23,[20.65193],[2.3480701] +1164,20,[23.410255],[3.4102554] +1165,23,[27.91116],[4.9111595] +1166,21,[15.034376],[5.965624] +1167,23,[25.996641],[2.9966412] +1168,21,[17.45536],[3.5446396] +1169,19,[22.04464],[3.0446396] +1170,23,[15.096693],[7.903307] +1171,18,[21.905884],[3.9058838] +1172,23,[20.192913],[2.807087] +1173,19,[17.740696],[1.259304] +1174,23,[23.765503],[0.7655029] +1175,18,[20.032183],[2.0321827] +1176,23,[19.254583],[3.7454166] +1177,20,[26.311459],[6.3114586] +1178,23,[25.539225],[2.5392246] +1179,21,[22.767065],[1.767065] +1180,21,[21.983627],[0.9836273] +1181,23,[30.34656],[7.3465595] +1182,22,[19.876192],[2.123808] +1183,20,[21.86585],[1.8658504] +1184,23,[18.64732],[4.35268] +1185,22,[18.543915],[3.4560852] +1186,22,[18.540735],[3.4592648] +1187,17,[20.84965],[3.8496494] +1188,23,[10.964709],[12.035291] +1189,21,[23.354555],[2.3545551] +1190,20,[17.447556],[2.5524445] +1191,18,[20.694086],[2.694086] +1192,22,[21.382147],[0.61785316] +1193,21,[25.27158],[4.2715797] +1194,21,[22.340326],[1.3403263] +1195,23,[22.749758],[0.25024223] +1196,23,[21.843262],[1.1567383] +1197,21,[16.666988],[4.3330116] +1198,19,[18.236248],[0.763752] +1199,24,[16.222383],[7.7776165] +1200,23,[20.66753],[2.33247] +1201,24,[28.183092],[4.183092] 
+1202,24,[22.686213],[1.3137875] +1203,20,[17.0644],[2.9356003] +1204,24,[23.905434],[0.09456635] +1205,21,[23.324242],[2.3242416] +1206,24,[25.581078],[1.5810776] +1207,19,[17.414581],[1.5854187] +1208,24,[27.546743],[3.5467434] +1209,18,[19.845148],[1.8451481] +1210,18,[19.996151],[1.996151] +1211,19,[23.147839],[4.1478386] +1212,18,[16.36201],[1.637991] +1213,19,[21.101332],[2.1013317] +1214,18,[28.610186],[10.610186] +1215,22,[28.615408],[6.615408] +1216,21,[21.31839],[0.3183899] +1217,21,[24.372234],[3.3722343] +1218,25,[19.279367],[5.7206326] +1219,24,[23.619184],[0.3808155] +1220,22,[22.454636],[0.45463562] +1221,17,[25.369682],[8.369682] +1222,17,[22.953316],[5.9533157] +1223,22,[21.832262],[0.16773796] +1224,22,[24.560436],[2.5604362] +1225,19,[20.59649],[1.5964909] +1226,21,[20.972813],[0.02718735] +1227,22,[23.800617],[1.8006172] +1228,21,[23.037066],[2.0370655] +1229,25,[26.359468],[1.3594685] +1230,17,[21.599356],[4.5993557] +1231,22,[23.944664],[1.944664] +1232,22,[22.425873],[0.4258728] +1233,21,[22.526278],[1.5262775] +1234,21,[22.790699],[1.790699] +1235,21,[25.858946],[4.858946] +1236,21,[27.01088],[6.0108795] +1237,20,[23.21109],[3.21109] +1238,24,[26.609325],[2.6093254] +1239,22,[15.441263],[6.558737] +1240,21,[24.148027],[3.1480274] +1241,21,[18.511553],[2.4884472] +1242,24,[25.634514],[1.6345139] +1243,21,[20.884155],[0.11584473] +1244,24,[19.823406],[4.176594] +1245,21,[22.985502],[1.9855022] +1246,20,[24.403885],[4.403885] +1247,21,[21.957504],[0.9575043] +1248,21,[26.987558],[5.9875584] +1249,22,[27.589176],[5.589176] +1250,22,[20.48987],[1.5101299] +1251,23,[21.54079],[1.4592094] +1252,26,[23.797766],[2.2022343] +1253,23,[22.481438],[0.5185623] +1254,20,[25.676199],[5.676199] +1255,23,[22.642912],[0.3570881] +1256,19,[28.82672],[9.826719] +1257,23,[15.17331],[7.8266897] +1258,23,[23.749264],[0.74926376] +1259,25,[16.813349],[8.186651] +1260,22,[21.14062],[0.8593807] +1261,19,[22.601166],[3.6011658] +1262,22,[27.14889],[5.1488895] +1263,23,[19.281572],[3.7184277] +1264,23,[30.542492],[7.542492] +1265,24,[30.858444],[6.858444] +1266,21,[23.188488],[2.188488] +1267,22,[21.420322],[0.5796776] +1268,24,[27.848654],[3.8486538] +1269,21,[21.919992],[0.91999245] +1270,20,[25.72238],[5.7223797] +1271,25,[23.901512],[1.0984879] +1272,22,[23.40214],[1.4021397] +1273,22,[24.466612],[2.4666119] +1274,22,[5.4107566],[16.589243] +1275,21,[18.877943],[2.122057] +1276,24,[21.345346],[2.6546535] +1277,23,[28.652308],[5.6523075] +1278,24,[26.334784],[2.3347836] +1279,20,[25.47488],[5.47488] +1280,19,[20.45571],[1.4557095] +1281,24,[28.961626],[4.961626] +1282,25,[25.619627],[0.619627] +1283,20,[24.097105],[4.097105] +1284,25,[20.226208],[4.7737923] +1285,22,[16.180727],[5.819273] +1286,21,[20.042658],[0.95734215] +1287,24,[23.238909],[0.76109123] +1288,20,[24.149137],[4.1491375] +1289,18,[22.465218],[4.4652176] +1290,22,[20.122551],[1.877449] +1291,18,[25.898167],[7.8981667] +1292,22,[23.788136],[1.7881355] +1293,18,[25.979391],[7.979391] +1294,22,[30.754173],[8.754173] +1295,24,[9.358152],[14.641848] +1296,22,[21.047764],[0.9522362] +1297,25,[30.257957],[5.2579575] +1298,22,[19.254782],[2.7452183] +1299,25,[22.836864],[2.1631355] +1300,24,[26.675901],[2.6759014] +1301,23,[18.132416],[4.867584] +1302,25,[21.061392],[3.9386082] +1303,26,[22.631],[3.3689995] +1304,25,[18.215445],[6.7845554] +1305,25,[21.911116],[3.0888844] +1306,24,[14.606463],[9.393537] +1307,25,[21.14467],[3.8553295] +1308,22,[20.30442],[1.6955795] +1309,22,[21.95005],[0.04994965] +1310,22,[20.290339],[1.7096615] 
+1311,18,[21.910757],[3.910757] +1312,22,[24.031971],[2.031971] +1313,22,[22.39517],[0.3951702] +1314,22,[22.649296],[0.6492958] +1315,20,[24.10896],[4.108959] +1316,21,[18.115871],[2.8841286] +1317,24,[20.825895],[3.1741047] +1318,25,[23.788929],[1.211071] +1319,23,[22.812384],[0.18761635] +1320,22,[27.675026],[5.675026] +1321,22,[22.903477],[0.9034767] +1322,22,[23.841856],[1.841856] +1323,18,[22.51813],[4.5181293] +1324,23,[20.731033],[2.2689667] +1325,22,[19.621897],[2.3781033] +1326,22,[21.899553],[0.1004467] +1327,24,[22.69483],[1.30517] +1328,21,[23.24138],[2.2413807] +1329,26,[26.34842],[0.3484192] +1330,22,[24.427692],[2.4276924] +1331,22,[25.30444],[3.3044395] +1332,22,[30.866673],[8.8666725] +1333,26,[31.155676],[5.155676] +1334,22,[24.17415],[2.1741505] +1335,21,[28.04414],[7.04414] +1336,23,[28.056736],[5.056736] +1337,23,[29.93988],[6.9398804] +1338,23,[26.11838],[3.1183796] +1339,23,[31.679031],[8.679031] +1340,26,[21.714306],[4.285694] +1341,24,[24.131437],[0.1314373] +1342,20,[20.556488],[0.55648804] +1343,26,[25.644827],[0.3551731] +1344,20,[21.751442],[1.751442] +1345,22,[27.257763],[5.257763] +1346,20,[24.12831],[4.1283092] +1347,20,[19.617441],[0.38255882] +1348,23,[20.923475],[2.0765247] +1349,21,[23.302212],[2.3022118] +1350,22,[19.968903],[2.0310974] +1351,20,[23.239304],[3.2393036] +1352,26,[24.396433],[1.6035671] +1353,21,[23.101797],[2.101797] +1354,26,[32.080784],[6.080784] +1355,23,[19.7333],[3.2667007] +1356,21,[23.327337],[2.3273373] +1357,22,[23.236282],[1.2362823] +1358,23,[25.163748],[2.1637478] +1359,22,[20.141966],[1.8580341] +1360,23,[25.409883],[2.4098835] +1361,21,[26.581173],[5.581173] +1362,19,[24.039665],[5.039665] +1363,22,[6.484479],[15.515521] +1364,23,[25.324375],[2.3243752] +1365,25,[22.404848],[2.595152] +1366,20,[23.733658],[3.7336578] +1367,23,[24.31968],[1.3196793] +1368,23,[19.690485],[3.309515] +1369,22,[23.583004],[1.583004] +1370,26,[22.323997],[3.6760025] +1371,22,[20.115969],[1.8840313] +1372,19,[29.387018],[10.387018] +1373,21,[23.93694],[2.9369392] +1374,23,[25.448698],[2.448698] +1375,25,[21.718193],[3.281807] +1376,21,[22.281374],[1.281374] +1377,22,[25.415281],[3.4152813] +1378,25,[23.826399],[1.1736012] +1379,20,[18.104969],[1.895031] +1380,21,[20.83899],[0.16101074] +1381,23,[21.34567],[1.6543293] +1382,20,[20.57612],[0.5761204] +1383,26,[26.454842],[0.4548416] +1384,19,[26.089216],[7.089216] +1385,23,[26.007439],[3.0074387] +1386,27,[23.531101],[3.4688988] +1387,21,[24.171986],[3.1719856] +1388,21,[25.093935],[4.093935] +1389,24,[18.40606],[5.5939407] +1390,26,[26.741854],[0.7418537] +1391,19,[20.958448],[1.9584484] +1392,23,[22.497227],[0.5027733] +1393,23,[25.036127],[2.036127] +1394,20,[22.460474],[2.460474] +1395,23,[18.768755],[4.231245] +1396,23,[20.696306],[2.3036938] +1397,26,[26.066273],[0.06627274] +1398,24,[33.217438],[9.217438] +1399,23,[22.435966],[0.5640335] +1400,19,[20.339182],[1.3391819] +1401,23,[25.9988],[2.9988003] +1402,19,[21.836927],[2.8369274] +1403,21,[25.94054],[4.9405403] +1404,24,[23.622005],[0.37799454] +1405,20,[19.916204],[0.08379555] +1406,23,[23.897635],[0.8976345] +1407,23,[24.831413],[1.8314133] +1408,25,[27.653587],[2.6535873] +1409,22,[25.016953],[3.0169525] +1410,23,[23.720854],[0.7208538] +1411,24,[18.858057],[5.141943] +1412,24,[26.877026],[2.8770256] +1413,27,[25.511894],[1.4881058] +1414,26,[21.615406],[4.384594] +1415,22,[22.386242],[0.3862419] +1416,23,[31.239397],[8.239397] +1417,26,[29.027382],[3.027382] +1418,23,[18.325115],[4.674885] +1419,23,[22.296711],[0.70328903] 
+1420,27,[32.173183],[5.1731834] +1421,24,[19.529194],[4.470806] +1422,23,[23.573746],[0.5737457] +1423,23,[25.552202],[2.5522022] +1424,25,[24.481651],[0.5183487] +1425,23,[26.658548],[3.6585484] +1426,23,[30.258999],[7.258999] +1427,23,[23.468616],[0.4686165] +1428,26,[32.82356],[6.823559] +1429,24,[22.24369],[1.7563095] +1430,23,[24.090473],[1.0904732] +1431,22,[22.50167],[0.50167084] +1432,24,[28.957159],[4.957159] +1433,24,[23.946163],[0.05383682] +1434,23,[25.886137],[2.886137] +1435,23,[23.990038],[0.9900379] +1436,23,[25.584717],[2.5847168] +1437,23,[24.75887],[1.7588692] +1438,23,[25.057583],[2.0575829] +1439,22,[24.93257],[2.9325695] +1440,22,[27.272448],[5.2724476] +1441,24,[27.981176],[3.9811764] +1442,23,[25.202707],[2.2027073] +1443,21,[18.112017],[2.8879833] +1444,27,[29.126373],[2.1263733] +1445,26,[29.848906],[3.8489056] +1446,25,[28.22967],[3.2296696] +1447,20,[17.223415],[2.7765846] +1448,26,[29.328907],[3.328907] +1449,24,[23.89827],[0.10173035] +1450,23,[15.9336195],[7.0663805] +1451,20,[24.051798],[4.051798] +1452,24,[24.284172],[0.28417206] +1453,23,[19.879055],[3.120945] +1454,24,[22.146975],[1.8530254] +1455,24,[28.455435],[4.455435] +1456,24,[24.641705],[0.64170456] +1457,21,[24.14161],[3.1416092] +1458,25,[28.384249],[3.3842487] +1459,21,[33.515053],[12.515053] +1460,26,[28.000664],[2.0006638] +1461,21,[21.236258],[0.23625755] +1462,22,[25.50521],[3.5052109] +1463,28,[24.386951],[3.6130486] +1464,24,[23.581446],[0.4185543] +1465,20,[22.708885],[2.7088852] +1466,23,[22.913034],[0.08696556] +1467,20,[27.081345],[7.0813446] +1468,24,[24.179264],[0.17926407] +1469,28,[19.758755],[8.241245] +1470,24,[23.179585],[0.8204155] +1471,22,[25.05333],[3.0533295] +1472,24,[28.582716],[4.582716] +1473,24,[32.38127],[8.381271] +1474,27,[27.474308],[0.474308] +1475,22,[18.023506],[3.9764938] +1476,20,[28.597334],[8.597334] +1477,23,[27.977365],[4.9773655] +1478,21,[21.982096],[0.9820957] +1479,24,[22.469774],[1.5302258] +1480,24,[27.471304],[3.471304] +1481,21,[14.153342],[6.8466578] +1482,23,[23.491074],[0.4910736] +1483,24,[25.507746],[1.5077457] +1484,28,[20.996382],[7.0036182] +1485,24,[21.296848],[2.7031517] +1486,20,[23.19448],[3.194481] +1487,25,[26.254583],[1.2545834] +1488,21,[22.600973],[1.6009731] +1489,23,[19.674446],[3.325554] +1490,20,[22.578947],[2.578947] +1491,27,[17.845474],[9.154526] +1492,24,[20.751478],[3.2485218] +1493,24,[31.409307],[7.4093075] +1494,25,[29.527145],[4.5271454] +1495,28,[21.620846],[6.379154] +1496,20,[19.934292],[0.06570816] +1497,23,[22.219807],[0.7801933] +1498,23,[22.463758],[0.53624153] +1499,24,[22.765533],[1.2344666] +1500,20,[22.671394],[2.6713943] +1501,26,[23.910055],[2.0899448] +1502,21,[17.91877],[3.0812302] +1503,24,[26.182371],[2.1823711] +1504,23,[22.9797],[0.02029991] +1505,24,[29.581556],[5.5815563] +1506,24,[21.936117],[2.0638828] +1507,24,[25.745195],[1.7451954] +1508,25,[39.549534],[14.549534] +1509,27,[21.826452],[5.1735477] +1510,24,[26.969437],[2.9694366] +1511,20,[24.93756],[4.937559] +1512,25,[32.085167],[7.085167] +1513,25,[27.725317],[2.725317] +1514,23,[20.527933],[2.4720669] +1515,24,[26.103123],[2.1031227] +1516,21,[25.430922],[4.4309216] +1517,24,[20.788292],[3.211708] +1518,25,[18.813797],[6.186203] +1519,24,[24.39783],[0.39782906] +1520,24,[25.920034],[1.9200344] +1521,22,[14.558437],[7.4415627] +1522,27,[25.941856],[1.0581436] +1523,26,[24.21473],[1.7852707] +1524,24,[25.773327],[1.7733269] +1525,24,[20.392355],[3.607645] +1526,23,[24.234808],[1.234808] +1527,23,[18.8131],[4.186899] 
+1528,22,[22.906069],[0.9060688] +1529,24,[28.427082],[4.427082] +1530,25,[28.547598],[3.547598] +1531,23,[22.2151],[0.78490067] +1532,24,[18.182762],[5.817238] +1533,24,[27.254717],[3.2547169] +1534,23,[26.229887],[3.229887] +1535,24,[25.162228],[1.1622276] +1536,23,[25.96109],[2.96109] +1537,25,[25.370485],[0.3704853] +1538,25,[25.0752],[0.07519913] +1539,24,[22.164682],[1.8353176] +1540,25,[21.225239],[3.7747612] +1541,28,[26.557652],[1.4423485] +1542,24,[23.069586],[0.9304142] +1543,24,[28.551706],[4.5517063] +1544,20,[25.29322],[5.2932205] +1545,22,[25.870127],[3.8701267] +1546,25,[26.410519],[1.4105186] +1547,21,[27.877014],[6.877014] +1548,24,[24.823418],[0.82341766] +1549,24,[21.705034],[2.2949657] +1550,24,[21.126337],[2.873663] +1551,27,[21.786802],[5.2131977] +1552,23,[22.507914],[0.4920864] +1553,25,[24.732815],[0.2671852] +1554,24,[27.205437],[3.2054367] +1555,24,[27.584929],[3.5849285] +1556,24,[24.149744],[0.14974403] +1557,24,[25.045542],[1.0455418] +1558,26,[21.351212],[4.6487885] +1559,22,[22.702316],[0.7023163] +1560,25,[23.570652],[1.429348] +1561,24,[22.726473],[1.2735271] +1562,23,[25.902328],[2.9023285] +1563,25,[22.280434],[2.7195663] +1564,26,[22.411148],[3.588852] +1565,24,[24.820562],[0.82056236] +1566,24,[25.814196],[1.8141956] +1567,24,[22.205929],[1.7940712] +1568,24,[25.00976],[1.0097599] +1569,20,[21.584433],[1.5844326] +1570,24,[27.81791],[3.8179092] +1571,24,[20.244253],[3.7557468] +1572,26,[23.6142],[2.3857994] +1573,24,[25.355844],[1.3558445] +1574,24,[24.352774],[0.35277367] +1575,28,[29.775728],[1.7757282] +1576,28,[30.430788],[2.430788] +1577,23,[23.366213],[0.36621284] +1578,24,[25.971256],[1.9712563] +1579,24,[22.55685],[1.4431496] +1580,24,[21.879105],[2.1208954] +1581,24,[19.32499],[4.6750107] +1582,20,[22.505257],[2.5052567] +1583,24,[23.57459],[0.42540932] +1584,27,[26.124058],[0.87594223] +1585,20,[25.477146],[5.477146] +1586,24,[22.035362],[1.9646378] +1587,22,[24.351686],[2.3516865] +1588,20,[21.407055],[1.4070549] +1589,24,[28.04908],[4.04908] +1590,28,[23.81871],[4.1812897] +1591,20,[29.503649],[9.503649] +1592,24,[24.737211],[0.7372112] +1593,20,[30.39751],[10.397511] +1594,28,[25.567991],[2.4320087] +1595,28,[24.392467],[3.6075325] +1596,20,[24.243248],[4.243248] +1597,26,[37.707573],[11.707573] +1598,24,[21.718573],[2.2814274] +1599,24,[20.660082],[3.3399181] +1600,21,[26.917622],[5.9176216] +1601,21,[23.945614],[2.9456139] +1602,25,[23.390167],[1.6098328] +1603,25,[30.749033],[5.749033] +1604,26,[21.9186],[4.0814] +1605,25,[18.144646],[6.8553543] +1606,21,[23.57364],[2.5736408] +1607,29,[26.414095],[2.585905] +1608,25,[24.101246],[0.8987541] +1609,29,[25.51721],[3.48279] +1610,25,[23.953577],[1.046423] +1611,25,[24.838978],[0.16102219] +1612,23,[25.514212],[2.5142117] +1613,25,[28.070808],[3.0708084] +1614,25,[28.470722],[3.4707222] +1615,25,[25.082962],[0.08296204] +1616,26,[25.822063],[0.17793655] +1617,25,[28.321867],[3.321867] +1618,29,[29.870377],[0.8703766] +1619,25,[25.516634],[0.516634] +1620,25,[17.601177],[7.398823] +1621,21,[24.459215],[3.4592152] +1622,29,[21.061655],[7.938345] +1623,25,[16.089077],[8.910923] +1624,25,[22.507751],[2.4922485] +1625,29,[20.756914],[8.243086] +1626,21,[20.934324],[0.06567574] +1627,25,[27.88245],[2.88245] +1628,26,[26.11098],[0.11097908] +1629,25,[26.175161],[1.1751614] +1630,29,[27.920382],[1.0796185] +1631,22,[23.189438],[1.1894379] +1632,28,[27.151125],[0.84887505] +1633,21,[26.891758],[5.891758] +1634,25,[22.476788],[2.5232124] +1635,28,[38.418217],[10.418217] 
+1636,21,[22.587496],[1.5874958] +1637,21,[24.022686],[3.022686] +1638,26,[25.04472],[0.9552803] +1639,28,[28.351515],[0.35151482] +1640,25,[23.498396],[1.5016041] +1641,26,[30.887539],[4.887539] +1642,25,[22.457447],[2.542553] +1643,25,[29.825306],[4.825306] +1644,25,[24.744991],[0.2550087] +1645,29,[31.771465],[2.7714653] +1646,25,[32.94763],[7.947632] +1647,23,[23.893747],[0.8937473] +1648,23,[26.637884],[3.6378841] +1649,25,[32.57565],[7.5756493] +1650,25,[31.130663],[6.130663] +1651,25,[29.994516],[4.9945164] +1652,21,[21.20372],[0.2037201] +1653,26,[24.703886],[1.296114] +1654,21,[19.440968],[1.5590324] +1655,25,[22.462364],[2.5376358] +1656,25,[27.230425],[2.230425] +1657,25,[27.57478],[2.5747795] +1658,24,[29.4655],[5.4655] +1659,22,[25.567345],[3.5673447] +1660,26,[33.165432],[7.165432] +1661,25,[23.234571],[1.7654285] +1662,25,[34.4294],[9.429401] +1663,21,[22.5638],[1.5638008] +1664,24,[27.047668],[3.0476685] +1665,24,[30.883589],[6.883589] +1666,25,[32.898273],[7.8982735] +1667,21,[35.06052],[14.06052] +1668,21,[26.924498],[5.9244976] +1669,25,[26.613653],[1.6136532] +1670,21,[26.026031],[5.0260315] +1671,24,[27.657425],[3.657425] +1672,21,[22.98296],[1.9829597] +1673,24,[24.659712],[0.65971184] +1674,23,[16.779682],[6.220318] +1675,25,[27.577778],[2.5777779] +1676,21,[19.514425],[1.4855747] +1677,25,[28.349123],[3.349123] +1678,21,[27.720028],[6.720028] +1679,23,[23.083172],[0.08317184] +1680,25,[28.78798],[3.7879791] +1681,24,[22.437393],[1.5626068] +1682,25,[26.121174],[1.1211739] +1683,24,[33.80332],[9.803322] +1684,25,[27.2508],[2.2507992] +1685,28,[38.50807],[10.508072] +1686,25,[26.512966],[1.5129662] +1687,21,[30.46543],[9.465429] +1688,25,[26.241291],[1.241291] +1689,23,[18.871178],[4.1288223] +1690,24,[21.753784],[2.2462158] +1691,25,[23.758162],[1.2418385] +1692,25,[25.962559],[0.96255875] +1693,24,[25.016165],[1.0161648] +1694,25,[30.567043],[5.5670433] +1695,24,[24.655003],[0.6550026] +1696,25,[30.878153],[5.878153] +1697,25,[25.222807],[0.22280693] +1698,25,[24.147665],[0.852335] +1699,25,[22.902193],[2.097807] +1700,23,[28.64612],[5.64612] +1701,21,[32.160492],[11.160492] +1702,25,[21.176538],[3.8234615] +1703,25,[28.238098],[3.2380981] +1704,23,[28.535189],[5.5351887] +1705,21,[19.927683],[1.0723171] +1706,23,[17.214981],[5.785019] +1707,25,[34.380527],[9.3805275] +1708,25,[20.993122],[4.006878] +1709,21,[27.439545],[6.4395447] +1710,29,[25.59669],[3.4033108] +1711,23,[26.716747],[3.7167473] +1712,23,[21.882214],[1.1177864] +1713,21,[19.677685],[1.3223152] +1714,25,[24.360744],[0.6392555] +1715,23,[24.710754],[1.7107544] +1716,21,[23.554457],[2.5544567] +1717,23,[22.48091],[0.51909065] +1718,29,[28.29435],[0.7056503] +1719,21,[31.89377],[10.89377] +1720,21,[32.61171],[11.61171] +1721,25,[28.784803],[3.7848034] +1722,21,[26.24507],[5.2450695] +1723,23,[29.516401],[6.5164013] +1724,21,[29.798481],[8.798481] +1725,21,[23.493965],[2.4939651] +1726,25,[15.789044],[9.210956] +1727,21,[27.08183],[6.081829] +1728,25,[27.661814],[2.6618137] +1729,25,[31.055662],[6.055662] +1730,21,[25.461346],[4.4613457] +1731,22,[28.316353],[6.316353] +1732,21,[27.0387],[6.0387] +1733,21,[24.315674],[3.3156738] +1734,25,[28.699451],[3.6994514] +1735,23,[28.19878],[5.19878] +1736,22,[23.986143],[1.9861431] +1737,24,[30.023987],[6.023987] +1738,22,[27.409979],[5.409979] +1739,22,[28.652552],[6.6525517] +1740,22,[27.484789],[5.484789] +1741,24,[23.645363],[0.35463715] +1742,26,[24.939985],[1.0600147] +1743,22,[25.696117],[3.6961174] +1744,22,[23.95114],[1.9511395] 
+1745,24,[25.825323],[1.8253231] +1746,22,[23.930883],[1.9308834] +1747,24,[34.645557],[10.645557] +1748,24,[22.872505],[1.1274948] +1749,22,[24.183853],[2.1838531] +1750,25,[24.189215],[0.8107853] +1751,30,[29.532108],[0.4678917] +1752,24,[26.046103],[2.0461025] +1753,26,[19.182377],[6.817623] +1754,22,[26.31842],[4.3184204] +1755,30,[21.423756],[8.576244] +1756,25,[22.188791],[2.8112087] +1757,30,[29.322153],[0.6778469] +1758,26,[29.096697],[3.0966969] +1759,25,[22.382936],[2.6170635] +1760,22,[21.400185],[0.59981537] +1761,26,[18.05701],[7.9429893] +1762,22,[22.58373],[0.5837307] +1763,24,[23.709404],[0.290596] +1764,22,[18.112558],[3.8874416] +1765,25,[29.063292],[4.0632915] +1766,30,[32.051125],[2.0511246] +1767,22,[23.899376],[1.8993759] +1768,26,[23.79779],[2.2022095] +1769,26,[27.019293],[1.0192928] +1770,22,[27.908537],[5.908537] +1771,22,[24.810244],[2.8102436] +1772,26,[23.26442],[2.7355804] +1773,27,[29.74291],[2.7429104] +1774,22,[24.458569],[2.4585686] +1775,22,[22.441175],[0.44117546] +1776,30,[23.639687],[6.3603134] +1777,26,[25.561018],[0.438982] +1778,29,[22.982006],[6.017994] +1779,22,[27.096136],[5.096136] +1780,24,[21.66237],[2.3376293] +1781,26,[20.483244],[5.516756] +1782,23,[24.56837],[1.5683708] +1783,27,[25.440315],[1.5596848] +1784,30,[36.061478],[6.0614777] +1785,27,[35.332542],[8.332542] +1786,22,[44.057224],[22.057224] +1787,22,[27.061905],[5.061905] +1788,26,[34.5782],[8.578201] +1789,24,[22.803411],[1.1965885] +1790,30,[28.653826],[1.3461742] +1791,26,[29.427513],[3.4275131] +1792,26,[23.790909],[2.2090912] +1793,26,[30.271812],[4.2718124] +1794,26,[21.799421],[4.2005787] +1795,26,[25.56975],[0.43025017] +1796,22,[32.73258],[10.732578] +1797,22,[22.51619],[0.5161896] +1798,30,[25.721098],[4.278902] +1799,30,[32.339245],[2.3392448] +1800,22,[27.670446],[5.6704464] +1801,26,[26.623299],[0.62329865] +1802,27,[24.837734],[2.1622658] +1803,22,[22.47896],[0.47896004] +1804,27,[28.242828],[1.2428284] +1805,30,[23.896278],[6.1037216] +1806,30,[18.238102],[11.761898] +1807,30,[19.53201],[10.467991] +1808,30,[26.93816],[3.06184] +1809,30,[27.273806],[2.7261944] +1810,22,[29.919397],[7.9193974] +1811,26,[34.196815],[8.1968155] +1812,29,[34.40504],[5.4050407] +1813,25,[27.832691],[2.8326912] +1814,28,[28.204222],[0.20422173] +1815,24,[27.365139],[3.365139] +1816,26,[20.26642],[5.7335796] +1817,30,[28.211172],[1.7888279] +1818,29,[16.022463],[12.977537] +1819,26,[24.844143],[1.1558571] +1820,27,[26.404385],[0.5956154] +1821,30,[25.554306],[4.445694] +1822,22,[21.197046],[0.8029537] +1823,26,[30.914234],[4.914234] +1824,30,[23.1811],[6.818899] +1825,26,[21.808008],[4.191992] +1826,30,[25.55818],[4.44182] +1827,23,[22.840378],[0.15962219] +1828,27,[22.109344],[4.8906555] +1829,26,[27.392235],[1.3922348] +1830,28,[33.347893],[5.3478928] +1831,24,[33.264404],[9.264404] +1832,31,[31.416086],[0.4160862] +1833,31,[36.45488],[5.4548798] +1834,23,[21.914713],[1.0852871] +1835,27,[26.827948],[0.17205238] +1836,23,[21.991655],[1.0083447] +1837,28,[24.71535],[3.2846508] +1838,23,[29.324347],[6.3243465] +1839,23,[33.68595],[10.685951] +1840,27,[36.040325],[9.040325] +1841,24,[30.254766],[6.2547665] +1842,25,[32.474403],[7.4744034] +1843,23,[36.26493],[13.264931] +1844,23,[24.135298],[1.1352978] +1845,27,[22.966885],[4.0331154] +1846,23,[39.019592],[16.019592] +1847,27,[29.883352],[2.8833523] +1848,23,[36.455444],[13.455444] +1849,26,[26.18442],[0.18441963] +1850,30,[22.631308],[7.3686924] +1851,31,[28.21241],[2.78759] +1852,31,[29.852467],[1.1475334] +1853,30,[20.979757],[9.020243] 
+1854,30,[30.229433],[0.22943306] +1855,30,[26.169731],[3.8302689] +1856,27,[23.306715],[3.693285] +1857,26,[23.80573],[2.1942692] +1858,28,[26.767353],[1.232647] +1859,27,[28.649677],[1.6496773] +1860,30,[28.991152],[1.0088482] +1861,28,[29.60642],[1.6064205] +1862,27,[22.386908],[4.6130924] +1863,28,[31.873014],[3.8730145] +1864,27,[27.647213],[0.647213] +1865,28,[26.514757],[1.4852428] +1866,24,[20.970438],[3.029562] +1867,28,[39.6242],[11.624199] +1868,26,[22.323273],[3.6767273] +1869,27,[28.91053],[1.9105301] +1870,27,[17.808994],[9.191006] +1871,27,[27.36001],[0.36001015] +1872,28,[29.942339],[1.942339] +1873,28,[28.326185],[0.32618523] +1874,27,[26.241123],[0.7588768] +1875,28,[27.299202],[0.70079803] +1876,26,[23.661772],[2.3382282] +1877,26,[22.314392],[3.685608] +1878,27,[27.288708],[0.28870773] +1879,28,[25.0972],[2.9027996] +1880,26,[28.582157],[2.5821571] +1881,26,[26.523287],[0.5232868] +1882,27,[30.701927],[3.7019272] +1883,27,[24.75825],[2.2417507] +1884,24,[28.709984],[4.709984] +1885,24,[28.460241],[4.4602413] +1886,31,[26.770082],[4.2299175] +1887,27,[23.630936],[3.3690643] +1888,31,[31.363287],[0.36328697] +1889,30,[30.93795],[0.93795013] +1890,28,[25.17161],[2.8283901] +1891,25,[30.689398],[5.689398] +1892,28,[24.938833],[3.0611668] +1893,28,[26.228712],[1.7712879] +1894,30,[26.264376],[3.7356243] +1895,26,[26.166779],[0.16677856] +1896,29,[26.328495],[2.671505] +1897,28,[23.31222],[4.6877804] +1898,24,[29.154352],[5.154352] +1899,25,[26.572664],[1.5726643] +1900,31,[24.591494],[6.4085064] +1901,30,[28.4371],[1.5629005] +1902,27,[27.992815],[0.992815] +1903,30,[22.368526],[7.6314735] +1904,28,[32.3733],[4.3732986] +1905,26,[28.351952],[2.3519516] +1906,25,[29.936481],[4.9364815] +1907,28,[33.569828],[5.569828] +1908,28,[28.232075],[0.23207474] +1909,26,[17.154057],[8.845943] +1910,30,[31.6158],[1.6158009] +1911,24,[17.698633],[6.301367] +1912,29,[21.141602],[7.8583984] +1913,28,[25.155039],[2.8449612] +1914,28,[26.182848],[1.817152] +1915,27,[23.07225],[3.9277496] +1916,24,[24.355394],[0.35539436] +1917,30,[25.811312],[4.1886883] +1918,30,[34.85853],[4.858528] +1919,30,[26.156143],[3.8438568] +1920,30,[31.017834],[1.0178337] +1921,30,[27.078009],[2.9219913] +1922,30,[28.75069],[1.2493095] +1923,28,[25.376717],[2.6232834] +1924,28,[31.92803],[3.92803] +1925,29,[22.938438],[6.0615616] +1926,28,[30.631605],[2.6316051] +1927,30,[28.584826],[1.4151745] +1928,27,[28.494959],[1.4949589] +1929,28,[22.223171],[5.776829] +1930,27,[28.492277],[1.4922771] +1931,27,[10.526697],[16.473303] +1932,28,[26.858688],[1.1413116] +1933,30,[32.135357],[2.135357] +1934,30,[20.868567],[9.1314335] +1935,27,[20.243645],[6.7563553] +1936,24,[28.723526],[4.723526] +1937,30,[31.582932],[1.5829315] +1938,28,[25.184933],[2.8150673] +1939,28,[25.826204],[2.1737957] +1940,26,[21.343943],[4.6560574] +1941,30,[28.446272],[1.5537281] +1942,32,[31.315786],[0.68421364] +1943,28,[25.153664],[2.8463364] +1944,25,[23.90812],[1.0918808] +1945,28,[29.462471],[1.462471] +1946,29,[24.296745],[4.7032547] +1947,27,[25.618612],[1.3813877] +1948,27,[26.237177],[0.7628231] +1949,28,[31.541039],[3.5410385] +1950,29,[26.711784],[2.2882156] +1951,31,[24.145233],[6.854767] +1952,25,[24.189737],[0.8102627] +1953,25,[28.218464],[3.218464] +1954,29,[27.90901],[1.0909901] +1955,28,[26.374552],[1.6254482] +1956,27,[19.212162],[7.787838] +1957,25,[25.79953],[0.79953] +1958,27,[31.964123],[4.964123] +1959,28,[24.76232],[3.2376804] +1960,31,[26.119768],[4.880232] +1961,31,[32.424595],[1.4245949] +1962,31,[27.561916],[3.4380836] 
+1963,28,[24.839808],[3.1601925] +1964,28,[26.706287],[1.2937126] +1965,25,[7.7566786],[17.24332] +1966,29,[24.747175],[4.252825] +1967,28,[23.364386],[4.6356144] +1968,27,[24.064205],[2.9357948] +1969,28,[25.285639],[2.7143612] +1970,31,[30.430014],[0.56998634] +1971,28,[27.047232],[0.9527683] +1972,28,[27.548523],[0.45147705] +1973,28,[27.108768],[0.89123154] +1974,31,[23.248045],[7.751955] +1975,28,[30.314745],[2.314745] +1976,31,[26.818556],[4.181444] +1977,29,[27.9583],[1.0417004] +1978,28,[22.530787],[5.4692135] +1979,24,[24.919863],[0.91986275] +1980,28,[25.301239],[2.698761] +1981,27,[27.635338],[0.6353378] +1982,26,[31.46711],[5.4671097] +1983,29,[30.770151],[1.7701511] +1984,24,[31.073406],[7.073406] +1985,28,[21.470085],[6.529915] +1986,24,[31.185093],[7.185093] +1987,26,[19.653383],[6.3466167] +1988,24,[24.560497],[0.5604973] +1989,26,[24.798956],[1.2010441] +1990,26,[20.401213],[5.5987873] +1991,24,[23.377117],[0.62288284] +1992,26,[25.42493],[0.5750694] +1993,24,[23.939968],[0.06003189] +1994,26,[25.71958],[0.2804203] +1995,24,[30.540154],[6.5401535] +1996,26,[27.887283],[1.8872833] +1997,24,[21.58227],[2.4177303] +1998,26,[32.005108],[6.005108] +1999,26,[28.034637],[2.0346375] +2000,30,[22.607567],[7.392433] +2001,24,[26.678368],[2.6783676] +2002,24,[37.979305],[13.979305] +2003,24,[24.526978],[0.52697754] +2004,26,[23.580843],[2.419157] +2005,24,[22.14739],[1.8526096] +2006,26,[24.849339],[1.1506615] +2007,26,[27.217018],[1.2170181] +2008,24,[25.957157],[1.9571571] +2009,32,[22.98451],[9.01549] +2010,24,[25.184391],[1.184391] +2011,32,[33.208683],[1.208683] +2012,24,[28.641054],[4.641054] +2013,24,[32.4315],[8.4314995] +2014,26,[36.53548],[10.5354805] +2015,24,[19.719418],[4.2805824] +2016,24,[22.571941],[1.4280586] +2017,27,[24.271122],[2.728878] +2018,28,[27.648905],[0.3510952] +2019,26,[26.942537],[0.9425373] +2020,29,[22.34569],[6.65431] +2021,28,[27.667732],[0.33226776] +2022,1,[3.2886636],[2.2886636] +2023,1,[4.70275],[3.7027502] +2024,2,[3.17809],[1.1780901] +2025,1,[3.1181931],[2.1181931] +2026,1,[3.117079],[2.117079] +2027,1,[6.905375],[5.905375] +2028,1,[3.1893291],[2.1893291] +2029,1,[3.1617177],[2.1617177] +2030,1,[3.4365463],[2.4365463] +2031,1,[3.193208],[2.193208] +2032,1,[3.188603],[2.188603] +2033,1,[3.1720774],[2.1720774] +2034,1,[3.15839],[2.15839] +2035,1,[3.3722746],[2.3722746] +2036,1,[3.1475859],[2.1475859] +2037,1,[3.1548111],[2.1548111] +2038,1,[4.020244],[3.0202441] +2039,1,[3.294876],[2.294876] +2040,1,[3.139811],[2.139811] +2041,1,[3.1389751],[2.1389751] +2042,1,[3.2356043],[2.2356043] +2043,1,[3.2299],[2.2299] +2044,1,[3.5517378],[2.5517378] +2045,1,[3.286905],[2.286905] +2046,1,[9.310202],[8.310202] +2047,1,[32.030796],[31.030796] +2048,1,[3.1205165],[2.1205165] +2049,1,[3.172534],[2.172534] +2050,1,[3.2266145],[2.2266145] +2051,1,[3.1430926],[2.1430926] +2052,1,[3.1984022],[2.1984022] +2053,1,[3.253212],[2.253212] +2054,1,[3.2222126],[2.2222126] +2055,1,[3.181961],[2.181961] +2056,1,[3.1575732],[2.1575732] +2057,1,[5.454814],[4.454814] +2058,1,[3.1687953],[2.1687953] +2059,1,[3.186259],[2.186259] +2060,1,[3.3108552],[2.3108552] +2061,1,[3.2601175],[2.2601175] +2062,1,[3.113767],[2.113767] +2063,1,[3.283783],[2.283783] +2064,1,[3.1408987],[2.1408987] +2065,1,[3.117567],[2.117567] +2066,1,[3.1922169],[2.1922169] +2067,1,[3.2013726],[2.2013726] +2068,1,[3.544467],[2.544467] +2069,1,[4.0613017],[3.0613017] +2070,1,[3.11251],[2.11251] +2071,1,[4.357894],[3.357894] +2072,1,[3.1536748],[2.1536748] +2073,1,[3.2329025],[2.2329025] 
+2074,1,[3.9820063],[2.9820063] +2075,1,[3.2527592],[2.2527592] +2076,1,[3.2060044],[2.2060044] +2077,1,[9.353794],[8.353794] +2078,1,[3.2954867],[2.2954867] +2079,1,[3.2570987],[2.2570987] +2080,1,[3.3638415],[2.3638415] +2081,1,[3.1277225],[2.1277225] +2082,1,[9.225929],[8.225929] +2083,1,[3.464883],[2.464883] +2084,1,[3.295057],[2.295057] +2085,1,[3.6657329],[2.6657329] +2086,1,[3.1340778],[2.1340778] +2087,1,[6.5264707],[5.5264707] +2088,1,[3.2946236],[2.2946236] +2089,1,[3.283199],[2.283199] +2090,1,[10.035612],[9.035612] +2091,1,[3.2124474],[2.2124474] +2092,1,[5.9562626],[4.9562626] +2093,1,[26.121649],[25.121649] +2094,1,[3.1740077],[2.1740077] +2095,1,[33.187218],[32.187218] +2096,1,[9.709847],[8.709847] +2097,1,[3.136236],[2.136236] +2098,2,[3.1331792],[1.1331792] +2099,1,[10.361238],[9.361238] +2100,1,[4.3375864],[3.3375864] +2101,1,[3.7765207],[2.7765207] +2102,2,[3.3220563],[1.3220563] +2103,1,[3.4230886],[2.4230886] +2104,1,[5.174164],[4.174164] +2105,1,[3.2332067],[2.2332067] +2106,3,[3.5450408],[0.54504085] +2107,1,[3.1974638],[2.1974638] +2108,2,[3.1782045],[1.1782045] +2109,2,[4.1677694],[2.1677694] +2110,1,[3.2870653],[2.2870653] +2111,1,[5.327016],[4.327016] +2112,1,[3.2188692],[2.2188692] +2113,1,[3.2463806],[2.2463806] +2114,1,[3.179941],[2.179941] +2115,5,[3.311514],[1.6884861] +2116,1,[3.1873245],[2.1873245] +2117,1,[3.288409],[2.288409] +2118,1,[3.2897701],[2.2897701] +2119,2,[3.1999097],[1.1999097] +2120,1,[11.93688],[10.93688] +2121,1,[3.1571436],[2.1571436] +2122,1,[3.1705797],[2.1705797] +2123,1,[3.1725485],[2.1725485] +2124,2,[3.1968293],[1.1968293] +2125,1,[3.2622287],[2.2622287] +2126,2,[3.2310812],[1.2310812] +2127,1,[3.2057142],[2.2057142] +2128,1,[4.702433],[3.702433] +2129,2,[3.1405723],[1.1405723] +2130,1,[3.149079],[2.149079] +2131,1,[3.2993994],[2.2993994] +2132,1,[3.3928642],[2.3928642] +2133,1,[13.473383],[12.473383] +2134,1,[3.20509],[2.20509] +2135,1,[3.1975837],[2.1975837] +2136,1,[3.178543],[2.178543] +2137,1,[3.2890718],[2.2890718] +2138,1,[3.1508334],[2.1508334] +2139,1,[6.378495],[5.378495] +2140,1,[3.3159673],[2.3159673] +2141,1,[3.2931035],[2.2931035] +2142,1,[3.1289015],[2.1289015] +2143,1,[3.2388988],[2.2388988] +2144,1,[3.2411838],[2.2411838] +2145,1,[5.537387],[4.537387] +2146,1,[17.015776],[16.015776] +2147,1,[3.1301665],[2.1301665] +2148,1,[4.726635],[3.726635] +2149,1,[5.1911],[4.1911] +2150,1,[3.1898582],[2.1898582] +2151,1,[3.223525],[2.223525] +2152,1,[3.2055054],[2.2055054] +2153,1,[4.3153186],[3.3153186] +2154,1,[3.2119842],[2.2119842] +2155,1,[3.1775222],[2.1775222] +2156,2,[3.177304],[1.177304] +2157,1,[3.2779074],[2.2779074] +2158,1,[3.1985083],[2.1985083] +2159,1,[3.3156073],[2.3156073] +2160,1,[3.1648726],[2.1648726] +2161,1,[3.7069633],[2.7069633] +2162,1,[3.5341132],[2.5341132] +2163,1,[3.550767],[2.550767] +2164,1,[6.0553336],[5.0553336] +2165,1,[5.6063066],[4.6063066] +2166,1,[3.0864925],[2.0864925] +2167,1,[3.2124033],[2.2124033] +2168,1,[3.158923],[2.158923] +2169,1,[4.105155],[3.105155] +2170,1,[3.1064312],[2.1064312] +2171,1,[3.2874863],[2.2874863] +2172,1,[3.167016],[2.167016] +2173,1,[3.2874343],[2.2874343] +2174,1,[9.974651],[8.974651] +2175,1,[3.1224391],[2.1224391] +2176,1,[3.1604452],[2.1604452] +2177,1,[3.8665187],[2.8665187] +2178,1,[3.25174],[2.25174] +2179,2,[3.273025],[1.273025] +2180,1,[5.542048],[4.542048] +2181,1,[3.113676],[2.113676] +2182,1,[3.1843681],[2.1843681] +2183,1,[3.1525126],[2.1525126] +2184,1,[3.2075872],[2.2075872] +2185,1,[3.2233493],[2.2233493] +2186,33,[28.18217],[4.817829] 
+2187,25,[27.137686],[2.1376858] +2188,29,[27.44008],[1.5599194] +2189,25,[27.529032],[2.5290318] +2190,29,[22.790876],[6.2091236] +2191,28,[25.373625],[2.6263752] +2192,33,[37.061153],[4.0611534] +2193,29,[25.000076],[3.9999237] +2194,25,[29.063726],[4.0637264] +2195,30,[28.681204],[1.3187962] +2196,29,[25.119223],[3.8807774] +2197,26,[23.571133],[2.4288673] +2198,33,[27.708557],[5.291443] +2199,30,[30.336994],[0.33699417] +2200,29,[25.812996],[3.187004] +2201,30,[25.681753],[4.318247] +2202,27,[26.745481],[0.2545185] +2203,29,[31.189213],[2.1892128] +2204,33,[33.384132],[0.3841324] +2205,33,[28.32385],[4.6761494] +2206,33,[28.606966],[4.393034] +2207,33,[43.690716],[10.690716] +2208,25,[27.484982],[2.4849815] +2209,25,[24.717693],[0.28230667] +2210,29,[27.566418],[1.4335823] +2211,27,[23.007128],[3.9928722] +2212,32,[32.15164],[0.15164185] +2213,33,[33.49899],[0.4989891] +2214,30,[27.241201],[2.7587986] +2215,31,[33.569828],[2.569828] +2216,26,[29.74633],[3.7463303] +2217,29,[31.077677],[2.0776768] +2218,33,[33.80969],[0.80968857] +2219,33,[28.75964],[4.2403603] +2220,25,[14.45211],[10.54789] +2221,33,[25.306255],[7.6937447] +2222,26,[31.022343],[5.0223427] +2223,29,[26.357061],[2.6429386] +2224,29,[24.374977],[4.625023] +2225,29,[24.138073],[4.861927] +2226,29,[35.018005],[6.0180054] +2227,30,[25.681816],[4.318184] +2228,33,[30.071949],[2.928051] +2229,30,[29.847786],[0.15221405] +2230,32,[30.725355],[1.2746449] +2231,29,[24.029848],[4.970152] +2232,29,[29.9773],[0.97730064] +2233,29,[27.253042],[1.7469578] +2234,26,[26.814005],[0.8140049] +2235,25,[25.761168],[0.7611675] +2236,31,[39.469357],[8.469357] +2237,33,[30.328737],[2.6712627] +2238,25,[24.993696],[0.00630379] +2239,31,[23.563864],[7.4361362] +2240,33,[16.719603],[16.280397] +2241,32,[23.215317],[8.784683] +2242,30,[25.759106],[4.2408943] +2243,29,[23.78929],[5.2107105] +2244,29,[26.703516],[2.296484] +2245,33,[24.398996],[8.601004] +2246,29,[18.980692],[10.019308] +2247,33,[28.220512],[4.7794876] +2248,30,[24.469378],[5.5306225] +2249,33,[24.22693],[8.773069] +2250,33,[34.874268],[1.8742676] +2251,29,[24.0534],[4.9466] +2252,29,[30.892593],[1.8925934] +2253,30,[28.220121],[1.7798786] +2254,29,[28.312496],[0.6875038] +2255,25,[25.988268],[0.9882679] +2256,33,[28.79997],[4.2000294] +2257,33,[27.984125],[5.015875] +2258,30,[21.51387],[8.48613] +2259,29,[25.450373],[3.5496273] +2260,28,[27.24759],[0.7524109] +2261,33,[27.36026],[5.63974] +2262,30,[28.43781],[1.5621891] +2263,29,[19.144331],[9.855669] +2264,33,[20.663568],[12.3364315] +2265,30,[44.814392],[14.814392] +2266,33,[30.71717],[2.2828293] +2267,28,[31.129738],[3.1297379] +2268,30,[26.67281],[3.3271904] +2269,33,[28.41535],[4.58465] +2270,28,[24.419638],[3.5803623] +2271,33,[27.252892],[5.7471085] +2272,31,[28.273367],[2.726633] +2273,33,[18.395504],[14.604496] +2274,33,[28.058046],[4.9419537] +2275,30,[28.895082],[1.1049175] +2276,32,[27.697786],[4.3022137] +2277,30,[22.48808],[7.511921] +2278,29,[31.518198],[2.518198] +2279,29,[26.152975],[2.847025] +2280,28,[26.370766],[1.6292343] +2281,33,[28.483196],[4.5168037] +2282,26,[29.297771],[3.2977715] +2283,27,[29.130653],[2.1306534] +2284,29,[23.588799],[5.4112015] +2285,25,[28.420193],[3.4201927] +2286,25,[19.024193],[5.975807] +2287,31,[24.013062],[6.9869385] +2288,29,[30.490494],[1.4904938] +2289,27,[36.638554],[9.638554] +2290,31,[30.818401],[0.18159866] +2291,25,[31.855862],[6.8558617] +2292,25,[27.079288],[2.0792885] +2293,25,[24.566378],[0.43362236] +2294,30,[30.271812],[0.27181244] +2295,33,[28.596659],[4.4033413] 
+2296,28,[26.960388],[1.0396118] +2297,28,[39.368828],[11.368828] +2298,29,[22.456663],[6.543337] +2299,25,[27.412045],[2.4120445] +2300,25,[27.3096],[2.3096008] +2301,29,[22.701283],[6.2987175] +2302,29,[27.495646],[1.5043545] +2303,25,[27.16668],[2.1666794] +2304,30,[29.768064],[0.2319355] +2305,29,[24.39382],[4.60618] +2306,29,[27.541134],[1.4588661] +2307,28,[26.867006],[1.1329937] +2308,28,[26.877972],[1.1220284] +2309,25,[12.407159],[12.592841] +2310,25,[22.716516],[2.2834835] +2311,29,[22.894623],[6.105377] +2312,29,[30.997501],[1.9975014] +2313,32,[27.72918],[4.2708206] +2314,29,[34.60194],[5.60194] +2315,29,[27.0472],[1.9528008] +2316,30,[26.190865],[3.8091354] +2317,25,[24.647825],[0.35217476] +2318,29,[27.177074],[1.8229256] +2319,32,[25.454462],[6.545538] +2320,30,[25.71922],[4.280781] +2321,27,[29.483578],[2.4835777] +2322,25,[22.335785],[2.664215] +2323,29,[26.495314],[2.5046864] +2324,29,[23.14245],[5.8575497] +2325,27,[20.680567],[6.319433] +2326,27,[28.700598],[1.7005978] +2327,33,[32.382862],[0.6171379] +2328,29,[21.475073],[7.524927] +2329,33,[26.927788],[6.072212] +2330,33,[32.22865],[0.77135086] +2331,33,[28.898403],[4.101597] +2332,29,[28.61672],[0.3832798] +2333,25,[12.878162],[12.121838] +2334,26,[24.71535],[1.2846508] +2335,32,[28.569475],[3.4305248] +2336,34,[29.860443],[4.139557] +2337,33,[25.819471],[7.1805286] +2338,30,[26.64863],[3.3513699] +2339,30,[22.343172],[7.656828] +2340,26,[29.965057],[3.9650574] +2341,30,[26.424522],[3.5754776] +2342,30,[23.949837],[6.0501633] +2343,30,[30.622274],[0.6222744] +2344,28,[25.994057],[2.0059433] +2345,31,[26.407263],[4.592737] +2346,28,[30.143593],[2.1435928] +2347,33,[33.29461],[0.29460907] +2348,30,[25.78193],[4.218069] +2349,32,[34.93145],[2.93145] +2350,33,[36.21663],[3.216629] +2351,31,[32.361645],[1.3616447] +2352,34,[29.210272],[4.789728] +2353,32,[36.133026],[4.133026] +2354,33,[33.912292],[0.9122925] +2355,34,[39.813538],[5.8135376] +2356,32,[33.443394],[1.4433937] +2357,28,[23.623579],[4.376421] +2358,28,[19.782108],[8.217892] +2359,28,[20.547552],[7.452448] +2360,28,[28.798578],[0.79857826] +2361,28,[24.699543],[3.300457] +2362,26,[35.15486],[9.154861] +2363,30,[26.26463],[3.7353706] +2364,26,[26.589495],[0.5894947] +2365,28,[30.848095],[2.848095] +2366,29,[23.838034],[5.1619663] +2367,28,[34.350304],[6.3503036] +2368,32,[29.664787],[2.3352127] +2369,32,[28.299173],[3.7008266] +2370,28,[24.796564],[3.203436] +2371,32,[29.318436],[2.6815643] +2372,28,[25.777485],[2.222515] +2373,26,[28.8669],[2.8668995] +2374,34,[31.04625],[2.9537506] +2375,28,[30.204384],[2.2043839] +2376,28,[26.788288],[1.2117119] +2377,28,[35.64039],[7.6403885] +2378,32,[31.141348],[0.8586521] +2379,28,[24.885633],[3.1143665] +2380,27,[29.242323],[2.242323] +2381,30,[31.002615],[1.002615] +2382,28,[26.816124],[1.183876] +2383,30,[28.243837],[1.7561626] +2384,26,[19.387604],[6.6123962] +2385,28,[34.673077],[6.6730766] +2386,30,[25.707432],[4.292568] +2387,28,[34.11357],[6.113571] +2388,30,[32.72273],[2.7227287] +2389,28,[24.82778],[3.1722202] +2390,30,[39.328465],[9.3284645] +2391,26,[35.575912],[9.575912] +2392,26,[18.243973],[7.756027] +2393,30,[28.572332],[1.4276676] +2394,28,[25.229448],[2.7705517] +2395,32,[29.132292],[2.8677082] +2396,30,[27.02209],[2.977909] +2397,26,[33.31098],[7.3109818] +2398,28,[22.1778],[5.822201] +2399,28,[31.598415],[3.5984154] +2400,29,[32.490265],[3.490265] +2401,29,[31.019337],[2.0193367] +2402,35,[33.538063],[1.461937] +2403,28,[30.07488],[2.0748806] +2404,27,[29.194225],[2.1942253] 
+2405,28,[27.268902],[0.7310982] +2406,29,[33.984642],[4.984642] +2407,32,[25.073772],[6.9262276] +2408,27,[22.895308],[4.1046925] +2409,32,[16.83826],[15.161739] +2410,35,[21.960129],[13.039871] +2411,31,[28.504353],[2.4956474] +2412,27,[18.763802],[8.236198] +2413,27,[29.396479],[2.3964787] +2414,27,[23.4964],[3.5035992] +2415,34,[25.713915],[8.286085] +2416,27,[25.335201],[1.6647987] +2417,27,[26.081383],[0.91861725] +2418,35,[37.877064],[2.8770638] +2419,27,[28.543701],[1.5437012] +2420,27,[27.457384],[0.4573841] +2421,32,[27.547749],[4.4522514] +2422,34,[30.719765],[3.2802353] +2423,34,[32.706013],[1.2939873] +2424,28,[25.947897],[2.052103] +2425,32,[22.982363],[9.017637] +2426,32,[20.004963],[11.995037] +2427,30,[25.296915],[4.703085] +2428,27,[25.77698],[1.2230206] +2429,32,[27.191599],[4.808401] +2430,32,[26.452118],[5.547882] +2431,32,[29.170158],[2.8298416] +2432,33,[29.045313],[3.954687] +2433,31,[25.897623],[5.102377] +2434,34,[38.65891],[4.658909] +2435,34,[24.272202],[9.727798] +2436,31,[34.94505],[3.9450493] +2437,34,[29.51713],[4.48287] +2438,31,[32.80581],[1.805809] +2439,31,[30.200207],[0.79979324] +2440,32,[32.07107],[0.07107162] +2441,28,[24.128162],[3.8718376] +2442,32,[27.057457],[4.942543] +2443,27,[30.221298],[3.2212982] +2444,32,[31.092693],[0.9073067] +2445,34,[37.78067],[3.7806702] +2446,31,[21.958202],[9.041798] +2447,32,[33.53817],[1.5381699] +2448,33,[32.834404],[0.16559601] +2449,27,[26.612907],[0.3870926] +2450,34,[30.006733],[3.993267] +2451,32,[27.705986],[4.294014] +2452,32,[19.022419],[12.977581] +2453,34,[26.74056],[7.2594395] +2454,34,[35.26302],[1.2630196] +2455,31,[36.748066],[5.748066] +2456,33,[24.12481],[8.875191] +2457,31,[29.782871],[1.2171288] +2458,28,[20.636831],[7.3631687] +2459,30,[29.393415],[0.60658455] +2460,32,[34.84495],[2.8449516] +2461,32,[28.16127],[3.8387299] +2462,34,[34.259842],[0.25984192] +2463,29,[27.487612],[1.5123882] +2464,34,[27.239191],[6.760809] +2465,31,[39.09284],[8.092838] +2466,34,[26.63354],[7.366461] +2467,33,[30.664583],[2.3354168] +2468,31,[28.25315],[2.746849] +2469,32,[19.556892],[12.443108] +2470,32,[29.157497],[2.8425026] +2471,34,[32.284527],[1.7154732] +2472,34,[29.742046],[4.2579536] +2473,31,[24.415329],[6.584671] +2474,32,[37.339077],[5.339077] +2475,31,[35.700592],[4.700592] +2476,31,[28.48058],[2.5194206] +2477,30,[24.533972],[5.466028] +2478,32,[24.641804],[7.3581963] +2479,27,[31.219286],[4.219286] +2480,33,[29.06729],[3.9327106] +2481,31,[24.596645],[6.4033546] +2482,32,[24.685732],[7.314268] +2483,31,[23.95855],[7.0414505] +2484,29,[31.597723],[2.597723] +2485,29,[25.344055],[3.6559448] +2486,33,[25.458036],[7.5419636] +2487,34,[27.33131],[6.6686897] +2488,32,[27.67073],[4.3292694] +2489,34,[25.551313],[8.448687] +2490,34,[25.98855],[8.01145] +2491,29,[29.381773],[0.381773] +2492,28,[28.103727],[0.10372734] +2493,35,[35.676086],[0.6760864] +2494,27,[27.26136],[0.26136017] +2495,31,[39.66642],[8.66642] +2496,33,[33.007957],[0.00795746] +2497,35,[34.70779],[0.29220963] +2498,31,[25.215172],[5.784828] +2499,35,[29.518555],[5.4814453] +2500,33,[38.655346],[5.655346] +2501,32,[32.04855],[0.04854965] +2502,29,[31.910948],[2.9109478] +2503,31,[32.493946],[1.4939461] +2504,32,[28.40532],[3.5946808] +2505,32,[26.003008],[5.996992] +2506,32,[39.101845],[7.101845] +2507,30,[24.586208],[5.4137917] +2508,35,[26.898289],[8.101711] +2509,35,[37.507957],[2.5079575] +2510,32,[28.23264],[3.7673607] +2511,33,[28.379671],[4.620329] +2512,35,[25.838528],[9.161472] +2513,35,[38.638985],[3.6389847] 
+2514,31,[28.327677],[2.6723232] +2515,34,[39.70539],[5.705391] +2516,35,[32.862408],[2.1375923] +2517,33,[26.400429],[6.599571] +2518,31,[28.904152],[2.095848] +2519,32,[34.23835],[2.23835] +2520,31,[28.965948],[2.034052] +2521,33,[30.691595],[2.308405] +2522,33,[31.17724],[1.8227596] +2523,31,[32.37759],[1.3775902] +2524,33,[31.378374],[1.6216259] +2525,31,[30.49875],[0.5012493] +2526,32,[30.548374],[1.4516258] +2527,35,[26.956139],[8.043861] +2528,30,[33.046066],[3.0460663] +2529,32,[24.475525],[7.524475] +2530,31,[27.84538],[3.1546192] +2531,35,[28.650167],[6.3498325] +2532,33,[30.569561],[2.430439] +2533,34,[27.90348],[6.0965195] +2534,31,[30.309425],[0.69057465] +2535,30,[26.259626],[3.7403736] +2536,33,[26.823833],[6.1761665] +2537,31,[29.860542],[1.1394577] +2538,35,[31.219004],[3.7809963] +2539,35,[36.384247],[1.3842468] +2540,35,[40.99777],[5.9977684] +2541,34,[32.381622],[1.6183777] +2542,31,[29.452068],[1.5479317] +2543,32,[32.027443],[0.02744293] +2544,31,[39.113235],[8.113235] +2545,30,[34.377266],[4.377266] +2546,32,[29.534681],[2.4653187] +2547,35,[38.590233],[3.5902328] +2548,32,[28.512264],[3.4877357] +2549,32,[30.318853],[1.6811466] +2550,32,[28.245312],[3.7546883] +2551,30,[30.21077],[0.21076965] +2552,31,[39.833786],[8.833786] +2553,35,[26.936792],[8.063208] +2554,31,[28.50493],[2.4950695] +2555,34,[36.409515],[2.4095154] +2556,33,[27.762043],[5.237957] +2557,30,[25.622196],[4.377804] +2558,33,[39.14448],[6.1444817] +2559,32,[34.126846],[2.1268463] +2560,33,[21.54097],[11.45903] +2561,32,[26.996777],[5.0032234] +2562,32,[23.797213],[8.202787] +2563,34,[27.843235],[6.156765] +2564,32,[29.157497],[2.8425026] +2565,35,[27.479153],[7.5208473] +2566,35,[24.079504],[10.920496] +2567,30,[28.652708],[1.347292] +2568,32,[31.343794],[0.65620613] +2569,35,[30.161154],[4.838846] +2570,32,[31.614676],[0.38532448] +2571,33,[26.669647],[6.330353] +2572,36,[27.277107],[8.722893] +2573,31,[30.628237],[0.37176323] +2574,31,[31.166687],[0.16668701] +2575,28,[25.430374],[2.5696259] +2576,31,[19.829412],[11.170588] +2577,32,[33.673603],[1.673603] +2578,32,[24.103132],[7.8968678] +2579,32,[30.899363],[1.1006374] +2580,32,[30.260979],[1.7390213] +2581,32,[26.00462],[5.9953804] +2582,28,[32.179455],[4.179455] +2583,30,[30.824728],[0.824728] +2584,28,[33.94567],[5.945671] +2585,33,[32.593666],[0.40633392] +2586,32,[35.144207],[3.144207] +2587,32,[36.41415],[4.41415] +2588,32,[37.263176],[5.263176] +2589,32,[29.78658],[2.2134209] +2590,28,[34.387154],[6.3871536] +2591,32,[13.87391],[18.126091] +2592,32,[26.195768],[5.8042316] +2593,36,[28.97269],[7.0273094] +2594,32,[31.993038],[0.00696182] +2595,28,[45.631863],[17.631863] +2596,32,[27.225233],[4.774767] +2597,32,[32.043983],[0.04398346] +2598,36,[37.66316],[1.6631584] +2599,32,[25.296225],[6.7037754] +2600,32,[29.961805],[2.0381947] +2601,28,[39.970913],[11.970913] +2602,28,[33.19431],[5.194309] +2603,32,[40.274696],[8.274696] +2604,32,[27.477308],[4.5226917] +2605,28,[32.23757],[4.2375717] +2606,34,[40.20542],[6.2054214] +2607,31,[29.996458],[1.003542] +2608,34,[30.439522],[3.5604782] +2609,33,[34.127037],[1.127037] +2610,32,[31.104664],[0.89533615] +2611,35,[34.184505],[0.81549454] +2612,31,[25.231703],[5.768297] +2613,32,[29.50936],[2.4906406] +2614,33,[30.035086],[2.9649143] +2615,32,[36.06852],[4.0685196] +2616,28,[25.286837],[2.7131634] +2617,33,[30.490316],[2.5096836] +2618,32,[22.355038],[9.644962] +2619,34,[31.54901],[2.4509907] +2620,32,[24.092371],[7.907629] +2621,32,[41.05713],[9.057129] +2622,32,[28.23141],[3.768591] 
+2623,28,[23.633083],[4.3669167] +2624,32,[32.874546],[0.87454605] +2625,32,[30.1292],[1.8708] +2626,37,[33.752007],[3.2479935] +2627,29,[29.98007],[0.9800701] +2628,33,[35.638947],[2.6389465] +2629,33,[41.293224],[8.293224] +2630,29,[31.729834],[2.7298336] +2631,29,[29.87233],[0.8723297] +2632,29,[32.877296],[3.8772964] +2633,33,[33.212547],[0.2125473] +2634,33,[36.02194],[3.0219383] +2635,33,[39.055405],[6.0554047] +2636,33,[43.691483],[10.691483] +2637,29,[30.650068],[1.6500683] +2638,29,[39.097744],[10.097744] +2639,33,[28.85067],[4.14933] +2640,33,[35.237007],[2.2370071] +2641,33,[22.936235],[10.063765] +2642,33,[32.241756],[0.75824356] +2643,37,[42.080307],[5.080307] +2644,33,[33.809784],[0.80978394] +2645,33,[41.505463],[8.505463] +2646,29,[24.06626],[4.9337406] +2647,33,[15.177019],[17.82298] +2648,29,[30.932014],[1.9320145] +2649,29,[30.001072],[1.0010719] +2650,33,[24.570177],[8.429823] +2651,29,[31.268028],[2.2680283] +2652,29,[37.866146],[8.866146] +2653,33,[34.519974],[1.5199738] +2654,37,[37.572475],[0.57247543] +2655,29,[30.658499],[1.6584988] +2656,33,[28.243227],[4.756773] +2657,37,[39.299644],[2.2996445] +2658,33,[32.006985],[0.9930153] +2659,33,[26.274496],[6.725504] +2660,29,[30.910048],[1.9100475] +2661,37,[38.140347],[1.1403465] +2662,33,[33.082832],[0.08283234] +2663,37,[26.525267],[10.474733] +2664,33,[42.315937],[9.315937] +2665,33,[39.179707],[6.1797066] +2666,34,[36.993904],[2.993904] +2667,31,[35.096863],[4.096863] +2668,29,[27.449806],[1.5501938] +2669,33,[32.710663],[0.28933716] +2670,29,[31.498323],[2.4983234] +2671,33,[35.977608],[2.9776077] +2672,29,[30.19515],[1.1951504] +2673,29,[33.84945],[4.849449] +2674,33,[26.460152],[6.5398483] +2675,33,[36.0246],[3.024601] +2676,29,[23.909636],[5.0903645] +2677,33,[35.022232],[2.022232] +2678,33,[29.03368],[3.96632] +2679,32,[35.187733],[3.1877327] +2680,33,[43.158714],[10.158714] +2681,33,[35.714897],[2.7148972] +2682,33,[31.008924],[1.9910755] +2683,29,[28.994041],[0.00595856] +2684,33,[33.345978],[0.34597778] +2685,33,[26.045065],[6.954935] +2686,37,[32.790096],[4.2099037] +2687,31,[32.63965],[1.6396484] +2688,33,[30.89057],[2.1094303] +2689,33,[21.669365],[11.330635] +2690,33,[21.037104],[11.962896] +2691,29,[35.879124],[6.8791237] +2692,33,[27.276674],[5.7233257] +2693,34,[43.047523],[9.0475235] +2694,29,[40.51177],[11.511768] +2695,34,[30.905844],[3.0941563] +2696,33,[28.093124],[4.9068756] +2697,29,[30.693329],[1.6933289] +2698,34,[34.72881],[0.72880936] +2699,33,[30.187576],[2.8124237] +2700,29,[25.616598],[3.3834019] +2701,38,[35.23517],[2.7648315] +2702,35,[33.08669],[1.913311] +2703,36,[41.962585],[5.9625854] +2704,34,[27.718102],[6.2818985] +2705,34,[38.12471],[4.12471] +2706,34,[39.24299],[5.2429886] +2707,35,[39.419167],[4.4191666] +2708,31,[37.3549],[6.3549004] +2709,34,[29.783709],[4.2162914] +2710,34,[31.56491],[2.435089] +2711,30,[31.226824],[1.2268238] +2712,30,[19.575016],[10.424984] +2713,34,[27.629421],[6.370579] +2714,36,[34.465538],[1.534462] +2715,34,[36.332985],[2.332985] +2716,38,[34.465538],[3.534462] +2717,38,[31.97341],[6.0265903] +2718,30,[28.142168],[1.857832] +2719,30,[31.162758],[1.1627579] +2720,34,[30.079649],[3.920351] +2721,34,[36.002724],[2.0027237] +2722,34,[33.674652],[0.3253479] +2723,34,[30.994936],[3.005064] +2724,30,[27.492723],[2.5072765] +2725,38,[39.543644],[1.543644] +2726,34,[21.377495],[12.622505] +2727,34,[32.95798],[1.0420189] +2728,34,[29.831755],[4.1682453] +2729,31,[36.171337],[5.171337] +2730,34,[38.180153],[4.180153] +2731,30,[32.027733],[2.0277328] 
+2732,35,[39.874054],[4.874054] +2733,37,[33.22909],[3.7709084] +2734,33,[31.61892],[1.3810806] +2735,32,[40.719284],[8.719284] +2736,31,[31.686872],[0.6868725] +2737,32,[30.797413],[1.2025871] +2738,33,[39.271755],[6.271755] +2739,34,[29.967026],[4.0329742] +2740,31,[29.817884],[1.1821156] +2741,33,[31.915373],[1.0846272] +2742,37,[43.714775],[6.714775] +2743,31,[32.854553],[1.8545532] +2744,36,[42.557693],[6.5576935] +2745,30,[35.74714],[5.747139] +2746,34,[36.576218],[2.5762177] +2747,32,[48.71095],[16.710949] +2748,30,[33.931652],[3.931652] +2749,35,[36.706017],[1.7060165] +2750,30,[28.881348],[1.1186523] +2751,30,[28.107468],[1.8925323] +2752,32,[26.580835],[5.4191647] +2753,30,[33.11636],[3.1163597] +2754,35,[30.668472],[4.3315277] +2755,32,[26.625498],[5.374502] +2756,30,[26.638529],[3.3614712] +2757,37,[38.827488],[1.827488] +2758,32,[31.528898],[0.47110176] +2759,30,[23.201403],[6.7985973] +2760,38,[36.659428],[1.3405724] +2761,30,[28.598381],[1.401619] +2762,32,[33.717033],[1.7170334] +2763,31,[35.95229],[4.9522896] +2764,31,[30.57384],[0.4261608] +2765,39,[36.696472],[2.3035278] +2766,32,[33.917053],[1.9170532] +2767,39,[36.229115],[2.7708855] +2768,39,[28.841825],[10.1581745] +2769,32,[35.493553],[3.4935532] +2770,32,[28.482178],[3.5178223] +2771,35,[37.353638],[2.3536377] +2772,33,[29.341282],[3.658718] +2773,32,[40.282177],[8.282177] +2774,37,[28.53135],[8.468651] +2775,32,[32.890125],[0.8901253] +2776,36,[30.009743],[5.9902573] +2777,32,[35.93241],[3.9324112] +2778,31,[36.098232],[5.0982323] +2779,36,[36.46938],[0.46937943] +2780,32,[31.802786],[0.19721413] +2781,32,[28.091425],[3.908575] +2782,32,[39.33084],[7.330841] +2783,31,[32.501545],[1.501545] +2784,31,[40.734406],[9.7344055] +2785,34,[35.552086],[1.5520859] +2786,39,[27.864372],[11.135628] +2787,31,[28.33892],[2.6610794] +2788,33,[31.527142],[1.4728584] +2789,32,[29.601053],[2.3989468] +2790,33,[37.55797],[4.557968] +2791,40,[60.81445],[20.81445] +2792,40,[47.287334],[7.2873344] +2793,34,[39.063263],[5.063263] +2794,36,[36.15226],[0.15225983] +2795,32,[29.36213],[2.6378708] +2796,32,[30.781252],[1.2187481] +2797,34,[30.916325],[3.0836754] +2798,32,[34.74648],[2.746479] +2799,32,[38.14926],[6.1492615] +2800,34,[26.242632],[7.757368] +2801,34,[32.67621],[1.3237915] +2802,32,[29.867405],[2.132595] +2803,34,[33.995655],[0.00434494] +2804,34,[53.436203],[19.436203] +2805,39,[42.29875],[3.2987518] +2806,32,[31.623535],[0.37646484] +2807,34,[32.598595],[1.4014053] +2808,34,[32.885452],[1.1145477] +2809,32,[35.0911],[3.0910988] +2810,40,[34.908825],[5.091175] +2811,34,[36.25301],[2.2530098] +2812,35,[35.13261],[0.13261032] +2813,32,[29.846176],[2.1538239] +2814,34,[34.118717],[0.11871719] +2815,32,[36.480873],[4.480873] +2816,32,[28.886736],[3.113264] +2817,32,[39.6209],[7.620899] +2818,40,[45.157],[5.1570015] +2819,40,[37.93811],[2.0618896] +2820,40,[43.216747],[3.2167473] +2821,40,[34.156326],[5.8436737] +2822,35,[31.226824],[3.7731762] +2823,35,[33.65621],[1.3437881] +2824,33,[35.305073],[2.3050728] +2825,33,[32.03649],[0.9635086] +2826,37,[34.540108],[2.4598923] +2827,35,[28.991346],[6.0086536] +2828,34,[29.209814],[4.790186] +2829,39,[39.245983],[0.24598312] +2830,41,[41.20902],[0.2090187] +2831,37,[30.562897],[6.4371033] +2832,35,[30.042444],[4.957556] +2833,37,[33.81451],[3.1854897] +2834,37,[28.309868],[8.690132] +2835,36,[34.7449],[1.2551003] +2836,33,[28.398613],[4.601387] +2837,35,[36.370735],[1.3707352] +2838,35,[33.22945],[1.7705498] +2839,38,[36.62438],[1.3756218] +2840,37,[37.999405],[0.9994049] 
+2841,36,[27.526953],[8.473047] +2842,35,[30.541819],[4.4581814] +2843,33,[28.62114],[4.3788605] +2844,33,[34.65225],[1.6522484] +2845,36,[32.678493],[3.3215065] +2846,36,[30.126612],[5.8733883] +2847,33,[43.308376],[10.308376] +2848,37,[37.12001],[0.12001038] +2849,33,[33.038586],[0.03858566] +2850,38,[33.30743],[4.6925697] +2851,33,[32.522537],[0.47746277] +2852,33,[25.248701],[7.751299] +2853,40,[38.18486],[1.8151398] +2854,37,[41.94371],[4.9437103] +2855,37,[37.339203],[0.33920288] +2856,41,[30.12911],[10.87089] +2857,33,[31.245775],[1.7542248] +2858,33,[35.483425],[2.4834251] +2859,38,[26.94594],[11.05406] +2860,33,[27.21074],[5.789261] +2861,33,[33.350037],[0.35003662] +2862,38,[37.654778],[0.34522247] +2863,36,[38.20295],[2.2029495] +2864,34,[33.31315],[0.6868515] +2865,35,[36.64446],[1.6444588] +2866,37,[36.278255],[0.72174454] +2867,38,[40.91224],[2.912239] +2868,38,[37.673946],[0.32605362] +2869,38,[33.61665],[4.3833504] +2870,40,[37.48038],[2.519619] +2871,41,[40.094585],[0.9054146] +2872,38,[39.231705],[1.2317047] +2873,35,[34.206944],[0.7930565] +2874,38,[35.010567],[2.9894333] +2875,39,[34.982384],[4.0176163] +2876,39,[37.69987],[1.3001289] +2877,39,[36.348217],[2.651783] +2878,36,[33.936794],[2.0632057] +2879,41,[36.0376],[4.9623985] +2880,38,[32.976643],[5.0233574] +2881,41,[27.89249],[13.10751] +2882,37,[33.95672],[3.0432816] +2883,41,[47.64992],[6.6499214] +2884,37,[26.69584],[10.304159] +2885,41,[36.051712],[4.948288] +2886,40,[38.281094],[1.7189064] +2887,41,[35.659393],[5.3406067] +2888,41,[31.62516],[9.37484] +2889,41,[31.516567],[9.483433] +2890,41,[42.884773],[1.8847733] +2891,39,[41.10496],[2.1049614] +2892,38,[43.595215],[5.595215] +2893,41,[32.088295],[8.911705] +2894,41,[39.84704],[1.1529617] +2895,38,[32.47672],[5.523281] +2896,41,[27.774273],[13.225727] +2897,38,[39.18499],[1.1849899] +2898,38,[28.576365],[9.4236355] +2899,38,[24.932043],[13.067957] +2900,38,[32.62706],[5.37294] +2901,36,[30.679281],[5.320719] +2902,41,[34.888443],[6.111557] +2903,36,[43.182594],[7.1825943] +2904,41,[27.106632],[13.893368] +2905,41,[31.255774],[9.744226] +2906,37,[19.76567],[17.23433] +2907,41,[29.905132],[11.094868] +2908,39,[35.960896],[3.0391045] +2909,41,[25.890917],[15.109083] +2910,40,[33.266582],[6.7334175] +2911,40,[36.088387],[3.9116135] +2912,38,[32.33906],[5.6609383] +2913,41,[25.206333],[15.793667] +2914,37,[41.101986],[4.101986] +2915,41,[36.53274],[4.4672585] +2916,38,[35.458683],[2.541317] +2917,36,[31.029041],[4.9709587] +2918,35,[22.662928],[12.337072] +2919,38,[35.382275],[2.6177254] +2920,34,[35.57539],[1.5753899] +2921,38,[21.962648],[16.037352] +2922,39,[38.232536],[0.7674637] +2923,40,[38.15033],[1.8496704] +2924,35,[39.71885],[4.718849] +2925,36,[39.31007],[3.31007] +2926,41,[34.238785],[6.761215] +2927,38,[37.721775],[0.27822495] +2928,35,[44.366745],[9.366745] +2929,41,[36.842743],[4.157257] +2930,38,[33.214905],[4.785095] +2931,38,[38.28808],[0.2880783] +2932,38,[39.919033],[1.919033] +2933,37,[26.320137],[10.679863] +2934,37,[38.93918],[1.9391785] +2935,36,[27.662796],[8.337204] +2936,40,[41.332348],[1.3323479] +2937,39,[36.49668],[2.5033188] +2938,40,[31.504961],[8.495039] +2939,39,[40.411484],[1.4114838] +2940,38,[27.558222],[10.441778] +2941,41,[34.465443],[6.5345573] +2942,40,[42.182743],[2.182743] +2943,38,[31.940798],[6.059202] +2944,37,[38.530323],[1.530323] +2945,37,[41.144943],[4.144943] +2946,38,[39.51737],[1.5173683] +2947,38,[38.379787],[0.37978745] +2948,39,[27.954817],[11.045183] +2949,40,[44.25726],[4.2572594] 
+2950,38,[33.450977],[4.5490227] +2951,38,[27.763544],[10.236456] +2952,35,[33.073605],[1.9263954] +2953,36,[12.189925],[23.810074] +2954,5,[3.2699935],[1.7300065] +2955,3,[3.1705797],[0.17057967] +2956,3,[3.1540244],[0.15402436] +2957,2,[3.2149904],[1.2149904] +2958,2,[3.4059122],[1.4059122] +2959,2,[3.647515],[1.647515] +2960,1,[3.3931544],[2.3931544] +2961,1,[3.244766],[2.244766] +2962,5,[3.2086134],[1.7913866] +2963,5,[3.1544771],[1.8455229] +2964,5,[3.2556775],[1.7443225] +2965,1,[3.1398373],[2.1398373] +2966,1,[8.155471],[7.155471] +2967,4,[3.254882],[0.7451179] +2968,1,[3.6503901],[2.6503901] +2969,5,[3.2547202],[1.7452798] +2970,5,[3.1583767],[1.8416233] +2971,1,[3.1348217],[2.1348217] +2972,4,[15.705701],[11.705701] +2973,5,[3.2388344],[1.7611656] +2974,5,[5.807556],[0.80755615] +2975,2,[4.096847],[2.096847] +2976,1,[5.4999313],[4.4999313] +2977,1,[3.1898582],[2.1898582] +2978,2,[3.14193],[1.1419301] +2979,1,[3.1441035],[2.1441035] +2980,5,[16.253317],[11.253317] +2981,5,[6.9961677],[1.9961677] +2982,5,[3.2387328],[1.7612672] +2983,5,[4.97162],[0.02837992] +2984,1,[3.1234467],[2.1234467] +2985,1,[3.1374273],[2.1374273] +2986,3,[3.2180324],[0.21803236] +2987,1,[3.1143878],[2.1143878] +2988,1,[3.1232061],[2.1232061] +2989,1,[5.222722],[4.222722] +2990,1,[3.1425517],[2.1425517] +2991,1,[8.045458],[7.045458] +2992,1,[3.6212513],[2.6212513] +2993,1,[3.1178732],[2.1178732] +2994,1,[3.259358],[2.259358] +2995,2,[3.1690042],[1.1690042] +2996,2,[3.3907444],[1.3907444] +2997,2,[3.1954098],[1.1954098] +2998,2,[4.7036157],[2.7036157] +2999,1,[3.2889357],[2.2889357] +3000,1,[3.2382147],[2.2382147] +3001,1,[3.152073],[2.152073] +3002,4,[3.2540717],[0.7459283] +3003,3,[3.311514],[0.3115139] +3004,5,[3.2272186],[1.7727814] +3005,1,[5.174164],[4.174164] +3006,1,[3.1964831],[2.1964831] +3007,3,[3.3931668],[0.39316678] +3008,1,[24.110596],[23.110596] +3009,3,[4.1843038],[1.1843038] +3010,1,[3.3452241],[2.3452241] +3011,1,[3.4036553],[2.4036553] +3012,1,[6.8203607],[5.8203607] +3013,3,[6.0329256],[3.0329256] +3014,1,[3.2922118],[2.2922118] +3015,3,[3.2303743],[0.23037434] +3016,2,[3.1157384],[1.1157384] +3017,1,[3.1000533],[2.1000533] +3018,2,[5.576455],[3.576455] +3019,1,[3.5839376],[2.5839376] +3020,1,[3.1643481],[2.1643481] +3021,1,[6.0550323],[5.0550323] +3022,2,[3.1720774],[1.1720774] +3023,2,[3.152242],[1.152242] +3024,1,[3.583203],[2.583203] +3025,1,[3.6685956],[2.6685956] +3026,2,[5.566851],[3.5668511] +3027,1,[3.2003431],[2.2003431] +3028,2,[3.235931],[1.2359309] +3029,2,[3.7946937],[1.7946937] +3030,1,[7.322575],[6.322575] +3031,1,[3.1371543],[2.1371543] +3032,1,[3.4426348],[2.4426348] +3033,2,[3.3361785],[1.3361785] +3034,2,[7.688425],[5.688425] +3035,3,[3.3625195],[0.3625195] +3036,3,[4.001888],[1.0018878] +3037,2,[3.231429],[1.2314291] +3038,5,[3.3398888],[1.6601112] +3039,2,[3.1261218],[1.1261218] +3040,2,[3.6193273],[1.6193273] +3041,1,[3.8503706],[2.8503706] +3042,2,[3.859227],[1.859227] +3043,3,[3.702118],[0.7021179] +3044,2,[3.4309804],[1.4309804] +3045,4,[3.230598],[0.769402] +3046,2,[3.482391],[1.4823911] +3047,1,[3.17079],[2.17079] +3048,2,[3.5528033],[1.5528033] +3049,1,[3.115594],[2.115594] +3050,2,[4.7689495],[2.7689495] +3051,3,[5.0000634],[2.0000634] +3052,2,[3.1787443],[1.1787443] +3053,2,[3.1728015],[1.1728015] +3054,2,[3.942737],[1.9427371] +3055,3,[6.413683],[3.413683] +3056,2,[3.5845873],[1.5845873] +3057,2,[3.1964374],[1.1964374] +3058,1,[3.1020238],[2.1020238] +3059,2,[3.3396657],[1.3396657] +3060,3,[3.1100042],[0.11000419] +3061,1,[3.1512122],[2.1512122] 
+3062,3,[3.4365847],[0.4365847] +3063,2,[3.1272962],[1.1272962] +3064,2,[3.1849172],[1.1849172] +3065,1,[3.341866],[2.341866] +3066,3,[3.4257565],[0.42575645] +3067,3,[3.1827521],[0.18275213] +3068,1,[3.1686502],[2.1686502] +3069,3,[3.1307611],[0.13076115] +3070,1,[15.845147],[14.845147] +3071,3,[7.036076],[4.036076] +3072,1,[4.0957737],[3.0957737] +3073,1,[3.2315047],[2.2315047] +3074,2,[3.171418],[1.171418] +3075,2,[3.73403],[1.73403] +3076,1,[3.2238855],[2.2238855] +3077,1,[3.337442],[2.337442] +3078,2,[4.0000653],[2.0000653] +3079,37,[37.52003],[0.520031] +3080,36,[32.744934],[3.255066] +3081,39,[39.121365],[0.12136459] +3082,41,[31.805374],[9.194626] +3083,36,[41.618263],[5.6182632] +3084,41,[39.631905],[1.3680954] +3085,42,[41.550697],[0.44930267] +3086,35,[34.909203],[0.09079742] +3087,39,[39.942368],[0.94236755] +3088,39,[36.281086],[2.718914] +3089,39,[36.47816],[2.5218391] +3090,35,[39.960148],[4.960148] +3091,39,[35.11586],[3.88414] +3092,40,[39.762962],[0.23703766] +3093,37,[36.43141],[0.56858826] +3094,35,[39.985493],[4.9854927] +3095,38,[38.72516],[0.7251587] +3096,39,[30.219353],[8.780647] +3097,42,[42.965065],[0.965065] +3098,39,[30.089151],[8.910849] +3099,39,[41.31898],[2.3189812] +3100,39,[40.953094],[1.9530945] +3101,39,[38.087666],[0.91233444] +3102,39,[45.187374],[6.187374] +3103,39,[33.978085],[5.0219154] +3104,39,[41.055897],[2.0558968] +3105,37,[36.967716],[0.03228378] +3106,35,[30.744698],[4.2553024] +3107,40,[33.4899],[6.5101013] +3108,36,[36.7175],[0.7174988] +3109,37,[30.864676],[6.1353245] +3110,36,[29.845476],[6.154524] +3111,43,[38.36982],[4.6301804] +3112,42,[55.638954],[13.638954] +3113,38,[42.469124],[4.469124] +3114,38,[37.973232],[0.02676773] +3115,40,[38.4443],[1.5556984] +3116,39,[44.921646],[5.921646] +3117,35,[27.786932],[7.213068] +3118,38,[32.313026],[5.6869736] +3119,41,[49.21441],[8.214409] +3120,37,[33.298683],[3.7013168] +3121,36,[32.202435],[3.7975655] +3122,39,[33.34241],[5.657589] +3123,38,[34.65757],[3.34243] +3124,37,[34.24394],[2.7560616] +3125,35,[33.300316],[1.6996841] +3126,37,[29.650213],[7.3497868] +3127,39,[46.03897],[7.038971] +3128,39,[40.49245],[1.4924507] +3129,36,[38.3689],[2.3689003] +3130,35,[33.102047],[1.897953] +3131,43,[48.612568],[5.612568] +3132,39,[29.000702],[9.999298] +3133,35,[37.695778],[2.695778] +3134,39,[49.63213],[10.63213] +3135,39,[42.7228],[3.7228012] +3136,35,[54.2564],[19.256401] +3137,36,[33.361988],[2.638012] +3138,36,[34.59773],[1.4022713] +3139,43,[38.81106],[4.188938] +3140,38,[35.00914],[2.99086] +3141,39,[25.995823],[13.004177] +3142,39,[37.79461],[1.2053909] +3143,35,[28.706308],[6.2936916] +3144,42,[34.902332],[7.0976677] +3145,37,[7.5111637],[29.488836] +3146,36,[31.87858],[4.12142] +3147,35,[13.14107],[21.858929] +3148,38,[53.804657],[15.804657] +3149,43,[43.078873],[0.07887268] +3150,39,[33.28291],[5.7170906] +3151,39,[41.26441],[2.264408] +3152,35,[28.792643],[6.2073574] +3153,39,[40.965294],[1.9652939] +3154,40,[41.38816],[1.3881607] +3155,43,[29.244545],[13.755455] +3156,38,[30.819925],[7.1800747] +3157,35,[22.265911],[12.734089] +3158,40,[41.727448],[1.7274475] +3159,36,[31.523478],[4.4765224] +3160,38,[37.161293],[0.838707] +3161,36,[33.641815],[2.3581848] +3162,40,[27.55902],[12.440981] +3163,43,[34.65757],[8.34243] +3164,43,[34.328964],[8.671036] +3165,39,[30.550556],[8.449444] +3166,43,[29.793247],[13.206753] +3167,42,[39.65572],[2.3442802] +3168,41,[33.49298],[7.507019] +3169,38,[41.853104],[3.8531036] +3170,36,[36.35306],[0.35306168] +3171,36,[30.57554],[5.4244595] 
+3172,36,[32.241756],[3.7582436] +3173,36,[33.627094],[2.3729057] +3174,45,[37.35416],[7.6458397] +3175,40,[36.258175],[3.741825] +3176,46,[46.217415],[0.21741486] +3177,39,[49.23366],[10.233662] +3178,44,[43.40651],[0.5934906] +3179,38,[44.28389],[6.28389] +3180,40,[50.415466],[10.415466] +3181,38,[45.19506],[7.1950607] +3182,38,[27.345915],[10.654085] +3183,38,[29.74381],[8.256189] +3184,42,[39.759007],[2.2409935] +3185,38,[29.487045],[8.512955] +3186,38,[34.48304],[3.5169601] +3187,42,[34.588146],[7.411854] +3188,45,[42.338676],[2.6613235] +3189,45,[43.22047],[1.7795296] +3190,47,[29.580118],[17.419882] +3191,47,[39.011383],[7.988617] +3192,47,[32.713833],[14.286167] +3193,43,[32.263245],[10.736755] +3194,46,[39.31914],[6.6808586] +3195,39,[30.113392],[8.886608] +3196,39,[41.369614],[2.3696136] +3197,43,[42.841705],[0.15829468] +3198,43,[44.625175],[1.6251755] +3199,45,[38.279198],[6.7208023] +3200,43,[38.722267],[4.277733] +3201,46,[42.952263],[3.0477371] +3202,41,[40.67851],[0.32149124] +3203,46,[45.62997],[0.37002945] +3204,39,[42.11435],[3.1143494] +3205,46,[37.662674],[8.337326] +3206,46,[45.035206],[0.96479416] +3207,48,[46.11647],[1.8835297] +3208,40,[33.33533],[6.664669] +3209,46,[35.76751],[10.232491] +3210,42,[47.281075],[5.2810745] +3211,48,[38.4458],[9.554199] +3212,46,[41.586967],[4.4130325] +3213,46,[47.311417],[1.3114166] +3214,46,[43.69811],[2.3018913] +3215,48,[50.448093],[2.4480934] +3216,42,[37.461926],[4.5380745] +3217,44,[31.891346],[12.108654] +3218,41,[48.783337],[7.7833366] +3219,42,[45.39349],[3.3934898] +3220,40,[37.863777],[2.1362228] +3221,46,[40.90465],[5.0953484] +3222,40,[43.68832],[3.6883202] +3223,46,[35.51815],[10.48185] +3224,48,[47.61725],[0.38275146] +3225,40,[34.76193],[5.2380714] +3226,40,[40.970215],[0.97021484] +3227,40,[37.378757],[2.6212425] +3228,40,[43.797726],[3.7977257] +3229,44,[36.321613],[7.6783867] +3230,46,[43.619755],[2.3802452] +3231,48,[45.130043],[2.869957] +3232,40,[46.824936],[6.824936] +3233,41,[37.342163],[3.657837] +3234,42,[30.318714],[11.681286] +3235,41,[35.98507],[5.0149307] +3236,45,[45.023388],[0.02338791] +3237,44,[43.708557],[0.29144287] +3238,45,[44.51278],[0.48722076] +3239,49,[41.871387],[7.1286125] +3240,46,[40.901287],[5.098713] +3241,48,[28.119968],[19.880032] +3242,50,[43.00032],[6.9996796] +3243,42,[47.932472],[5.932472] +3244,46,[45.087334],[0.9126663] +3245,48,[39.080547],[8.919453] +3246,46,[43.05105],[2.9489517] +3247,48,[47.263824],[0.73617554] +3248,46,[41.559395],[4.440605] +3249,46,[44.511295],[1.4887047] +3250,46,[40.80474],[5.195259] +3251,42,[34.856026],[7.1439743] +3252,49,[47.185722],[1.8142776] +3253,45,[36.854782],[8.145218] +3254,46,[34.952953],[11.047047] +3255,50,[49.72536],[0.27463913] +3256,43,[48.97169],[5.971691] +3257,47,[55.43821],[8.43821] +3258,48,[57.222515],[9.222515] +3259,43,[38.811428],[4.188572] +3260,43,[49.287903],[6.287903] +3261,43,[47.03379],[4.0337906] +3262,43,[39.92431],[3.0756912] +3263,46,[46.961147],[0.9611473] +3264,45,[43.674156],[1.3258438] +3265,47,[44.396645],[2.6033554] +3266,44,[47.88226],[3.8822594] +3267,43,[44.125206],[1.125206] +3268,46,[39.30006],[6.6999397] +3269,46,[50.93561],[4.9356117] +3270,46,[40.059284],[5.940716] +3271,45,[43.86266],[1.1373405] +3272,46,[53.11598],[7.1159782] +3273,46,[42.92779],[3.0722084] +3274,43,[33.518105],[9.481895] +3275,46,[41.562973],[4.437027] +3276,43,[46.182625],[3.1826248] +3277,50,[41.84602],[8.15398] +3278,51,[48.676327],[2.3236732] +3279,49,[34.77687],[14.223129] +3280,51,[41.23931],[9.760689] 
+3281,43,[39.78508],[3.21492] +3282,43,[47.704582],[4.704582] +3283,51,[52.890083],[1.8900833] +3284,44,[45.606583],[1.6065826] +3285,44,[42.13492],[1.8650818] +3286,50,[46.28093],[3.7190704] +3287,44,[45.542454],[1.5424538] +3288,50,[35.18651],[14.813492] +3289,46,[49.4214],[3.4213982] +3290,44,[53.163567],[9.163567] +3291,52,[41.29288],[10.707119] +3292,49,[40.91447],[8.085529] +3293,44,[33.681015],[10.318985] +3294,44,[46.17731],[2.177311] +3295,1,[5.165912],[4.165912] +3296,4,[4.9210753],[0.92107534] +3297,2,[3.7778556],[1.7778556] +3298,2,[3.1787443],[1.1787443] +3299,2,[3.2376907],[1.2376907] +3300,1,[3.1274066],[2.1274066] +3301,1,[8.818456],[7.8184557] +3302,4,[4.5853777],[0.5853777] +3303,3,[3.0840495],[0.08404946] +3304,1,[5.1512184],[4.1512184] +3305,2,[3.2698717],[1.2698717] +3306,6,[5.2551117],[0.7448883] +3307,2,[3.2296546],[1.2296546] +3308,3,[3.838508],[0.8385079] +3309,3,[3.237377],[0.23737693] +3310,2,[3.8134038],[1.8134038] +3311,4,[3.4365463],[0.5634537] +3312,6,[4.141573],[1.858427] +3313,6,[3.9602623],[2.0397377] +3314,3,[15.600225],[12.600225] +3315,2,[3.746384],[1.7463839] +3316,1,[3.1750605],[2.1750605] +3317,1,[13.189907],[12.189907] +3318,3,[6.7268615],[3.7268615] +3319,2,[3.4477396],[1.4477396] +3320,3,[12.784367],[9.784367] +3321,1,[5.6832485],[4.6832485] +3322,3,[3.300496],[0.3004961] +3323,3,[4.5810146],[1.5810146] +3324,3,[19.5988],[16.5988] +3325,3,[16.339115],[13.339115] +3326,2,[3.2978199],[1.2978199] +3327,1,[12.753954],[11.753954] +3328,2,[7.7852435],[5.7852435] +3329,2,[3.2034225],[1.2034225] +3330,2,[3.1056795],[1.1056795] +3331,3,[3.2383578],[0.23835778] +3332,1,[3.3454025],[2.3454025] +3333,1,[3.5616136],[2.5616136] +3334,1,[3.110001],[2.110001] +3335,2,[5.9526505],[3.9526505] +3336,4,[3.2637131],[0.7362869] +3337,2,[5.954538],[3.9545379] +3338,2,[3.1537335],[1.1537335] +3339,3,[3.4602935],[0.46029353] +3340,4,[4.861031],[0.86103106] +3341,2,[3.7404695],[1.7404695] +3342,2,[4.0296807],[2.0296807] +3343,1,[3.1100047],[2.1100047] +3344,3,[3.5137556],[0.51375556] +3345,4,[3.366133],[0.633867] +3346,2,[3.3653135],[1.3653135] +3347,3,[10.365987],[7.365987] +3348,3,[4.2769065],[1.2769065] +3349,1,[4.8503036],[3.8503036] +3350,2,[12.900048],[10.900048] +3351,3,[3.2597177],[0.2597177] +3352,2,[3.283711],[1.283711] +3353,1,[3.174199],[2.174199] +3354,1,[3.299087],[2.299087] +3355,2,[3.2131863],[1.2131863] +3356,2,[4.066978],[2.066978] +3357,1,[3.163297],[2.163297] +3358,2,[7.4292636],[5.4292636] +3359,2,[9.690473],[7.6904726] +3360,3,[5.365376],[2.365376] +3361,2,[5.9877224],[3.9877224] +3362,4,[3.0949566],[0.90504336] +3363,3,[3.4979634],[0.49796343] +3364,1,[3.17538],[2.17538] +3365,3,[3.167355],[0.16735506] +3366,3,[24.85705],[21.85705] +3367,1,[3.1081326],[2.1081326] +3368,6,[6.082753],[0.08275318] +3369,1,[3.219281],[2.219281] +3370,3,[3.1455448],[0.14554477] +3371,2,[3.3262963],[1.3262963] +3372,3,[3.2151778],[0.21517777] +3373,3,[3.3048825],[0.30488253] +3374,3,[3.653561],[0.6535611] +3375,1,[5.6514893],[4.6514893] +3376,1,[3.743905],[2.743905] +3377,4,[4.320836],[0.32083607] +3378,3,[3.3388493],[0.3388493] +3379,2,[3.8245296],[1.8245296] +3380,2,[3.0920627],[1.0920627] +3381,3,[3.1736686],[0.17366862] +3382,2,[3.5121806],[1.5121806] +3383,1,[3.2120621],[2.2120621] +3384,2,[8.294305],[6.294305] +3385,2,[3.1162739],[1.1162739] +3386,1,[3.1886594],[2.1886594] +3387,3,[13.720717],[10.720717] +3388,1,[3.2890718],[2.2890718] +3389,4,[9.820518],[5.8205175] +3390,3,[3.1569216],[0.15692163] +3391,1,[3.111462],[2.111462] +3392,1,[3.3128576],[2.3128576] 
+3393,2,[5.4472704],[3.4472704] +3394,1,[5.9524155],[4.9524155] +3395,2,[3.133265],[1.133265] +3396,3,[10.999348],[7.9993477] +3397,2,[4.795436],[2.795436] +3398,2,[3.6314106],[1.6314106] +3399,1,[3.2314548],[2.2314548] +3400,1,[3.1818],[2.1818] +3401,2,[25.730818],[23.730818] +3402,2,[8.5212145],[6.5212145] +3403,1,[3.1797824],[2.1797824] +3404,3,[5.746593],[2.746593] +3405,2,[3.3599992],[1.3599992] +3406,2,[3.1591928],[1.1591928] +3407,1,[12.46088],[11.46088] +3408,3,[3.1415908],[0.14159083] +3409,51,[54.71573],[3.7157288] +3410,49,[40.88912],[8.110882] +3411,51,[42.953617],[8.046383] +3412,47,[44.736336],[2.2636642] +3413,51,[45.61318],[5.3868217] +3414,51,[48.019253],[2.9807472] +3415,48,[37.46118],[10.538818] +3416,50,[45.584305],[4.415695] +3417,51,[44.0375],[6.9625015] +3418,49,[26.878082],[22.121918] +3419,49,[43.886246],[5.1137543] +3420,53,[48.030678],[4.969322] +3421,52,[43.041115],[8.958885] +3422,51,[58.847923],[7.8479233] +3423,48,[45.601017],[2.398983] +3424,52,[40.747215],[11.252785] +3425,46,[47.552048],[1.5520477] +3426,49,[43.029915],[5.970085] +3427,53,[54.004017],[1.0040169] +3428,51,[45.176605],[5.823395] +3429,51,[50.54644],[0.45355988] +3430,51,[56.50897],[5.5089684] +3431,52,[30.46201],[21.53799] +3432,53,[46.991257],[6.0087433] +3433,46,[35.771255],[10.2287445] +3434,53,[47.23207],[5.767929] +3435,53,[45.4329],[7.5671005] +3436,53,[40.238686],[12.761314] +3437,49,[45.107613],[3.8923874] +3438,54,[49.41155],[4.5884514] +3439,53,[43.27827],[9.721729] +3440,49,[54.860344],[5.860344] +3441,47,[52.65856],[5.6585617] +3442,53,[40.38081],[12.61919] +3443,54,[39.325615],[14.674385] +3444,52,[52.89221],[0.8922081] +3445,50,[51.73739],[1.7373886] +3446,52,[47.024475],[4.975525] +3447,52,[60.567097],[8.567097] +3448,54,[37.919315],[16.080685] +3449,52,[40.161118],[11.838882] +3450,53,[50.050537],[2.949463] +3451,51,[48.529045],[2.470955] +3452,54,[40.75416],[13.245838] +3453,54,[36.798603],[17.201397] +3454,54,[47.305115],[6.6948853] +3455,53,[31.197958],[21.802042] +3456,55,[37.919315],[17.080685] +3457,53,[35.784924],[17.215076] +3458,55,[59.65763],[4.657631] +3459,54,[50.240135],[3.7598648] +3460,54,[39.771378],[14.228622] +3461,53,[44.285175],[8.714825] +3462,51,[38.62308],[12.376919] +3463,53,[32.626476],[20.373524] +3464,48,[47.130867],[0.869133] +3465,51,[54.60335],[3.6033516] +3466,47,[50.96783],[3.9678307] +3467,50,[44.96953],[5.030472] +3468,51,[58.94359],[7.9435883] +3469,48,[43.46059],[4.5394096] +3470,49,[45.54385],[3.45615] +3471,51,[54.252888],[3.2528877] +3472,49,[54.76343],[5.7634315] +3473,52,[58.996124],[6.9961243] +3474,51,[54.519203],[3.5192032] +3475,51,[58.646557],[7.646557] +3476,51,[58.658146],[7.658146] +3477,52,[48.907986],[3.0920143] +3478,51,[41.418125],[9.581875] +3479,51,[51.744747],[0.74474716] +3480,51,[50.284065],[0.71593475] +3481,50,[51.06733],[1.0673294] +3482,54,[54.015877],[0.01587677] +3483,56,[53.95794],[2.0420609] +3484,50,[47.84752],[2.152481] +3485,57,[44.90027],[12.099731] +3486,55,[46.831913],[8.168087] +3487,55,[54.630302],[0.36969757] +3488,58,[51.7627],[6.237301] +3489,57,[56.942413],[0.05758667] +3490,59,[55.325523],[3.6744766] +3491,57,[65.97548],[8.975479] +3492,57,[53.257973],[3.7420273] +3493,57,[53.257973],[3.7420273] +3494,53,[53.009346],[0.00934601] +3495,60,[49.918198],[10.081802] +3496,56,[45.535797],[10.464203] +3497,59,[47.194424],[11.805576] +3498,61,[58.60987],[2.390129] +3499,59,[65.17943],[6.179428] +3500,60,[57.499596],[2.5004044] +3501,57,[53.752457],[3.2475433] +3502,6,[8.371481],[2.371481] 
+3503,4,[3.3655975],[0.6344025] +3504,6,[9.302563],[3.3025627] +3505,2,[3.3106658],[1.3106658] +3506,2,[3.223525],[1.223525] +3507,5,[5.528053],[0.5280528] +3508,2,[3.8830845],[1.8830845] +3509,6,[3.3919003],[2.6080997] +3510,5,[3.3889167],[1.6110833] +3511,5,[4.2186494],[0.7813506] +3512,2,[9.353794],[7.353794] +3513,1,[3.101636],[2.101636] +3514,4,[5.016267],[1.0162668] +3515,5,[3.2158706],[1.7841294] +3516,2,[4.41076],[2.41076] +3517,6,[15.385668],[9.385668] +3518,4,[5.490605],[1.4906049] +3519,6,[4.8994384],[1.1005616] +3520,6,[13.483513],[7.483513] +3521,4,[4.1523433],[0.15234327] +3522,1,[3.106151],[2.106151] +3523,4,[5.296299],[1.296299] +3524,3,[4.1001935],[1.1001935] +3525,6,[14.306616],[8.306616] +3526,3,[5.3505063],[2.3505063] +3527,5,[3.3758113],[1.6241887] +3528,4,[11.838388],[7.8383884] +3529,2,[3.3012152],[1.3012152] +3530,5,[3.1221],[1.8778999] +3531,2,[3.2007365],[1.2007365] +3532,4,[3.268211],[0.7317891] +3533,6,[7.1363835],[1.1363835] +3534,2,[3.2232685],[1.2232685] +3535,2,[3.2579262],[1.2579262] +3536,4,[3.155025],[0.844975] +3537,6,[5.7502656],[0.2497344] +3538,2,[5.2459874],[3.2459874] +3539,5,[5.82432],[0.82431984] +3540,2,[3.1795793],[1.1795793] +3541,2,[3.3714964],[1.3714964] +3542,2,[3.2424052],[1.2424052] +3543,4,[3.508758],[0.49124193] +3544,4,[4.566916],[0.566916] +3545,2,[3.313995],[1.3139949] +3546,5,[3.1718736],[1.8281264] +3547,2,[3.2953649],[1.2953649] +3548,2,[3.1876254],[1.1876254] +3549,2,[10.443254],[8.443254] +3550,4,[3.654696],[0.345304] +3551,2,[4.5075345],[2.5075345] +3552,2,[3.3490171],[1.3490171] +3553,2,[3.1356695],[1.1356695] +3554,2,[3.19124],[1.1912401] +3555,5,[3.1926823],[1.8073177] +3556,3,[3.3727117],[0.37271166] +3557,3,[3.6474764],[0.64747643] +3558,2,[3.4320068],[1.4320068] +3559,2,[3.2453504],[1.2453504] +3560,6,[7.6129117],[1.6129117] +3561,4,[3.199338],[0.80066204] +3562,2,[7.849778],[5.849778] +3563,2,[3.3216977],[1.3216977] +3564,5,[3.990046],[1.009954] +3565,3,[3.3736506],[0.37365055] +3566,3,[3.1942878],[0.19428778] +3567,5,[19.791487],[14.791487] +3568,4,[3.263346],[0.73665404] +3569,4,[26.42401],[22.42401] +3570,1,[3.2096808],[2.2096808] +3571,2,[3.3076642],[1.3076642] +3572,3,[13.376115],[10.376115] +3573,1,[3.181028],[2.181028] +3574,3,[3.965661],[0.96566105] +3575,4,[14.381267],[10.381267] +3576,4,[3.211381],[0.78861904] +3577,9,[4.8666363],[4.1333637] +3578,3,[3.3057125],[0.30571246] +3579,3,[6.2055807],[3.2055807] +3580,5,[3.7927752],[1.2072248] +3581,7,[5.7085867],[1.2914133] +3582,2,[5.1274366],[3.1274366] +3583,2,[3.1469035],[1.1469035] +3584,2,[3.2801683],[1.2801683] +3585,1,[5.515967],[4.515967] +3586,1,[3.2504435],[2.2504435] +3587,4,[3.4853141],[0.51468587] +3588,9,[3.3006678],[5.699332] +3589,2,[3.147523],[1.1475229] +3590,8,[3.214501],[4.785499] +3591,4,[3.484306],[0.5156939] +3592,3,[6.3309493],[3.3309493] +3593,1,[3.13461],[2.13461] +3594,3,[3.2228227],[0.22282267] +3595,2,[3.315125],[1.315125] +3596,4,[4.0107503],[0.01075029] +3597,3,[3.3197198],[0.3197198] +3598,2,[3.3671634],[1.3671634] +3599,2,[3.2143497],[1.2143497] +3600,2,[3.3525126],[1.3525126] +3601,4,[3.2427847],[0.75721526] +3602,1,[3.2013726],[2.2013726] +3603,3,[4.0935297],[1.0935297] +3604,3,[5.1503067],[2.1503067] +3605,2,[3.177481],[1.1774809] +3606,4,[3.220396],[0.77960396] +3607,4,[17.925436],[13.925436] +3608,6,[3.174913],[2.825087] +3609,4,[5.004122],[1.0041218] +3610,6,[5.777681],[0.22231913] +3611,2,[3.639642],[1.639642] +3612,1,[3.1141977],[2.1141977] +3613,1,[3.2356856],[2.2356856] +3614,56,[55.24057],[0.75942993] 
+3615,63,[43.936684],[19.063316] +3616,59,[50.693516],[8.306484] +3617,58,[49.675804],[8.324196] +3618,59,[59.298416],[0.29841614] +3619,61,[57.43284],[3.5671616] +3620,56,[43.295483],[12.704517] +3621,58,[48.252895],[9.747105] +3622,56,[61.19927],[5.1992683] +3623,62,[57.985035],[4.014965] +3624,58,[60.728554],[2.7285538] +3625,60,[62.236168],[2.236168] +3626,64,[63.35844],[0.6415596] +3627,65,[63.756416],[1.2435837] +3628,64,[62.68845],[1.3115501] +3629,64,[62.608093],[1.3919067] +3630,60,[63.729187],[3.729187] +3631,66,[70.34482],[4.344818] +3632,63,[62.70483],[0.29516983] +3633,66,[56.45883],[9.541168] +3634,59,[58.202015],[0.7979851] +3635,64,[47.685184],[16.314816] +3636,64,[48.729378],[15.270622] +3637,65,[60.078876],[4.9211235] +3638,64,[60.218536],[3.7814636] +3639,62,[62.894257],[0.8942566] +3640,67,[59.223183],[7.7768173] +3641,66,[61.546165],[4.4538345] +3642,60,[56.545383],[3.4546165] +3643,66,[56.55691],[9.443089] +3644,67,[62.83352],[4.166481] +3645,60,[54.694157],[5.3058434] +3646,64,[62.258118],[1.7418823] +3647,61,[54.796555],[6.2034454] +3648,66,[55.737347],[10.262653] +3649,64,[67.23387],[3.2338715] +3650,68,[64.732216],[3.267784] +3651,67,[63.812607],[3.1873932] +3652,64,[61.879333],[2.1206665] +3653,68,[63.866203],[4.1337967] +3654,64,[62.29518],[1.7048187] +3655,66,[57.80842],[8.191582] +3656,68,[69.3943],[1.3943024] +3657,68,[66.47778],[1.5222168] +3658,62,[61.60626],[0.3937416] +3659,64,[54.75823],[9.241772] +3660,67,[55.968536],[11.031464] +3661,69,[60.350117],[8.649883] +3662,67,[60.71631],[6.2836914] +3663,67,[60.64206],[6.3579407] +3664,69,[54.14481],[14.85519] +3665,69,[62.182644],[6.817356] +3666,66,[60.2113],[5.7887] +3667,68,[57.20744],[10.792561] +3668,69,[60.809387],[8.190613] +3669,67,[56.88812],[10.111881] +3670,67,[60.563496],[6.4365044] +3671,67,[66.95734],[0.04266357] +3672,69,[55.118],[13.882] +3673,67,[66.73865],[0.26135254] +3674,69,[61.656715],[7.3432846] +3675,68,[71.18415],[3.1841507] +3676,69,[65.400475],[3.5995255] +3677,68,[62.493164],[5.506836] +3678,4,[3.996112],[0.00388789] +3679,2,[5.3376017],[3.3376017] +3680,3,[3.8340504],[0.8340504] +3681,2,[4.997611],[2.997611] +3682,3,[5.046732],[2.046732] +3683,2,[3.5768015],[1.5768015] +3684,4,[3.3193293],[0.68067074] +3685,2,[3.1537335],[1.1537335] +3686,2,[3.2619495],[1.2619495] +3687,2,[4.7725916],[2.7725916] +3688,2,[3.2396002],[1.2396002] +3689,3,[3.4704642],[0.47046423] +3690,2,[7.286374],[5.286374] +3691,7,[8.706461],[1.706461] +3692,2,[4.0386586],[2.0386586] +3693,3,[3.3409398],[0.34093976] +3694,3,[6.352886],[3.3528862] +3695,2,[3.252015],[1.2520151] +3696,2,[3.1575732],[1.1575732] +3697,2,[3.457048],[1.4570479] +3698,2,[3.8463666],[1.8463666] +3699,7,[3.550983],[3.449017] +3700,7,[3.4548645],[3.5451355] +3701,7,[4.592691],[2.407309] +3702,2,[3.2604823],[1.2604823] +3703,2,[3.204563],[1.2045629] +3704,7,[3.5242274],[3.4757726] +3705,5,[8.963594],[3.9635944] +3706,2,[3.146324],[1.1463239] +3707,7,[3.549541],[3.450459] +3708,5,[3.913378],[1.086622] +3709,5,[3.7402475],[1.2597525] +3710,3,[3.217606],[0.21760607] +3711,5,[3.113178],[1.886822] +3712,5,[3.2288678],[1.7711322] +3713,7,[3.2125347],[3.7874653] +3714,4,[3.1282933],[0.8717067] +3715,7,[11.817476],[4.8174763] +3716,3,[3.152242],[0.15224195] +3717,7,[5.9493394],[1.0506606] +3718,7,[4.4184866],[2.5815134] +3719,3,[3.1335013],[0.13350129] +3720,7,[3.3848362],[3.6151638] +3721,5,[3.2906299],[1.7093701] +3722,5,[5.340951],[0.34095097] +3723,2,[3.4319983],[1.4319983] +3724,5,[3.146655],[1.8533449] +3725,5,[10.67203],[5.6720304] 
+3726,5,[11.066998],[6.0669975] +3727,7,[3.294373],[3.705627] +3728,3,[3.5564096],[0.5564096] +3729,7,[3.2694943],[3.7305057] +3730,5,[3.1055994],[1.8944006] +3731,5,[9.709493],[4.7094927] +3732,5,[3.1911242],[1.8088758] +3733,2,[3.1969898],[1.1969898] +3734,5,[15.922306],[10.922306] +3735,5,[14.032977],[9.032977] +3736,3,[3.4737418],[0.47374177] +3737,5,[4.6233897],[0.37661028] +3738,3,[3.1960301],[0.19603014] +3739,2,[3.193035],[1.1930349] +3740,5,[15.047049],[10.047049] +3741,3,[3.9687207],[0.9687207] +3742,3,[12.7883005],[9.7883005] +3743,5,[3.2529876],[1.7470124] +3744,5,[12.985272],[7.9852724] +3745,7,[4.189553],[2.8104472] +3746,4,[17.842268],[13.842268] +3747,5,[3.6345494],[1.3654506] +3748,4,[3.268103],[0.7318971] +3749,4,[3.2138507],[0.78614926] +3750,4,[23.467705],[19.467705] +3751,4,[3.3822286],[0.6177714] +3752,4,[4.710538],[0.7105379] +3753,4,[3.87718],[0.1228199] +3754,2,[13.038801],[11.038801] +3755,5,[7.127659],[2.1276588] +3756,4,[4.7614307],[0.76143074] +3757,7,[3.2406857],[3.7593143] +3758,5,[4.105155],[0.894845] +3759,7,[5.147929],[1.8520708] +3760,3,[3.754614],[0.7546141] +3761,3,[3.292789],[0.29278898] +3762,4,[18.293972],[14.293972] +3763,5,[3.2153027],[1.7846973] +3764,69,[73.28158],[4.281578] +3765,69,[58.871666],[10.128334] +3766,64,[64.57062],[0.5706177] +3767,69,[37.028168],[31.971832] +3768,67,[50.049717],[16.950283] +3769,67,[62.998405],[4.0015945] +3770,64,[74.82402],[10.82402] +3771,65,[65.156975],[0.15697479] +3772,67,[72.807755],[5.8077545] +3773,69,[70.156235],[1.1562347] +3774,69,[70.242035],[1.2420349] +3775,64,[77.16421],[13.164207] +3776,69,[70.11094],[1.110939] +3777,69,[57.60204],[11.397961] +3778,66,[52.864594],[13.1354065] +3779,68,[61.149323],[6.8506775] +3780,64,[70.34207],[6.3420715] +3781,66,[53.62749],[12.372509] +3782,69,[68.557755],[0.44224548] +3783,68,[64.84725],[3.152748] +3784,69,[73.69621],[4.696213] +3785,67,[64.788765],[2.211235] +3786,64,[61.80562],[2.1943817] +3787,69,[64.84712],[4.152878] +3788,69,[65.48822],[3.5117798] +3789,68,[67.99334],[0.00666046] +3790,68,[74.12029],[6.1202927] +3791,68,[78.383934],[10.383934] +3792,66,[66.57518],[0.57518005] +3793,67,[62.981987],[4.018013] +3794,5,[3.3203228],[1.6796772] +3795,5,[3.7533753],[1.2466247] +3796,4,[15.635808],[11.635808] +3797,5,[3.5237834],[1.4762166] +3798,3,[6.784408],[3.784408] +3799,5,[3.443369],[1.5566311] +3800,5,[3.535683],[1.4643171] +3801,7,[3.6471198],[3.3528802] +3802,3,[4.992037],[1.9920368] +3803,6,[3.3636293],[2.6363707] +3804,5,[5.791074],[0.7910738] +3805,5,[3.5845873],[1.4154127] +3806,8,[3.654696],[4.345304] +3807,5,[5.781026],[0.7810259] +3808,3,[7.482797],[4.482797] +3809,6,[11.6290455],[5.6290455] +3810,3,[3.1270165],[0.12701654] +3811,6,[5.8444114],[0.15558863] +3812,7,[21.207073],[14.207073] +3813,7,[6.7573276],[0.24267244] +3814,3,[19.271969],[16.271969] +3815,5,[14.032977],[9.032977] +3816,3,[9.936288],[6.936288] +3817,6,[4.671383],[1.3286171] +3818,3,[3.3517377],[0.35173774] +3819,6,[3.527508],[2.472492] +3820,5,[3.4737418],[1.5262582] +3821,3,[5.6797457],[2.6797457] +3822,3,[3.328279],[0.32827902] +3823,5,[6.521737],[1.5217371] +3824,6,[3.4078205],[2.5921795] +3825,6,[4.101031],[1.8989692] +3826,5,[3.5244281],[1.4755719] +3827,6,[7.319153],[1.3191528] +3828,6,[15.303653],[9.303653] +3829,4,[3.397429],[0.602571] +3830,6,[3.242201],[2.757799] +3831,5,[4.207663],[0.79233694] +3832,5,[17.67118],[12.671181] +3833,6,[5.491795],[0.50820494] +3834,6,[3.4319983],[2.5680017] +3835,6,[5.7064586],[0.29354143] +3836,5,[7.854022],[2.854022] 
+3837,8,[4.018096],[3.981904] +3838,5,[5.1211834],[0.1211834] +3839,5,[3.170298],[1.8297019] +3840,5,[14.401766],[9.401766] +3841,5,[5.2301483],[0.23014832] +3842,8,[5.536656],[2.463344] +3843,7,[9.209774],[2.209774] +3844,5,[29.110565],[24.110565] +3845,3,[5.7207403],[2.7207403] +3846,5,[22.086193],[17.086193] +3847,5,[10.15334],[5.1533403] +3848,8,[3.3470166],[4.6529837] +3849,4,[3.3709726],[0.62902737] +3850,3,[3.8887591],[0.88875914] +3851,3,[6.982067],[3.982067] +3852,3,[3.6704414],[0.6704414] +3853,6,[3.3940296],[2.6059704] +3854,3,[3.4987166],[0.4987166] +3855,5,[5.236585],[0.23658514] +3856,6,[3.2180438],[2.7819562] +3857,5,[6.9487934],[1.9487934] +3858,5,[4.1435313],[0.8564687] +3859,6,[3.6536677],[2.3463323] +3860,5,[3.3819149],[1.6180851] +3861,4,[20.361458],[16.361458] +3862,5,[4.4250345],[0.5749655] +3863,6,[3.289783],[2.710217] +3864,5,[13.274975],[8.274975] +3865,5,[6.2786636],[1.2786636] +3866,6,[3.3520403],[2.6479597] +3867,6,[5.737772],[0.262228] +3868,6,[6.8351088],[0.83510876] +3869,8,[4.624789],[3.3752108] +3870,4,[3.216681],[0.783319] +3871,7,[15.635808],[8.635808] +3872,6,[17.33832],[11.33832] +3873,11,[11.707907],[0.7079067] +3874,8,[5.491795],[2.508205] +3875,6,[4.1435313],[1.8564687] +3876,6,[4.663558],[1.336442] +3877,4,[4.648915],[0.6489148] +3878,6,[5.872317],[0.12768316] +3879,4,[8.077824],[4.0778236] +3880,8,[5.1588674],[2.8411326] +3881,4,[8.788086],[4.788086] +3882,4,[10.554572],[6.554572] +3883,6,[4.3841047],[1.6158953] +3884,6,[7.6617208],[1.6617208] +3885,4,[3.2495892],[0.7504108] +3886,11,[4.021694],[6.978306] +3887,4,[6.2536254],[2.2536254] +3888,10,[5.4982333],[4.5017667] +3889,6,[6.7284956],[0.7284956] +3890,6,[3.250821],[2.749179] +3891,7,[8.116711],[1.1167107] +3892,4,[3.344237],[0.6557629] +3893,11,[7.647716],[3.352284] +3894,6,[5.1969705],[0.80302954] +3895,5,[3.9339297],[1.0660703] +3896,6,[5.283836],[0.7161641] +3897,10,[9.29474],[0.7052603] +3898,8,[5.5802035],[2.4197965] +3899,10,[4.4060655],[5.5939345] +3900,6,[3.281484],[2.718516] +3901,9,[3.5260038],[5.473996] +3902,6,[7.8865895],[1.8865895] +3903,6,[9.325247],[3.3252468] +3904,12,[10.870916],[1.1290836] +3905,11,[8.775784],[2.2242165] +3906,9,[10.558727],[1.5587273] +3907,6,[6.2869997],[0.2869997] +3908,6,[5.2897234],[0.7102766] +3909,6,[13.274975],[7.274975] +3910,6,[4.825176],[1.1748242] +3911,9,[7.176105],[1.823895] +3912,6,[5.7788143],[0.22118568] +3913,12,[12.238804],[0.23880386] +3914,4,[6.641327],[2.641327] +3915,6,[3.4055037],[2.5944963] +3916,6,[20.45197],[14.451969] +3917,6,[3.1684556],[2.8315444] +3918,6,[4.266593],[1.733407] +3919,11,[19.71209],[8.71209] +3920,8,[20.769018],[12.769018] +3921,6,[11.050765],[5.050765] +3922,8,[4.851224],[3.148776] +3923,5,[7.115538],[2.1155381] +3924,4,[3.2224858],[0.7775142] +3925,7,[7.565157],[0.56515694] +3926,6,[3.2543366],[2.7456634] +3927,7,[5.1211834],[1.8788166] +3928,5,[5.4982886],[0.49828863] +3929,6,[4.441147],[1.5588531] +3930,4,[5.5072103],[1.5072103] +3931,8,[15.554874],[7.5548744] +3932,7,[3.1729896],[3.8270104] +3933,4,[3.1160269],[0.8839731] +3934,5,[3.8238094],[1.1761906] +3935,10,[12.998851],[2.9988508] +3936,7,[7.3888116],[0.3888116] +3937,5,[3.2276788],[1.7723212] +3938,5,[3.2411551],[1.7588449] +3939,5,[4.2202635],[0.7797365] +3940,5,[18.222357],[13.222357] +3941,5,[3.9787054],[1.0212946] +3942,7,[11.540862],[4.540862] +3943,8,[3.9948554],[4.0051446] +3944,8,[3.1726792],[4.827321] diff --git a/SSRNET/CA.py b/SSRNET/CA.py new file mode 100644 index 0000000..c8e40c7 --- /dev/null +++ b/SSRNET/CA.py @@ -0,0 +1,73 @@ +import 
numpy as np +import sys +import logging +import csv +import os +os.environ['TF_CPP_MIN_LOG_LEVEL']='2' + +from SSRNET_model import SSR_net +from TYY_utils import mk_dir, load_data_npz + +def MAE(a,b): + mae = np.sum(np.absolute(a-b)) + mae/=len(a) + return mae + +''''''''''''''''''''''''''''''''''''''''''''' + file name +''''''''''''''''''''''''''''''''''''''''''''' +test_file = sys.argv[1] +netType1 = int(sys.argv[2]) +netType2 = int(sys.argv[3]) +stage_num = [3,3,3] + +lambda_local = 0.25*(netType1%5) +lambda_d = 0.25*(netType2%5) + + +logging.debug("Loading testing data...") +image2, age2, image_size = load_data_npz(test_file) + +mk_dir('Results_csv') + +model_file = 'megaage_models/batch_size_50/ssrnet_%d_%d_%d_%d_%s_%s/ssrnet_%d_%d_%d_%d_%s_%s.h5' % (stage_num[0],stage_num[1],stage_num[2], image_size, lambda_local, lambda_d, stage_num[0],stage_num[1],stage_num[2], image_size, lambda_local, lambda_d) +save_name = 'Results_csv/ssrnet_%d_%d_%d_%d_%s_%s_age.csv' % (stage_num[0],stage_num[1],stage_num[2], image_size, lambda_local, lambda_d) + +''''''''''''''''''''''''''''''''''''''''''''' + load data +''''''''''''''''''''''''''''''''''''''''''''' +model = SSR_net(image_size,stage_num, lambda_local, lambda_d)() + +logging.debug("Loading model file...") +model.load_weights(model_file) + +age_p=model.predict(image2) + +''''''''''''''''''''''''''''''''''''''''''''' + prediction +''''''''''''''''''''''''''''''''''''''''''''' +age_p2=age_p + +pred=[['MAE'],[str(MAE(age2[age2>=-1],age_p2[age2>=-1]))],['CA3','CA5'],['0','0'],['ID','age','age_p','error']] +CA3=0 +CA5=0 +for i in range(0,len(image2)): + error=np.absolute(age2[i]-age_p2[i]) + if age2[i]>=-1: + if error<=3: + CA3+=1 + if error<=5: + CA5+=1 + temp = [str(i), str(age2[i]), str(age_p2[i]), str(error)] + pred.append(temp) + +CA3/=len(age2[age2>=-1]) +CA5/=len(age2[age2>=-1]) +pred[3]=[str(CA3),str(CA5)] + +print('CA3: ',CA3,'\nCA5: ',CA5) + +f=open(save_name,'w') +w=csv.writer(f) +w.writerows(pred) +f.close diff --git a/SSRNET/SSRNET_model.py b/SSRNET/SSRNET_model.py new file mode 100644 index 0000000..95297df --- /dev/null +++ b/SSRNET/SSRNET_model.py @@ -0,0 +1,177 @@ +# This code is imported from the following project: https://github.com/asmith26/wide_resnets_keras + +import logging +import sys +import numpy as np +from keras.models import Model +from keras.layers import Input, Activation, add, Dense, Flatten, Dropout, Multiply, Embedding, Lambda, Add, Concatenate, Activation +from keras.layers.convolutional import Conv2D, AveragePooling2D, MaxPooling2D +from keras.layers.normalization import BatchNormalization +from keras.regularizers import l2 +from keras import backend as K +from keras.optimizers import SGD,Adam +from keras.applications.mobilenet import MobileNet +from densenet import * +from keras.utils import plot_model + +sys.setrecursionlimit(2 ** 20) +np.random.seed(2 ** 10) + + +class SSR_net: + def __init__(self, image_size,stage_num,lambda_local,lambda_d): + + if K.image_dim_ordering() == "th": + logging.debug("image_dim_ordering = 'th'") + self._channel_axis = 1 + self._input_shape = (3, image_size, image_size) + else: + logging.debug("image_dim_ordering = 'tf'") + self._channel_axis = -1 + self._input_shape = (image_size, image_size, 3) + + + self.stage_num = stage_num + self.lambda_local = lambda_local + self.lambda_d = lambda_d + +# def create_model(self): + def __call__(self): + logging.debug("Creating model...") + + + inputs = Input(shape=self._input_shape) + + 
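+ # Descriptive note on the block that follows (comments added for clarity; all layer names
+ # referenced here are the ones defined in this file): the model builds two parallel feature
+ # streams from the same input -- an "x" stream (Conv2D(32) -> BatchNorm -> ReLU ->
+ # AveragePooling2D) and an "s" stream (Conv2D(16) -> BatchNorm -> tanh -> MaxPooling2D).
+ # The intermediate activations x_layer1..x_layer3 and s_layer1..s_layer3 are kept so that
+ # the three prediction stages further down can each read features at a different spatial
+ # resolution (stage 1 uses the final conv output, stage 2 uses *_layer2, stage 3 uses *_layer1).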
#------------------------------------------------------------------------------------------------------------------------- + x = Conv2D(32,(3,3))(inputs) + x = BatchNormalization(axis=self._channel_axis)(x) + x = Activation('relu')(x) + x_layer1 = AveragePooling2D(2,2)(x) + x = Conv2D(32,(3,3))(x_layer1) + x = BatchNormalization(axis=self._channel_axis)(x) + x = Activation('relu')(x) + x_layer2 = AveragePooling2D(2,2)(x) + x = Conv2D(32,(3,3))(x_layer2) + x = BatchNormalization(axis=self._channel_axis)(x) + x = Activation('relu')(x) + x_layer3 = AveragePooling2D(2,2)(x) + x = Conv2D(32,(3,3))(x_layer3) + x = BatchNormalization(axis=self._channel_axis)(x) + x = Activation('relu')(x) + #------------------------------------------------------------------------------------------------------------------------- + s = Conv2D(16,(3,3))(inputs) + s = BatchNormalization(axis=self._channel_axis)(s) + s = Activation('tanh')(s) + s_layer1 = MaxPooling2D(2,2)(s) + s = Conv2D(16,(3,3))(s_layer1) + s = BatchNormalization(axis=self._channel_axis)(s) + s = Activation('tanh')(s) + s_layer2 = MaxPooling2D(2,2)(s) + s = Conv2D(16,(3,3))(s_layer2) + s = BatchNormalization(axis=self._channel_axis)(s) + s = Activation('tanh')(s) + s_layer3 = MaxPooling2D(2,2)(s) + s = Conv2D(16,(3,3))(s_layer3) + s = BatchNormalization(axis=self._channel_axis)(s) + s = Activation('tanh')(s) + + + #------------------------------------------------------------------------------------------------------------------------- + # Classifier block + s_layer4 = Conv2D(10,(1,1),activation='relu')(s) + s_layer4 = Flatten()(s_layer4) + s_layer4_mix = Dropout(0.2)(s_layer4) + s_layer4_mix = Dense(units=self.stage_num[0], activation="relu")(s_layer4_mix) + + x_layer4 = Conv2D(10,(1,1),activation='relu')(x) + x_layer4 = Flatten()(x_layer4) + x_layer4_mix = Dropout(0.2)(x_layer4) + x_layer4_mix = Dense(units=self.stage_num[0], activation="relu")(x_layer4_mix) + + feat_a_s1_pre = Multiply()([s_layer4,x_layer4]) + delta_s1 = Dense(1,activation='tanh',name='delta_s1')(feat_a_s1_pre) + + feat_a_s1 = Multiply()([s_layer4_mix,x_layer4_mix]) + feat_a_s1 = Dense(2*self.stage_num[0],activation='relu')(feat_a_s1) + pred_a_s1 = Dense(units=self.stage_num[0], activation="relu",name='pred_age_stage1')(feat_a_s1) + #feat_local_s1 = Lambda(lambda x: x/10)(feat_a_s1) + #feat_a_s1_local = Dropout(0.2)(pred_a_s1) + local_s1 = Dense(units=self.stage_num[0], activation='tanh', name='local_delta_stage1')(feat_a_s1) + #------------------------------------------------------------------------------------------------------------------------- + s_layer2 = Conv2D(10,(1,1),activation='relu')(s_layer2) + s_layer2 = MaxPooling2D(4,4)(s_layer2) + s_layer2 = Flatten()(s_layer2) + s_layer2_mix = Dropout(0.2)(s_layer2) + s_layer2_mix = Dense(self.stage_num[1],activation='relu')(s_layer2_mix) + + x_layer2 = Conv2D(10,(1,1),activation='relu')(x_layer2) + x_layer2 = AveragePooling2D(4,4)(x_layer2) + x_layer2 = Flatten()(x_layer2) + x_layer2_mix = Dropout(0.2)(x_layer2) + x_layer2_mix = Dense(self.stage_num[1],activation='relu')(x_layer2_mix) + + feat_a_s2_pre = Multiply()([s_layer2,x_layer2]) + delta_s2 = Dense(1,activation='tanh',name='delta_s2')(feat_a_s2_pre) + + feat_a_s2 = Multiply()([s_layer2_mix,x_layer2_mix]) + feat_a_s2 = Dense(2*self.stage_num[1],activation='relu')(feat_a_s2) + pred_a_s2 = Dense(units=self.stage_num[1], activation="relu",name='pred_age_stage2')(feat_a_s2) + #feat_local_s2 = Lambda(lambda x: x/10)(feat_a_s2) + #feat_a_s2_local = Dropout(0.2)(pred_a_s2) + 
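+ # Per-stage heads, as defined in this file: each stage k produces pred_age_stage{k}
+ # (per-bin weights p_k over stage_num[k] bins), delta_s{k} (a scalar Delta_k that rescales
+ # the effective stage width to s_k*(1 + lambda_d*Delta_k)), and local_delta_stage{k}
+ # (per-bin offsets eta_k). merge_age() further down fuses them as
+ #   age = V * sum_k [ sum_i (i + lambda_local*eta_{k,i}) * p_{k,i} ]
+ #               / prod_{j<=k} ( s_j * (1 + lambda_d*Delta_j) ),   with V = 101,
+ # which mirrors the three accumulation loops in merge_age() below.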
local_s2 = Dense(units=self.stage_num[1], activation='tanh', name='local_delta_stage2')(feat_a_s2) + #------------------------------------------------------------------------------------------------------------------------- + s_layer1 = Conv2D(10,(1,1),activation='relu')(s_layer1) + s_layer1 = MaxPooling2D(8,8)(s_layer1) + s_layer1 = Flatten()(s_layer1) + s_layer1_mix = Dropout(0.2)(s_layer1) + s_layer1_mix = Dense(self.stage_num[2],activation='relu')(s_layer1_mix) + + x_layer1 = Conv2D(10,(1,1),activation='relu')(x_layer1) + x_layer1 = AveragePooling2D(8,8)(x_layer1) + x_layer1 = Flatten()(x_layer1) + x_layer1_mix = Dropout(0.2)(x_layer1) + x_layer1_mix = Dense(self.stage_num[2],activation='relu')(x_layer1_mix) + + feat_a_s3_pre = Multiply()([s_layer1,x_layer1]) + delta_s3 = Dense(1,activation='tanh',name='delta_s3')(feat_a_s3_pre) + + feat_a_s3 = Multiply()([s_layer1_mix,x_layer1_mix]) + feat_a_s3 = Dense(2*self.stage_num[2],activation='relu')(feat_a_s3) + pred_a_s3 = Dense(units=self.stage_num[2], activation="relu",name='pred_age_stage3')(feat_a_s3) + #feat_local_s3 = Lambda(lambda x: x/10)(feat_a_s3) + #feat_a_s3_local = Dropout(0.2)(pred_a_s3) + local_s3 = Dense(units=self.stage_num[2], activation='tanh', name='local_delta_stage3')(feat_a_s3) + #------------------------------------------------------------------------------------------------------------------------- + + def merge_age(x,s1,s2,s3,lambda_local,lambda_d): + a = x[0][:,0]*0 + b = x[0][:,0]*0 + c = x[0][:,0]*0 + A = s1*s2*s3 + V = 101 + + for i in range(0,s1): + a = a+(i+lambda_local*x[6][:,i])*x[0][:,i] + a = K.expand_dims(a,-1) + a = a/(s1*(1+lambda_d*x[3])) + + for j in range(0,s2): + b = b+(j+lambda_local*x[7][:,j])*x[1][:,j] + b = K.expand_dims(b,-1) + b = b/(s1*(1+lambda_d*x[3]))/(s2*(1+lambda_d*x[4])) + + for k in range(0,s3): + c = c+(k+lambda_local*x[8][:,k])*x[2][:,k] + c = K.expand_dims(c,-1) + c = c/(s1*(1+lambda_d*x[3]))/(s2*(1+lambda_d*x[4]))/(s3*(1+lambda_d*x[5])) + + + age = (a+b+c)*V + return age + + pred_a = Lambda(merge_age,arguments={'s1':self.stage_num[0],'s2':self.stage_num[1],'s3':self.stage_num[2],'lambda_local':self.lambda_local,'lambda_d':self.lambda_d},output_shape=(1,),name='pred_a')([pred_a_s1,pred_a_s2,pred_a_s3,delta_s1,delta_s2,delta_s3, local_s1, local_s2, local_s3]) + + model = Model(inputs=inputs, outputs=pred_a) + return model + + diff --git a/SSRNET/SSRNET_train.py b/SSRNET/SSRNET_train.py new file mode 100644 index 0000000..158799c --- /dev/null +++ b/SSRNET/SSRNET_train.py @@ -0,0 +1,126 @@ +import pandas as pd +import logging +import argparse +import os +os.environ['TF_CPP_MIN_LOG_LEVEL']='2' +import tensorflow as tf + +from keras.callbacks import ModelCheckpoint +from keras.optimizers import Adam +from SSRNET_model import SSR_net +from TYY_utils import mk_dir, load_data_npz +import numpy as np +import TYY_callbacks +from TYY_generators import * +from keras.utils import plot_model +from moviepy.editor import * +logging.basicConfig(level=logging.DEBUG) + + + + +def get_args(): + parser = argparse.ArgumentParser(description="This script trains the CNN model for age and gender estimation.", + formatter_class=argparse.ArgumentDefaultsHelpFormatter) + parser.add_argument("--input1", "-i1", type=str, required=True, + help="path to input1 database npz file") + parser.add_argument("--input2", "-i2", type=str, required=True, + help="path to input2 database npz file") + parser.add_argument("--db", type=str, required=True, + help="database name") + parser.add_argument("--batch_size", type=int, 
default=128, + help="batch size") + parser.add_argument("--nb_epochs", type=int, default=90, + help="number of epochs") + parser.add_argument("--netType1", type=int, required=True, + help="network type 1") + parser.add_argument("--netType2", type=int, required=True, + help="network type 2") + + args = parser.parse_args() + return args + + + +def main(): + args = get_args() + input_path1 = args.input1 + input_path2 = args.input2 + db_name = args.db + batch_size = args.batch_size + nb_epochs = args.nb_epochs + netType1 = args.netType1 + netType2 = args.netType2 + + logging.debug("Loading training data...") + image1, age1, image_size = load_data_npz(input_path1) + logging.debug("Loading testing data...") + image2, age2, image_size = load_data_npz(input_path2) + + + start_decay_epoch = [30,60] + + optMethod = Adam() + + stage_num = [3,3,3] + lambda_local = 0.25*(netType1%5) + lambda_d = 0.25*(netType2%5) + + model = SSR_net(image_size,stage_num, lambda_local, lambda_d)() + save_name = 'ssrnet_%d_%d_%d_%d_%s_%s' % (stage_num[0],stage_num[1],stage_num[2], image_size, lambda_local, lambda_d) + model.compile(optimizer=optMethod, loss=["mae"], metrics={'pred_a':'mae'}) + + if db_name == "megaage": + weight_file = "./pre-trained/wiki/ssrnet_3_3_3_64_1.0_1.0/ssrnet_3_3_3_64_1.0_1.0.h5" + model.load_weights(weight_file) + + logging.debug("Model summary...") + model.count_params() + model.summary() + + logging.debug("Saving model...") + mk_dir(db_name+"_models") + mk_dir(db_name+"_models/batch_size_%d/"%(batch_size)) + mk_dir(db_name+"_models/batch_size_%d/"%(batch_size)+save_name) + mk_dir(db_name+"_checkpoints") + mk_dir(db_name+"_checkpoints/batch_size_%d/"%(batch_size)) + plot_model(model, to_file=db_name+"_models/batch_size_%d/"%(batch_size)+save_name+"/"+save_name+".png") + + with open(os.path.join(db_name+"_models/batch_size_%d/"%(batch_size)+save_name, save_name+'.json'), "w") as f: + f.write(model.to_json()) + + decaylearningrate = TYY_callbacks.DecayLearningRate(start_decay_epoch) + + callbacks = [ModelCheckpoint(db_name+"_checkpoints/batch_size_%d/"%(batch_size)+"weights.{epoch:02d}-{val_loss:.2f}.hdf5", + monitor="val_loss", + verbose=1, + save_best_only=True, + mode="auto"), decaylearningrate + ] + logging.debug("Running training...") + + data_num = len(image1)+len(image2) + indexes1 = np.arange(len(image1)) + indexes2 = np.arange(len(image2)) + np.random.shuffle(indexes1) + np.random.shuffle(indexes2) + x_train = image1[indexes1] + x_test = image2[indexes2] + y_train_a = age1[indexes1] + y_test_a = age2[indexes2] + train_num = len(image1) + + hist = model.fit_generator(generator=data_generator_reg(X=x_train, Y=y_train_a, batch_size=batch_size), + steps_per_epoch=train_num // batch_size, + validation_data=(x_test, [y_test_a]), + epochs=nb_epochs, verbose=1, + callbacks=callbacks) + + logging.debug("Saving weights...") + model.save_weights(os.path.join(db_name+"_models/batch_size_%d/"%(batch_size)+save_name, save_name+'.h5'), overwrite=True) + pd.DataFrame(hist.history).to_hdf(os.path.join(db_name+"_models/batch_size_%d/"%(batch_size)+save_name, 'history_'+save_name+'.h5'), "history") + + +if __name__ == '__main__': + main() + diff --git a/SSRNET/TYY_callbacks.py b/SSRNET/TYY_callbacks.py new file mode 100644 index 0000000..075c70e --- /dev/null +++ b/SSRNET/TYY_callbacks.py @@ -0,0 +1,36 @@ +import keras +from sklearn.metrics import roc_auc_score +import sys +import matplotlib.pyplot as plt +from keras.models import Model +import numpy as np +from keras import backend as K + + +class 
DecayLearningRate(keras.callbacks.Callback): + def __init__(self, startEpoch): + self.startEpoch = startEpoch + + def on_train_begin(self, logs={}): + return + def on_train_end(self, logs={}): + return + + def on_epoch_begin(self, epoch, logs={}): + if epoch in self.startEpoch: + if epoch == 0: + ratio = 1 + else: + ratio = 0.1 + LR = K.get_value(self.model.optimizer.lr) + K.set_value(self.model.optimizer.lr,LR*ratio) + return + + def on_epoch_end(self, epoch, logs={}): + return + + def on_batch_begin(self, batch, logs={}): + return + + def on_batch_end(self, batch, logs={}): + return diff --git a/SSRNET/TYY_generators.py b/SSRNET/TYY_generators.py new file mode 100644 index 0000000..8f7e132 --- /dev/null +++ b/SSRNET/TYY_generators.py @@ -0,0 +1,102 @@ +import keras +import numpy as np +import sys +from scipy import misc +import tensorflow as tf + + +def random_crop(x,dn): + dx = np.random.randint(dn,size=1)[0] + dy = np.random.randint(dn,size=1)[0] + w = x.shape[0] + h = x.shape[1] + out = x[0+dx:w-(dn-dx),0+dy:h-(dn-dy),:] + out = misc.imresize(out, (w,h), interp='nearest') + return out + +def augment_data(images): + for i in range(0,images.shape[0]): + + if np.random.random() > 0.5: + images[i] = images[i][:,::-1] + """ + if np.random.random() > 0.5: + images[i] = random_crop(images[i],4) + """ + if np.random.random() > 0.75: + images[i] = tf.contrib.keras.preprocessing.image.random_rotation(images[i], 20, row_axis=0, col_axis=1, channel_axis=2) + if np.random.random() > 0.75: + images[i] = tf.contrib.keras.preprocessing.image.random_shear(images[i], 0.2, row_axis=0, col_axis=1, channel_axis=2) + if np.random.random() > 0.75: + images[i] = tf.contrib.keras.preprocessing.image.random_shift(images[i], 0.2, 0.2, row_axis=0, col_axis=1, channel_axis=2) + if np.random.random() > 0.75: + images[i] = tf.contrib.keras.preprocessing.image.random_zoom(images[i], [0.8,1.2], row_axis=0, col_axis=1, channel_axis=2) + + return images + + +def data_generator_reg(X,Y,batch_size): + + while True: + idxs = np.random.permutation(len(X)) + X = X[idxs] + Y = Y[idxs] + p,q = [],[] + for i in range(len(X)): + p.append(X[i]) + q.append(Y[i]) + if len(p) == batch_size: + yield augment_data(np.array(p)),np.array(q) + p,q = [],[] + if p: + yield augment_data(np.array(p)),np.array(q) + p,q = [],[] + +def data_generator_dex(X,Y,batch_size): + + Y1 = Y[0] + Y2 = Y[1] + + while True: + idxs = np.random.permutation(len(X)) + X = X[idxs] + Y1 = Y1[idxs] + Y2 = Y2[idxs] + p,q1,q2 = [],[],[] + for i in range(len(X)): + p.append(X[i]) + q1.append(Y1[i]) + q2.append(Y2[i]) + if len(p) == batch_size: + yield augment_data(np.array(p)),[np.array(q1),np.array(q2)] + p,q1,q2 = [],[],[] + if p: + yield augment_data(np.array(p)),[np.array(q1),np.array(q2)] + p,q1,q2 = [],[],[] + +def data_generator_dex_centerloss(X,Y,batch_size): + X1 = X[0] + X2 = X[1] + Y1 = Y[0] + Y2 = Y[1] + Y3 = Y[2] + while True: + idxs = np.random.permutation(len(X1)) + X1 = X1[idxs] #images + X2 = X2[idxs] #labels for center loss + Y1 = Y1[idxs] + Y2 = Y2[idxs] + Y3 = Y3[idxs] + p1,p2,q1,q2,q3 = [],[],[],[],[] + for i in range(len(X1)): + p1.append(X1[i]) + p2.append(X2[i]) + q1.append(Y1[i]) + q2.append(Y2[i]) + q3.append(Y3[i]) + if len(p1) == batch_size: + yield [augment_data(np.array(p1)),np.array(p2)],[np.array(q1),np.array(q2),np.array(q3)] + p1,p2,q1,q2,q3 = [],[],[],[],[] + if p1: + yield [augment_data(np.array(p1)),np.array(p2)],[np.array(q1),np.array(q2),np.array(q3)] + p1,p2,q1,q2,q3 = [],[],[],[],[] \ No newline at end of file diff --git 
a/SSRNET/TYY_utils.py b/SSRNET/TYY_utils.py new file mode 100644 index 0000000..9329178 --- /dev/null +++ b/SSRNET/TYY_utils.py @@ -0,0 +1,51 @@ +# modifided from https://github.com/yu4u/age-gender-estimation + +from scipy.io import loadmat +from datetime import datetime +import os +import numpy as np + +def calc_age(taken, dob): + birth = datetime.fromordinal(max(int(dob) - 366, 1)) + + # assume the photo was taken in the middle of the year + if birth.month < 7: + return taken - birth.year + else: + return taken - birth.year - 1 + + +def get_meta(mat_path, db): + meta = loadmat(mat_path) + full_path = meta[db][0, 0]["full_path"][0] + dob = meta[db][0, 0]["dob"][0] # Matlab serial date number + gender = meta[db][0, 0]["gender"][0] + photo_taken = meta[db][0, 0]["photo_taken"][0] # year + face_score = meta[db][0, 0]["face_score"][0] + second_face_score = meta[db][0, 0]["second_face_score"][0] + age = [calc_age(photo_taken[i], dob[i]) for i in range(len(dob))] + + return full_path, dob, gender, photo_taken, face_score, second_face_score, age + + +def load_data(mat_path): + d = loadmat(mat_path) + + return d["image"], d["gender"][0], d["age"][0], d["db"][0], d["img_size"][0, 0], d["min_score"][0, 0] + +''' +def load_MORPH_data_npz(npz_path): + d = np.load(npz_path) + + return d["image"], d["gender"], d["age"], d["img_size"] +''' +def load_data_npz(npz_path): + d = np.load(npz_path) + + return d["image"], d["age"], d["img_size"] + +def mk_dir(dir): + try: + os.mkdir( dir ) + except OSError: + pass diff --git a/SSRNET/densenet.py b/SSRNET/densenet.py new file mode 100644 index 0000000..19101bb --- /dev/null +++ b/SSRNET/densenet.py @@ -0,0 +1,785 @@ +#https://github.com/titu1994/DenseNet +'''DenseNet models for Keras. +# Reference +- [Densely Connected Convolutional Networks](https://arxiv.org/pdf/1608.06993.pdf) +- [The One Hundred Layers Tiramisu: Fully Convolutional DenseNets for Semantic Segmentation](https://arxiv.org/pdf/1611.09326.pdf) +''' +from __future__ import print_function +from __future__ import absolute_import +from __future__ import division + +import warnings + +from keras.models import Model +from keras.layers.core import Dense, Dropout, Activation, Reshape +from keras.layers.convolutional import Conv2D, Conv2DTranspose, UpSampling2D +from keras.layers.pooling import AveragePooling2D, MaxPooling2D +from keras.layers.pooling import GlobalAveragePooling2D +from keras.layers import Input +from keras.layers.merge import concatenate +from keras.layers.normalization import BatchNormalization +from keras.regularizers import l2 +from keras.utils.layer_utils import convert_all_kernels_in_model, convert_dense_weights_data_format +from keras.utils.data_utils import get_file +from keras.engine.topology import get_source_inputs +from keras.applications.imagenet_utils import _obtain_input_shape +from keras.applications.imagenet_utils import decode_predictions +import keras.backend as K + +from subpixel import SubPixelUpscaling + +DENSENET_121_WEIGHTS_PATH = r'https://github.com/titu1994/DenseNet/releases/download/v3.0/DenseNet-BC-121-32.h5' +DENSENET_161_WEIGHTS_PATH = r'https://github.com/titu1994/DenseNet/releases/download/v3.0/DenseNet-BC-161-48.h5' +DENSENET_169_WEIGHTS_PATH = r'https://github.com/titu1994/DenseNet/releases/download/v3.0/DenseNet-BC-169-32.h5' +DENSENET_121_WEIGHTS_PATH_NO_TOP = r'https://github.com/titu1994/DenseNet/releases/download/v3.0/DenseNet-BC-121-32-no-top.h5' +DENSENET_161_WEIGHTS_PATH_NO_TOP = 
r'https://github.com/titu1994/DenseNet/releases/download/v3.0/DenseNet-BC-161-48-no-top.h5' +DENSENET_169_WEIGHTS_PATH_NO_TOP = r'https://github.com/titu1994/DenseNet/releases/download/v3.0/DenseNet-BC-169-32-no-top.h5' + +def preprocess_input(x, data_format=None): + """Preprocesses a tensor encoding a batch of images. + + # Arguments + x: input Numpy tensor, 4D. + data_format: data format of the image tensor. + + # Returns + Preprocessed tensor. + """ + if data_format is None: + data_format = K.image_data_format() + assert data_format in {'channels_last', 'channels_first'} + + if data_format == 'channels_first': + if x.ndim == 3: + # 'RGB'->'BGR' + x = x[::-1, ...] + # Zero-center by mean pixel + x[0, :, :] -= 103.939 + x[1, :, :] -= 116.779 + x[2, :, :] -= 123.68 + else: + x = x[:, ::-1, ...] + x[:, 0, :, :] -= 103.939 + x[:, 1, :, :] -= 116.779 + x[:, 2, :, :] -= 123.68 + else: + # 'RGB'->'BGR' + x = x[..., ::-1] + # Zero-center by mean pixel + x[..., 0] -= 103.939 + x[..., 1] -= 116.779 + x[..., 2] -= 123.68 + + x *= 0.017 # scale values + + return x + + +def DenseNet(input_shape=None, depth=40, nb_dense_block=3, growth_rate=12, nb_filter=-1, nb_layers_per_block=-1, + bottleneck=False, reduction=0.0, dropout_rate=0.0, weight_decay=1e-4, subsample_initial_block=False, + include_top=True, weights=None, input_tensor=None, + classes=10, activation='softmax'): + '''Instantiate the DenseNet architecture, + optionally loading weights pre-trained + on CIFAR-10. Note that when using TensorFlow, + for best performance you should set + `image_data_format='channels_last'` in your Keras config + at ~/.keras/keras.json. + The model and the weights are compatible with both + TensorFlow and Theano. The dimension ordering + convention used by the model is the one + specified in your Keras config file. + # Arguments + input_shape: optional shape tuple, only to be specified + if `include_top` is False (otherwise the input shape + has to be `(32, 32, 3)` (with `channels_last` dim ordering) + or `(3, 32, 32)` (with `channels_first` dim ordering). + It should have exactly 3 inputs channels, + and width and height should be no smaller than 8. + E.g. `(200, 200, 3)` would be one valid value. + depth: number or layers in the DenseNet + nb_dense_block: number of dense blocks to add to end (generally = 3) + growth_rate: number of filters to add per dense block + nb_filter: initial number of filters. -1 indicates initial + number of filters is 2 * growth_rate + nb_layers_per_block: number of layers in each dense block. + Can be a -1, positive integer or a list. + If -1, calculates nb_layer_per_block from the network depth. + If positive integer, a set number of layers per dense block. + If list, nb_layer is used as provided. Note that list size must + be (nb_dense_block + 1) + bottleneck: flag to add bottleneck blocks in between dense blocks + reduction: reduction factor of transition blocks. + Note : reduction value is inverted to compute compression. + dropout_rate: dropout rate + weight_decay: weight decay rate + subsample_initial_block: Set to True to subsample the initial convolution and + add a MaxPool2D before the dense blocks are added. + include_top: whether to include the fully-connected + layer at the top of the network. + weights: one of `None` (random initialization) or + 'imagenet' (pre-training on ImageNet).. + input_tensor: optional Keras tensor (i.e. output of `layers.Input()`) + to use as image input for the model. 
+ classes: optional number of classes to classify images + into, only to be specified if `include_top` is True, and + if no `weights` argument is specified. + activation: Type of activation at the top layer. Can be one of 'softmax' or 'sigmoid'. + Note that if sigmoid is used, classes must be 1. + # Returns + A Keras model instance. + ''' + + if weights not in {'imagenet', None}: + raise ValueError('The `weights` argument should be either ' + '`None` (random initialization) or `cifar10` ' + '(pre-training on CIFAR-10).') + + if weights == 'imagenet' and include_top and classes != 1000: + raise ValueError('If using `weights` as ImageNet with `include_top`' + ' as true, `classes` should be 1000') + + if activation not in ['softmax', 'sigmoid']: + raise ValueError('activation must be one of "softmax" or "sigmoid"') + + if activation == 'sigmoid' and classes != 1: + raise ValueError('sigmoid activation can only be used when classes = 1') + """ + # Determine proper input shape + input_shape = _obtain_input_shape(input_shape, + default_size=32, + min_size=8, + data_format=K.image_data_format(), + require_flatten=include_top) + """ + if input_tensor is None: + img_input = Input(shape=input_shape) + else: + if not K.is_keras_tensor(input_tensor): + img_input = Input(tensor=input_tensor, shape=input_shape) + else: + img_input = input_tensor + + x = __create_dense_net(classes, img_input, include_top, depth, nb_dense_block, + growth_rate, nb_filter, nb_layers_per_block, bottleneck, reduction, + dropout_rate, weight_decay, subsample_initial_block, activation) + + # Ensure that the model takes into account + # any potential predecessors of `input_tensor`. + if input_tensor is not None: + inputs = get_source_inputs(input_tensor) + else: + inputs = img_input + # Create model. 
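For reference, a minimal way to call the DenseNet builder defined above once this function is available; the small 64x64, single-output configuration shown here is illustrative only (depth = 3*N + 4 with N = 3, classes = 1 with a sigmoid top) and is not fixed by this diff:

    from densenet import DenseNet

    # Small DenseNet for 64x64 RGB inputs; weights=None skips the ImageNet loading handled below.
    small_densenet = DenseNet(input_shape=(64, 64, 3), depth=13, nb_dense_block=3,
                              growth_rate=12, nb_filter=-1, bottleneck=False,
                              reduction=0.0, dropout_rate=0.0, include_top=True,
                              weights=None, classes=1, activation='sigmoid')
    small_densenet.summary()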
+ model = Model(inputs, x, name='densenet') + + # load weights + if weights == 'imagenet': + weights_loaded = False + + if (depth == 121) and (nb_dense_block == 4) and (growth_rate == 32) and (nb_filter == 64) and \ + (bottleneck is True) and (reduction == 0.5) and (dropout_rate == 0.0) and (subsample_initial_block): + if include_top: + weights_path = get_file('DenseNet-BC-121-32.h5', + DENSENET_121_WEIGHTS_PATH, + cache_subdir='models', + md5_hash='a439dd41aa672aef6daba4ee1fd54abd') + else: + weights_path = get_file('DenseNet-BC-121-32-no-top.h5', + DENSENET_121_WEIGHTS_PATH_NO_TOP, + cache_subdir='models', + md5_hash='55e62a6358af8a0af0eedf399b5aea99') + model.load_weights(weights_path) + weights_loaded = True + + if (depth == 161) and (nb_dense_block == 4) and (growth_rate == 48) and (nb_filter == 96) and \ + (bottleneck is True) and (reduction == 0.5) and (dropout_rate == 0.0) and (subsample_initial_block): + if include_top: + weights_path = get_file('DenseNet-BC-161-48.h5', + DENSENET_161_WEIGHTS_PATH, + cache_subdir='models', + md5_hash='6c326cf4fbdb57d31eff04333a23fcca') + else: + weights_path = get_file('DenseNet-BC-161-48-no-top.h5', + DENSENET_161_WEIGHTS_PATH_NO_TOP, + cache_subdir='models', + md5_hash='1a9476b79f6b7673acaa2769e6427b92') + model.load_weights(weights_path) + weights_loaded = True + + if (depth == 169) and (nb_dense_block == 4) and (growth_rate == 32) and (nb_filter == 64) and \ + (bottleneck is True) and (reduction == 0.5) and (dropout_rate == 0.0) and (subsample_initial_block): + if include_top: + weights_path = get_file('DenseNet-BC-169-32.h5', + DENSENET_169_WEIGHTS_PATH, + cache_subdir='models', + md5_hash='914869c361303d2e39dec640b4e606a6') + else: + weights_path = get_file('DenseNet-BC-169-32-no-top.h5', + DENSENET_169_WEIGHTS_PATH_NO_TOP, + cache_subdir='models', + md5_hash='89c19e8276cfd10585d5fadc1df6859e') + model.load_weights(weights_path) + weights_loaded = True + + if weights_loaded: + if K.backend() == 'theano': + convert_all_kernels_in_model(model) + + if K.image_data_format() == 'channels_first' and K.backend() == 'tensorflow': + warnings.warn('You are using the TensorFlow backend, yet you ' + 'are using the Theano ' + 'image data format convention ' + '(`image_data_format="channels_first"`). ' + 'For best performance, set ' + '`image_data_format="channels_last"` in ' + 'your Keras config ' + 'at ~/.keras/keras.json.') + + print("Weights for the model were loaded successfully") + + return model + + +def DenseNetFCN(input_shape, nb_dense_block=5, growth_rate=16, nb_layers_per_block=4, + reduction=0.0, dropout_rate=0.0, weight_decay=1e-4, init_conv_filters=48, + include_top=True, weights=None, input_tensor=None, classes=1, activation='softmax', + upsampling_conv=128, upsampling_type='deconv'): + '''Instantiate the DenseNet FCN architecture. + Note that when using TensorFlow, + for best performance you should set + `image_data_format='channels_last'` in your Keras config + at ~/.keras/keras.json. + # Arguments + nb_dense_block: number of dense blocks to add to end (generally = 3) + growth_rate: number of filters to add per dense block + nb_layers_per_block: number of layers in each dense block. + Can be a positive integer or a list. + If positive integer, a set number of layers per dense block. + If list, nb_layer is used as provided. Note that list size must + be (nb_dense_block + 1) + reduction: reduction factor of transition blocks. + Note : reduction value is inverted to compute compression. 
+ dropout_rate: dropout rate + init_conv_filters: number of layers in the initial convolution layer + include_top: whether to include the fully-connected + layer at the top of the network. + weights: one of `None` (random initialization) or + 'cifar10' (pre-training on CIFAR-10).. + input_tensor: optional Keras tensor (i.e. output of `layers.Input()`) + to use as image input for the model. + input_shape: optional shape tuple, only to be specified + if `include_top` is False (otherwise the input shape + has to be `(32, 32, 3)` (with `channels_last` dim ordering) + or `(3, 32, 32)` (with `channels_first` dim ordering). + It should have exactly 3 inputs channels, + and width and height should be no smaller than 8. + E.g. `(200, 200, 3)` would be one valid value. + classes: optional number of classes to classify images + into, only to be specified if `include_top` is True, and + if no `weights` argument is specified. + activation: Type of activation at the top layer. Can be one of 'softmax' or 'sigmoid'. + Note that if sigmoid is used, classes must be 1. + upsampling_conv: number of convolutional layers in upsampling via subpixel convolution + upsampling_type: Can be one of 'upsampling', 'deconv' and + 'subpixel'. Defines type of upsampling algorithm used. + batchsize: Fixed batch size. This is a temporary requirement for + computation of output shape in the case of Deconvolution2D layers. + Parameter will be removed in next iteration of Keras, which infers + output shape of deconvolution layers automatically. + # Returns + A Keras model instance. + ''' + + if weights not in {None}: + raise ValueError('The `weights` argument should be ' + '`None` (random initialization) as no ' + 'model weights are provided.') + + upsampling_type = upsampling_type.lower() + + if upsampling_type not in ['upsampling', 'deconv', 'subpixel']: + raise ValueError('Parameter "upsampling_type" must be one of "upsampling", ' + '"deconv" or "subpixel".') + + if input_shape is None: + raise ValueError('For fully convolutional models, input shape must be supplied.') + + if type(nb_layers_per_block) is not list and nb_dense_block < 1: + raise ValueError('Number of dense layers per block must be greater than 1. Argument ' + 'value was %d.' 
% (nb_layers_per_block)) + + if activation not in ['softmax', 'sigmoid']: + raise ValueError('activation must be one of "softmax" or "sigmoid"') + + if activation == 'sigmoid' and classes != 1: + raise ValueError('sigmoid activation can only be used when classes = 1') + + # Determine proper input shape + min_size = 2 ** nb_dense_block + + if K.image_data_format() == 'channels_first': + if input_shape is not None: + if ((input_shape[1] is not None and input_shape[1] < min_size) or + (input_shape[2] is not None and input_shape[2] < min_size)): + raise ValueError('Input size must be at least ' + + str(min_size) + 'x' + str(min_size) + ', got ' + '`input_shape=' + str(input_shape) + '`') + else: + input_shape = (classes, None, None) + else: + if input_shape is not None: + if ((input_shape[0] is not None and input_shape[0] < min_size) or + (input_shape[1] is not None and input_shape[1] < min_size)): + raise ValueError('Input size must be at least ' + + str(min_size) + 'x' + str(min_size) + ', got ' + '`input_shape=' + str(input_shape) + '`') + else: + input_shape = (None, None, classes) + + if input_tensor is None: + img_input = Input(shape=input_shape) + else: + if not K.is_keras_tensor(input_tensor): + img_input = Input(tensor=input_tensor, shape=input_shape) + else: + img_input = input_tensor + + x = __create_fcn_dense_net(classes, img_input, include_top, nb_dense_block, + growth_rate, reduction, dropout_rate, weight_decay, + nb_layers_per_block, upsampling_conv, upsampling_type, + init_conv_filters, input_shape, activation) + + # Ensure that the model takes into account + # any potential predecessors of `input_tensor`. + if input_tensor is not None: + inputs = get_source_inputs(input_tensor) + else: + inputs = img_input + # Create model. + model = Model(inputs, x, name='fcn-densenet') + + return model + + +def DenseNetImageNet121(input_shape=None, + bottleneck=True, + reduction=0.5, + dropout_rate=0.0, + weight_decay=1e-4, + include_top=True, + weights='imagenet', + input_tensor=None, + classes=1000, + activation='softmax'): + return DenseNet(input_shape, depth=121, nb_dense_block=4, growth_rate=32, nb_filter=64, + nb_layers_per_block=[6, 12, 24, 16], bottleneck=bottleneck, reduction=reduction, + dropout_rate=dropout_rate, weight_decay=weight_decay, subsample_initial_block=True, + include_top=include_top, weights=weights, input_tensor=input_tensor, + classes=classes, activation=activation) + + +def DenseNetImageNet169(input_shape=None, + bottleneck=True, + reduction=0.5, + dropout_rate=0.0, + weight_decay=1e-4, + include_top=True, + weights='imagenet', + input_tensor=None, + classes=1000, + activation='softmax'): + return DenseNet(input_shape, depth=169, nb_dense_block=4, growth_rate=32, nb_filter=64, + nb_layers_per_block=[6, 12, 32, 32], bottleneck=bottleneck, reduction=reduction, + dropout_rate=dropout_rate, weight_decay=weight_decay, subsample_initial_block=True, + include_top=include_top, weights=weights, input_tensor=input_tensor, + classes=classes, activation=activation) + + +def DenseNetImageNet201(input_shape=None, + bottleneck=True, + reduction=0.5, + dropout_rate=0.0, + weight_decay=1e-4, + include_top=True, + weights=None, + input_tensor=None, + classes=1000, + activation='softmax'): + return DenseNet(input_shape, depth=201, nb_dense_block=4, growth_rate=32, nb_filter=64, + nb_layers_per_block=[6, 12, 48, 32], bottleneck=bottleneck, reduction=reduction, + dropout_rate=dropout_rate, weight_decay=weight_decay, subsample_initial_block=True, + include_top=include_top, 
weights=weights, input_tensor=input_tensor, + classes=classes, activation=activation) + + +def DenseNetImageNet264(input_shape=None, + bottleneck=True, + reduction=0.5, + dropout_rate=0.0, + weight_decay=1e-4, + include_top=True, + weights=None, + input_tensor=None, + classes=1000, + activation='softmax'): + return DenseNet(input_shape, depth=201, nb_dense_block=4, growth_rate=32, nb_filter=64, + nb_layers_per_block=[6, 12, 64, 48], bottleneck=bottleneck, reduction=reduction, + dropout_rate=dropout_rate, weight_decay=weight_decay, subsample_initial_block=True, + include_top=include_top, weights=weights, input_tensor=input_tensor, + classes=classes, activation=activation) + + +def DenseNetImageNet161(input_shape=None, + bottleneck=True, + reduction=0.5, + dropout_rate=0.0, + weight_decay=1e-4, + include_top=True, + weights='imagenet', + input_tensor=None, + classes=1000, + activation='softmax'): + return DenseNet(input_shape, depth=161, nb_dense_block=4, growth_rate=48, nb_filter=96, + nb_layers_per_block=[6, 12, 36, 24], bottleneck=bottleneck, reduction=reduction, + dropout_rate=dropout_rate, weight_decay=weight_decay, subsample_initial_block=True, + include_top=include_top, weights=weights, input_tensor=input_tensor, + classes=classes, activation=activation) + + +def __conv_block(ip, nb_filter, bottleneck=False, dropout_rate=None, weight_decay=1e-4): + ''' Apply BatchNorm, Relu, 3x3 Conv2D, optional bottleneck block and dropout + Args: + ip: Input keras tensor + nb_filter: number of filters + bottleneck: add bottleneck block + dropout_rate: dropout rate + weight_decay: weight decay factor + Returns: keras tensor with batch_norm, relu and convolution2d added (optional bottleneck) + ''' + concat_axis = 1 if K.image_data_format() == 'channels_first' else -1 + + x = BatchNormalization(axis=concat_axis, epsilon=1.1e-5)(ip) + x = Activation('relu')(x) + + if bottleneck: + inter_channel = nb_filter * 4 # Obtained from https://github.com/liuzhuang13/DenseNet/blob/master/densenet.lua + + x = Conv2D(inter_channel, (1, 1), kernel_initializer='he_normal', padding='same', use_bias=False, + kernel_regularizer=l2(weight_decay))(x) + x = BatchNormalization(axis=concat_axis, epsilon=1.1e-5)(x) + x = Activation('relu')(x) + + x = Conv2D(nb_filter, (3, 3), kernel_initializer='he_normal', padding='same', use_bias=False)(x) + if dropout_rate: + x = Dropout(dropout_rate)(x) + + return x + + +def __dense_block(x, nb_layers, nb_filter, growth_rate, bottleneck=False, dropout_rate=None, weight_decay=1e-4, + grow_nb_filters=True, return_concat_list=False): + ''' Build a dense_block where the output of each conv_block is fed to subsequent ones + Args: + x: keras tensor + nb_layers: the number of layers of conv_block to append to the model. 
+ nb_filter: number of filters + growth_rate: growth rate + bottleneck: bottleneck block + dropout_rate: dropout rate + weight_decay: weight decay factor + grow_nb_filters: flag to decide to allow number of filters to grow + return_concat_list: return the list of feature maps along with the actual output + Returns: keras tensor with nb_layers of conv_block appended + ''' + concat_axis = 1 if K.image_data_format() == 'channels_first' else -1 + + x_list = [x] + + for i in range(nb_layers): + cb = __conv_block(x, growth_rate, bottleneck, dropout_rate, weight_decay) + x_list.append(cb) + + x = concatenate([x, cb], axis=concat_axis) + + if grow_nb_filters: + nb_filter += growth_rate + + if return_concat_list: + return x, nb_filter, x_list + else: + return x, nb_filter + + +def __transition_block(ip, nb_filter, compression=1.0, weight_decay=1e-4): + ''' Apply BatchNorm, Relu 1x1, Conv2D, optional compression, dropout and Maxpooling2D + Args: + ip: keras tensor + nb_filter: number of filters + compression: calculated as 1 - reduction. Reduces the number of feature maps + in the transition block. + dropout_rate: dropout rate + weight_decay: weight decay factor + Returns: keras tensor, after applying batch_norm, relu-conv, dropout, maxpool + ''' + concat_axis = 1 if K.image_data_format() == 'channels_first' else -1 + + x = BatchNormalization(axis=concat_axis, epsilon=1.1e-5)(ip) + x = Activation('relu')(x) + x = Conv2D(int(nb_filter * compression), (1, 1), kernel_initializer='he_normal', padding='same', use_bias=False, + kernel_regularizer=l2(weight_decay))(x) + x = AveragePooling2D((2, 2), strides=(2, 2))(x) + + return x + + +def __transition_up_block(ip, nb_filters, type='deconv', weight_decay=1E-4): + ''' SubpixelConvolutional Upscaling (factor = 2) + Args: + ip: keras tensor + nb_filters: number of layers + type: can be 'upsampling', 'subpixel', 'deconv'. Determines type of upsampling performed + weight_decay: weight decay factor + Returns: keras tensor, after applying upsampling operation. + ''' + + if type == 'upsampling': + x = UpSampling2D()(ip) + elif type == 'subpixel': + x = Conv2D(nb_filters, (3, 3), activation='relu', padding='same', kernel_regularizer=l2(weight_decay), + use_bias=False, kernel_initializer='he_normal')(ip) + x = SubPixelUpscaling(scale_factor=2)(x) + x = Conv2D(nb_filters, (3, 3), activation='relu', padding='same', kernel_regularizer=l2(weight_decay), + use_bias=False, kernel_initializer='he_normal')(x) + else: + x = Conv2DTranspose(nb_filters, (3, 3), activation='relu', padding='same', strides=(2, 2), + kernel_initializer='he_normal', kernel_regularizer=l2(weight_decay))(ip) + + return x + + +def __create_dense_net(nb_classes, img_input, include_top, depth=40, nb_dense_block=3, growth_rate=12, nb_filter=-1, + nb_layers_per_block=-1, bottleneck=False, reduction=0.0, dropout_rate=None, weight_decay=1e-4, + subsample_initial_block=False, activation='softmax'): + ''' Build the DenseNet model + Args: + nb_classes: number of classes + img_input: tuple of shape (channels, rows, columns) or (rows, columns, channels) + include_top: flag to include the final Dense layer + depth: number or layers + nb_dense_block: number of dense blocks to add to end (generally = 3) + growth_rate: number of filters to add per dense block + nb_filter: initial number of filters. Default -1 indicates initial number of filters is 2 * growth_rate + nb_layers_per_block: number of layers in each dense block. + Can be a -1, positive integer or a list. 
+ If -1, calculates nb_layer_per_block from the depth of the network. + If positive integer, a set number of layers per dense block. + If list, nb_layer is used as provided. Note that list size must + be (nb_dense_block + 1) + bottleneck: add bottleneck blocks + reduction: reduction factor of transition blocks. Note : reduction value is inverted to compute compression + dropout_rate: dropout rate + weight_decay: weight decay rate + subsample_initial_block: Set to True to subsample the initial convolution and + add a MaxPool2D before the dense blocks are added. + subsample_initial: + activation: Type of activation at the top layer. Can be one of 'softmax' or 'sigmoid'. + Note that if sigmoid is used, classes must be 1. + Returns: keras tensor with nb_layers of conv_block appended + ''' + + concat_axis = 1 if K.image_data_format() == 'channels_first' else -1 + + if reduction != 0.0: + assert reduction <= 1.0 and reduction > 0.0, 'reduction value must lie between 0.0 and 1.0' + + # layers in each dense block + if type(nb_layers_per_block) is list or type(nb_layers_per_block) is tuple: + nb_layers = list(nb_layers_per_block) # Convert tuple to list + + assert len(nb_layers) == (nb_dense_block), 'If list, nb_layer is used as provided. ' \ + 'Note that list size must be (nb_dense_block)' + final_nb_layer = nb_layers[-1] + nb_layers = nb_layers[:-1] + else: + if nb_layers_per_block == -1: + assert (depth - 4) % 3 == 0, 'Depth must be 3 N + 4 if nb_layers_per_block == -1' + count = int((depth - 4) / 3) + nb_layers = [count for _ in range(nb_dense_block)] + final_nb_layer = count + else: + final_nb_layer = nb_layers_per_block + nb_layers = [nb_layers_per_block] * nb_dense_block + + # compute initial nb_filter if -1, else accept users initial nb_filter + if nb_filter <= 0: + nb_filter = 2 * growth_rate + + # compute compression factor + compression = 1.0 - reduction + + # Initial convolution + if subsample_initial_block: + initial_kernel = (7, 7) + initial_strides = (2, 2) + else: + initial_kernel = (3, 3) + initial_strides = (1, 1) + + x = Conv2D(nb_filter, initial_kernel, kernel_initializer='he_normal', padding='same', + strides=initial_strides, use_bias=False, kernel_regularizer=l2(weight_decay))(img_input) + + if subsample_initial_block: + x = BatchNormalization(axis=concat_axis, epsilon=1.1e-5)(x) + x = Activation('relu')(x) + x = MaxPooling2D((3, 3), strides=(2, 2), padding='same')(x) + + # Add dense blocks + for block_idx in range(nb_dense_block - 1): + x, nb_filter = __dense_block(x, nb_layers[block_idx], nb_filter, growth_rate, bottleneck=bottleneck, + dropout_rate=dropout_rate, weight_decay=weight_decay) + # add transition_block + x = __transition_block(x, nb_filter, compression=compression, weight_decay=weight_decay) + nb_filter = int(nb_filter * compression) + + # The last dense_block does not have a transition_block + x, nb_filter = __dense_block(x, final_nb_layer, nb_filter, growth_rate, bottleneck=bottleneck, + dropout_rate=dropout_rate, weight_decay=weight_decay) + + x = BatchNormalization(axis=concat_axis, epsilon=1.1e-5)(x) + x = Activation('relu')(x) + x = GlobalAveragePooling2D()(x) + + if include_top: + x = Dense(nb_classes, activation=activation)(x) + + return x + + +def __create_fcn_dense_net(nb_classes, img_input, include_top, nb_dense_block=5, growth_rate=12, + reduction=0.0, dropout_rate=None, weight_decay=1e-4, + nb_layers_per_block=4, nb_upsampling_conv=128, upsampling_type='upsampling', + init_conv_filters=48, input_shape=None, activation='deconv'): + ''' Build the 
DenseNet model + Args: + nb_classes: number of classes + img_input: tuple of shape (channels, rows, columns) or (rows, columns, channels) + include_top: flag to include the final Dense layer + nb_dense_block: number of dense blocks to add to end (generally = 3) + growth_rate: number of filters to add per dense block + reduction: reduction factor of transition blocks. Note : reduction value is inverted to compute compression + dropout_rate: dropout rate + weight_decay: weight decay + nb_layers_per_block: number of layers in each dense block. + Can be a positive integer or a list. + If positive integer, a set number of layers per dense block. + If list, nb_layer is used as provided. Note that list size must + be (nb_dense_block + 1) + nb_upsampling_conv: number of convolutional layers in upsampling via subpixel convolution + upsampling_type: Can be one of 'upsampling', 'deconv' and 'subpixel'. Defines + type of upsampling algorithm used. + input_shape: Only used for shape inference in fully convolutional networks. + activation: Type of activation at the top layer. Can be one of 'softmax' or 'sigmoid'. + Note that if sigmoid is used, classes must be 1. + Returns: keras tensor with nb_layers of conv_block appended + ''' + + concat_axis = 1 if K.image_data_format() == 'channels_first' else -1 + + if concat_axis == 1: # channels_first dim ordering + _, rows, cols = input_shape + else: + rows, cols, _ = input_shape + + if reduction != 0.0: + assert reduction <= 1.0 and reduction > 0.0, 'reduction value must lie between 0.0 and 1.0' + + # check if upsampling_conv has minimum number of filters + # minimum is set to 12, as at least 3 color channels are needed for correct upsampling + assert nb_upsampling_conv > 12 and nb_upsampling_conv % 4 == 0, 'Parameter `upsampling_conv` number of channels must ' \ + 'be a positive number divisible by 4 and greater ' \ + 'than 12' + + # layers in each dense block + if type(nb_layers_per_block) is list or type(nb_layers_per_block) is tuple: + nb_layers = list(nb_layers_per_block) # Convert tuple to list + + assert len(nb_layers) == (nb_dense_block + 1), 'If list, nb_layer is used as provided. 
' \ + 'Note that list size must be (nb_dense_block + 1)' + + bottleneck_nb_layers = nb_layers[-1] + rev_layers = nb_layers[::-1] + nb_layers.extend(rev_layers[1:]) + else: + bottleneck_nb_layers = nb_layers_per_block + nb_layers = [nb_layers_per_block] * (2 * nb_dense_block + 1) + + # compute compression factor + compression = 1.0 - reduction + + # Initial convolution + x = Conv2D(init_conv_filters, (7, 7), kernel_initializer='he_normal', padding='same', name='initial_conv2D', + use_bias=False, kernel_regularizer=l2(weight_decay))(img_input) + x = BatchNormalization(axis=concat_axis, epsilon=1.1e-5)(x) + x = Activation('relu')(x) + + nb_filter = init_conv_filters + + skip_list = [] + + # Add dense blocks and transition down block + for block_idx in range(nb_dense_block): + x, nb_filter = __dense_block(x, nb_layers[block_idx], nb_filter, growth_rate, dropout_rate=dropout_rate, + weight_decay=weight_decay) + + # Skip connection + skip_list.append(x) + + # add transition_block + x = __transition_block(x, nb_filter, compression=compression, weight_decay=weight_decay) + + nb_filter = int(nb_filter * compression) # this is calculated inside transition_down_block + + # The last dense_block does not have a transition_down_block + # return the concatenated feature maps without the concatenation of the input + _, nb_filter, concat_list = __dense_block(x, bottleneck_nb_layers, nb_filter, growth_rate, + dropout_rate=dropout_rate, weight_decay=weight_decay, + return_concat_list=True) + + skip_list = skip_list[::-1] # reverse the skip list + + # Add dense blocks and transition up block + for block_idx in range(nb_dense_block): + n_filters_keep = growth_rate * nb_layers[nb_dense_block + block_idx] + + # upsampling block must upsample only the feature maps (concat_list[1:]), + # not the concatenation of the input with the feature maps (concat_list[0]. 
+ l = concatenate(concat_list[1:], axis=concat_axis) + + t = __transition_up_block(l, nb_filters=n_filters_keep, type=upsampling_type, weight_decay=weight_decay) + + # concatenate the skip connection with the transition block + x = concatenate([t, skip_list[block_idx]], axis=concat_axis) + + # Dont allow the feature map size to grow in upsampling dense blocks + x_up, nb_filter, concat_list = __dense_block(x, nb_layers[nb_dense_block + block_idx + 1], nb_filter=growth_rate, + growth_rate=growth_rate, dropout_rate=dropout_rate, + weight_decay=weight_decay, return_concat_list=True, + grow_nb_filters=False) + + if include_top: + x = Conv2D(nb_classes, (1, 1), activation='linear', padding='same', use_bias=False)(x_up) + + if K.image_data_format() == 'channels_first': + channel, row, col = input_shape + else: + row, col, channel = input_shape + + x = Reshape((row * col, nb_classes))(x) + x = Activation(activation)(x) + x = Reshape((row, col, nb_classes))(x) + else: + x = x_up + + return x + diff --git a/SSRNET/subpixel.py b/SSRNET/subpixel.py new file mode 100644 index 0000000..3057157 --- /dev/null +++ b/SSRNET/subpixel.py @@ -0,0 +1,81 @@ +#https://github.com/titu1994/DenseNet +from __future__ import absolute_import + +from keras import backend as K +from keras.engine import Layer +from keras.utils.generic_utils import get_custom_objects +from keras.utils.conv_utils import normalize_data_format + +if K.backend() == 'theano': + import theano_backend as K_BACKEND +else: + import tensorflow_backend as K_BACKEND + +class SubPixelUpscaling(Layer): + """ Sub-pixel convolutional upscaling layer based on the paper "Real-Time Single Image + and Video Super-Resolution Using an Efficient Sub-Pixel Convolutional Neural Network" + (https://arxiv.org/abs/1609.05158). + This layer requires a Convolution2D prior to it, having output filters computed according to + the formula : + filters = k * (scale_factor * scale_factor) + where k = a user defined number of filters (generally larger than 32) + scale_factor = the upscaling factor (generally 2) + This layer performs the depth to space operation on the convolution filters, and returns a + tensor with the size as defined below. + # Example : + ```python + # A standard subpixel upscaling block + x = Convolution2D(256, 3, 3, padding='same', activation='relu')(...) + u = SubPixelUpscaling(scale_factor=2)(x) + [Optional] + x = Convolution2D(256, 3, 3, padding='same', activation='relu')(u) + ``` + In practice, it is useful to have a second convolution layer after the + SubPixelUpscaling layer to speed up the learning process. + However, if you are stacking multiple SubPixelUpscaling blocks, it may increase + the number of parameters greatly, so the Convolution layer after SubPixelUpscaling + layer can be removed. + # Arguments + scale_factor: Upscaling factor. + data_format: Can be None, 'channels_first' or 'channels_last'. + # Input shape + 4D tensor with shape: + `(samples, k * (scale_factor * scale_factor) channels, rows, cols)` if data_format='channels_first' + or 4D tensor with shape: + `(samples, rows, cols, k * (scale_factor * scale_factor) channels)` if data_format='channels_last'. + # Output shape + 4D tensor with shape: + `(samples, k channels, rows * scale_factor, cols * scale_factor))` if data_format='channels_first' + or 4D tensor with shape: + `(samples, rows * scale_factor, cols * scale_factor, k channels)` if data_format='channels_last'. 
+ """ + + def __init__(self, scale_factor=2, data_format=None, **kwargs): + super(SubPixelUpscaling, self).__init__(**kwargs) + + self.scale_factor = scale_factor + self.data_format = normalize_data_format(data_format) + + def build(self, input_shape): + pass + + def call(self, x, mask=None): + y = K_BACKEND.depth_to_space(x, self.scale_factor, self.data_format) + return y + + def compute_output_shape(self, input_shape): + if self.data_format == 'channels_first': + b, k, r, c = input_shape + return (b, k // (self.scale_factor ** 2), r * self.scale_factor, c * self.scale_factor) + else: + b, r, c, k = input_shape + return (b, r * self.scale_factor, c * self.scale_factor, k // (self.scale_factor ** 2)) + + def get_config(self): + config = {'scale_factor': self.scale_factor, + 'data_format': self.data_format} + base_config = super(SubPixelUpscaling, self).get_config() + return dict(list(base_config.items()) + list(config.items())) + + +get_custom_objects().update({'SubPixelUpscaling': SubPixelUpscaling}) diff --git a/SSRNET/tensorflow_backend.py b/SSRNET/tensorflow_backend.py new file mode 100644 index 0000000..a878af9 --- /dev/null +++ b/SSRNET/tensorflow_backend.py @@ -0,0 +1,54 @@ +#https://github.com/titu1994/DenseNet +import tensorflow as tf + +from keras.backend import tensorflow_backend as KTF +from keras.backend.common import image_data_format + +py_all = all + + +def _preprocess_conv2d_input(x, data_format): + """Transpose and cast the input before the conv2d. + # Arguments + x: input tensor. + data_format: string, `"channels_last"` or `"channels_first"`. + # Returns + A tensor. + """ + if dtype(x) == 'float64': + x = tf.cast(x, 'float32') + if data_format == 'channels_first': + # TF uses the last dimension as channel dimension, + # instead of the 2nd one. + # TH input shape: (samples, input_depth, rows, cols) + # TF input shape: (samples, rows, cols, input_depth) + x = tf.transpose(x, (0, 2, 3, 1)) + return x + + +def _postprocess_conv2d_output(x, data_format): + """Transpose and cast the output from conv2d if needed. + # Arguments + x: A tensor. + data_format: string, `"channels_last"` or `"channels_first"`. + # Returns + A tensor. 
+ """ + + if data_format == 'channels_first': + x = tf.transpose(x, (0, 3, 1, 2)) + + if floatx() == 'float64': + x = tf.cast(x, 'float64') + return x + + +def depth_to_space(input, scale, data_format=None): + ''' Uses phase shift algorithm to convert channels/depth for spatial resolution ''' + if data_format is None: + data_format = image_data_format() + data_format = data_format.lower() + input = _preprocess_conv2d_input(input, data_format) + out = tf.depth_to_space(input, scale) + out = _postprocess_conv2d_output(out, data_format) + return out diff --git a/TYY_Megaage_asian_create_db.py b/TYY_Megaage_asian_create_db.py new file mode 100644 index 0000000..7776e98 --- /dev/null +++ b/TYY_Megaage_asian_create_db.py @@ -0,0 +1,63 @@ +import numpy as np +import cv2 +import scipy.io +import argparse +from tqdm import tqdm +from os import listdir +from os.path import isfile, join +import sys +import dlib +from moviepy.editor import * + + +def get_args(): + parser = argparse.ArgumentParser(description="This script cleans-up noisy labels " + "and creates database for training.", + formatter_class=argparse.ArgumentDefaultsHelpFormatter) + parser.add_argument("--output", "-o", type=str, + help="path to output database mat file") + parser.add_argument("--img_size", type=int, default=64, + help="output image size") + + args = parser.parse_args() + return args + + +def main(): + args = get_args() + output_path = './data/megaage_test' + #output_path = './data/megaage_train' + img_size = args.img_size + + mypath = './megaage_asian/test' + #mypath = './megaage_asian/train' + isPlot = False + + age_file = np.loadtxt('./megaage_asian/list/test_age.txt') + #age_file = np.loadtxt('./megaage_asian/list/train_age.txt') + img_name_file = np.genfromtxt('./megaage_asian/list/test_name.txt',dtype='str') + #img_name_file = np.genfromtxt('./megaage_asian/list/train_name.txt',dtype='str') + out_ages = [] + out_imgs = [] + + for i in tqdm(range(len(img_name_file))): + + input_img = cv2.imread(mypath+'/'+img_name_file[i]) + input_img = input_img[20:-20,:,:] + img_h, img_w, _ = np.shape(input_img) + age = int(float(age_file[i])) + if age >= -1: + if isPlot: + img_clip = ImageClip(input_img) + img_clip.show() + key = cv2.waitKey(1000) + + input_img = cv2.resize(input_img,(img_size,img_size)) + #only add to the list when faces is detected + out_imgs.append(input_img) + out_ages.append(int(age)) + + np.savez(output_path,image=np.array(out_imgs), age=np.array(out_ages), img_size=img_size) + +if __name__ == '__main__': + main() diff --git a/data/README.md b/data/README.md new file mode 100644 index 0000000..116c2c3 --- /dev/null +++ b/data/README.md @@ -0,0 +1,3 @@ +Create megaage_train.npz and megaage_test.npz by TYY_Megaage_asian_create_db.py + or +Download from https://drive.google.com/open?id=1CismL8x4gi3sAfTi3qpxedWSStTPsrcp diff --git a/megaage_models/DenseNet/batch_size_50/densenet_reg_19_64/densenet_reg_19_64.h5 b/megaage_models/DenseNet/batch_size_50/densenet_reg_19_64/densenet_reg_19_64.h5 new file mode 100644 index 0000000..f59f384 Binary files /dev/null and b/megaage_models/DenseNet/batch_size_50/densenet_reg_19_64/densenet_reg_19_64.h5 differ diff --git a/megaage_models/DenseNet/batch_size_50/densenet_reg_19_64/densenet_reg_19_64.json b/megaage_models/DenseNet/batch_size_50/densenet_reg_19_64/densenet_reg_19_64.json new file mode 100644 index 0000000..9cd928b --- /dev/null +++ b/megaage_models/DenseNet/batch_size_50/densenet_reg_19_64/densenet_reg_19_64.json @@ -0,0 +1 @@ +{"backend": "tensorflow", 
"class_name": "Model", "config": {"name": "model_1", "input_layers": [["input_1", 0, 0]], "output_layers": [["pred_a", 0, 0]], "layers": [{"name": "input_1", "class_name": "InputLayer", "inbound_nodes": [], "config": {"batch_input_shape": [null, 64, 64, 3], "dtype": "float32", "sparse": false, "name": "input_1"}}, {"name": "densenet", "class_name": "Model", "inbound_nodes": [[["input_1", 0, 0, {}]]], "config": {"name": "densenet", "input_layers": [["input_2", 0, 0]], "output_layers": [["global_average_pooling2d_1", 0, 0]], "layers": [{"name": "input_2", "inbound_nodes": [], "class_name": "InputLayer", "config": {"batch_input_shape": [null, 64, 64, 3], "dtype": "float32", "sparse": false, "name": "input_2"}}, {"name": "conv2d_1", "inbound_nodes": [[["input_2", 0, 0, {}]]], "class_name": "Conv2D", "config": {"activation": "linear", "kernel_size": [3, 3], "name": "conv2d_1", "strides": [1, 1], "activity_regularizer": null, "data_format": "channels_last", "bias_constraint": null, "bias_regularizer": null, "kernel_regularizer": {"class_name": "L1L2", "config": {"l2": 9.999999747378752e-05, "l1": 0.0}}, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "normal", "mode": "fan_in", "scale": 2.0, "seed": null}}, "padding": "same", "bias_initializer": {"class_name": "Zeros", "config": {}}, "trainable": true, "use_bias": false, "dilation_rate": [1, 1], "filters": 24, "kernel_constraint": null}}, {"name": "batch_normalization_1", "inbound_nodes": [[["conv2d_1", 0, 0, {}]]], "class_name": "BatchNormalization", "config": {"epsilon": 1.1e-05, "gamma_initializer": {"class_name": "Ones", "config": {}}, "axis": -1, "gamma_constraint": null, "beta_regularizer": null, "center": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "name": "batch_normalization_1", "beta_constraint": null, "gamma_regularizer": null, "trainable": true, "momentum": 0.99, "scale": true, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}}}, {"name": "activation_1", "inbound_nodes": [[["batch_normalization_1", 0, 0, {}]]], "class_name": "Activation", "config": {"name": "activation_1", "activation": "relu", "trainable": true}}, {"name": "conv2d_2", "inbound_nodes": [[["activation_1", 0, 0, {}]]], "class_name": "Conv2D", "config": {"activation": "linear", "kernel_size": [3, 3], "name": "conv2d_2", "strides": [1, 1], "activity_regularizer": null, "data_format": "channels_last", "bias_constraint": null, "bias_regularizer": null, "kernel_regularizer": null, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "normal", "mode": "fan_in", "scale": 2.0, "seed": null}}, "padding": "same", "bias_initializer": {"class_name": "Zeros", "config": {}}, "trainable": true, "use_bias": false, "dilation_rate": [1, 1], "filters": 12, "kernel_constraint": null}}, {"name": "concatenate_1", "inbound_nodes": [[["conv2d_1", 0, 0, {}], ["conv2d_2", 0, 0, {}]]], "class_name": "Concatenate", "config": {"name": "concatenate_1", "axis": -1, "trainable": true}}, {"name": "batch_normalization_2", "inbound_nodes": [[["concatenate_1", 0, 0, {}]]], "class_name": "BatchNormalization", "config": {"epsilon": 1.1e-05, "gamma_initializer": {"class_name": "Ones", "config": {}}, "axis": -1, "gamma_constraint": null, "beta_regularizer": null, "center": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "name": "batch_normalization_2", 
"beta_constraint": null, "gamma_regularizer": null, "trainable": true, "momentum": 0.99, "scale": true, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}}}, {"name": "activation_2", "inbound_nodes": [[["batch_normalization_2", 0, 0, {}]]], "class_name": "Activation", "config": {"name": "activation_2", "activation": "relu", "trainable": true}}, {"name": "conv2d_3", "inbound_nodes": [[["activation_2", 0, 0, {}]]], "class_name": "Conv2D", "config": {"activation": "linear", "kernel_size": [3, 3], "name": "conv2d_3", "strides": [1, 1], "activity_regularizer": null, "data_format": "channels_last", "bias_constraint": null, "bias_regularizer": null, "kernel_regularizer": null, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "normal", "mode": "fan_in", "scale": 2.0, "seed": null}}, "padding": "same", "bias_initializer": {"class_name": "Zeros", "config": {}}, "trainable": true, "use_bias": false, "dilation_rate": [1, 1], "filters": 12, "kernel_constraint": null}}, {"name": "concatenate_2", "inbound_nodes": [[["concatenate_1", 0, 0, {}], ["conv2d_3", 0, 0, {}]]], "class_name": "Concatenate", "config": {"name": "concatenate_2", "axis": -1, "trainable": true}}, {"name": "batch_normalization_3", "inbound_nodes": [[["concatenate_2", 0, 0, {}]]], "class_name": "BatchNormalization", "config": {"epsilon": 1.1e-05, "gamma_initializer": {"class_name": "Ones", "config": {}}, "axis": -1, "gamma_constraint": null, "beta_regularizer": null, "center": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "name": "batch_normalization_3", "beta_constraint": null, "gamma_regularizer": null, "trainable": true, "momentum": 0.99, "scale": true, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}}}, {"name": "activation_3", "inbound_nodes": [[["batch_normalization_3", 0, 0, {}]]], "class_name": "Activation", "config": {"name": "activation_3", "activation": "relu", "trainable": true}}, {"name": "conv2d_4", "inbound_nodes": [[["activation_3", 0, 0, {}]]], "class_name": "Conv2D", "config": {"activation": "linear", "kernel_size": [3, 3], "name": "conv2d_4", "strides": [1, 1], "activity_regularizer": null, "data_format": "channels_last", "bias_constraint": null, "bias_regularizer": null, "kernel_regularizer": null, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "normal", "mode": "fan_in", "scale": 2.0, "seed": null}}, "padding": "same", "bias_initializer": {"class_name": "Zeros", "config": {}}, "trainable": true, "use_bias": false, "dilation_rate": [1, 1], "filters": 12, "kernel_constraint": null}}, {"name": "concatenate_3", "inbound_nodes": [[["concatenate_2", 0, 0, {}], ["conv2d_4", 0, 0, {}]]], "class_name": "Concatenate", "config": {"name": "concatenate_3", "axis": -1, "trainable": true}}, {"name": "batch_normalization_4", "inbound_nodes": [[["concatenate_3", 0, 0, {}]]], "class_name": "BatchNormalization", "config": {"epsilon": 1.1e-05, "gamma_initializer": {"class_name": "Ones", "config": {}}, "axis": -1, "gamma_constraint": null, "beta_regularizer": null, "center": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "name": "batch_normalization_4", "beta_constraint": null, "gamma_regularizer": null, "trainable": true, "momentum": 0.99, "scale": true, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}}}, {"name": "activation_4", "inbound_nodes": 
[[["batch_normalization_4", 0, 0, {}]]], "class_name": "Activation", "config": {"name": "activation_4", "activation": "relu", "trainable": true}}, {"name": "conv2d_5", "inbound_nodes": [[["activation_4", 0, 0, {}]]], "class_name": "Conv2D", "config": {"activation": "linear", "kernel_size": [3, 3], "name": "conv2d_5", "strides": [1, 1], "activity_regularizer": null, "data_format": "channels_last", "bias_constraint": null, "bias_regularizer": null, "kernel_regularizer": null, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "normal", "mode": "fan_in", "scale": 2.0, "seed": null}}, "padding": "same", "bias_initializer": {"class_name": "Zeros", "config": {}}, "trainable": true, "use_bias": false, "dilation_rate": [1, 1], "filters": 12, "kernel_constraint": null}}, {"name": "concatenate_4", "inbound_nodes": [[["concatenate_3", 0, 0, {}], ["conv2d_5", 0, 0, {}]]], "class_name": "Concatenate", "config": {"name": "concatenate_4", "axis": -1, "trainable": true}}, {"name": "batch_normalization_5", "inbound_nodes": [[["concatenate_4", 0, 0, {}]]], "class_name": "BatchNormalization", "config": {"epsilon": 1.1e-05, "gamma_initializer": {"class_name": "Ones", "config": {}}, "axis": -1, "gamma_constraint": null, "beta_regularizer": null, "center": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "name": "batch_normalization_5", "beta_constraint": null, "gamma_regularizer": null, "trainable": true, "momentum": 0.99, "scale": true, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}}}, {"name": "activation_5", "inbound_nodes": [[["batch_normalization_5", 0, 0, {}]]], "class_name": "Activation", "config": {"name": "activation_5", "activation": "relu", "trainable": true}}, {"name": "conv2d_6", "inbound_nodes": [[["activation_5", 0, 0, {}]]], "class_name": "Conv2D", "config": {"activation": "linear", "kernel_size": [3, 3], "name": "conv2d_6", "strides": [1, 1], "activity_regularizer": null, "data_format": "channels_last", "bias_constraint": null, "bias_regularizer": null, "kernel_regularizer": null, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "normal", "mode": "fan_in", "scale": 2.0, "seed": null}}, "padding": "same", "bias_initializer": {"class_name": "Zeros", "config": {}}, "trainable": true, "use_bias": false, "dilation_rate": [1, 1], "filters": 12, "kernel_constraint": null}}, {"name": "concatenate_5", "inbound_nodes": [[["concatenate_4", 0, 0, {}], ["conv2d_6", 0, 0, {}]]], "class_name": "Concatenate", "config": {"name": "concatenate_5", "axis": -1, "trainable": true}}, {"name": "batch_normalization_6", "inbound_nodes": [[["concatenate_5", 0, 0, {}]]], "class_name": "BatchNormalization", "config": {"epsilon": 1.1e-05, "gamma_initializer": {"class_name": "Ones", "config": {}}, "axis": -1, "gamma_constraint": null, "beta_regularizer": null, "center": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "name": "batch_normalization_6", "beta_constraint": null, "gamma_regularizer": null, "trainable": true, "momentum": 0.99, "scale": true, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}}}, {"name": "activation_6", "inbound_nodes": [[["batch_normalization_6", 0, 0, {}]]], "class_name": "Activation", "config": {"name": "activation_6", "activation": "relu", "trainable": true}}, {"name": "conv2d_7", "inbound_nodes": [[["activation_6", 0, 0, {}]]], 
"class_name": "Conv2D", "config": {"activation": "linear", "kernel_size": [1, 1], "name": "conv2d_7", "strides": [1, 1], "activity_regularizer": null, "data_format": "channels_last", "bias_constraint": null, "bias_regularizer": null, "kernel_regularizer": {"class_name": "L1L2", "config": {"l2": 9.999999747378752e-05, "l1": 0.0}}, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "normal", "mode": "fan_in", "scale": 2.0, "seed": null}}, "padding": "same", "bias_initializer": {"class_name": "Zeros", "config": {}}, "trainable": true, "use_bias": false, "dilation_rate": [1, 1], "filters": 84, "kernel_constraint": null}}, {"name": "average_pooling2d_1", "inbound_nodes": [[["conv2d_7", 0, 0, {}]]], "class_name": "AveragePooling2D", "config": {"name": "average_pooling2d_1", "padding": "valid", "strides": [2, 2], "trainable": true, "data_format": "channels_last", "pool_size": [2, 2]}}, {"name": "batch_normalization_7", "inbound_nodes": [[["average_pooling2d_1", 0, 0, {}]]], "class_name": "BatchNormalization", "config": {"epsilon": 1.1e-05, "gamma_initializer": {"class_name": "Ones", "config": {}}, "axis": -1, "gamma_constraint": null, "beta_regularizer": null, "center": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "name": "batch_normalization_7", "beta_constraint": null, "gamma_regularizer": null, "trainable": true, "momentum": 0.99, "scale": true, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}}}, {"name": "activation_7", "inbound_nodes": [[["batch_normalization_7", 0, 0, {}]]], "class_name": "Activation", "config": {"name": "activation_7", "activation": "relu", "trainable": true}}, {"name": "conv2d_8", "inbound_nodes": [[["activation_7", 0, 0, {}]]], "class_name": "Conv2D", "config": {"activation": "linear", "kernel_size": [3, 3], "name": "conv2d_8", "strides": [1, 1], "activity_regularizer": null, "data_format": "channels_last", "bias_constraint": null, "bias_regularizer": null, "kernel_regularizer": null, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "normal", "mode": "fan_in", "scale": 2.0, "seed": null}}, "padding": "same", "bias_initializer": {"class_name": "Zeros", "config": {}}, "trainable": true, "use_bias": false, "dilation_rate": [1, 1], "filters": 12, "kernel_constraint": null}}, {"name": "concatenate_6", "inbound_nodes": [[["average_pooling2d_1", 0, 0, {}], ["conv2d_8", 0, 0, {}]]], "class_name": "Concatenate", "config": {"name": "concatenate_6", "axis": -1, "trainable": true}}, {"name": "batch_normalization_8", "inbound_nodes": [[["concatenate_6", 0, 0, {}]]], "class_name": "BatchNormalization", "config": {"epsilon": 1.1e-05, "gamma_initializer": {"class_name": "Ones", "config": {}}, "axis": -1, "gamma_constraint": null, "beta_regularizer": null, "center": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "name": "batch_normalization_8", "beta_constraint": null, "gamma_regularizer": null, "trainable": true, "momentum": 0.99, "scale": true, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}}}, {"name": "activation_8", "inbound_nodes": [[["batch_normalization_8", 0, 0, {}]]], "class_name": "Activation", "config": {"name": "activation_8", "activation": "relu", "trainable": true}}, {"name": "conv2d_9", "inbound_nodes": [[["activation_8", 0, 0, {}]]], "class_name": "Conv2D", "config": {"activation": "linear", 
"kernel_size": [3, 3], "name": "conv2d_9", "strides": [1, 1], "activity_regularizer": null, "data_format": "channels_last", "bias_constraint": null, "bias_regularizer": null, "kernel_regularizer": null, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "normal", "mode": "fan_in", "scale": 2.0, "seed": null}}, "padding": "same", "bias_initializer": {"class_name": "Zeros", "config": {}}, "trainable": true, "use_bias": false, "dilation_rate": [1, 1], "filters": 12, "kernel_constraint": null}}, {"name": "concatenate_7", "inbound_nodes": [[["concatenate_6", 0, 0, {}], ["conv2d_9", 0, 0, {}]]], "class_name": "Concatenate", "config": {"name": "concatenate_7", "axis": -1, "trainable": true}}, {"name": "batch_normalization_9", "inbound_nodes": [[["concatenate_7", 0, 0, {}]]], "class_name": "BatchNormalization", "config": {"epsilon": 1.1e-05, "gamma_initializer": {"class_name": "Ones", "config": {}}, "axis": -1, "gamma_constraint": null, "beta_regularizer": null, "center": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "name": "batch_normalization_9", "beta_constraint": null, "gamma_regularizer": null, "trainable": true, "momentum": 0.99, "scale": true, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}}}, {"name": "activation_9", "inbound_nodes": [[["batch_normalization_9", 0, 0, {}]]], "class_name": "Activation", "config": {"name": "activation_9", "activation": "relu", "trainable": true}}, {"name": "conv2d_10", "inbound_nodes": [[["activation_9", 0, 0, {}]]], "class_name": "Conv2D", "config": {"activation": "linear", "kernel_size": [3, 3], "name": "conv2d_10", "strides": [1, 1], "activity_regularizer": null, "data_format": "channels_last", "bias_constraint": null, "bias_regularizer": null, "kernel_regularizer": null, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "normal", "mode": "fan_in", "scale": 2.0, "seed": null}}, "padding": "same", "bias_initializer": {"class_name": "Zeros", "config": {}}, "trainable": true, "use_bias": false, "dilation_rate": [1, 1], "filters": 12, "kernel_constraint": null}}, {"name": "concatenate_8", "inbound_nodes": [[["concatenate_7", 0, 0, {}], ["conv2d_10", 0, 0, {}]]], "class_name": "Concatenate", "config": {"name": "concatenate_8", "axis": -1, "trainable": true}}, {"name": "batch_normalization_10", "inbound_nodes": [[["concatenate_8", 0, 0, {}]]], "class_name": "BatchNormalization", "config": {"epsilon": 1.1e-05, "gamma_initializer": {"class_name": "Ones", "config": {}}, "axis": -1, "gamma_constraint": null, "beta_regularizer": null, "center": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "name": "batch_normalization_10", "beta_constraint": null, "gamma_regularizer": null, "trainable": true, "momentum": 0.99, "scale": true, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}}}, {"name": "activation_10", "inbound_nodes": [[["batch_normalization_10", 0, 0, {}]]], "class_name": "Activation", "config": {"name": "activation_10", "activation": "relu", "trainable": true}}, {"name": "conv2d_11", "inbound_nodes": [[["activation_10", 0, 0, {}]]], "class_name": "Conv2D", "config": {"activation": "linear", "kernel_size": [3, 3], "name": "conv2d_11", "strides": [1, 1], "activity_regularizer": null, "data_format": "channels_last", "bias_constraint": null, "bias_regularizer": null, "kernel_regularizer": null, 
"kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "normal", "mode": "fan_in", "scale": 2.0, "seed": null}}, "padding": "same", "bias_initializer": {"class_name": "Zeros", "config": {}}, "trainable": true, "use_bias": false, "dilation_rate": [1, 1], "filters": 12, "kernel_constraint": null}}, {"name": "concatenate_9", "inbound_nodes": [[["concatenate_8", 0, 0, {}], ["conv2d_11", 0, 0, {}]]], "class_name": "Concatenate", "config": {"name": "concatenate_9", "axis": -1, "trainable": true}}, {"name": "batch_normalization_11", "inbound_nodes": [[["concatenate_9", 0, 0, {}]]], "class_name": "BatchNormalization", "config": {"epsilon": 1.1e-05, "gamma_initializer": {"class_name": "Ones", "config": {}}, "axis": -1, "gamma_constraint": null, "beta_regularizer": null, "center": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "name": "batch_normalization_11", "beta_constraint": null, "gamma_regularizer": null, "trainable": true, "momentum": 0.99, "scale": true, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}}}, {"name": "activation_11", "inbound_nodes": [[["batch_normalization_11", 0, 0, {}]]], "class_name": "Activation", "config": {"name": "activation_11", "activation": "relu", "trainable": true}}, {"name": "conv2d_12", "inbound_nodes": [[["activation_11", 0, 0, {}]]], "class_name": "Conv2D", "config": {"activation": "linear", "kernel_size": [3, 3], "name": "conv2d_12", "strides": [1, 1], "activity_regularizer": null, "data_format": "channels_last", "bias_constraint": null, "bias_regularizer": null, "kernel_regularizer": null, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "normal", "mode": "fan_in", "scale": 2.0, "seed": null}}, "padding": "same", "bias_initializer": {"class_name": "Zeros", "config": {}}, "trainable": true, "use_bias": false, "dilation_rate": [1, 1], "filters": 12, "kernel_constraint": null}}, {"name": "concatenate_10", "inbound_nodes": [[["concatenate_9", 0, 0, {}], ["conv2d_12", 0, 0, {}]]], "class_name": "Concatenate", "config": {"name": "concatenate_10", "axis": -1, "trainable": true}}, {"name": "batch_normalization_12", "inbound_nodes": [[["concatenate_10", 0, 0, {}]]], "class_name": "BatchNormalization", "config": {"epsilon": 1.1e-05, "gamma_initializer": {"class_name": "Ones", "config": {}}, "axis": -1, "gamma_constraint": null, "beta_regularizer": null, "center": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "name": "batch_normalization_12", "beta_constraint": null, "gamma_regularizer": null, "trainable": true, "momentum": 0.99, "scale": true, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}}}, {"name": "activation_12", "inbound_nodes": [[["batch_normalization_12", 0, 0, {}]]], "class_name": "Activation", "config": {"name": "activation_12", "activation": "relu", "trainable": true}}, {"name": "conv2d_13", "inbound_nodes": [[["activation_12", 0, 0, {}]]], "class_name": "Conv2D", "config": {"activation": "linear", "kernel_size": [1, 1], "name": "conv2d_13", "strides": [1, 1], "activity_regularizer": null, "data_format": "channels_last", "bias_constraint": null, "bias_regularizer": null, "kernel_regularizer": {"class_name": "L1L2", "config": {"l2": 9.999999747378752e-05, "l1": 0.0}}, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "normal", "mode": "fan_in", "scale": 2.0, 
"seed": null}}, "padding": "same", "bias_initializer": {"class_name": "Zeros", "config": {}}, "trainable": true, "use_bias": false, "dilation_rate": [1, 1], "filters": 144, "kernel_constraint": null}}, {"name": "average_pooling2d_2", "inbound_nodes": [[["conv2d_13", 0, 0, {}]]], "class_name": "AveragePooling2D", "config": {"name": "average_pooling2d_2", "padding": "valid", "strides": [2, 2], "trainable": true, "data_format": "channels_last", "pool_size": [2, 2]}}, {"name": "batch_normalization_13", "inbound_nodes": [[["average_pooling2d_2", 0, 0, {}]]], "class_name": "BatchNormalization", "config": {"epsilon": 1.1e-05, "gamma_initializer": {"class_name": "Ones", "config": {}}, "axis": -1, "gamma_constraint": null, "beta_regularizer": null, "center": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "name": "batch_normalization_13", "beta_constraint": null, "gamma_regularizer": null, "trainable": true, "momentum": 0.99, "scale": true, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}}}, {"name": "activation_13", "inbound_nodes": [[["batch_normalization_13", 0, 0, {}]]], "class_name": "Activation", "config": {"name": "activation_13", "activation": "relu", "trainable": true}}, {"name": "conv2d_14", "inbound_nodes": [[["activation_13", 0, 0, {}]]], "class_name": "Conv2D", "config": {"activation": "linear", "kernel_size": [3, 3], "name": "conv2d_14", "strides": [1, 1], "activity_regularizer": null, "data_format": "channels_last", "bias_constraint": null, "bias_regularizer": null, "kernel_regularizer": null, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "normal", "mode": "fan_in", "scale": 2.0, "seed": null}}, "padding": "same", "bias_initializer": {"class_name": "Zeros", "config": {}}, "trainable": true, "use_bias": false, "dilation_rate": [1, 1], "filters": 12, "kernel_constraint": null}}, {"name": "concatenate_11", "inbound_nodes": [[["average_pooling2d_2", 0, 0, {}], ["conv2d_14", 0, 0, {}]]], "class_name": "Concatenate", "config": {"name": "concatenate_11", "axis": -1, "trainable": true}}, {"name": "batch_normalization_14", "inbound_nodes": [[["concatenate_11", 0, 0, {}]]], "class_name": "BatchNormalization", "config": {"epsilon": 1.1e-05, "gamma_initializer": {"class_name": "Ones", "config": {}}, "axis": -1, "gamma_constraint": null, "beta_regularizer": null, "center": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "name": "batch_normalization_14", "beta_constraint": null, "gamma_regularizer": null, "trainable": true, "momentum": 0.99, "scale": true, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}}}, {"name": "activation_14", "inbound_nodes": [[["batch_normalization_14", 0, 0, {}]]], "class_name": "Activation", "config": {"name": "activation_14", "activation": "relu", "trainable": true}}, {"name": "conv2d_15", "inbound_nodes": [[["activation_14", 0, 0, {}]]], "class_name": "Conv2D", "config": {"activation": "linear", "kernel_size": [3, 3], "name": "conv2d_15", "strides": [1, 1], "activity_regularizer": null, "data_format": "channels_last", "bias_constraint": null, "bias_regularizer": null, "kernel_regularizer": null, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "normal", "mode": "fan_in", "scale": 2.0, "seed": null}}, "padding": "same", "bias_initializer": {"class_name": "Zeros", "config": {}}, "trainable": true, 
"use_bias": false, "dilation_rate": [1, 1], "filters": 12, "kernel_constraint": null}}, {"name": "concatenate_12", "inbound_nodes": [[["concatenate_11", 0, 0, {}], ["conv2d_15", 0, 0, {}]]], "class_name": "Concatenate", "config": {"name": "concatenate_12", "axis": -1, "trainable": true}}, {"name": "batch_normalization_15", "inbound_nodes": [[["concatenate_12", 0, 0, {}]]], "class_name": "BatchNormalization", "config": {"epsilon": 1.1e-05, "gamma_initializer": {"class_name": "Ones", "config": {}}, "axis": -1, "gamma_constraint": null, "beta_regularizer": null, "center": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "name": "batch_normalization_15", "beta_constraint": null, "gamma_regularizer": null, "trainable": true, "momentum": 0.99, "scale": true, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}}}, {"name": "activation_15", "inbound_nodes": [[["batch_normalization_15", 0, 0, {}]]], "class_name": "Activation", "config": {"name": "activation_15", "activation": "relu", "trainable": true}}, {"name": "conv2d_16", "inbound_nodes": [[["activation_15", 0, 0, {}]]], "class_name": "Conv2D", "config": {"activation": "linear", "kernel_size": [3, 3], "name": "conv2d_16", "strides": [1, 1], "activity_regularizer": null, "data_format": "channels_last", "bias_constraint": null, "bias_regularizer": null, "kernel_regularizer": null, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "normal", "mode": "fan_in", "scale": 2.0, "seed": null}}, "padding": "same", "bias_initializer": {"class_name": "Zeros", "config": {}}, "trainable": true, "use_bias": false, "dilation_rate": [1, 1], "filters": 12, "kernel_constraint": null}}, {"name": "concatenate_13", "inbound_nodes": [[["concatenate_12", 0, 0, {}], ["conv2d_16", 0, 0, {}]]], "class_name": "Concatenate", "config": {"name": "concatenate_13", "axis": -1, "trainable": true}}, {"name": "batch_normalization_16", "inbound_nodes": [[["concatenate_13", 0, 0, {}]]], "class_name": "BatchNormalization", "config": {"epsilon": 1.1e-05, "gamma_initializer": {"class_name": "Ones", "config": {}}, "axis": -1, "gamma_constraint": null, "beta_regularizer": null, "center": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "name": "batch_normalization_16", "beta_constraint": null, "gamma_regularizer": null, "trainable": true, "momentum": 0.99, "scale": true, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}}}, {"name": "activation_16", "inbound_nodes": [[["batch_normalization_16", 0, 0, {}]]], "class_name": "Activation", "config": {"name": "activation_16", "activation": "relu", "trainable": true}}, {"name": "conv2d_17", "inbound_nodes": [[["activation_16", 0, 0, {}]]], "class_name": "Conv2D", "config": {"activation": "linear", "kernel_size": [3, 3], "name": "conv2d_17", "strides": [1, 1], "activity_regularizer": null, "data_format": "channels_last", "bias_constraint": null, "bias_regularizer": null, "kernel_regularizer": null, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "normal", "mode": "fan_in", "scale": 2.0, "seed": null}}, "padding": "same", "bias_initializer": {"class_name": "Zeros", "config": {}}, "trainable": true, "use_bias": false, "dilation_rate": [1, 1], "filters": 12, "kernel_constraint": null}}, {"name": "concatenate_14", "inbound_nodes": [[["concatenate_13", 0, 0, {}], ["conv2d_17", 0, 0, {}]]], 
"class_name": "Concatenate", "config": {"name": "concatenate_14", "axis": -1, "trainable": true}}, {"name": "batch_normalization_17", "inbound_nodes": [[["concatenate_14", 0, 0, {}]]], "class_name": "BatchNormalization", "config": {"epsilon": 1.1e-05, "gamma_initializer": {"class_name": "Ones", "config": {}}, "axis": -1, "gamma_constraint": null, "beta_regularizer": null, "center": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "name": "batch_normalization_17", "beta_constraint": null, "gamma_regularizer": null, "trainable": true, "momentum": 0.99, "scale": true, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}}}, {"name": "activation_17", "inbound_nodes": [[["batch_normalization_17", 0, 0, {}]]], "class_name": "Activation", "config": {"name": "activation_17", "activation": "relu", "trainable": true}}, {"name": "conv2d_18", "inbound_nodes": [[["activation_17", 0, 0, {}]]], "class_name": "Conv2D", "config": {"activation": "linear", "kernel_size": [3, 3], "name": "conv2d_18", "strides": [1, 1], "activity_regularizer": null, "data_format": "channels_last", "bias_constraint": null, "bias_regularizer": null, "kernel_regularizer": null, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "normal", "mode": "fan_in", "scale": 2.0, "seed": null}}, "padding": "same", "bias_initializer": {"class_name": "Zeros", "config": {}}, "trainable": true, "use_bias": false, "dilation_rate": [1, 1], "filters": 12, "kernel_constraint": null}}, {"name": "concatenate_15", "inbound_nodes": [[["concatenate_14", 0, 0, {}], ["conv2d_18", 0, 0, {}]]], "class_name": "Concatenate", "config": {"name": "concatenate_15", "axis": -1, "trainable": true}}, {"name": "batch_normalization_18", "inbound_nodes": [[["concatenate_15", 0, 0, {}]]], "class_name": "BatchNormalization", "config": {"epsilon": 1.1e-05, "gamma_initializer": {"class_name": "Ones", "config": {}}, "axis": -1, "gamma_constraint": null, "beta_regularizer": null, "center": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "name": "batch_normalization_18", "beta_constraint": null, "gamma_regularizer": null, "trainable": true, "momentum": 0.99, "scale": true, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}}}, {"name": "activation_18", "inbound_nodes": [[["batch_normalization_18", 0, 0, {}]]], "class_name": "Activation", "config": {"name": "activation_18", "activation": "relu", "trainable": true}}, {"name": "global_average_pooling2d_1", "inbound_nodes": [[["activation_18", 0, 0, {}]]], "class_name": "GlobalAveragePooling2D", "config": {"name": "global_average_pooling2d_1", "data_format": "channels_last", "trainable": true}}]}}, {"name": "dense_1", "class_name": "Dense", "inbound_nodes": [[["densenet", 1, 0, {}]]], "config": {"bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "activation": "relu", "trainable": true, "bias_regularizer": null, "activity_regularizer": null, "units": 128, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "uniform", "mode": "fan_avg", "scale": 1.0, "seed": null}}, "bias_constraint": null, "name": "dense_1", "use_bias": true, "kernel_constraint": null}}, {"name": "dropout_1", "class_name": "Dropout", "inbound_nodes": [[["dense_1", 0, 0, {}]]], "config": {"name": "dropout_1", "rate": 0.2, "trainable": true}}, {"name": "feat_a", "class_name": 
"Dense", "inbound_nodes": [[["dropout_1", 0, 0, {}]]], "config": {"bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "activation": "relu", "trainable": true, "bias_regularizer": null, "activity_regularizer": null, "units": 32, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "uniform", "mode": "fan_avg", "scale": 1.0, "seed": null}}, "bias_constraint": null, "name": "feat_a", "use_bias": true, "kernel_constraint": null}}, {"name": "pred_a", "class_name": "Dense", "inbound_nodes": [[["feat_a", 0, 0, {}]]], "config": {"bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "activation": "linear", "trainable": true, "bias_regularizer": null, "activity_regularizer": null, "units": 1, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "uniform", "mode": "fan_avg", "scale": 1.0, "seed": null}}, "bias_constraint": null, "name": "pred_a", "use_bias": true, "kernel_constraint": null}}]}, "keras_version": "2.0.6"} \ No newline at end of file diff --git a/megaage_models/DenseNet/batch_size_50/densenet_reg_19_64/densenet_reg_19_64.png b/megaage_models/DenseNet/batch_size_50/densenet_reg_19_64/densenet_reg_19_64.png new file mode 100644 index 0000000..877a026 Binary files /dev/null and b/megaage_models/DenseNet/batch_size_50/densenet_reg_19_64/densenet_reg_19_64.png differ diff --git a/megaage_models/DenseNet/batch_size_50/densenet_reg_19_64/history_densenet_reg_19_64.h5 b/megaage_models/DenseNet/batch_size_50/densenet_reg_19_64/history_densenet_reg_19_64.h5 new file mode 100644 index 0000000..3ace63f Binary files /dev/null and b/megaage_models/DenseNet/batch_size_50/densenet_reg_19_64/history_densenet_reg_19_64.h5 differ diff --git a/megaage_models/DenseNet/batch_size_50/densenet_reg_19_64/loss.pdf b/megaage_models/DenseNet/batch_size_50/densenet_reg_19_64/loss.pdf new file mode 100644 index 0000000..5e33c39 Binary files /dev/null and b/megaage_models/DenseNet/batch_size_50/densenet_reg_19_64/loss.pdf differ diff --git a/megaage_models/DenseNet/batch_size_50/densenet_reg_19_64/performance.pdf b/megaage_models/DenseNet/batch_size_50/densenet_reg_19_64/performance.pdf new file mode 100644 index 0000000..603d81c Binary files /dev/null and b/megaage_models/DenseNet/batch_size_50/densenet_reg_19_64/performance.pdf differ diff --git a/megaage_models/MobileNet/batch_size_50/mobilenet_reg_0.25_64/history_mobilenet_reg_0.25_64.h5 b/megaage_models/MobileNet/batch_size_50/mobilenet_reg_0.25_64/history_mobilenet_reg_0.25_64.h5 new file mode 100644 index 0000000..df311db Binary files /dev/null and b/megaage_models/MobileNet/batch_size_50/mobilenet_reg_0.25_64/history_mobilenet_reg_0.25_64.h5 differ diff --git a/megaage_models/MobileNet/batch_size_50/mobilenet_reg_0.25_64/mobilenet_reg_0.25_64.h5 b/megaage_models/MobileNet/batch_size_50/mobilenet_reg_0.25_64/mobilenet_reg_0.25_64.h5 new file mode 100644 index 0000000..0b12362 Binary files /dev/null and b/megaage_models/MobileNet/batch_size_50/mobilenet_reg_0.25_64/mobilenet_reg_0.25_64.h5 differ diff --git a/megaage_models/MobileNet/batch_size_50/mobilenet_reg_0.25_64/mobilenet_reg_0.25_64.json b/megaage_models/MobileNet/batch_size_50/mobilenet_reg_0.25_64/mobilenet_reg_0.25_64.json new file mode 100644 index 0000000..aec07a8 --- /dev/null +++ b/megaage_models/MobileNet/batch_size_50/mobilenet_reg_0.25_64/mobilenet_reg_0.25_64.json @@ -0,0 +1 @@ +{"class_name": "Model", "config": {"name": "model_1", "layers": [{"name": "input_1", 
"class_name": "InputLayer", "config": {"batch_input_shape": [null, 64, 64, 3], "dtype": "float32", "sparse": false, "name": "input_1"}, "inbound_nodes": []}, {"name": "mobilenet_0.25_64", "class_name": "Model", "config": {"name": "mobilenet_0.25_64", "layers": [{"name": "input_2", "class_name": "InputLayer", "config": {"batch_input_shape": [null, 64, 64, 3], "dtype": "float32", "sparse": false, "name": "input_2"}, "inbound_nodes": []}, {"name": "conv1_pad", "class_name": "ZeroPadding2D", "config": {"name": "conv1_pad", "trainable": true, "padding": [[1, 1], [1, 1]], "data_format": "channels_last"}, "inbound_nodes": [[["input_2", 0, 0, {}]]]}, {"name": "conv1", "class_name": "Conv2D", "config": {"name": "conv1", "trainable": true, "filters": 8, "kernel_size": [3, 3], "strides": [2, 2], "padding": "valid", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "linear", "use_bias": false, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["conv1_pad", 0, 0, {}]]]}, {"name": "conv1_bn", "class_name": "BatchNormalization", "config": {"name": "conv1_bn", "trainable": true, "axis": -1, "momentum": 0.99, "epsilon": 0.001, "center": true, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "gamma_initializer": {"class_name": "Ones", "config": {}}, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_regularizer": null, "gamma_regularizer": null, "beta_constraint": null, "gamma_constraint": null}, "inbound_nodes": [[["conv1", 0, 0, {}]]]}, {"name": "conv1_relu", "class_name": "Activation", "config": {"name": "conv1_relu", "trainable": true, "activation": "relu6"}, "inbound_nodes": [[["conv1_bn", 0, 0, {}]]]}, {"name": "conv_pad_1", "class_name": "ZeroPadding2D", "config": {"name": "conv_pad_1", "trainable": true, "padding": [[1, 1], [1, 1]], "data_format": "channels_last"}, "inbound_nodes": [[["conv1_relu", 0, 0, {}]]]}, {"name": "conv_dw_1", "class_name": "DepthwiseConv2D", "config": {"name": "conv_dw_1", "trainable": true, "kernel_size": [3, 3], "strides": [1, 1], "padding": "valid", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "linear", "use_bias": false, "bias_initializer": {"class_name": "Zeros", "config": {}}, "bias_regularizer": null, "activity_regularizer": null, "bias_constraint": null, "depth_multiplier": 1, "depthwise_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "depthwise_regularizer": null, "depthwise_constraint": null}, "inbound_nodes": [[["conv_pad_1", 0, 0, {}]]]}, {"name": "conv_dw_1_bn", "class_name": "BatchNormalization", "config": {"name": "conv_dw_1_bn", "trainable": true, "axis": -1, "momentum": 0.99, "epsilon": 0.001, "center": true, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "gamma_initializer": {"class_name": "Ones", "config": {}}, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_regularizer": null, "gamma_regularizer": null, "beta_constraint": null, "gamma_constraint": null}, "inbound_nodes": [[["conv_dw_1", 0, 
0, {}]]]}, {"name": "conv_dw_1_relu", "class_name": "Activation", "config": {"name": "conv_dw_1_relu", "trainable": true, "activation": "relu6"}, "inbound_nodes": [[["conv_dw_1_bn", 0, 0, {}]]]}, {"name": "conv_pw_1", "class_name": "Conv2D", "config": {"name": "conv_pw_1", "trainable": true, "filters": 16, "kernel_size": [1, 1], "strides": [1, 1], "padding": "same", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "linear", "use_bias": false, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["conv_dw_1_relu", 0, 0, {}]]]}, {"name": "conv_pw_1_bn", "class_name": "BatchNormalization", "config": {"name": "conv_pw_1_bn", "trainable": true, "axis": -1, "momentum": 0.99, "epsilon": 0.001, "center": true, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "gamma_initializer": {"class_name": "Ones", "config": {}}, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_regularizer": null, "gamma_regularizer": null, "beta_constraint": null, "gamma_constraint": null}, "inbound_nodes": [[["conv_pw_1", 0, 0, {}]]]}, {"name": "conv_pw_1_relu", "class_name": "Activation", "config": {"name": "conv_pw_1_relu", "trainable": true, "activation": "relu6"}, "inbound_nodes": [[["conv_pw_1_bn", 0, 0, {}]]]}, {"name": "conv_pad_2", "class_name": "ZeroPadding2D", "config": {"name": "conv_pad_2", "trainable": true, "padding": [[1, 1], [1, 1]], "data_format": "channels_last"}, "inbound_nodes": [[["conv_pw_1_relu", 0, 0, {}]]]}, {"name": "conv_dw_2", "class_name": "DepthwiseConv2D", "config": {"name": "conv_dw_2", "trainable": true, "kernel_size": [3, 3], "strides": [2, 2], "padding": "valid", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "linear", "use_bias": false, "bias_initializer": {"class_name": "Zeros", "config": {}}, "bias_regularizer": null, "activity_regularizer": null, "bias_constraint": null, "depth_multiplier": 1, "depthwise_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "depthwise_regularizer": null, "depthwise_constraint": null}, "inbound_nodes": [[["conv_pad_2", 0, 0, {}]]]}, {"name": "conv_dw_2_bn", "class_name": "BatchNormalization", "config": {"name": "conv_dw_2_bn", "trainable": true, "axis": -1, "momentum": 0.99, "epsilon": 0.001, "center": true, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "gamma_initializer": {"class_name": "Ones", "config": {}}, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_regularizer": null, "gamma_regularizer": null, "beta_constraint": null, "gamma_constraint": null}, "inbound_nodes": [[["conv_dw_2", 0, 0, {}]]]}, {"name": "conv_dw_2_relu", "class_name": "Activation", "config": {"name": "conv_dw_2_relu", "trainable": true, "activation": "relu6"}, "inbound_nodes": [[["conv_dw_2_bn", 0, 0, {}]]]}, {"name": "conv_pw_2", "class_name": "Conv2D", "config": {"name": "conv_pw_2", "trainable": true, "filters": 32, "kernel_size": [1, 1], "strides": [1, 1], "padding": "same", "data_format": "channels_last", 
"dilation_rate": [1, 1], "activation": "linear", "use_bias": false, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["conv_dw_2_relu", 0, 0, {}]]]}, {"name": "conv_pw_2_bn", "class_name": "BatchNormalization", "config": {"name": "conv_pw_2_bn", "trainable": true, "axis": -1, "momentum": 0.99, "epsilon": 0.001, "center": true, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "gamma_initializer": {"class_name": "Ones", "config": {}}, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_regularizer": null, "gamma_regularizer": null, "beta_constraint": null, "gamma_constraint": null}, "inbound_nodes": [[["conv_pw_2", 0, 0, {}]]]}, {"name": "conv_pw_2_relu", "class_name": "Activation", "config": {"name": "conv_pw_2_relu", "trainable": true, "activation": "relu6"}, "inbound_nodes": [[["conv_pw_2_bn", 0, 0, {}]]]}, {"name": "conv_pad_3", "class_name": "ZeroPadding2D", "config": {"name": "conv_pad_3", "trainable": true, "padding": [[1, 1], [1, 1]], "data_format": "channels_last"}, "inbound_nodes": [[["conv_pw_2_relu", 0, 0, {}]]]}, {"name": "conv_dw_3", "class_name": "DepthwiseConv2D", "config": {"name": "conv_dw_3", "trainable": true, "kernel_size": [3, 3], "strides": [1, 1], "padding": "valid", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "linear", "use_bias": false, "bias_initializer": {"class_name": "Zeros", "config": {}}, "bias_regularizer": null, "activity_regularizer": null, "bias_constraint": null, "depth_multiplier": 1, "depthwise_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "depthwise_regularizer": null, "depthwise_constraint": null}, "inbound_nodes": [[["conv_pad_3", 0, 0, {}]]]}, {"name": "conv_dw_3_bn", "class_name": "BatchNormalization", "config": {"name": "conv_dw_3_bn", "trainable": true, "axis": -1, "momentum": 0.99, "epsilon": 0.001, "center": true, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "gamma_initializer": {"class_name": "Ones", "config": {}}, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_regularizer": null, "gamma_regularizer": null, "beta_constraint": null, "gamma_constraint": null}, "inbound_nodes": [[["conv_dw_3", 0, 0, {}]]]}, {"name": "conv_dw_3_relu", "class_name": "Activation", "config": {"name": "conv_dw_3_relu", "trainable": true, "activation": "relu6"}, "inbound_nodes": [[["conv_dw_3_bn", 0, 0, {}]]]}, {"name": "conv_pw_3", "class_name": "Conv2D", "config": {"name": "conv_pw_3", "trainable": true, "filters": 32, "kernel_size": [1, 1], "strides": [1, 1], "padding": "same", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "linear", "use_bias": false, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, 
"inbound_nodes": [[["conv_dw_3_relu", 0, 0, {}]]]}, {"name": "conv_pw_3_bn", "class_name": "BatchNormalization", "config": {"name": "conv_pw_3_bn", "trainable": true, "axis": -1, "momentum": 0.99, "epsilon": 0.001, "center": true, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "gamma_initializer": {"class_name": "Ones", "config": {}}, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_regularizer": null, "gamma_regularizer": null, "beta_constraint": null, "gamma_constraint": null}, "inbound_nodes": [[["conv_pw_3", 0, 0, {}]]]}, {"name": "conv_pw_3_relu", "class_name": "Activation", "config": {"name": "conv_pw_3_relu", "trainable": true, "activation": "relu6"}, "inbound_nodes": [[["conv_pw_3_bn", 0, 0, {}]]]}, {"name": "conv_pad_4", "class_name": "ZeroPadding2D", "config": {"name": "conv_pad_4", "trainable": true, "padding": [[1, 1], [1, 1]], "data_format": "channels_last"}, "inbound_nodes": [[["conv_pw_3_relu", 0, 0, {}]]]}, {"name": "conv_dw_4", "class_name": "DepthwiseConv2D", "config": {"name": "conv_dw_4", "trainable": true, "kernel_size": [3, 3], "strides": [2, 2], "padding": "valid", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "linear", "use_bias": false, "bias_initializer": {"class_name": "Zeros", "config": {}}, "bias_regularizer": null, "activity_regularizer": null, "bias_constraint": null, "depth_multiplier": 1, "depthwise_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "depthwise_regularizer": null, "depthwise_constraint": null}, "inbound_nodes": [[["conv_pad_4", 0, 0, {}]]]}, {"name": "conv_dw_4_bn", "class_name": "BatchNormalization", "config": {"name": "conv_dw_4_bn", "trainable": true, "axis": -1, "momentum": 0.99, "epsilon": 0.001, "center": true, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "gamma_initializer": {"class_name": "Ones", "config": {}}, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_regularizer": null, "gamma_regularizer": null, "beta_constraint": null, "gamma_constraint": null}, "inbound_nodes": [[["conv_dw_4", 0, 0, {}]]]}, {"name": "conv_dw_4_relu", "class_name": "Activation", "config": {"name": "conv_dw_4_relu", "trainable": true, "activation": "relu6"}, "inbound_nodes": [[["conv_dw_4_bn", 0, 0, {}]]]}, {"name": "conv_pw_4", "class_name": "Conv2D", "config": {"name": "conv_pw_4", "trainable": true, "filters": 64, "kernel_size": [1, 1], "strides": [1, 1], "padding": "same", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "linear", "use_bias": false, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["conv_dw_4_relu", 0, 0, {}]]]}, {"name": "conv_pw_4_bn", "class_name": "BatchNormalization", "config": {"name": "conv_pw_4_bn", "trainable": true, "axis": -1, "momentum": 0.99, "epsilon": 0.001, "center": true, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "gamma_initializer": {"class_name": "Ones", "config": {}}, "moving_mean_initializer": {"class_name": "Zeros", 
"config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_regularizer": null, "gamma_regularizer": null, "beta_constraint": null, "gamma_constraint": null}, "inbound_nodes": [[["conv_pw_4", 0, 0, {}]]]}, {"name": "conv_pw_4_relu", "class_name": "Activation", "config": {"name": "conv_pw_4_relu", "trainable": true, "activation": "relu6"}, "inbound_nodes": [[["conv_pw_4_bn", 0, 0, {}]]]}, {"name": "conv_pad_5", "class_name": "ZeroPadding2D", "config": {"name": "conv_pad_5", "trainable": true, "padding": [[1, 1], [1, 1]], "data_format": "channels_last"}, "inbound_nodes": [[["conv_pw_4_relu", 0, 0, {}]]]}, {"name": "conv_dw_5", "class_name": "DepthwiseConv2D", "config": {"name": "conv_dw_5", "trainable": true, "kernel_size": [3, 3], "strides": [1, 1], "padding": "valid", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "linear", "use_bias": false, "bias_initializer": {"class_name": "Zeros", "config": {}}, "bias_regularizer": null, "activity_regularizer": null, "bias_constraint": null, "depth_multiplier": 1, "depthwise_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "depthwise_regularizer": null, "depthwise_constraint": null}, "inbound_nodes": [[["conv_pad_5", 0, 0, {}]]]}, {"name": "conv_dw_5_bn", "class_name": "BatchNormalization", "config": {"name": "conv_dw_5_bn", "trainable": true, "axis": -1, "momentum": 0.99, "epsilon": 0.001, "center": true, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "gamma_initializer": {"class_name": "Ones", "config": {}}, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_regularizer": null, "gamma_regularizer": null, "beta_constraint": null, "gamma_constraint": null}, "inbound_nodes": [[["conv_dw_5", 0, 0, {}]]]}, {"name": "conv_dw_5_relu", "class_name": "Activation", "config": {"name": "conv_dw_5_relu", "trainable": true, "activation": "relu6"}, "inbound_nodes": [[["conv_dw_5_bn", 0, 0, {}]]]}, {"name": "conv_pw_5", "class_name": "Conv2D", "config": {"name": "conv_pw_5", "trainable": true, "filters": 64, "kernel_size": [1, 1], "strides": [1, 1], "padding": "same", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "linear", "use_bias": false, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["conv_dw_5_relu", 0, 0, {}]]]}, {"name": "conv_pw_5_bn", "class_name": "BatchNormalization", "config": {"name": "conv_pw_5_bn", "trainable": true, "axis": -1, "momentum": 0.99, "epsilon": 0.001, "center": true, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "gamma_initializer": {"class_name": "Ones", "config": {}}, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_regularizer": null, "gamma_regularizer": null, "beta_constraint": null, "gamma_constraint": null}, "inbound_nodes": [[["conv_pw_5", 0, 0, {}]]]}, {"name": "conv_pw_5_relu", "class_name": "Activation", "config": {"name": "conv_pw_5_relu", "trainable": true, "activation": "relu6"}, "inbound_nodes": [[["conv_pw_5_bn", 0, 0, 
{}]]]}, {"name": "conv_pad_6", "class_name": "ZeroPadding2D", "config": {"name": "conv_pad_6", "trainable": true, "padding": [[1, 1], [1, 1]], "data_format": "channels_last"}, "inbound_nodes": [[["conv_pw_5_relu", 0, 0, {}]]]}, {"name": "conv_dw_6", "class_name": "DepthwiseConv2D", "config": {"name": "conv_dw_6", "trainable": true, "kernel_size": [3, 3], "strides": [2, 2], "padding": "valid", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "linear", "use_bias": false, "bias_initializer": {"class_name": "Zeros", "config": {}}, "bias_regularizer": null, "activity_regularizer": null, "bias_constraint": null, "depth_multiplier": 1, "depthwise_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "depthwise_regularizer": null, "depthwise_constraint": null}, "inbound_nodes": [[["conv_pad_6", 0, 0, {}]]]}, {"name": "conv_dw_6_bn", "class_name": "BatchNormalization", "config": {"name": "conv_dw_6_bn", "trainable": true, "axis": -1, "momentum": 0.99, "epsilon": 0.001, "center": true, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "gamma_initializer": {"class_name": "Ones", "config": {}}, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_regularizer": null, "gamma_regularizer": null, "beta_constraint": null, "gamma_constraint": null}, "inbound_nodes": [[["conv_dw_6", 0, 0, {}]]]}, {"name": "conv_dw_6_relu", "class_name": "Activation", "config": {"name": "conv_dw_6_relu", "trainable": true, "activation": "relu6"}, "inbound_nodes": [[["conv_dw_6_bn", 0, 0, {}]]]}, {"name": "conv_pw_6", "class_name": "Conv2D", "config": {"name": "conv_pw_6", "trainable": true, "filters": 128, "kernel_size": [1, 1], "strides": [1, 1], "padding": "same", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "linear", "use_bias": false, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["conv_dw_6_relu", 0, 0, {}]]]}, {"name": "conv_pw_6_bn", "class_name": "BatchNormalization", "config": {"name": "conv_pw_6_bn", "trainable": true, "axis": -1, "momentum": 0.99, "epsilon": 0.001, "center": true, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "gamma_initializer": {"class_name": "Ones", "config": {}}, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_regularizer": null, "gamma_regularizer": null, "beta_constraint": null, "gamma_constraint": null}, "inbound_nodes": [[["conv_pw_6", 0, 0, {}]]]}, {"name": "conv_pw_6_relu", "class_name": "Activation", "config": {"name": "conv_pw_6_relu", "trainable": true, "activation": "relu6"}, "inbound_nodes": [[["conv_pw_6_bn", 0, 0, {}]]]}, {"name": "conv_pad_7", "class_name": "ZeroPadding2D", "config": {"name": "conv_pad_7", "trainable": true, "padding": [[1, 1], [1, 1]], "data_format": "channels_last"}, "inbound_nodes": [[["conv_pw_6_relu", 0, 0, {}]]]}, {"name": "conv_dw_7", "class_name": "DepthwiseConv2D", "config": {"name": "conv_dw_7", "trainable": true, "kernel_size": [3, 3], "strides": [1, 1], "padding": "valid", "data_format": 
"channels_last", "dilation_rate": [1, 1], "activation": "linear", "use_bias": false, "bias_initializer": {"class_name": "Zeros", "config": {}}, "bias_regularizer": null, "activity_regularizer": null, "bias_constraint": null, "depth_multiplier": 1, "depthwise_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "depthwise_regularizer": null, "depthwise_constraint": null}, "inbound_nodes": [[["conv_pad_7", 0, 0, {}]]]}, {"name": "conv_dw_7_bn", "class_name": "BatchNormalization", "config": {"name": "conv_dw_7_bn", "trainable": true, "axis": -1, "momentum": 0.99, "epsilon": 0.001, "center": true, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "gamma_initializer": {"class_name": "Ones", "config": {}}, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_regularizer": null, "gamma_regularizer": null, "beta_constraint": null, "gamma_constraint": null}, "inbound_nodes": [[["conv_dw_7", 0, 0, {}]]]}, {"name": "conv_dw_7_relu", "class_name": "Activation", "config": {"name": "conv_dw_7_relu", "trainable": true, "activation": "relu6"}, "inbound_nodes": [[["conv_dw_7_bn", 0, 0, {}]]]}, {"name": "conv_pw_7", "class_name": "Conv2D", "config": {"name": "conv_pw_7", "trainable": true, "filters": 128, "kernel_size": [1, 1], "strides": [1, 1], "padding": "same", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "linear", "use_bias": false, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["conv_dw_7_relu", 0, 0, {}]]]}, {"name": "conv_pw_7_bn", "class_name": "BatchNormalization", "config": {"name": "conv_pw_7_bn", "trainable": true, "axis": -1, "momentum": 0.99, "epsilon": 0.001, "center": true, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "gamma_initializer": {"class_name": "Ones", "config": {}}, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_regularizer": null, "gamma_regularizer": null, "beta_constraint": null, "gamma_constraint": null}, "inbound_nodes": [[["conv_pw_7", 0, 0, {}]]]}, {"name": "conv_pw_7_relu", "class_name": "Activation", "config": {"name": "conv_pw_7_relu", "trainable": true, "activation": "relu6"}, "inbound_nodes": [[["conv_pw_7_bn", 0, 0, {}]]]}, {"name": "conv_pad_8", "class_name": "ZeroPadding2D", "config": {"name": "conv_pad_8", "trainable": true, "padding": [[1, 1], [1, 1]], "data_format": "channels_last"}, "inbound_nodes": [[["conv_pw_7_relu", 0, 0, {}]]]}, {"name": "conv_dw_8", "class_name": "DepthwiseConv2D", "config": {"name": "conv_dw_8", "trainable": true, "kernel_size": [3, 3], "strides": [1, 1], "padding": "valid", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "linear", "use_bias": false, "bias_initializer": {"class_name": "Zeros", "config": {}}, "bias_regularizer": null, "activity_regularizer": null, "bias_constraint": null, "depth_multiplier": 1, "depthwise_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, 
"depthwise_regularizer": null, "depthwise_constraint": null}, "inbound_nodes": [[["conv_pad_8", 0, 0, {}]]]}, {"name": "conv_dw_8_bn", "class_name": "BatchNormalization", "config": {"name": "conv_dw_8_bn", "trainable": true, "axis": -1, "momentum": 0.99, "epsilon": 0.001, "center": true, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "gamma_initializer": {"class_name": "Ones", "config": {}}, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_regularizer": null, "gamma_regularizer": null, "beta_constraint": null, "gamma_constraint": null}, "inbound_nodes": [[["conv_dw_8", 0, 0, {}]]]}, {"name": "conv_dw_8_relu", "class_name": "Activation", "config": {"name": "conv_dw_8_relu", "trainable": true, "activation": "relu6"}, "inbound_nodes": [[["conv_dw_8_bn", 0, 0, {}]]]}, {"name": "conv_pw_8", "class_name": "Conv2D", "config": {"name": "conv_pw_8", "trainable": true, "filters": 128, "kernel_size": [1, 1], "strides": [1, 1], "padding": "same", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "linear", "use_bias": false, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["conv_dw_8_relu", 0, 0, {}]]]}, {"name": "conv_pw_8_bn", "class_name": "BatchNormalization", "config": {"name": "conv_pw_8_bn", "trainable": true, "axis": -1, "momentum": 0.99, "epsilon": 0.001, "center": true, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "gamma_initializer": {"class_name": "Ones", "config": {}}, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_regularizer": null, "gamma_regularizer": null, "beta_constraint": null, "gamma_constraint": null}, "inbound_nodes": [[["conv_pw_8", 0, 0, {}]]]}, {"name": "conv_pw_8_relu", "class_name": "Activation", "config": {"name": "conv_pw_8_relu", "trainable": true, "activation": "relu6"}, "inbound_nodes": [[["conv_pw_8_bn", 0, 0, {}]]]}, {"name": "conv_pad_9", "class_name": "ZeroPadding2D", "config": {"name": "conv_pad_9", "trainable": true, "padding": [[1, 1], [1, 1]], "data_format": "channels_last"}, "inbound_nodes": [[["conv_pw_8_relu", 0, 0, {}]]]}, {"name": "conv_dw_9", "class_name": "DepthwiseConv2D", "config": {"name": "conv_dw_9", "trainable": true, "kernel_size": [3, 3], "strides": [1, 1], "padding": "valid", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "linear", "use_bias": false, "bias_initializer": {"class_name": "Zeros", "config": {}}, "bias_regularizer": null, "activity_regularizer": null, "bias_constraint": null, "depth_multiplier": 1, "depthwise_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "depthwise_regularizer": null, "depthwise_constraint": null}, "inbound_nodes": [[["conv_pad_9", 0, 0, {}]]]}, {"name": "conv_dw_9_bn", "class_name": "BatchNormalization", "config": {"name": "conv_dw_9_bn", "trainable": true, "axis": -1, "momentum": 0.99, "epsilon": 0.001, "center": true, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "gamma_initializer": {"class_name": "Ones", 
"config": {}}, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_regularizer": null, "gamma_regularizer": null, "beta_constraint": null, "gamma_constraint": null}, "inbound_nodes": [[["conv_dw_9", 0, 0, {}]]]}, {"name": "conv_dw_9_relu", "class_name": "Activation", "config": {"name": "conv_dw_9_relu", "trainable": true, "activation": "relu6"}, "inbound_nodes": [[["conv_dw_9_bn", 0, 0, {}]]]}, {"name": "conv_pw_9", "class_name": "Conv2D", "config": {"name": "conv_pw_9", "trainable": true, "filters": 128, "kernel_size": [1, 1], "strides": [1, 1], "padding": "same", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "linear", "use_bias": false, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["conv_dw_9_relu", 0, 0, {}]]]}, {"name": "conv_pw_9_bn", "class_name": "BatchNormalization", "config": {"name": "conv_pw_9_bn", "trainable": true, "axis": -1, "momentum": 0.99, "epsilon": 0.001, "center": true, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "gamma_initializer": {"class_name": "Ones", "config": {}}, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_regularizer": null, "gamma_regularizer": null, "beta_constraint": null, "gamma_constraint": null}, "inbound_nodes": [[["conv_pw_9", 0, 0, {}]]]}, {"name": "conv_pw_9_relu", "class_name": "Activation", "config": {"name": "conv_pw_9_relu", "trainable": true, "activation": "relu6"}, "inbound_nodes": [[["conv_pw_9_bn", 0, 0, {}]]]}, {"name": "conv_pad_10", "class_name": "ZeroPadding2D", "config": {"name": "conv_pad_10", "trainable": true, "padding": [[1, 1], [1, 1]], "data_format": "channels_last"}, "inbound_nodes": [[["conv_pw_9_relu", 0, 0, {}]]]}, {"name": "conv_dw_10", "class_name": "DepthwiseConv2D", "config": {"name": "conv_dw_10", "trainable": true, "kernel_size": [3, 3], "strides": [1, 1], "padding": "valid", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "linear", "use_bias": false, "bias_initializer": {"class_name": "Zeros", "config": {}}, "bias_regularizer": null, "activity_regularizer": null, "bias_constraint": null, "depth_multiplier": 1, "depthwise_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "depthwise_regularizer": null, "depthwise_constraint": null}, "inbound_nodes": [[["conv_pad_10", 0, 0, {}]]]}, {"name": "conv_dw_10_bn", "class_name": "BatchNormalization", "config": {"name": "conv_dw_10_bn", "trainable": true, "axis": -1, "momentum": 0.99, "epsilon": 0.001, "center": true, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "gamma_initializer": {"class_name": "Ones", "config": {}}, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_regularizer": null, "gamma_regularizer": null, "beta_constraint": null, "gamma_constraint": null}, "inbound_nodes": [[["conv_dw_10", 0, 0, {}]]]}, {"name": "conv_dw_10_relu", "class_name": "Activation", "config": {"name": "conv_dw_10_relu", 
"trainable": true, "activation": "relu6"}, "inbound_nodes": [[["conv_dw_10_bn", 0, 0, {}]]]}, {"name": "conv_pw_10", "class_name": "Conv2D", "config": {"name": "conv_pw_10", "trainable": true, "filters": 128, "kernel_size": [1, 1], "strides": [1, 1], "padding": "same", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "linear", "use_bias": false, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["conv_dw_10_relu", 0, 0, {}]]]}, {"name": "conv_pw_10_bn", "class_name": "BatchNormalization", "config": {"name": "conv_pw_10_bn", "trainable": true, "axis": -1, "momentum": 0.99, "epsilon": 0.001, "center": true, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "gamma_initializer": {"class_name": "Ones", "config": {}}, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_regularizer": null, "gamma_regularizer": null, "beta_constraint": null, "gamma_constraint": null}, "inbound_nodes": [[["conv_pw_10", 0, 0, {}]]]}, {"name": "conv_pw_10_relu", "class_name": "Activation", "config": {"name": "conv_pw_10_relu", "trainable": true, "activation": "relu6"}, "inbound_nodes": [[["conv_pw_10_bn", 0, 0, {}]]]}, {"name": "conv_pad_11", "class_name": "ZeroPadding2D", "config": {"name": "conv_pad_11", "trainable": true, "padding": [[1, 1], [1, 1]], "data_format": "channels_last"}, "inbound_nodes": [[["conv_pw_10_relu", 0, 0, {}]]]}, {"name": "conv_dw_11", "class_name": "DepthwiseConv2D", "config": {"name": "conv_dw_11", "trainable": true, "kernel_size": [3, 3], "strides": [1, 1], "padding": "valid", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "linear", "use_bias": false, "bias_initializer": {"class_name": "Zeros", "config": {}}, "bias_regularizer": null, "activity_regularizer": null, "bias_constraint": null, "depth_multiplier": 1, "depthwise_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "depthwise_regularizer": null, "depthwise_constraint": null}, "inbound_nodes": [[["conv_pad_11", 0, 0, {}]]]}, {"name": "conv_dw_11_bn", "class_name": "BatchNormalization", "config": {"name": "conv_dw_11_bn", "trainable": true, "axis": -1, "momentum": 0.99, "epsilon": 0.001, "center": true, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "gamma_initializer": {"class_name": "Ones", "config": {}}, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_regularizer": null, "gamma_regularizer": null, "beta_constraint": null, "gamma_constraint": null}, "inbound_nodes": [[["conv_dw_11", 0, 0, {}]]]}, {"name": "conv_dw_11_relu", "class_name": "Activation", "config": {"name": "conv_dw_11_relu", "trainable": true, "activation": "relu6"}, "inbound_nodes": [[["conv_dw_11_bn", 0, 0, {}]]]}, {"name": "conv_pw_11", "class_name": "Conv2D", "config": {"name": "conv_pw_11", "trainable": true, "filters": 128, "kernel_size": [1, 1], "strides": [1, 1], "padding": "same", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "linear", "use_bias": false, "kernel_initializer": 
{"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["conv_dw_11_relu", 0, 0, {}]]]}, {"name": "conv_pw_11_bn", "class_name": "BatchNormalization", "config": {"name": "conv_pw_11_bn", "trainable": true, "axis": -1, "momentum": 0.99, "epsilon": 0.001, "center": true, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "gamma_initializer": {"class_name": "Ones", "config": {}}, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_regularizer": null, "gamma_regularizer": null, "beta_constraint": null, "gamma_constraint": null}, "inbound_nodes": [[["conv_pw_11", 0, 0, {}]]]}, {"name": "conv_pw_11_relu", "class_name": "Activation", "config": {"name": "conv_pw_11_relu", "trainable": true, "activation": "relu6"}, "inbound_nodes": [[["conv_pw_11_bn", 0, 0, {}]]]}, {"name": "conv_pad_12", "class_name": "ZeroPadding2D", "config": {"name": "conv_pad_12", "trainable": true, "padding": [[1, 1], [1, 1]], "data_format": "channels_last"}, "inbound_nodes": [[["conv_pw_11_relu", 0, 0, {}]]]}, {"name": "conv_dw_12", "class_name": "DepthwiseConv2D", "config": {"name": "conv_dw_12", "trainable": true, "kernel_size": [3, 3], "strides": [2, 2], "padding": "valid", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "linear", "use_bias": false, "bias_initializer": {"class_name": "Zeros", "config": {}}, "bias_regularizer": null, "activity_regularizer": null, "bias_constraint": null, "depth_multiplier": 1, "depthwise_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "depthwise_regularizer": null, "depthwise_constraint": null}, "inbound_nodes": [[["conv_pad_12", 0, 0, {}]]]}, {"name": "conv_dw_12_bn", "class_name": "BatchNormalization", "config": {"name": "conv_dw_12_bn", "trainable": true, "axis": -1, "momentum": 0.99, "epsilon": 0.001, "center": true, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "gamma_initializer": {"class_name": "Ones", "config": {}}, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_regularizer": null, "gamma_regularizer": null, "beta_constraint": null, "gamma_constraint": null}, "inbound_nodes": [[["conv_dw_12", 0, 0, {}]]]}, {"name": "conv_dw_12_relu", "class_name": "Activation", "config": {"name": "conv_dw_12_relu", "trainable": true, "activation": "relu6"}, "inbound_nodes": [[["conv_dw_12_bn", 0, 0, {}]]]}, {"name": "conv_pw_12", "class_name": "Conv2D", "config": {"name": "conv_pw_12", "trainable": true, "filters": 256, "kernel_size": [1, 1], "strides": [1, 1], "padding": "same", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "linear", "use_bias": false, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["conv_dw_12_relu", 0, 0, {}]]]}, {"name": 
"conv_pw_12_bn", "class_name": "BatchNormalization", "config": {"name": "conv_pw_12_bn", "trainable": true, "axis": -1, "momentum": 0.99, "epsilon": 0.001, "center": true, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "gamma_initializer": {"class_name": "Ones", "config": {}}, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_regularizer": null, "gamma_regularizer": null, "beta_constraint": null, "gamma_constraint": null}, "inbound_nodes": [[["conv_pw_12", 0, 0, {}]]]}, {"name": "conv_pw_12_relu", "class_name": "Activation", "config": {"name": "conv_pw_12_relu", "trainable": true, "activation": "relu6"}, "inbound_nodes": [[["conv_pw_12_bn", 0, 0, {}]]]}, {"name": "conv_pad_13", "class_name": "ZeroPadding2D", "config": {"name": "conv_pad_13", "trainable": true, "padding": [[1, 1], [1, 1]], "data_format": "channels_last"}, "inbound_nodes": [[["conv_pw_12_relu", 0, 0, {}]]]}, {"name": "conv_dw_13", "class_name": "DepthwiseConv2D", "config": {"name": "conv_dw_13", "trainable": true, "kernel_size": [3, 3], "strides": [1, 1], "padding": "valid", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "linear", "use_bias": false, "bias_initializer": {"class_name": "Zeros", "config": {}}, "bias_regularizer": null, "activity_regularizer": null, "bias_constraint": null, "depth_multiplier": 1, "depthwise_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "depthwise_regularizer": null, "depthwise_constraint": null}, "inbound_nodes": [[["conv_pad_13", 0, 0, {}]]]}, {"name": "conv_dw_13_bn", "class_name": "BatchNormalization", "config": {"name": "conv_dw_13_bn", "trainable": true, "axis": -1, "momentum": 0.99, "epsilon": 0.001, "center": true, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "gamma_initializer": {"class_name": "Ones", "config": {}}, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_regularizer": null, "gamma_regularizer": null, "beta_constraint": null, "gamma_constraint": null}, "inbound_nodes": [[["conv_dw_13", 0, 0, {}]]]}, {"name": "conv_dw_13_relu", "class_name": "Activation", "config": {"name": "conv_dw_13_relu", "trainable": true, "activation": "relu6"}, "inbound_nodes": [[["conv_dw_13_bn", 0, 0, {}]]]}, {"name": "conv_pw_13", "class_name": "Conv2D", "config": {"name": "conv_pw_13", "trainable": true, "filters": 256, "kernel_size": [1, 1], "strides": [1, 1], "padding": "same", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "linear", "use_bias": false, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["conv_dw_13_relu", 0, 0, {}]]]}, {"name": "conv_pw_13_bn", "class_name": "BatchNormalization", "config": {"name": "conv_pw_13_bn", "trainable": true, "axis": -1, "momentum": 0.99, "epsilon": 0.001, "center": true, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "gamma_initializer": {"class_name": "Ones", "config": {}}, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, 
"moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_regularizer": null, "gamma_regularizer": null, "beta_constraint": null, "gamma_constraint": null}, "inbound_nodes": [[["conv_pw_13", 0, 0, {}]]]}, {"name": "conv_pw_13_relu", "class_name": "Activation", "config": {"name": "conv_pw_13_relu", "trainable": true, "activation": "relu6"}, "inbound_nodes": [[["conv_pw_13_bn", 0, 0, {}]]]}], "input_layers": [["input_2", 0, 0]], "output_layers": [["conv_pw_13_relu", 0, 0]]}, "inbound_nodes": [[["input_1", 0, 0, {}]]]}, {"name": "conv2d_1", "class_name": "Conv2D", "config": {"name": "conv2d_1", "trainable": true, "filters": 20, "kernel_size": [1, 1], "strides": [1, 1], "padding": "valid", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["mobilenet_0.25_64", 1, 0, {}]]]}, {"name": "flatten_1", "class_name": "Flatten", "config": {"name": "flatten_1", "trainable": true}, "inbound_nodes": [[["conv2d_1", 0, 0, {}]]]}, {"name": "dropout_1", "class_name": "Dropout", "config": {"name": "dropout_1", "trainable": true, "rate": 0.2, "noise_shape": null, "seed": null}, "inbound_nodes": [[["flatten_1", 0, 0, {}]]]}, {"name": "feat_a", "class_name": "Dense", "config": {"name": "feat_a", "trainable": true, "units": 32, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["dropout_1", 0, 0, {}]]]}, {"name": "pred_a", "class_name": "Dense", "config": {"name": "pred_a", "trainable": true, "units": 1, "activation": "linear", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["feat_a", 0, 0, {}]]]}], "input_layers": [["input_1", 0, 0]], "output_layers": [["pred_a", 0, 0]]}, "keras_version": "2.1.5", "backend": "tensorflow"} \ No newline at end of file diff --git a/megaage_models/MobileNet/batch_size_50/mobilenet_reg_0.25_64/mobilenet_reg_0.25_64.png b/megaage_models/MobileNet/batch_size_50/mobilenet_reg_0.25_64/mobilenet_reg_0.25_64.png new file mode 100644 index 0000000..00dc195 Binary files /dev/null and b/megaage_models/MobileNet/batch_size_50/mobilenet_reg_0.25_64/mobilenet_reg_0.25_64.png differ diff --git a/megaage_models/batch_size_50/ssrnet_3_3_3_64_1.0_1.0/history_ssrnet_3_3_3_64_1.0_1.0.h5 b/megaage_models/batch_size_50/ssrnet_3_3_3_64_1.0_1.0/history_ssrnet_3_3_3_64_1.0_1.0.h5 new file mode 100644 index 0000000..3686d1d Binary files /dev/null and b/megaage_models/batch_size_50/ssrnet_3_3_3_64_1.0_1.0/history_ssrnet_3_3_3_64_1.0_1.0.h5 differ diff --git 
a/megaage_models/batch_size_50/ssrnet_3_3_3_64_1.0_1.0/ssrnet_3_3_3_64_1.0_1.0.h5 b/megaage_models/batch_size_50/ssrnet_3_3_3_64_1.0_1.0/ssrnet_3_3_3_64_1.0_1.0.h5 new file mode 100644 index 0000000..12e7bac Binary files /dev/null and b/megaage_models/batch_size_50/ssrnet_3_3_3_64_1.0_1.0/ssrnet_3_3_3_64_1.0_1.0.h5 differ diff --git a/megaage_models/batch_size_50/ssrnet_3_3_3_64_1.0_1.0/ssrnet_3_3_3_64_1.0_1.0.json b/megaage_models/batch_size_50/ssrnet_3_3_3_64_1.0_1.0/ssrnet_3_3_3_64_1.0_1.0.json new file mode 100644 index 0000000..18226ea --- /dev/null +++ b/megaage_models/batch_size_50/ssrnet_3_3_3_64_1.0_1.0/ssrnet_3_3_3_64_1.0_1.0.json @@ -0,0 +1 @@ +{"class_name": "Model", "config": {"name": "model_1", "layers": [{"name": "input_1", "class_name": "InputLayer", "config": {"batch_input_shape": [null, 64, 64, 3], "dtype": "float32", "sparse": false, "name": "input_1"}, "inbound_nodes": []}, {"name": "conv2d_5", "class_name": "Conv2D", "config": {"name": "conv2d_5", "trainable": true, "filters": 16, "kernel_size": [3, 3], "strides": [1, 1], "padding": "valid", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "linear", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["input_1", 0, 0, {}]]]}, {"name": "conv2d_1", "class_name": "Conv2D", "config": {"name": "conv2d_1", "trainable": true, "filters": 32, "kernel_size": [3, 3], "strides": [1, 1], "padding": "valid", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "linear", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["input_1", 0, 0, {}]]]}, {"name": "batch_normalization_5", "class_name": "BatchNormalization", "config": {"name": "batch_normalization_5", "trainable": true, "axis": -1, "momentum": 0.99, "epsilon": 0.001, "center": true, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "gamma_initializer": {"class_name": "Ones", "config": {}}, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_regularizer": null, "gamma_regularizer": null, "beta_constraint": null, "gamma_constraint": null}, "inbound_nodes": [[["conv2d_5", 0, 0, {}]]]}, {"name": "batch_normalization_1", "class_name": "BatchNormalization", "config": {"name": "batch_normalization_1", "trainable": true, "axis": -1, "momentum": 0.99, "epsilon": 0.001, "center": true, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "gamma_initializer": {"class_name": "Ones", "config": {}}, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_regularizer": null, "gamma_regularizer": null, "beta_constraint": null, "gamma_constraint": null}, "inbound_nodes": [[["conv2d_1", 0, 0, {}]]]}, {"name": "activation_5", "class_name": "Activation", "config": {"name": "activation_5", "trainable": 
true, "activation": "tanh"}, "inbound_nodes": [[["batch_normalization_5", 0, 0, {}]]]}, {"name": "activation_1", "class_name": "Activation", "config": {"name": "activation_1", "trainable": true, "activation": "relu"}, "inbound_nodes": [[["batch_normalization_1", 0, 0, {}]]]}, {"name": "max_pooling2d_1", "class_name": "MaxPooling2D", "config": {"name": "max_pooling2d_1", "trainable": true, "pool_size": [2, 2], "padding": "valid", "strides": [2, 2], "data_format": "channels_last"}, "inbound_nodes": [[["activation_5", 0, 0, {}]]]}, {"name": "average_pooling2d_1", "class_name": "AveragePooling2D", "config": {"name": "average_pooling2d_1", "trainable": true, "pool_size": [2, 2], "padding": "valid", "strides": [2, 2], "data_format": "channels_last"}, "inbound_nodes": [[["activation_1", 0, 0, {}]]]}, {"name": "conv2d_6", "class_name": "Conv2D", "config": {"name": "conv2d_6", "trainable": true, "filters": 16, "kernel_size": [3, 3], "strides": [1, 1], "padding": "valid", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "linear", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["max_pooling2d_1", 0, 0, {}]]]}, {"name": "conv2d_2", "class_name": "Conv2D", "config": {"name": "conv2d_2", "trainable": true, "filters": 32, "kernel_size": [3, 3], "strides": [1, 1], "padding": "valid", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "linear", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["average_pooling2d_1", 0, 0, {}]]]}, {"name": "batch_normalization_6", "class_name": "BatchNormalization", "config": {"name": "batch_normalization_6", "trainable": true, "axis": -1, "momentum": 0.99, "epsilon": 0.001, "center": true, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "gamma_initializer": {"class_name": "Ones", "config": {}}, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_regularizer": null, "gamma_regularizer": null, "beta_constraint": null, "gamma_constraint": null}, "inbound_nodes": [[["conv2d_6", 0, 0, {}]]]}, {"name": "batch_normalization_2", "class_name": "BatchNormalization", "config": {"name": "batch_normalization_2", "trainable": true, "axis": -1, "momentum": 0.99, "epsilon": 0.001, "center": true, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "gamma_initializer": {"class_name": "Ones", "config": {}}, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_regularizer": null, "gamma_regularizer": null, "beta_constraint": null, "gamma_constraint": null}, "inbound_nodes": [[["conv2d_2", 0, 0, {}]]]}, {"name": "activation_6", "class_name": "Activation", "config": {"name": "activation_6", "trainable": true, "activation": "tanh"}, "inbound_nodes": [[["batch_normalization_6", 0, 0, 
{}]]]}, {"name": "activation_2", "class_name": "Activation", "config": {"name": "activation_2", "trainable": true, "activation": "relu"}, "inbound_nodes": [[["batch_normalization_2", 0, 0, {}]]]}, {"name": "max_pooling2d_2", "class_name": "MaxPooling2D", "config": {"name": "max_pooling2d_2", "trainable": true, "pool_size": [2, 2], "padding": "valid", "strides": [2, 2], "data_format": "channels_last"}, "inbound_nodes": [[["activation_6", 0, 0, {}]]]}, {"name": "average_pooling2d_2", "class_name": "AveragePooling2D", "config": {"name": "average_pooling2d_2", "trainable": true, "pool_size": [2, 2], "padding": "valid", "strides": [2, 2], "data_format": "channels_last"}, "inbound_nodes": [[["activation_2", 0, 0, {}]]]}, {"name": "conv2d_7", "class_name": "Conv2D", "config": {"name": "conv2d_7", "trainable": true, "filters": 16, "kernel_size": [3, 3], "strides": [1, 1], "padding": "valid", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "linear", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["max_pooling2d_2", 0, 0, {}]]]}, {"name": "conv2d_3", "class_name": "Conv2D", "config": {"name": "conv2d_3", "trainable": true, "filters": 32, "kernel_size": [3, 3], "strides": [1, 1], "padding": "valid", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "linear", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["average_pooling2d_2", 0, 0, {}]]]}, {"name": "batch_normalization_7", "class_name": "BatchNormalization", "config": {"name": "batch_normalization_7", "trainable": true, "axis": -1, "momentum": 0.99, "epsilon": 0.001, "center": true, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "gamma_initializer": {"class_name": "Ones", "config": {}}, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_regularizer": null, "gamma_regularizer": null, "beta_constraint": null, "gamma_constraint": null}, "inbound_nodes": [[["conv2d_7", 0, 0, {}]]]}, {"name": "batch_normalization_3", "class_name": "BatchNormalization", "config": {"name": "batch_normalization_3", "trainable": true, "axis": -1, "momentum": 0.99, "epsilon": 0.001, "center": true, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "gamma_initializer": {"class_name": "Ones", "config": {}}, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_regularizer": null, "gamma_regularizer": null, "beta_constraint": null, "gamma_constraint": null}, "inbound_nodes": [[["conv2d_3", 0, 0, {}]]]}, {"name": "activation_7", "class_name": "Activation", "config": {"name": "activation_7", "trainable": true, "activation": "tanh"}, "inbound_nodes": [[["batch_normalization_7", 0, 0, {}]]]}, {"name": "activation_3", "class_name": "Activation", "config": {"name": 
"activation_3", "trainable": true, "activation": "relu"}, "inbound_nodes": [[["batch_normalization_3", 0, 0, {}]]]}, {"name": "max_pooling2d_3", "class_name": "MaxPooling2D", "config": {"name": "max_pooling2d_3", "trainable": true, "pool_size": [2, 2], "padding": "valid", "strides": [2, 2], "data_format": "channels_last"}, "inbound_nodes": [[["activation_7", 0, 0, {}]]]}, {"name": "average_pooling2d_3", "class_name": "AveragePooling2D", "config": {"name": "average_pooling2d_3", "trainable": true, "pool_size": [2, 2], "padding": "valid", "strides": [2, 2], "data_format": "channels_last"}, "inbound_nodes": [[["activation_3", 0, 0, {}]]]}, {"name": "conv2d_8", "class_name": "Conv2D", "config": {"name": "conv2d_8", "trainable": true, "filters": 16, "kernel_size": [3, 3], "strides": [1, 1], "padding": "valid", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "linear", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["max_pooling2d_3", 0, 0, {}]]]}, {"name": "conv2d_4", "class_name": "Conv2D", "config": {"name": "conv2d_4", "trainable": true, "filters": 32, "kernel_size": [3, 3], "strides": [1, 1], "padding": "valid", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "linear", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["average_pooling2d_3", 0, 0, {}]]]}, {"name": "batch_normalization_8", "class_name": "BatchNormalization", "config": {"name": "batch_normalization_8", "trainable": true, "axis": -1, "momentum": 0.99, "epsilon": 0.001, "center": true, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "gamma_initializer": {"class_name": "Ones", "config": {}}, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_regularizer": null, "gamma_regularizer": null, "beta_constraint": null, "gamma_constraint": null}, "inbound_nodes": [[["conv2d_8", 0, 0, {}]]]}, {"name": "batch_normalization_4", "class_name": "BatchNormalization", "config": {"name": "batch_normalization_4", "trainable": true, "axis": -1, "momentum": 0.99, "epsilon": 0.001, "center": true, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "gamma_initializer": {"class_name": "Ones", "config": {}}, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_regularizer": null, "gamma_regularizer": null, "beta_constraint": null, "gamma_constraint": null}, "inbound_nodes": [[["conv2d_4", 0, 0, {}]]]}, {"name": "activation_8", "class_name": "Activation", "config": {"name": "activation_8", "trainable": true, "activation": "tanh"}, "inbound_nodes": [[["batch_normalization_8", 0, 0, {}]]]}, {"name": "activation_4", "class_name": "Activation", "config": {"name": "activation_4", "trainable": true, "activation": "relu"}, "inbound_nodes": 
[[["batch_normalization_4", 0, 0, {}]]]}, {"name": "conv2d_11", "class_name": "Conv2D", "config": {"name": "conv2d_11", "trainable": true, "filters": 10, "kernel_size": [1, 1], "strides": [1, 1], "padding": "valid", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["max_pooling2d_2", 0, 0, {}]]]}, {"name": "conv2d_12", "class_name": "Conv2D", "config": {"name": "conv2d_12", "trainable": true, "filters": 10, "kernel_size": [1, 1], "strides": [1, 1], "padding": "valid", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["average_pooling2d_2", 0, 0, {}]]]}, {"name": "conv2d_13", "class_name": "Conv2D", "config": {"name": "conv2d_13", "trainable": true, "filters": 10, "kernel_size": [1, 1], "strides": [1, 1], "padding": "valid", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["max_pooling2d_1", 0, 0, {}]]]}, {"name": "conv2d_14", "class_name": "Conv2D", "config": {"name": "conv2d_14", "trainable": true, "filters": 10, "kernel_size": [1, 1], "strides": [1, 1], "padding": "valid", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["average_pooling2d_1", 0, 0, {}]]]}, {"name": "conv2d_9", "class_name": "Conv2D", "config": {"name": "conv2d_9", "trainable": true, "filters": 10, "kernel_size": [1, 1], "strides": [1, 1], "padding": "valid", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["activation_8", 0, 0, {}]]]}, {"name": "conv2d_10", "class_name": "Conv2D", "config": {"name": "conv2d_10", "trainable": true, "filters": 10, "kernel_size": [1, 1], "strides": [1, 1], "padding": "valid", "data_format": 
"channels_last", "dilation_rate": [1, 1], "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["activation_4", 0, 0, {}]]]}, {"name": "max_pooling2d_4", "class_name": "MaxPooling2D", "config": {"name": "max_pooling2d_4", "trainable": true, "pool_size": [4, 4], "padding": "valid", "strides": [4, 4], "data_format": "channels_last"}, "inbound_nodes": [[["conv2d_11", 0, 0, {}]]]}, {"name": "average_pooling2d_4", "class_name": "AveragePooling2D", "config": {"name": "average_pooling2d_4", "trainable": true, "pool_size": [4, 4], "padding": "valid", "strides": [4, 4], "data_format": "channels_last"}, "inbound_nodes": [[["conv2d_12", 0, 0, {}]]]}, {"name": "max_pooling2d_5", "class_name": "MaxPooling2D", "config": {"name": "max_pooling2d_5", "trainable": true, "pool_size": [8, 8], "padding": "valid", "strides": [8, 8], "data_format": "channels_last"}, "inbound_nodes": [[["conv2d_13", 0, 0, {}]]]}, {"name": "average_pooling2d_5", "class_name": "AveragePooling2D", "config": {"name": "average_pooling2d_5", "trainable": true, "pool_size": [8, 8], "padding": "valid", "strides": [8, 8], "data_format": "channels_last"}, "inbound_nodes": [[["conv2d_14", 0, 0, {}]]]}, {"name": "flatten_1", "class_name": "Flatten", "config": {"name": "flatten_1", "trainable": true}, "inbound_nodes": [[["conv2d_9", 0, 0, {}]]]}, {"name": "flatten_2", "class_name": "Flatten", "config": {"name": "flatten_2", "trainable": true}, "inbound_nodes": [[["conv2d_10", 0, 0, {}]]]}, {"name": "flatten_3", "class_name": "Flatten", "config": {"name": "flatten_3", "trainable": true}, "inbound_nodes": [[["max_pooling2d_4", 0, 0, {}]]]}, {"name": "flatten_4", "class_name": "Flatten", "config": {"name": "flatten_4", "trainable": true}, "inbound_nodes": [[["average_pooling2d_4", 0, 0, {}]]]}, {"name": "flatten_5", "class_name": "Flatten", "config": {"name": "flatten_5", "trainable": true}, "inbound_nodes": [[["max_pooling2d_5", 0, 0, {}]]]}, {"name": "flatten_6", "class_name": "Flatten", "config": {"name": "flatten_6", "trainable": true}, "inbound_nodes": [[["average_pooling2d_5", 0, 0, {}]]]}, {"name": "dropout_1", "class_name": "Dropout", "config": {"name": "dropout_1", "trainable": true, "rate": 0.2, "noise_shape": null, "seed": null}, "inbound_nodes": [[["flatten_1", 0, 0, {}]]]}, {"name": "dropout_2", "class_name": "Dropout", "config": {"name": "dropout_2", "trainable": true, "rate": 0.2, "noise_shape": null, "seed": null}, "inbound_nodes": [[["flatten_2", 0, 0, {}]]]}, {"name": "dropout_3", "class_name": "Dropout", "config": {"name": "dropout_3", "trainable": true, "rate": 0.2, "noise_shape": null, "seed": null}, "inbound_nodes": [[["flatten_3", 0, 0, {}]]]}, {"name": "dropout_4", "class_name": "Dropout", "config": {"name": "dropout_4", "trainable": true, "rate": 0.2, "noise_shape": null, "seed": null}, "inbound_nodes": [[["flatten_4", 0, 0, {}]]]}, {"name": "dropout_5", "class_name": "Dropout", "config": {"name": "dropout_5", "trainable": true, "rate": 0.2, "noise_shape": null, "seed": null}, "inbound_nodes": [[["flatten_5", 0, 0, {}]]]}, {"name": "dropout_6", "class_name": "Dropout", "config": {"name": "dropout_6", "trainable": true, "rate": 0.2, "noise_shape": null, "seed": null}, 
"inbound_nodes": [[["flatten_6", 0, 0, {}]]]}, {"name": "dense_1", "class_name": "Dense", "config": {"name": "dense_1", "trainable": true, "units": 3, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["dropout_1", 0, 0, {}]]]}, {"name": "dense_2", "class_name": "Dense", "config": {"name": "dense_2", "trainable": true, "units": 3, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["dropout_2", 0, 0, {}]]]}, {"name": "dense_4", "class_name": "Dense", "config": {"name": "dense_4", "trainable": true, "units": 3, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["dropout_3", 0, 0, {}]]]}, {"name": "dense_5", "class_name": "Dense", "config": {"name": "dense_5", "trainable": true, "units": 3, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["dropout_4", 0, 0, {}]]]}, {"name": "dense_7", "class_name": "Dense", "config": {"name": "dense_7", "trainable": true, "units": 3, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["dropout_5", 0, 0, {}]]]}, {"name": "dense_8", "class_name": "Dense", "config": {"name": "dense_8", "trainable": true, "units": 3, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["dropout_6", 0, 0, {}]]]}, {"name": "multiply_2", "class_name": "Multiply", "config": {"name": "multiply_2", "trainable": true}, "inbound_nodes": [[["dense_1", 0, 0, {}], ["dense_2", 0, 0, {}]]]}, {"name": "multiply_4", "class_name": "Multiply", "config": {"name": "multiply_4", "trainable": true}, "inbound_nodes": [[["dense_4", 0, 0, {}], ["dense_5", 
0, 0, {}]]]}, {"name": "multiply_6", "class_name": "Multiply", "config": {"name": "multiply_6", "trainable": true}, "inbound_nodes": [[["dense_7", 0, 0, {}], ["dense_8", 0, 0, {}]]]}, {"name": "dense_3", "class_name": "Dense", "config": {"name": "dense_3", "trainable": true, "units": 6, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["multiply_2", 0, 0, {}]]]}, {"name": "dense_6", "class_name": "Dense", "config": {"name": "dense_6", "trainable": true, "units": 6, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["multiply_4", 0, 0, {}]]]}, {"name": "dense_9", "class_name": "Dense", "config": {"name": "dense_9", "trainable": true, "units": 6, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["multiply_6", 0, 0, {}]]]}, {"name": "multiply_1", "class_name": "Multiply", "config": {"name": "multiply_1", "trainable": true}, "inbound_nodes": [[["flatten_1", 0, 0, {}], ["flatten_2", 0, 0, {}]]]}, {"name": "multiply_3", "class_name": "Multiply", "config": {"name": "multiply_3", "trainable": true}, "inbound_nodes": [[["flatten_3", 0, 0, {}], ["flatten_4", 0, 0, {}]]]}, {"name": "multiply_5", "class_name": "Multiply", "config": {"name": "multiply_5", "trainable": true}, "inbound_nodes": [[["flatten_5", 0, 0, {}], ["flatten_6", 0, 0, {}]]]}, {"name": "pred_age_stage1", "class_name": "Dense", "config": {"name": "pred_age_stage1", "trainable": true, "units": 3, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["dense_3", 0, 0, {}]]]}, {"name": "pred_age_stage2", "class_name": "Dense", "config": {"name": "pred_age_stage2", "trainable": true, "units": 3, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["dense_6", 0, 0, {}]]]}, {"name": "pred_age_stage3", "class_name": "Dense", "config": {"name": "pred_age_stage3", "trainable": true, "units": 3, "activation": "relu", "use_bias": true, 
"kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["dense_9", 0, 0, {}]]]}, {"name": "delta_s1", "class_name": "Dense", "config": {"name": "delta_s1", "trainable": true, "units": 1, "activation": "tanh", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["multiply_1", 0, 0, {}]]]}, {"name": "delta_s2", "class_name": "Dense", "config": {"name": "delta_s2", "trainable": true, "units": 1, "activation": "tanh", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["multiply_3", 0, 0, {}]]]}, {"name": "delta_s3", "class_name": "Dense", "config": {"name": "delta_s3", "trainable": true, "units": 1, "activation": "tanh", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["multiply_5", 0, 0, {}]]]}, {"name": "local_delta_stage1", "class_name": "Dense", "config": {"name": "local_delta_stage1", "trainable": true, "units": 3, "activation": "tanh", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["dense_3", 0, 0, {}]]]}, {"name": "local_delta_stage2", "class_name": "Dense", "config": {"name": "local_delta_stage2", "trainable": true, "units": 3, "activation": "tanh", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["dense_6", 0, 0, {}]]]}, {"name": "local_delta_stage3", "class_name": "Dense", "config": {"name": "local_delta_stage3", "trainable": true, "units": 3, "activation": "tanh", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, 
"kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["dense_9", 0, 0, {}]]]}, {"name": "pred_a", "class_name": "Lambda", "config": {"name": "pred_a", "trainable": true, "function": ["4wYAAAAAAAAADwAAAAcAAABTAAAAc+4BAAB8AGQBGQBkAGQAhQJkAWYCGQBkARQAfQZ8AGQBGQBk\nAGQAhQJkAWYCGQBkARQAfQd8AGQBGQBkAGQAhQJkAWYCGQBkARQAfQh8AXwCFAB8AxQAfQlkAn0K\neEZ0AGQBfAGDAkQAXTh9C3wGfAt8BHwAZAMZAGQAZACFAnwLZgIZABQAFwB8AGQBGQBkAGQAhQJ8\nC2YCGQAUABcAfQZxZFcAdAFqAnwGZAuDAn0GfAZ8AWQEfAV8AGQFGQAUABcAFAAbAH0GeEZ0AGQB\nfAKDAkQAXTh9DHwHfAx8BHwAZAYZAGQAZACFAnwMZgIZABQAFwB8AGQEGQBkAGQAhQJ8DGYCGQAU\nABcAfQdx0FcAdAFqAnwHZAyDAn0HfAd8AWQEfAV8AGQFGQAUABcAFAAbAHwCZAR8BXwAZAcZABQA\nFwAUABsAfQd4SHQAZAF8A4MCRABdOn0NfAh8DXwEfABkCBkAZABkAIUCfA1mAhkAFAAXAHwAZAkZ\nAGQAZACFAnwNZgIZABQAFwB9CJABcVBXAHQBagJ8CGQNgwJ9CHwIfAFkBHwFfABkBRkAFAAXABQA\nGwB8AmQEfAV8AGQHGQAUABcAFAAbAHwDZAR8BXwAZAoZABQAFwAUABsAfQh8BnwHFwB8CBcAfAoU\nAH0OfA5TACkOTukAAAAA6WUAAADpBgAAAOkBAAAA6QMAAADpBwAAAOkEAAAA6QgAAADpAgAAAOkF\nAAAA6f////9yCwAAAHILAAAAKQPaBXJhbmdl2gFL2gtleHBhbmRfZGltcykP2gF42gJzMdoCczLa\nAnMz2gxsYW1iZGFfbG9jYWzaCGxhbWJkYV9k2gFh2gFi2gFj2gFB2gFW2gFp2gFq2gFr2gNhZ2Wp\nAHIeAAAA+kMvaG9tZS95aWhzdWFuL0RvY3VtZW50cy9TaW5pY2EvU1NSTkVUX21lZ2FhZ2VfYXNp\nYW4vU1NSTkVUX21vZGVsLnB52gltZXJnZV9hZ2UgBAAAcyYAAAAAARgBGAEYAQwBBAIQATgBDAEY\nAhABOAEMASwCEAE6AQwBQAMQAQ==\n", null, null], "function_type": "lambda", "output_shape": [1], "output_shape_type": "raw", "arguments": {"s1": 3, "s2": 3, "s3": 3, "lambda_local": 1.0, "lambda_d": 1.0}}, "inbound_nodes": [[["pred_age_stage1", 0, 0, {}], ["pred_age_stage2", 0, 0, {}], ["pred_age_stage3", 0, 0, {}], ["delta_s1", 0, 0, {}], ["delta_s2", 0, 0, {}], ["delta_s3", 0, 0, {}], ["local_delta_stage1", 0, 0, {}], ["local_delta_stage2", 0, 0, {}], ["local_delta_stage3", 0, 0, {}]]]}], "input_layers": [["input_1", 0, 0]], "output_layers": [["pred_a", 0, 0]]}, "keras_version": "2.1.5", "backend": "tensorflow"} \ No newline at end of file diff --git a/megaage_models/batch_size_50/ssrnet_3_3_3_64_1.0_1.0/ssrnet_3_3_3_64_1.0_1.0.png b/megaage_models/batch_size_50/ssrnet_3_3_3_64_1.0_1.0/ssrnet_3_3_3_64_1.0_1.0.png new file mode 100644 index 0000000..eab10c2 Binary files /dev/null and b/megaage_models/batch_size_50/ssrnet_3_3_3_64_1.0_1.0/ssrnet_3_3_3_64_1.0_1.0.png differ diff --git a/pre-trained/wiki/densenet_reg_13_64/densenet_reg_13_64.h5 b/pre-trained/wiki/densenet_reg_13_64/densenet_reg_13_64.h5 new file mode 100644 index 0000000..39cc410 Binary files /dev/null and b/pre-trained/wiki/densenet_reg_13_64/densenet_reg_13_64.h5 differ diff --git a/pre-trained/wiki/densenet_reg_13_64/densenet_reg_13_64.json b/pre-trained/wiki/densenet_reg_13_64/densenet_reg_13_64.json new file mode 100644 index 0000000..e9afde4 --- /dev/null +++ b/pre-trained/wiki/densenet_reg_13_64/densenet_reg_13_64.json @@ -0,0 +1 @@ +{"keras_version": "2.0.6", "backend": "tensorflow", "class_name": "Model", "config": {"layers": [{"config": {"dtype": "float32", "batch_input_shape": [null, 64, 64, 3], "sparse": false, "name": "input_1"}, "class_name": "InputLayer", "name": "input_1", "inbound_nodes": []}, {"config": {"layers": [{"inbound_nodes": [], "class_name": "InputLayer", "name": "input_2", "config": {"dtype": "float32", "batch_input_shape": [null, 64, 64, 3], "sparse": false, "name": "input_2"}}, {"inbound_nodes": [[["input_2", 0, 0, {}]]], "class_name": "Conv2D", "name": "conv2d_1", "config": {"data_format": "channels_last", "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 2.0, "distribution": "normal", "seed": null, 
"mode": "fan_in"}}, "activation": "linear", "trainable": true, "strides": [1, 1], "bias_regularizer": null, "padding": "same", "kernel_regularizer": {"class_name": "L1L2", "config": {"l1": 0.0, "l2": 9.999999747378752e-05}}, "activity_regularizer": null, "filters": 24, "kernel_constraint": null, "kernel_size": [3, 3], "bias_initializer": {"class_name": "Zeros", "config": {}}, "dilation_rate": [1, 1], "bias_constraint": null, "use_bias": false, "name": "conv2d_1"}}, {"inbound_nodes": [[["conv2d_1", 0, 0, {}]]], "class_name": "BatchNormalization", "name": "batch_normalization_1", "config": {"center": true, "momentum": 0.99, "trainable": true, "axis": -1, "gamma_regularizer": null, "beta_regularizer": null, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "gamma_constraint": null, "gamma_initializer": {"class_name": "Ones", "config": {}}, "beta_constraint": null, "epsilon": 1.1e-05, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "name": "batch_normalization_1"}}, {"inbound_nodes": [[["batch_normalization_1", 0, 0, {}]]], "class_name": "Activation", "name": "activation_1", "config": {"activation": "relu", "name": "activation_1", "trainable": true}}, {"inbound_nodes": [[["activation_1", 0, 0, {}]]], "class_name": "Conv2D", "name": "conv2d_2", "config": {"data_format": "channels_last", "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 2.0, "distribution": "normal", "seed": null, "mode": "fan_in"}}, "activation": "linear", "trainable": true, "strides": [1, 1], "bias_regularizer": null, "padding": "same", "kernel_regularizer": null, "activity_regularizer": null, "filters": 12, "kernel_constraint": null, "kernel_size": [3, 3], "bias_initializer": {"class_name": "Zeros", "config": {}}, "dilation_rate": [1, 1], "bias_constraint": null, "use_bias": false, "name": "conv2d_2"}}, {"inbound_nodes": [[["conv2d_1", 0, 0, {}], ["conv2d_2", 0, 0, {}]]], "class_name": "Concatenate", "name": "concatenate_1", "config": {"axis": -1, "name": "concatenate_1", "trainable": true}}, {"inbound_nodes": [[["concatenate_1", 0, 0, {}]]], "class_name": "BatchNormalization", "name": "batch_normalization_2", "config": {"center": true, "momentum": 0.99, "trainable": true, "axis": -1, "gamma_regularizer": null, "beta_regularizer": null, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "gamma_constraint": null, "gamma_initializer": {"class_name": "Ones", "config": {}}, "beta_constraint": null, "epsilon": 1.1e-05, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "name": "batch_normalization_2"}}, {"inbound_nodes": [[["batch_normalization_2", 0, 0, {}]]], "class_name": "Activation", "name": "activation_2", "config": {"activation": "relu", "name": "activation_2", "trainable": true}}, {"inbound_nodes": [[["activation_2", 0, 0, {}]]], "class_name": "Conv2D", "name": "conv2d_3", "config": {"data_format": "channels_last", "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 2.0, "distribution": "normal", "seed": null, "mode": "fan_in"}}, "activation": "linear", "trainable": true, "strides": [1, 1], "bias_regularizer": null, "padding": "same", "kernel_regularizer": null, "activity_regularizer": null, "filters": 12, "kernel_constraint": null, "kernel_size": [3, 3], "bias_initializer": {"class_name": "Zeros", "config": {}}, "dilation_rate": [1, 1], "bias_constraint": 
null, "use_bias": false, "name": "conv2d_3"}}, {"inbound_nodes": [[["concatenate_1", 0, 0, {}], ["conv2d_3", 0, 0, {}]]], "class_name": "Concatenate", "name": "concatenate_2", "config": {"axis": -1, "name": "concatenate_2", "trainable": true}}, {"inbound_nodes": [[["concatenate_2", 0, 0, {}]]], "class_name": "BatchNormalization", "name": "batch_normalization_3", "config": {"center": true, "momentum": 0.99, "trainable": true, "axis": -1, "gamma_regularizer": null, "beta_regularizer": null, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "gamma_constraint": null, "gamma_initializer": {"class_name": "Ones", "config": {}}, "beta_constraint": null, "epsilon": 1.1e-05, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "name": "batch_normalization_3"}}, {"inbound_nodes": [[["batch_normalization_3", 0, 0, {}]]], "class_name": "Activation", "name": "activation_3", "config": {"activation": "relu", "name": "activation_3", "trainable": true}}, {"inbound_nodes": [[["activation_3", 0, 0, {}]]], "class_name": "Conv2D", "name": "conv2d_4", "config": {"data_format": "channels_last", "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 2.0, "distribution": "normal", "seed": null, "mode": "fan_in"}}, "activation": "linear", "trainable": true, "strides": [1, 1], "bias_regularizer": null, "padding": "same", "kernel_regularizer": null, "activity_regularizer": null, "filters": 12, "kernel_constraint": null, "kernel_size": [3, 3], "bias_initializer": {"class_name": "Zeros", "config": {}}, "dilation_rate": [1, 1], "bias_constraint": null, "use_bias": false, "name": "conv2d_4"}}, {"inbound_nodes": [[["concatenate_2", 0, 0, {}], ["conv2d_4", 0, 0, {}]]], "class_name": "Concatenate", "name": "concatenate_3", "config": {"axis": -1, "name": "concatenate_3", "trainable": true}}, {"inbound_nodes": [[["concatenate_3", 0, 0, {}]]], "class_name": "BatchNormalization", "name": "batch_normalization_4", "config": {"center": true, "momentum": 0.99, "trainable": true, "axis": -1, "gamma_regularizer": null, "beta_regularizer": null, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "gamma_constraint": null, "gamma_initializer": {"class_name": "Ones", "config": {}}, "beta_constraint": null, "epsilon": 1.1e-05, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "name": "batch_normalization_4"}}, {"inbound_nodes": [[["batch_normalization_4", 0, 0, {}]]], "class_name": "Activation", "name": "activation_4", "config": {"activation": "relu", "name": "activation_4", "trainable": true}}, {"inbound_nodes": [[["activation_4", 0, 0, {}]]], "class_name": "Conv2D", "name": "conv2d_5", "config": {"data_format": "channels_last", "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 2.0, "distribution": "normal", "seed": null, "mode": "fan_in"}}, "activation": "linear", "trainable": true, "strides": [1, 1], "bias_regularizer": null, "padding": "same", "kernel_regularizer": {"class_name": "L1L2", "config": {"l1": 0.0, "l2": 9.999999747378752e-05}}, "activity_regularizer": null, "filters": 60, "kernel_constraint": null, "kernel_size": [1, 1], "bias_initializer": {"class_name": "Zeros", "config": {}}, "dilation_rate": [1, 1], "bias_constraint": null, "use_bias": false, "name": "conv2d_5"}}, {"inbound_nodes": [[["conv2d_5", 0, 0, {}]]], "class_name": "AveragePooling2D", "name": 
"average_pooling2d_1", "config": {"data_format": "channels_last", "trainable": true, "pool_size": [2, 2], "strides": [2, 2], "padding": "valid", "name": "average_pooling2d_1"}}, {"inbound_nodes": [[["average_pooling2d_1", 0, 0, {}]]], "class_name": "BatchNormalization", "name": "batch_normalization_5", "config": {"center": true, "momentum": 0.99, "trainable": true, "axis": -1, "gamma_regularizer": null, "beta_regularizer": null, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "gamma_constraint": null, "gamma_initializer": {"class_name": "Ones", "config": {}}, "beta_constraint": null, "epsilon": 1.1e-05, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "name": "batch_normalization_5"}}, {"inbound_nodes": [[["batch_normalization_5", 0, 0, {}]]], "class_name": "Activation", "name": "activation_5", "config": {"activation": "relu", "name": "activation_5", "trainable": true}}, {"inbound_nodes": [[["activation_5", 0, 0, {}]]], "class_name": "Conv2D", "name": "conv2d_6", "config": {"data_format": "channels_last", "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 2.0, "distribution": "normal", "seed": null, "mode": "fan_in"}}, "activation": "linear", "trainable": true, "strides": [1, 1], "bias_regularizer": null, "padding": "same", "kernel_regularizer": null, "activity_regularizer": null, "filters": 12, "kernel_constraint": null, "kernel_size": [3, 3], "bias_initializer": {"class_name": "Zeros", "config": {}}, "dilation_rate": [1, 1], "bias_constraint": null, "use_bias": false, "name": "conv2d_6"}}, {"inbound_nodes": [[["average_pooling2d_1", 0, 0, {}], ["conv2d_6", 0, 0, {}]]], "class_name": "Concatenate", "name": "concatenate_4", "config": {"axis": -1, "name": "concatenate_4", "trainable": true}}, {"inbound_nodes": [[["concatenate_4", 0, 0, {}]]], "class_name": "BatchNormalization", "name": "batch_normalization_6", "config": {"center": true, "momentum": 0.99, "trainable": true, "axis": -1, "gamma_regularizer": null, "beta_regularizer": null, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "gamma_constraint": null, "gamma_initializer": {"class_name": "Ones", "config": {}}, "beta_constraint": null, "epsilon": 1.1e-05, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "name": "batch_normalization_6"}}, {"inbound_nodes": [[["batch_normalization_6", 0, 0, {}]]], "class_name": "Activation", "name": "activation_6", "config": {"activation": "relu", "name": "activation_6", "trainable": true}}, {"inbound_nodes": [[["activation_6", 0, 0, {}]]], "class_name": "Conv2D", "name": "conv2d_7", "config": {"data_format": "channels_last", "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 2.0, "distribution": "normal", "seed": null, "mode": "fan_in"}}, "activation": "linear", "trainable": true, "strides": [1, 1], "bias_regularizer": null, "padding": "same", "kernel_regularizer": null, "activity_regularizer": null, "filters": 12, "kernel_constraint": null, "kernel_size": [3, 3], "bias_initializer": {"class_name": "Zeros", "config": {}}, "dilation_rate": [1, 1], "bias_constraint": null, "use_bias": false, "name": "conv2d_7"}}, {"inbound_nodes": [[["concatenate_4", 0, 0, {}], ["conv2d_7", 0, 0, {}]]], "class_name": "Concatenate", "name": "concatenate_5", "config": {"axis": -1, "name": "concatenate_5", "trainable": true}}, {"inbound_nodes": 
[[["concatenate_5", 0, 0, {}]]], "class_name": "BatchNormalization", "name": "batch_normalization_7", "config": {"center": true, "momentum": 0.99, "trainable": true, "axis": -1, "gamma_regularizer": null, "beta_regularizer": null, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "gamma_constraint": null, "gamma_initializer": {"class_name": "Ones", "config": {}}, "beta_constraint": null, "epsilon": 1.1e-05, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "name": "batch_normalization_7"}}, {"inbound_nodes": [[["batch_normalization_7", 0, 0, {}]]], "class_name": "Activation", "name": "activation_7", "config": {"activation": "relu", "name": "activation_7", "trainable": true}}, {"inbound_nodes": [[["activation_7", 0, 0, {}]]], "class_name": "Conv2D", "name": "conv2d_8", "config": {"data_format": "channels_last", "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 2.0, "distribution": "normal", "seed": null, "mode": "fan_in"}}, "activation": "linear", "trainable": true, "strides": [1, 1], "bias_regularizer": null, "padding": "same", "kernel_regularizer": null, "activity_regularizer": null, "filters": 12, "kernel_constraint": null, "kernel_size": [3, 3], "bias_initializer": {"class_name": "Zeros", "config": {}}, "dilation_rate": [1, 1], "bias_constraint": null, "use_bias": false, "name": "conv2d_8"}}, {"inbound_nodes": [[["concatenate_5", 0, 0, {}], ["conv2d_8", 0, 0, {}]]], "class_name": "Concatenate", "name": "concatenate_6", "config": {"axis": -1, "name": "concatenate_6", "trainable": true}}, {"inbound_nodes": [[["concatenate_6", 0, 0, {}]]], "class_name": "BatchNormalization", "name": "batch_normalization_8", "config": {"center": true, "momentum": 0.99, "trainable": true, "axis": -1, "gamma_regularizer": null, "beta_regularizer": null, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "gamma_constraint": null, "gamma_initializer": {"class_name": "Ones", "config": {}}, "beta_constraint": null, "epsilon": 1.1e-05, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "name": "batch_normalization_8"}}, {"inbound_nodes": [[["batch_normalization_8", 0, 0, {}]]], "class_name": "Activation", "name": "activation_8", "config": {"activation": "relu", "name": "activation_8", "trainable": true}}, {"inbound_nodes": [[["activation_8", 0, 0, {}]]], "class_name": "Conv2D", "name": "conv2d_9", "config": {"data_format": "channels_last", "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 2.0, "distribution": "normal", "seed": null, "mode": "fan_in"}}, "activation": "linear", "trainable": true, "strides": [1, 1], "bias_regularizer": null, "padding": "same", "kernel_regularizer": {"class_name": "L1L2", "config": {"l1": 0.0, "l2": 9.999999747378752e-05}}, "activity_regularizer": null, "filters": 96, "kernel_constraint": null, "kernel_size": [1, 1], "bias_initializer": {"class_name": "Zeros", "config": {}}, "dilation_rate": [1, 1], "bias_constraint": null, "use_bias": false, "name": "conv2d_9"}}, {"inbound_nodes": [[["conv2d_9", 0, 0, {}]]], "class_name": "AveragePooling2D", "name": "average_pooling2d_2", "config": {"data_format": "channels_last", "trainable": true, "pool_size": [2, 2], "strides": [2, 2], "padding": "valid", "name": "average_pooling2d_2"}}, {"inbound_nodes": [[["average_pooling2d_2", 0, 0, {}]]], "class_name": "BatchNormalization", 
"name": "batch_normalization_9", "config": {"center": true, "momentum": 0.99, "trainable": true, "axis": -1, "gamma_regularizer": null, "beta_regularizer": null, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "gamma_constraint": null, "gamma_initializer": {"class_name": "Ones", "config": {}}, "beta_constraint": null, "epsilon": 1.1e-05, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "name": "batch_normalization_9"}}, {"inbound_nodes": [[["batch_normalization_9", 0, 0, {}]]], "class_name": "Activation", "name": "activation_9", "config": {"activation": "relu", "name": "activation_9", "trainable": true}}, {"inbound_nodes": [[["activation_9", 0, 0, {}]]], "class_name": "Conv2D", "name": "conv2d_10", "config": {"data_format": "channels_last", "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 2.0, "distribution": "normal", "seed": null, "mode": "fan_in"}}, "activation": "linear", "trainable": true, "strides": [1, 1], "bias_regularizer": null, "padding": "same", "kernel_regularizer": null, "activity_regularizer": null, "filters": 12, "kernel_constraint": null, "kernel_size": [3, 3], "bias_initializer": {"class_name": "Zeros", "config": {}}, "dilation_rate": [1, 1], "bias_constraint": null, "use_bias": false, "name": "conv2d_10"}}, {"inbound_nodes": [[["average_pooling2d_2", 0, 0, {}], ["conv2d_10", 0, 0, {}]]], "class_name": "Concatenate", "name": "concatenate_7", "config": {"axis": -1, "name": "concatenate_7", "trainable": true}}, {"inbound_nodes": [[["concatenate_7", 0, 0, {}]]], "class_name": "BatchNormalization", "name": "batch_normalization_10", "config": {"center": true, "momentum": 0.99, "trainable": true, "axis": -1, "gamma_regularizer": null, "beta_regularizer": null, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "gamma_constraint": null, "gamma_initializer": {"class_name": "Ones", "config": {}}, "beta_constraint": null, "epsilon": 1.1e-05, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "name": "batch_normalization_10"}}, {"inbound_nodes": [[["batch_normalization_10", 0, 0, {}]]], "class_name": "Activation", "name": "activation_10", "config": {"activation": "relu", "name": "activation_10", "trainable": true}}, {"inbound_nodes": [[["activation_10", 0, 0, {}]]], "class_name": "Conv2D", "name": "conv2d_11", "config": {"data_format": "channels_last", "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 2.0, "distribution": "normal", "seed": null, "mode": "fan_in"}}, "activation": "linear", "trainable": true, "strides": [1, 1], "bias_regularizer": null, "padding": "same", "kernel_regularizer": null, "activity_regularizer": null, "filters": 12, "kernel_constraint": null, "kernel_size": [3, 3], "bias_initializer": {"class_name": "Zeros", "config": {}}, "dilation_rate": [1, 1], "bias_constraint": null, "use_bias": false, "name": "conv2d_11"}}, {"inbound_nodes": [[["concatenate_7", 0, 0, {}], ["conv2d_11", 0, 0, {}]]], "class_name": "Concatenate", "name": "concatenate_8", "config": {"axis": -1, "name": "concatenate_8", "trainable": true}}, {"inbound_nodes": [[["concatenate_8", 0, 0, {}]]], "class_name": "BatchNormalization", "name": "batch_normalization_11", "config": {"center": true, "momentum": 0.99, "trainable": true, "axis": -1, "gamma_regularizer": null, "beta_regularizer": null, "moving_mean_initializer": 
{"class_name": "Zeros", "config": {}}, "gamma_constraint": null, "gamma_initializer": {"class_name": "Ones", "config": {}}, "beta_constraint": null, "epsilon": 1.1e-05, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "name": "batch_normalization_11"}}, {"inbound_nodes": [[["batch_normalization_11", 0, 0, {}]]], "class_name": "Activation", "name": "activation_11", "config": {"activation": "relu", "name": "activation_11", "trainable": true}}, {"inbound_nodes": [[["activation_11", 0, 0, {}]]], "class_name": "Conv2D", "name": "conv2d_12", "config": {"data_format": "channels_last", "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 2.0, "distribution": "normal", "seed": null, "mode": "fan_in"}}, "activation": "linear", "trainable": true, "strides": [1, 1], "bias_regularizer": null, "padding": "same", "kernel_regularizer": null, "activity_regularizer": null, "filters": 12, "kernel_constraint": null, "kernel_size": [3, 3], "bias_initializer": {"class_name": "Zeros", "config": {}}, "dilation_rate": [1, 1], "bias_constraint": null, "use_bias": false, "name": "conv2d_12"}}, {"inbound_nodes": [[["concatenate_8", 0, 0, {}], ["conv2d_12", 0, 0, {}]]], "class_name": "Concatenate", "name": "concatenate_9", "config": {"axis": -1, "name": "concatenate_9", "trainable": true}}, {"inbound_nodes": [[["concatenate_9", 0, 0, {}]]], "class_name": "BatchNormalization", "name": "batch_normalization_12", "config": {"center": true, "momentum": 0.99, "trainable": true, "axis": -1, "gamma_regularizer": null, "beta_regularizer": null, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "gamma_constraint": null, "gamma_initializer": {"class_name": "Ones", "config": {}}, "beta_constraint": null, "epsilon": 1.1e-05, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "scale": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "name": "batch_normalization_12"}}, {"inbound_nodes": [[["batch_normalization_12", 0, 0, {}]]], "class_name": "Activation", "name": "activation_12", "config": {"activation": "relu", "name": "activation_12", "trainable": true}}, {"inbound_nodes": [[["activation_12", 0, 0, {}]]], "class_name": "GlobalAveragePooling2D", "name": "global_average_pooling2d_1", "config": {"data_format": "channels_last", "name": "global_average_pooling2d_1", "trainable": true}}], "output_layers": [["global_average_pooling2d_1", 0, 0]], "input_layers": [["input_2", 0, 0]], "name": "densenet"}, "class_name": "Model", "name": "densenet", "inbound_nodes": [[["input_1", 0, 0, {}]]]}, {"config": {"kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "distribution": "uniform", "seed": null, "mode": "fan_avg"}}, "activation": "relu", "trainable": true, "bias_regularizer": null, "kernel_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "units": 128, "bias_initializer": {"class_name": "Zeros", "config": {}}, "bias_constraint": null, "use_bias": true, "name": "dense_1"}, "class_name": "Dense", "name": "dense_1", "inbound_nodes": [[["densenet", 1, 0, {}]]]}, {"config": {"rate": 0.2, "trainable": true, "name": "dropout_1"}, "class_name": "Dropout", "name": "dropout_1", "inbound_nodes": [[["dense_1", 0, 0, {}]]]}, {"config": {"kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "distribution": "uniform", "seed": null, "mode": "fan_avg"}}, "activation": "relu", "trainable": true, 
"bias_regularizer": null, "kernel_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "units": 32, "bias_initializer": {"class_name": "Zeros", "config": {}}, "bias_constraint": null, "use_bias": true, "name": "feat_a"}, "class_name": "Dense", "name": "feat_a", "inbound_nodes": [[["dropout_1", 0, 0, {}]]]}, {"config": {"kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "distribution": "uniform", "seed": null, "mode": "fan_avg"}}, "activation": "linear", "trainable": true, "bias_regularizer": null, "kernel_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "units": 1, "bias_initializer": {"class_name": "Zeros", "config": {}}, "bias_constraint": null, "use_bias": true, "name": "pred_a"}, "class_name": "Dense", "name": "pred_a", "inbound_nodes": [[["feat_a", 0, 0, {}]]]}], "output_layers": [["pred_a", 0, 0]], "name": "model_1", "input_layers": [["input_1", 0, 0]]}} \ No newline at end of file diff --git a/pre-trained/wiki/densenet_reg_13_64/densenet_reg_13_64.png b/pre-trained/wiki/densenet_reg_13_64/densenet_reg_13_64.png new file mode 100644 index 0000000..877a026 Binary files /dev/null and b/pre-trained/wiki/densenet_reg_13_64/densenet_reg_13_64.png differ diff --git a/pre-trained/wiki/densenet_reg_13_64/history_densenet_reg_13_64.h5 b/pre-trained/wiki/densenet_reg_13_64/history_densenet_reg_13_64.h5 new file mode 100644 index 0000000..aa7d1c2 Binary files /dev/null and b/pre-trained/wiki/densenet_reg_13_64/history_densenet_reg_13_64.h5 differ diff --git a/pre-trained/wiki/densenet_reg_13_64/loss.pdf b/pre-trained/wiki/densenet_reg_13_64/loss.pdf new file mode 100644 index 0000000..b174bdd Binary files /dev/null and b/pre-trained/wiki/densenet_reg_13_64/loss.pdf differ diff --git a/pre-trained/wiki/densenet_reg_13_64/loss.png b/pre-trained/wiki/densenet_reg_13_64/loss.png new file mode 100644 index 0000000..d0714f4 Binary files /dev/null and b/pre-trained/wiki/densenet_reg_13_64/loss.png differ diff --git a/pre-trained/wiki/densenet_reg_13_64/performance.pdf b/pre-trained/wiki/densenet_reg_13_64/performance.pdf new file mode 100644 index 0000000..47410a6 Binary files /dev/null and b/pre-trained/wiki/densenet_reg_13_64/performance.pdf differ diff --git a/pre-trained/wiki/densenet_reg_13_64/performance.png b/pre-trained/wiki/densenet_reg_13_64/performance.png new file mode 100644 index 0000000..9bace59 Binary files /dev/null and b/pre-trained/wiki/densenet_reg_13_64/performance.png differ diff --git a/pre-trained/wiki/densenet_reg_19_64/densenet_reg_19_64.h5 b/pre-trained/wiki/densenet_reg_19_64/densenet_reg_19_64.h5 new file mode 100644 index 0000000..58e2a98 Binary files /dev/null and b/pre-trained/wiki/densenet_reg_19_64/densenet_reg_19_64.h5 differ diff --git a/pre-trained/wiki/densenet_reg_19_64/densenet_reg_19_64.json b/pre-trained/wiki/densenet_reg_19_64/densenet_reg_19_64.json new file mode 100644 index 0000000..2e76ee6 --- /dev/null +++ b/pre-trained/wiki/densenet_reg_19_64/densenet_reg_19_64.json @@ -0,0 +1 @@ +{"config": {"input_layers": [["input_1", 0, 0]], "output_layers": [["pred_a", 0, 0]], "name": "model_1", "layers": [{"config": {"dtype": "float32", "sparse": false, "name": "input_1", "batch_input_shape": [null, 64, 64, 3]}, "inbound_nodes": [], "class_name": "InputLayer", "name": "input_1"}, {"config": {"input_layers": [["input_2", 0, 0]], "output_layers": [["global_average_pooling2d_1", 0, 0]], "name": "densenet", "layers": [{"config": {"dtype": "float32", "sparse": false, "name": "input_2", 
"batch_input_shape": [null, 64, 64, 3]}, "inbound_nodes": [], "class_name": "InputLayer", "name": "input_2"}, {"config": {"kernel_regularizer": {"config": {"l2": 9.999999747378752e-05, "l1": 0.0}, "class_name": "L1L2"}, "trainable": true, "bias_constraint": null, "bias_initializer": {"config": {}, "class_name": "Zeros"}, "kernel_constraint": null, "use_bias": false, "activity_regularizer": null, "padding": "same", "filters": 24, "name": "conv2d_1", "kernel_initializer": {"config": {"scale": 2.0, "distribution": "normal", "mode": "fan_in", "seed": null}, "class_name": "VarianceScaling"}, "kernel_size": [3, 3], "bias_regularizer": null, "activation": "linear", "data_format": "channels_last", "dilation_rate": [1, 1], "strides": [1, 1]}, "inbound_nodes": [[["input_2", 0, 0, {}]]], "class_name": "Conv2D", "name": "conv2d_1"}, {"config": {"scale": true, "trainable": true, "beta_constraint": null, "gamma_constraint": null, "beta_regularizer": null, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "center": true, "gamma_regularizer": null, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "name": "batch_normalization_1", "axis": -1, "epsilon": 1.1e-05, "momentum": 0.99, "gamma_initializer": {"config": {}, "class_name": "Ones"}}, "inbound_nodes": [[["conv2d_1", 0, 0, {}]]], "class_name": "BatchNormalization", "name": "batch_normalization_1"}, {"config": {"trainable": true, "name": "activation_1", "activation": "relu"}, "inbound_nodes": [[["batch_normalization_1", 0, 0, {}]]], "class_name": "Activation", "name": "activation_1"}, {"config": {"kernel_regularizer": null, "trainable": true, "bias_constraint": null, "bias_initializer": {"config": {}, "class_name": "Zeros"}, "kernel_constraint": null, "use_bias": false, "activity_regularizer": null, "padding": "same", "filters": 12, "name": "conv2d_2", "kernel_initializer": {"config": {"scale": 2.0, "distribution": "normal", "mode": "fan_in", "seed": null}, "class_name": "VarianceScaling"}, "kernel_size": [3, 3], "bias_regularizer": null, "activation": "linear", "data_format": "channels_last", "dilation_rate": [1, 1], "strides": [1, 1]}, "inbound_nodes": [[["activation_1", 0, 0, {}]]], "class_name": "Conv2D", "name": "conv2d_2"}, {"config": {"axis": -1, "trainable": true, "name": "concatenate_1"}, "inbound_nodes": [[["conv2d_1", 0, 0, {}], ["conv2d_2", 0, 0, {}]]], "class_name": "Concatenate", "name": "concatenate_1"}, {"config": {"scale": true, "trainable": true, "beta_constraint": null, "gamma_constraint": null, "beta_regularizer": null, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "center": true, "gamma_regularizer": null, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "name": "batch_normalization_2", "axis": -1, "epsilon": 1.1e-05, "momentum": 0.99, "gamma_initializer": {"config": {}, "class_name": "Ones"}}, "inbound_nodes": [[["concatenate_1", 0, 0, {}]]], "class_name": "BatchNormalization", "name": "batch_normalization_2"}, {"config": {"trainable": true, "name": "activation_2", "activation": "relu"}, "inbound_nodes": [[["batch_normalization_2", 0, 0, {}]]], "class_name": "Activation", "name": "activation_2"}, {"config": {"kernel_regularizer": null, "trainable": true, "bias_constraint": null, "bias_initializer": {"config": {}, "class_name": "Zeros"}, "kernel_constraint": null, "use_bias": false, "activity_regularizer": null, "padding": "same", "filters": 
12, "name": "conv2d_3", "kernel_initializer": {"config": {"scale": 2.0, "distribution": "normal", "mode": "fan_in", "seed": null}, "class_name": "VarianceScaling"}, "kernel_size": [3, 3], "bias_regularizer": null, "activation": "linear", "data_format": "channels_last", "dilation_rate": [1, 1], "strides": [1, 1]}, "inbound_nodes": [[["activation_2", 0, 0, {}]]], "class_name": "Conv2D", "name": "conv2d_3"}, {"config": {"axis": -1, "trainable": true, "name": "concatenate_2"}, "inbound_nodes": [[["concatenate_1", 0, 0, {}], ["conv2d_3", 0, 0, {}]]], "class_name": "Concatenate", "name": "concatenate_2"}, {"config": {"scale": true, "trainable": true, "beta_constraint": null, "gamma_constraint": null, "beta_regularizer": null, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "center": true, "gamma_regularizer": null, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "name": "batch_normalization_3", "axis": -1, "epsilon": 1.1e-05, "momentum": 0.99, "gamma_initializer": {"config": {}, "class_name": "Ones"}}, "inbound_nodes": [[["concatenate_2", 0, 0, {}]]], "class_name": "BatchNormalization", "name": "batch_normalization_3"}, {"config": {"trainable": true, "name": "activation_3", "activation": "relu"}, "inbound_nodes": [[["batch_normalization_3", 0, 0, {}]]], "class_name": "Activation", "name": "activation_3"}, {"config": {"kernel_regularizer": null, "trainable": true, "bias_constraint": null, "bias_initializer": {"config": {}, "class_name": "Zeros"}, "kernel_constraint": null, "use_bias": false, "activity_regularizer": null, "padding": "same", "filters": 12, "name": "conv2d_4", "kernel_initializer": {"config": {"scale": 2.0, "distribution": "normal", "mode": "fan_in", "seed": null}, "class_name": "VarianceScaling"}, "kernel_size": [3, 3], "bias_regularizer": null, "activation": "linear", "data_format": "channels_last", "dilation_rate": [1, 1], "strides": [1, 1]}, "inbound_nodes": [[["activation_3", 0, 0, {}]]], "class_name": "Conv2D", "name": "conv2d_4"}, {"config": {"axis": -1, "trainable": true, "name": "concatenate_3"}, "inbound_nodes": [[["concatenate_2", 0, 0, {}], ["conv2d_4", 0, 0, {}]]], "class_name": "Concatenate", "name": "concatenate_3"}, {"config": {"scale": true, "trainable": true, "beta_constraint": null, "gamma_constraint": null, "beta_regularizer": null, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "center": true, "gamma_regularizer": null, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "name": "batch_normalization_4", "axis": -1, "epsilon": 1.1e-05, "momentum": 0.99, "gamma_initializer": {"config": {}, "class_name": "Ones"}}, "inbound_nodes": [[["concatenate_3", 0, 0, {}]]], "class_name": "BatchNormalization", "name": "batch_normalization_4"}, {"config": {"trainable": true, "name": "activation_4", "activation": "relu"}, "inbound_nodes": [[["batch_normalization_4", 0, 0, {}]]], "class_name": "Activation", "name": "activation_4"}, {"config": {"kernel_regularizer": null, "trainable": true, "bias_constraint": null, "bias_initializer": {"config": {}, "class_name": "Zeros"}, "kernel_constraint": null, "use_bias": false, "activity_regularizer": null, "padding": "same", "filters": 12, "name": "conv2d_5", "kernel_initializer": {"config": {"scale": 2.0, "distribution": "normal", "mode": "fan_in", "seed": null}, "class_name": "VarianceScaling"}, "kernel_size": [3, 3], "bias_regularizer": null, 
"activation": "linear", "data_format": "channels_last", "dilation_rate": [1, 1], "strides": [1, 1]}, "inbound_nodes": [[["activation_4", 0, 0, {}]]], "class_name": "Conv2D", "name": "conv2d_5"}, {"config": {"axis": -1, "trainable": true, "name": "concatenate_4"}, "inbound_nodes": [[["concatenate_3", 0, 0, {}], ["conv2d_5", 0, 0, {}]]], "class_name": "Concatenate", "name": "concatenate_4"}, {"config": {"scale": true, "trainable": true, "beta_constraint": null, "gamma_constraint": null, "beta_regularizer": null, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "center": true, "gamma_regularizer": null, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "name": "batch_normalization_5", "axis": -1, "epsilon": 1.1e-05, "momentum": 0.99, "gamma_initializer": {"config": {}, "class_name": "Ones"}}, "inbound_nodes": [[["concatenate_4", 0, 0, {}]]], "class_name": "BatchNormalization", "name": "batch_normalization_5"}, {"config": {"trainable": true, "name": "activation_5", "activation": "relu"}, "inbound_nodes": [[["batch_normalization_5", 0, 0, {}]]], "class_name": "Activation", "name": "activation_5"}, {"config": {"kernel_regularizer": null, "trainable": true, "bias_constraint": null, "bias_initializer": {"config": {}, "class_name": "Zeros"}, "kernel_constraint": null, "use_bias": false, "activity_regularizer": null, "padding": "same", "filters": 12, "name": "conv2d_6", "kernel_initializer": {"config": {"scale": 2.0, "distribution": "normal", "mode": "fan_in", "seed": null}, "class_name": "VarianceScaling"}, "kernel_size": [3, 3], "bias_regularizer": null, "activation": "linear", "data_format": "channels_last", "dilation_rate": [1, 1], "strides": [1, 1]}, "inbound_nodes": [[["activation_5", 0, 0, {}]]], "class_name": "Conv2D", "name": "conv2d_6"}, {"config": {"axis": -1, "trainable": true, "name": "concatenate_5"}, "inbound_nodes": [[["concatenate_4", 0, 0, {}], ["conv2d_6", 0, 0, {}]]], "class_name": "Concatenate", "name": "concatenate_5"}, {"config": {"scale": true, "trainable": true, "beta_constraint": null, "gamma_constraint": null, "beta_regularizer": null, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "center": true, "gamma_regularizer": null, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "name": "batch_normalization_6", "axis": -1, "epsilon": 1.1e-05, "momentum": 0.99, "gamma_initializer": {"config": {}, "class_name": "Ones"}}, "inbound_nodes": [[["concatenate_5", 0, 0, {}]]], "class_name": "BatchNormalization", "name": "batch_normalization_6"}, {"config": {"trainable": true, "name": "activation_6", "activation": "relu"}, "inbound_nodes": [[["batch_normalization_6", 0, 0, {}]]], "class_name": "Activation", "name": "activation_6"}, {"config": {"kernel_regularizer": {"config": {"l2": 9.999999747378752e-05, "l1": 0.0}, "class_name": "L1L2"}, "trainable": true, "bias_constraint": null, "bias_initializer": {"config": {}, "class_name": "Zeros"}, "kernel_constraint": null, "use_bias": false, "activity_regularizer": null, "padding": "same", "filters": 84, "name": "conv2d_7", "kernel_initializer": {"config": {"scale": 2.0, "distribution": "normal", "mode": "fan_in", "seed": null}, "class_name": "VarianceScaling"}, "kernel_size": [1, 1], "bias_regularizer": null, "activation": "linear", "data_format": "channels_last", "dilation_rate": [1, 1], "strides": [1, 1]}, "inbound_nodes": [[["activation_6", 0, 0, 
{}]]], "class_name": "Conv2D", "name": "conv2d_7"}, {"config": {"trainable": true, "strides": [2, 2], "name": "average_pooling2d_1", "pool_size": [2, 2], "data_format": "channels_last", "padding": "valid"}, "inbound_nodes": [[["conv2d_7", 0, 0, {}]]], "class_name": "AveragePooling2D", "name": "average_pooling2d_1"}, {"config": {"scale": true, "trainable": true, "beta_constraint": null, "gamma_constraint": null, "beta_regularizer": null, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "center": true, "gamma_regularizer": null, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "name": "batch_normalization_7", "axis": -1, "epsilon": 1.1e-05, "momentum": 0.99, "gamma_initializer": {"config": {}, "class_name": "Ones"}}, "inbound_nodes": [[["average_pooling2d_1", 0, 0, {}]]], "class_name": "BatchNormalization", "name": "batch_normalization_7"}, {"config": {"trainable": true, "name": "activation_7", "activation": "relu"}, "inbound_nodes": [[["batch_normalization_7", 0, 0, {}]]], "class_name": "Activation", "name": "activation_7"}, {"config": {"kernel_regularizer": null, "trainable": true, "bias_constraint": null, "bias_initializer": {"config": {}, "class_name": "Zeros"}, "kernel_constraint": null, "use_bias": false, "activity_regularizer": null, "padding": "same", "filters": 12, "name": "conv2d_8", "kernel_initializer": {"config": {"scale": 2.0, "distribution": "normal", "mode": "fan_in", "seed": null}, "class_name": "VarianceScaling"}, "kernel_size": [3, 3], "bias_regularizer": null, "activation": "linear", "data_format": "channels_last", "dilation_rate": [1, 1], "strides": [1, 1]}, "inbound_nodes": [[["activation_7", 0, 0, {}]]], "class_name": "Conv2D", "name": "conv2d_8"}, {"config": {"axis": -1, "trainable": true, "name": "concatenate_6"}, "inbound_nodes": [[["average_pooling2d_1", 0, 0, {}], ["conv2d_8", 0, 0, {}]]], "class_name": "Concatenate", "name": "concatenate_6"}, {"config": {"scale": true, "trainable": true, "beta_constraint": null, "gamma_constraint": null, "beta_regularizer": null, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "center": true, "gamma_regularizer": null, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "name": "batch_normalization_8", "axis": -1, "epsilon": 1.1e-05, "momentum": 0.99, "gamma_initializer": {"config": {}, "class_name": "Ones"}}, "inbound_nodes": [[["concatenate_6", 0, 0, {}]]], "class_name": "BatchNormalization", "name": "batch_normalization_8"}, {"config": {"trainable": true, "name": "activation_8", "activation": "relu"}, "inbound_nodes": [[["batch_normalization_8", 0, 0, {}]]], "class_name": "Activation", "name": "activation_8"}, {"config": {"kernel_regularizer": null, "trainable": true, "bias_constraint": null, "bias_initializer": {"config": {}, "class_name": "Zeros"}, "kernel_constraint": null, "use_bias": false, "activity_regularizer": null, "padding": "same", "filters": 12, "name": "conv2d_9", "kernel_initializer": {"config": {"scale": 2.0, "distribution": "normal", "mode": "fan_in", "seed": null}, "class_name": "VarianceScaling"}, "kernel_size": [3, 3], "bias_regularizer": null, "activation": "linear", "data_format": "channels_last", "dilation_rate": [1, 1], "strides": [1, 1]}, "inbound_nodes": [[["activation_8", 0, 0, {}]]], "class_name": "Conv2D", "name": "conv2d_9"}, {"config": {"axis": -1, "trainable": true, "name": "concatenate_7"}, "inbound_nodes": 
[[["concatenate_6", 0, 0, {}], ["conv2d_9", 0, 0, {}]]], "class_name": "Concatenate", "name": "concatenate_7"}, {"config": {"scale": true, "trainable": true, "beta_constraint": null, "gamma_constraint": null, "beta_regularizer": null, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "center": true, "gamma_regularizer": null, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "name": "batch_normalization_9", "axis": -1, "epsilon": 1.1e-05, "momentum": 0.99, "gamma_initializer": {"config": {}, "class_name": "Ones"}}, "inbound_nodes": [[["concatenate_7", 0, 0, {}]]], "class_name": "BatchNormalization", "name": "batch_normalization_9"}, {"config": {"trainable": true, "name": "activation_9", "activation": "relu"}, "inbound_nodes": [[["batch_normalization_9", 0, 0, {}]]], "class_name": "Activation", "name": "activation_9"}, {"config": {"kernel_regularizer": null, "trainable": true, "bias_constraint": null, "bias_initializer": {"config": {}, "class_name": "Zeros"}, "kernel_constraint": null, "use_bias": false, "activity_regularizer": null, "padding": "same", "filters": 12, "name": "conv2d_10", "kernel_initializer": {"config": {"scale": 2.0, "distribution": "normal", "mode": "fan_in", "seed": null}, "class_name": "VarianceScaling"}, "kernel_size": [3, 3], "bias_regularizer": null, "activation": "linear", "data_format": "channels_last", "dilation_rate": [1, 1], "strides": [1, 1]}, "inbound_nodes": [[["activation_9", 0, 0, {}]]], "class_name": "Conv2D", "name": "conv2d_10"}, {"config": {"axis": -1, "trainable": true, "name": "concatenate_8"}, "inbound_nodes": [[["concatenate_7", 0, 0, {}], ["conv2d_10", 0, 0, {}]]], "class_name": "Concatenate", "name": "concatenate_8"}, {"config": {"scale": true, "trainable": true, "beta_constraint": null, "gamma_constraint": null, "beta_regularizer": null, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "center": true, "gamma_regularizer": null, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "name": "batch_normalization_10", "axis": -1, "epsilon": 1.1e-05, "momentum": 0.99, "gamma_initializer": {"config": {}, "class_name": "Ones"}}, "inbound_nodes": [[["concatenate_8", 0, 0, {}]]], "class_name": "BatchNormalization", "name": "batch_normalization_10"}, {"config": {"trainable": true, "name": "activation_10", "activation": "relu"}, "inbound_nodes": [[["batch_normalization_10", 0, 0, {}]]], "class_name": "Activation", "name": "activation_10"}, {"config": {"kernel_regularizer": null, "trainable": true, "bias_constraint": null, "bias_initializer": {"config": {}, "class_name": "Zeros"}, "kernel_constraint": null, "use_bias": false, "activity_regularizer": null, "padding": "same", "filters": 12, "name": "conv2d_11", "kernel_initializer": {"config": {"scale": 2.0, "distribution": "normal", "mode": "fan_in", "seed": null}, "class_name": "VarianceScaling"}, "kernel_size": [3, 3], "bias_regularizer": null, "activation": "linear", "data_format": "channels_last", "dilation_rate": [1, 1], "strides": [1, 1]}, "inbound_nodes": [[["activation_10", 0, 0, {}]]], "class_name": "Conv2D", "name": "conv2d_11"}, {"config": {"axis": -1, "trainable": true, "name": "concatenate_9"}, "inbound_nodes": [[["concatenate_8", 0, 0, {}], ["conv2d_11", 0, 0, {}]]], "class_name": "Concatenate", "name": "concatenate_9"}, {"config": {"scale": true, "trainable": true, "beta_constraint": null, "gamma_constraint": 
null, "beta_regularizer": null, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "center": true, "gamma_regularizer": null, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "name": "batch_normalization_11", "axis": -1, "epsilon": 1.1e-05, "momentum": 0.99, "gamma_initializer": {"config": {}, "class_name": "Ones"}}, "inbound_nodes": [[["concatenate_9", 0, 0, {}]]], "class_name": "BatchNormalization", "name": "batch_normalization_11"}, {"config": {"trainable": true, "name": "activation_11", "activation": "relu"}, "inbound_nodes": [[["batch_normalization_11", 0, 0, {}]]], "class_name": "Activation", "name": "activation_11"}, {"config": {"kernel_regularizer": null, "trainable": true, "bias_constraint": null, "bias_initializer": {"config": {}, "class_name": "Zeros"}, "kernel_constraint": null, "use_bias": false, "activity_regularizer": null, "padding": "same", "filters": 12, "name": "conv2d_12", "kernel_initializer": {"config": {"scale": 2.0, "distribution": "normal", "mode": "fan_in", "seed": null}, "class_name": "VarianceScaling"}, "kernel_size": [3, 3], "bias_regularizer": null, "activation": "linear", "data_format": "channels_last", "dilation_rate": [1, 1], "strides": [1, 1]}, "inbound_nodes": [[["activation_11", 0, 0, {}]]], "class_name": "Conv2D", "name": "conv2d_12"}, {"config": {"axis": -1, "trainable": true, "name": "concatenate_10"}, "inbound_nodes": [[["concatenate_9", 0, 0, {}], ["conv2d_12", 0, 0, {}]]], "class_name": "Concatenate", "name": "concatenate_10"}, {"config": {"scale": true, "trainable": true, "beta_constraint": null, "gamma_constraint": null, "beta_regularizer": null, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "center": true, "gamma_regularizer": null, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "name": "batch_normalization_12", "axis": -1, "epsilon": 1.1e-05, "momentum": 0.99, "gamma_initializer": {"config": {}, "class_name": "Ones"}}, "inbound_nodes": [[["concatenate_10", 0, 0, {}]]], "class_name": "BatchNormalization", "name": "batch_normalization_12"}, {"config": {"trainable": true, "name": "activation_12", "activation": "relu"}, "inbound_nodes": [[["batch_normalization_12", 0, 0, {}]]], "class_name": "Activation", "name": "activation_12"}, {"config": {"kernel_regularizer": {"config": {"l2": 9.999999747378752e-05, "l1": 0.0}, "class_name": "L1L2"}, "trainable": true, "bias_constraint": null, "bias_initializer": {"config": {}, "class_name": "Zeros"}, "kernel_constraint": null, "use_bias": false, "activity_regularizer": null, "padding": "same", "filters": 144, "name": "conv2d_13", "kernel_initializer": {"config": {"scale": 2.0, "distribution": "normal", "mode": "fan_in", "seed": null}, "class_name": "VarianceScaling"}, "kernel_size": [1, 1], "bias_regularizer": null, "activation": "linear", "data_format": "channels_last", "dilation_rate": [1, 1], "strides": [1, 1]}, "inbound_nodes": [[["activation_12", 0, 0, {}]]], "class_name": "Conv2D", "name": "conv2d_13"}, {"config": {"trainable": true, "strides": [2, 2], "name": "average_pooling2d_2", "pool_size": [2, 2], "data_format": "channels_last", "padding": "valid"}, "inbound_nodes": [[["conv2d_13", 0, 0, {}]]], "class_name": "AveragePooling2D", "name": "average_pooling2d_2"}, {"config": {"scale": true, "trainable": true, "beta_constraint": null, "gamma_constraint": null, "beta_regularizer": null, "beta_initializer": 
{"config": {}, "class_name": "Zeros"}, "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "center": true, "gamma_regularizer": null, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "name": "batch_normalization_13", "axis": -1, "epsilon": 1.1e-05, "momentum": 0.99, "gamma_initializer": {"config": {}, "class_name": "Ones"}}, "inbound_nodes": [[["average_pooling2d_2", 0, 0, {}]]], "class_name": "BatchNormalization", "name": "batch_normalization_13"}, {"config": {"trainable": true, "name": "activation_13", "activation": "relu"}, "inbound_nodes": [[["batch_normalization_13", 0, 0, {}]]], "class_name": "Activation", "name": "activation_13"}, {"config": {"kernel_regularizer": null, "trainable": true, "bias_constraint": null, "bias_initializer": {"config": {}, "class_name": "Zeros"}, "kernel_constraint": null, "use_bias": false, "activity_regularizer": null, "padding": "same", "filters": 12, "name": "conv2d_14", "kernel_initializer": {"config": {"scale": 2.0, "distribution": "normal", "mode": "fan_in", "seed": null}, "class_name": "VarianceScaling"}, "kernel_size": [3, 3], "bias_regularizer": null, "activation": "linear", "data_format": "channels_last", "dilation_rate": [1, 1], "strides": [1, 1]}, "inbound_nodes": [[["activation_13", 0, 0, {}]]], "class_name": "Conv2D", "name": "conv2d_14"}, {"config": {"axis": -1, "trainable": true, "name": "concatenate_11"}, "inbound_nodes": [[["average_pooling2d_2", 0, 0, {}], ["conv2d_14", 0, 0, {}]]], "class_name": "Concatenate", "name": "concatenate_11"}, {"config": {"scale": true, "trainable": true, "beta_constraint": null, "gamma_constraint": null, "beta_regularizer": null, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "center": true, "gamma_regularizer": null, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "name": "batch_normalization_14", "axis": -1, "epsilon": 1.1e-05, "momentum": 0.99, "gamma_initializer": {"config": {}, "class_name": "Ones"}}, "inbound_nodes": [[["concatenate_11", 0, 0, {}]]], "class_name": "BatchNormalization", "name": "batch_normalization_14"}, {"config": {"trainable": true, "name": "activation_14", "activation": "relu"}, "inbound_nodes": [[["batch_normalization_14", 0, 0, {}]]], "class_name": "Activation", "name": "activation_14"}, {"config": {"kernel_regularizer": null, "trainable": true, "bias_constraint": null, "bias_initializer": {"config": {}, "class_name": "Zeros"}, "kernel_constraint": null, "use_bias": false, "activity_regularizer": null, "padding": "same", "filters": 12, "name": "conv2d_15", "kernel_initializer": {"config": {"scale": 2.0, "distribution": "normal", "mode": "fan_in", "seed": null}, "class_name": "VarianceScaling"}, "kernel_size": [3, 3], "bias_regularizer": null, "activation": "linear", "data_format": "channels_last", "dilation_rate": [1, 1], "strides": [1, 1]}, "inbound_nodes": [[["activation_14", 0, 0, {}]]], "class_name": "Conv2D", "name": "conv2d_15"}, {"config": {"axis": -1, "trainable": true, "name": "concatenate_12"}, "inbound_nodes": [[["concatenate_11", 0, 0, {}], ["conv2d_15", 0, 0, {}]]], "class_name": "Concatenate", "name": "concatenate_12"}, {"config": {"scale": true, "trainable": true, "beta_constraint": null, "gamma_constraint": null, "beta_regularizer": null, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "center": true, "gamma_regularizer": null, "moving_variance_initializer": 
{"config": {}, "class_name": "Ones"}, "name": "batch_normalization_15", "axis": -1, "epsilon": 1.1e-05, "momentum": 0.99, "gamma_initializer": {"config": {}, "class_name": "Ones"}}, "inbound_nodes": [[["concatenate_12", 0, 0, {}]]], "class_name": "BatchNormalization", "name": "batch_normalization_15"}, {"config": {"trainable": true, "name": "activation_15", "activation": "relu"}, "inbound_nodes": [[["batch_normalization_15", 0, 0, {}]]], "class_name": "Activation", "name": "activation_15"}, {"config": {"kernel_regularizer": null, "trainable": true, "bias_constraint": null, "bias_initializer": {"config": {}, "class_name": "Zeros"}, "kernel_constraint": null, "use_bias": false, "activity_regularizer": null, "padding": "same", "filters": 12, "name": "conv2d_16", "kernel_initializer": {"config": {"scale": 2.0, "distribution": "normal", "mode": "fan_in", "seed": null}, "class_name": "VarianceScaling"}, "kernel_size": [3, 3], "bias_regularizer": null, "activation": "linear", "data_format": "channels_last", "dilation_rate": [1, 1], "strides": [1, 1]}, "inbound_nodes": [[["activation_15", 0, 0, {}]]], "class_name": "Conv2D", "name": "conv2d_16"}, {"config": {"axis": -1, "trainable": true, "name": "concatenate_13"}, "inbound_nodes": [[["concatenate_12", 0, 0, {}], ["conv2d_16", 0, 0, {}]]], "class_name": "Concatenate", "name": "concatenate_13"}, {"config": {"scale": true, "trainable": true, "beta_constraint": null, "gamma_constraint": null, "beta_regularizer": null, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "center": true, "gamma_regularizer": null, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "name": "batch_normalization_16", "axis": -1, "epsilon": 1.1e-05, "momentum": 0.99, "gamma_initializer": {"config": {}, "class_name": "Ones"}}, "inbound_nodes": [[["concatenate_13", 0, 0, {}]]], "class_name": "BatchNormalization", "name": "batch_normalization_16"}, {"config": {"trainable": true, "name": "activation_16", "activation": "relu"}, "inbound_nodes": [[["batch_normalization_16", 0, 0, {}]]], "class_name": "Activation", "name": "activation_16"}, {"config": {"kernel_regularizer": null, "trainable": true, "bias_constraint": null, "bias_initializer": {"config": {}, "class_name": "Zeros"}, "kernel_constraint": null, "use_bias": false, "activity_regularizer": null, "padding": "same", "filters": 12, "name": "conv2d_17", "kernel_initializer": {"config": {"scale": 2.0, "distribution": "normal", "mode": "fan_in", "seed": null}, "class_name": "VarianceScaling"}, "kernel_size": [3, 3], "bias_regularizer": null, "activation": "linear", "data_format": "channels_last", "dilation_rate": [1, 1], "strides": [1, 1]}, "inbound_nodes": [[["activation_16", 0, 0, {}]]], "class_name": "Conv2D", "name": "conv2d_17"}, {"config": {"axis": -1, "trainable": true, "name": "concatenate_14"}, "inbound_nodes": [[["concatenate_13", 0, 0, {}], ["conv2d_17", 0, 0, {}]]], "class_name": "Concatenate", "name": "concatenate_14"}, {"config": {"scale": true, "trainable": true, "beta_constraint": null, "gamma_constraint": null, "beta_regularizer": null, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "center": true, "gamma_regularizer": null, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "name": "batch_normalization_17", "axis": -1, "epsilon": 1.1e-05, "momentum": 0.99, "gamma_initializer": {"config": {}, "class_name": "Ones"}}, 
"inbound_nodes": [[["concatenate_14", 0, 0, {}]]], "class_name": "BatchNormalization", "name": "batch_normalization_17"}, {"config": {"trainable": true, "name": "activation_17", "activation": "relu"}, "inbound_nodes": [[["batch_normalization_17", 0, 0, {}]]], "class_name": "Activation", "name": "activation_17"}, {"config": {"kernel_regularizer": null, "trainable": true, "bias_constraint": null, "bias_initializer": {"config": {}, "class_name": "Zeros"}, "kernel_constraint": null, "use_bias": false, "activity_regularizer": null, "padding": "same", "filters": 12, "name": "conv2d_18", "kernel_initializer": {"config": {"scale": 2.0, "distribution": "normal", "mode": "fan_in", "seed": null}, "class_name": "VarianceScaling"}, "kernel_size": [3, 3], "bias_regularizer": null, "activation": "linear", "data_format": "channels_last", "dilation_rate": [1, 1], "strides": [1, 1]}, "inbound_nodes": [[["activation_17", 0, 0, {}]]], "class_name": "Conv2D", "name": "conv2d_18"}, {"config": {"axis": -1, "trainable": true, "name": "concatenate_15"}, "inbound_nodes": [[["concatenate_14", 0, 0, {}], ["conv2d_18", 0, 0, {}]]], "class_name": "Concatenate", "name": "concatenate_15"}, {"config": {"scale": true, "trainable": true, "beta_constraint": null, "gamma_constraint": null, "beta_regularizer": null, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "center": true, "gamma_regularizer": null, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "name": "batch_normalization_18", "axis": -1, "epsilon": 1.1e-05, "momentum": 0.99, "gamma_initializer": {"config": {}, "class_name": "Ones"}}, "inbound_nodes": [[["concatenate_15", 0, 0, {}]]], "class_name": "BatchNormalization", "name": "batch_normalization_18"}, {"config": {"trainable": true, "name": "activation_18", "activation": "relu"}, "inbound_nodes": [[["batch_normalization_18", 0, 0, {}]]], "class_name": "Activation", "name": "activation_18"}, {"config": {"trainable": true, "data_format": "channels_last", "name": "global_average_pooling2d_1"}, "inbound_nodes": [[["activation_18", 0, 0, {}]]], "class_name": "GlobalAveragePooling2D", "name": "global_average_pooling2d_1"}]}, "inbound_nodes": [[["input_1", 0, 0, {}]]], "class_name": "Model", "name": "densenet"}, {"config": {"kernel_regularizer": null, "trainable": true, "name": "dense_1", "bias_initializer": {"config": {}, "class_name": "Zeros"}, "kernel_constraint": null, "use_bias": true, "bias_constraint": null, "activity_regularizer": null, "units": 128, "kernel_initializer": {"config": {"scale": 1.0, "distribution": "uniform", "mode": "fan_avg", "seed": null}, "class_name": "VarianceScaling"}, "bias_regularizer": null, "activation": "relu"}, "inbound_nodes": [[["densenet", 1, 0, {}]]], "class_name": "Dense", "name": "dense_1"}, {"config": {"trainable": true, "rate": 0.2, "name": "dropout_1"}, "inbound_nodes": [[["dense_1", 0, 0, {}]]], "class_name": "Dropout", "name": "dropout_1"}, {"config": {"kernel_regularizer": null, "trainable": true, "name": "feat_a", "bias_initializer": {"config": {}, "class_name": "Zeros"}, "kernel_constraint": null, "use_bias": true, "bias_constraint": null, "activity_regularizer": null, "units": 32, "kernel_initializer": {"config": {"scale": 1.0, "distribution": "uniform", "mode": "fan_avg", "seed": null}, "class_name": "VarianceScaling"}, "bias_regularizer": null, "activation": "relu"}, "inbound_nodes": [[["dropout_1", 0, 0, {}]]], "class_name": "Dense", "name": "feat_a"}, {"config": 
{"kernel_regularizer": null, "trainable": true, "name": "pred_a", "bias_initializer": {"config": {}, "class_name": "Zeros"}, "kernel_constraint": null, "use_bias": true, "bias_constraint": null, "activity_regularizer": null, "units": 1, "kernel_initializer": {"config": {"scale": 1.0, "distribution": "uniform", "mode": "fan_avg", "seed": null}, "class_name": "VarianceScaling"}, "bias_regularizer": null, "activation": "linear"}, "inbound_nodes": [[["feat_a", 0, 0, {}]]], "class_name": "Dense", "name": "pred_a"}]}, "backend": "tensorflow", "keras_version": "2.0.6", "class_name": "Model"} \ No newline at end of file diff --git a/pre-trained/wiki/densenet_reg_19_64/densenet_reg_19_64.png b/pre-trained/wiki/densenet_reg_19_64/densenet_reg_19_64.png new file mode 100644 index 0000000..877a026 Binary files /dev/null and b/pre-trained/wiki/densenet_reg_19_64/densenet_reg_19_64.png differ diff --git a/pre-trained/wiki/densenet_reg_19_64/history_densenet_reg_19_64.h5 b/pre-trained/wiki/densenet_reg_19_64/history_densenet_reg_19_64.h5 new file mode 100644 index 0000000..7f7b9ac Binary files /dev/null and b/pre-trained/wiki/densenet_reg_19_64/history_densenet_reg_19_64.h5 differ diff --git a/pre-trained/wiki/densenet_reg_19_64/loss.pdf b/pre-trained/wiki/densenet_reg_19_64/loss.pdf new file mode 100644 index 0000000..15b4f9d Binary files /dev/null and b/pre-trained/wiki/densenet_reg_19_64/loss.pdf differ diff --git a/pre-trained/wiki/densenet_reg_19_64/loss.png b/pre-trained/wiki/densenet_reg_19_64/loss.png new file mode 100644 index 0000000..8ec9527 Binary files /dev/null and b/pre-trained/wiki/densenet_reg_19_64/loss.png differ diff --git a/pre-trained/wiki/densenet_reg_19_64/performance.pdf b/pre-trained/wiki/densenet_reg_19_64/performance.pdf new file mode 100644 index 0000000..2e38c94 Binary files /dev/null and b/pre-trained/wiki/densenet_reg_19_64/performance.pdf differ diff --git a/pre-trained/wiki/densenet_reg_19_64/performance.png b/pre-trained/wiki/densenet_reg_19_64/performance.png new file mode 100644 index 0000000..385ff72 Binary files /dev/null and b/pre-trained/wiki/densenet_reg_19_64/performance.png differ diff --git a/pre-trained/wiki/mobilenet_reg_0.25_64/history_mobilenet_reg_0.25_64.h5 b/pre-trained/wiki/mobilenet_reg_0.25_64/history_mobilenet_reg_0.25_64.h5 new file mode 100644 index 0000000..de48e1c Binary files /dev/null and b/pre-trained/wiki/mobilenet_reg_0.25_64/history_mobilenet_reg_0.25_64.h5 differ diff --git a/pre-trained/wiki/mobilenet_reg_0.25_64/loss.pdf b/pre-trained/wiki/mobilenet_reg_0.25_64/loss.pdf new file mode 100644 index 0000000..3fbd017 Binary files /dev/null and b/pre-trained/wiki/mobilenet_reg_0.25_64/loss.pdf differ diff --git a/pre-trained/wiki/mobilenet_reg_0.25_64/loss.png b/pre-trained/wiki/mobilenet_reg_0.25_64/loss.png new file mode 100644 index 0000000..05e1569 Binary files /dev/null and b/pre-trained/wiki/mobilenet_reg_0.25_64/loss.png differ diff --git a/pre-trained/wiki/mobilenet_reg_0.25_64/mobilenet_reg_0.25_64.h5 b/pre-trained/wiki/mobilenet_reg_0.25_64/mobilenet_reg_0.25_64.h5 new file mode 100644 index 0000000..5b1bbba Binary files /dev/null and b/pre-trained/wiki/mobilenet_reg_0.25_64/mobilenet_reg_0.25_64.h5 differ diff --git a/pre-trained/wiki/mobilenet_reg_0.25_64/mobilenet_reg_0.25_64.json b/pre-trained/wiki/mobilenet_reg_0.25_64/mobilenet_reg_0.25_64.json new file mode 100644 index 0000000..5651fda --- /dev/null +++ b/pre-trained/wiki/mobilenet_reg_0.25_64/mobilenet_reg_0.25_64.json @@ -0,0 +1 @@ +{"class_name": "Model", "backend": 
"tensorflow", "keras_version": "2.0.6", "config": {"layers": [{"class_name": "InputLayer", "name": "input_1", "inbound_nodes": [], "config": {"dtype": "float32", "batch_input_shape": [null, 64, 64, 3], "name": "input_1", "sparse": false}}, {"class_name": "Model", "name": "mobilenet_0.25_64", "inbound_nodes": [[["input_1", 0, 0, {}]]], "config": {"layers": [{"class_name": "InputLayer", "inbound_nodes": [], "name": "input_2", "config": {"dtype": "float32", "batch_input_shape": [null, 64, 64, 3], "name": "input_2", "sparse": false}}, {"class_name": "Conv2D", "inbound_nodes": [[["input_2", 0, 0, {}]]], "name": "conv1", "config": {"name": "conv1", "kernel_size": [3, 3], "strides": [2, 2], "dilation_rate": [1, 1], "kernel_regularizer": null, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"seed": null, "mode": "fan_avg", "distribution": "uniform", "scale": 1.0}}, "data_format": "channels_last", "bias_initializer": {"class_name": "Zeros", "config": {}}, "use_bias": false, "bias_regularizer": null, "activation": "linear", "trainable": true, "kernel_constraint": null, "activity_regularizer": null, "padding": "same", "bias_constraint": null, "filters": 8}}, {"class_name": "BatchNormalization", "inbound_nodes": [[["conv1", 0, 0, {}]]], "name": "conv1_bn", "config": {"scale": true, "gamma_regularizer": null, "momentum": 0.99, "trainable": true, "beta_constraint": null, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "epsilon": 0.001, "gamma_constraint": null, "name": "conv1_bn", "axis": -1, "gamma_initializer": {"class_name": "Ones", "config": {}}, "center": true, "beta_regularizer": null}}, {"class_name": "Activation", "inbound_nodes": [[["conv1_bn", 0, 0, {}]]], "name": "conv1_relu", "config": {"activation": "relu6", "trainable": true, "name": "conv1_relu"}}, {"class_name": "DepthwiseConv2D", "inbound_nodes": [[["conv1_relu", 0, 0, {}]]], "name": "conv_dw_1", "config": {"kernel_size": [3, 3], "bias_initializer": {"class_name": "Zeros", "config": {}}, "dilation_rate": [1, 1], "strides": [1, 1], "data_format": "channels_last", "depthwise_regularizer": null, "use_bias": false, "depth_multiplier": 1, "bias_regularizer": null, "depthwise_constraint": null, "activation": "linear", "trainable": true, "name": "conv_dw_1", "activity_regularizer": null, "padding": "same", "depthwise_initializer": {"class_name": "VarianceScaling", "config": {"seed": null, "mode": "fan_avg", "distribution": "uniform", "scale": 1.0}}, "bias_constraint": null}}, {"class_name": "BatchNormalization", "inbound_nodes": [[["conv_dw_1", 0, 0, {}]]], "name": "conv_dw_1_bn", "config": {"scale": true, "gamma_regularizer": null, "momentum": 0.99, "trainable": true, "beta_constraint": null, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "epsilon": 0.001, "gamma_constraint": null, "name": "conv_dw_1_bn", "axis": -1, "gamma_initializer": {"class_name": "Ones", "config": {}}, "center": true, "beta_regularizer": null}}, {"class_name": "Activation", "inbound_nodes": [[["conv_dw_1_bn", 0, 0, {}]]], "name": "conv_dw_1_relu", "config": {"activation": "relu6", "trainable": true, "name": "conv_dw_1_relu"}}, {"class_name": "Conv2D", "inbound_nodes": [[["conv_dw_1_relu", 0, 0, {}]]], "name": "conv_pw_1", "config": {"name": "conv_pw_1", 
"kernel_size": [1, 1], "strides": [1, 1], "dilation_rate": [1, 1], "kernel_regularizer": null, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"seed": null, "mode": "fan_avg", "distribution": "uniform", "scale": 1.0}}, "data_format": "channels_last", "bias_initializer": {"class_name": "Zeros", "config": {}}, "use_bias": false, "bias_regularizer": null, "activation": "linear", "trainable": true, "kernel_constraint": null, "activity_regularizer": null, "padding": "same", "bias_constraint": null, "filters": 16}}, {"class_name": "BatchNormalization", "inbound_nodes": [[["conv_pw_1", 0, 0, {}]]], "name": "conv_pw_1_bn", "config": {"scale": true, "gamma_regularizer": null, "momentum": 0.99, "trainable": true, "beta_constraint": null, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "epsilon": 0.001, "gamma_constraint": null, "name": "conv_pw_1_bn", "axis": -1, "gamma_initializer": {"class_name": "Ones", "config": {}}, "center": true, "beta_regularizer": null}}, {"class_name": "Activation", "inbound_nodes": [[["conv_pw_1_bn", 0, 0, {}]]], "name": "conv_pw_1_relu", "config": {"activation": "relu6", "trainable": true, "name": "conv_pw_1_relu"}}, {"class_name": "DepthwiseConv2D", "inbound_nodes": [[["conv_pw_1_relu", 0, 0, {}]]], "name": "conv_dw_2", "config": {"kernel_size": [3, 3], "bias_initializer": {"class_name": "Zeros", "config": {}}, "dilation_rate": [1, 1], "strides": [2, 2], "data_format": "channels_last", "depthwise_regularizer": null, "use_bias": false, "depth_multiplier": 1, "bias_regularizer": null, "depthwise_constraint": null, "activation": "linear", "trainable": true, "name": "conv_dw_2", "activity_regularizer": null, "padding": "same", "depthwise_initializer": {"class_name": "VarianceScaling", "config": {"seed": null, "mode": "fan_avg", "distribution": "uniform", "scale": 1.0}}, "bias_constraint": null}}, {"class_name": "BatchNormalization", "inbound_nodes": [[["conv_dw_2", 0, 0, {}]]], "name": "conv_dw_2_bn", "config": {"scale": true, "gamma_regularizer": null, "momentum": 0.99, "trainable": true, "beta_constraint": null, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "epsilon": 0.001, "gamma_constraint": null, "name": "conv_dw_2_bn", "axis": -1, "gamma_initializer": {"class_name": "Ones", "config": {}}, "center": true, "beta_regularizer": null}}, {"class_name": "Activation", "inbound_nodes": [[["conv_dw_2_bn", 0, 0, {}]]], "name": "conv_dw_2_relu", "config": {"activation": "relu6", "trainable": true, "name": "conv_dw_2_relu"}}, {"class_name": "Conv2D", "inbound_nodes": [[["conv_dw_2_relu", 0, 0, {}]]], "name": "conv_pw_2", "config": {"name": "conv_pw_2", "kernel_size": [1, 1], "strides": [1, 1], "dilation_rate": [1, 1], "kernel_regularizer": null, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"seed": null, "mode": "fan_avg", "distribution": "uniform", "scale": 1.0}}, "data_format": "channels_last", "bias_initializer": {"class_name": "Zeros", "config": {}}, "use_bias": false, "bias_regularizer": null, "activation": "linear", "trainable": true, "kernel_constraint": null, "activity_regularizer": null, "padding": "same", "bias_constraint": null, "filters": 32}}, {"class_name": "BatchNormalization", "inbound_nodes": [[["conv_pw_2", 0, 0, {}]]], "name": 
"conv_pw_2_bn", "config": {"scale": true, "gamma_regularizer": null, "momentum": 0.99, "trainable": true, "beta_constraint": null, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "epsilon": 0.001, "gamma_constraint": null, "name": "conv_pw_2_bn", "axis": -1, "gamma_initializer": {"class_name": "Ones", "config": {}}, "center": true, "beta_regularizer": null}}, {"class_name": "Activation", "inbound_nodes": [[["conv_pw_2_bn", 0, 0, {}]]], "name": "conv_pw_2_relu", "config": {"activation": "relu6", "trainable": true, "name": "conv_pw_2_relu"}}, {"class_name": "DepthwiseConv2D", "inbound_nodes": [[["conv_pw_2_relu", 0, 0, {}]]], "name": "conv_dw_3", "config": {"kernel_size": [3, 3], "bias_initializer": {"class_name": "Zeros", "config": {}}, "dilation_rate": [1, 1], "strides": [1, 1], "data_format": "channels_last", "depthwise_regularizer": null, "use_bias": false, "depth_multiplier": 1, "bias_regularizer": null, "depthwise_constraint": null, "activation": "linear", "trainable": true, "name": "conv_dw_3", "activity_regularizer": null, "padding": "same", "depthwise_initializer": {"class_name": "VarianceScaling", "config": {"seed": null, "mode": "fan_avg", "distribution": "uniform", "scale": 1.0}}, "bias_constraint": null}}, {"class_name": "BatchNormalization", "inbound_nodes": [[["conv_dw_3", 0, 0, {}]]], "name": "conv_dw_3_bn", "config": {"scale": true, "gamma_regularizer": null, "momentum": 0.99, "trainable": true, "beta_constraint": null, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "epsilon": 0.001, "gamma_constraint": null, "name": "conv_dw_3_bn", "axis": -1, "gamma_initializer": {"class_name": "Ones", "config": {}}, "center": true, "beta_regularizer": null}}, {"class_name": "Activation", "inbound_nodes": [[["conv_dw_3_bn", 0, 0, {}]]], "name": "conv_dw_3_relu", "config": {"activation": "relu6", "trainable": true, "name": "conv_dw_3_relu"}}, {"class_name": "Conv2D", "inbound_nodes": [[["conv_dw_3_relu", 0, 0, {}]]], "name": "conv_pw_3", "config": {"name": "conv_pw_3", "kernel_size": [1, 1], "strides": [1, 1], "dilation_rate": [1, 1], "kernel_regularizer": null, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"seed": null, "mode": "fan_avg", "distribution": "uniform", "scale": 1.0}}, "data_format": "channels_last", "bias_initializer": {"class_name": "Zeros", "config": {}}, "use_bias": false, "bias_regularizer": null, "activation": "linear", "trainable": true, "kernel_constraint": null, "activity_regularizer": null, "padding": "same", "bias_constraint": null, "filters": 32}}, {"class_name": "BatchNormalization", "inbound_nodes": [[["conv_pw_3", 0, 0, {}]]], "name": "conv_pw_3_bn", "config": {"scale": true, "gamma_regularizer": null, "momentum": 0.99, "trainable": true, "beta_constraint": null, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "epsilon": 0.001, "gamma_constraint": null, "name": "conv_pw_3_bn", "axis": -1, "gamma_initializer": {"class_name": "Ones", "config": {}}, "center": true, "beta_regularizer": null}}, {"class_name": "Activation", "inbound_nodes": [[["conv_pw_3_bn", 0, 0, {}]]], "name": "conv_pw_3_relu", "config": 
{"activation": "relu6", "trainable": true, "name": "conv_pw_3_relu"}}, {"class_name": "DepthwiseConv2D", "inbound_nodes": [[["conv_pw_3_relu", 0, 0, {}]]], "name": "conv_dw_4", "config": {"kernel_size": [3, 3], "bias_initializer": {"class_name": "Zeros", "config": {}}, "dilation_rate": [1, 1], "strides": [2, 2], "data_format": "channels_last", "depthwise_regularizer": null, "use_bias": false, "depth_multiplier": 1, "bias_regularizer": null, "depthwise_constraint": null, "activation": "linear", "trainable": true, "name": "conv_dw_4", "activity_regularizer": null, "padding": "same", "depthwise_initializer": {"class_name": "VarianceScaling", "config": {"seed": null, "mode": "fan_avg", "distribution": "uniform", "scale": 1.0}}, "bias_constraint": null}}, {"class_name": "BatchNormalization", "inbound_nodes": [[["conv_dw_4", 0, 0, {}]]], "name": "conv_dw_4_bn", "config": {"scale": true, "gamma_regularizer": null, "momentum": 0.99, "trainable": true, "beta_constraint": null, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "epsilon": 0.001, "gamma_constraint": null, "name": "conv_dw_4_bn", "axis": -1, "gamma_initializer": {"class_name": "Ones", "config": {}}, "center": true, "beta_regularizer": null}}, {"class_name": "Activation", "inbound_nodes": [[["conv_dw_4_bn", 0, 0, {}]]], "name": "conv_dw_4_relu", "config": {"activation": "relu6", "trainable": true, "name": "conv_dw_4_relu"}}, {"class_name": "Conv2D", "inbound_nodes": [[["conv_dw_4_relu", 0, 0, {}]]], "name": "conv_pw_4", "config": {"name": "conv_pw_4", "kernel_size": [1, 1], "strides": [1, 1], "dilation_rate": [1, 1], "kernel_regularizer": null, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"seed": null, "mode": "fan_avg", "distribution": "uniform", "scale": 1.0}}, "data_format": "channels_last", "bias_initializer": {"class_name": "Zeros", "config": {}}, "use_bias": false, "bias_regularizer": null, "activation": "linear", "trainable": true, "kernel_constraint": null, "activity_regularizer": null, "padding": "same", "bias_constraint": null, "filters": 64}}, {"class_name": "BatchNormalization", "inbound_nodes": [[["conv_pw_4", 0, 0, {}]]], "name": "conv_pw_4_bn", "config": {"scale": true, "gamma_regularizer": null, "momentum": 0.99, "trainable": true, "beta_constraint": null, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "epsilon": 0.001, "gamma_constraint": null, "name": "conv_pw_4_bn", "axis": -1, "gamma_initializer": {"class_name": "Ones", "config": {}}, "center": true, "beta_regularizer": null}}, {"class_name": "Activation", "inbound_nodes": [[["conv_pw_4_bn", 0, 0, {}]]], "name": "conv_pw_4_relu", "config": {"activation": "relu6", "trainable": true, "name": "conv_pw_4_relu"}}, {"class_name": "DepthwiseConv2D", "inbound_nodes": [[["conv_pw_4_relu", 0, 0, {}]]], "name": "conv_dw_5", "config": {"kernel_size": [3, 3], "bias_initializer": {"class_name": "Zeros", "config": {}}, "dilation_rate": [1, 1], "strides": [1, 1], "data_format": "channels_last", "depthwise_regularizer": null, "use_bias": false, "depth_multiplier": 1, "bias_regularizer": null, "depthwise_constraint": null, "activation": "linear", "trainable": true, "name": "conv_dw_5", "activity_regularizer": null, "padding": "same", "depthwise_initializer": {"class_name": 
"VarianceScaling", "config": {"seed": null, "mode": "fan_avg", "distribution": "uniform", "scale": 1.0}}, "bias_constraint": null}}, {"class_name": "BatchNormalization", "inbound_nodes": [[["conv_dw_5", 0, 0, {}]]], "name": "conv_dw_5_bn", "config": {"scale": true, "gamma_regularizer": null, "momentum": 0.99, "trainable": true, "beta_constraint": null, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "epsilon": 0.001, "gamma_constraint": null, "name": "conv_dw_5_bn", "axis": -1, "gamma_initializer": {"class_name": "Ones", "config": {}}, "center": true, "beta_regularizer": null}}, {"class_name": "Activation", "inbound_nodes": [[["conv_dw_5_bn", 0, 0, {}]]], "name": "conv_dw_5_relu", "config": {"activation": "relu6", "trainable": true, "name": "conv_dw_5_relu"}}, {"class_name": "Conv2D", "inbound_nodes": [[["conv_dw_5_relu", 0, 0, {}]]], "name": "conv_pw_5", "config": {"name": "conv_pw_5", "kernel_size": [1, 1], "strides": [1, 1], "dilation_rate": [1, 1], "kernel_regularizer": null, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"seed": null, "mode": "fan_avg", "distribution": "uniform", "scale": 1.0}}, "data_format": "channels_last", "bias_initializer": {"class_name": "Zeros", "config": {}}, "use_bias": false, "bias_regularizer": null, "activation": "linear", "trainable": true, "kernel_constraint": null, "activity_regularizer": null, "padding": "same", "bias_constraint": null, "filters": 64}}, {"class_name": "BatchNormalization", "inbound_nodes": [[["conv_pw_5", 0, 0, {}]]], "name": "conv_pw_5_bn", "config": {"scale": true, "gamma_regularizer": null, "momentum": 0.99, "trainable": true, "beta_constraint": null, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "epsilon": 0.001, "gamma_constraint": null, "name": "conv_pw_5_bn", "axis": -1, "gamma_initializer": {"class_name": "Ones", "config": {}}, "center": true, "beta_regularizer": null}}, {"class_name": "Activation", "inbound_nodes": [[["conv_pw_5_bn", 0, 0, {}]]], "name": "conv_pw_5_relu", "config": {"activation": "relu6", "trainable": true, "name": "conv_pw_5_relu"}}, {"class_name": "DepthwiseConv2D", "inbound_nodes": [[["conv_pw_5_relu", 0, 0, {}]]], "name": "conv_dw_6", "config": {"kernel_size": [3, 3], "bias_initializer": {"class_name": "Zeros", "config": {}}, "dilation_rate": [1, 1], "strides": [2, 2], "data_format": "channels_last", "depthwise_regularizer": null, "use_bias": false, "depth_multiplier": 1, "bias_regularizer": null, "depthwise_constraint": null, "activation": "linear", "trainable": true, "name": "conv_dw_6", "activity_regularizer": null, "padding": "same", "depthwise_initializer": {"class_name": "VarianceScaling", "config": {"seed": null, "mode": "fan_avg", "distribution": "uniform", "scale": 1.0}}, "bias_constraint": null}}, {"class_name": "BatchNormalization", "inbound_nodes": [[["conv_dw_6", 0, 0, {}]]], "name": "conv_dw_6_bn", "config": {"scale": true, "gamma_regularizer": null, "momentum": 0.99, "trainable": true, "beta_constraint": null, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "epsilon": 0.001, "gamma_constraint": null, "name": "conv_dw_6_bn", "axis": -1, 
"gamma_initializer": {"class_name": "Ones", "config": {}}, "center": true, "beta_regularizer": null}}, {"class_name": "Activation", "inbound_nodes": [[["conv_dw_6_bn", 0, 0, {}]]], "name": "conv_dw_6_relu", "config": {"activation": "relu6", "trainable": true, "name": "conv_dw_6_relu"}}, {"class_name": "Conv2D", "inbound_nodes": [[["conv_dw_6_relu", 0, 0, {}]]], "name": "conv_pw_6", "config": {"name": "conv_pw_6", "kernel_size": [1, 1], "strides": [1, 1], "dilation_rate": [1, 1], "kernel_regularizer": null, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"seed": null, "mode": "fan_avg", "distribution": "uniform", "scale": 1.0}}, "data_format": "channels_last", "bias_initializer": {"class_name": "Zeros", "config": {}}, "use_bias": false, "bias_regularizer": null, "activation": "linear", "trainable": true, "kernel_constraint": null, "activity_regularizer": null, "padding": "same", "bias_constraint": null, "filters": 128}}, {"class_name": "BatchNormalization", "inbound_nodes": [[["conv_pw_6", 0, 0, {}]]], "name": "conv_pw_6_bn", "config": {"scale": true, "gamma_regularizer": null, "momentum": 0.99, "trainable": true, "beta_constraint": null, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "epsilon": 0.001, "gamma_constraint": null, "name": "conv_pw_6_bn", "axis": -1, "gamma_initializer": {"class_name": "Ones", "config": {}}, "center": true, "beta_regularizer": null}}, {"class_name": "Activation", "inbound_nodes": [[["conv_pw_6_bn", 0, 0, {}]]], "name": "conv_pw_6_relu", "config": {"activation": "relu6", "trainable": true, "name": "conv_pw_6_relu"}}, {"class_name": "DepthwiseConv2D", "inbound_nodes": [[["conv_pw_6_relu", 0, 0, {}]]], "name": "conv_dw_7", "config": {"kernel_size": [3, 3], "bias_initializer": {"class_name": "Zeros", "config": {}}, "dilation_rate": [1, 1], "strides": [1, 1], "data_format": "channels_last", "depthwise_regularizer": null, "use_bias": false, "depth_multiplier": 1, "bias_regularizer": null, "depthwise_constraint": null, "activation": "linear", "trainable": true, "name": "conv_dw_7", "activity_regularizer": null, "padding": "same", "depthwise_initializer": {"class_name": "VarianceScaling", "config": {"seed": null, "mode": "fan_avg", "distribution": "uniform", "scale": 1.0}}, "bias_constraint": null}}, {"class_name": "BatchNormalization", "inbound_nodes": [[["conv_dw_7", 0, 0, {}]]], "name": "conv_dw_7_bn", "config": {"scale": true, "gamma_regularizer": null, "momentum": 0.99, "trainable": true, "beta_constraint": null, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "epsilon": 0.001, "gamma_constraint": null, "name": "conv_dw_7_bn", "axis": -1, "gamma_initializer": {"class_name": "Ones", "config": {}}, "center": true, "beta_regularizer": null}}, {"class_name": "Activation", "inbound_nodes": [[["conv_dw_7_bn", 0, 0, {}]]], "name": "conv_dw_7_relu", "config": {"activation": "relu6", "trainable": true, "name": "conv_dw_7_relu"}}, {"class_name": "Conv2D", "inbound_nodes": [[["conv_dw_7_relu", 0, 0, {}]]], "name": "conv_pw_7", "config": {"name": "conv_pw_7", "kernel_size": [1, 1], "strides": [1, 1], "dilation_rate": [1, 1], "kernel_regularizer": null, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"seed": null, "mode": "fan_avg", "distribution": 
"uniform", "scale": 1.0}}, "data_format": "channels_last", "bias_initializer": {"class_name": "Zeros", "config": {}}, "use_bias": false, "bias_regularizer": null, "activation": "linear", "trainable": true, "kernel_constraint": null, "activity_regularizer": null, "padding": "same", "bias_constraint": null, "filters": 128}}, {"class_name": "BatchNormalization", "inbound_nodes": [[["conv_pw_7", 0, 0, {}]]], "name": "conv_pw_7_bn", "config": {"scale": true, "gamma_regularizer": null, "momentum": 0.99, "trainable": true, "beta_constraint": null, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "epsilon": 0.001, "gamma_constraint": null, "name": "conv_pw_7_bn", "axis": -1, "gamma_initializer": {"class_name": "Ones", "config": {}}, "center": true, "beta_regularizer": null}}, {"class_name": "Activation", "inbound_nodes": [[["conv_pw_7_bn", 0, 0, {}]]], "name": "conv_pw_7_relu", "config": {"activation": "relu6", "trainable": true, "name": "conv_pw_7_relu"}}, {"class_name": "DepthwiseConv2D", "inbound_nodes": [[["conv_pw_7_relu", 0, 0, {}]]], "name": "conv_dw_8", "config": {"kernel_size": [3, 3], "bias_initializer": {"class_name": "Zeros", "config": {}}, "dilation_rate": [1, 1], "strides": [1, 1], "data_format": "channels_last", "depthwise_regularizer": null, "use_bias": false, "depth_multiplier": 1, "bias_regularizer": null, "depthwise_constraint": null, "activation": "linear", "trainable": true, "name": "conv_dw_8", "activity_regularizer": null, "padding": "same", "depthwise_initializer": {"class_name": "VarianceScaling", "config": {"seed": null, "mode": "fan_avg", "distribution": "uniform", "scale": 1.0}}, "bias_constraint": null}}, {"class_name": "BatchNormalization", "inbound_nodes": [[["conv_dw_8", 0, 0, {}]]], "name": "conv_dw_8_bn", "config": {"scale": true, "gamma_regularizer": null, "momentum": 0.99, "trainable": true, "beta_constraint": null, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "epsilon": 0.001, "gamma_constraint": null, "name": "conv_dw_8_bn", "axis": -1, "gamma_initializer": {"class_name": "Ones", "config": {}}, "center": true, "beta_regularizer": null}}, {"class_name": "Activation", "inbound_nodes": [[["conv_dw_8_bn", 0, 0, {}]]], "name": "conv_dw_8_relu", "config": {"activation": "relu6", "trainable": true, "name": "conv_dw_8_relu"}}, {"class_name": "Conv2D", "inbound_nodes": [[["conv_dw_8_relu", 0, 0, {}]]], "name": "conv_pw_8", "config": {"name": "conv_pw_8", "kernel_size": [1, 1], "strides": [1, 1], "dilation_rate": [1, 1], "kernel_regularizer": null, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"seed": null, "mode": "fan_avg", "distribution": "uniform", "scale": 1.0}}, "data_format": "channels_last", "bias_initializer": {"class_name": "Zeros", "config": {}}, "use_bias": false, "bias_regularizer": null, "activation": "linear", "trainable": true, "kernel_constraint": null, "activity_regularizer": null, "padding": "same", "bias_constraint": null, "filters": 128}}, {"class_name": "BatchNormalization", "inbound_nodes": [[["conv_pw_8", 0, 0, {}]]], "name": "conv_pw_8_bn", "config": {"scale": true, "gamma_regularizer": null, "momentum": 0.99, "trainable": true, "beta_constraint": null, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, 
"beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "epsilon": 0.001, "gamma_constraint": null, "name": "conv_pw_8_bn", "axis": -1, "gamma_initializer": {"class_name": "Ones", "config": {}}, "center": true, "beta_regularizer": null}}, {"class_name": "Activation", "inbound_nodes": [[["conv_pw_8_bn", 0, 0, {}]]], "name": "conv_pw_8_relu", "config": {"activation": "relu6", "trainable": true, "name": "conv_pw_8_relu"}}, {"class_name": "DepthwiseConv2D", "inbound_nodes": [[["conv_pw_8_relu", 0, 0, {}]]], "name": "conv_dw_9", "config": {"kernel_size": [3, 3], "bias_initializer": {"class_name": "Zeros", "config": {}}, "dilation_rate": [1, 1], "strides": [1, 1], "data_format": "channels_last", "depthwise_regularizer": null, "use_bias": false, "depth_multiplier": 1, "bias_regularizer": null, "depthwise_constraint": null, "activation": "linear", "trainable": true, "name": "conv_dw_9", "activity_regularizer": null, "padding": "same", "depthwise_initializer": {"class_name": "VarianceScaling", "config": {"seed": null, "mode": "fan_avg", "distribution": "uniform", "scale": 1.0}}, "bias_constraint": null}}, {"class_name": "BatchNormalization", "inbound_nodes": [[["conv_dw_9", 0, 0, {}]]], "name": "conv_dw_9_bn", "config": {"scale": true, "gamma_regularizer": null, "momentum": 0.99, "trainable": true, "beta_constraint": null, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "epsilon": 0.001, "gamma_constraint": null, "name": "conv_dw_9_bn", "axis": -1, "gamma_initializer": {"class_name": "Ones", "config": {}}, "center": true, "beta_regularizer": null}}, {"class_name": "Activation", "inbound_nodes": [[["conv_dw_9_bn", 0, 0, {}]]], "name": "conv_dw_9_relu", "config": {"activation": "relu6", "trainable": true, "name": "conv_dw_9_relu"}}, {"class_name": "Conv2D", "inbound_nodes": [[["conv_dw_9_relu", 0, 0, {}]]], "name": "conv_pw_9", "config": {"name": "conv_pw_9", "kernel_size": [1, 1], "strides": [1, 1], "dilation_rate": [1, 1], "kernel_regularizer": null, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"seed": null, "mode": "fan_avg", "distribution": "uniform", "scale": 1.0}}, "data_format": "channels_last", "bias_initializer": {"class_name": "Zeros", "config": {}}, "use_bias": false, "bias_regularizer": null, "activation": "linear", "trainable": true, "kernel_constraint": null, "activity_regularizer": null, "padding": "same", "bias_constraint": null, "filters": 128}}, {"class_name": "BatchNormalization", "inbound_nodes": [[["conv_pw_9", 0, 0, {}]]], "name": "conv_pw_9_bn", "config": {"scale": true, "gamma_regularizer": null, "momentum": 0.99, "trainable": true, "beta_constraint": null, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "epsilon": 0.001, "gamma_constraint": null, "name": "conv_pw_9_bn", "axis": -1, "gamma_initializer": {"class_name": "Ones", "config": {}}, "center": true, "beta_regularizer": null}}, {"class_name": "Activation", "inbound_nodes": [[["conv_pw_9_bn", 0, 0, {}]]], "name": "conv_pw_9_relu", "config": {"activation": "relu6", "trainable": true, "name": "conv_pw_9_relu"}}, {"class_name": "DepthwiseConv2D", "inbound_nodes": [[["conv_pw_9_relu", 0, 0, {}]]], "name": "conv_dw_10", "config": {"kernel_size": 
[3, 3], "bias_initializer": {"class_name": "Zeros", "config": {}}, "dilation_rate": [1, 1], "strides": [1, 1], "data_format": "channels_last", "depthwise_regularizer": null, "use_bias": false, "depth_multiplier": 1, "bias_regularizer": null, "depthwise_constraint": null, "activation": "linear", "trainable": true, "name": "conv_dw_10", "activity_regularizer": null, "padding": "same", "depthwise_initializer": {"class_name": "VarianceScaling", "config": {"seed": null, "mode": "fan_avg", "distribution": "uniform", "scale": 1.0}}, "bias_constraint": null}}, {"class_name": "BatchNormalization", "inbound_nodes": [[["conv_dw_10", 0, 0, {}]]], "name": "conv_dw_10_bn", "config": {"scale": true, "gamma_regularizer": null, "momentum": 0.99, "trainable": true, "beta_constraint": null, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "epsilon": 0.001, "gamma_constraint": null, "name": "conv_dw_10_bn", "axis": -1, "gamma_initializer": {"class_name": "Ones", "config": {}}, "center": true, "beta_regularizer": null}}, {"class_name": "Activation", "inbound_nodes": [[["conv_dw_10_bn", 0, 0, {}]]], "name": "conv_dw_10_relu", "config": {"activation": "relu6", "trainable": true, "name": "conv_dw_10_relu"}}, {"class_name": "Conv2D", "inbound_nodes": [[["conv_dw_10_relu", 0, 0, {}]]], "name": "conv_pw_10", "config": {"name": "conv_pw_10", "kernel_size": [1, 1], "strides": [1, 1], "dilation_rate": [1, 1], "kernel_regularizer": null, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"seed": null, "mode": "fan_avg", "distribution": "uniform", "scale": 1.0}}, "data_format": "channels_last", "bias_initializer": {"class_name": "Zeros", "config": {}}, "use_bias": false, "bias_regularizer": null, "activation": "linear", "trainable": true, "kernel_constraint": null, "activity_regularizer": null, "padding": "same", "bias_constraint": null, "filters": 128}}, {"class_name": "BatchNormalization", "inbound_nodes": [[["conv_pw_10", 0, 0, {}]]], "name": "conv_pw_10_bn", "config": {"scale": true, "gamma_regularizer": null, "momentum": 0.99, "trainable": true, "beta_constraint": null, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "epsilon": 0.001, "gamma_constraint": null, "name": "conv_pw_10_bn", "axis": -1, "gamma_initializer": {"class_name": "Ones", "config": {}}, "center": true, "beta_regularizer": null}}, {"class_name": "Activation", "inbound_nodes": [[["conv_pw_10_bn", 0, 0, {}]]], "name": "conv_pw_10_relu", "config": {"activation": "relu6", "trainable": true, "name": "conv_pw_10_relu"}}, {"class_name": "DepthwiseConv2D", "inbound_nodes": [[["conv_pw_10_relu", 0, 0, {}]]], "name": "conv_dw_11", "config": {"kernel_size": [3, 3], "bias_initializer": {"class_name": "Zeros", "config": {}}, "dilation_rate": [1, 1], "strides": [1, 1], "data_format": "channels_last", "depthwise_regularizer": null, "use_bias": false, "depth_multiplier": 1, "bias_regularizer": null, "depthwise_constraint": null, "activation": "linear", "trainable": true, "name": "conv_dw_11", "activity_regularizer": null, "padding": "same", "depthwise_initializer": {"class_name": "VarianceScaling", "config": {"seed": null, "mode": "fan_avg", "distribution": "uniform", "scale": 1.0}}, "bias_constraint": null}}, {"class_name": "BatchNormalization", "inbound_nodes": 
[[["conv_dw_11", 0, 0, {}]]], "name": "conv_dw_11_bn", "config": {"scale": true, "gamma_regularizer": null, "momentum": 0.99, "trainable": true, "beta_constraint": null, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "epsilon": 0.001, "gamma_constraint": null, "name": "conv_dw_11_bn", "axis": -1, "gamma_initializer": {"class_name": "Ones", "config": {}}, "center": true, "beta_regularizer": null}}, {"class_name": "Activation", "inbound_nodes": [[["conv_dw_11_bn", 0, 0, {}]]], "name": "conv_dw_11_relu", "config": {"activation": "relu6", "trainable": true, "name": "conv_dw_11_relu"}}, {"class_name": "Conv2D", "inbound_nodes": [[["conv_dw_11_relu", 0, 0, {}]]], "name": "conv_pw_11", "config": {"name": "conv_pw_11", "kernel_size": [1, 1], "strides": [1, 1], "dilation_rate": [1, 1], "kernel_regularizer": null, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"seed": null, "mode": "fan_avg", "distribution": "uniform", "scale": 1.0}}, "data_format": "channels_last", "bias_initializer": {"class_name": "Zeros", "config": {}}, "use_bias": false, "bias_regularizer": null, "activation": "linear", "trainable": true, "kernel_constraint": null, "activity_regularizer": null, "padding": "same", "bias_constraint": null, "filters": 128}}, {"class_name": "BatchNormalization", "inbound_nodes": [[["conv_pw_11", 0, 0, {}]]], "name": "conv_pw_11_bn", "config": {"scale": true, "gamma_regularizer": null, "momentum": 0.99, "trainable": true, "beta_constraint": null, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "epsilon": 0.001, "gamma_constraint": null, "name": "conv_pw_11_bn", "axis": -1, "gamma_initializer": {"class_name": "Ones", "config": {}}, "center": true, "beta_regularizer": null}}, {"class_name": "Activation", "inbound_nodes": [[["conv_pw_11_bn", 0, 0, {}]]], "name": "conv_pw_11_relu", "config": {"activation": "relu6", "trainable": true, "name": "conv_pw_11_relu"}}, {"class_name": "DepthwiseConv2D", "inbound_nodes": [[["conv_pw_11_relu", 0, 0, {}]]], "name": "conv_dw_12", "config": {"kernel_size": [3, 3], "bias_initializer": {"class_name": "Zeros", "config": {}}, "dilation_rate": [1, 1], "strides": [2, 2], "data_format": "channels_last", "depthwise_regularizer": null, "use_bias": false, "depth_multiplier": 1, "bias_regularizer": null, "depthwise_constraint": null, "activation": "linear", "trainable": true, "name": "conv_dw_12", "activity_regularizer": null, "padding": "same", "depthwise_initializer": {"class_name": "VarianceScaling", "config": {"seed": null, "mode": "fan_avg", "distribution": "uniform", "scale": 1.0}}, "bias_constraint": null}}, {"class_name": "BatchNormalization", "inbound_nodes": [[["conv_dw_12", 0, 0, {}]]], "name": "conv_dw_12_bn", "config": {"scale": true, "gamma_regularizer": null, "momentum": 0.99, "trainable": true, "beta_constraint": null, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "epsilon": 0.001, "gamma_constraint": null, "name": "conv_dw_12_bn", "axis": -1, "gamma_initializer": {"class_name": "Ones", "config": {}}, "center": true, "beta_regularizer": null}}, {"class_name": "Activation", "inbound_nodes": [[["conv_dw_12_bn", 
0, 0, {}]]], "name": "conv_dw_12_relu", "config": {"activation": "relu6", "trainable": true, "name": "conv_dw_12_relu"}}, {"class_name": "Conv2D", "inbound_nodes": [[["conv_dw_12_relu", 0, 0, {}]]], "name": "conv_pw_12", "config": {"name": "conv_pw_12", "kernel_size": [1, 1], "strides": [1, 1], "dilation_rate": [1, 1], "kernel_regularizer": null, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"seed": null, "mode": "fan_avg", "distribution": "uniform", "scale": 1.0}}, "data_format": "channels_last", "bias_initializer": {"class_name": "Zeros", "config": {}}, "use_bias": false, "bias_regularizer": null, "activation": "linear", "trainable": true, "kernel_constraint": null, "activity_regularizer": null, "padding": "same", "bias_constraint": null, "filters": 256}}, {"class_name": "BatchNormalization", "inbound_nodes": [[["conv_pw_12", 0, 0, {}]]], "name": "conv_pw_12_bn", "config": {"scale": true, "gamma_regularizer": null, "momentum": 0.99, "trainable": true, "beta_constraint": null, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "epsilon": 0.001, "gamma_constraint": null, "name": "conv_pw_12_bn", "axis": -1, "gamma_initializer": {"class_name": "Ones", "config": {}}, "center": true, "beta_regularizer": null}}, {"class_name": "Activation", "inbound_nodes": [[["conv_pw_12_bn", 0, 0, {}]]], "name": "conv_pw_12_relu", "config": {"activation": "relu6", "trainable": true, "name": "conv_pw_12_relu"}}, {"class_name": "DepthwiseConv2D", "inbound_nodes": [[["conv_pw_12_relu", 0, 0, {}]]], "name": "conv_dw_13", "config": {"kernel_size": [3, 3], "bias_initializer": {"class_name": "Zeros", "config": {}}, "dilation_rate": [1, 1], "strides": [1, 1], "data_format": "channels_last", "depthwise_regularizer": null, "use_bias": false, "depth_multiplier": 1, "bias_regularizer": null, "depthwise_constraint": null, "activation": "linear", "trainable": true, "name": "conv_dw_13", "activity_regularizer": null, "padding": "same", "depthwise_initializer": {"class_name": "VarianceScaling", "config": {"seed": null, "mode": "fan_avg", "distribution": "uniform", "scale": 1.0}}, "bias_constraint": null}}, {"class_name": "BatchNormalization", "inbound_nodes": [[["conv_dw_13", 0, 0, {}]]], "name": "conv_dw_13_bn", "config": {"scale": true, "gamma_regularizer": null, "momentum": 0.99, "trainable": true, "beta_constraint": null, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "epsilon": 0.001, "gamma_constraint": null, "name": "conv_dw_13_bn", "axis": -1, "gamma_initializer": {"class_name": "Ones", "config": {}}, "center": true, "beta_regularizer": null}}, {"class_name": "Activation", "inbound_nodes": [[["conv_dw_13_bn", 0, 0, {}]]], "name": "conv_dw_13_relu", "config": {"activation": "relu6", "trainable": true, "name": "conv_dw_13_relu"}}, {"class_name": "Conv2D", "inbound_nodes": [[["conv_dw_13_relu", 0, 0, {}]]], "name": "conv_pw_13", "config": {"name": "conv_pw_13", "kernel_size": [1, 1], "strides": [1, 1], "dilation_rate": [1, 1], "kernel_regularizer": null, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"seed": null, "mode": "fan_avg", "distribution": "uniform", "scale": 1.0}}, "data_format": "channels_last", "bias_initializer": {"class_name": "Zeros", "config": {}}, "use_bias": false, 
"bias_regularizer": null, "activation": "linear", "trainable": true, "kernel_constraint": null, "activity_regularizer": null, "padding": "same", "bias_constraint": null, "filters": 256}}, {"class_name": "BatchNormalization", "inbound_nodes": [[["conv_pw_13", 0, 0, {}]]], "name": "conv_pw_13_bn", "config": {"scale": true, "gamma_regularizer": null, "momentum": 0.99, "trainable": true, "beta_constraint": null, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "beta_initializer": {"class_name": "Zeros", "config": {}}, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "epsilon": 0.001, "gamma_constraint": null, "name": "conv_pw_13_bn", "axis": -1, "gamma_initializer": {"class_name": "Ones", "config": {}}, "center": true, "beta_regularizer": null}}, {"class_name": "Activation", "inbound_nodes": [[["conv_pw_13_bn", 0, 0, {}]]], "name": "conv_pw_13_relu", "config": {"activation": "relu6", "trainable": true, "name": "conv_pw_13_relu"}}], "input_layers": [["input_2", 0, 0]], "output_layers": [["conv_pw_13_relu", 0, 0]], "name": "mobilenet_0.25_64"}}, {"class_name": "Conv2D", "name": "conv2d_1", "inbound_nodes": [[["mobilenet_0.25_64", 1, 0, {}]]], "config": {"kernel_size": [1, 1], "dilation_rate": [1, 1], "strides": [1, 1], "kernel_regularizer": null, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"seed": null, "mode": "fan_avg", "distribution": "uniform", "scale": 1.0}}, "data_format": "channels_last", "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_constraint": null, "bias_regularizer": null, "activation": "relu", "trainable": true, "name": "conv2d_1", "activity_regularizer": null, "padding": "valid", "use_bias": true, "bias_constraint": null, "filters": 20}}, {"class_name": "Flatten", "name": "flatten_1", "inbound_nodes": [[["conv2d_1", 0, 0, {}]]], "config": {"trainable": true, "name": "flatten_1"}}, {"class_name": "Dropout", "name": "dropout_1", "inbound_nodes": [[["flatten_1", 0, 0, {}]]], "config": {"rate": 0.2, "trainable": true, "name": "dropout_1"}}, {"class_name": "Dense", "name": "feat_a", "inbound_nodes": [[["dropout_1", 0, 0, {}]]], "config": {"units": 32, "name": "feat_a", "kernel_regularizer": null, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"seed": null, "mode": "fan_avg", "distribution": "uniform", "scale": 1.0}}, "bias_constraint": null, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_constraint": null, "bias_regularizer": null, "activation": "relu", "trainable": true, "use_bias": true, "activity_regularizer": null}}, {"class_name": "Dense", "name": "pred_a", "inbound_nodes": [[["feat_a", 0, 0, {}]]], "config": {"units": 1, "name": "pred_a", "kernel_regularizer": null, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"seed": null, "mode": "fan_avg", "distribution": "uniform", "scale": 1.0}}, "bias_constraint": null, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_constraint": null, "bias_regularizer": null, "activation": "linear", "trainable": true, "use_bias": true, "activity_regularizer": null}}], "input_layers": [["input_1", 0, 0]], "name": "model_1", "output_layers": [["pred_a", 0, 0]]}} \ No newline at end of file diff --git a/pre-trained/wiki/mobilenet_reg_0.25_64/mobilenet_reg_0.25_64.png b/pre-trained/wiki/mobilenet_reg_0.25_64/mobilenet_reg_0.25_64.png new file mode 100644 index 0000000..fba0588 Binary files /dev/null and b/pre-trained/wiki/mobilenet_reg_0.25_64/mobilenet_reg_0.25_64.png differ diff --git 
a/pre-trained/wiki/mobilenet_reg_0.25_64/performance.pdf b/pre-trained/wiki/mobilenet_reg_0.25_64/performance.pdf new file mode 100644 index 0000000..30ea3a4 Binary files /dev/null and b/pre-trained/wiki/mobilenet_reg_0.25_64/performance.pdf differ diff --git a/pre-trained/wiki/mobilenet_reg_0.25_64/performance.png b/pre-trained/wiki/mobilenet_reg_0.25_64/performance.png new file mode 100644 index 0000000..f77c965 Binary files /dev/null and b/pre-trained/wiki/mobilenet_reg_0.25_64/performance.png differ diff --git a/pre-trained/wiki/mobilenet_reg_0.5_64/history_mobilenet_reg_0.5_64.h5 b/pre-trained/wiki/mobilenet_reg_0.5_64/history_mobilenet_reg_0.5_64.h5 new file mode 100644 index 0000000..1e236ab Binary files /dev/null and b/pre-trained/wiki/mobilenet_reg_0.5_64/history_mobilenet_reg_0.5_64.h5 differ diff --git a/pre-trained/wiki/mobilenet_reg_0.5_64/loss.pdf b/pre-trained/wiki/mobilenet_reg_0.5_64/loss.pdf new file mode 100644 index 0000000..650bb8d Binary files /dev/null and b/pre-trained/wiki/mobilenet_reg_0.5_64/loss.pdf differ diff --git a/pre-trained/wiki/mobilenet_reg_0.5_64/loss.png b/pre-trained/wiki/mobilenet_reg_0.5_64/loss.png new file mode 100644 index 0000000..0a7d8ee Binary files /dev/null and b/pre-trained/wiki/mobilenet_reg_0.5_64/loss.png differ diff --git a/pre-trained/wiki/mobilenet_reg_0.5_64/mobilenet_reg_0.5_64.h5 b/pre-trained/wiki/mobilenet_reg_0.5_64/mobilenet_reg_0.5_64.h5 new file mode 100644 index 0000000..316af8d Binary files /dev/null and b/pre-trained/wiki/mobilenet_reg_0.5_64/mobilenet_reg_0.5_64.h5 differ diff --git a/pre-trained/wiki/mobilenet_reg_0.5_64/mobilenet_reg_0.5_64.json b/pre-trained/wiki/mobilenet_reg_0.5_64/mobilenet_reg_0.5_64.json new file mode 100644 index 0000000..ae6f2a6 --- /dev/null +++ b/pre-trained/wiki/mobilenet_reg_0.5_64/mobilenet_reg_0.5_64.json @@ -0,0 +1 @@ +{"keras_version": "2.0.6", "config": {"name": "model_1", "layers": [{"name": "input_1", "config": {"batch_input_shape": [null, 64, 64, 3], "name": "input_1", "dtype": "float32", "sparse": false}, "class_name": "InputLayer", "inbound_nodes": []}, {"name": "mobilenet_0.50_64", "config": {"name": "mobilenet_0.50_64", "output_layers": [["conv_pw_13_relu", 0, 0]], "layers": [{"name": "input_2", "config": {"batch_input_shape": [null, 64, 64, 3], "name": "input_2", "dtype": "float32", "sparse": false}, "class_name": "InputLayer", "inbound_nodes": []}, {"name": "conv1", "config": {"trainable": true, "bias_regularizer": null, "kernel_constraint": null, "padding": "same", "name": "conv1", "kernel_size": [3, 3], "filters": 16, "use_bias": false, "bias_constraint": null, "kernel_regularizer": null, "activation": "linear", "dilation_rate": [1, 1], "activity_regularizer": null, "data_format": "channels_last", "strides": [2, 2], "bias_initializer": {"config": {}, "class_name": "Zeros"}, "kernel_initializer": {"config": {"scale": 1.0, "distribution": "uniform", "seed": null, "mode": "fan_avg"}, "class_name": "VarianceScaling"}}, "class_name": "Conv2D", "inbound_nodes": [[["input_2", 0, 0, {}]]]}, {"name": "conv1_bn", "config": {"trainable": true, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "gamma_constraint": null, "beta_constraint": null, "gamma_initializer": {"config": {}, "class_name": "Ones"}, "name": "conv1_bn", "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "epsilon": 0.001, "momentum": 0.99, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "axis": -1, "scale": true, "center": true, "beta_regularizer": null, "gamma_regularizer": 
null}, "class_name": "BatchNormalization", "inbound_nodes": [[["conv1", 0, 0, {}]]]}, {"name": "conv1_relu", "config": {"name": "conv1_relu", "trainable": true, "activation": "relu6"}, "class_name": "Activation", "inbound_nodes": [[["conv1_bn", 0, 0, {}]]]}, {"name": "conv_dw_1", "config": {"bias_regularizer": null, "trainable": true, "use_bias": false, "padding": "same", "name": "conv_dw_1", "kernel_size": [3, 3], "depthwise_initializer": {"config": {"scale": 1.0, "distribution": "uniform", "seed": null, "mode": "fan_avg"}, "class_name": "VarianceScaling"}, "depthwise_constraint": null, "data_format": "channels_last", "bias_constraint": null, "activation": "linear", "depth_multiplier": 1, "activity_regularizer": null, "dilation_rate": [1, 1], "depthwise_regularizer": null, "strides": [1, 1], "bias_initializer": {"config": {}, "class_name": "Zeros"}}, "class_name": "DepthwiseConv2D", "inbound_nodes": [[["conv1_relu", 0, 0, {}]]]}, {"name": "conv_dw_1_bn", "config": {"trainable": true, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "gamma_constraint": null, "beta_constraint": null, "gamma_initializer": {"config": {}, "class_name": "Ones"}, "name": "conv_dw_1_bn", "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "epsilon": 0.001, "momentum": 0.99, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "axis": -1, "scale": true, "center": true, "beta_regularizer": null, "gamma_regularizer": null}, "class_name": "BatchNormalization", "inbound_nodes": [[["conv_dw_1", 0, 0, {}]]]}, {"name": "conv_dw_1_relu", "config": {"name": "conv_dw_1_relu", "trainable": true, "activation": "relu6"}, "class_name": "Activation", "inbound_nodes": [[["conv_dw_1_bn", 0, 0, {}]]]}, {"name": "conv_pw_1", "config": {"trainable": true, "bias_regularizer": null, "kernel_constraint": null, "padding": "same", "name": "conv_pw_1", "kernel_size": [1, 1], "filters": 32, "use_bias": false, "bias_constraint": null, "kernel_regularizer": null, "activation": "linear", "dilation_rate": [1, 1], "activity_regularizer": null, "data_format": "channels_last", "strides": [1, 1], "bias_initializer": {"config": {}, "class_name": "Zeros"}, "kernel_initializer": {"config": {"scale": 1.0, "distribution": "uniform", "seed": null, "mode": "fan_avg"}, "class_name": "VarianceScaling"}}, "class_name": "Conv2D", "inbound_nodes": [[["conv_dw_1_relu", 0, 0, {}]]]}, {"name": "conv_pw_1_bn", "config": {"trainable": true, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "gamma_constraint": null, "beta_constraint": null, "gamma_initializer": {"config": {}, "class_name": "Ones"}, "name": "conv_pw_1_bn", "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "epsilon": 0.001, "momentum": 0.99, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "axis": -1, "scale": true, "center": true, "beta_regularizer": null, "gamma_regularizer": null}, "class_name": "BatchNormalization", "inbound_nodes": [[["conv_pw_1", 0, 0, {}]]]}, {"name": "conv_pw_1_relu", "config": {"name": "conv_pw_1_relu", "trainable": true, "activation": "relu6"}, "class_name": "Activation", "inbound_nodes": [[["conv_pw_1_bn", 0, 0, {}]]]}, {"name": "conv_dw_2", "config": {"bias_regularizer": null, "trainable": true, "use_bias": false, "padding": "same", "name": "conv_dw_2", "kernel_size": [3, 3], "depthwise_initializer": {"config": {"scale": 1.0, "distribution": "uniform", "seed": null, "mode": "fan_avg"}, "class_name": "VarianceScaling"}, "depthwise_constraint": null, "data_format": "channels_last", 
"bias_constraint": null, "activation": "linear", "depth_multiplier": 1, "activity_regularizer": null, "dilation_rate": [1, 1], "depthwise_regularizer": null, "strides": [2, 2], "bias_initializer": {"config": {}, "class_name": "Zeros"}}, "class_name": "DepthwiseConv2D", "inbound_nodes": [[["conv_pw_1_relu", 0, 0, {}]]]}, {"name": "conv_dw_2_bn", "config": {"trainable": true, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "gamma_constraint": null, "beta_constraint": null, "gamma_initializer": {"config": {}, "class_name": "Ones"}, "name": "conv_dw_2_bn", "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "epsilon": 0.001, "momentum": 0.99, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "axis": -1, "scale": true, "center": true, "beta_regularizer": null, "gamma_regularizer": null}, "class_name": "BatchNormalization", "inbound_nodes": [[["conv_dw_2", 0, 0, {}]]]}, {"name": "conv_dw_2_relu", "config": {"name": "conv_dw_2_relu", "trainable": true, "activation": "relu6"}, "class_name": "Activation", "inbound_nodes": [[["conv_dw_2_bn", 0, 0, {}]]]}, {"name": "conv_pw_2", "config": {"trainable": true, "bias_regularizer": null, "kernel_constraint": null, "padding": "same", "name": "conv_pw_2", "kernel_size": [1, 1], "filters": 64, "use_bias": false, "bias_constraint": null, "kernel_regularizer": null, "activation": "linear", "dilation_rate": [1, 1], "activity_regularizer": null, "data_format": "channels_last", "strides": [1, 1], "bias_initializer": {"config": {}, "class_name": "Zeros"}, "kernel_initializer": {"config": {"scale": 1.0, "distribution": "uniform", "seed": null, "mode": "fan_avg"}, "class_name": "VarianceScaling"}}, "class_name": "Conv2D", "inbound_nodes": [[["conv_dw_2_relu", 0, 0, {}]]]}, {"name": "conv_pw_2_bn", "config": {"trainable": true, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "gamma_constraint": null, "beta_constraint": null, "gamma_initializer": {"config": {}, "class_name": "Ones"}, "name": "conv_pw_2_bn", "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "epsilon": 0.001, "momentum": 0.99, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "axis": -1, "scale": true, "center": true, "beta_regularizer": null, "gamma_regularizer": null}, "class_name": "BatchNormalization", "inbound_nodes": [[["conv_pw_2", 0, 0, {}]]]}, {"name": "conv_pw_2_relu", "config": {"name": "conv_pw_2_relu", "trainable": true, "activation": "relu6"}, "class_name": "Activation", "inbound_nodes": [[["conv_pw_2_bn", 0, 0, {}]]]}, {"name": "conv_dw_3", "config": {"bias_regularizer": null, "trainable": true, "use_bias": false, "padding": "same", "name": "conv_dw_3", "kernel_size": [3, 3], "depthwise_initializer": {"config": {"scale": 1.0, "distribution": "uniform", "seed": null, "mode": "fan_avg"}, "class_name": "VarianceScaling"}, "depthwise_constraint": null, "data_format": "channels_last", "bias_constraint": null, "activation": "linear", "depth_multiplier": 1, "activity_regularizer": null, "dilation_rate": [1, 1], "depthwise_regularizer": null, "strides": [1, 1], "bias_initializer": {"config": {}, "class_name": "Zeros"}}, "class_name": "DepthwiseConv2D", "inbound_nodes": [[["conv_pw_2_relu", 0, 0, {}]]]}, {"name": "conv_dw_3_bn", "config": {"trainable": true, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "gamma_constraint": null, "beta_constraint": null, "gamma_initializer": {"config": {}, "class_name": "Ones"}, "name": "conv_dw_3_bn", "moving_mean_initializer": {"config": {}, "class_name": 
"Zeros"}, "epsilon": 0.001, "momentum": 0.99, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "axis": -1, "scale": true, "center": true, "beta_regularizer": null, "gamma_regularizer": null}, "class_name": "BatchNormalization", "inbound_nodes": [[["conv_dw_3", 0, 0, {}]]]}, {"name": "conv_dw_3_relu", "config": {"name": "conv_dw_3_relu", "trainable": true, "activation": "relu6"}, "class_name": "Activation", "inbound_nodes": [[["conv_dw_3_bn", 0, 0, {}]]]}, {"name": "conv_pw_3", "config": {"trainable": true, "bias_regularizer": null, "kernel_constraint": null, "padding": "same", "name": "conv_pw_3", "kernel_size": [1, 1], "filters": 64, "use_bias": false, "bias_constraint": null, "kernel_regularizer": null, "activation": "linear", "dilation_rate": [1, 1], "activity_regularizer": null, "data_format": "channels_last", "strides": [1, 1], "bias_initializer": {"config": {}, "class_name": "Zeros"}, "kernel_initializer": {"config": {"scale": 1.0, "distribution": "uniform", "seed": null, "mode": "fan_avg"}, "class_name": "VarianceScaling"}}, "class_name": "Conv2D", "inbound_nodes": [[["conv_dw_3_relu", 0, 0, {}]]]}, {"name": "conv_pw_3_bn", "config": {"trainable": true, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "gamma_constraint": null, "beta_constraint": null, "gamma_initializer": {"config": {}, "class_name": "Ones"}, "name": "conv_pw_3_bn", "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "epsilon": 0.001, "momentum": 0.99, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "axis": -1, "scale": true, "center": true, "beta_regularizer": null, "gamma_regularizer": null}, "class_name": "BatchNormalization", "inbound_nodes": [[["conv_pw_3", 0, 0, {}]]]}, {"name": "conv_pw_3_relu", "config": {"name": "conv_pw_3_relu", "trainable": true, "activation": "relu6"}, "class_name": "Activation", "inbound_nodes": [[["conv_pw_3_bn", 0, 0, {}]]]}, {"name": "conv_dw_4", "config": {"bias_regularizer": null, "trainable": true, "use_bias": false, "padding": "same", "name": "conv_dw_4", "kernel_size": [3, 3], "depthwise_initializer": {"config": {"scale": 1.0, "distribution": "uniform", "seed": null, "mode": "fan_avg"}, "class_name": "VarianceScaling"}, "depthwise_constraint": null, "data_format": "channels_last", "bias_constraint": null, "activation": "linear", "depth_multiplier": 1, "activity_regularizer": null, "dilation_rate": [1, 1], "depthwise_regularizer": null, "strides": [2, 2], "bias_initializer": {"config": {}, "class_name": "Zeros"}}, "class_name": "DepthwiseConv2D", "inbound_nodes": [[["conv_pw_3_relu", 0, 0, {}]]]}, {"name": "conv_dw_4_bn", "config": {"trainable": true, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "gamma_constraint": null, "beta_constraint": null, "gamma_initializer": {"config": {}, "class_name": "Ones"}, "name": "conv_dw_4_bn", "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "epsilon": 0.001, "momentum": 0.99, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "axis": -1, "scale": true, "center": true, "beta_regularizer": null, "gamma_regularizer": null}, "class_name": "BatchNormalization", "inbound_nodes": [[["conv_dw_4", 0, 0, {}]]]}, {"name": "conv_dw_4_relu", "config": {"name": "conv_dw_4_relu", "trainable": true, "activation": "relu6"}, "class_name": "Activation", "inbound_nodes": [[["conv_dw_4_bn", 0, 0, {}]]]}, {"name": "conv_pw_4", "config": {"trainable": true, "bias_regularizer": null, "kernel_constraint": null, "padding": "same", "name": "conv_pw_4", 
"kernel_size": [1, 1], "filters": 128, "use_bias": false, "bias_constraint": null, "kernel_regularizer": null, "activation": "linear", "dilation_rate": [1, 1], "activity_regularizer": null, "data_format": "channels_last", "strides": [1, 1], "bias_initializer": {"config": {}, "class_name": "Zeros"}, "kernel_initializer": {"config": {"scale": 1.0, "distribution": "uniform", "seed": null, "mode": "fan_avg"}, "class_name": "VarianceScaling"}}, "class_name": "Conv2D", "inbound_nodes": [[["conv_dw_4_relu", 0, 0, {}]]]}, {"name": "conv_pw_4_bn", "config": {"trainable": true, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "gamma_constraint": null, "beta_constraint": null, "gamma_initializer": {"config": {}, "class_name": "Ones"}, "name": "conv_pw_4_bn", "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "epsilon": 0.001, "momentum": 0.99, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "axis": -1, "scale": true, "center": true, "beta_regularizer": null, "gamma_regularizer": null}, "class_name": "BatchNormalization", "inbound_nodes": [[["conv_pw_4", 0, 0, {}]]]}, {"name": "conv_pw_4_relu", "config": {"name": "conv_pw_4_relu", "trainable": true, "activation": "relu6"}, "class_name": "Activation", "inbound_nodes": [[["conv_pw_4_bn", 0, 0, {}]]]}, {"name": "conv_dw_5", "config": {"bias_regularizer": null, "trainable": true, "use_bias": false, "padding": "same", "name": "conv_dw_5", "kernel_size": [3, 3], "depthwise_initializer": {"config": {"scale": 1.0, "distribution": "uniform", "seed": null, "mode": "fan_avg"}, "class_name": "VarianceScaling"}, "depthwise_constraint": null, "data_format": "channels_last", "bias_constraint": null, "activation": "linear", "depth_multiplier": 1, "activity_regularizer": null, "dilation_rate": [1, 1], "depthwise_regularizer": null, "strides": [1, 1], "bias_initializer": {"config": {}, "class_name": "Zeros"}}, "class_name": "DepthwiseConv2D", "inbound_nodes": [[["conv_pw_4_relu", 0, 0, {}]]]}, {"name": "conv_dw_5_bn", "config": {"trainable": true, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "gamma_constraint": null, "beta_constraint": null, "gamma_initializer": {"config": {}, "class_name": "Ones"}, "name": "conv_dw_5_bn", "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "epsilon": 0.001, "momentum": 0.99, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "axis": -1, "scale": true, "center": true, "beta_regularizer": null, "gamma_regularizer": null}, "class_name": "BatchNormalization", "inbound_nodes": [[["conv_dw_5", 0, 0, {}]]]}, {"name": "conv_dw_5_relu", "config": {"name": "conv_dw_5_relu", "trainable": true, "activation": "relu6"}, "class_name": "Activation", "inbound_nodes": [[["conv_dw_5_bn", 0, 0, {}]]]}, {"name": "conv_pw_5", "config": {"trainable": true, "bias_regularizer": null, "kernel_constraint": null, "padding": "same", "name": "conv_pw_5", "kernel_size": [1, 1], "filters": 128, "use_bias": false, "bias_constraint": null, "kernel_regularizer": null, "activation": "linear", "dilation_rate": [1, 1], "activity_regularizer": null, "data_format": "channels_last", "strides": [1, 1], "bias_initializer": {"config": {}, "class_name": "Zeros"}, "kernel_initializer": {"config": {"scale": 1.0, "distribution": "uniform", "seed": null, "mode": "fan_avg"}, "class_name": "VarianceScaling"}}, "class_name": "Conv2D", "inbound_nodes": [[["conv_dw_5_relu", 0, 0, {}]]]}, {"name": "conv_pw_5_bn", "config": {"trainable": true, "beta_initializer": {"config": {}, "class_name": 
"Zeros"}, "gamma_constraint": null, "beta_constraint": null, "gamma_initializer": {"config": {}, "class_name": "Ones"}, "name": "conv_pw_5_bn", "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "epsilon": 0.001, "momentum": 0.99, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "axis": -1, "scale": true, "center": true, "beta_regularizer": null, "gamma_regularizer": null}, "class_name": "BatchNormalization", "inbound_nodes": [[["conv_pw_5", 0, 0, {}]]]}, {"name": "conv_pw_5_relu", "config": {"name": "conv_pw_5_relu", "trainable": true, "activation": "relu6"}, "class_name": "Activation", "inbound_nodes": [[["conv_pw_5_bn", 0, 0, {}]]]}, {"name": "conv_dw_6", "config": {"bias_regularizer": null, "trainable": true, "use_bias": false, "padding": "same", "name": "conv_dw_6", "kernel_size": [3, 3], "depthwise_initializer": {"config": {"scale": 1.0, "distribution": "uniform", "seed": null, "mode": "fan_avg"}, "class_name": "VarianceScaling"}, "depthwise_constraint": null, "data_format": "channels_last", "bias_constraint": null, "activation": "linear", "depth_multiplier": 1, "activity_regularizer": null, "dilation_rate": [1, 1], "depthwise_regularizer": null, "strides": [2, 2], "bias_initializer": {"config": {}, "class_name": "Zeros"}}, "class_name": "DepthwiseConv2D", "inbound_nodes": [[["conv_pw_5_relu", 0, 0, {}]]]}, {"name": "conv_dw_6_bn", "config": {"trainable": true, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "gamma_constraint": null, "beta_constraint": null, "gamma_initializer": {"config": {}, "class_name": "Ones"}, "name": "conv_dw_6_bn", "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "epsilon": 0.001, "momentum": 0.99, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "axis": -1, "scale": true, "center": true, "beta_regularizer": null, "gamma_regularizer": null}, "class_name": "BatchNormalization", "inbound_nodes": [[["conv_dw_6", 0, 0, {}]]]}, {"name": "conv_dw_6_relu", "config": {"name": "conv_dw_6_relu", "trainable": true, "activation": "relu6"}, "class_name": "Activation", "inbound_nodes": [[["conv_dw_6_bn", 0, 0, {}]]]}, {"name": "conv_pw_6", "config": {"trainable": true, "bias_regularizer": null, "kernel_constraint": null, "padding": "same", "name": "conv_pw_6", "kernel_size": [1, 1], "filters": 256, "use_bias": false, "bias_constraint": null, "kernel_regularizer": null, "activation": "linear", "dilation_rate": [1, 1], "activity_regularizer": null, "data_format": "channels_last", "strides": [1, 1], "bias_initializer": {"config": {}, "class_name": "Zeros"}, "kernel_initializer": {"config": {"scale": 1.0, "distribution": "uniform", "seed": null, "mode": "fan_avg"}, "class_name": "VarianceScaling"}}, "class_name": "Conv2D", "inbound_nodes": [[["conv_dw_6_relu", 0, 0, {}]]]}, {"name": "conv_pw_6_bn", "config": {"trainable": true, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "gamma_constraint": null, "beta_constraint": null, "gamma_initializer": {"config": {}, "class_name": "Ones"}, "name": "conv_pw_6_bn", "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "epsilon": 0.001, "momentum": 0.99, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "axis": -1, "scale": true, "center": true, "beta_regularizer": null, "gamma_regularizer": null}, "class_name": "BatchNormalization", "inbound_nodes": [[["conv_pw_6", 0, 0, {}]]]}, {"name": "conv_pw_6_relu", "config": {"name": "conv_pw_6_relu", "trainable": true, "activation": "relu6"}, "class_name": "Activation", 
"inbound_nodes": [[["conv_pw_6_bn", 0, 0, {}]]]}, {"name": "conv_dw_7", "config": {"bias_regularizer": null, "trainable": true, "use_bias": false, "padding": "same", "name": "conv_dw_7", "kernel_size": [3, 3], "depthwise_initializer": {"config": {"scale": 1.0, "distribution": "uniform", "seed": null, "mode": "fan_avg"}, "class_name": "VarianceScaling"}, "depthwise_constraint": null, "data_format": "channels_last", "bias_constraint": null, "activation": "linear", "depth_multiplier": 1, "activity_regularizer": null, "dilation_rate": [1, 1], "depthwise_regularizer": null, "strides": [1, 1], "bias_initializer": {"config": {}, "class_name": "Zeros"}}, "class_name": "DepthwiseConv2D", "inbound_nodes": [[["conv_pw_6_relu", 0, 0, {}]]]}, {"name": "conv_dw_7_bn", "config": {"trainable": true, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "gamma_constraint": null, "beta_constraint": null, "gamma_initializer": {"config": {}, "class_name": "Ones"}, "name": "conv_dw_7_bn", "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "epsilon": 0.001, "momentum": 0.99, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "axis": -1, "scale": true, "center": true, "beta_regularizer": null, "gamma_regularizer": null}, "class_name": "BatchNormalization", "inbound_nodes": [[["conv_dw_7", 0, 0, {}]]]}, {"name": "conv_dw_7_relu", "config": {"name": "conv_dw_7_relu", "trainable": true, "activation": "relu6"}, "class_name": "Activation", "inbound_nodes": [[["conv_dw_7_bn", 0, 0, {}]]]}, {"name": "conv_pw_7", "config": {"trainable": true, "bias_regularizer": null, "kernel_constraint": null, "padding": "same", "name": "conv_pw_7", "kernel_size": [1, 1], "filters": 256, "use_bias": false, "bias_constraint": null, "kernel_regularizer": null, "activation": "linear", "dilation_rate": [1, 1], "activity_regularizer": null, "data_format": "channels_last", "strides": [1, 1], "bias_initializer": {"config": {}, "class_name": "Zeros"}, "kernel_initializer": {"config": {"scale": 1.0, "distribution": "uniform", "seed": null, "mode": "fan_avg"}, "class_name": "VarianceScaling"}}, "class_name": "Conv2D", "inbound_nodes": [[["conv_dw_7_relu", 0, 0, {}]]]}, {"name": "conv_pw_7_bn", "config": {"trainable": true, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "gamma_constraint": null, "beta_constraint": null, "gamma_initializer": {"config": {}, "class_name": "Ones"}, "name": "conv_pw_7_bn", "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "epsilon": 0.001, "momentum": 0.99, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "axis": -1, "scale": true, "center": true, "beta_regularizer": null, "gamma_regularizer": null}, "class_name": "BatchNormalization", "inbound_nodes": [[["conv_pw_7", 0, 0, {}]]]}, {"name": "conv_pw_7_relu", "config": {"name": "conv_pw_7_relu", "trainable": true, "activation": "relu6"}, "class_name": "Activation", "inbound_nodes": [[["conv_pw_7_bn", 0, 0, {}]]]}, {"name": "conv_dw_8", "config": {"bias_regularizer": null, "trainable": true, "use_bias": false, "padding": "same", "name": "conv_dw_8", "kernel_size": [3, 3], "depthwise_initializer": {"config": {"scale": 1.0, "distribution": "uniform", "seed": null, "mode": "fan_avg"}, "class_name": "VarianceScaling"}, "depthwise_constraint": null, "data_format": "channels_last", "bias_constraint": null, "activation": "linear", "depth_multiplier": 1, "activity_regularizer": null, "dilation_rate": [1, 1], "depthwise_regularizer": null, "strides": [1, 1], "bias_initializer": {"config": {}, 
"class_name": "Zeros"}}, "class_name": "DepthwiseConv2D", "inbound_nodes": [[["conv_pw_7_relu", 0, 0, {}]]]}, {"name": "conv_dw_8_bn", "config": {"trainable": true, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "gamma_constraint": null, "beta_constraint": null, "gamma_initializer": {"config": {}, "class_name": "Ones"}, "name": "conv_dw_8_bn", "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "epsilon": 0.001, "momentum": 0.99, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "axis": -1, "scale": true, "center": true, "beta_regularizer": null, "gamma_regularizer": null}, "class_name": "BatchNormalization", "inbound_nodes": [[["conv_dw_8", 0, 0, {}]]]}, {"name": "conv_dw_8_relu", "config": {"name": "conv_dw_8_relu", "trainable": true, "activation": "relu6"}, "class_name": "Activation", "inbound_nodes": [[["conv_dw_8_bn", 0, 0, {}]]]}, {"name": "conv_pw_8", "config": {"trainable": true, "bias_regularizer": null, "kernel_constraint": null, "padding": "same", "name": "conv_pw_8", "kernel_size": [1, 1], "filters": 256, "use_bias": false, "bias_constraint": null, "kernel_regularizer": null, "activation": "linear", "dilation_rate": [1, 1], "activity_regularizer": null, "data_format": "channels_last", "strides": [1, 1], "bias_initializer": {"config": {}, "class_name": "Zeros"}, "kernel_initializer": {"config": {"scale": 1.0, "distribution": "uniform", "seed": null, "mode": "fan_avg"}, "class_name": "VarianceScaling"}}, "class_name": "Conv2D", "inbound_nodes": [[["conv_dw_8_relu", 0, 0, {}]]]}, {"name": "conv_pw_8_bn", "config": {"trainable": true, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "gamma_constraint": null, "beta_constraint": null, "gamma_initializer": {"config": {}, "class_name": "Ones"}, "name": "conv_pw_8_bn", "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "epsilon": 0.001, "momentum": 0.99, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "axis": -1, "scale": true, "center": true, "beta_regularizer": null, "gamma_regularizer": null}, "class_name": "BatchNormalization", "inbound_nodes": [[["conv_pw_8", 0, 0, {}]]]}, {"name": "conv_pw_8_relu", "config": {"name": "conv_pw_8_relu", "trainable": true, "activation": "relu6"}, "class_name": "Activation", "inbound_nodes": [[["conv_pw_8_bn", 0, 0, {}]]]}, {"name": "conv_dw_9", "config": {"bias_regularizer": null, "trainable": true, "use_bias": false, "padding": "same", "name": "conv_dw_9", "kernel_size": [3, 3], "depthwise_initializer": {"config": {"scale": 1.0, "distribution": "uniform", "seed": null, "mode": "fan_avg"}, "class_name": "VarianceScaling"}, "depthwise_constraint": null, "data_format": "channels_last", "bias_constraint": null, "activation": "linear", "depth_multiplier": 1, "activity_regularizer": null, "dilation_rate": [1, 1], "depthwise_regularizer": null, "strides": [1, 1], "bias_initializer": {"config": {}, "class_name": "Zeros"}}, "class_name": "DepthwiseConv2D", "inbound_nodes": [[["conv_pw_8_relu", 0, 0, {}]]]}, {"name": "conv_dw_9_bn", "config": {"trainable": true, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "gamma_constraint": null, "beta_constraint": null, "gamma_initializer": {"config": {}, "class_name": "Ones"}, "name": "conv_dw_9_bn", "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "epsilon": 0.001, "momentum": 0.99, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "axis": -1, "scale": true, "center": true, "beta_regularizer": null, "gamma_regularizer": null}, 
"class_name": "BatchNormalization", "inbound_nodes": [[["conv_dw_9", 0, 0, {}]]]}, {"name": "conv_dw_9_relu", "config": {"name": "conv_dw_9_relu", "trainable": true, "activation": "relu6"}, "class_name": "Activation", "inbound_nodes": [[["conv_dw_9_bn", 0, 0, {}]]]}, {"name": "conv_pw_9", "config": {"trainable": true, "bias_regularizer": null, "kernel_constraint": null, "padding": "same", "name": "conv_pw_9", "kernel_size": [1, 1], "filters": 256, "use_bias": false, "bias_constraint": null, "kernel_regularizer": null, "activation": "linear", "dilation_rate": [1, 1], "activity_regularizer": null, "data_format": "channels_last", "strides": [1, 1], "bias_initializer": {"config": {}, "class_name": "Zeros"}, "kernel_initializer": {"config": {"scale": 1.0, "distribution": "uniform", "seed": null, "mode": "fan_avg"}, "class_name": "VarianceScaling"}}, "class_name": "Conv2D", "inbound_nodes": [[["conv_dw_9_relu", 0, 0, {}]]]}, {"name": "conv_pw_9_bn", "config": {"trainable": true, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "gamma_constraint": null, "beta_constraint": null, "gamma_initializer": {"config": {}, "class_name": "Ones"}, "name": "conv_pw_9_bn", "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "epsilon": 0.001, "momentum": 0.99, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "axis": -1, "scale": true, "center": true, "beta_regularizer": null, "gamma_regularizer": null}, "class_name": "BatchNormalization", "inbound_nodes": [[["conv_pw_9", 0, 0, {}]]]}, {"name": "conv_pw_9_relu", "config": {"name": "conv_pw_9_relu", "trainable": true, "activation": "relu6"}, "class_name": "Activation", "inbound_nodes": [[["conv_pw_9_bn", 0, 0, {}]]]}, {"name": "conv_dw_10", "config": {"bias_regularizer": null, "trainable": true, "use_bias": false, "padding": "same", "name": "conv_dw_10", "kernel_size": [3, 3], "depthwise_initializer": {"config": {"scale": 1.0, "distribution": "uniform", "seed": null, "mode": "fan_avg"}, "class_name": "VarianceScaling"}, "depthwise_constraint": null, "data_format": "channels_last", "bias_constraint": null, "activation": "linear", "depth_multiplier": 1, "activity_regularizer": null, "dilation_rate": [1, 1], "depthwise_regularizer": null, "strides": [1, 1], "bias_initializer": {"config": {}, "class_name": "Zeros"}}, "class_name": "DepthwiseConv2D", "inbound_nodes": [[["conv_pw_9_relu", 0, 0, {}]]]}, {"name": "conv_dw_10_bn", "config": {"trainable": true, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "gamma_constraint": null, "beta_constraint": null, "gamma_initializer": {"config": {}, "class_name": "Ones"}, "name": "conv_dw_10_bn", "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "epsilon": 0.001, "momentum": 0.99, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "axis": -1, "scale": true, "center": true, "beta_regularizer": null, "gamma_regularizer": null}, "class_name": "BatchNormalization", "inbound_nodes": [[["conv_dw_10", 0, 0, {}]]]}, {"name": "conv_dw_10_relu", "config": {"name": "conv_dw_10_relu", "trainable": true, "activation": "relu6"}, "class_name": "Activation", "inbound_nodes": [[["conv_dw_10_bn", 0, 0, {}]]]}, {"name": "conv_pw_10", "config": {"trainable": true, "bias_regularizer": null, "kernel_constraint": null, "padding": "same", "name": "conv_pw_10", "kernel_size": [1, 1], "filters": 256, "use_bias": false, "bias_constraint": null, "kernel_regularizer": null, "activation": "linear", "dilation_rate": [1, 1], "activity_regularizer": null, "data_format": 
"channels_last", "strides": [1, 1], "bias_initializer": {"config": {}, "class_name": "Zeros"}, "kernel_initializer": {"config": {"scale": 1.0, "distribution": "uniform", "seed": null, "mode": "fan_avg"}, "class_name": "VarianceScaling"}}, "class_name": "Conv2D", "inbound_nodes": [[["conv_dw_10_relu", 0, 0, {}]]]}, {"name": "conv_pw_10_bn", "config": {"trainable": true, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "gamma_constraint": null, "beta_constraint": null, "gamma_initializer": {"config": {}, "class_name": "Ones"}, "name": "conv_pw_10_bn", "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "epsilon": 0.001, "momentum": 0.99, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "axis": -1, "scale": true, "center": true, "beta_regularizer": null, "gamma_regularizer": null}, "class_name": "BatchNormalization", "inbound_nodes": [[["conv_pw_10", 0, 0, {}]]]}, {"name": "conv_pw_10_relu", "config": {"name": "conv_pw_10_relu", "trainable": true, "activation": "relu6"}, "class_name": "Activation", "inbound_nodes": [[["conv_pw_10_bn", 0, 0, {}]]]}, {"name": "conv_dw_11", "config": {"bias_regularizer": null, "trainable": true, "use_bias": false, "padding": "same", "name": "conv_dw_11", "kernel_size": [3, 3], "depthwise_initializer": {"config": {"scale": 1.0, "distribution": "uniform", "seed": null, "mode": "fan_avg"}, "class_name": "VarianceScaling"}, "depthwise_constraint": null, "data_format": "channels_last", "bias_constraint": null, "activation": "linear", "depth_multiplier": 1, "activity_regularizer": null, "dilation_rate": [1, 1], "depthwise_regularizer": null, "strides": [1, 1], "bias_initializer": {"config": {}, "class_name": "Zeros"}}, "class_name": "DepthwiseConv2D", "inbound_nodes": [[["conv_pw_10_relu", 0, 0, {}]]]}, {"name": "conv_dw_11_bn", "config": {"trainable": true, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "gamma_constraint": null, "beta_constraint": null, "gamma_initializer": {"config": {}, "class_name": "Ones"}, "name": "conv_dw_11_bn", "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "epsilon": 0.001, "momentum": 0.99, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "axis": -1, "scale": true, "center": true, "beta_regularizer": null, "gamma_regularizer": null}, "class_name": "BatchNormalization", "inbound_nodes": [[["conv_dw_11", 0, 0, {}]]]}, {"name": "conv_dw_11_relu", "config": {"name": "conv_dw_11_relu", "trainable": true, "activation": "relu6"}, "class_name": "Activation", "inbound_nodes": [[["conv_dw_11_bn", 0, 0, {}]]]}, {"name": "conv_pw_11", "config": {"trainable": true, "bias_regularizer": null, "kernel_constraint": null, "padding": "same", "name": "conv_pw_11", "kernel_size": [1, 1], "filters": 256, "use_bias": false, "bias_constraint": null, "kernel_regularizer": null, "activation": "linear", "dilation_rate": [1, 1], "activity_regularizer": null, "data_format": "channels_last", "strides": [1, 1], "bias_initializer": {"config": {}, "class_name": "Zeros"}, "kernel_initializer": {"config": {"scale": 1.0, "distribution": "uniform", "seed": null, "mode": "fan_avg"}, "class_name": "VarianceScaling"}}, "class_name": "Conv2D", "inbound_nodes": [[["conv_dw_11_relu", 0, 0, {}]]]}, {"name": "conv_pw_11_bn", "config": {"trainable": true, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "gamma_constraint": null, "beta_constraint": null, "gamma_initializer": {"config": {}, "class_name": "Ones"}, "name": "conv_pw_11_bn", "moving_mean_initializer": {"config": {}, 
"class_name": "Zeros"}, "epsilon": 0.001, "momentum": 0.99, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "axis": -1, "scale": true, "center": true, "beta_regularizer": null, "gamma_regularizer": null}, "class_name": "BatchNormalization", "inbound_nodes": [[["conv_pw_11", 0, 0, {}]]]}, {"name": "conv_pw_11_relu", "config": {"name": "conv_pw_11_relu", "trainable": true, "activation": "relu6"}, "class_name": "Activation", "inbound_nodes": [[["conv_pw_11_bn", 0, 0, {}]]]}, {"name": "conv_dw_12", "config": {"bias_regularizer": null, "trainable": true, "use_bias": false, "padding": "same", "name": "conv_dw_12", "kernel_size": [3, 3], "depthwise_initializer": {"config": {"scale": 1.0, "distribution": "uniform", "seed": null, "mode": "fan_avg"}, "class_name": "VarianceScaling"}, "depthwise_constraint": null, "data_format": "channels_last", "bias_constraint": null, "activation": "linear", "depth_multiplier": 1, "activity_regularizer": null, "dilation_rate": [1, 1], "depthwise_regularizer": null, "strides": [2, 2], "bias_initializer": {"config": {}, "class_name": "Zeros"}}, "class_name": "DepthwiseConv2D", "inbound_nodes": [[["conv_pw_11_relu", 0, 0, {}]]]}, {"name": "conv_dw_12_bn", "config": {"trainable": true, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "gamma_constraint": null, "beta_constraint": null, "gamma_initializer": {"config": {}, "class_name": "Ones"}, "name": "conv_dw_12_bn", "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "epsilon": 0.001, "momentum": 0.99, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "axis": -1, "scale": true, "center": true, "beta_regularizer": null, "gamma_regularizer": null}, "class_name": "BatchNormalization", "inbound_nodes": [[["conv_dw_12", 0, 0, {}]]]}, {"name": "conv_dw_12_relu", "config": {"name": "conv_dw_12_relu", "trainable": true, "activation": "relu6"}, "class_name": "Activation", "inbound_nodes": [[["conv_dw_12_bn", 0, 0, {}]]]}, {"name": "conv_pw_12", "config": {"trainable": true, "bias_regularizer": null, "kernel_constraint": null, "padding": "same", "name": "conv_pw_12", "kernel_size": [1, 1], "filters": 512, "use_bias": false, "bias_constraint": null, "kernel_regularizer": null, "activation": "linear", "dilation_rate": [1, 1], "activity_regularizer": null, "data_format": "channels_last", "strides": [1, 1], "bias_initializer": {"config": {}, "class_name": "Zeros"}, "kernel_initializer": {"config": {"scale": 1.0, "distribution": "uniform", "seed": null, "mode": "fan_avg"}, "class_name": "VarianceScaling"}}, "class_name": "Conv2D", "inbound_nodes": [[["conv_dw_12_relu", 0, 0, {}]]]}, {"name": "conv_pw_12_bn", "config": {"trainable": true, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "gamma_constraint": null, "beta_constraint": null, "gamma_initializer": {"config": {}, "class_name": "Ones"}, "name": "conv_pw_12_bn", "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "epsilon": 0.001, "momentum": 0.99, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "axis": -1, "scale": true, "center": true, "beta_regularizer": null, "gamma_regularizer": null}, "class_name": "BatchNormalization", "inbound_nodes": [[["conv_pw_12", 0, 0, {}]]]}, {"name": "conv_pw_12_relu", "config": {"name": "conv_pw_12_relu", "trainable": true, "activation": "relu6"}, "class_name": "Activation", "inbound_nodes": [[["conv_pw_12_bn", 0, 0, {}]]]}, {"name": "conv_dw_13", "config": {"bias_regularizer": null, "trainable": true, "use_bias": false, "padding": 
"same", "name": "conv_dw_13", "kernel_size": [3, 3], "depthwise_initializer": {"config": {"scale": 1.0, "distribution": "uniform", "seed": null, "mode": "fan_avg"}, "class_name": "VarianceScaling"}, "depthwise_constraint": null, "data_format": "channels_last", "bias_constraint": null, "activation": "linear", "depth_multiplier": 1, "activity_regularizer": null, "dilation_rate": [1, 1], "depthwise_regularizer": null, "strides": [1, 1], "bias_initializer": {"config": {}, "class_name": "Zeros"}}, "class_name": "DepthwiseConv2D", "inbound_nodes": [[["conv_pw_12_relu", 0, 0, {}]]]}, {"name": "conv_dw_13_bn", "config": {"trainable": true, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "gamma_constraint": null, "beta_constraint": null, "gamma_initializer": {"config": {}, "class_name": "Ones"}, "name": "conv_dw_13_bn", "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "epsilon": 0.001, "momentum": 0.99, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "axis": -1, "scale": true, "center": true, "beta_regularizer": null, "gamma_regularizer": null}, "class_name": "BatchNormalization", "inbound_nodes": [[["conv_dw_13", 0, 0, {}]]]}, {"name": "conv_dw_13_relu", "config": {"name": "conv_dw_13_relu", "trainable": true, "activation": "relu6"}, "class_name": "Activation", "inbound_nodes": [[["conv_dw_13_bn", 0, 0, {}]]]}, {"name": "conv_pw_13", "config": {"trainable": true, "bias_regularizer": null, "kernel_constraint": null, "padding": "same", "name": "conv_pw_13", "kernel_size": [1, 1], "filters": 512, "use_bias": false, "bias_constraint": null, "kernel_regularizer": null, "activation": "linear", "dilation_rate": [1, 1], "activity_regularizer": null, "data_format": "channels_last", "strides": [1, 1], "bias_initializer": {"config": {}, "class_name": "Zeros"}, "kernel_initializer": {"config": {"scale": 1.0, "distribution": "uniform", "seed": null, "mode": "fan_avg"}, "class_name": "VarianceScaling"}}, "class_name": "Conv2D", "inbound_nodes": [[["conv_dw_13_relu", 0, 0, {}]]]}, {"name": "conv_pw_13_bn", "config": {"trainable": true, "beta_initializer": {"config": {}, "class_name": "Zeros"}, "gamma_constraint": null, "beta_constraint": null, "gamma_initializer": {"config": {}, "class_name": "Ones"}, "name": "conv_pw_13_bn", "moving_mean_initializer": {"config": {}, "class_name": "Zeros"}, "epsilon": 0.001, "momentum": 0.99, "moving_variance_initializer": {"config": {}, "class_name": "Ones"}, "axis": -1, "scale": true, "center": true, "beta_regularizer": null, "gamma_regularizer": null}, "class_name": "BatchNormalization", "inbound_nodes": [[["conv_pw_13", 0, 0, {}]]]}, {"name": "conv_pw_13_relu", "config": {"name": "conv_pw_13_relu", "trainable": true, "activation": "relu6"}, "class_name": "Activation", "inbound_nodes": [[["conv_pw_13_bn", 0, 0, {}]]]}], "input_layers": [["input_2", 0, 0]]}, "class_name": "Model", "inbound_nodes": [[["input_1", 0, 0, {}]]]}, {"name": "conv2d_1", "config": {"trainable": true, "bias_regularizer": null, "kernel_constraint": null, "padding": "valid", "name": "conv2d_1", "kernel_size": [1, 1], "data_format": "channels_last", "kernel_regularizer": null, "use_bias": true, "bias_constraint": null, "filters": 20, "bias_initializer": {"config": {}, "class_name": "Zeros"}, "activity_regularizer": null, "dilation_rate": [1, 1], "strides": [1, 1], "activation": "relu", "kernel_initializer": {"config": {"scale": 1.0, "distribution": "uniform", "seed": null, "mode": "fan_avg"}, "class_name": "VarianceScaling"}}, "class_name": "Conv2D", 
"inbound_nodes": [[["mobilenet_0.50_64", 1, 0, {}]]]}, {"name": "flatten_1", "config": {"name": "flatten_1", "trainable": true}, "class_name": "Flatten", "inbound_nodes": [[["conv2d_1", 0, 0, {}]]]}, {"name": "dropout_1", "config": {"name": "dropout_1", "trainable": true, "rate": 0.2}, "class_name": "Dropout", "inbound_nodes": [[["flatten_1", 0, 0, {}]]]}, {"name": "feat_a", "config": {"trainable": true, "bias_regularizer": null, "kernel_constraint": null, "name": "feat_a", "use_bias": true, "bias_constraint": null, "kernel_regularizer": null, "activation": "relu", "activity_regularizer": null, "units": 32, "kernel_initializer": {"config": {"scale": 1.0, "distribution": "uniform", "seed": null, "mode": "fan_avg"}, "class_name": "VarianceScaling"}, "bias_initializer": {"config": {}, "class_name": "Zeros"}}, "class_name": "Dense", "inbound_nodes": [[["dropout_1", 0, 0, {}]]]}, {"name": "pred_a", "config": {"trainable": true, "bias_regularizer": null, "kernel_constraint": null, "name": "pred_a", "use_bias": true, "bias_constraint": null, "kernel_regularizer": null, "activation": "linear", "activity_regularizer": null, "units": 1, "kernel_initializer": {"config": {"scale": 1.0, "distribution": "uniform", "seed": null, "mode": "fan_avg"}, "class_name": "VarianceScaling"}, "bias_initializer": {"config": {}, "class_name": "Zeros"}}, "class_name": "Dense", "inbound_nodes": [[["feat_a", 0, 0, {}]]]}], "output_layers": [["pred_a", 0, 0]], "input_layers": [["input_1", 0, 0]]}, "class_name": "Model", "backend": "tensorflow"} \ No newline at end of file diff --git a/pre-trained/wiki/mobilenet_reg_0.5_64/mobilenet_reg_0.5_64.png b/pre-trained/wiki/mobilenet_reg_0.5_64/mobilenet_reg_0.5_64.png new file mode 100644 index 0000000..13cb8d7 Binary files /dev/null and b/pre-trained/wiki/mobilenet_reg_0.5_64/mobilenet_reg_0.5_64.png differ diff --git a/pre-trained/wiki/mobilenet_reg_0.5_64/performance.pdf b/pre-trained/wiki/mobilenet_reg_0.5_64/performance.pdf new file mode 100644 index 0000000..a7ab766 Binary files /dev/null and b/pre-trained/wiki/mobilenet_reg_0.5_64/performance.pdf differ diff --git a/pre-trained/wiki/mobilenet_reg_0.5_64/performance.png b/pre-trained/wiki/mobilenet_reg_0.5_64/performance.png new file mode 100644 index 0000000..4642f1e Binary files /dev/null and b/pre-trained/wiki/mobilenet_reg_0.5_64/performance.png differ diff --git a/pre-trained/wiki/ssrnet_3_3_3_64_1.0_1.0/history_ssrnet_3_3_3_64_1.0_1.0.h5 b/pre-trained/wiki/ssrnet_3_3_3_64_1.0_1.0/history_ssrnet_3_3_3_64_1.0_1.0.h5 new file mode 100644 index 0000000..878968d Binary files /dev/null and b/pre-trained/wiki/ssrnet_3_3_3_64_1.0_1.0/history_ssrnet_3_3_3_64_1.0_1.0.h5 differ diff --git a/pre-trained/wiki/ssrnet_3_3_3_64_1.0_1.0/loss.pdf b/pre-trained/wiki/ssrnet_3_3_3_64_1.0_1.0/loss.pdf new file mode 100644 index 0000000..6f8ab29 Binary files /dev/null and b/pre-trained/wiki/ssrnet_3_3_3_64_1.0_1.0/loss.pdf differ diff --git a/pre-trained/wiki/ssrnet_3_3_3_64_1.0_1.0/performance.pdf b/pre-trained/wiki/ssrnet_3_3_3_64_1.0_1.0/performance.pdf new file mode 100644 index 0000000..336015f Binary files /dev/null and b/pre-trained/wiki/ssrnet_3_3_3_64_1.0_1.0/performance.pdf differ diff --git a/pre-trained/wiki/ssrnet_3_3_3_64_1.0_1.0/ssrnet_3_3_3_64_1.0_1.0.h5 b/pre-trained/wiki/ssrnet_3_3_3_64_1.0_1.0/ssrnet_3_3_3_64_1.0_1.0.h5 new file mode 100644 index 0000000..c41ac07 Binary files /dev/null and b/pre-trained/wiki/ssrnet_3_3_3_64_1.0_1.0/ssrnet_3_3_3_64_1.0_1.0.h5 differ diff --git 
a/pre-trained/wiki/ssrnet_3_3_3_64_1.0_1.0/ssrnet_3_3_3_64_1.0_1.0.json b/pre-trained/wiki/ssrnet_3_3_3_64_1.0_1.0/ssrnet_3_3_3_64_1.0_1.0.json new file mode 100644 index 0000000..f9df313 --- /dev/null +++ b/pre-trained/wiki/ssrnet_3_3_3_64_1.0_1.0/ssrnet_3_3_3_64_1.0_1.0.json @@ -0,0 +1 @@ +{"class_name": "Model", "keras_version": "2.0.6", "backend": "tensorflow", "config": {"name": "model_1", "layers": [{"name": "input_1", "class_name": "InputLayer", "config": {"name": "input_1", "sparse": false, "batch_input_shape": [null, 64, 64, 3], "dtype": "float32"}, "inbound_nodes": []}, {"name": "conv2d_5", "class_name": "Conv2D", "config": {"kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "uniform", "seed": null, "scale": 1.0, "mode": "fan_avg"}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "name": "conv2d_5", "data_format": "channels_last", "activity_regularizer": null, "kernel_size": [3, 3], "dilation_rate": [1, 1], "use_bias": true, "strides": [1, 1], "trainable": true, "padding": "valid", "bias_regularizer": null, "bias_constraint": null, "activation": "linear", "filters": 16, "kernel_constraint": null, "kernel_regularizer": null}, "inbound_nodes": [[["input_1", 0, 0, {}]]]}, {"name": "conv2d_1", "class_name": "Conv2D", "config": {"kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "uniform", "seed": null, "scale": 1.0, "mode": "fan_avg"}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "name": "conv2d_1", "data_format": "channels_last", "activity_regularizer": null, "kernel_size": [3, 3], "dilation_rate": [1, 1], "use_bias": true, "strides": [1, 1], "trainable": true, "padding": "valid", "bias_regularizer": null, "bias_constraint": null, "activation": "linear", "filters": 32, "kernel_constraint": null, "kernel_regularizer": null}, "inbound_nodes": [[["input_1", 0, 0, {}]]]}, {"name": "batch_normalization_5", "class_name": "BatchNormalization", "config": {"gamma_regularizer": null, "gamma_constraint": null, "center": true, "scale": true, "name": "batch_normalization_5", "axis": -1, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "beta_regularizer": null, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_constraint": null, "momentum": 0.99, "trainable": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "epsilon": 0.001, "gamma_initializer": {"class_name": "Ones", "config": {}}}, "inbound_nodes": [[["conv2d_5", 0, 0, {}]]]}, {"name": "batch_normalization_1", "class_name": "BatchNormalization", "config": {"gamma_regularizer": null, "gamma_constraint": null, "center": true, "scale": true, "name": "batch_normalization_1", "axis": -1, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "beta_regularizer": null, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_constraint": null, "momentum": 0.99, "trainable": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "epsilon": 0.001, "gamma_initializer": {"class_name": "Ones", "config": {}}}, "inbound_nodes": [[["conv2d_1", 0, 0, {}]]]}, {"name": "activation_5", "class_name": "Activation", "config": {"name": "activation_5", "trainable": true, "activation": "tanh"}, "inbound_nodes": [[["batch_normalization_5", 0, 0, {}]]]}, {"name": "activation_1", "class_name": "Activation", "config": {"name": "activation_1", "trainable": true, "activation": "relu"}, "inbound_nodes": [[["batch_normalization_1", 0, 0, {}]]]}, {"name": "max_pooling2d_1", "class_name": 
"MaxPooling2D", "config": {"pool_size": [2, 2], "padding": "valid", "data_format": "channels_last", "name": "max_pooling2d_1", "trainable": true, "strides": [2, 2]}, "inbound_nodes": [[["activation_5", 0, 0, {}]]]}, {"name": "average_pooling2d_1", "class_name": "AveragePooling2D", "config": {"pool_size": [2, 2], "padding": "valid", "data_format": "channels_last", "name": "average_pooling2d_1", "trainable": true, "strides": [2, 2]}, "inbound_nodes": [[["activation_1", 0, 0, {}]]]}, {"name": "conv2d_6", "class_name": "Conv2D", "config": {"kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "uniform", "seed": null, "scale": 1.0, "mode": "fan_avg"}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "name": "conv2d_6", "data_format": "channels_last", "activity_regularizer": null, "kernel_size": [3, 3], "dilation_rate": [1, 1], "use_bias": true, "strides": [1, 1], "trainable": true, "padding": "valid", "bias_regularizer": null, "bias_constraint": null, "activation": "linear", "filters": 16, "kernel_constraint": null, "kernel_regularizer": null}, "inbound_nodes": [[["max_pooling2d_1", 0, 0, {}]]]}, {"name": "conv2d_2", "class_name": "Conv2D", "config": {"kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "uniform", "seed": null, "scale": 1.0, "mode": "fan_avg"}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "name": "conv2d_2", "data_format": "channels_last", "activity_regularizer": null, "kernel_size": [3, 3], "dilation_rate": [1, 1], "use_bias": true, "strides": [1, 1], "trainable": true, "padding": "valid", "bias_regularizer": null, "bias_constraint": null, "activation": "linear", "filters": 32, "kernel_constraint": null, "kernel_regularizer": null}, "inbound_nodes": [[["average_pooling2d_1", 0, 0, {}]]]}, {"name": "batch_normalization_6", "class_name": "BatchNormalization", "config": {"gamma_regularizer": null, "gamma_constraint": null, "center": true, "scale": true, "name": "batch_normalization_6", "axis": -1, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "beta_regularizer": null, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_constraint": null, "momentum": 0.99, "trainable": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "epsilon": 0.001, "gamma_initializer": {"class_name": "Ones", "config": {}}}, "inbound_nodes": [[["conv2d_6", 0, 0, {}]]]}, {"name": "batch_normalization_2", "class_name": "BatchNormalization", "config": {"gamma_regularizer": null, "gamma_constraint": null, "center": true, "scale": true, "name": "batch_normalization_2", "axis": -1, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "beta_regularizer": null, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_constraint": null, "momentum": 0.99, "trainable": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "epsilon": 0.001, "gamma_initializer": {"class_name": "Ones", "config": {}}}, "inbound_nodes": [[["conv2d_2", 0, 0, {}]]]}, {"name": "activation_6", "class_name": "Activation", "config": {"name": "activation_6", "trainable": true, "activation": "tanh"}, "inbound_nodes": [[["batch_normalization_6", 0, 0, {}]]]}, {"name": "activation_2", "class_name": "Activation", "config": {"name": "activation_2", "trainable": true, "activation": "relu"}, "inbound_nodes": [[["batch_normalization_2", 0, 0, {}]]]}, {"name": "max_pooling2d_2", "class_name": "MaxPooling2D", "config": {"pool_size": [2, 2], "padding": "valid", "data_format": 
"channels_last", "name": "max_pooling2d_2", "trainable": true, "strides": [2, 2]}, "inbound_nodes": [[["activation_6", 0, 0, {}]]]}, {"name": "average_pooling2d_2", "class_name": "AveragePooling2D", "config": {"pool_size": [2, 2], "padding": "valid", "data_format": "channels_last", "name": "average_pooling2d_2", "trainable": true, "strides": [2, 2]}, "inbound_nodes": [[["activation_2", 0, 0, {}]]]}, {"name": "conv2d_7", "class_name": "Conv2D", "config": {"kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "uniform", "seed": null, "scale": 1.0, "mode": "fan_avg"}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "name": "conv2d_7", "data_format": "channels_last", "activity_regularizer": null, "kernel_size": [3, 3], "dilation_rate": [1, 1], "use_bias": true, "strides": [1, 1], "trainable": true, "padding": "valid", "bias_regularizer": null, "bias_constraint": null, "activation": "linear", "filters": 16, "kernel_constraint": null, "kernel_regularizer": null}, "inbound_nodes": [[["max_pooling2d_2", 0, 0, {}]]]}, {"name": "conv2d_3", "class_name": "Conv2D", "config": {"kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "uniform", "seed": null, "scale": 1.0, "mode": "fan_avg"}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "name": "conv2d_3", "data_format": "channels_last", "activity_regularizer": null, "kernel_size": [3, 3], "dilation_rate": [1, 1], "use_bias": true, "strides": [1, 1], "trainable": true, "padding": "valid", "bias_regularizer": null, "bias_constraint": null, "activation": "linear", "filters": 32, "kernel_constraint": null, "kernel_regularizer": null}, "inbound_nodes": [[["average_pooling2d_2", 0, 0, {}]]]}, {"name": "batch_normalization_7", "class_name": "BatchNormalization", "config": {"gamma_regularizer": null, "gamma_constraint": null, "center": true, "scale": true, "name": "batch_normalization_7", "axis": -1, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "beta_regularizer": null, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_constraint": null, "momentum": 0.99, "trainable": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "epsilon": 0.001, "gamma_initializer": {"class_name": "Ones", "config": {}}}, "inbound_nodes": [[["conv2d_7", 0, 0, {}]]]}, {"name": "batch_normalization_3", "class_name": "BatchNormalization", "config": {"gamma_regularizer": null, "gamma_constraint": null, "center": true, "scale": true, "name": "batch_normalization_3", "axis": -1, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "beta_regularizer": null, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_constraint": null, "momentum": 0.99, "trainable": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "epsilon": 0.001, "gamma_initializer": {"class_name": "Ones", "config": {}}}, "inbound_nodes": [[["conv2d_3", 0, 0, {}]]]}, {"name": "activation_7", "class_name": "Activation", "config": {"name": "activation_7", "trainable": true, "activation": "tanh"}, "inbound_nodes": [[["batch_normalization_7", 0, 0, {}]]]}, {"name": "activation_3", "class_name": "Activation", "config": {"name": "activation_3", "trainable": true, "activation": "relu"}, "inbound_nodes": [[["batch_normalization_3", 0, 0, {}]]]}, {"name": "max_pooling2d_3", "class_name": "MaxPooling2D", "config": {"pool_size": [2, 2], "padding": "valid", "data_format": "channels_last", "name": "max_pooling2d_3", "trainable": true, "strides": [2, 2]}, 
"inbound_nodes": [[["activation_7", 0, 0, {}]]]}, {"name": "average_pooling2d_3", "class_name": "AveragePooling2D", "config": {"pool_size": [2, 2], "padding": "valid", "data_format": "channels_last", "name": "average_pooling2d_3", "trainable": true, "strides": [2, 2]}, "inbound_nodes": [[["activation_3", 0, 0, {}]]]}, {"name": "conv2d_8", "class_name": "Conv2D", "config": {"kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "uniform", "seed": null, "scale": 1.0, "mode": "fan_avg"}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "name": "conv2d_8", "data_format": "channels_last", "activity_regularizer": null, "kernel_size": [3, 3], "dilation_rate": [1, 1], "use_bias": true, "strides": [1, 1], "trainable": true, "padding": "valid", "bias_regularizer": null, "bias_constraint": null, "activation": "linear", "filters": 16, "kernel_constraint": null, "kernel_regularizer": null}, "inbound_nodes": [[["max_pooling2d_3", 0, 0, {}]]]}, {"name": "conv2d_4", "class_name": "Conv2D", "config": {"kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "uniform", "seed": null, "scale": 1.0, "mode": "fan_avg"}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "name": "conv2d_4", "data_format": "channels_last", "activity_regularizer": null, "kernel_size": [3, 3], "dilation_rate": [1, 1], "use_bias": true, "strides": [1, 1], "trainable": true, "padding": "valid", "bias_regularizer": null, "bias_constraint": null, "activation": "linear", "filters": 32, "kernel_constraint": null, "kernel_regularizer": null}, "inbound_nodes": [[["average_pooling2d_3", 0, 0, {}]]]}, {"name": "batch_normalization_8", "class_name": "BatchNormalization", "config": {"gamma_regularizer": null, "gamma_constraint": null, "center": true, "scale": true, "name": "batch_normalization_8", "axis": -1, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "beta_regularizer": null, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_constraint": null, "momentum": 0.99, "trainable": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "epsilon": 0.001, "gamma_initializer": {"class_name": "Ones", "config": {}}}, "inbound_nodes": [[["conv2d_8", 0, 0, {}]]]}, {"name": "batch_normalization_4", "class_name": "BatchNormalization", "config": {"gamma_regularizer": null, "gamma_constraint": null, "center": true, "scale": true, "name": "batch_normalization_4", "axis": -1, "moving_mean_initializer": {"class_name": "Zeros", "config": {}}, "beta_regularizer": null, "moving_variance_initializer": {"class_name": "Ones", "config": {}}, "beta_constraint": null, "momentum": 0.99, "trainable": true, "beta_initializer": {"class_name": "Zeros", "config": {}}, "epsilon": 0.001, "gamma_initializer": {"class_name": "Ones", "config": {}}}, "inbound_nodes": [[["conv2d_4", 0, 0, {}]]]}, {"name": "activation_8", "class_name": "Activation", "config": {"name": "activation_8", "trainable": true, "activation": "tanh"}, "inbound_nodes": [[["batch_normalization_8", 0, 0, {}]]]}, {"name": "activation_4", "class_name": "Activation", "config": {"name": "activation_4", "trainable": true, "activation": "relu"}, "inbound_nodes": [[["batch_normalization_4", 0, 0, {}]]]}, {"name": "conv2d_11", "class_name": "Conv2D", "config": {"kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "uniform", "seed": null, "scale": 1.0, "mode": "fan_avg"}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "name": "conv2d_11", "data_format": 
"channels_last", "activity_regularizer": null, "kernel_size": [1, 1], "dilation_rate": [1, 1], "use_bias": true, "strides": [1, 1], "trainable": true, "padding": "valid", "bias_regularizer": null, "bias_constraint": null, "activation": "relu", "filters": 10, "kernel_constraint": null, "kernel_regularizer": null}, "inbound_nodes": [[["max_pooling2d_2", 0, 0, {}]]]}, {"name": "conv2d_12", "class_name": "Conv2D", "config": {"kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "uniform", "seed": null, "scale": 1.0, "mode": "fan_avg"}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "name": "conv2d_12", "data_format": "channels_last", "activity_regularizer": null, "kernel_size": [1, 1], "dilation_rate": [1, 1], "use_bias": true, "strides": [1, 1], "trainable": true, "padding": "valid", "bias_regularizer": null, "bias_constraint": null, "activation": "relu", "filters": 10, "kernel_constraint": null, "kernel_regularizer": null}, "inbound_nodes": [[["average_pooling2d_2", 0, 0, {}]]]}, {"name": "conv2d_13", "class_name": "Conv2D", "config": {"kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "uniform", "seed": null, "scale": 1.0, "mode": "fan_avg"}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "name": "conv2d_13", "data_format": "channels_last", "activity_regularizer": null, "kernel_size": [1, 1], "dilation_rate": [1, 1], "use_bias": true, "strides": [1, 1], "trainable": true, "padding": "valid", "bias_regularizer": null, "bias_constraint": null, "activation": "relu", "filters": 10, "kernel_constraint": null, "kernel_regularizer": null}, "inbound_nodes": [[["max_pooling2d_1", 0, 0, {}]]]}, {"name": "conv2d_14", "class_name": "Conv2D", "config": {"kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "uniform", "seed": null, "scale": 1.0, "mode": "fan_avg"}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "name": "conv2d_14", "data_format": "channels_last", "activity_regularizer": null, "kernel_size": [1, 1], "dilation_rate": [1, 1], "use_bias": true, "strides": [1, 1], "trainable": true, "padding": "valid", "bias_regularizer": null, "bias_constraint": null, "activation": "relu", "filters": 10, "kernel_constraint": null, "kernel_regularizer": null}, "inbound_nodes": [[["average_pooling2d_1", 0, 0, {}]]]}, {"name": "conv2d_9", "class_name": "Conv2D", "config": {"kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "uniform", "seed": null, "scale": 1.0, "mode": "fan_avg"}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "name": "conv2d_9", "data_format": "channels_last", "activity_regularizer": null, "kernel_size": [1, 1], "dilation_rate": [1, 1], "use_bias": true, "strides": [1, 1], "trainable": true, "padding": "valid", "bias_regularizer": null, "bias_constraint": null, "activation": "relu", "filters": 10, "kernel_constraint": null, "kernel_regularizer": null}, "inbound_nodes": [[["activation_8", 0, 0, {}]]]}, {"name": "conv2d_10", "class_name": "Conv2D", "config": {"kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "uniform", "seed": null, "scale": 1.0, "mode": "fan_avg"}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "name": "conv2d_10", "data_format": "channels_last", "activity_regularizer": null, "kernel_size": [1, 1], "dilation_rate": [1, 1], "use_bias": true, "strides": [1, 1], "trainable": true, "padding": "valid", "bias_regularizer": null, "bias_constraint": null, "activation": 
"relu", "filters": 10, "kernel_constraint": null, "kernel_regularizer": null}, "inbound_nodes": [[["activation_4", 0, 0, {}]]]}, {"name": "max_pooling2d_4", "class_name": "MaxPooling2D", "config": {"pool_size": [4, 4], "padding": "valid", "data_format": "channels_last", "name": "max_pooling2d_4", "trainable": true, "strides": [4, 4]}, "inbound_nodes": [[["conv2d_11", 0, 0, {}]]]}, {"name": "average_pooling2d_4", "class_name": "AveragePooling2D", "config": {"pool_size": [4, 4], "padding": "valid", "data_format": "channels_last", "name": "average_pooling2d_4", "trainable": true, "strides": [4, 4]}, "inbound_nodes": [[["conv2d_12", 0, 0, {}]]]}, {"name": "max_pooling2d_5", "class_name": "MaxPooling2D", "config": {"pool_size": [8, 8], "padding": "valid", "data_format": "channels_last", "name": "max_pooling2d_5", "trainable": true, "strides": [8, 8]}, "inbound_nodes": [[["conv2d_13", 0, 0, {}]]]}, {"name": "average_pooling2d_5", "class_name": "AveragePooling2D", "config": {"pool_size": [8, 8], "padding": "valid", "data_format": "channels_last", "name": "average_pooling2d_5", "trainable": true, "strides": [8, 8]}, "inbound_nodes": [[["conv2d_14", 0, 0, {}]]]}, {"name": "flatten_1", "class_name": "Flatten", "config": {"name": "flatten_1", "trainable": true}, "inbound_nodes": [[["conv2d_9", 0, 0, {}]]]}, {"name": "flatten_2", "class_name": "Flatten", "config": {"name": "flatten_2", "trainable": true}, "inbound_nodes": [[["conv2d_10", 0, 0, {}]]]}, {"name": "flatten_3", "class_name": "Flatten", "config": {"name": "flatten_3", "trainable": true}, "inbound_nodes": [[["max_pooling2d_4", 0, 0, {}]]]}, {"name": "flatten_4", "class_name": "Flatten", "config": {"name": "flatten_4", "trainable": true}, "inbound_nodes": [[["average_pooling2d_4", 0, 0, {}]]]}, {"name": "flatten_5", "class_name": "Flatten", "config": {"name": "flatten_5", "trainable": true}, "inbound_nodes": [[["max_pooling2d_5", 0, 0, {}]]]}, {"name": "flatten_6", "class_name": "Flatten", "config": {"name": "flatten_6", "trainable": true}, "inbound_nodes": [[["average_pooling2d_5", 0, 0, {}]]]}, {"name": "dropout_1", "class_name": "Dropout", "config": {"name": "dropout_1", "trainable": true, "rate": 0.2}, "inbound_nodes": [[["flatten_1", 0, 0, {}]]]}, {"name": "dropout_2", "class_name": "Dropout", "config": {"name": "dropout_2", "trainable": true, "rate": 0.2}, "inbound_nodes": [[["flatten_2", 0, 0, {}]]]}, {"name": "dropout_3", "class_name": "Dropout", "config": {"name": "dropout_3", "trainable": true, "rate": 0.2}, "inbound_nodes": [[["flatten_3", 0, 0, {}]]]}, {"name": "dropout_4", "class_name": "Dropout", "config": {"name": "dropout_4", "trainable": true, "rate": 0.2}, "inbound_nodes": [[["flatten_4", 0, 0, {}]]]}, {"name": "dropout_5", "class_name": "Dropout", "config": {"name": "dropout_5", "trainable": true, "rate": 0.2}, "inbound_nodes": [[["flatten_5", 0, 0, {}]]]}, {"name": "dropout_6", "class_name": "Dropout", "config": {"name": "dropout_6", "trainable": true, "rate": 0.2}, "inbound_nodes": [[["flatten_6", 0, 0, {}]]]}, {"name": "dense_1", "class_name": "Dense", "config": {"units": 3, "bias_initializer": {"class_name": "Zeros", "config": {}}, "name": "dense_1", "bias_constraint": null, "activity_regularizer": null, "use_bias": true, "bias_regularizer": null, "activation": "relu", "trainable": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "uniform", "seed": null, "scale": 1.0, "mode": "fan_avg"}}, "kernel_constraint": null, "kernel_regularizer": null}, "inbound_nodes": [[["dropout_1", 0, 
0, {}]]]}, {"name": "dense_2", "class_name": "Dense", "config": {"units": 3, "bias_initializer": {"class_name": "Zeros", "config": {}}, "name": "dense_2", "bias_constraint": null, "activity_regularizer": null, "use_bias": true, "bias_regularizer": null, "activation": "relu", "trainable": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "uniform", "seed": null, "scale": 1.0, "mode": "fan_avg"}}, "kernel_constraint": null, "kernel_regularizer": null}, "inbound_nodes": [[["dropout_2", 0, 0, {}]]]}, {"name": "dense_4", "class_name": "Dense", "config": {"units": 3, "bias_initializer": {"class_name": "Zeros", "config": {}}, "name": "dense_4", "bias_constraint": null, "activity_regularizer": null, "use_bias": true, "bias_regularizer": null, "activation": "relu", "trainable": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "uniform", "seed": null, "scale": 1.0, "mode": "fan_avg"}}, "kernel_constraint": null, "kernel_regularizer": null}, "inbound_nodes": [[["dropout_3", 0, 0, {}]]]}, {"name": "dense_5", "class_name": "Dense", "config": {"units": 3, "bias_initializer": {"class_name": "Zeros", "config": {}}, "name": "dense_5", "bias_constraint": null, "activity_regularizer": null, "use_bias": true, "bias_regularizer": null, "activation": "relu", "trainable": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "uniform", "seed": null, "scale": 1.0, "mode": "fan_avg"}}, "kernel_constraint": null, "kernel_regularizer": null}, "inbound_nodes": [[["dropout_4", 0, 0, {}]]]}, {"name": "dense_7", "class_name": "Dense", "config": {"units": 3, "bias_initializer": {"class_name": "Zeros", "config": {}}, "name": "dense_7", "bias_constraint": null, "activity_regularizer": null, "use_bias": true, "bias_regularizer": null, "activation": "relu", "trainable": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "uniform", "seed": null, "scale": 1.0, "mode": "fan_avg"}}, "kernel_constraint": null, "kernel_regularizer": null}, "inbound_nodes": [[["dropout_5", 0, 0, {}]]]}, {"name": "dense_8", "class_name": "Dense", "config": {"units": 3, "bias_initializer": {"class_name": "Zeros", "config": {}}, "name": "dense_8", "bias_constraint": null, "activity_regularizer": null, "use_bias": true, "bias_regularizer": null, "activation": "relu", "trainable": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "uniform", "seed": null, "scale": 1.0, "mode": "fan_avg"}}, "kernel_constraint": null, "kernel_regularizer": null}, "inbound_nodes": [[["dropout_6", 0, 0, {}]]]}, {"name": "multiply_2", "class_name": "Multiply", "config": {"name": "multiply_2", "trainable": true}, "inbound_nodes": [[["dense_1", 0, 0, {}], ["dense_2", 0, 0, {}]]]}, {"name": "multiply_4", "class_name": "Multiply", "config": {"name": "multiply_4", "trainable": true}, "inbound_nodes": [[["dense_4", 0, 0, {}], ["dense_5", 0, 0, {}]]]}, {"name": "multiply_6", "class_name": "Multiply", "config": {"name": "multiply_6", "trainable": true}, "inbound_nodes": [[["dense_7", 0, 0, {}], ["dense_8", 0, 0, {}]]]}, {"name": "dense_3", "class_name": "Dense", "config": {"units": 6, "bias_initializer": {"class_name": "Zeros", "config": {}}, "name": "dense_3", "bias_constraint": null, "activity_regularizer": null, "use_bias": true, "bias_regularizer": null, "activation": "relu", "trainable": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "uniform", 
"seed": null, "scale": 1.0, "mode": "fan_avg"}}, "kernel_constraint": null, "kernel_regularizer": null}, "inbound_nodes": [[["multiply_2", 0, 0, {}]]]}, {"name": "dense_6", "class_name": "Dense", "config": {"units": 6, "bias_initializer": {"class_name": "Zeros", "config": {}}, "name": "dense_6", "bias_constraint": null, "activity_regularizer": null, "use_bias": true, "bias_regularizer": null, "activation": "relu", "trainable": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "uniform", "seed": null, "scale": 1.0, "mode": "fan_avg"}}, "kernel_constraint": null, "kernel_regularizer": null}, "inbound_nodes": [[["multiply_4", 0, 0, {}]]]}, {"name": "dense_9", "class_name": "Dense", "config": {"units": 6, "bias_initializer": {"class_name": "Zeros", "config": {}}, "name": "dense_9", "bias_constraint": null, "activity_regularizer": null, "use_bias": true, "bias_regularizer": null, "activation": "relu", "trainable": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "uniform", "seed": null, "scale": 1.0, "mode": "fan_avg"}}, "kernel_constraint": null, "kernel_regularizer": null}, "inbound_nodes": [[["multiply_6", 0, 0, {}]]]}, {"name": "multiply_1", "class_name": "Multiply", "config": {"name": "multiply_1", "trainable": true}, "inbound_nodes": [[["flatten_1", 0, 0, {}], ["flatten_2", 0, 0, {}]]]}, {"name": "multiply_3", "class_name": "Multiply", "config": {"name": "multiply_3", "trainable": true}, "inbound_nodes": [[["flatten_3", 0, 0, {}], ["flatten_4", 0, 0, {}]]]}, {"name": "multiply_5", "class_name": "Multiply", "config": {"name": "multiply_5", "trainable": true}, "inbound_nodes": [[["flatten_5", 0, 0, {}], ["flatten_6", 0, 0, {}]]]}, {"name": "pred_age_stage1", "class_name": "Dense", "config": {"units": 3, "bias_initializer": {"class_name": "Zeros", "config": {}}, "name": "pred_age_stage1", "bias_constraint": null, "activity_regularizer": null, "use_bias": true, "bias_regularizer": null, "activation": "relu", "trainable": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "uniform", "seed": null, "scale": 1.0, "mode": "fan_avg"}}, "kernel_constraint": null, "kernel_regularizer": null}, "inbound_nodes": [[["dense_3", 0, 0, {}]]]}, {"name": "pred_age_stage2", "class_name": "Dense", "config": {"units": 3, "bias_initializer": {"class_name": "Zeros", "config": {}}, "name": "pred_age_stage2", "bias_constraint": null, "activity_regularizer": null, "use_bias": true, "bias_regularizer": null, "activation": "relu", "trainable": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "uniform", "seed": null, "scale": 1.0, "mode": "fan_avg"}}, "kernel_constraint": null, "kernel_regularizer": null}, "inbound_nodes": [[["dense_6", 0, 0, {}]]]}, {"name": "pred_age_stage3", "class_name": "Dense", "config": {"units": 3, "bias_initializer": {"class_name": "Zeros", "config": {}}, "name": "pred_age_stage3", "bias_constraint": null, "activity_regularizer": null, "use_bias": true, "bias_regularizer": null, "activation": "relu", "trainable": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "uniform", "seed": null, "scale": 1.0, "mode": "fan_avg"}}, "kernel_constraint": null, "kernel_regularizer": null}, "inbound_nodes": [[["dense_9", 0, 0, {}]]]}, {"name": "delta_s1", "class_name": "Dense", "config": {"units": 1, "bias_initializer": {"class_name": "Zeros", "config": {}}, "name": "delta_s1", "bias_constraint": null, 
"activity_regularizer": null, "use_bias": true, "bias_regularizer": null, "activation": "tanh", "trainable": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "uniform", "seed": null, "scale": 1.0, "mode": "fan_avg"}}, "kernel_constraint": null, "kernel_regularizer": null}, "inbound_nodes": [[["multiply_1", 0, 0, {}]]]}, {"name": "delta_s2", "class_name": "Dense", "config": {"units": 1, "bias_initializer": {"class_name": "Zeros", "config": {}}, "name": "delta_s2", "bias_constraint": null, "activity_regularizer": null, "use_bias": true, "bias_regularizer": null, "activation": "tanh", "trainable": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "uniform", "seed": null, "scale": 1.0, "mode": "fan_avg"}}, "kernel_constraint": null, "kernel_regularizer": null}, "inbound_nodes": [[["multiply_3", 0, 0, {}]]]}, {"name": "delta_s3", "class_name": "Dense", "config": {"units": 1, "bias_initializer": {"class_name": "Zeros", "config": {}}, "name": "delta_s3", "bias_constraint": null, "activity_regularizer": null, "use_bias": true, "bias_regularizer": null, "activation": "tanh", "trainable": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "uniform", "seed": null, "scale": 1.0, "mode": "fan_avg"}}, "kernel_constraint": null, "kernel_regularizer": null}, "inbound_nodes": [[["multiply_5", 0, 0, {}]]]}, {"name": "local_delta_stage1", "class_name": "Dense", "config": {"units": 3, "bias_initializer": {"class_name": "Zeros", "config": {}}, "name": "local_delta_stage1", "bias_constraint": null, "activity_regularizer": null, "use_bias": true, "bias_regularizer": null, "activation": "tanh", "trainable": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "uniform", "seed": null, "scale": 1.0, "mode": "fan_avg"}}, "kernel_constraint": null, "kernel_regularizer": null}, "inbound_nodes": [[["dense_3", 0, 0, {}]]]}, {"name": "local_delta_stage2", "class_name": "Dense", "config": {"units": 3, "bias_initializer": {"class_name": "Zeros", "config": {}}, "name": "local_delta_stage2", "bias_constraint": null, "activity_regularizer": null, "use_bias": true, "bias_regularizer": null, "activation": "tanh", "trainable": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "uniform", "seed": null, "scale": 1.0, "mode": "fan_avg"}}, "kernel_constraint": null, "kernel_regularizer": null}, "inbound_nodes": [[["dense_6", 0, 0, {}]]]}, {"name": "local_delta_stage3", "class_name": "Dense", "config": {"units": 3, "bias_initializer": {"class_name": "Zeros", "config": {}}, "name": "local_delta_stage3", "bias_constraint": null, "activity_regularizer": null, "use_bias": true, "bias_regularizer": null, "activation": "tanh", "trainable": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"distribution": "uniform", "seed": null, "scale": 1.0, "mode": "fan_avg"}}, "kernel_constraint": null, "kernel_regularizer": null}, "inbound_nodes": [[["dense_9", 0, 0, {}]]]}, {"name": "pred_a", "class_name": "Lambda", "config": {"output_shape": [1], "function": 
["\u00e3\u0006\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u000e\u0000\u0000\u0000\u0007\u0000\u0000\u0000S\u0000\u0000\u0000s\u0016\u0002\u0000\u0000|\u0000\u0000d\u0001\u0000\u0019d\u0000\u0000d\u0000\u0000\u0085\u0002\u0000d\u0001\u0000f\u0002\u0000\u0019d\u0001\u0000\u0014}\u0006\u0000|\u0000\u0000d\u0001\u0000\u0019d\u0000\u0000d\u0000\u0000\u0085\u0002\u0000d\u0001\u0000f\u0002\u0000\u0019d\u0001\u0000\u0014}\u0007\u0000|\u0000\u0000d\u0001\u0000\u0019d\u0000\u0000d\u0000\u0000\u0085\u0002\u0000d\u0001\u0000f\u0002\u0000\u0019d\u0001\u0000\u0014}\b\u0000|\u0001\u0000|\u0002\u0000\u0014|\u0003\u0000\u0014}\t\u0000d\u0002\u0000}\n\u0000xU\u0000t\u0000\u0000d\u0001\u0000|\u0001\u0000\u0083\u0002\u0000D]D\u0000}\u000b\u0000|\u0006\u0000|\u000b\u0000|\u0004\u0000|\u0000\u0000d\u0003\u0000\u0019d\u0000\u0000d\u0000\u0000\u0085\u0002\u0000|\u000b\u0000f\u0002\u0000\u0019\u0014\u0017|\u0000\u0000d\u0001\u0000\u0019d\u0000\u0000d\u0000\u0000\u0085\u0002\u0000|\u000b\u0000f\u0002\u0000\u0019\u0014\u0017}\u0006\u0000q~\u0000W|\u0006\u0000|\u0001\u0000d\u0004\u0000|\u0005\u0000|\u0000\u0000d\u0005\u0000\u0019\u0014\u0017\u0014\u001b}\u0006\u0000xU\u0000t\u0000\u0000d\u0001\u0000|\u0002\u0000\u0083\u0002\u0000D]D\u0000}\f\u0000|\u0007\u0000|\f\u0000|\u0004\u0000|\u0000\u0000d\u0006\u0000\u0019d\u0000\u0000d\u0000\u0000\u0085\u0002\u0000|\f\u0000f\u0002\u0000\u0019\u0014\u0017|\u0000\u0000d\u0004\u0000\u0019d\u0000\u0000d\u0000\u0000\u0085\u0002\u0000|\f\u0000f\u0002\u0000\u0019\u0014\u0017}\u0007\u0000q\u00f0\u0000W|\u0007\u0000|\u0001\u0000d\u0004\u0000|\u0005\u0000|\u0000\u0000d\u0005\u0000\u0019\u0014\u0017\u0014\u001b|\u0002\u0000d\u0004\u0000|\u0005\u0000|\u0000\u0000d\u0007\u0000\u0019\u0014\u0017\u0014\u001b}\u0007\u0000xU\u0000t\u0000\u0000d\u0001\u0000|\u0003\u0000\u0083\u0002\u0000D]D\u0000}\r\u0000|\b\u0000|\r\u0000|\u0004\u0000|\u0000\u0000d\b\u0000\u0019d\u0000\u0000d\u0000\u0000\u0085\u0002\u0000|\r\u0000f\u0002\u0000\u0019\u0014\u0017|\u0000\u0000d\t\u0000\u0019d\u0000\u0000d\u0000\u0000\u0085\u0002\u0000|\r\u0000f\u0002\u0000\u0019\u0014\u0017}\b\u0000qv\u0001W|\b\u0000|\u0001\u0000d\u0004\u0000|\u0005\u0000|\u0000\u0000d\u0005\u0000\u0019\u0014\u0017\u0014\u001b|\u0002\u0000d\u0004\u0000|\u0005\u0000|\u0000\u0000d\u0007\u0000\u0019\u0014\u0017\u0014\u001b|\u0003\u0000d\u0004\u0000|\u0005\u0000|\u0000\u0000d\n\u0000\u0019\u0014\u0017\u0014\u001b}\b\u0000|\u0006\u0000|\u0007\u0000\u0017|\b\u0000\u0017|\n\u0000\u0014}\u0006\u0000|\u0006\u0000S)\u000bN\u00e9\u0000\u0000\u0000\u0000\u00e9e\u0000\u0000\u0000\u00e9\u0006\u0000\u0000\u0000\u00e9\u0001\u0000\u0000\u0000\u00e9\u0003\u0000\u0000\u0000\u00e9\u0007\u0000\u0000\u0000\u00e9\u0004\u0000\u0000\u0000\u00e9\b\u0000\u0000\u0000\u00e9\u0002\u0000\u0000\u0000\u00e9\u0005\u0000\u0000\u0000)\u0001\u00da\u0005range)\u000e\u00da\u0001x\u00da\u0002s1\u00da\u0002s2\u00da\u0002s3\u00da\flambda_local\u00da\blambda_d\u00da\u0001a\u00da\u0001b\u00da\u0001c\u00da\u0001A\u00da\u0001V\u00da\u0001i\u00da\u0001j\u00da\u0001k\u00a9\u0000r\u001a\u0000\u0000\u0000\u00faE/home/shamangary/Desktop/codeDemo/age-gender/training/SSRNET_model.py\u00da\tmerge_age\u00fd\u0003\u0000\u0000s \u0000\u0000\u0000\u0000\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u000e\u0001\u0006\u0003\u0016\u0001B\u0001\u001a\u0002\u0016\u0001B\u0001.\u0002\u0016\u0001B\u0001B\u0003\u0012\u0001", null, null], "arguments": {"s3": 3, "s1": 3, "lambda_d": 1.0, "s2": 3, "lambda_local": 1.0}, "output_shape_type": "raw", "function_type": "lambda", "name": "pred_a", "trainable": true}, 
"inbound_nodes": [[["pred_age_stage1", 0, 0, {}], ["pred_age_stage2", 0, 0, {}], ["pred_age_stage3", 0, 0, {}], ["delta_s1", 0, 0, {}], ["delta_s2", 0, 0, {}], ["delta_s3", 0, 0, {}], ["local_delta_stage1", 0, 0, {}], ["local_delta_stage2", 0, 0, {}], ["local_delta_stage3", 0, 0, {}]]]}], "output_layers": [["pred_a", 0, 0]], "input_layers": [["input_1", 0, 0]]}} \ No newline at end of file diff --git a/pre-trained/wiki/ssrnet_3_3_3_64_1.0_1.0/ssrnet_3_3_3_64_1.0_1.0.png b/pre-trained/wiki/ssrnet_3_3_3_64_1.0_1.0/ssrnet_3_3_3_64_1.0_1.0.png new file mode 100644 index 0000000..81d7d13 Binary files /dev/null and b/pre-trained/wiki/ssrnet_3_3_3_64_1.0_1.0/ssrnet_3_3_3_64_1.0_1.0.png differ diff --git a/run_CA.sh b/run_CA.sh new file mode 100644 index 0000000..ea8e348 --- /dev/null +++ b/run_CA.sh @@ -0,0 +1,4 @@ +python ./SSRNET/CA.py './data/megaage_test.npz' 4 4 +python ./MobileNet_and_DenseNet/CA_M.py './data/megaage_test.npz' 1 +python ./MobileNet_and_DenseNet/CA_D.py './data/megaage_test.npz' 4 + diff --git a/run_megaage_DenseNet.sh b/run_megaage_DenseNet.sh new file mode 100644 index 0000000..648aa7f --- /dev/null +++ b/run_megaage_DenseNet.sh @@ -0,0 +1,2 @@ +KERAS_BACKEND=tensorflow python ./MobileNet_and_DenseNet/TYY_train_others.py --input1 ./data/megaage_train.npz --input2 ./data/megaage_test.npz --netType 4 --db megaage --batch_size 50 + diff --git a/run_megaage_MobileNet.sh b/run_megaage_MobileNet.sh new file mode 100644 index 0000000..3c5f024 --- /dev/null +++ b/run_megaage_MobileNet.sh @@ -0,0 +1,2 @@ +KERAS_BACKEND=tensorflow python ./MobileNet_and_DenseNet/TYY_train_others.py --input1 ./data/megaage_train.npz --input2 ./data/megaage_test.npz --netType 1 --db megaage --batch_size 50 + diff --git a/run_ssrnet_megaage.sh b/run_ssrnet_megaage.sh new file mode 100644 index 0000000..30d61b4 --- /dev/null +++ b/run_ssrnet_megaage.sh @@ -0,0 +1,11 @@ +max1=4 +max2=4 +for i in `seq 4 $max1` +do + #echo "$i" + for j in `seq 4 $max2` + do + #echo "$j" 50 + KERAS_BACKEND=tensorflow python ./SSRNET/SSRNET_train.py --input1 ./data/megaage_train.npz --input2 ./data/megaage_test.npz --db megaage --netType1 $i --netType2 $j --batch_size 50 + done +done