diff --git a/.gitignore b/.gitignore
index 99695e5f..2b2946a7 100644
--- a/.gitignore
+++ b/.gitignore
@@ -207,5 +207,6 @@
 lib/mgltools_x86_64Linux2_1.5.7/MGLToolsPckgs/AutoDockTools/Utilities24/*
 lib/mgltools_x86_64Linux2_1.5.7p1.tar.gz
 log_test/
-slurm_tests/
+slurm_out_DDP/
 /*.sh
+results/model_checkpoints/ours/*.model*
diff --git a/docs/req.txt b/docs/req.txt
new file mode 100644
index 00000000..f8fbb52b
--- /dev/null
+++ b/docs/req.txt
@@ -0,0 +1,122 @@
+absl-py==1.4.0
+aiosignal==1.3.1
+astor==0.8.1
+asttokens==2.2.1
+attrs==23.1.0
+autograd==1.5
+autograd-gamma==0.5.0
+backcall==0.2.0
+biopython==1.79
+certifi==2023.5.7
+charset-normalizer==3.1.0
+click==8.1.7
+cloudpickle==2.2.1
+cmake==3.26.4
+comm==0.1.3
+contourpy==1.0.7
+cycler==0.11.0
+debugpy==1.6.7
+decorator==5.1.1
+dm-tree==0.1.8
+executing==1.2.0
+fastjsonschema==2.17.1
+filelock==3.12.2
+flake8==6.1.0
+fonttools==4.39.4
+formulaic==0.6.1
+frozenlist==1.4.0
+fsspec==2023.6.0
+future==0.18.3
+grpcio==1.57.0
+huggingface-hub==0.16.4
+idna==3.4
+interface-meta==1.3.0
+ipykernel==6.23.1
+ipython==8.13.2
+jedi==0.18.2
+Jinja2==3.1.2
+joblib==1.2.0
+jsonschema==4.17.3
+jupyter_client==8.2.0
+jupyter_core==5.3.0
+kiwisolver==1.4.4
+lifelines==0.27.7
+lit==16.0.5
+MarkupSafe==2.1.3
+matplotlib==3.7.1
+matplotlib-inline==0.1.6
+mccabe==0.7.0
+mpmath==1.3.0
+msgpack==1.0.5
+nbformat==5.9.0
+nest-asyncio==1.5.6
+networkx==3.1
+numpy==1.23.5
+nvidia-cublas-cu11==11.10.3.66
+nvidia-cuda-cupti-cu11==11.7.101
+nvidia-cuda-nvrtc-cu11==11.7.99
+nvidia-cuda-runtime-cu11==11.7.99
+nvidia-cudnn-cu11==8.5.0.96
+nvidia-cufft-cu11==10.9.0.58
+nvidia-curand-cu11==10.2.10.91
+nvidia-cusolver-cu11==11.4.0.1
+nvidia-cusparse-cu11==11.7.4.91
+nvidia-nccl-cu11==2.14.3
+nvidia-nvtx-cu11==11.7.91
+packaging==23.1
+pandas==1.5.3
+parso==0.8.3
+pexpect==4.8.0
+pickleshare==0.7.5
+Pillow==9.5.0
+platformdirs==3.5.1
+plotly==5.14.1
+ProDy==2.4.1
+prompt-toolkit==3.0.38
+protobuf==4.24.1
+psutil==5.9.5
+ptyprocess==0.7.0
+pure-eval==0.2.2
+pyarrow==12.0.1
+pycodestyle==2.11.0
+pyflakes==3.1.0
+Pygments==2.15.1
+pyparsing==3.0.9
+pyrsistent==0.19.3
+python-dateutil==2.8.2
+pytz==2023.3
+PyYAML==6.0.1
+pyzmq==25.1.0
+rapidfuzz==3.3.0
+ray==2.6.3
+rdkit==2023.3.1
+regex==2023.6.3
+requests==2.31.0
+safetensors==0.3.1
+scikit-learn==1.2.2
+scipy==1.10.1
+seaborn==0.11.2
+six==1.16.0
+stack-data==0.6.2
+statannotations==0.6.0
+submitit==1.4.5
+sympy==1.12
+tabulate==0.9.0
+tenacity==8.2.2
+tensorboardX==2.6.2.2
+thefuzz==0.20.0
+threadpoolctl==3.1.0
+tokenizers==0.13.3
+torch==2.0.1
+torch-geometric==2.3.1
+torchsummary==1.5.1
+tornado==6.3.2
+tqdm==4.65.0
+traitlets==5.9.0
+transformers==4.31.0
+triton==2.0.0
+typing_extensions==4.6.3
+tzdata==2023.3
+urllib3==2.0.2
+wcwidth==0.2.6
+wrapt==1.15.0
diff --git a/docs/requirements.txt b/docs/requirements.txt
index cf9d60df..34160dc3 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -1,29 +1,29 @@
-numpy
-pandas
-tqdm
-rdkit
-scipy
+numpy==1.23.5
+pandas==1.5.3
+tqdm==4.65.0
+rdkit==2023.3.1
+scipy==1.10.1
 
 # for generating figures:
-matplotlib
-seaborn
-statannotations
+matplotlib==3.7.1
+seaborn==0.11.2
+statannotations==0.6.0
 
-lifelines # used for concordance index calc
+lifelines==0.27.7 # used for concordance index calc
 #biopython # used for cmap
 
 # model building
-torch
-torch_geometric
-transformers # huggingface needed for esm
+torch==2.0.1
+torch-geometric==2.3.1
+transformers==4.31.0 # huggingface needed for esm
 
 # optional:
-torchsummary
-tabulate # for torch_geometric.nn.summary
-ipykernel
-plotly
-requests
+torchsummary==1.5.1
+tabulate==0.9.0 # for torch_geometric.nn.summary
+ipykernel==6.23.1
+plotly==5.14.1
+requests==2.31.0
 
 #ray[tune]
-submitit
-ProDy
+submitit==1.4.5
+ProDy==2.4.1
diff --git a/playground.py b/playground.py
index 2827ab04..d82e4a9c 100644
--- a/playground.py
+++ b/playground.py
@@ -1,8 +1,31 @@
-# %%
-from src.data_analysis.figures import prepare_df, fig3_edge_feat
-df = prepare_df('results/model_media/model_stats.csv')
+#%%
+from src.data_processing.datasets import PDBbindDataset
+from src.utils import config as cfg
+import pandas as pd
+import matplotlib.pyplot as plt
+
+# d0 = pd.read_csv(f'{cfg.DATA_ROOT}/DavisKibaDataset/davis/nomsa_anm/full/XY.csv', index_col=0)
+d0 = pd.read_csv(f'{cfg.DATA_ROOT}/PDBbindDataset/nomsa_anm/full/XY.csv', index_col=0)
+
+d0['len'] = d0.prot_seq.str.len()
 
 # %%
-fig3_edge_feat(df, show=True, exclude=[])
+n, bins, patches = plt.hist(d0['len'], bins=20)
+# Set labels and title
+plt.xlabel('Protein Sequence length')
+plt.ylabel('Frequency')
+plt.title('Histogram of Protein Sequence length (PDBbind)')
+
+# Add counts to the center of each bin
+for count, x, patch in zip(n, bins, patches):
+    plt.text(x + (bins[1] - bins[0]) / 2, count, str(int(count)), ha='center', va='bottom')
+
+cutoff = 1500
+print(f"Eliminating codes above {cutoff} length would reduce the dataset by: {len(d0[d0['len'] > cutoff])}")
+print(f"\t - Eliminates {len(d0[d0['len'] > cutoff].index.unique())} unique proteins")
+
+# %% -d PDBbind -f nomsa -e anm
+from src.utils.loader import Loader
+d1 = Loader.load_dataset('PDBbind', 'nomsa', 'anm')
 
 # %%
diff --git a/results/model_checkpoints/ours/EDIM_kibaD_nomsaF_binaryE_40B_0.0001LR_0.4D_2000E.model_tmp b/results/model_checkpoints/ours/EDIM_kibaD_nomsaF_binaryE_40B_0.0001LR_0.4D_2000E.model_tmp
index 3d6342da..15d70377 100644
Binary files a/results/model_checkpoints/ours/EDIM_kibaD_nomsaF_binaryE_40B_0.0001LR_0.4D_2000E.model_tmp and b/results/model_checkpoints/ours/EDIM_kibaD_nomsaF_binaryE_40B_0.0001LR_0.4D_2000E.model_tmp differ
diff --git a/results/model_media/davis/DGM_davisD_nomsaF_af2E_64B_0.0001LR_0.4D_2000E_his.png b/results/model_media/davis/DGM_davisD_nomsaF_af2E_64B_0.0001LR_0.4D_2000E_his.png
new file mode 100644
index 00000000..c04eb063
Binary files /dev/null and b/results/model_media/davis/DGM_davisD_nomsaF_af2E_64B_0.0001LR_0.4D_2000E_his.png differ
diff --git a/results/model_media/davis/DGM_davisD_nomsaF_af2E_64B_0.0001LR_0.4D_2000E_loss.png b/results/model_media/davis/DGM_davisD_nomsaF_af2E_64B_0.0001LR_0.4D_2000E_loss.png
new file mode 100644
index 00000000..e2f027ca
Binary files /dev/null and b/results/model_media/davis/DGM_davisD_nomsaF_af2E_64B_0.0001LR_0.4D_2000E_loss.png differ
diff --git a/results/model_media/davis/DGM_davisD_nomsaF_af2E_64B_0.0001LR_0.4D_2000E_scatter.png b/results/model_media/davis/DGM_davisD_nomsaF_af2E_64B_0.0001LR_0.4D_2000E_scatter.png
new file mode 100644
index 00000000..9210df37
Binary files /dev/null and b/results/model_media/davis/DGM_davisD_nomsaF_af2E_64B_0.0001LR_0.4D_2000E_scatter.png differ
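The playground.py cell above only counts how many rows a sequence-length cutoff would drop; a minimal sketch of actually applying such a filter (a hypothetical helper, not part of this diff, assuming the same XY.csv layout with a `prot_seq` column) might look like:

```python
import pandas as pd

def filter_by_seq_len(df: pd.DataFrame, cutoff: int = 1500) -> pd.DataFrame:
    """Keep only rows whose protein sequence is at most `cutoff` residues long.

    Assumes `df` has a `prot_seq` string column, as in the XY.csv files
    loaded in playground.py; `cutoff=1500` mirrors the value probed there.
    """
    return df[df['prot_seq'].str.len() <= cutoff]
```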
diff --git a/results/model_media/davis/train_log/DGM_davisD_nomsaF_af2E_64B_0.0001LR_0.4D_2000E.json b/results/model_media/davis/train_log/DGM_davisD_nomsaF_af2E_64B_0.0001LR_0.4D_2000E.json
new file mode 100644
index 00000000..d1d88e55
--- /dev/null
+++ b/results/model_media/davis/train_log/DGM_davisD_nomsaF_af2E_64B_0.0001LR_0.4D_2000E.json
@@ -0,0 +1,4007 @@
+{
+    "train_loss": [
+        3.4795771015634953,
+        1.114933820614398,
+        1.0901854529244002,
+        ... (remaining per-epoch train_loss values elided; the series decreases to a final value of 0.1652754031304942) ...
+    ],
+    "val_loss": [
+        0.9335313753589339,
+        0.924082006773223,
+        ... (per-epoch val_loss values elided; the series decreases to roughly 0.447 at the point where this hunk is cut off) ...
+ 0.47134469352465164, + 0.4652782753862835, + 0.44711311701963574, + 0.44947326634555, + 0.447059034462259, + 0.4704630458347864, + 0.4462405618915395, + 0.4470514438479491, + 0.5313140690154599, + 0.46042055627913214, + 0.45209191147865646, + 0.4536968721462788, + 0.45227280777496914, + 0.4639509732799564, + 0.4663141419245537, + 0.4604264339229659, + 0.45038559409484785, + 0.47430821597125666, + 0.45080788726519805, + 0.4403324948134073, + 0.4831906725126116, + 0.4487806672936447, + 0.4726155846577097, + 0.4700583572216008, + 0.43849648372776323, + 0.46007204422494397, + 0.44559406537218427, + 0.45170043212989264, + 0.48162214480765647, + 0.44607876067722985, + 0.4524390118724272, + 0.4589698228968621, + 0.46245659757203056, + 0.4844780950073112, + 0.4554216510156909, + 0.4631534812076058, + 0.4506315137057201, + 0.45314504239839787, + 0.4389451947854073, + 0.4774033769145203, + 0.4501635001531964, + 0.4434225411311237, + 0.438870769521758, + 0.45313291330429545, + 0.4471001678074295, + 0.48119687119944266, + 0.4501648939811908, + 0.4883720672831101, + 0.4568191206211771, + 0.4524765127387819, + 0.452816343165266, + 0.43486748341693665, + 0.4422783558041809, + 0.49507095912375243, + 0.4475288726234399, + 0.45504522651754087, + 0.4469761633266424, + 0.44072704608223157, + 0.4439284339504134, + 0.47233520066251716, + 0.45573794392112177, + 0.44817666436891, + 0.44867251714964845, + 0.4464609193205631, + 0.4694188353959876, + 0.44205017909078614, + 0.43817021106698795, + 0.44284531235462055, + 0.4411101177532185, + 0.5099461825524011, + 0.45161567718099355, + 0.4575857886508026, + 0.43942213058503315, + 0.44986835167409206, + 0.4794997413941335, + 0.46005092784838303, + 0.47006753810590296, + 0.44886376830792235, + 0.446625504042933, + 0.444925672197512, + 0.4403738347429443, + 0.4601519613858292, + 0.43676503386054916, + 0.4522841770186737, + 0.4450737530809736, + 0.451427878326048, + 0.45708225040225836, + 0.45605044141816703, + 0.4317519017327552, + 0.4497345432856768, + 0.4352933710833895, + 0.4488497630720857, + 0.46752938201287814, + 0.447151577424096, + 0.46916549847430916, + 0.4399310623816943, + 0.4343153592123164, + 0.4577489498921711, + 0.4574032297431309, + 0.43775600587294955, + 0.4612839778672661, + 0.44954354317514633, + 0.44254028898253833, + 0.45244420187689527, + 0.4447905178657611, + 0.443076451253032, + 0.4537808093161363, + 0.4523978609877699, + 0.43965352767744387, + 0.45604936429299414, + 0.45958053797949106, + 0.4428187001179575, + 0.4389729475205177, + 0.4572161982777407, + 0.448084496159066, + 0.44124246874581213, + 0.4387483963067137, + 0.43492361749098485, + 0.44148318800105935, + 0.4351490364857929, + 0.45772109133458405, + 0.4335768043320454, + 0.4582694436792973, + 0.44792076078706683, + 0.44121192297872924, + 0.43911114037631627, + 0.4271584311389471, + 0.44525378897695034, + 0.4327502255764557, + 0.4843815671679888, + 0.43715071473991923, + 0.4367745742916747, + 0.43768794824174384, + 0.4527444014535792, + 0.4437139232202595, + 0.43663192429053393, + 0.4597106203588698, + 0.4414101560386885, + 0.44847554547469254, + 0.4352713495413216, + 0.44402046729279077, + 0.4291687950817109, + 0.42097990545722574, + 0.46451877201932645, + 0.43636869996059785, + 0.4578797480600137, + 0.4511650630909413, + 0.4438528618550702, + 0.5032850930608971, + 0.42935946125425806, + 0.42207391340660094, + 0.47115681082283595, + 0.4296426537577012, + 0.46282068227605044, + 0.4343317288819336, + 0.4768697671217682, + 0.43520333780907094, + 0.4325674381122909, + 0.43172371615503874, + 
0.4347047119543118, + 0.43157869840860774, + 0.461572542822029, + 0.42927823525489023, + 0.4448641146540783, + 0.44570321190353157, + 0.4464524216069233, + 0.4570004147750771, + 0.4326970319190245, + 0.44156999763262045, + 0.448137591914375, + 0.45519396695928427, + 0.41447465354513435, + 0.419119532163756, + 0.4594401248867381, + 0.47362605969259597, + 0.4391487885504435, + 0.4293698868094235, + 0.4281786666568065, + 0.43740913376551244, + 0.4708240374026861, + 0.43057300183927116, + 0.4345752674701583, + 0.43033688588753727, + 0.43066378049274057, + 0.47982705743608833, + 0.4520611879200695, + 0.41607876497342344, + 0.429838995449245, + 0.4269131743611702, + 0.43898957051114057, + 0.4400916293886004, + 0.4457505695805278, + 0.42880056025435054, + 0.4205685332313094, + 0.4307735294214976, + 0.4477263471009159, + 0.43672320790621755, + 0.4350049652604629, + 0.4266512465850531, + 0.4316620315468121, + 0.423447926565448, + 0.43678762757158635, + 0.4282251020701116, + 0.42886768100762984, + 0.48056034372269135, + 0.4431704889347716, + 0.42889677170846285, + 0.42189881182022637, + 0.43113390722251294, + 0.4435103327066273, + 0.4309461219381774, + 0.44124323953671946, + 0.4347160573714696, + 0.46620638249188906, + 0.4443610627640737, + 0.4224100779959016, + 0.4360760977654971, + 0.44341569566977734, + 0.4517240748682525, + 0.4329356477929927, + 0.43347512475599576, + 0.4315943178725834, + 0.4485977605344606, + 0.4335812943248564, + 0.42442188660725544, + 0.4213310912246411, + 0.4276249707124763, + 0.42541516732940776, + 0.4324517394276842, + 0.4258505802914126, + 0.43071374483406544, + 0.4282688243749916, + 0.41846978720075084, + 0.4315321458979388, + 0.43941438156711543, + 0.43685166012880433, + 0.4323558664966739, + 0.4309412599844939, + 0.42908633446558786, + 0.4375118510493928, + 0.43683313578240696, + 0.43011078377173323, + 0.4167513819031782, + 0.42681018273730803, + 0.4417625035933705, + 0.4240314872560861, + 0.42233452128509624, + 0.43459087710463157, + 0.4286012871648975, + 0.440019060639948, + 0.42070044877017726, + 0.4349914041800809, + 0.4184767652667411, + 0.42293000454842317, + 0.4233488061366623, + 0.43370871128433425, + 0.4181512635537808, + 0.4359606504339077, + 0.44037911978185823, + 0.4201503482788963, + 0.4439296163112411, + 0.4465068154677283, + 0.4547112604967602, + 0.4254315409849843, + 0.42141021497967734, + 0.4334114205304776, + 0.43324393176463555, + 0.4282297431896238, + 0.43406712572259887, + 0.42606808013743314, + 0.42936671044520586, + 0.4327294742207691, + 0.4234644554393447, + 0.43056450373715843, + 0.4330339720055117, + 0.4379334163285144, + 0.4241939232806149, + 0.4324934406145273, + 0.4132937388494611, + 0.4348462004842156, + 0.42836986009395966, + 0.4206019790676098, + 0.42206423554022837, + 0.4238073992398669, + 0.4206439849386579, + 0.41838463767097617, + 0.4336242936298494, + 0.4245001600862926, + 0.42793790291382366, + 0.42827492911727977, + 0.4296054489500618, + 0.42707385345483606, + 0.4269802293489667, + 0.43306979969012266, + 0.42404916312965646, + 0.4292607392594421, + 0.4213617478551738, + 0.42255972924581525, + 0.4212572403459648, + 0.43552493038789736, + 0.4269425174153066, + 0.42671537088766776, + 0.4242956216596371, + 0.4232627181374245, + 0.4297063737941916, + 0.4294756127979465, + 0.42196364391028235, + 0.4286270012861401, + 0.4179406935543976, + 0.4316725130699089, + 0.4209279187084378, + 0.4135255520738175, + 0.419872805559704, + 0.4285656244180206, + 0.4189586613881503, + 0.41980106119603, + 0.4192176436848493, + 0.42185753927586356, + 
0.4165449049905874, + 0.4301460422474243, + 0.4210525971290696, + 0.41406586639957665, + 0.42641187560714217, + 0.419929233237939, + 0.41390136291485524, + 0.4297937243017773, + 0.42869609568278183, + 0.42021649783861864, + 0.4247205048273115, + 0.4228014819521416, + 0.4191722595082272, + 0.4236022932012059, + 0.4240005649626255, + 0.41910758792200004, + 0.4113521523566147, + 0.4243754021185653, + 0.4126638040457771, + 0.4181576568491595, + 0.41752035890543915, + 0.4195167096002716, + 0.4195777850260999, + 0.41469264667197736, + 0.4147864759535245, + 0.41582341406636103, + 0.41290662179285986, + 0.41452907486160734, + 0.41909612369288324, + 0.4284156639503477, + 0.4216156570220609, + 0.4244226244070729, + 0.41487035995005106, + 0.42622228353992914, + 0.4216162766856344, + 0.4248408794418261, + 0.41801127019520523, + 0.4119492641184479, + 0.41542688696442737, + 0.4132858931198311, + 0.420743355605974, + 0.4161351049526433, + 0.41674538775418035, + 0.41082072128167213, + 0.4193770557555937, + 0.41926228328425763, + 0.4166920778098638, + 0.41035751675245474, + 0.4140533419887778, + 0.4145415566972983, + 0.40990364998983947, + 0.40477725488650007, + 0.4142777916707058, + 0.4222426289571044, + 0.4166857484378852, + 0.40986135020806536, + 0.42246535291854775, + 0.420080309106118, + 0.41059865237912163, + 0.4272964133415371, + 0.42396603173414327, + 0.4160880013369024, + 0.4215213567317139, + 0.4104813540268325, + 0.4135721356977466, + 0.42035722846428497, + 0.4101425014162922, + 0.41192679458965914, + 0.42753266200230905, + 0.41891766796091007, + 0.4196757018343424, + 0.41577878512907773, + 0.4133757144394938, + 0.4107581197926977, + 0.4148172353541114, + 0.42692669296039915, + 0.4122554913021462, + 0.4137400791210973, + 0.4162017080867056, + 0.41112620452591014, + 0.41593636920088495, + 0.4128811159398933, + 0.41243485143438546, + 0.41426069854313263, + 0.4091474675502547, + 0.4129365817390625, + 0.4150958138381374, + 0.4089710752165917, + 0.4145595222362317, + 0.42004538801974495, + 0.41386096649195836, + 0.4151051053650799, + 0.4152338422920919, + 0.41547680865345604, + 0.4070654575442187, + 0.4161892856278902, + 0.4145421755396376, + 0.417483569966862, + 0.415291139625681, + 0.4114344078900657, + 0.41444845745370357, + 0.41582465405656915, + 0.41357006258381857, + 0.411038820654579, + 0.4203137821801331, + 0.4182375761295628, + 0.41716053131613473, + 0.4189137550900974, + 0.41543654022161325, + 0.4081945508521091, + 0.4129915246385676, + 0.41902304997252626, + 0.41520231697991816, + 0.4218685400581149, + 0.4237323824427617, + 0.41448935990869673, + 0.41222063015574467, + 0.4186202147418795, + 0.4148582409328336, + 0.41434368908939295, + 0.417476074737193, + 0.41185823109243874, + 0.41316135797609127, + 0.409580203955082, + 0.4190007482716085, + 0.4133946701641316, + 0.4170695903974221, + 0.41935773624572903, + 0.41195196842106624, + 0.4170503997365418, + 0.41967716926972015, + 0.4119494028905731, + 0.4120979218379311, + 0.41192386789581453, + 0.4191358840325847, + 0.4131403263865808, + 0.4134145013522357, + 0.4157883390106018, + 0.4167894745579637, + 0.4086525775466884, + 0.4128917781787965, + 0.40955140020030184, + 0.4120481557265916, + 0.4079697304633041, + 0.416347152801514, + 0.4200347242105003, + 0.41694950205324544, + 0.41172772924840934, + 0.40788069007280486, + 0.40963280806317925, + 0.4086208732434265, + 0.411566336677187, + 0.4149919214795101, + 0.4110197843705385, + 0.40895848319379857, + 0.4075023140100276, + 0.4089867940056138, + 0.41673420516409626, + 0.41154590082537057, + 
0.4111970674155442, + 0.41577420469484816, + 0.40352414255840297, + 0.4123092183802763, + 0.4135446028768733, + 0.4129959872377146, + 0.41451000016557216, + 0.4075882940188698, + 0.4122848940884148, + 0.4041375103102916, + 0.4085071629277714, + 0.4071568875907637, + 0.4103038970725206, + 0.4106280975772635, + 0.4162397976577505, + 0.4154566112138918, + 0.4161659761448391, + 0.40881323710630607, + 0.41131794025663426, + 0.4084724978260372, + 0.41968482657609024, + 0.40674235117773566, + 0.416023929307298, + 0.41503767786122375, + 0.40917576422899676, + 0.41325065320732235, + 0.4118443428002217, + 0.41700771086059674, + 0.4157352998673045, + 0.4084288548303606, + 0.41120239507163997, + 0.41686832962264103, + 0.4077555218087661, + 0.406485298382746, + 0.40932615269886574, + 0.4114363183034584, + 0.412499187666032, + 0.4125285494728419, + 0.4071069625801022, + 0.40914928561845876, + 0.408369386530704, + 0.41497457334670523, + 0.4137195836469207, + 0.40836601215414703, + 0.4044212583293025, + 0.4194959555157845, + 0.4118867264149468, + 0.41251994086728105, + 0.40816999106344, + 0.4075748050021028, + 0.41716285589221946, + 0.40360090856404934, + 0.41083189341209264, + 0.40332219252456486, + 0.4032748258930793, + 0.40846761399829434, + 0.4124012978028749, + 0.4224729032746678, + 0.41448465749461955, + 0.4063843399800016, + 0.4130389626509935, + 0.4130089081198726, + 0.41736314194428775, + 0.4103220242591899, + 0.42433114386854554, + 0.4212574233685661, + 0.42225519673261064, + 0.417580729608586, + 0.4124518425036829, + 0.41179620720820664, + 0.41406036428373266, + 0.41290614338662557, + 0.4174873032050369, + 0.4164632844141401, + 0.41016005625082785, + 0.41868628996282653, + 0.40353935396389873, + 0.41256969134733285, + 0.41166261652150477, + 0.41081983878646733, + 0.41009789024246857, + 0.4114387717297928, + 0.4163138918343263, + 0.41618473795698147, + 0.41623118968488165, + 0.40655699632707576, + 0.4133537840224681, + 0.41613093660840683, + 0.41474496535248245, + 0.41933464570699824, + 0.4098783741811412, + 0.42262102926716855, + 0.4169205506382834, + 0.4141550938530481, + 0.40810483062143804, + 0.4003651948600157, + 0.4017600893807273, + 0.4086468138734547, + 0.4112496365370411, + 0.4119867479122426, + 0.4078986914777804, + 0.4069817284977509, + 0.4079513023809894, + 0.4183548822411862, + 0.41020362801171595, + 0.4125053415229321, + 0.40378766938406246, + 0.40206223863060586, + 0.40154523437503603, + 0.4081740825634409, + 0.40861953699799336, + 0.4106285661818338, + 0.4033118086669635, + 0.4193590920445074, + 0.41208896924154426, + 0.4139859667756473, + 0.40477638462376175, + 0.4107340385747628, + 0.4101728207100203, + 0.4105630509869155, + 0.40870649621153815, + 0.40199664187157, + 0.41533417705163034, + 0.40490464947398996, + 0.41497608684205817, + 0.4075431587306135, + 0.4144176031923448, + 0.4129640437718278, + 0.4064840905282282, + 0.409405162935044, + 0.4073994843208272, + 0.4137221384823889, + 0.41273903252010274, + 0.41244426168724085, + 0.4100120418031862, + 0.4150374684654905, + 0.41286703780465556, + 0.41407289194150665, + 0.4249990200016486, + 0.41346586254708795, + 0.40726473991969175, + 0.4038594107588996, + 0.4175382253470952, + 0.410737780738956, + 0.4045602309456824, + 0.4042617463868658, + 0.4103240772054288, + 0.4092724932226069, + 0.40656921816306474, + 0.41232070176857116, + 0.4111402807782566, + 0.4121659067275924, + 0.4149717608309837, + 0.4105974575302199, + 0.4042006047384348, + 0.4173203853323408, + 0.4133810054197017, + 0.4123156609357861, + 0.40707558028288593, + 
0.4086489579032945, + 0.41363504522925487, + 0.4136309396480615, + 0.41776477011507784, + 0.4117772405076286, + 0.4033114231044791, + 0.41235138629765616, + 0.4130831431425379, + 0.40995856858385, + 0.412424708240787, + 0.40680962222193, + 0.4070830175693592, + 0.4140182468564371, + 0.4133230831332343, + 0.40950000156020827, + 0.415304194098217, + 0.41829801073965983, + 0.41648590373275196, + 0.4164925331690187, + 0.4049068996214571, + 0.41430899975365837, + 0.3985857132736229, + 0.4080755397039668, + 0.40270623520684795, + 0.41624245224976103, + 0.41431112650468055, + 0.4126543963641819, + 0.41880413084594614, + 0.41159661314399587, + 0.41289294041242736, + 0.40712489828254783, + 0.4112280018668136, + 0.4145139404509277, + 0.4072948638159964, + 0.4181524520213513, + 0.4096448521823217, + 0.40887168256322975, + 0.41952978533378843, + 0.40415976875975385, + 0.40708720372007834, + 0.40612215231142373, + 0.399658620797867, + 0.4061243556763815, + 0.4181625315458919, + 0.41406668361235893, + 0.40575404519610025, + 0.41310741423133196, + 0.4134081618369395, + 0.4129807509041553, + 0.4136972918602861, + 0.40940995532108226, + 0.412597237377549, + 0.4084373220735554, + 0.4201241826648702, + 0.411822579884598, + 0.41471239525075676, + 0.4091507217115419, + 0.41165320281415124, + 0.41905261857344, + 0.40921758399496827, + 0.40648031581456406, + 0.40915260432824574, + 0.40958605435393425, + 0.40642504629162746, + 0.41120458718227304, + 0.41110118306064775, + 0.4262041691969068, + 0.40690201268602244, + 0.40843417902942747, + 0.4079868175701805, + 0.403168733606277, + 0.4141506949242245, + 0.4097771794692127, + 0.41230181744954875, + 0.40931003100653784, + 0.411432848286653, + 0.40309060339780484, + 0.40584158883733756, + 0.41055441661434167, + 0.4113014192426699, + 0.41611792941061454, + 0.41536675185489, + 0.41096008970381936, + 0.41968472863410844, + 0.4170585542856513, + 0.4084823611521405, + 0.41081671784733137, + 0.4076889149171462, + 0.4143891853720719, + 0.41027398282565863, + 0.41212820703335834, + 0.4104022560590554, + 0.41186492444451334, + 0.41427999349948746, + 0.4022366136094839, + 0.4042767498961083, + 0.41163670369955385, + 0.4199368347193661, + 0.4077442976400641, + 0.4199022605911444, + 0.413992084818386, + 0.4090982359763154, + 0.4072767359042621, + 0.40603783624828793, + 0.4170220778222479, + 0.4060363038177298, + 0.41071245210680546, + 0.4126675129923767, + 0.4127404278463117, + 0.4035375587089853, + 0.41172135812888405, + 0.4090266076116519, + 0.4137866456142586, + 0.42055402558711963, + 0.41005775154284807, + 0.4058584744191688, + 0.4012786580051002, + 0.41350531964132603, + 0.4066811425829023, + 0.4136982917805891, + 0.41154950951939734, + 0.41429821271246864, + 0.4101774195441976, + 0.4094789371365155, + 0.4090338599458134, + 0.40995489031268767, + 0.4103075048693901, + 0.4135377621404705, + 0.41177109158192965, + 0.4107513208101686, + 0.4187287772112303, + 0.4189137286081424, + 0.418862747661162, + 0.4104253371466336, + 0.4154145056976552, + 0.40844079718732723, + 0.41179243420028006, + 0.41595422066166066, + 0.4104103591627158, + 0.412946764060863, + 0.4123893650223339, + 0.40759180620332935, + 0.4083322894996103, + 0.4086683507117888, + 0.40129711693557707, + 0.4069133329827784, + 0.41024234777053253, + 0.41197349998654553, + 0.4177394621276661, + 0.41500621773656865, + 0.41573945816312713, + 0.4173129149497507, + 0.4300291208378242, + 0.4108912229775856, + 0.4092982933254224, + 0.41430226736702025, + 0.42478963517098, + 0.4134959660068094, + 0.41650230384117964, + 
0.40900682609872485, + 0.41396344704654714, + 0.43034654649723647, + 0.4063269219907891, + 0.4181585088005269, + 0.4115779915338625, + 0.4159839937397603, + 0.4169456601021406, + 0.4060779787098948, + 0.4158385635186594, + 0.4116240150001629, + 0.4238845150684938, + 0.41357585626057064, + 0.41462521258559404, + 0.410196915051252, + 0.4068941019105968, + 0.4166827405696613, + 0.41435107520437275, + 0.40998759767060855, + 0.4113716005092062, + 0.4102388250201171, + 0.4212121420284308, + 0.4121678395389134, + 0.41003623363845376, + 0.4054191404695461, + 0.411014972264757, + 0.42699093987618614, + 0.4167008764571105, + 0.4036326703423148, + 0.41087916001687635, + 0.4050238334710467, + 0.41495637630071974, + 0.4140179990816092, + 0.4010799337866838, + 0.409687436323704, + 0.42070391716476047, + 0.41399641528108116, + 0.40327031963059434, + 0.4174113645845943, + 0.4150591025726222, + 0.4146455737683699, + 0.4155300094134873, + 0.416132907152844, + 0.42115769353593985, + 0.4173611317908027, + 0.40694080672920274, + 0.42265437484551827, + 0.4082042783721472, + 0.4176931927226099, + 0.41904748190553737, + 0.40881890720060177, + 0.42023629828801623, + 0.4100670311372201, + 0.40495741249659384, + 0.41232073476851877, + 0.41329839754389075, + 0.40464022978267167, + 0.4091028317672443, + 0.41980822593651956, + 0.4004755182781398, + 0.41614952200451744, + 0.4146402161264711, + 0.41836535355643084, + 0.40587008483302983, + 0.4051588364831253, + 0.42120429878528026, + 0.41303951601477584, + 0.41478749234786094, + 0.4158147184656548, + 0.415784648223527, + 0.42973707267336303, + 0.4193051869882291, + 0.4098844824956082, + 0.4085015131201109, + 0.42493328692786314, + 0.4165460123457824, + 0.412045130434031, + 0.41856120462509117, + 0.42410497785211826, + 0.4160518222804809, + 0.42323657367682166, + 0.41151566570118553, + 0.41111820915324165, + 0.41780605847927055, + 0.4112774466301072, + 0.4201122465702887, + 0.41048381715486554, + 0.41935435090841644, + 0.42241566504463146, + 0.4073317731065048, + 0.4178912078875684, + 0.4124003023886041, + 0.4145345567933122, + 0.407509791001718, + 0.41974181751340994, + 0.40862027822700125, + 0.40957773396360647, + 0.4170350330481913, + 0.4121097263313182, + 0.4205060663795018, + 0.4112011289512536, + 0.4267678673343692, + 0.4045570938564513, + 0.41912957948490814, + 0.4075133741590316, + 0.41294675591558666, + 0.42543226208173385, + 0.4070858401841099, + 0.417132237779341, + 0.41518935863353795, + 0.41139755694387964, + 0.4109999718873397, + 0.413227817676354, + 0.4087224943264712, + 0.4119431396855203, + 0.4104094494984526, + 0.4106253065148611, + 0.41309171923152777, + 0.4212700174412812, + 0.41008055056242837, + 0.4123417289444225, + 0.4070923200342804, + 0.4100565559448391, + 0.4066704108782173, + 0.41310685824314336, + 0.41610366590675374, + 0.4072798464039806, + 0.42090708168942237, + 0.4133390862059415, + 0.4142310480566938, + 0.41760041083604016, + 0.4198282614740057, + 0.415317274736362, + 0.411851580224997, + 0.4130822735795569, + 0.4114887816730239, + 0.41350440128007904, + 0.4154079440302904, + 0.41319985870327597, + 0.4168981453846715, + 0.4301776650574301, + 0.41966002095369215, + 0.4242370781519086, + 0.4156760921165266, + 0.4240688775810312, + 0.4093516849224334, + 0.42574945557356364, + 0.40963365167187044, + 0.4177859898646484, + 0.41181819556463184, + 0.4152761251569483, + 0.42284394183788326, + 0.4190813646393666, + 0.40825590348559554, + 0.4132246760469254, + 0.4264875736338107, + 0.4166894428072619, + 0.41801296283032885, + 0.4231390908547758, + 
0.41966701885584096, + 0.418590067499874, + 0.42161069966791925, + 0.41451318944429816, + 0.4232556420029141, + 0.42120897763854137, + 0.4157706606581443, + 0.4203419299573516, + 0.41591864573749027, + 0.41511875291294215, + 0.4173049435054924, + 0.42254073716922785, + 0.4099266163640372, + 0.4194325076152935, + 0.4198575819634994, + 0.4161144294750715, + 0.41456724138950685, + 0.419658505602021, + 0.4202468539777961, + 0.426124948347696, + 0.4157424905165301, + 0.410941374923467, + 0.4164871198742691, + 0.4181211067657189, + 0.41086564376272017, + 0.41663833796400984, + 0.4185751662564302, + 0.409775306799692, + 0.42467116704716795, + 0.4187556290821901, + 0.4159551151780128, + 0.4245140232758232, + 0.4185809211441032, + 0.41553663562871923, + 0.41901170367466123, + 0.41446861081108777, + 0.41249862856373354, + 0.4107547312109407, + 0.4162943985758592, + 0.4180356249233465, + 0.4217911398395106, + 0.4159664522385751, + 0.4143517340113328, + 0.4174258748661605, + 0.40436227895628213, + 0.41222740825754084, + 0.4146290240757441, + 0.4271582492131173, + 0.4226280958796649, + 0.4122320276472475, + 0.42309538647532463, + 0.40756573483256786, + 0.41645388294304925, + 0.4193039079470074, + 0.41230990028428927, + 0.4166640127583852, + 0.42020206305049057, + 0.4163541562409829, + 0.4339490746168177, + 0.41143789683574933, + 0.41913818177746853, + 0.4119150400561605, + 0.40925152294600947, + 0.41486924585795193, + 0.4188385562409404, + 0.4250856225338319, + 0.42000724623516283, + 0.414572491314333, + 0.41423768198127736, + 0.4186309856608125, + 0.4176676853397942, + 0.4135741393648736, + 0.4090169199676577, + 0.4101641604793258, + 0.43488858241851075, + 0.4193538571404212, + 0.4128042625329133, + 0.415704742671274, + 0.4202308228072386, + 0.4120970888888342, + 0.41381353804203885, + 0.4163978944568004, + 0.4187331489469532, + 0.42254315244803287, + 0.41748727416700643, + 0.42399286377527146, + 0.41569320989124803, + 0.4117810633261283, + 0.41478351431736804, + 0.4065585309391558, + 0.41594258584740124, + 0.42012386279352737, + 0.4160880397134663, + 0.41453390656628786, + 0.41229340654251206, + 0.42133953078629693, + 0.4153505143780342, + 0.411626729291722, + 0.4132142280451144, + 0.4214680679977629, + 0.42065460035962093, + 0.4171152370374488, + 0.41517241615498357, + 0.40653938655589666, + 0.42203097309633764, + 0.4093853283999994, + 0.4231647166838545, + 0.41403898730388156, + 0.4128338453693963, + 0.4102696151821874, + 0.42069533528031217, + 0.40648858182375197, + 0.4096395128973476, + 0.40854194515850395, + 0.42226439329628507, + 0.4108756087902609, + 0.4160214892016602, + 0.4125487261360673, + 0.4260932163679567, + 0.41308244256019266, + 0.42606333113761374, + 0.41546747438880394, + 0.424103680124972, + 0.4222442799338909, + 0.4120209973695202, + 0.4216140333130328, + 0.41846141151820676, + 0.4143469075459744, + 0.4150859805573102, + 0.41587034954294644, + 0.4082985274068525, + 0.40318060753887275, + 0.4096895488783839, + 0.4220191847214111, + 0.4148437025458248, + 0.4173853115872075, + 0.4012947881274158, + 0.41442550590980554, + 0.4210831835010575, + 0.4131523301197297, + 0.4140286403784083, + 0.4155908420013831, + 0.4048626149837535, + 0.4016579642876699, + 0.4169321516022572, + 0.4064085992260674, + 0.4106429557280574, + 0.4076672095309376, + 0.4180811745565101, + 0.4100005428013189, + 0.413514790612364, + 0.4231400644032123, + 0.4157802319386974, + 0.41548131793514703, + 0.41364659779102786, + 0.42654774997272005, + 0.4132207975009173, + 0.42354324334250437, + 0.424650103028398, + 
0.4193291105342644, + 0.40818210207583866, + 0.41855807086128904, + 0.4160535431811181, + 0.42053990874125663, + 0.4138513651743541, + 0.4157401139508811, + 0.41436204320558795, + 0.4241154232814037, + 0.4164418932379466, + 0.4135676335566171, + 0.4152104343014324, + 0.4177002628845082, + 0.41453215401776583, + 0.41760058575385733, + 0.4121861811524824, + 0.414753941107926, + 0.4067011844245312, + 0.41260498737961904, + 0.4113264803157147, + 0.4082560785837299, + 0.41706101472031465, + 0.41860752680678043, + 0.41258705942638463, + 0.4135580632440827, + 0.4124405258828673, + 0.41233896146244975, + 0.4110870516818503, + 0.4214170775781183, + 0.41683788865845167, + 0.4159150638455606, + 0.4145924517209135, + 0.4139197619996317, + 0.41141378222306707, + 0.413934978828563, + 0.40926205812897737, + 0.4097840694518274, + 0.40633809806414595, + 0.4227684440354452, + 0.42559150562105375, + 0.4115243100500463, + 0.41508833392341016, + 0.4109806274291654, + 0.41464118080709694, + 0.42272147382976505, + 0.41130200389180455, + 0.4034348174590496, + 0.41429198251622124, + 0.4168761153502957, + 0.4203512430540286, + 0.40891970233338804, + 0.4049332407897885, + 0.4096874931989157, + 0.40524711285631737, + 0.40938066034172865, + 0.4075999805835359, + 0.4113461733947549, + 0.41575783627922647, + 0.41476450944015675, + 0.41133793368530663, + 0.41469967579874006, + 0.4022175300279227, + 0.4024350106508633, + 0.41493174905949476, + 0.41898396294614626, + 0.4055773254911638, + 0.4081876749684022, + 0.4042403585829205, + 0.40665864102724614, + 0.4160793453503056, + 0.41076100500725693, + 0.4050231009251033, + 0.411615132232723, + 0.40787745556166477, + 0.4085311413054233, + 0.4123041145668021, + 0.39743481326149777, + 0.4087679313651397, + 0.41099407244473696, + 0.4179606010894413, + 0.40593244336069684, + 0.4074042617879889, + 0.40961292596361804, + 0.411248362711732, + 0.40933554270051664, + 0.41118385169766436, + 0.40990119532849034, + 0.4096145505723341, + 0.40996050439349824, + 0.39622791978004185, + 0.41070080687210936, + 0.41406122801076295, + 0.4061693091264359, + 0.4154900400443812, + 0.41071359170904465, + 0.4141998901310296, + 0.4177289498291667, + 0.41855535590413795, + 0.41505540043378814, + 0.41461859848451516, + 0.4022129235489284, + 0.4207464352341207, + 0.4162507158977425, + 0.4193801207280637, + 0.40708085330729576, + 0.41612402488902694, + 0.41196659412083175, + 0.4073943937002458, + 0.41334952202969755, + 0.41567134667623223, + 0.4231993196501761, + 0.42239292382778926, + 0.4261178219306242, + 0.41243670257466397, + 0.4196924054843333, + 0.4084573027110942, + 0.4083250096199386, + 0.40856443588753516, + 0.40292984565198625, + 0.40631660121574026, + 0.41653729736319056, + 0.41183857145749597, + 0.41525920689657697, + 0.4100978642805864, + 0.41230391501955205, + 0.41006396991408267, + 0.4113562353405818, + 0.4143046359936504, + 0.4131352234145869, + 0.4153450597735608, + 0.4137504463964749, + 0.4080616463046602, + 0.4051023218148064, + 0.4120643822471206, + 0.41727367779442476, + 0.4071507343905978, + 0.4007422251313034, + 0.41792664287419506, + 0.405089741046338, + 0.4185478943147008, + 0.40529366700818925, + 0.4154186847457743, + 0.41316393203274143, + 0.41535466472598037, + 0.41627496070957376, + 0.41205390562991734, + 0.4089369066084897, + 0.4166042811209437, + 0.4196942047970167, + 0.40916263728918834, + 0.41814651815524406, + 0.41302721936549264, + 0.4121056053693831, + 0.40705179458777624, + 0.41264042989907623, + 0.41506383632076904, + 0.4078217570242249, + 0.41787528561974835, + 
0.40900347702478507, + 0.41230335624878417, + 0.41616114810028154, + 0.403486201089159, + 0.41268935445797345, + 0.4194793849702641, + 0.4092436562590908, + 0.4067381876976351, + 0.4113653644023504, + 0.4039976399634843, + 0.4091907453637206, + 0.4059884483121214, + 0.4077896091368292, + 0.40805652758875943, + 0.41290758440078684, + 0.40686295738661377, + 0.4066821640235898, + 0.41628732437169197, + 0.41308051474270935, + 0.40797502335171093, + 0.4117179422882264, + 0.4172464299747093, + 0.40938793263479095, + 0.3985132547500341, + 0.41097206541124487, + 0.41800240192906524, + 0.4209438004915643, + 0.403004238052982, + 0.4118582691324105, + 0.39815032411285717, + 0.4112736153968043, + 0.4201478964558033, + 0.4194340601357196, + 0.41050213127417245, + 0.3954416657646146, + 0.40687146896000387, + 0.41375897218771884, + 0.4030715787261927, + 0.40278620736511506, + 0.4067442192671993, + 0.40565352350139344, + 0.41031732142943406, + 0.40393092236532224, + 0.41046233350821043, + 0.409784958244848, + 0.4030089288479876, + 0.39921336506908434, + 0.4049276555837739, + 0.40783281882328953, + 0.41791806525662134, + 0.4073335040289828, + 0.4070253354793354, + 0.4159891407167458, + 0.40643401903798804, + 0.41205471629813395, + 0.4113534072277379, + 0.4201452959108713, + 0.4066929013908679, + 0.40927658276252815, + 0.40694506998356106, + 0.4170557551475687, + 0.4096655511941366, + 0.4177694986713544, + 0.4212973335676867, + 0.4137182317714652, + 0.40443476043899707, + 0.40743246060573374, + 0.4083916673021715, + 0.4136230508061683, + 0.41325185397315933, + 0.40922959529005154, + 0.4105775461319591, + 0.4102297496036677, + 0.41885288598765014, + 0.410986465881528, + 0.4053946056291866, + 0.41474386324483453, + 0.41934527402601973, + 0.4019263654608158, + 0.4094834552455248, + 0.4105432370738329, + 0.41029187678055756, + 0.4136816251506705, + 0.4151477433637331, + 0.4010732719907537, + 0.40025411212548573, + 0.41814371529693267, + 0.409883427494408, + 0.4068699890555328, + 0.411019608570116, + 0.40653399365407455, + 0.4006172033133107, + 0.40610311721937487, + 0.4117575061251916, + 0.40477218380724284, + 0.4037013868573527, + 0.40754247033518093, + 0.4081655354137816, + 0.41160101772782026, + 0.41492004135785543, + 0.4130274673326589, + 0.410037715732039, + 0.41468479170251393, + 0.40779271750184504, + 0.4011521519050407, + 0.40842409658058465, + 0.4136849925119389, + 0.40342518291562673, + 0.4177901807706803, + 0.4148280754553802, + 0.4125210260073958, + 0.41338749983444123, + 0.42436097533462325, + 0.4083638878516428, + 0.4053802511973671, + 0.40664793334840593, + 0.4168748192757409, + 0.4023481670756946, + 0.40746738356107887, + 0.4056275642512407, + 0.4141252236946931, + 0.418249595242188, + 0.4101695787875265, + 0.40941760126936616, + 0.41148954969780194, + 0.3976588369569863, + 0.40574371053599345, + 0.40781943754637207, + 0.4146068642680209, + 0.40769306439703895, + 0.41055891034193337, + 0.4046898335673968, + 0.4044510570352229, + 0.4046996503552098, + 0.4127233245577056, + 0.41959824246789695, + 0.4116716816105232, + 0.4091754323023412, + 0.4128634065171987, + 0.41197440318469686, + 0.4066103350601422, + 0.413254319416845, + 0.40643766602379794, + 0.41068206087600073, + 0.41227546834979084, + 0.41301460955393215, + 0.4116902068135855, + 0.40359230932106427, + 0.39885565206033946, + 0.40202144584011124, + 0.4171202442603712, + 0.40809400720805256, + 0.4197560039490623, + 0.40563699799594877, + 0.40687050461090857, + 0.41272301306446973, + 0.4219525681666868, + 0.40646517015588673, + 
0.41838277430582343, + 0.4068125482122211, + 0.412942354084605, + 0.41564286745700013, + 0.41625140547536, + 0.41511225261416734, + 0.4149749426376176, + 0.4043446427195981, + 0.41538231213116494, + 0.4192253041521484, + 0.4119340303359027, + 0.41333206873969175, + 0.4164911192422986, + 0.4057654442908445, + 0.4111714661399753, + 0.4081320085375489, + 0.4097493308869636, + 0.40831095944968576, + 0.412763844741478, + 0.40868784276181425, + 0.39848096624684887, + 0.4096053907743898, + 0.4077169504775868, + 0.4098123884987612, + 0.40453293897166, + 0.4051867398735814, + 0.41983348416903504, + 0.41617977850609866, + 0.4071712785314404, + 0.4114987802686696, + 0.4022648221124773, + 0.40427407149716443, + 0.40932180152942793, + 0.40343789365927124 + ], + "best_epoch": 1843 +} \ No newline at end of file diff --git a/results/model_media/model_stats.csv b/results/model_media/model_stats.csv index 317775df..add73e42 100644 --- a/results/model_media/model_stats.csv +++ b/results/model_media/model_stats.csv @@ -17,4 +17,5 @@ DGM_davisD_nomsaF_anmE_64B_0.0001LR_0.4D_2000E,0.8250074299111713,0.695567212857 DGM_davisD_nomsaF_af2-anmE_64B_0.0001LR_0.4D_2000E,0.8340067034309678,0.7223194595485749,0.6237215987537702,0.3929565548835356,0.3418820812968863,0.6268624688745814 DGM_kibaD_nomsaF_simpleE_128B_0.0001LR_0.4D_2000E,0.7436957850539295,0.7162112545924394,0.6223745821649189,0.3846365983738171,0.4193476290343476,0.6201907757890447 DGM_kibaD_nomsaF_anmE_128B_0.0001LR_0.4D_2000E,0.7451497238158984,0.7170971688921055,0.6181435377446383,0.3600508469253324,0.4084963823553237,0.6000423709416964 -DGM_kibaD_nomsaF_af2E_128B_0.0001LR_0.4D_2000E,0.7421544771099204,0.7320308314499666,0.6102455648558164,0.35005739125669477,0.4112797514616568,0.5916564807865243 +DGM_kibaD_nomsaF_af2E_128B_0.0001LR_0.4D_2000E,0.7421544771099204,0.7320308314499666,0.6102455648558164,0.3500573912566947,0.4112797514616568,0.5916564807865243 +DGM_davisD_nomsaF_af2E_64B_0.0001LR_0.4D_2000E,0.8371815910480507,0.7268638307639713,0.6356548295003425,0.4133190204335282,0.3667126292760789,0.642898919297216 diff --git a/results/model_media/model_stats_val.csv b/results/model_media/model_stats_val.csv new file mode 100644 index 00000000..e69de29b diff --git a/src/data_analysis/figures.py b/src/data_analysis/figures.py index f23411f1..8914f58d 100644 --- a/src/data_analysis/figures.py +++ b/src/data_analysis/figures.py @@ -73,7 +73,7 @@ def fig1_pro_overlap(df, sel_col='cindex', verbose=False, show=True): # Figure 2 - node feature cindex difference # Features -> nomsa, msa, shannon, and esm -def fig2_pro_feat(df, verbose=False, sel_col='cindex', show=True): +def fig2_pro_feat(df, verbose=False, sel_col='cindex', exclude=[], show=True, add_labels=True): # Extract relevant data filtered_df = df[(df['edge'] == 'binary') & (~df['overlap'])] @@ -115,6 +115,8 @@ def fig2_pro_feat(df, verbose=False, sel_col='cindex', show=True): 'shannon': shannon, 'esm': esm }) + for c in exclude: + plot_data.drop(c, axis=1, inplace=True) # Melt the DataFrame for Seaborn barplot melted_data = pd.melt(plot_data, id_vars=['Dataset'], var_name='Node feature', @@ -126,6 +128,10 @@ def fig2_pro_feat(df, verbose=False, sel_col='cindex', show=True): sns.set_context('poster') ax = sns.barplot(x='Dataset', y=sel_col, hue='Node feature', data=melted_data, palette='deep') + if add_labels: + for i in ax.containers: + ax.bar_label(i, fmt='%.3f', fontsize=13) + # Set the title ax.set_title(f'Node feature performance ({"concordance index" if sel_col == "cindex" else "MSE"})') @@ -152,7 
+158,7 @@ def fig2_pro_feat(df, verbose=False, sel_col='cindex', show=True): # Figure 3 - Edge type cindex difference # Edges -> binary, simple, anm, af2 -def fig3_edge_feat(df, verbose=False, sel_col='cindex', exclude=[], show=True): +def fig3_edge_feat(df, verbose=False, sel_col='cindex', exclude=[], show=True, add_labels=True): # comparing nomsa, msa, shannon, and esm # group by data type @@ -214,6 +220,9 @@ def fig3_edge_feat(df, verbose=False, sel_col='cindex', exclude=[], show=True): sns.set_context('poster') ax = sns.barplot(x='Dataset', y=sel_col, hue='Edge type', data=melted_data, palette='deep') + if add_labels: + for i in ax.containers: + ax.bar_label(i, fmt='%.3f', fontsize=13) # Set the title ax.set_title(f'Edge type performance ({"concordance index" if sel_col == "cindex" else "MSE"})') diff --git a/src/data_processing/datasets.py b/src/data_processing/datasets.py index 63159def..fd807cfb 100644 --- a/src/data_processing/datasets.py +++ b/src/data_processing/datasets.py @@ -27,8 +27,10 @@ # See: https://pytorch-geometric.readthedocs.io/en/latest/tutorial/create_dataset.html # for details on how to create a dataset class BaseDataset(torchg.data.InMemoryDataset, abc.ABC): - FEATURE_OPTIONS = cfg.PRO_FEAT_OPT EDGE_OPTIONS = cfg.EDGE_OPT + FEATURE_OPTIONS = cfg.PRO_FEAT_OPT + LIGAND_EDGE_OPTIONS = cfg.LIG_EDGE_OPT + LIGAND_FEATURE_OPTIONS = cfg.LIG_FEAT_OPT def __init__(self, save_root:str, data_root:str, aln_dir:str, cmap_threshold:float, feature_opt='nomsa', @@ -38,6 +40,8 @@ def __init__(self, save_root:str, data_root:str, aln_dir:str, overwrite=False, max_seq_len:int=None, only_download=False, + ligand_feature:str='original', + ligand_edge:str='binary', *args, **kwargs): """ Base class for datasets. This class is used to create datasets for @@ -88,7 +92,7 @@ def __init__(self, save_root:str, data_root:str, aln_dir:str, self.data_root = data_root self.cmap_threshold = cmap_threshold self.overwrite = overwrite - max_seq_len = 100000 or max_seq_len + max_seq_len = max_seq_len or 100000 assert max_seq_len >= 100, 'max_seq_len cant be smaller than 100.' self.max_seq_len = max_seq_len @@ -110,9 +114,17 @@ def __init__(self, save_root:str, data_root:str, aln_dir:str, f"Invalid edge_opt '{edge_opt}', choose from {self.EDGE_OPTIONS}" self.edge_opt = edge_opt + # check ligand options: + assert ligand_feature in self.LIGAND_FEATURE_OPTIONS, \ + f"Invalid ligand_feature '{ligand_feature}', choose from {self.LIGAND_FEATURE_OPTIONS}" + self.ligand_feature = ligand_feature + assert ligand_edge in self.LIGAND_EDGE_OPTIONS, \ + f"Invalid ligand_edge '{ligand_edge}', choose from {self.LIGAND_EDGE_OPTIONS}" + self.ligand_edge = ligand_edge + # Validating subset subset = subset or 'full' - save_root = os.path.join(save_root, f'{self.feature_opt}_{self.edge_opt}') # e.g.: path/to/root/nomsa_anm + save_root = os.path.join(save_root, f'{self.feature_opt}_{self.edge_opt}_{self.ligand_feature}_{self.ligand_edge}') # e.g.: path/to/root/nomsa_anm_original_binary print('save_root:', save_root) if subset != 'full': @@ -122,9 +134,9 @@ def __init__(self, save_root:str, data_root:str, aln_dir:str, self.subset = subset # checking af2 conf dir if we are creating the dataset from scratch - if not os.path.isdir(save_root): - assert 'af2' not in self.edge_opt or af_conf_dir is not None, f"'af2' edge selected but no af_conf_dir provided!"
- assert af_conf_dir is None or os.path.isdir(af_conf_dir), f"AF configuration dir doesnt exist, {af_conf_dir}" + if not os.path.isdir(save_root) and ('af2' in self.edge_opt): + assert af_conf_dir is not None, f"{self.edge_opt} edge selected but no af_conf_dir provided!" + assert os.path.isdir(af_conf_dir), f"AF configuration dir doesnt exist, {af_conf_dir}" self.af_conf_dir = af_conf_dir self.only_download = only_download @@ -153,6 +165,7 @@ def edgew_p(self, code) -> str: return os.path.join(dirname, f'{code}.npy') def af_conf_files(self, code) -> list[str]: + if 'af2' not in self.edge_opt: return [] # removing () from string since file names cannot include them and localcolabfold replaces them with _ code = re.sub(r'[()]', '_', code) # localcolabfold has 'unrelaxed' as the first part after the code/ID. @@ -320,27 +333,25 @@ def process(self): af_confs = None # Check to see if edge weights already generated: - if os.path.isfile(self.edgew_p(code)) and not self.overwrite: - pro_edge_weight = np.load(self.edgew_p(code)) - else: - pro_edge_weight = get_target_edge_weights(self.pdb_p(code), pro_seq, - edge_opt=self.edge_opt, - cmap=pro_cmap, - n_modes=5, n_cpu=4, - af_confs=af_confs) - np.save(self.edgew_p(code), pro_edge_weight) - - if pro_edge_weight is None: - pro = torchg.data.Data(x=torch.Tensor(pro_feat), - edge_index=torch.LongTensor(edge_idx), - pro_seq=pro_seq, # protein sequence for downstream esm model - prot_id=prot_id) - else: - pro = torchg.data.Data(x=torch.Tensor(pro_feat), - edge_index=torch.LongTensor(edge_idx), - pro_seq=pro_seq, # protein sequence for downstream esm model - prot_id=prot_id, - edge_weight=torch.Tensor(pro_edge_weight[edge_idx[0], edge_idx[1]])) + pro_edge_weight = None + if self.edge_opt != 'binary': + if os.path.isfile(self.edgew_p(code)) and not self.overwrite: + pro_edge_weight = np.load(self.edgew_p(code)) + else: + pro_edge_weight = get_target_edge_weights(self.pdb_p(code), pro_seq, + edge_opt=self.edge_opt, + cmap=pro_cmap, + n_modes=5, n_cpu=4, + af_confs=af_confs) + np.save(self.edgew_p(code), pro_edge_weight) + pro_edge_weight = torch.Tensor(pro_edge_weight[edge_idx[0], edge_idx[1]]) + + + pro = torchg.data.Data(x=torch.Tensor(pro_feat), + edge_index=torch.LongTensor(edge_idx), + pro_seq=pro_seq, # protein sequence for downstream esm model + prot_id=prot_id, + edge_weight=pro_edge_weight) processed_prots[prot_id] = pro ###### Get Ligand Graphs ###### @@ -350,7 +361,8 @@ def process(self): desc='Creating ligand graphs'): if lig_seq not in processed_ligs: try: - mol_feat, mol_edge = smile_to_graph(lig_seq) + mol_feat, mol_edge = smile_to_graph(lig_seq, lig_feature=self.ligand_feature, + lig_edge=self.ligand_edge) except ValueError: errors.append(f'L-{lig_seq}') continue @@ -371,8 +383,8 @@ def process(self): class PDBbindDataset(BaseDataset): # InMemoryDataset is used if the dataset is small and can fit in CPU memory - def __init__(self, save_root='../data/PDBbindDataset/nomsa', - data_root='../data/v2020-other-PL', + def __init__(self, save_root=f'{cfg.DATA_ROOT}/PDBbindDataset', + data_root=f'{cfg.DATA_ROOT}/v2020-other-PL', aln_dir=None, cmap_threshold=8.0, feature_opt='nomsa', *args, **kwargs): """ @@ -468,15 +480,11 @@ def pre_process(self): missing_pid = df_pid.prot_id == '------' df_pid[missing_pid] = df_pid[missing_pid].assign(prot_id = df_pid[missing_pid].index) - ############# get pdb codes based on data root dir ############# - pdb_codes = os.listdir(self.data_root) - # filter out readme and index folders - pdb_codes = [p for p in 
pdb_codes if p != 'index' and p != 'readme'] - - ############# creating MSA: ############# - #NOTE: assuming MSAs are already created, since this would take a long time to do. - # create_aln_files(df_seq, self.aln_p) - if self.aln_dir is not None: + pdb_codes = df_binding.index # pdbcodes + ############# validating codes ############# + if self.aln_dir is not None: # create msa if 'msaF' is selected + #NOTE: assuming MSAs are already created, since this would take a long time to do. + # create_aln_files(df_seq, self.aln_p) # WARNING: use feature_extraction.process_msa method instead # PDBbindProcessor.fasta_to_aln_dir(self.aln_dir, # os.path.join(os.path.dirname(self.aln_dir), @@ -486,11 +494,13 @@ def pre_process(self): valid_codes = [c for c in pdb_codes if os.path.isfile(self.aln_p(c))] # filters out those that do not have aln file print(f'Number of codes with aln files: {len(valid_codes)} out of {len(pdb_codes)}') - pdb_codes = valid_codes + else: # check if exists + valid_codes = [c for c in pdb_codes if os.path.isfile(self.pdb_p(c))] - #TODO: filter out pdbs that dont have confirmations if edge type af2 - - + pdb_codes = valid_codes + #TODO: filter out pdbs that dont have conformations if edge type is af2 + # currently we treat all edges as the same if no conformations are found... + # (see protein_edges.get_target_edge_weights():232) assert len(pdb_codes) > 0, 'Too few PDBCodes, need at least 1...' @@ -534,9 +544,9 @@ class DavisKibaDataset(BaseDataset): - def __init__(self, save_root='../data/DavisKibaDataset/', - data_root='../data/davis_kiba/davis/', - aln_dir='../data/davis_kiba/davis/aln/', + def __init__(self, save_root=f'{cfg.DATA_ROOT}/DavisKibaDataset/', + data_root=f'{cfg.DATA_ROOT}/davis_kiba/davis/', + aln_dir=f'{cfg.DATA_ROOT}/davis_kiba/davis/aln/', cmap_threshold=-0.5, feature_opt='nomsa', *args, **kwargs): """ InMemoryDataset for davis or kiba. This dataset is used to train graph models. @@ -566,6 +576,8 @@ def pdb_p(self, code, safe=True): code = re.sub(r'[()]', '_', code) # davis and kiba dont have their own structures so this must be made using # af or some other method beforehand. + if 'af2' not in self.edge_opt: return None + file = glob(os.path.join(self.af_conf_dir, f'highQ/{code}_unrelaxed_rank_001*.pdb')) # should only be one file assert not safe or len(file) == 1, f'Incorrect pdb pathing, {len(file)}# of structures for {code}.'
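With the ligand options wired into BaseDataset above, a dataset build now specifies both protein and ligand featurization, and the processed files land under a directory that encodes all four choices. A minimal sketch (option values are the current defaults from src/utils/config.py; the exact paths are illustrative assumptions):

    from src.data_processing.datasets import DavisKibaDataset
    from src.utils import config as cfg

    dataset = DavisKibaDataset(
        save_root=f'{cfg.DATA_ROOT}/DavisKibaDataset/davis/',
        data_root=f'{cfg.DATA_ROOT}/davis_kiba/davis/',
        aln_dir=f'{cfg.DATA_ROOT}/davis_kiba/davis/aln/',
        cmap_threshold=-0.5,
        feature_opt='nomsa',        # must be in cfg.PRO_FEAT_OPT
        edge_opt='binary',          # must be in cfg.EDGE_OPT
        ligand_feature='original',  # must be in cfg.LIG_FEAT_OPT
        ligand_edge='binary',       # must be in cfg.LIG_EDGE_OPT
    )
    # processed data is saved under
    # {cfg.DATA_ROOT}/DavisKibaDataset/davis/nomsa_binary_original_binary/full/
    # following the new save_root naming scheme shown above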
diff --git a/src/data_processing/init_dataset.py b/src/data_processing/init_dataset.py index effc5c24..1b4ef65e 100644 --- a/src/data_processing/init_dataset.py +++ b/src/data_processing/init_dataset.py @@ -11,44 +11,53 @@ from src.data_processing.datasets import DavisKibaDataset, PDBbindDataset, PlatinumDataset from src.train_test.utils import train_val_test_split -def create_datasets(data_opt:Iterable[str], feat_opt:Iterable[str], edge_opt:Iterable[str], - pro_overlap:bool, data_root_dir:str) -> None: - for data, FEATURE, EDGE in itertools.product(data_opt, feat_opt, edge_opt): +def create_datasets(data_opt:Iterable[str], feat_opt:Iterable[str], edge_opt:Iterable[str], + pro_overlap:bool, data_root_dir:str, + ligand_features:Iterable[str]=['original'], + ligand_edges:Iterable[str]=['binary']) -> None: + for data, FEATURE, EDGE, ligand_feature, ligand_edge in itertools.product( + data_opt, feat_opt, edge_opt, ligand_features, ligand_edges): + print('\n', data, FEATURE, EDGE) if data in ['davis', 'kiba']: - DATA_ROOT = f'{data_root_dir}/{data}/' if FEATURE == 'msa': # position frequency matrix creation -> important for msa feature - create_pfm_np_files(DATA_ROOT+'/aln/', processes=4) + create_pfm_np_files(f'{data_root_dir}/{data}/aln', processes=4) + dataset = DavisKibaDataset( - save_root=f'../data/DavisKibaDataset/{data}/', - data_root=DATA_ROOT, - aln_dir=f'{DATA_ROOT}/aln/', + save_root=f'{data_root_dir}/DavisKibaDataset/{data}/', + data_root=f'{data_root_dir}/{data}/', + aln_dir=f'{data_root_dir}/{data}/aln/', cmap_threshold=-0.5, feature_opt=FEATURE, - af_conf_dir=f'../colabfold/{data}_af2_out/', - edge_opt=EDGE + af_conf_dir=f'../colabfold/{data}_af2_out/', # colabfold not needed if no structure required methods are used (see config) + edge_opt=EDGE, + ligand_feature=ligand_feature, + ligand_edge=ligand_edge ) elif data == 'PDBbind': - # create_pfm_np_files('../data/PDBbind_aln/', processes=4) dataset = PDBbindDataset( - save_root=f'../data/PDBbindDataset/', - data_root=f'../data/v2020-other-PL/', - aln_dir=f'../data/PDBbind_a3m', + save_root=f'{data_root_dir}/PDBbindDataset/', + data_root=f'{data_root_dir}/v2020-other-PL/', + aln_dir=f'{data_root_dir}/PDBbind_a3m', cmap_threshold=8.0, overwrite=False, # overwrite old cmap.npy files af_conf_dir='../colabfold/pdbbind_af2_out/', feature_opt=FEATURE, edge_opt=EDGE, + ligand_feature=ligand_feature, + ligand_edge=ligand_edge ) elif data == 'Platinum': dataset = PlatinumDataset( - save_root=f'../data/PlatinumDataset/', - data_root=f'../data/PlatinumDataset/raw', + save_root=f'{data_root_dir}/PlatinumDataset/', + data_root=f'{data_root_dir}/PlatinumDataset/raw', aln_dir=None, cmap_threshold=8.0, feature_opt=FEATURE, - edge_opt=EDGE + edge_opt=EDGE, + ligand_feature=ligand_feature, + ligand_edge=ligand_edge ) # saving training, validation, and test sets diff --git a/src/feature_extraction/ligand.py b/src/feature_extraction/ligand.py index eb23ef93..4411296b 100644 --- a/src/feature_extraction/ligand.py +++ b/src/feature_extraction/ligand.py @@ -23,7 +23,7 @@ def atom_features(atom): [atom.GetIsAromatic()])) # mol smile to mol graph edge index -def smile_to_graph(smile): +def smile_to_graph(smile, lig_feature:str, lig_edge:str): try: mol = Chem.MolFromSmiles(smile) except AttributeError as e: diff --git a/src/models/__init__.py b/src/models/__init__.py index 6a2af3d5..e20e89a3 100644 --- a/src/models/__init__.py +++ b/src/models/__init__.py @@ -1,5 +1,5 @@ from src.models.prior_work import DGraphDTA, GraphDTA -from src.models.mut_dta import
EsmDTA +from src.models.pro_mod import EsmDTA from src.models.utils import BaseModel def display_models(): diff --git a/src/models/lig_mod.py b/src/models/lig_mod.py new file mode 100644 index 00000000..ac937fad --- /dev/null +++ b/src/models/lig_mod.py @@ -0,0 +1,43 @@ +from torch import nn +from torch_geometric.nn import (GCNConv, global_mean_pool as gep) + +from src.models.prior_work import DGraphDTA + +class DGraphDTALigand(DGraphDTA): + def __init__(self, ligand_feature='original', ligand_edge='binary', output_dim=128, *args, **kwargs): + super(DGraphDTALigand, self).__init__(*args, **kwargs) # init full DGraphDTA (defines self.relu and self.dropout used below) + + print('DGraphDTA Loaded') + num_features_mol = 78 + + # if ligand_feature == 'some new feature list': + # num_features_mol = updated number + + self.mol_conv1 = GCNConv(num_features_mol, num_features_mol) + self.mol_conv2 = GCNConv(num_features_mol, num_features_mol * 2) + self.mol_conv3 = GCNConv(num_features_mol * 2, num_features_mol * 4) + self.mol_fc_g1 = nn.Linear(num_features_mol * 4, 1024) + self.mol_fc_g2 = nn.Linear(1024, output_dim) + + def forward_mol(self, data_mol): + # get graph input + mol_x, mol_edge_index, mol_batch = data_mol.x, data_mol.edge_index, data_mol.batch + + x = self.mol_conv1(mol_x, mol_edge_index) + x = self.relu(x) + + # mol_edge_index, _ = dropout_adj(mol_edge_index, training=self.training) + x = self.mol_conv2(x, mol_edge_index) + x = self.relu(x) + + # mol_edge_index, _ = dropout_adj(mol_edge_index, training=self.training) + x = self.mol_conv3(x, mol_edge_index) + x = self.relu(x) + x = gep(x, mol_batch) # global pooling + + # flatten + x = self.relu(self.mol_fc_g1(x)) + x = self.dropout(x) + x = self.mol_fc_g2(x) + x = self.dropout(x) + return x \ No newline at end of file diff --git a/src/models/prior_work.py b/src/models/prior_work.py index bc67c454..012e500e 100644 --- a/src/models/prior_work.py +++ b/src/models/prior_work.py @@ -1,7 +1,5 @@ -from typing import Any, Mapping import torch from torch import nn -import torch.nn.functional as F from torch_geometric.nn import (GCNConv, GATConv, global_max_pool as gmp, diff --git a/src/models/mut_dta.py b/src/models/pro_mod.py similarity index 98% rename from src/models/mut_dta.py rename to src/models/pro_mod.py index 9c1782a9..c59442b6 100644 --- a/src/models/mut_dta.py +++ b/src/models/pro_mod.py @@ -172,7 +172,8 @@ def forward_pro(self, data): #### Graph NN #### ei = data.edge_index - ew = data.edge_weight if self.edge_weight else None + ew = data.edge_weight if (self.edge_weight is not None and + self.edge_weight != 'binary') else None # if edge_weight doesnt exist no error is thrown it just passes it as None xt = self.pro_conv1(target_x, ei, ew) diff --git a/src/train_test/distributed.py b/src/train_test/distributed.py index 45023264..2dc65f93 100644 --- a/src/train_test/distributed.py +++ b/src/train_test/distributed.py @@ -11,7 +11,7 @@ from src.train_test.training import train, test from src.train_test.utils import CheckpointSaver, init_node, init_dist_gpu, print_device_info -from src.utils.config import MODEL_STATS_CSV,MEDIA_SAVE_DIR, MODEL_SAVE_DIR +from src.utils.config import MODEL_STATS_CSV, MEDIA_SAVE_DIR, MODEL_SAVE_DIR # distributed training fn def dtrain(args): @@ -23,23 +23,24 @@ def dtrain(args): init_dist_gpu(args) # TODO: update this to loop through all options.
- DATA = args.data_opt[0] # only one data option for now - FEATURE = args.feature_opt[0] # only one feature option for now - EDGEW = args.edge_opt[0] # only one edge option for now - MODEL = args.model_opt[0] # only one model option for now - - BATCH_SIZE = args.batch_size - DROPOUT = args.dropout - LEARNING_RATE = args.learning_rate - EPOCHS = args.num_epochs + # only support for a single option for now: + MODEL = args.model_opt[0] + DATA = args.data_opt[0] + FEATURE = args.feature_opt[0] + EDGEW = args.edge_opt[0] + ligand_feature = args.ligand_feature_opt[0] + ligand_edge = args.ligand_edge_opt[0] media_save_p = f'{MEDIA_SAVE_DIR}/{DATA}/' - MODEL_KEY = Loader.get_model_key(MODEL,DATA,FEATURE,EDGEW,BATCH_SIZE*args.world_size, - LEARNING_RATE,DROPOUT,EPOCHS, + MODEL_KEY = Loader.get_model_key(model=MODEL,data=DATA,pro_feature=FEATURE,edge=EDGEW, + ligand_feature=ligand_feature, ligand_edge=ligand_edge, + batch_size=args.batch_size, + lr=args.learning_rate,dropout=args.dropout, + n_epochs=args.num_epochs, pro_overlap=args.protein_overlap) - # MODEL_KEY = "DDP-" + MODEL_KEY print(os.getcwd()) + print(MODEL_KEY) print(f"---------------- MODEL OPT ---------------") print(f" Selected og_model_opt: {args.model_opt}") print(f" Selected data_opt: {args.data_opt}") @@ -53,8 +54,8 @@ def dtrain(args): print(f" Num epochs: {args.num_epochs}\n") print(f"----------------- DISTRIBUTED ARGS -----------------") - print(f" Local Batch size: {BATCH_SIZE}") - print(f" Global Batch size: {BATCH_SIZE*args.world_size}") + print(f" Local Batch size: {args.batch_size}") + print(f" Global Batch size: {args.batch_size*args.world_size}") print(f" GPU: {args.gpu}") print(f" Rank: {args.rank}") print(f" World Size: {args.world_size}") @@ -78,7 +79,7 @@ def dtrain(args): sampler = DistributedSampler(dataset, shuffle=True, num_replicas=args.world_size, rank=args.rank, seed=args.rand_seed) - bs = 1 if d == 'test' else BATCH_SIZE + bs = 1 if d == 'test' else args.batch_size loader = DataLoader(dataset=dataset, sampler=sampler, batch_size=bs, # batch size per gpu (https://stackoverflow.com/questions/73899097/distributed-data-parallel-ddp-batch-size) @@ -92,7 +93,7 @@ def dtrain(args): # ==== Load model ==== # args.gpu is the local rank for this process - model = Loader.load_model(MODEL,FEATURE, EDGEW, DROPOUT).cuda(args.gpu) + model = Loader.init_model(MODEL, FEATURE, EDGEW, args.dropout).cuda(args.gpu) cp_saver = CheckpointSaver(model=model, save_path=f'{MODEL_SAVE_DIR}/{MODEL_KEY}.model', train_all=False, patience=50, min_delta=0.2, @@ -112,7 +113,7 @@ def dtrain(args): # ==== train ==== print("starting training:") logs = train(model=model, train_loader=loaders['train'], val_loader=loaders['val'], - device=args.gpu, saver=cp_saver, epochs=EPOCHS, lr_0=LEARNING_RATE) + device=args.gpu, saver=cp_saver, epochs=args.num_epochs, lr_0=args.learning_rate) torch.distributed.barrier() # Sync params across GPUs cp_saver.save() diff --git a/src/train_test/training.py b/src/train_test/training.py index 368706f9..7e85519a 100644 --- a/src/train_test/training.py +++ b/src/train_test/training.py @@ -198,7 +198,7 @@ def test(model, test_loader, device, CRITERION=None) -> Tuple[float, np.ndarray, def train_tune(config, model:str, pro_feature:str, train_dataset:BaseDataset, val_dataset:BaseDataset): from ray.air import session device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu') - model = Loader.load_model(model, pro_feature, config['edge'], config['dropout']) + model = Loader.init_model(model, pro_feature, 
config['edge'], config['dropout']) model.to(device) train_loader = DataLoader(train_dataset, batch_size=config['batch_size'], diff --git a/src/utils/arg_parse.py b/src/utils/arg_parse.py index 17046dbc..660a5aa9 100644 --- a/src/utils/arg_parse.py +++ b/src/utils/arg_parse.py @@ -5,9 +5,10 @@ def add_model_args(parser: argparse.ArgumentParser): """ Adds the following arguments to the parser: - model_opt - - data_opt - feature_opt - edge_opt + - ligand_feature_opt + - ligand_edge_opt - train - debug """ @@ -23,6 +24,12 @@ def add_model_args(parser: argparse.ArgumentParser): 'and EDA is the same but with esm_only set to False. Additional options:' + \ '\n\t- EAT: EsmAttentionDTA (no graph for protein rep)' ) + # Add the argument for FEATURE_opt + parser.add_argument('-f', + '--feature_opt', + choices=cfg.PRO_FEAT_OPT, nargs='+', required=True, + help=f'Select one or more from {cfg.PRO_FEAT_OPT}.' + ) # Add the argument for EDGE_opt parser.add_argument('-e', '--edge_opt', @@ -41,6 +48,21 @@ def add_model_args(parser: argparse.ArgumentParser): action='store_true', help='Enters debug mode, no training is done, just model initialization.' ) + + # Arguments for ligand options + parser.add_argument('-lf', + '--ligand_feature_opt', + choices=cfg.LIG_FEAT_OPT, + nargs='+', default=cfg.LIG_FEAT_OPT[0:1], required=False, + help=f'Select one or more from {cfg.LIG_FEAT_OPT}.' + ) + parser.add_argument('-le', + '--ligand_edge_opt', + choices=cfg.LIG_EDGE_OPT, + nargs='+', default=cfg.LIG_EDGE_OPT[0:1], required=False, + help=f'Select one or more from {cfg.LIG_EDGE_OPT}.' + ) + return parser def add_hyperparam_args(parser: argparse.ArgumentParser): @@ -79,7 +101,6 @@ def add_dataset_args(parser: argparse.ArgumentParser): """ Adds the following dataset arguments to the parser: - data_opt - - feature_opt - train_split - val_split - shuffle_data @@ -92,12 +113,6 @@ def add_dataset_args(parser: argparse.ArgumentParser): choices=cfg.DATA_OPT, nargs='+', required=True, help=f'Select one of {cfg.DATA_OPT} (default: {cfg.DATA_OPT[0]}).' ) - # Add the argument for FEATURE_opt - parser.add_argument('-f', - '--feature_opt', - choices=cfg.PRO_FEAT_OPT, nargs='+', required=True, - help=f'Select one or more from {cfg.PRO_FEAT_OPT}.' 
- ) parser.add_argument('-ts', '--train_split', action='store', type=float, default=0.8, @@ -207,7 +222,10 @@ def parse_train_test_args(verbose=True, distributed=False, print(f" Selected data_opt: {args.data_opt}") print(f" Selected feature_opt: {args.feature_opt}") print(f" Selected edge_opt: {args.edge_opt}") - print(f" forced training: {args.train}\n") + print(f" forced training: {args.train}") + print(f" -----") + print(f" ligand_feature_opt: {args.ligand_feature_opt}") + print(f" ligand_edge_opt: {args.ligand_edge_opt}\n") print(f"-------------- HYPERPARAMETERS -----------") print(f" Global Batch size: {global_bs}") diff --git a/src/utils/config.py b/src/utils/config.py index 7a5e04c1..174e3b31 100644 --- a/src/utils/config.py +++ b/src/utils/config.py @@ -10,13 +10,22 @@ STRUCT_EDGE_OPT = ['anm', 'af2', 'af2-anm'] # edge options that require structural info (pdbs) EDGE_OPT = ['simple', 'binary'] + STRUCT_EDGE_OPT +PRO_FEAT_OPT = ['nomsa', 'msa', 'shannon'] + +LIG_FEAT_OPT = ['original'] +LIG_EDGE_OPT = ['binary'] DATA_OPT = ['davis', 'kiba', 'PDBbind'] -PRO_FEAT_OPT = ['nomsa', 'msa', 'shannon'] -MODEL_STATS_CSV = 'results/model_media/model_stats.csv' +# Data save paths +DATA_ROOT = '../data/' + +# Model save paths MEDIA_SAVE_DIR = 'results/model_media/' +MODEL_STATS_CSV = 'results/model_media/model_stats.csv' +MODEL_STATS_CSV_VAL = 'results/model_media/model_stats_val.csv' MODEL_SAVE_DIR = 'results/model_checkpoints/ours' +CHECKPOINT_SAVE_DIR = MODEL_SAVE_DIR # alias for clarity # cluster based configs: import socket diff --git a/src/utils/loader.py b/src/utils/loader.py index 4c135b9a..cb99fcb6 100644 --- a/src/utils/loader.py +++ b/src/utils/loader.py @@ -3,7 +3,8 @@ from typing import Iterable from torch_geometric.loader import DataLoader -from src.models.mut_dta import EsmDTA, EsmAttentionDTA +from src.models.lig_mod import DGraphDTALigand +from src.models.pro_mod import EsmDTA, EsmAttentionDTA from src.models.prior_work import DGraphDTA, DGraphDTAImproved from src.data_processing.datasets import PDBbindDataset, DavisKibaDataset from src.utils import config as cfg # sets up os env for HF @@ -26,22 +27,32 @@ class Loader(): pro_feature_opt = cfg.PRO_FEAT_OPT @staticmethod - @validate_args({'model': model_opt, 'data':data_opt, 'edge': edge_opt, 'pro_feature': pro_feature_opt}) - def get_model_key(model:str, data:str, pro_feature:str, edge:str, + @validate_args({'model': model_opt, 'data':data_opt, 'edge': edge_opt, 'pro_feature': pro_feature_opt, + 'ligand_feature':cfg.LIG_FEAT_OPT, 'ligand_edge':cfg.LIG_EDGE_OPT}) + def get_model_key(model:str, data:str, pro_feature:str, edge:str, ligand_feature:str, ligand_edge:str, batch_size:int, lr:float, dropout:float, n_epochs:int, pro_overlap:bool=False): data += '-overlap' if pro_overlap else '' if model in ['EAT']: # no edgew or features for this model type print('WARNING: edge weight and feature options are not supported with the specified model.') - return f'{model}M_{data}D_{batch_size}B_{lr}LR_{dropout}D_{n_epochs}E' + model_key = f'{model}M_{data}D_{batch_size}B_{lr}LR_{dropout}D_{n_epochs}E' else: - return f'{model}M_{data}D_{pro_feature}F_{edge}E_{batch_size}B_{lr}LR_{dropout}D_{n_epochs}E' + model_key = f'{model}M_{data}D_{pro_feature}F_{edge}E_{batch_size}B_{lr}LR_{dropout}D_{n_epochs}E' + + return model_key + f'_{ligand_feature}LF_{ligand_edge}LE' @staticmethod - @validate_args({'model': model_opt, 'edge': edge_opt, 'pro_feature': pro_feature_opt}) - def load_model(model:str, pro_feature:str, edge:str, dropout:float): + 
@validate_args({'model': model_opt, 'edge': edge_opt, 'pro_feature': pro_feature_opt, + 'ligand_feature':cfg.LIG_FEAT_OPT, 'ligand_edge':cfg.LIG_EDGE_OPT}) + def init_model(model:str, pro_feature:str, edge:str, dropout:float, ligand_feature:str=None, ligand_edge:str=None): num_feat_pro = 54 if 'msa' in pro_feature else 34 + if (ligand_feature is not None and ligand_feature != 'original') or \ + (ligand_edge is not None and ligand_edge != 'binary'): + print('WARNING: combining protein and ligand modifications is not yet supported; using the original protein features.') + # TODO: add support for combining these options. + return DGraphDTALigand(ligand_feature, ligand_edge) + if model == 'DG': model = DGraphDTA(num_features_pro=num_feat_pro, dropout=dropout, edge_weight_opt=edge) @@ -84,8 +95,12 @@ def load_model(model:str, pro_feature:str, edge:str, dropout:float): return model @staticmethod - @validate_args({'data': data_opt, 'pro_feature': pro_feature_opt, 'edge_opt': edge_opt}) - def load_dataset(data:str, pro_feature:str, edge_opt:str, subset:str=None, path:str='../data/'): + @validate_args({'data': data_opt, 'pro_feature': pro_feature_opt, 'edge_opt': edge_opt, + 'ligand_feature':cfg.LIG_FEAT_OPT, 'ligand_edge':cfg.LIG_EDGE_OPT}) + def load_dataset(data:str, pro_feature:str, edge_opt:str, subset:str=None, path:str=cfg.DATA_ROOT, + ligand_feature:str='original', ligand_edge:str='binary'): + # subset selects the train/val/test split; + # it can also name a cross-validation fold (e.g., train1, train2, etc.). if data == 'PDBbind': dataset = PDBbindDataset(save_root=f'{path}/PDBbindDataset', data_root=f'{path}/v2020-other-PL/', @@ -95,6 +110,9 @@ def load_dataset(data:str, pro_feature:str, edge_opt:str, subset:str=None, path: edge_opt=edge_opt, subset=subset, af_conf_dir='../colabfold/pdbbind_af2_out/out0', + ligand_feature=ligand_feature, + ligand_edge=ligand_edge, + max_seq_len=1500 ) elif data in ['davis', 'kiba']: dataset = DavisKibaDataset( @@ -104,7 +122,10 @@ def load_dataset(data:str, pro_feature:str, edge_opt:str, subset:str=None, path: cmap_threshold=-0.5, feature_opt=pro_feature, edge_opt=edge_opt, - subset=subset + subset=subset, + ligand_feature=ligand_feature, + ligand_edge=ligand_edge, + max_seq_len=1500 ) else: raise Exception(f'Invalid data option, pick from {Loader.data_opt}') @@ -112,10 +133,12 @@ def load_dataset(data:str, pro_feature:str, edge_opt:str, subset:str=None, path: return dataset @staticmethod - @validate_args({'data': data_opt, 'pro_feature': pro_feature_opt, 'edge_opt': edge_opt}) - def load_DataLoaders(data:str, pro_feature:str, edge_opt:str, path:str='../data/', + @validate_args({'data': data_opt, 'pro_feature': pro_feature_opt, 'edge_opt': edge_opt, + 'ligand_feature':cfg.LIG_FEAT_OPT, 'ligand_edge':cfg.LIG_EDGE_OPT}) + def load_DataLoaders(data:str, pro_feature:str, edge_opt:str, path:str=cfg.DATA_ROOT, batch_train:int=64, datasets:Iterable[str]=['train', 'test', 'val'], - protein_overlap:bool=False): + protein_overlap:bool=False, + ligand_feature:str=None, ligand_edge:str=None): loaders = {} # different list for subset so that loader keys are the same name as input @@ -126,7 +149,9 @@ def load_DataLoaders(data:str, pro_feature:str, edge_opt:str, path:str='../data/ for d, s in zip(datasets, subsets): dataset = Loader.load_dataset(data, pro_feature, edge_opt, - subset=s, path=path) + subset=s, path=path, + ligand_feature=ligand_feature, + ligand_edge=ligand_edge) bs = 1 if d == 'test' else batch_train loader = DataLoader(dataset=dataset, batch_size=bs, diff --git a/test.py 
b/test.py index 99e8d4d9..14d4c18a 100644 --- a/test.py +++ b/test.py @@ -1,5 +1,5 @@ #%% -from src.utils import config +from src.utils import config as cfg import torch import os from torch_geometric.loader import DataLoader @@ -11,7 +11,6 @@ args = parse_train_test_args(verbose=True, jyp_args=' -m EDI -d PDBbind -f nomsa -e anm -lr 0.0001 -bs 20 -do 0.4 -ne 2000') #%% - MODEL = args.model_opt[0] DATA = args.data_opt[0] FEATURE = args.feature_opt[0] @@ -23,31 +22,25 @@ DROPOUT = args.dropout EPOCHS = args.num_epochs -checkpoint_p_tmp = lambda x: f'results/model_checkpoints/ours/{x}.model_tmp' -checkpoint_p = lambda x: f'results/model_checkpoints/ours/{x}.model' - -media_save_dir = 'results/model_media/' -media_save_p = f'{media_save_dir}/{DATA}/' -MODEL_STATS_CSV = 'results/model_media/model_stats.csv' +media_save_p = f'{cfg.MEDIA_SAVE_DIR}/{DATA}/' -MODEL_KEY = Loader.get_model_key(MODEL,DATA,FEATURE,EDGE, BATCH_SIZE,LEARNING_RATE,DROPOUT,EPOCHS, pro_overlap=args.protein_overlap) +MODEL_KEY = Loader.get_model_key(model=MODEL, data=DATA, pro_feature=FEATURE, edge=EDGE, ligand_feature=args.ligand_feature_opt[0], ligand_edge=args.ligand_edge_opt[0], batch_size=BATCH_SIZE, lr=LEARNING_RATE, dropout=DROPOUT, n_epochs=EPOCHS, pro_overlap=args.protein_overlap) + +model_p_tmp = f'{cfg.MODEL_SAVE_DIR}/{MODEL_KEY}.model_tmp' +model_p = f'{cfg.MODEL_SAVE_DIR}/{MODEL_KEY}.model' + # MODEL_KEY = 'DDP-' + MODEL_KEY # distributed model -model_p = checkpoint_p(MODEL_KEY) -model_p = model_p if os.path.isfile(model_p) else checkpoint_p_tmp(MODEL_KEY) +model_p = model_p if os.path.isfile(model_p) else model_p_tmp assert os.path.isfile(model_p), f"MISSING MODEL CHECKPOINT {model_p}" print(model_p) -# %% -subset = 'test-overlap' if args.protein_overlap else 'test' -test_dataset = Loader.load_dataset(DATA, FEATURE, EDGE, subset=subset, path='../data') -test_loader = DataLoader(test_dataset, 1, shuffle=False) -#%% -model = Loader.load_model(MODEL, FEATURE, EDGE, DROPOUT) +#%% Initialize model and load checkpoint +model = Loader.init_model(MODEL, FEATURE, EDGE, DROPOUT) device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu') mdl_dict = torch.load(model_p, map_location=device) @@ -55,21 +48,40 @@ model.to(device) -#%% +# %% Load test data +subset = 'test-overlap' if args.protein_overlap else 'test' +test_dataset = Loader.load_dataset(DATA, FEATURE, EDGE, subset=subset, path=cfg.DATA_ROOT) +test_loader = DataLoader(test_dataset, 1, shuffle=False) + +#%% Run model on test set loss, pred, actual = test(model, test_loader, device) print(f'# Test loss: {loss}') get_metrics(actual, pred, save_results=True, save_path=media_save_p, model_key=MODEL_KEY, - csv_file=MODEL_STATS_CSV, + csv_file=cfg.MODEL_STATS_CSV, + show=False, + ) + +#%% Load val data +subset = 'val-overlap' if args.protein_overlap else 'val' +val_dataset = Loader.load_dataset(DATA, FEATURE, EDGE, subset=subset, path=cfg.DATA_ROOT) +val_loader = DataLoader(val_dataset, 1, shuffle=False) + +#%% Run model on val set +loss, pred, actual = test(model, val_loader, device) +print(f'# Val loss: {loss}') +get_metrics(actual, pred, + save_results=True, + save_path=media_save_p, + model_key=MODEL_KEY, + csv_file=cfg.MODEL_STATS_CSV_VAL, show=False, ) - -# %% -# renaming checkpoint to remove _tmp specification -model_p = checkpoint_p(MODEL_KEY) -model_p_tmp = checkpoint_p_tmp(MODEL_KEY) + -# %% renaming checkpoint to remove _tmp specification if (not os.path.isfile(model_p) and # ensuring no overwrite os.path.isfile(model_p_tmp)): os.rename(model_p_tmp, model_p) \ No newline at end of file diff --git a/train_test.py b/train_test.py index e4cd5fdd..5faa25bb 100644 --- a/train_test.py +++ b/train_test.py @@ -17,7 +17,7 @@ VAL_SPLIT = args.val_split # 10% for validation SHUFFLE_DATA = not args.no_shuffle -SAVE_RESULTS = 
True +SAVE_RESULTS = False SHOW_PLOTS = False #%% @@ -27,9 +27,8 @@ import torch import matplotlib.pyplot as plt -from matplotlib.ticker import MaxNLocator -from src.utils import config # sets up env vars +from src.utils import config as cfg # sets up env vars from src.utils.config import MODEL_STATS_CSV, MEDIA_SAVE_DIR, MODEL_SAVE_DIR from src.train_test.training import train, test @@ -52,11 +51,16 @@ # %% Training loop metrics = {} -for MODEL, DATA, FEATURE, EDGEW in itertools.product(args.model_opt, args.data_opt, - args.feature_opt, args.edge_opt): +for (MODEL, DATA, + FEATURE, EDGEW, + ligand_feature, ligand_edge) in itertools.product( + args.model_opt, args.data_opt, + args.feature_opt, args.edge_opt, + args.ligand_feature_opt, args.ligand_edge_opt): print(f'\n{"-"*40}\n({MODEL}, {DATA}, {FEATURE}, {EDGEW})') - MODEL_KEY = Loader.get_model_key(MODEL,DATA,FEATURE,EDGEW, - BATCH_SIZE,LEARNING_RATE,DROPOUT,NUM_EPOCHS, + MODEL_KEY = Loader.get_model_key(model=MODEL,data=DATA,pro_feature=FEATURE,edge=EDGEW, + ligand_feature=ligand_feature, ligand_edge=ligand_edge, + batch_size=BATCH_SIZE,lr=LEARNING_RATE,dropout=DROPOUT,n_epochs=NUM_EPOCHS, pro_overlap=args.protein_overlap) print(f'# {MODEL_KEY} \n') @@ -80,7 +84,8 @@ # batch_train=BATCH_SIZE, use_refined=False, # split_by_prot=not args.protein_overlap) - loaders = Loader.load_DataLoaders(DATA, FEATURE, EDGEW, path='../data/', + loaders = Loader.load_DataLoaders(data=DATA, pro_feature=FEATURE, edge_opt=EDGEW, path=cfg.DATA_ROOT, + ligand_feature=ligand_feature, ligand_edge=ligand_edge, batch_train=BATCH_SIZE, datasets=['train', 'test', 'val'], protein_overlap=args.protein_overlap) @@ -89,7 +94,8 @@ # ==== LOAD MODEL ==== device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu') print(f'#Device: {device}') - model = Loader.load_model(MODEL, FEATURE, EDGEW, DROPOUT).to(device) + model = Loader.init_model(model=MODEL, pro_feature=FEATURE, edge=EDGEW, dropout=DROPOUT, + ligand_feature=ligand_feature, ligand_edge=ligand_edge).to(device) cp_saver.new_model(model, save_path=model_save_p) if DEBUG: @@ -137,7 +143,6 @@ logs=logs ) plt.clf() - plt.clf() # %%
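For reference, a minimal sketch of how the new ligand options flow through the Loader API after this change (illustrative values only; the expected key string follows from get_model_key above):

from src.utils.loader import Loader

MODEL_KEY = Loader.get_model_key(model='DG', data='davis', pro_feature='nomsa', edge='binary',
                                 ligand_feature='original', ligand_edge='binary',
                                 batch_size=64, lr=0.0001, dropout=0.4, n_epochs=2000)
# -> 'DGM_davisD_nomsaF_binaryE_64B_0.0001LR_0.4D_2000E_originalLF_binaryLE'

# with the default ligand options, init_model falls through to the original DGraphDTA branch:
model = Loader.init_model(model='DG', pro_feature='nomsa', edge='binary', dropout=0.4,
                          ligand_feature='original', ligand_edge='binary')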