diff --git a/BaselineModel.py b/BaselineModel.py new file mode 100644 index 0000000..4867607 --- /dev/null +++ b/BaselineModel.py @@ -0,0 +1,53 @@ +import numbers +import numpy as np +import sklearn + +from sklearn.utils import check_X_y, check_array, column_or_1d +from sklearn.utils.multiclass import check_classification_targets + +from sklearn.externals.joblib import Parallel, delayed #For parallel computing TODO: check if we need to be parallel or not +from sklearn.utils.validation import has_fit_parameter, check_is_fitted + + +class DecisionTreeBaseline(): + """Base class for ordinal meta-classifier. + + """ + + def __init__(self): + return self + + def fit(self, X, y, sample_weight=None): + return self + + def _fit(self, X, y, max_samples=None, max_depth=None, sample_weight=None): + return self + + def predict(self, X): + X = check_array(X, accept_sparse=['csr', 'csc']) + + # ---------------------------------------------Our CODE + n_samples = X.shape[0] + prediction = np.zeros((n_samples, 1)) + + for i in range(0, n_samples): + if X[i,"Scenario"] == "C": + if X[i,"VoterType"] == "LB": + prediction[i] = 2 #Q' vote + else: + prediction[i] = 1 #Q vote + else: + if X[i,"Scenario"] in ["E","F"]: + if X[i,"VoterType"] == "TRT": + prediction[i] = 1 #Q vote + else: + prediction[i] = 2 #Q' vote + + else: + prediction[i] = 1 #Q vote + + + return prediction + + + diff --git a/BayesRuleModel.py b/BayesRuleModel.py new file mode 100644 index 0000000..c102638 --- /dev/null +++ b/BayesRuleModel.py @@ -0,0 +1,53 @@ +import numbers +import numpy as np +import sklearn + +from sklearn.utils import check_X_y, check_array, column_or_1d +from sklearn.utils.multiclass import check_classification_targets + +from sklearn.externals.joblib import Parallel, \ + delayed # For parallel computing TODO: check if we need to be parallel or not +from sklearn.utils.validation import has_fit_parameter, check_is_fitted + + +class BayesRuleClassifier(): + """Base class for ordinal meta-classifier. 
+ + """ + + def __init__(self): + return self + + def fit(self, X, y, sample_weight=None): + return self + + def _fit(self, X, y, max_samples=None, max_depth=None, sample_weight=None): + return self + + def predict(self, X): + X = check_array(X, accept_sparse=['csr', 'csc']) + + # ---------------------------------------------Our CODE + n_samples = X.shape[0] + prediction = np.zeros((n_samples, 1)) + + for i in range(0, n_samples): + if X[i, "Scenario"] == "C": + if X[i, "VoterType"] == "LB": + prediction[i] = 2 # Q' vote + else: + prediction[i] = 1 # Q vote + else: + if X[i, "Scenario"] in ["E", "F"]: + if X[i, "VoterType"] == "TRT": + prediction[i] = 1 # Q vote + else: + prediction[i] = 2 # Q' vote + + else: + prediction[i] = 1 # Q vote + + return prediction + + + diff --git a/Data/action_table_N3.csv b/Data/action_table_N3.csv new file mode 100644 index 0000000..452ba5c --- /dev/null +++ b/Data/action_table_N3.csv @@ -0,0 +1,19 @@ +scenario,action,action_name +1,1,TRT +1,2,DOM +1,3,DOM +2,1,TRT +2,2,DOM +2,3,DOM +3,1,TRT +3,2,WLB +3,3,DOM +4,1,TRT +4,2,DOM +4,3,SLB +5,1,TRT +5,2,CMP\WLB +5,3,DOM +6,1,TRT +6,2,CMP +6,3,SLB diff --git a/Data/action_table_N4.csv b/Data/action_table_N4.csv new file mode 100644 index 0000000..2077982 --- /dev/null +++ b/Data/action_table_N4.csv @@ -0,0 +1,97 @@ +scenario,action,action_name +1,1,TRT +1,2,DOM +1,3,DOM +1,4,DOM +2,1,TRT +2,2,DOM +2,3,DOM +2,4,DOM +3,1,TRT +3,2,DOM +3,3,DOM +3,4,DOM +4,1,TRT +4,2,DOM +4,3,DOM +4,4,DOM +5,1,TRT +5,2,DOM +5,3,DOM +5,4,DOM +6,1,TRT +6,2,DOM +6,3,DOM +6,4,DOM +7,1,TRT +7,2,WLB +7,3,DOM +7,4,DOM +8,1,TRT +8,2,WLB +8,3,DOM +8,4,DOM +9,1,TRT +9,2,CMP\WLB +9,3,DOM +9,4,DOM +10,1,TRT +10,2,CMP\WLB +10,3,DOM +10,4,DOM +11,1,TRT +11,2,CMP\WLB +11,3,DOM +11,4,DOM +12,1,TRT +12,2,CMP\WLB +12,3,DOM +12,4,DOM +13,1,TRT +13,2,DOM +13,3,SLB +13,4,DOM +14,1,TRT +14,2,DOM +14,3,SLB +14,4,DOM +15,1,TRT +15,2,CMP +15,3,SLB +15,4,DOM +16,1,TRT +16,2,CMP +16,3,SLB +16,4,DOM +17,1,TRT +17,2,DOM +17,3,SLB 
+17,4,DOM +18,1,TRT +18,2,CMP +18,3,SLB +18,4,DOM +19,1,TRT +19,2,DOM +19,3,DOM +19,4,SLB +20,1,TRT +20,2,DOM +20,3,DOM +20,4,SLB +21,1,TRT +21,2,CMP +21,3,DOM +21,4,SLB +22,1,TRT +22,2,CMP +22,3,DOM +22,4,SLB +23,1,TRT +23,2,DOM +23,3,CMP +23,4,SLB +24,1,TRT +24,2,CMP +24,3,CMP +24,4,SLB diff --git a/Data/d32_folds.txt b/Data/d32_folds.txt new file mode 100644 index 0000000..9c6e877 --- /dev/null +++ b/Data/d32_folds.txt @@ -0,0 +1,20 @@ +fold 1: +[2006, 2017, 2028, 3324, 3336, 281, 387, 491, 5824, 5004, 5028, 5044, 4242, 4253, 4264, 1940, 1951, 3717, 3728, 3741, 5388, 5413, 5450, 161, 204, 246, 5228, 5247, 5267, 4768, 4991, 5079, 4987, 5031, 1742, 2653, 3287, 3918, 1055, 1506, 2183, 794, 854, 865, 877, 1672, 1694, 1707, 1489, 1500, 1511, 1522, 2245, 2256, 2267, 4749, 4769, 4800, 793, 1098, 1744, 1303, 1314, 1325, 1772, 1783, 1794, 4087, 4098, 4112, 3398, 3410, 3421, 4276, 4287, 4298, 5281, 5301, 5325, 3830, 3842, 3853, 3866, 2531, 2043, 2849, 3723, 5530, 5872, 5883, 3105, 3123, 3151, 2735, 4765, 4801, 4823, 3632, 3679, 3690, 3701, 1619, 1642, 1662, 2041, 2052, 4434, 5214, 5240, 5265, 1553, 1569, 1592, 1031, 1062, 1092, 3879, 3915, 4220, 3194, 3207, 3218, 4570, 4594, 4605, 5167, 5178, 5191, 1870, 2693, 3507, 5535, 5556, 5569, 3757, 3768, 3780, 1004, 1016, 9, 20, 31, 87, 120, 180, 4925, 4932, 5118, 5146, 4655, 4666, 4677, 4839, 4863, 1444, 1462, 1480, 4119, 4130, 4141, 4311, 4325, 4344, 2226, 2238, 3482, 4047, 4462, 888, 899, 910, 1231, 1260, 1285, 1219, 1243, 1259, 2328, 2340, 2351, 179, 257, 315, 4100, 4440, 4759, 3582, 3618, 3658, 2628, 2645, 3502, 3513, 941, 4157, 4170, 4189, 2281, 4031, 4060, 4072, 1775, 2385, 2965, 671, 682, 693, 1898, 1911, 1922, 1339, 1350, 1361, 40, 51, 62, 3573, 3605, 3636, 732, 743, 5511, 2774, 2802, 2817, 4346, 4366, 4377, 1605, 1627, 1650, 2594, 2606, 2622, 5683, 5694, 4452, 4484, 4518, 1869, 1891, 3140, 1718, 1730, 1756, 4612, 4623, 4634, 1966, 1980, 2001, 4183, 4205, 4224, 3223, 3234, 3245, 3256, 5088, 5115, 5144, 2075, 2086, 2097, 
2108, 2436, 2448, 2468, 566, 577, 604, 3451, 3462, 3473, 3532, 3543, 3554, 4824, 4860, 3276, 3908, 4379, 540, 551, 562, 4485, 3067, 3031, 3047, 3059, 1370, 1381, 1392, 2160, 2172, 496, 507, 518, 536, 3077, 3088, 3099, 3357, 3368, 3379, 3390, 4538, 4554, 4576, 5360, 5371, 5382, 730, 5182, 1861, 2618, 4382, 4409, 76, 94, 131, 1150, 1178, 3960, 3987, 5612, 5623, 5635, 2973, 2985, 388, 433, 476, 980, 1571, 5401, 2557, 2568, 2581, 4413, 4436, 2191, 2202, 2213, 4717, 4727, 3435, 4875, 4887, 4899, 2112, 2123, 5472, 5483, 5494, 2228, 3052, 922, 934, 2499, 2514, 2532, 256, 288, 322, 3571, 3611, 3659, 202, 259, 305, 437, 5389, 5409, 4934, 4945, 4957, 2945, 2956, 2967, 1200, 1217, 3282, 359, 408, 455, 3961, 3984, 4017, 583, 600, 619, 2912, 3258, 3630, 3904, 3923, 3934, 4468, 4507, 4528, 2828, 2844, 2869, 2857, 2889, 1830, 1864, 1884, 4714, 5093, 5158, 3704, 5143, 5411, 1814, 1842, 1581, 1532, 821, 835, 5452, 2295, 2306, 2317, 5766, 5777, 5788, 5649, 5660, 5671, 972, 984, 995, 5801, 5812, 2920, 2931, 2670, 2695, 2721, 4691, 4704, 650, 661, 4713, 5567, 5589, 5600, 4974, 5005, 832, 1732, 2741, 3998, 4026, 4043, 641, 1685, 2364, 2395, 2414, 617, 704, 715, 3259, 3270, 757, 781, 805, 77, 175, 532, 1052, 1089, 1123, 3489, 1560, 3792, 1981, 2694, 2717, 2745, 5706, 5717, 5728, 5298, 5320, 5342, 5544, 5655, 5763, 5843, 5854, 5865, 5737, 5749] +fold 2: +[2012, 2023, 2035, 3315, 3327, 3340, 302, 409, 511, 5807, 5013, 5033, 5049, 4248, 4259, 4271, 1937, 1949, 3718, 3729, 3742, 5393, 5417, 5455, 113, 171, 210, 5226, 5244, 5264, 4728, 4953, 5074, 4981, 5022, 5055, 1753, 2516, 3163, 3857, 1050, 1246, 2140, 787, 856, 868, 879, 1675, 1696, 1710, 1497, 1508, 1519, 2241, 2252, 2263, 4754, 4779, 4806, 887, 1268, 1957, 1306, 1317, 1328, 1767, 1778, 1789, 1800, 4079, 4094, 4108, 3404, 3415, 3426, 4282, 4293, 4304, 5282, 5303, 5327, 3836, 3847, 3859, 2526, 2098, 2906, 3801, 5523, 5542, 5878, 5889, 3119, 3136, 2740, 4750, 4776, 4810, 3640, 3680, 3691, 1606, 1628, 1649, 2048, 2059, 4650, 5223, 5248, 
5275, 1545, 1556, 1576, 1049, 1082, 1112, 3868, 3889, 4231, 3189, 3200, 3212, 4564, 4590, 4603, 5163, 5175, 5188, 2194, 2913, 3785, 5532, 5554, 5568, 3748, 3763, 3774, 1002, 1014, 1025, 7, 18, 29, 75, 108, 163, 216, 4922, 5030, 5124, 5152, 4654, 4665, 4676, 4835, 4862, 1445, 1466, 1481, 4128, 4139, 4150, 4314, 4327, 4349, 2221, 2233, 2908, 3778, 4269, 893, 904, 915, 1238, 1269, 1289, 1233, 1253, 1274, 2326, 2338, 2349, 166, 244, 303, 3949, 4312, 4621, 3604, 3642, 3670, 2621, 2638, 3505, 3517, 940, 4161, 4174, 4192, 2271, 2282, 4042, 4063, 4078, 1205, 1988, 2585, 3174, 679, 690, 701, 1906, 1918, 1930, 1343, 1354, 1366, 42, 53, 64, 3579, 3612, 3644, 739, 5507, 5518, 2769, 2796, 2815, 4330, 4353, 4370, 1607, 1630, 1652, 2587, 2602, 2614, 5679, 5690, 4464, 4505, 4529, 1859, 1887, 1722, 1739, 1759, 4615, 4626, 4639, 1970, 1986, 4177, 4202, 4221, 3232, 3243, 3254, 5066, 5101, 5131, 2083, 2094, 2105, 2435, 2447, 2465, 574, 598, 627, 3455, 3465, 3476, 3529, 3540, 3551, 4815, 4850, 3122, 3849, 4280, 531, 546, 557, 4481, 3063, 3026, 3041, 3055, 1373, 1384, 1396, 2152, 2169, 505, 516, 530, 3078, 3089, 3100, 3359, 3370, 3381, 4542, 4562, 4584, 5355, 5366, 5377, 5111, 1109, 1999, 2772, 4391, 71, 89, 119, 160, 1142, 1171, 3946, 3972, 4006, 5606, 5617, 5628, 2980, 357, 405, 457, 752, 1033, 1712, 5499, 2549, 2563, 2574, 4397, 4423, 4441, 2195, 2206, 2217, 4720, 4731, 3439, 4878, 4889, 4902, 2113, 2125, 5479, 5490, 5501, 2158, 2954, 923, 935, 2505, 2523, 2546, 270, 300, 339, 3561, 3586, 3638, 213, 262, 309, 425, 5396, 5418, 4937, 4948, 4960, 2937, 2948, 2959, 2970, 1194, 1207, 3277, 355, 404, 450, 780, 3947, 3968, 3997, 590, 607, 625, 2943, 3298, 3641, 3895, 3916, 3930, 4465, 4502, 4525, 2832, 2851, 2875, 2854, 2885, 1822, 1854, 1878, 4610, 5082, 5150, 4064, 5209, 5477, 1810, 1833, 1573, 1534, 817, 828, 5451, 2290, 2301, 2312, 5769, 5780, 5791, 5643, 5654, 5665, 1280, 970, 983, 994, 5795, 5806, 5818, 2924, 2666, 2690, 2716, 4687, 4701, 656, 4159, 5572, 5591, 4971, 5001, 1012, 1928, 
2840, 4016, 4034, 4052, 640, 1688, 2388, 2412, 2423, 678, 711, 722, 3263, 750, 765, 797, 809, 56, 153, 323, 1058, 1094, 1127, 3484, 3495, 1984, 2686, 2708, 2736, 5702, 5713, 5724, 5735, 5300, 5322, 5344, 5599, 5708, 5846, 5841, 5852, 5863, 5739, 5752] +fold 3: +[2011, 2022, 2034, 3317, 3329, 3341, 313, 419, 522, 5826, 5006, 5029, 5046, 4245, 4256, 4267, 1936, 1947, 1959, 3714, 3725, 3737, 3754, 5403, 5431, 5464, 137, 188, 230, 5229, 5249, 5268, 4706, 4923, 5073, 5094, 4979, 5018, 5053, 1738, 2888, 3472, 4025, 1043, 1075, 2076, 779, 798, 851, 862, 873, 1668, 1692, 1705, 1496, 1507, 1518, 2243, 2254, 2265, 4744, 4764, 4793, 930, 1319, 2032, 1301, 1312, 1323, 1770, 1781, 1792, 4091, 4105, 3396, 3408, 3419, 4279, 4291, 4302, 5292, 5315, 5338, 3835, 3846, 3858, 2512, 2538, 1892, 2800, 3684, 5536, 5875, 5886, 3117, 3132, 2758, 4747, 4774, 4809, 3672, 3687, 3698, 1608, 1629, 1651, 2040, 2051, 4408, 5211, 5234, 5260, 1548, 1562, 1583, 1042, 1074, 1104, 5060, 3873, 3897, 4209, 4232, 3191, 3202, 3214, 4572, 4595, 4606, 5166, 5177, 5190, 1967, 2781, 3577, 5548, 5563, 5580, 3740, 3760, 3771, 3783, 1003, 1015, 10, 21, 32, 83, 116, 174, 4914, 4808, 5109, 5136, 4648, 4659, 4671, 4682, 4825, 4852, 1454, 1476, 1486, 4127, 4138, 4149, 4315, 4329, 4351, 2225, 2237, 3673, 4182, 4589, 892, 903, 914, 1222, 1252, 1283, 1230, 1250, 1267, 2334, 2345, 134, 217, 286, 348, 3712, 4226, 4537, 3606, 3646, 3674, 2630, 2649, 3503, 3514, 965, 4162, 4176, 4195, 2275, 2286, 4045, 4066, 4080, 1517, 2239, 2856, 674, 685, 696, 1901, 1913, 1924, 5138, 1335, 1346, 1357, 38, 49, 60, 3351, 3585, 3620, 3651, 731, 742, 5509, 5520, 2771, 2799, 2816, 4336, 4360, 4373, 1602, 1624, 1648, 2591, 2604, 2616, 5675, 5686, 5697, 4448, 4477, 4515, 1863, 1888, 1721, 1737, 1758, 4617, 4629, 4641, 1971, 1989, 4173, 4199, 4216, 4237, 3226, 3237, 3249, 5081, 5108, 5139, 2080, 2091, 2102, 2432, 2444, 2457, 572, 591, 620, 3446, 3457, 3467, 3533, 3544, 3555, 4821, 4857, 3339, 3969, 4401, 539, 550, 561, 4466, 4493, 3064, 3030, 
3046, 3058, 1376, 1387, 1402, 2161, 2173, 503, 514, 526, 3084, 3095, 3109, 3364, 3375, 3386, 4539, 4555, 4578, 5350, 5361, 5372, 5384, 920, 5467, 1797, 2554, 4389, 84, 107, 150, 1152, 1179, 3953, 3978, 4015, 5613, 5624, 5636, 2974, 2987, 346, 399, 446, 483, 876, 1286, 5201, 2558, 2569, 2584, 4415, 4437, 2187, 2198, 2209, 4721, 4732, 3432, 4882, 4893, 4907, 2111, 2122, 5474, 5485, 5496, 2087, 2876, 928, 2511, 2529, 2552, 260, 292, 328, 3565, 3593, 3645, 177, 247, 287, 5402, 5426, 4935, 4946, 4958, 2940, 2951, 2962, 2687, 1196, 1209, 3279, 347, 396, 442, 784, 3952, 3973, 4005, 587, 603, 622, 2868, 3185, 3515, 3891, 3913, 3927, 3938, 4471, 4509, 4530, 2821, 2836, 2859, 2843, 2874, 1811, 1840, 1871, 1893, 4451, 5070, 5127, 3402, 5067, 5351, 1816, 1844, 1561, 1584, 1535, 818, 829, 5440, 5468, 2289, 2300, 2311, 2322, 5761, 5772, 5783, 5641, 5652, 5663, 967, 981, 992, 5803, 5814, 2922, 2934, 3706, 2660, 2681, 2704, 4690, 4703, 657, 5549, 5582, 5595, 4977, 5016, 1431, 2332, 5110, 3983, 4023, 4039, 4058, 636, 1679, 2372, 2406, 2417, 658, 709, 720, 3261, 3272, 755, 775, 802, 2, 110, 208, 717, 1032, 1066, 1102, 1131, 3485, 3496, 1154, 3787, 1979, 2696, 2720, 2746, 5707, 5718, 5729, 5296, 5316, 5339, 5577, 5688, 5825, 5836, 5847, 5858, 5745, 5757] +fold 4: +[2007, 2018, 2029, 3314, 3326, 3338, 1786, 1961, 291, 398, 500, 5817, 4988, 5017, 5036, 5054, 4243, 4254, 4265, 1945, 1956, 3719, 3730, 3744, 5386, 5410, 5445, 128, 184, 226, 5218, 5238, 5258, 4868, 5065, 5087, 4973, 5003, 5045, 1740, 2975, 3566, 4093, 1047, 1165, 2117, 788, 852, 863, 874, 1673, 1695, 1709, 1499, 1510, 1521, 2244, 2255, 2266, 4755, 4782, 957, 1397, 5318, 1305, 1316, 1327, 1777, 1788, 1799, 4077, 4092, 4106, 3395, 3407, 3418, 4285, 4296, 4307, 5285, 5307, 5330, 3839, 3851, 3862, 2515, 1425, 2278, 3152, 5531, 5873, 5884, 3108, 3124, 3154, 2714, 4771, 4805, 3669, 3685, 3696, 1621, 1644, 1663, 2042, 2053, 4491, 5225, 5252, 5276, 1550, 1563, 1586, 1029, 1059, 1090, 5058, 3884, 4211, 4235, 3196, 3208, 3219, 4575, 
4596, 4607, 5170, 5181, 5195, 2054, 2862, 3663, 5545, 5561, 5576, 3746, 3762, 3773, 1000, 1011, 1023, 12, 23, 726, 93, 133, 191, 4921, 4740, 5107, 5135, 5157, 4658, 4669, 4681, 4829, 4854, 1442, 1461, 1479, 4121, 4132, 4143, 5187, 4321, 4340, 4361, 2224, 2236, 2882, 3734, 4236, 4628, 884, 895, 906, 917, 1242, 1275, 1291, 1210, 1237, 1254, 1276, 2324, 2336, 2347, 146, 228, 290, 4076, 4420, 4748, 3610, 3650, 3676, 2632, 2650, 3508, 3519, 939, 4154, 4166, 4184, 2277, 4035, 4061, 4074, 1697, 2344, 2909, 672, 683, 694, 1903, 1915, 1926, 5129, 1341, 1352, 1363, 39, 50, 61, 3350, 3568, 3596, 3631, 738, 5505, 5517, 2785, 2808, 2826, 4338, 4362, 4374, 1613, 1635, 1660, 2580, 2598, 2610, 5682, 5693, 4460, 4503, 4526, 1845, 1877, 1724, 1743, 1761, 4620, 4632, 4645, 1974, 1996, 4188, 4208, 4228, 3225, 3236, 3247, 4943, 5095, 5123, 2077, 2088, 2099, 2433, 2445, 2459, 567, 578, 608, 3445, 3456, 3466, 3477, 3527, 3538, 3549, 4819, 4855, 3499, 4032, 4473, 534, 547, 558, 4478, 4501, 3065, 3032, 3048, 1372, 1383, 1394, 2163, 2178, 499, 510, 521, 3079, 3090, 3102, 3362, 3373, 3384, 4551, 4571, 4593, 5352, 5363, 5374, 814, 5306, 1408, 2258, 4394, 85, 109, 155, 1137, 1167, 3943, 3964, 3999, 5607, 5618, 5629, 2972, 2984, 361, 410, 461, 803, 1087, 4990, 2559, 2570, 2586, 4405, 4431, 2188, 2199, 2210, 4719, 4730, 3431, 3442, 4877, 4888, 4900, 2118, 2135, 5471, 5482, 5493, 5506, 1720, 2462, 931, 2502, 2520, 2540, 245, 280, 311, 3583, 3633, 185, 252, 294, 5404, 5429, 4938, 4949, 4961, 2941, 2952, 2963, 2674, 1201, 1220, 3283, 363, 414, 458, 768, 3950, 3971, 4002, 585, 602, 621, 2907, 3238, 3556, 3899, 3919, 3931, 4449, 4483, 4514, 2833, 2853, 2878, 2838, 2870, 1820, 1850, 1876, 4679, 5086, 5154, 3308, 4896, 5329, 1805, 1824, 1858, 1577, 4916, 1527, 1539, 811, 822, 836, 5444, 5469, 2296, 2307, 2318, 5767, 5778, 5789, 5647, 5658, 5669, 1278, 959, 977, 988, 5797, 5808, 5819, 2929, 3703, 2672, 2697, 2725, 4692, 4707, 652, 663, 5078, 5546, 5581, 5594, 4965, 4986, 1528, 2440, 5208, 4020, 4036, 
4054, 639, 1671, 1691, 2384, 2411, 2422, 596, 697, 713, 724, 3266, 756, 778, 804, 88, 186, 637, 1053, 1091, 1124, 3486, 3500, 3788, 1991, 2673, 2700, 2726, 5705, 5716, 5727, 5305, 5328, 5347, 5533, 5644, 5753, 5837, 5848, 5859, 5743, 5755] +fold 5: +[2004, 2015, 2026, 2038, 3318, 3330, 3342, 356, 462, 564, 4997, 5024, 5040, 4241, 4252, 4263, 1944, 1955, 3715, 3726, 3738, 5407, 5439, 140, 192, 235, 5222, 5239, 5259, 4905, 5071, 5091, 4992, 5035, 1751, 2820, 3403, 4001, 1067, 1907, 2407, 795, 853, 864, 875, 1667, 1690, 1703, 1492, 1503, 1514, 2246, 2257, 2269, 4742, 4760, 4785, 846, 1215, 1881, 1296, 1307, 1318, 1329, 1771, 1782, 1793, 4081, 4095, 4109, 3401, 3414, 3425, 4281, 4292, 4303, 5290, 5313, 5336, 3841, 3852, 3864, 2508, 2535, 1342, 2205, 3083, 5524, 5869, 5880, 3115, 3129, 2733, 4761, 4794, 4818, 3649, 3681, 3692, 1617, 1640, 1657, 2044, 2055, 4527, 5221, 5246, 5271, 1543, 1554, 1572, 1594, 1027, 1051, 1083, 1115, 5063, 3875, 3910, 4217, 3197, 3209, 3220, 4568, 4592, 4604, 5169, 5180, 5194, 1625, 2525, 3331, 5540, 5559, 5573, 3743, 3761, 3772, 1009, 1021, 8, 19, 30, 95, 139, 196, 4920, 5103, 5132, 5156, 4653, 4664, 4675, 4822, 4849, 4871, 1451, 1471, 1484, 4125, 4136, 4147, 2897, 4319, 4339, 4359, 2230, 3391, 4019, 4414, 890, 901, 912, 1234, 1264, 1287, 1216, 1241, 1258, 2327, 2339, 2350, 190, 264, 319, 3991, 4356, 4660, 3601, 3637, 3668, 2623, 2640, 3510, 3521, 946, 4156, 4169, 4187, 2272, 2283, 4059, 4071, 1832, 2452, 3019, 673, 684, 695, 1902, 1914, 1925, 5133, 1336, 1347, 1358, 36, 47, 58, 3589, 3623, 3654, 734, 745, 5513, 2760, 2789, 2811, 4341, 4364, 4375, 1600, 1622, 1645, 2593, 2605, 2620, 5678, 5689, 5700, 4470, 4510, 4534, 1836, 1873, 1894, 1723, 1741, 1760, 4619, 4631, 4644, 1973, 1994, 4186, 4207, 4225, 3233, 3244, 3255, 4897, 5092, 5119, 5147, 2079, 2090, 2101, 2426, 2437, 2449, 2470, 571, 588, 618, 3447, 3458, 3468, 3530, 3541, 3552, 4788, 4845, 3380, 4011, 4427, 527, 543, 555, 4461, 4489, 3066, 3033, 3049, 1377, 1388, 1404, 2157, 2171, 502, 
513, 524, 3082, 3094, 3107, 3367, 3378, 3389, 4540, 4558, 4580, 5357, 5368, 5379, 783, 5253, 1938, 2683, 4398, 82, 104, 147, 1139, 1169, 3944, 3970, 4004, 5609, 5620, 5631, 2983, 375, 422, 470, 950, 1432, 5295, 2551, 2564, 2577, 4400, 4428, 4442, 2189, 2200, 2211, 4725, 4739, 3436, 4873, 4884, 4895, 2115, 2127, 5473, 5484, 5495, 1464, 2374, 3206, 926, 2498, 2513, 2530, 2553, 249, 282, 316, 3574, 3614, 3662, 221, 266, 314, 443, 5395, 5416, 4929, 4940, 4951, 2944, 2955, 2966, 1198, 1211, 3280, 334, 382, 429, 764, 3956, 3977, 4012, 592, 609, 628, 2893, 3195, 3535, 3893, 3914, 3928, 4463, 4499, 4523, 2823, 2837, 2861, 2867, 1838, 1868, 1890, 4915, 5104, 3205, 4636, 5284, 1812, 1835, 1570, 1591, 4910, 1525, 1537, 819, 831, 5462, 2293, 2304, 2315, 5762, 5773, 5784, 5639, 5650, 5661, 5672, 961, 978, 989, 5802, 5813, 2919, 2930, 3709, 2663, 2688, 2712, 4695, 4710, 653, 664, 5100, 5570, 5590, 4966, 4989, 1330, 2227, 4547, 3990, 4024, 4041, 645, 1677, 2377, 2408, 2419, 648, 708, 719, 3265, 753, 769, 800, 13, 111, 219, 1037, 1069, 1106, 3487, 3790, 1993, 2689, 2711, 2737, 5709, 5720, 5731, 5302, 5324, 5345, 5522, 5633, 5742, 5879, 5838, 5849, 5860, 5736, 5748] +fold 6: +[2010, 2021, 2033, 3313, 3325, 3337, 1900, 345, 452, 554, 5829, 5015, 5034, 5052, 4247, 4258, 4270, 1942, 1953, 3724, 3735, 3752, 5400, 5425, 5461, 145, 195, 239, 5233, 5251, 5272, 4817, 5051, 5083, 4978, 5010, 5050, 1747, 2441, 3104, 3832, 4171, 1040, 1072, 2020, 776, 796, 859, 870, 881, 1684, 1699, 1714, 1490, 1501, 1512, 2242, 2253, 2264, 4746, 4766, 4795, 1001, 1540, 5437, 1302, 1313, 1324, 1774, 1785, 1796, 4090, 4104, 4116, 3399, 3411, 3422, 4275, 4286, 4297, 4308, 5289, 5310, 5334, 3834, 3845, 3856, 2528, 1669, 2473, 3413, 5537, 5876, 5887, 3121, 3146, 2748, 4758, 4791, 4814, 3671, 3686, 3697, 1610, 1631, 1653, 2045, 2056, 4557, 5219, 5245, 5269, 1547, 1559, 1580, 1039, 1071, 1101, 3867, 3885, 4227, 3192, 3203, 3215, 4550, 4577, 4597, 4608, 5172, 5183, 5196, 2249, 3035, 3863, 5550, 5564, 5584, 3750, 
3764, 3775, 1007, 1019, 5, 16, 27, 102, 148, 206, 4924, 4982, 5120, 5148, 4657, 4668, 4680, 4831, 4856, 1447, 1467, 1482, 4118, 4129, 4140, 4151, 4310, 4322, 4342, 4363, 2220, 2232, 3545, 4085, 4506, 889, 900, 911, 1218, 1249, 1281, 1293, 1221, 1244, 1261, 2335, 2346, 198, 269, 332, 3898, 4290, 4581, 3591, 3629, 3666, 2629, 2646, 3504, 3516, 960, 4164, 4179, 4198, 2273, 2284, 4038, 4062, 4075, 1917, 2506, 3093, 669, 680, 691, 702, 1908, 1919, 1931, 1334, 1345, 1356, 44, 55, 69, 3353, 3562, 3592, 3626, 3656, 736, 747, 5515, 2764, 2792, 2812, 4324, 4348, 4368, 4380, 1599, 1620, 1643, 2589, 2603, 2615, 5684, 5695, 4458, 4497, 4524, 1849, 1879, 1719, 1735, 1757, 4611, 4622, 4633, 4646, 1969, 1985, 4180, 4203, 4223, 3231, 3242, 3253, 5085, 5112, 5141, 2078, 2089, 2100, 2427, 2438, 2450, 568, 579, 610, 3450, 3461, 3471, 3528, 3539, 3550, 4792, 4848, 3217, 3876, 4323, 541, 552, 4480, 4504, 3068, 3029, 3044, 3057, 1369, 1380, 1391, 2162, 2174, 497, 508, 519, 3075, 3086, 3097, 3111, 3365, 3376, 3387, 4545, 4567, 4588, 5359, 5370, 5381, 898, 5420, 1455, 2354, 4383, 4411, 86, 115, 157, 1135, 1162, 3951, 3976, 4013, 5605, 5616, 5627, 2978, 393, 440, 479, 990, 1658, 5447, 2555, 2566, 2578, 4421, 4439, 2192, 2203, 2214, 4724, 4737, 3429, 3440, 4874, 4885, 4898, 2114, 2126, 5475, 5486, 5497, 1807, 2565, 919, 932, 2501, 2518, 2536, 234, 273, 304, 342, 3563, 3590, 3643, 194, 255, 299, 432, 5399, 5423, 4933, 4944, 4955, 2942, 2953, 2964, 1195, 1208, 3278, 317, 371, 423, 777, 3954, 3975, 4008, 589, 605, 624, 2722, 3073, 3369, 3901, 3921, 3932, 4459, 4494, 4521, 2831, 2848, 2873, 2858, 2891, 1828, 1860, 1883, 4549, 5075, 5145, 3112, 4548, 5274, 1804, 1821, 1852, 1566, 1587, 4913, 1526, 1538, 820, 834, 5448, 2297, 2308, 2319, 5770, 5781, 5792, 5646, 5657, 5668, 974, 985, 996, 5798, 5809, 5820, 2928, 2661, 2684, 2707, 4684, 4697, 655, 666, 3979, 5541, 5578, 5593, 4975, 5008, 1225, 2132, 3597, 4000, 4027, 4044, 643, 1678, 2357, 2391, 2413, 2424, 638, 707, 718, 3268, 760, 789, 807, 35, 
132, 231, 1041, 1077, 1113, 3480, 3492, 1474, 1990, 2682, 2706, 2734, 5703, 5714, 5725, 5287, 5308, 5331, 5348, 5566, 5677, 5785, 5842, 5853, 5864, 5738, 5751] +fold 7: +[2005, 2016, 2027, 3321, 3334, 3346, 335, 441, 544, 5823, 5000, 5026, 5042, 4244, 4255, 4266, 1935, 1946, 1958, 3722, 3733, 3751, 5405, 5434, 118, 176, 215, 5216, 5236, 5256, 4826, 5059, 5084, 4994, 5038, 1749, 2575, 3248, 3888, 1057, 1680, 2216, 790, 858, 869, 880, 1686, 1700, 1715, 1493, 1504, 1515, 2248, 2260, 4751, 4770, 4802, 741, 1024, 1614, 5488, 1298, 1309, 1320, 1773, 1784, 1795, 4086, 4097, 4111, 3393, 3405, 3416, 3427, 4278, 4289, 4300, 5279, 5297, 5321, 5343, 3837, 3848, 3860, 2519, 1733, 2627, 3491, 5539, 5877, 5888, 3120, 3141, 2738, 4753, 4786, 4811, 3665, 3683, 3694, 1615, 1638, 1656, 2046, 2057, 4599, 5213, 5237, 5262, 1544, 1555, 1574, 1036, 1068, 1097, 5064, 3872, 3892, 4214, 3190, 3201, 3213, 4559, 4585, 4601, 5162, 5174, 5186, 5199, 1495, 2418, 3228, 5538, 5558, 5571, 3758, 3769, 3781, 1010, 1022, 4, 15, 26, 91, 127, 183, 4919, 5069, 5126, 5153, 4649, 4661, 4672, 4833, 4859, 1449, 1469, 1483, 4123, 4134, 4145, 4313, 4326, 4347, 2229, 3267, 3958, 4385, 894, 905, 916, 1240, 1272, 1290, 1232, 1251, 1271, 2330, 2342, 2353, 203, 275, 338, 3767, 4246, 4565, 3595, 3635, 3667, 2617, 2636, 2657, 3511, 3522, 947, 4155, 4168, 4185, 2276, 4049, 4067, 4082, 1603, 2299, 2902, 675, 686, 698, 1905, 1916, 1927, 1333, 1344, 1355, 1367, 34, 45, 57, 3355, 3575, 3609, 3639, 735, 746, 5514, 2766, 2794, 2813, 4332, 4355, 4371, 1609, 1632, 1655, 2597, 2609, 2626, 5680, 5691, 4467, 4508, 4531, 1856, 1885, 1713, 1727, 1750, 1764, 4618, 4630, 4643, 1968, 1983, 2002, 4175, 4201, 4219, 3229, 3240, 3251, 5077, 5106, 5137, 2085, 2096, 2107, 2434, 2446, 2463, 569, 581, 612, 3453, 3463, 3474, 3531, 3542, 3553, 4777, 4837, 4864, 2914, 3705, 4107, 538, 549, 560, 4469, 4495, 3070, 3039, 3051, 1378, 1389, 1406, 2151, 2168, 2182, 501, 512, 523, 3080, 3091, 3103, 3363, 3374, 3385, 4541, 4560, 4583, 5356, 5367, 5378, 
763, 5220, 1704, 2495, 4387, 78, 97, 138, 1157, 3955, 3982, 4018, 5611, 5622, 5634, 2976, 369, 418, 468, 962, 1529, 5362, 2556, 2567, 2579, 4407, 4432, 2185, 2196, 2207, 2218, 4723, 4736, 3437, 4881, 4892, 4906, 2120, 2143, 5476, 5487, 5498, 1593, 2396, 3316, 927, 2507, 2524, 2548, 267, 296, 337, 3576, 3621, 162, 242, 283, 5394, 5414, 4936, 4947, 4959, 2946, 2957, 2968, 2669, 1199, 1214, 3281, 351, 400, 445, 770, 3942, 3965, 3988, 595, 613, 630, 2830, 3130, 3452, 3906, 3924, 3935, 4445, 4475, 4511, 4532, 2818, 2834, 2855, 2880, 2847, 2879, 1825, 1857, 1880, 4160, 4972, 5114, 3018, 4378, 5242, 5510, 1803, 1819, 1848, 1579, 1530, 1541, 816, 827, 5454, 2294, 2305, 2316, 5768, 5779, 5790, 5648, 5659, 5670, 1273, 975, 986, 997, 5804, 5815, 2921, 2932, 2668, 2692, 2718, 4688, 4702, 651, 662, 5557, 5587, 5598, 4969, 4998, 1120, 2031, 2901, 4007, 4030, 4048, 642, 1681, 2366, 2400, 2415, 667, 710, 721, 3260, 3271, 759, 786, 806, 46, 143, 241, 1061, 1100, 1130, 3488, 1054, 3786, 1995, 2677, 2702, 2729, 5704, 5715, 5726, 5304, 5326, 5346, 5621, 5730, 5868, 5834, 5845, 5856, 5746] +fold 8: +[2014, 2025, 2037, 3320, 3333, 3344, 1841, 324, 431, 533, 4995, 5023, 5039, 4240, 4251, 4262, 1943, 1954, 3721, 3732, 3749, 5390, 5415, 5453, 106, 167, 207, 248, 5224, 5241, 5261, 4797, 5011, 5080, 4976, 5007, 5047, 1745, 2713, 3349, 3980, 1035, 1070, 1978, 2484, 782, 850, 861, 872, 1670, 1693, 1706, 1498, 1509, 1520, 2247, 2259, 4743, 4762, 4790, 824, 1143, 1815, 1300, 1311, 1322, 1776, 1787, 1798, 4088, 4101, 4113, 3394, 3406, 3417, 4277, 4288, 4299, 5280, 5299, 5323, 3831, 3843, 3854, 2533, 1549, 2321, 3309, 5528, 5871, 5882, 3116, 3131, 2719, 4756, 4789, 4813, 3677, 3689, 3700, 1604, 1626, 1647, 2049, 4257, 4689, 5210, 5230, 5257, 1552, 1567, 1590, 1034, 1064, 1095, 5062, 3874, 3907, 4218, 3193, 3204, 3216, 4561, 4587, 4602, 5165, 5176, 5189, 2124, 2905, 3745, 5527, 5552, 5565, 5586, 3759, 3770, 3782, 1005, 1017, 6, 17, 28, 100, 142, 200, 4918, 4886, 5116, 5142, 4651, 4662, 4673, 4843, 
4869, 1438, 1457, 1477, 1487, 4122, 4133, 4144, 4317, 4333, 4354, 2223, 2235, 3619, 4115, 4574, 891, 902, 913, 1236, 1266, 1288, 1229, 1248, 1265, 2329, 2341, 2352, 172, 250, 308, 3587, 4137, 4482, 4787, 3588, 3625, 3664, 2634, 2654, 3506, 3518, 943, 4163, 4178, 4196, 2279, 4050, 4068, 4084, 1331, 2133, 2743, 676, 688, 699, 1896, 1909, 1920, 1337, 1348, 1359, 37, 48, 59, 3570, 3603, 3634, 737, 748, 5516, 2780, 2807, 2822, 4343, 4365, 4376, 1596, 1616, 1639, 1661, 2595, 2608, 2624, 5674, 5685, 5696, 4446, 4474, 4512, 4535, 1853, 1882, 1726, 1748, 1763, 4614, 4625, 4638, 1975, 1997, 4197, 4213, 4234, 3230, 3241, 3252, 5041, 5098, 5128, 2084, 2095, 2106, 2431, 2443, 2456, 573, 594, 623, 3454, 3464, 3475, 3525, 3536, 3547, 3558, 4784, 4844, 4867, 3062, 3812, 4215, 537, 548, 559, 4487, 3069, 3025, 3040, 3054, 1374, 1385, 1400, 2153, 2170, 498, 509, 520, 3076, 3087, 3098, 3361, 3372, 3383, 4543, 4566, 4586, 5354, 5365, 5376, 866, 5373, 1297, 2167, 4388, 80, 101, 141, 1126, 1159, 3948, 3974, 4009, 5603, 5614, 5625, 5637, 2981, 381, 427, 474, 909, 1353, 5263, 2560, 2571, 2588, 4410, 4433, 2186, 2197, 2208, 4716, 4726, 3433, 4879, 4890, 4903, 2119, 2139, 5480, 5491, 5503, 1929, 2635, 925, 2500, 2517, 2534, 240, 277, 307, 3580, 3624, 227, 268, 321, 5406, 4931, 4942, 4954, 2939, 2950, 2961, 2676, 1187, 1202, 1223, 3284, 326, 377, 426, 772, 3945, 3967, 3992, 597, 615, 631, 2664, 3028, 3358, 3911, 3926, 3937, 4447, 4479, 4513, 2827, 2842, 2866, 2845, 2877, 1834, 1866, 1886, 4367, 5021, 5121, 3881, 5193, 5458, 1802, 1818, 1847, 1575, 1533, 815, 826, 5433, 5465, 2292, 2303, 2314, 5760, 5771, 5782, 5793, 5642, 5653, 5664, 968, 982, 993, 5805, 5816, 2927, 3707, 2662, 2685, 2710, 4685, 4698, 654, 665, 5553, 5585, 5597, 4968, 4996, 729, 1731, 2642, 5419, 4010, 4033, 4051, 635, 1674, 2379, 2409, 2420, 626, 705, 716, 3264, 754, 771, 801, 67, 164, 430, 1048, 1084, 1119, 3481, 3493, 1308, 3784, 2679, 2705, 2731, 5711, 5722, 5733, 5288, 5311, 5333, 5555, 5666, 5774, 5833, 5844, 5855, 
5866, 5744, 5756] +fold 9: +[2013, 2024, 2036, 3319, 3332, 3343, 1963, 272, 376, 481, 5830, 5009, 5032, 5048, 4249, 4260, 4272, 1941, 1952, 3716, 3727, 3739, 5408, 5442, 124, 181, 224, 5232, 5250, 5270, 4876, 5068, 5090, 4984, 5027, 5056, 1734, 1754, 2916, 3524, 4055, 1063, 1851, 2333, 785, 855, 867, 878, 1687, 1701, 1717, 1491, 1502, 1513, 2251, 2262, 4752, 4775, 4804, 774, 1065, 1689, 1304, 1315, 1326, 1768, 1779, 1790, 4089, 4102, 4114, 3397, 3409, 3420, 4283, 4294, 4305, 5286, 5309, 5332, 3838, 3850, 3861, 2522, 1257, 2150, 2933, 3869, 5526, 5870, 5881, 3118, 3134, 2727, 4763, 4798, 4820, 3660, 3682, 3693, 1612, 1634, 1654, 2039, 2050, 4334, 4694, 5217, 5243, 5266, 1546, 1557, 1578, 1028, 1056, 1086, 3880, 4222, 3188, 3199, 3211, 4553, 4579, 4598, 5168, 5179, 5192, 1386, 2310, 3144, 3929, 5547, 5562, 5579, 3755, 3766, 3779, 1006, 1018, 3, 14, 25, 727, 79, 114, 169, 4847, 5113, 5140, 4656, 4667, 4678, 4841, 4865, 1440, 1459, 1478, 4124, 4135, 4146, 4318, 4337, 4357, 2231, 2925, 3840, 4301, 886, 897, 908, 1228, 1255, 1284, 1224, 1247, 1262, 2331, 2343, 211, 279, 343, 3652, 4193, 4516, 3584, 3622, 3661, 2625, 2644, 3509, 3520, 942, 4158, 4172, 4190, 2274, 2285, 4053, 4069, 1417, 2176, 2810, 677, 689, 700, 1899, 1912, 1923, 1340, 1351, 1362, 43, 54, 65, 3564, 3594, 3627, 740, 5508, 5519, 2777, 2805, 2819, 4335, 4358, 4372, 1597, 1618, 1641, 2583, 2601, 2612, 5676, 5687, 5699, 4453, 4488, 4520, 1867, 1889, 3138, 1716, 1729, 1752, 4616, 4627, 4640, 1965, 1977, 2000, 4194, 4212, 4233, 3224, 3235, 3246, 5002, 5097, 5125, 2081, 2092, 2103, 2430, 2442, 2454, 570, 584, 614, 3448, 3459, 3469, 3534, 3546, 3557, 4828, 4861, 2904, 3608, 4065, 4498, 529, 545, 556, 4472, 4496, 3061, 3072, 3027, 3042, 3056, 1371, 1382, 1393, 2148, 2164, 2179, 504, 515, 528, 3085, 3096, 3110, 3360, 3371, 3382, 4546, 4569, 4591, 5358, 5369, 5380, 5151, 1582, 2429, 4386, 4412, 74, 92, 126, 1148, 1175, 3957, 3985, 4021, 5608, 5619, 5630, 2979, 364, 413, 466, 857, 1186, 5171, 2545, 2562, 2573, 2592, 
4403, 4430, 2190, 2201, 2212, 4718, 4729, 3438, 4880, 4891, 4904, 2116, 2129, 5481, 5492, 5504, 2009, 2752, 924, 936, 2503, 2521, 2542, 254, 285, 318, 3569, 3607, 3653, 232, 274, 327, 448, 5397, 5422, 4928, 4939, 4950, 2938, 2949, 2960, 1193, 1204, 3275, 310, 367, 417, 463, 761, 3959, 3981, 4014, 582, 599, 616, 633, 2607, 2986, 3323, 3695, 3903, 3922, 3933, 4457, 4490, 4519, 2829, 2846, 2871, 2850, 2881, 1813, 1843, 1872, 4836, 5099, 5164, 3498, 5089, 5383, 1806, 1826, 1862, 1568, 1589, 1524, 1536, 812, 823, 839, 5436, 5466, 2298, 2309, 2320, 5765, 5776, 5787, 5640, 5651, 5662, 1270, 964, 979, 991, 5800, 5811, 5822, 2923, 2675, 2699, 2728, 4686, 4699, 649, 660, 4450, 5574, 5592, 4980, 929, 1831, 2742, 4003, 4028, 4046, 646, 1676, 2369, 2404, 2416, 606, 703, 714, 3262, 3273, 751, 767, 799, 99, 197, 706, 1038, 1073, 1110, 3479, 3490, 1235, 3793, 1982, 2691, 2715, 2739, 5710, 5721, 5732, 5291, 5312, 5335, 5610, 5719, 5857, 5839, 5850, 5861, 5747] +fold 10: +[2008, 2019, 2030, 3322, 3335, 3347, 261, 365, 471, 575, 5828, 4993, 5020, 5037, 4239, 4250, 4261, 4273, 1939, 1950, 3720, 3731, 3747, 5398, 5421, 5459, 149, 199, 243, 5215, 5235, 5254, 5273, 4734, 4962, 5076, 4970, 4999, 5043, 1736, 3043, 3791, 4126, 1060, 1765, 2268, 791, 849, 860, 871, 882, 1682, 1698, 1711, 1494, 1505, 1516, 2250, 2261, 4757, 4783, 971, 1436, 5392, 1299, 1310, 1321, 1769, 1780, 1791, 4083, 4096, 4110, 3400, 3412, 3423, 4284, 4295, 4306, 5294, 5319, 5341, 3833, 3844, 3855, 2510, 2537, 1823, 2703, 3599, 5534, 5874, 5885, 3114, 3125, 2724, 4767, 4803, 4827, 3675, 3688, 3699, 1601, 1623, 1646, 1664, 2047, 2058, 4642, 5207, 5227, 5255, 5277, 1551, 1565, 1588, 1046, 1078, 1108, 5061, 3871, 3890, 4229, 3187, 3198, 3210, 3221, 4556, 4582, 4600, 5173, 5185, 5197, 1755, 2596, 3424, 5543, 5560, 5575, 3753, 3765, 3777, 1008, 1020, 11, 22, 33, 105, 152, 212, 4917, 5102, 5130, 5155, 4652, 4663, 4674, 4816, 4846, 4870, 1452, 1473, 1485, 4120, 4131, 4142, 4316, 4331, 4352, 2222, 2234, 2996, 3882, 4345, 885, 
896, 907, 1245, 1279, 1292, 1213, 1239, 1256, 2325, 2337, 2348, 158, 237, 298, 4040, 4392, 4700, 3578, 3613, 3655, 3678, 2619, 2637, 3512, 945, 4153, 4165, 4181, 4200, 2280, 4057, 4070, 1277, 2065, 2643, 670, 681, 692, 1897, 1910, 1921, 5149, 1338, 1349, 1360, 41, 52, 63, 3354, 3581, 3615, 3647, 733, 744, 5512, 2756, 2787, 2809, 4328, 4350, 4369, 1611, 1633, 1659, 2582, 2600, 2611, 5681, 5692, 4455, 4492, 4522, 1839, 1875, 1725, 1746, 1762, 4613, 4624, 4635, 1972, 1992, 4191, 4210, 4230, 3227, 3239, 3250, 5072, 5105, 5134, 2082, 2093, 2104, 2428, 2439, 2451, 565, 576, 601, 632, 3449, 3460, 3470, 3526, 3537, 3548, 4780, 4840, 4866, 3007, 3756, 4148, 542, 553, 4476, 4500, 3071, 3034, 3050, 1368, 1379, 1390, 1407, 2149, 2166, 2180, 506, 517, 535, 3081, 3092, 3106, 3366, 3377, 3388, 4536, 4552, 4573, 5353, 5364, 5375, 840, 5340, 1226, 2109, 2841, 4390, 72, 90, 121, 1146, 1173, 3939, 3962, 3989, 5604, 5615, 5626, 2977, 352, 402, 451, 833, 1121, 5122, 2561, 2572, 2590, 4417, 4438, 2193, 2204, 2215, 4722, 4733, 3430, 3441, 4883, 4894, 4908, 2121, 5478, 5489, 5500, 1364, 2288, 3137, 921, 933, 2509, 2527, 2550, 263, 293, 331, 3567, 3600, 3648, 154, 238, 278, 330, 5391, 5412, 4930, 4941, 4952, 2947, 2958, 2969, 2680, 1191, 1203, 1227, 341, 389, 436, 766, 3941, 3963, 3986, 593, 611, 629, 2790, 3113, 3434, 3909, 3925, 3936, 4454, 4486, 4517, 2825, 2839, 2864, 2863, 2892, 1817, 1846, 1874, 4778, 5096, 5160, 3800, 5161, 5428, 1809, 1829, 1564, 1585, 4912, 1531, 813, 825, 845, 5457, 2291, 2302, 2313, 5764, 5775, 5786, 5645, 5656, 5667, 1282, 956, 976, 987, 998, 5799, 5810, 5821, 2926, 3708, 2659, 2678, 2701, 2730, 4693, 4708, 659, 5551, 5583, 5596, 4964, 4983, 1636, 2543, 5317, 4022, 4037, 4056, 644, 1683, 2381, 2410, 2421, 687, 712, 723, 3269, 762, 792, 808, 24, 122, 220, 1045, 1081, 1116, 3483, 3494, 1375, 3789, 1987, 2671, 2698, 2723, 2749, 5712, 5723, 5734, 5293, 5314, 5337, 5588, 5698, 5835, 5840, 5851, 5862, 5740, 5754] diff --git a/Data/d32_updated.xlsx 
b/Data/d32_updated.xlsx new file mode 100644 index 0000000..662d186 Binary files /dev/null and b/Data/d32_updated.xlsx differ diff --git a/Data/d36_folds.txt b/Data/d36_folds.txt new file mode 100644 index 0000000..4b699b6 --- /dev/null +++ b/Data/d36_folds.txt @@ -0,0 +1,20 @@ +fold 1: +[8660, 8686, 8708, 8730, 14435, 14449, 14478, 14507, 14497, 15242, 15315, 15399, 14288, 14311, 15305, 15364, 15419, 13769, 14002, 14035, 14054, 14083, 12024, 12071, 12118, 12723, 12849, 12645, 12818, 12952, 13611, 13647, 13690, 13069, 13133, 15520, 15554, 6975, 7019, 7068, 7102, 8048, 8192, 12953, 10047, 14084, 14150, 14183, 6706, 6740, 6760, 6780, 6582, 6622, 7288, 7305, 7327, 11821, 11836, 9723, 9752, 9776, 10620, 10679, 10738, 10776, 7149, 7185, 7226, 8447, 8477, 8517, 11928, 12933, 7161, 7201, 7236, 7262, 6662, 6673, 6685, 11045, 11156, 11267, 11841, 7196, 7249, 7271, 10555, 10645, 10782, 6972, 7056, 7107, 7128, 11061, 11191, 11498, 11895, 9070, 9177, 9279, 9351, 7531, 7565, 7602, 5893, 5904, 6913, 6937, 8528, 8551, 8570, 7544, 7599, 7645, 14696, 14707, 14718, 5956, 14085, 14142, 14800, 9145, 9238, 9366, 11032, 11126, 11282, 11851, 10094, 10161, 10208, 5944, 6401, 6628, 6874, 7635, 7677, 7700, 6974, 7147, 7320, 7403, 12343, 12495, 12619, 8913, 9001, 9078, 6880, 6904, 6922, 13021, 14590, 14602, 8944, 9044, 9126, 9188, 11687, 11723, 11760, 6988, 7061, 8206, 14482, 14512, 7626, 8006, 9156, 9206, 9261, 12340, 13539, 13579, 10098, 10169, 10218, 15428, 15493, 15521, 15556, 8790, 8855, 8927, 8988, 6222, 6318, 6431, 13450, 14452, 15189, 9502, 9544, 9571, 13277, 10644, 10701, 10739, 10781, 13469, 13544, 7414, 7443, 7481, 8389, 11656, 13584, 13644, 13693, 10267, 10332, 10401, 10465, 9317, 9383, 9438, 9478, 6177, 6248, 6299, 6364, 10502, 10614, 10928, 11120, 11279, 11614, 11719, 6971, 7080, 8134, 8264, 9419, 9457, 9488, 9510, 7011, 7072, 8212, 7648, 7950, 8188, 6162, 6242, 6436, 6504, 13986, 8915, 8984, 9049, 9110, 6850, 6865, 6891, 10929, 11888, 11899, 9979, 9990, 10002, 10013, 9925, 
10097, 10159, 10818, 8774, 14038, 14603, 8306, 8334, 9649, 10824, 10855, 10879, 14915, 14974, 15022, 7882, 7985, 8063, 12205, 12515, 10310, 10373, 10427, 8519, 8552, 8576, 6801, 6812, 6823, 11171, 11319, 11406, 8678, 8700, 8725, 6384, 6470, 6521, 7590, 7629, 7652, 9884, 8583, 8867, 10697, 10786, 7517, 12298, 12379, 12954, 9067, 9159, 9222, 12396, 12522, 12633, 11783, 13093, 15496, 15540, 14145, 8742, 8753, 8766, 6944, 7957, 8074, 8216, 11634, 6302, 6383, 6434, 6479, 6942, 7024, 8161, 8256, 14792, 14251, 14263, 11060, 11135, 11238, 8430, 8463, 8487, 8801, 8903, 8973, 9043, 13912, 15178, 12426, 12549, 12703, 10971, 10982, 10994, 11005, 10259, 10300, 10361, 8089, 8163, 8277, 7931, 7976, 8036, 8596, 8607, 10516, 10565, 10602, 5909, 5920, 5931, 5943, 8798, 8878, 9219, 6731, 6761, 6783, 11039, 11137, 11234, 9827, 9850, 9878, 10204, 10242, 10264, 13418, 14880, 15266, 6382, 7796, 7839, 7874, 12498, 12696, 8616, 8817, 9164, 9656, 10026, 10160, 10656, 11611, 11644, 10372, 10492, 11664, 11764, 11787, 13455, 13507, 7374, 7385, 6123, 6149, 6172, 6298, 6457, 6537, 6072, 6099, 6127, 12347, 12972, 8866, 8970, 9056, 9135, 10821, 10837, 10854, 10870, 7897, 7959, 8002, 8055, 10532, 11161, 7692, 7723, 7773, 7047, 7605, 7834, 8076, 6580, 6600, 6621, 6647, 8793, 12213, 12439, 8802, 8881, 7914, 7968, 8009, 8082, 9715, 9736, 13076, 13119, 13171, 13214, 8328, 8351, 8386, 8418, 14347, 14363, 6216, 6312, 6423, 6147, 6223, 6281, 11099, 11271, 11621, 9278, 9323, 9360, 9398, 11323, 11382, 11437, 9807, 9830, 9851, 9866, 12940, 6259, 6435, 6524, 6563, 11162, 11285, 11373, 11435, 6957, 8145, 8220, 8276, 6031, 6070, 6091, 6117, 7775, 7837, 7888, 7949, 12341, 12448, 12540, 15338, 15397, 15467, 6581, 6631, 6707, 6735, 11130, 11338, 11430, 10362, 10420, 10474, 11595, 11637, 7736, 7760, 7804, 13699, 14463, 14863, 14931, 13199, 7285, 7317, 7339, 7360, 11248, 11343, 11413, 13187, 13261, 13344, 10021, 10083, 10276, 10829, 11478, 9257, 9334, 9424, 9459, 10573, 10624, 10673, 11491, 14921, 14973, 15020, 
10059, 10130, 10184, 6412, 12802, 12911, 13026, 7416, 8064, 8123, 8169, 9940, 9951, 9962, 9973, 10342, 10436, 9585, 9615, 9647, 9690, 6154, 6212, 6276, 6363, 5994, 6005, 6017, 6028, 9702, 10892, 10903, 10913, 13190, 13268, 10674, 10749, 10792, 10809, 14242, 8388, 8912, 9175, 9578, 11255, 11334, 11421, 11463, 15272, 15348, 12407, 12517, 9751, 9774, 9790, 9792, 10085, 10775, 14881, 8425, 11106, 11215, 11321, 9497, 9520, 9538, 9555, 13832, 13890, 13935, 14380, 9882, 9903, 9923, 9934, 10311, 10453, 10732, 12598, 12688, 12282, 12392, 12488, 12977, 13840, 13895, 13930, 12669, 12855, 7764, 7831, 7881, 13753, 13790, 13844, 13894, 12021, 12083, 12181, 13310, 13390, 13461, 6314, 6476, 6554, 11668, 11688, 11738, 12796, 15181, 15202, 15239, 15312, 11913, 12777, 12947, 13949, 13978, 14001, 11825, 11850, 11861, 11872, 13605, 13655, 13712, 14541, 14553, 14565, 14576, 12027, 12114, 12191, 12294, 13427, 13484, 13546, 11935, 11984, 12018, 13049, 13188, 13262, 13349, 14984, 15053, 15092, 12865, 15203, 15289, 15385, 14620, 14633, 14646, 12232, 12564, 12759, 12913, 13016, 14996, 15061, 15112, 15141, 14277, 11510, 11533, 11553, 13395, 13426, 13472, 14371, 14394, 14405, 14417, 8310, 9570, 9608, 9685, 9678, 11952, 11997, 12065, 13741, 14329, 12225, 12478, 12691, 12899, 14653, 14665, 14676, 14687, 13788, 13848, 13903, 13571, 14328, 15008, 12156, 12266, 13657, 13708, 13737, 12862, 8321, 9611, 9658, 11483, 11521, 11549, 11576, 15162, 7329, 14227, 14096, 14171, 14213, 13970, 14006, 14036, 12228, 13236, 13913, 14544, 14864, 13072, 13111, 13158, 14097, 14149, 14187, 14220, 13745, 14554, 15051, 13589, 14402, 14856, 15524, 8622, 8633, 8645, 12199, 12293, 15082, 15134, 14758, 14770, 14791, 12090, 12151, 12234, 12411, 12637, 12819, 13201, 13280, 13348, 8561, 8825, 9252, 13798, 13855, 14866, 14992, 15078, 13121, 13249, 13339, 14733, 14746, 15213, 15275, 8355, 8384, 11919, 11965, 12720, 13050, 14660, 14874, 14962, 12077, 12143, 12235, 12507, 12657, 12797, 12919, 15204, 15320, 15402, 15481, 12975, 
12577, 12709, 14766, 14908] +fold 2: +[8672, 8696, 8717, 14434, 14448, 14474, 14504, 6833, 14462, 14528, 15251, 15343, 15412, 14290, 14312, 15290, 15347, 15409, 13772, 11802, 14026, 14049, 14071, 12033, 12080, 12127, 12652, 12801, 12545, 12715, 12869, 12988, 13607, 13641, 13685, 13055, 13112, 15508, 15542, 6956, 7007, 7054, 7096, 8068, 8204, 12976, 10035, 14103, 14165, 14192, 6710, 6743, 6762, 6782, 6572, 6615, 6660, 7291, 7309, 7332, 11813, 11826, 11842, 9714, 9743, 9769, 9798, 10643, 10705, 10748, 7139, 7168, 7207, 8443, 8474, 8514, 12263, 7183, 7220, 7253, 6648, 6666, 6677, 6692, 11029, 11133, 11239, 11643, 7234, 7268, 7279, 10559, 10661, 10788, 6982, 7063, 7108, 7131, 11131, 11326, 11651, 9079, 9185, 9284, 9357, 7524, 7556, 7592, 5897, 5910, 6926, 8515, 8542, 8560, 8581, 7508, 7559, 7618, 7656, 14699, 14710, 14721, 5949, 5966, 14118, 14785, 8840, 9173, 9273, 9397, 11038, 11134, 11337, 10082, 10151, 10197, 10237, 6350, 6535, 6809, 7622, 7669, 7691, 7732, 7005, 7180, 7350, 7407, 12374, 12523, 12643, 8863, 8957, 9040, 9113, 6864, 6887, 6911, 6928, 14583, 14595, 14611, 9035, 9116, 9182, 11714, 11751, 6970, 7046, 8186, 8282, 14479, 14511, 14535, 6995, 7875, 8239, 9167, 9218, 9268, 13014, 13542, 13586, 10074, 10158, 10210, 15463, 15507, 15537, 8823, 8892, 8955, 6211, 6304, 6411, 13602, 14837, 15254, 9484, 9528, 9564, 13220, 13303, 10632, 10687, 10731, 10777, 13452, 13523, 13592, 7394, 7428, 7464, 7503, 8358, 8415, 11647, 8904, 13629, 13678, 13723, 10273, 10338, 10404, 10470, 9361, 9417, 9464, 6214, 6260, 6324, 10491, 10584, 10721, 11104, 11245, 11411, 11691, 11730, 7030, 7106, 8183, 9425, 9461, 9490, 6955, 7033, 7095, 8255, 7495, 7854, 8117, 6202, 6358, 6474, 8977, 9034, 9096, 6844, 6855, 6873, 6903, 10927, 11887, 11898, 9980, 9991, 10003, 10014, 10033, 10121, 10205, 8768, 14137, 14703, 8301, 8329, 9642, 9688, 10813, 10843, 10875, 10886, 14937, 14990, 15036, 7861, 7962, 8047, 12325, 12556, 10335, 10394, 10452, 8475, 8525, 8554, 8578, 6795, 6806, 6817, 6828, 11149, 
11288, 11388, 8656, 8684, 8709, 8731, 6395, 6477, 6525, 7572, 7621, 7644, 9908, 8399, 8851, 9535, 10727, 12237, 12363, 12867, 8967, 9098, 9183, 9239, 12409, 12530, 12659, 11778, 11798, 13070, 15470, 15519, 14116, 14200, 8737, 8748, 8759, 6954, 7966, 8084, 8228, 11624, 6336, 6404, 6450, 7008, 8141, 8244, 14790, 14256, 14269, 11041, 11123, 11221, 11862, 8428, 8462, 8486, 8839, 8923, 9002, 13052, 14858, 15393, 12382, 12511, 12628, 10974, 10985, 10996, 11007, 10250, 10290, 10341, 8053, 8129, 8223, 7916, 7964, 8016, 8598, 8609, 10526, 10571, 10608, 8378, 5911, 5921, 5932, 5945, 8754, 8859, 9023, 6722, 6757, 6779, 11071, 11174, 11256, 9818, 9841, 9872, 10180, 10232, 10255, 10280, 13283, 13613, 15125, 6047, 7772, 7822, 7858, 7898, 12475, 12667, 8702, 8836, 9241, 9740, 10055, 10344, 10840, 11608, 11641, 10432, 10533, 11704, 11765, 11789, 13414, 13471, 13527, 7366, 7377, 7388, 6103, 6132, 6157, 6184, 6253, 6389, 6518, 6561, 6039, 6077, 6104, 6133, 12048, 12390, 8853, 8961, 9045, 9127, 14425, 10831, 10847, 10864, 7910, 7971, 8015, 13521, 10504, 10545, 11114, 11200, 7680, 7705, 7743, 7811, 7451, 7704, 7939, 8187, 6595, 6616, 6641, 8723, 12376, 12910, 8828, 8908, 7945, 7988, 8037, 9726, 13086, 13130, 13180, 13223, 8324, 8348, 8383, 8413, 14352, 14367, 6221, 6320, 6432, 6182, 6246, 6316, 11015, 11175, 11422, 11819, 9286, 9331, 9369, 11296, 11356, 11414, 11467, 9828, 9849, 9864, 12890, 6353, 6501, 6546, 11119, 11272, 11355, 11419, 8118, 8195, 8257, 6049, 6079, 6100, 6429, 7787, 7844, 7900, 12297, 12408, 12502, 15324, 15384, 15456, 6588, 6636, 6709, 6738, 10950, 11190, 11362, 11744, 10354, 10411, 10462, 11588, 11626, 7733, 7758, 7800, 13802, 14582, 14870, 14939, 13247, 7298, 7326, 7348, 7364, 11236, 11339, 11408, 13177, 13254, 13337, 10011, 10076, 10227, 10765, 11484, 9247, 9330, 9421, 9456, 10581, 10629, 10680, 14933, 14983, 15024, 10024, 10105, 10157, 10785, 6430, 12812, 12931, 13033, 7418, 8075, 8126, 8175, 9945, 9956, 9967, 10334, 10433, 8295, 9607, 9637, 9674, 6146, 6204, 
6271, 6354, 5993, 6004, 6016, 6027, 10889, 10900, 10910, 13225, 13296, 10681, 10755, 10793, 8489, 8942, 9263, 9686, 11274, 11354, 11436, 11470, 15314, 15375, 12399, 12513, 9748, 9772, 9788, 9805, 10020, 10114, 14836, 14900, 8398, 8442, 11087, 11201, 11300, 11390, 9512, 9529, 9545, 13801, 13856, 13908, 13946, 14374, 14391, 9897, 9917, 9931, 10367, 10494, 12625, 12702, 12348, 12445, 12572, 13836, 13889, 13927, 12607, 12800, 7706, 7794, 7852, 7918, 13760, 13808, 13859, 11988, 12055, 12132, 13345, 13401, 13488, 6329, 6492, 6559, 11677, 11712, 11754, 12882, 15184, 15206, 15250, 11921, 12791, 12964, 13951, 13979, 14003, 11849, 11860, 11871, 13581, 13642, 13707, 14547, 14558, 14569, 12088, 12166, 12259, 13439, 13498, 13556, 11939, 11986, 12023, 13168, 13251, 13341, 14956, 15030, 15079, 15111, 12754, 12904, 15207, 15292, 15396, 14626, 14639, 12173, 12284, 12500, 12710, 12853, 12992, 15015, 15080, 15121, 14283, 11496, 11524, 11545, 13392, 13423, 13466, 14385, 14400, 14411, 8319, 9574, 9619, 11958, 12001, 12072, 13713, 14330, 12249, 12551, 12765, 14644, 14662, 14673, 14684, 13762, 13814, 13870, 13937, 13172, 13891, 14516, 15465, 12211, 13667, 13715, 13749, 8305, 9592, 9636, 9683, 11481, 11519, 11546, 11574, 15137, 15176, 14225, 14237, 14074, 14157, 14199, 13960, 13995, 14031, 12256, 13550, 14159, 14818, 13054, 13091, 13137, 15544, 14111, 14158, 14194, 14224, 13306, 14205, 14847, 15454, 13162, 14065, 14821, 14895, 8627, 8638, 8655, 12230, 15117, 15153, 14762, 14778, 14805, 7170, 12053, 12119, 12194, 12285, 12368, 12592, 12785, 12948, 12193, 13148, 13238, 13312, 13367, 8333, 8779, 9020, 9458, 13835, 13906, 14917, 15013, 15107, 13138, 13257, 13347, 14730, 14743, 15163, 15249, 15327, 8349, 8377, 11954, 12650, 12934, 14588, 14868, 14958, 12129, 12216, 12462, 12603, 12760, 12872, 11922, 15258, 15357, 15443, 12583, 12738, 14839, 14927] +fold 3: +[8651, 8676, 8699, 8721, 14439, 14457, 14487, 6832, 14502, 15247, 15322, 15405, 14275, 14298, 15323, 15383, 15445, 13765, 13805, 11805, 
14032, 14053, 14080, 12002, 12046, 12094, 12146, 11666, 12789, 12905, 12631, 12805, 12944, 13577, 13617, 13650, 13694, 13095, 15504, 15533, 6999, 7052, 7094, 8122, 8259, 12963, 10106, 14095, 14162, 14189, 6727, 6752, 6771, 6590, 6632, 7300, 7318, 11822, 11837, 9716, 9747, 9771, 10625, 10685, 10740, 7127, 7160, 7197, 7245, 8432, 8466, 8501, 12310, 7156, 7198, 7235, 7261, 6640, 6663, 6674, 6687, 11103, 11210, 11303, 7215, 7258, 7274, 10456, 10570, 10671, 10791, 7034, 7091, 7122, 11090, 11226, 11572, 9111, 9214, 9304, 7512, 7538, 7577, 7611, 5898, 6883, 6927, 8534, 8555, 8575, 7546, 7610, 7650, 14700, 14711, 14722, 5954, 14113, 14793, 9062, 9202, 9315, 11062, 11159, 11476, 10112, 10167, 10215, 6024, 6495, 6745, 7632, 7674, 7699, 7026, 7191, 7381, 7411, 12317, 12468, 12593, 12743, 8841, 8939, 9024, 9099, 6869, 6892, 6914, 6933, 13037, 14587, 14600, 14617, 8986, 9077, 9155, 9210, 11711, 11749, 6953, 7031, 8166, 8268, 14469, 14498, 14523, 7737, 8150, 9148, 9201, 9256, 12400, 13525, 13572, 9804, 10124, 10187, 10236, 15472, 15510, 15539, 8834, 8907, 8966, 6207, 6293, 6405, 13541, 14825, 15243, 9489, 9532, 9565, 13285, 10672, 10719, 10763, 13458, 13530, 7409, 7440, 7476, 14354, 8362, 8444, 11605, 8887, 13626, 13673, 13722, 10289, 10360, 10423, 10489, 9290, 9367, 9422, 9468, 6188, 6251, 6306, 6373, 10455, 10572, 10692, 11042, 11173, 11315, 11708, 11743, 6959, 7075, 8120, 8251, 9411, 9452, 9485, 9507, 7004, 7065, 8200, 7439, 7824, 8087, 8281, 6155, 6230, 6426, 6500, 13983, 8926, 8989, 9059, 9117, 6848, 6860, 6884, 10922, 11882, 11893, 11904, 9987, 9998, 10010, 9905, 10091, 10154, 10722, 8777, 14534, 8311, 9616, 9657, 10811, 10836, 10868, 10883, 14922, 14980, 15025, 7853, 7953, 8039, 12385, 12609, 10282, 10340, 10403, 10458, 8516, 8547, 8574, 6792, 6803, 6814, 6825, 11053, 11212, 11349, 11426, 8679, 8704, 8727, 6334, 6428, 6502, 6534, 7560, 7617, 7641, 7664, 9902, 8490, 8860, 9634, 10736, 11977, 12322, 12398, 8997, 9105, 9189, 9242, 12312, 12473, 12579, 12708, 11795, 13067, 
15462, 15517, 15561, 14101, 14173, 8739, 8750, 8761, 7001, 8012, 8147, 11604, 6308, 6391, 6438, 6483, 6973, 7066, 8207, 8286, 14253, 14265, 11035, 11108, 11213, 11791, 8441, 8471, 8495, 8780, 8882, 8960, 9030, 13203, 14923, 15534, 12410, 12536, 12680, 10979, 10990, 11002, 10247, 10284, 10336, 8078, 8154, 8266, 7885, 7947, 7994, 8059, 8593, 8604, 10522, 10568, 10605, 8480, 5917, 5928, 5939, 8807, 8883, 9285, 6728, 6759, 6781, 11080, 11198, 11884, 9829, 9854, 9881, 10166, 10225, 10252, 10275, 13144, 13501, 15028, 15434, 6301, 7746, 7808, 7847, 7884, 12447, 12617, 8429, 8813, 9119, 9623, 9815, 10077, 10477, 10895, 11581, 11613, 11646, 10457, 10549, 11720, 11769, 11794, 13465, 13517, 7375, 7386, 6109, 6138, 6161, 6192, 6238, 6359, 6507, 6553, 6062, 6090, 6118, 11968, 12377, 8941, 9029, 9109, 14424, 10827, 10844, 10860, 7933, 7981, 8027, 13766, 10517, 11105, 11192, 7676, 7698, 7735, 7799, 7429, 7671, 7920, 8128, 6584, 6605, 6624, 6655, 8459, 9601, 12344, 12793, 8862, 7951, 7992, 8043, 9721, 9744, 13103, 13150, 13197, 8347, 8376, 8410, 14350, 14364, 6194, 6287, 6390, 6142, 6219, 6274, 11001, 11154, 11348, 11763, 9267, 9316, 9353, 9387, 11311, 11367, 11424, 11471, 9822, 9844, 9861, 6323, 6493, 6542, 11222, 11308, 11392, 8101, 8174, 8240, 6055, 6083, 6105, 6494, 7761, 7827, 7872, 7937, 12329, 12437, 12528, 15279, 15353, 15425, 15479, 6593, 6644, 6712, 6741, 11086, 11306, 11416, 10329, 10389, 10441, 10497, 11598, 11640, 7727, 7756, 7797, 13994, 14814, 14892, 13191, 13286, 7311, 7336, 7357, 11228, 11331, 11403, 13216, 13284, 13361, 9836, 10052, 10110, 10560, 11473, 11513, 9305, 9405, 9441, 10566, 10615, 10663, 14911, 14968, 15014, 10067, 10134, 10189, 12769, 12880, 13002, 7420, 8098, 8140, 8205, 9943, 9954, 9965, 9976, 10325, 10421, 9593, 9620, 9654, 6183, 6257, 6319, 5998, 6009, 6020, 9710, 10899, 10909, 13173, 13252, 13335, 10713, 10779, 10801, 14243, 8550, 9009, 9314, 11247, 11327, 11410, 11461, 15252, 15337, 12324, 12457, 9762, 9783, 9799, 10029, 10123, 14844, 8431, 
11050, 11170, 11270, 11371, 9506, 9526, 9542, 13793, 13850, 13905, 13941, 14386, 9899, 9920, 9932, 10350, 10475, 10771, 12581, 12674, 12315, 12414, 12510, 13862, 13911, 13947, 12548, 12752, 12943, 7749, 7825, 7871, 13775, 13831, 13877, 12041, 12107, 13338, 13396, 13474, 6407, 6526, 11665, 11684, 11731, 12690, 12935, 15185, 15209, 15257, 12744, 12912, 13038, 13955, 13981, 14007, 11848, 11859, 11870, 13533, 13612, 13669, 13719, 14548, 14559, 14570, 12036, 12121, 12201, 13434, 13490, 13552, 11930, 11981, 12016, 13043, 13127, 13211, 13299, 13369, 14971, 15043, 15086, 12737, 12891, 15195, 15280, 15373, 14619, 14630, 14645, 12210, 12687, 12844, 12987, 15001, 15064, 15116, 15144, 14287, 11500, 11526, 11547, 13376, 13410, 13448, 13497, 14373, 14395, 14406, 14418, 8297, 9550, 9594, 9660, 9664, 11923, 11985, 12043, 13030, 13736, 14327, 12438, 12636, 12831, 14656, 14667, 14678, 13770, 13822, 13882, 13948, 13777, 14361, 15309, 12167, 12272, 13661, 13711, 13742, 12885, 8318, 9605, 9653, 11511, 11534, 11566, 15132, 15171, 14230, 14081, 14164, 14206, 14055, 13992, 14028, 13470, 14121, 14796, 15545, 13057, 13096, 13141, 15550, 14114, 14163, 14198, 13691, 14433, 15018, 13881, 14741, 14883, 8618, 8629, 8641, 8661, 12160, 12257, 15100, 15142, 14761, 14774, 14802, 12074, 12138, 12217, 12309, 12555, 12749, 12908, 12165, 13210, 13287, 13355, 8605, 8858, 9303, 13841, 13910, 14954, 15049, 15122, 13204, 13304, 14739, 15164, 15256, 15367, 8360, 11929, 11974, 12748, 14820, 14907, 14972, 12086, 12152, 12246, 12524, 12675, 12807, 11910, 10319, 15237, 15341, 15429, 15498, 11931, 12537, 12642, 14893] +fold 4: +[8674, 8698, 8719, 14429, 14442, 14465, 14495, 14490, 15208, 15278, 15360, 15423, 14282, 14304, 15273, 15336, 15400, 15460, 13755, 13791, 14023, 14048, 14067, 12006, 12051, 12100, 12153, 11667, 12739, 12858, 12596, 12776, 12903, 13609, 13646, 13687, 13060, 13116, 15511, 15547, 6969, 7014, 7062, 7101, 8176, 11936, 10072, 14130, 14177, 6698, 6733, 6756, 6775, 6608, 6653, 7290, 7307, 7330, 
11816, 11831, 9701, 9731, 9758, 9785, 10657, 10718, 10753, 7146, 7177, 7221, 8426, 8464, 8496, 12357, 7153, 7194, 7230, 7259, 6659, 6671, 6682, 11018, 11122, 11232, 11582, 7188, 7248, 7270, 7281, 10478, 10585, 10684, 10797, 7028, 7088, 7119, 11034, 11167, 11434, 11772, 9160, 9262, 9337, 7526, 7558, 7594, 5894, 5905, 6916, 6938, 8518, 8544, 8563, 8582, 7533, 7582, 7633, 7668, 14697, 14708, 14719, 5957, 14079, 14132, 14783, 8824, 9166, 9264, 9388, 11026, 11116, 11241, 11781, 10108, 10164, 10212, 5890, 6360, 6575, 6841, 7613, 7665, 7689, 7724, 7125, 7306, 7401, 12268, 12440, 12568, 12707, 8894, 8983, 9064, 6870, 6893, 6915, 6935, 12949, 14584, 14596, 14613, 8964, 9061, 9137, 9198, 11709, 11745, 6947, 7025, 8155, 8260, 14477, 14509, 14532, 7550, 7961, 9114, 9178, 9227, 9280, 12314, 13510, 13557, 10000, 10136, 10199, 15436, 15494, 15525, 15560, 8799, 8861, 8933, 8991, 6203, 6285, 6394, 6488, 13298, 13812, 15040, 15497, 9476, 9524, 9562, 13260, 13327, 10652, 10707, 10750, 13428, 13499, 13564, 7404, 7435, 7472, 7513, 14343, 8403, 11660, 13576, 13640, 13689, 10324, 10393, 10460, 9354, 9412, 9462, 6220, 6266, 6328, 10496, 10591, 11112, 11259, 11466, 11702, 11739, 7021, 7098, 8167, 9434, 9470, 9495, 6976, 7044, 8162, 8274, 7158, 7715, 8017, 8229, 6208, 6372, 6480, 8958, 9013, 9082, 6849, 6862, 6888, 10925, 11886, 11897, 9984, 9995, 10007, 9770, 10066, 10147, 10550, 8769, 14453, 8315, 9621, 9661, 10830, 10861, 10881, 14930, 14985, 15032, 7836, 7932, 8023, 8103, 12307, 12533, 10313, 10379, 10434, 8491, 8535, 8562, 8585, 6796, 6807, 6818, 6829, 11179, 11328, 11412, 8673, 8693, 8718, 6409, 6496, 6530, 7548, 7600, 7634, 7657, 9904, 8787, 8885, 10751, 7573, 12308, 12383, 12990, 9091, 9170, 9233, 12384, 12509, 12622, 11793, 13089, 15491, 15536, 14151, 8738, 8749, 8760, 7040, 8049, 8181, 11629, 6326, 6399, 6446, 6968, 7050, 8196, 8279, 14815, 14254, 14266, 11067, 11146, 11244, 8433, 8465, 8492, 8865, 8946, 9019, 13082, 14891, 15487, 12397, 12525, 12638, 10973, 10984, 10995, 11006, 
10257, 10298, 10352, 8031, 8116, 8203, 7924, 7974, 8029, 8591, 8602, 8613, 10513, 10561, 10597, 9415, 5916, 5927, 5938, 8369, 8822, 8891, 9336, 6711, 6748, 6772, 6789, 11089, 11202, 9814, 9835, 9865, 9893, 10156, 10221, 10251, 10274, 13065, 13440, 14935, 15339, 6451, 7740, 7802, 7842, 7880, 12585, 8387, 8805, 9054, 9557, 9906, 10087, 10579, 11594, 11627, 10408, 10515, 11663, 11752, 11782, 13421, 13476, 13534, 7367, 7378, 7389, 6126, 6150, 6175, 6247, 6375, 6513, 6558, 6043, 6080, 6107, 6137, 12331, 12936, 8952, 9039, 9118, 10816, 10834, 10850, 10867, 7905, 7967, 8010, 14257, 10525, 11151, 7683, 7709, 7747, 7820, 7562, 7803, 8056, 6587, 6607, 6627, 6657, 8409, 9481, 12362, 12825, 8848, 7941, 7982, 8033, 9720, 9742, 13090, 13134, 13184, 8340, 8366, 8400, 14356, 6185, 6278, 6378, 6485, 6158, 6229, 6288, 11110, 11304, 11696, 9253, 9306, 9348, 9380, 11329, 11384, 11447, 9825, 9845, 9863, 11770, 6425, 6522, 6560, 11184, 11290, 11378, 8108, 8180, 8246, 6052, 6081, 6102, 6482, 7771, 7833, 7879, 7944, 12306, 12419, 12508, 15269, 15351, 15417, 15473, 6579, 6629, 6705, 6732, 11036, 11243, 11393, 10359, 10413, 10468, 11606, 11645, 7714, 7745, 7785, 7816, 14098, 14822, 14898, 13212, 7308, 7333, 7356, 11268, 11357, 11427, 13139, 13228, 13300, 13371, 10036, 10096, 10376, 11489, 9282, 9352, 9431, 9467, 10531, 10586, 10635, 10690, 14940, 14986, 15027, 9781, 10084, 10144, 10198, 12716, 12852, 12966, 7410, 7434, 8104, 8146, 8210, 9946, 9957, 9968, 10348, 10443, 9599, 9627, 9662, 6164, 6240, 6292, 6396, 5996, 6007, 6018, 6029, 9703, 10893, 10904, 10914, 13217, 13292, 10667, 10742, 10790, 10808, 14244, 8814, 9087, 9469, 11289, 11368, 11446, 15245, 15332, 12299, 12441, 9760, 9780, 9797, 9880, 10102, 10851, 14887, 8391, 8436, 11144, 11249, 11350, 9500, 9522, 9539, 9556, 13827, 13886, 13928, 14375, 9883, 9907, 9924, 9936, 10391, 10564, 12635, 12008, 12360, 12466, 12731, 13813, 13867, 13916, 13953, 12587, 12784, 12983, 7720, 7806, 7860, 13764, 13815, 13865, 12013, 12076, 12172, 13319, 
13393, 13468, 6385, 6520, 6568, 11675, 11701, 11748, 12914, 15191, 15219, 15274, 11948, 12854, 13018, 13924, 13964, 13989, 14015, 11835, 11854, 11865, 11876, 13566, 13634, 13686, 14540, 14552, 14564, 14575, 12097, 12176, 12269, 13402, 13456, 13518, 13574, 11957, 11995, 12040, 13181, 13256, 13346, 12240, 14989, 15058, 15096, 12808, 15220, 15316, 15411, 14623, 14635, 14650, 12242, 12664, 12834, 12979, 14976, 15048, 15102, 15136, 14274, 11494, 11522, 11544, 11567, 13360, 13404, 13441, 13487, 14376, 14396, 14407, 14420, 8339, 9582, 9639, 9682, 11975, 12017, 12092, 13746, 14332, 12496, 12706, 12916, 14658, 14669, 14680, 13792, 13853, 13915, 13512, 14076, 14869, 12222, 13652, 13704, 13734, 8298, 9586, 9628, 9673, 11493, 11525, 11554, 15120, 15169, 14226, 14117, 14179, 14221, 14064, 13972, 14011, 14037, 12276, 13728, 14335, 14841, 13059, 13101, 13145, 15552, 14129, 14175, 14211, 13107, 13974, 14803, 15210, 13481, 14302, 14846, 14918, 8617, 8628, 8639, 8658, 12148, 12248, 15104, 15149, 14757, 14768, 14787, 12082, 12145, 12227, 12489, 12699, 12860, 12144, 13161, 13244, 13318, 13372, 8521, 8795, 9186, 9667, 11121, 13845, 14855, 14982, 15074, 13170, 13275, 13394, 14740, 15161, 15238, 15311, 8341, 8372, 11933, 12610, 12775, 14423, 14857, 14942, 12124, 12197, 12481, 12618, 12778, 12886, 11940, 15276, 15377, 15459, 13051, 12613, 14819, 14916] +fold 5: +[8657, 8685, 8707, 8728, 14431, 14446, 14470, 14500, 6837, 14471, 15228, 15296, 15380, 15457, 14285, 14310, 15318, 15378, 15432, 13761, 13800, 14020, 14046, 14066, 11996, 12042, 12087, 12139, 11657, 12711, 12836, 12578, 12762, 12894, 13603, 13632, 13670, 13078, 15485, 15526, 6979, 7023, 7071, 7105, 8110, 8241, 12994, 10115, 14110, 14167, 14197, 6714, 6746, 6763, 6596, 6637, 7296, 7315, 7351, 11814, 11827, 11843, 9712, 9739, 9766, 9796, 10639, 10696, 10747, 7130, 7163, 7200, 7247, 8417, 8458, 8488, 8537, 12335, 7169, 7210, 7242, 6646, 6665, 6676, 6690, 11092, 11203, 11295, 7209, 7255, 7273, 10499, 10592, 10693, 10800, 6958, 7051, 
7100, 7126, 11118, 11293, 11628, 9140, 9243, 9321, 7519, 7549, 7585, 5892, 5903, 6910, 6936, 8523, 8548, 8567, 7537, 7588, 7638, 14691, 14702, 14713, 14724, 5951, 14072, 14128, 14779, 9073, 9209, 9332, 10983, 11079, 11181, 11592, 10122, 10177, 10224, 5960, 6440, 6680, 6918, 7628, 7672, 7695, 6985, 7159, 7335, 7405, 12302, 12453, 12580, 12727, 8884, 8978, 9057, 9128, 6867, 6889, 6912, 6932, 13029, 14580, 14593, 14607, 11674, 8975, 9068, 9150, 9204, 11697, 11732, 6960, 7038, 8177, 8273, 14475, 14505, 14529, 6963, 7843, 8219, 9144, 9196, 9249, 12159, 13520, 13565, 9730, 10119, 10183, 10230, 15389, 15480, 15513, 15543, 8830, 8897, 8959, 6217, 6310, 6421, 13397, 14017, 15158, 9473, 9521, 9558, 13295, 10655, 10712, 10756, 13438, 13508, 13582, 7359, 7425, 7459, 7501, 14345, 8407, 13600, 13649, 13696, 10279, 10346, 10410, 10476, 9326, 9389, 9442, 9483, 6239, 6286, 6348, 10529, 10633, 11012, 11128, 11287, 11661, 11694, 11734, 6991, 7087, 8142, 8278, 9440, 9475, 9499, 7022, 7085, 8238, 7616, 7929, 8178, 6186, 6335, 6456, 13977, 8945, 9003, 9069, 9131, 6842, 6853, 6868, 6898, 10921, 11881, 11892, 11903, 9986, 9997, 10009, 10049, 10132, 10238, 8778, 14574, 8317, 9625, 9666, 10826, 10858, 10880, 14906, 14970, 15019, 7817, 7915, 8004, 8085, 12423, 12741, 10304, 10366, 10422, 10486, 8513, 8546, 8571, 6794, 6805, 6816, 6827, 11040, 11194, 11342, 11418, 8671, 8692, 8716, 8735, 6374, 6464, 6517, 6547, 7553, 7606, 7636, 7658, 9900, 8808, 8900, 10714, 7473, 12275, 12369, 12925, 9008, 9115, 9192, 9250, 12289, 12460, 12571, 12700, 11790, 13056, 13098, 15501, 15548, 14108, 14195, 8745, 8756, 7013, 8022, 8156, 11601, 6291, 6376, 6427, 6475, 7000, 7086, 8235, 14782, 14250, 14262, 11084, 11169, 11264, 11807, 8414, 8455, 8478, 8506, 8874, 8953, 9027, 13429, 15083, 12109, 12451, 12569, 12742, 10980, 10992, 11003, 10253, 10292, 10349, 6987, 8097, 8172, 8290, 7878, 7942, 7987, 8050, 8595, 8606, 10507, 10557, 10593, 8292, 5913, 5924, 5935, 5947, 8681, 8850, 9017, 9677, 6716, 6751, 6774, 6790, 
11013, 11095, 11209, 9810, 9831, 9859, 9886, 10195, 10239, 10261, 13342, 13671, 15200, 5976, 6617, 7776, 7826, 7862, 12418, 12597, 8786, 8901, 9325, 9696, 10054, 10297, 10795, 11590, 11625, 10417, 10527, 11758, 11784, 13449, 13502, 7369, 7380, 7391, 6119, 6145, 6170, 6275, 6419, 6529, 6035, 6075, 6101, 6131, 12319, 12918, 8877, 8980, 9066, 9142, 14426, 10832, 10849, 10866, 7876, 7943, 7989, 8040, 14612, 10528, 11141, 7694, 7728, 7777, 7265, 7637, 7865, 8086, 6592, 6613, 6635, 8782, 12332, 12747, 8833, 8916, 7963, 8005, 8060, 9734, 13105, 13154, 13200, 8344, 8371, 8401, 14359, 6148, 6226, 6332, 6441, 6209, 6258, 11055, 11218, 11539, 9294, 9340, 9375, 11341, 11395, 11457, 9811, 9834, 9853, 9868, 6300, 6481, 6539, 6567, 11098, 11258, 11345, 11415, 6945, 8130, 8208, 8267, 6061, 6086, 6111, 7780, 7841, 7894, 11998, 12365, 12464, 12878, 15317, 15379, 15449, 6612, 6699, 6725, 11047, 11273, 11402, 10316, 10377, 10429, 10485, 11610, 11648, 7707, 7741, 7768, 7810, 13624, 14368, 14854, 14925, 13263, 7314, 7338, 7358, 10917, 11281, 11363, 11432, 13205, 13276, 13356, 10050, 10103, 10488, 11464, 11505, 9287, 9393, 9435, 10569, 10619, 10669, 11497, 11568, 14890, 14953, 15002, 15037, 10079, 10140, 10191, 12698, 12837, 12955, 13044, 7402, 7427, 8057, 8121, 8165, 9939, 9950, 9961, 9972, 10303, 10364, 9588, 9617, 9651, 9694, 6179, 6252, 6313, 5997, 6008, 6019, 9706, 10896, 10906, 13183, 13258, 13340, 10654, 10729, 10787, 10803, 14246, 8449, 8932, 9208, 9645, 11309, 11396, 11454, 15297, 15358, 12380, 12505, 9757, 9778, 9795, 10038, 10260, 14850, 8434, 11025, 11160, 11263, 11361, 9515, 9530, 9547, 13807, 13863, 13914, 13950, 14382, 9888, 9911, 9927, 10426, 10595, 12614, 12697, 12337, 12435, 12532, 13816, 13874, 13919, 13956, 12629, 12815, 7783, 7850, 7908, 13750, 13783, 13843, 13887, 11992, 12063, 12140, 13288, 13382, 13443, 13526, 6433, 6540, 11678, 11716, 11756, 12751, 15187, 15212, 15261, 12718, 12863, 13028, 13917, 13961, 13987, 14012, 11847, 11858, 11869, 13599, 13651, 13709, 
14539, 14551, 14562, 14573, 12049, 12128, 12215, 13398, 13451, 13513, 13569, 11953, 11993, 12034, 13159, 13245, 13330, 12221, 14943, 15005, 15069, 15106, 12771, 12921, 15177, 15241, 15340, 14622, 14634, 14648, 12245, 12520, 12724, 12871, 13001, 15009, 15073, 15119, 15148, 14289, 11479, 11512, 11536, 11555, 13388, 13419, 13463, 14381, 14398, 14409, 8313, 9572, 9613, 11966, 12005, 12079, 13729, 14326, 14341, 12236, 12611, 12794, 14661, 14672, 14683, 13782, 13838, 13897, 13735, 14360, 15072, 12179, 12280, 13619, 13672, 13720, 12873, 8312, 9598, 9646, 9692, 11488, 11523, 11552, 15155, 15197, 7370, 14229, 14127, 14185, 13966, 14000, 14034, 13315, 13965, 14632, 14873, 13077, 13117, 13163, 14119, 14166, 14202, 13823, 14621, 15094, 13375, 14223, 14838, 14910, 8620, 8631, 8643, 8665, 12168, 12264, 15088, 15138, 14764, 14781, 14812, 7103, 12044, 12115, 12174, 12277, 12514, 12726, 12876, 13178, 13259, 13333, 13383, 8469, 8788, 9152, 9612, 10916, 13806, 13860, 14899, 15004, 15097, 13237, 13328, 14737, 14752, 15211, 15265, 8357, 8385, 11947, 12626, 12843, 14501, 14861, 14951, 12096, 12164, 12261, 12541, 12685, 12820, 13047, 10287, 15268, 15369, 15451, 13010, 12588, 14853, 14952] +fold 6: +[8664, 8688, 8710, 14430, 14445, 14468, 14499, 6838, 14466, 15240, 15307, 15395, 14278, 14300, 15282, 15342, 15406, 15468, 13784, 14030, 14051, 14073, 12014, 12061, 12110, 12780, 12887, 12503, 12684, 12845, 12974, 13597, 13625, 13663, 13706, 13088, 15500, 15530, 6992, 7043, 7083, 8143, 8269, 11920, 13035, 10125, 14134, 14180, 6724, 6750, 6769, 6585, 6625, 7284, 7301, 7323, 11815, 11829, 11845, 9718, 9749, 9773, 10603, 10662, 10725, 10757, 7144, 7176, 7218, 8451, 8482, 8520, 12321, 7174, 7214, 7246, 6661, 6672, 6683, 11072, 11187, 11284, 7203, 7252, 7272, 10542, 10622, 10760, 7042, 7097, 7123, 10972, 11145, 11359, 11679, 9149, 9254, 9329, 7530, 7564, 7598, 5896, 5908, 6923, 8508, 8540, 8559, 8579, 7542, 7593, 7642, 14695, 14706, 14717, 5962, 14094, 14146, 14804, 8922, 9184, 9297, 9403, 11021, 
11107, 11193, 11689, 10116, 10172, 10220, 6341, 6505, 6787, 7640, 7679, 7703, 7081, 7243, 7395, 11949, 12402, 12547, 12682, 8875, 8968, 9048, 9120, 6882, 6906, 6924, 13009, 14581, 14594, 14608, 11771, 11672, 9026, 9107, 9179, 11686, 11717, 11755, 7010, 8119, 8245, 14460, 14488, 14517, 7693, 8096, 9132, 9187, 9237, 9293, 13535, 13575, 10041, 10142, 10200, 15453, 15505, 15535, 8785, 8849, 8920, 8979, 6200, 6277, 6381, 6478, 13235, 13756, 14988, 15464, 9466, 9518, 9554, 13239, 13317, 10649, 10704, 10743, 10784, 13412, 13475, 13549, 7313, 7423, 7456, 7494, 14349, 8370, 8453, 11622, 8837, 13615, 13662, 13710, 10302, 10375, 10440, 9309, 9381, 9433, 9474, 6166, 6244, 6289, 6355, 10524, 10618, 11094, 11237, 11369, 11685, 11726, 7057, 7115, 8232, 9407, 9449, 9482, 9504, 6943, 7027, 7089, 8247, 7349, 7748, 8038, 8250, 6197, 6347, 6466, 13973, 8899, 8981, 9041, 9102, 6847, 6858, 6881, 10919, 11879, 11890, 11901, 9981, 9992, 10004, 10015, 10056, 10137, 10317, 8771, 14309, 14823, 8309, 9614, 9652, 10817, 10848, 10877, 14960, 15007, 7891, 7990, 8071, 12458, 12889, 10322, 10387, 10442, 8503, 8541, 8566, 6802, 6813, 6824, 11115, 11261, 11374, 8666, 8689, 8714, 8734, 6352, 6444, 6511, 6543, 7557, 7609, 7639, 7662, 9892, 8303, 8844, 9447, 10759, 12170, 12353, 12814, 9046, 9146, 9213, 12039, 12422, 12542, 12671, 11797, 13075, 15477, 15522, 14122, 14203, 8740, 8751, 8762, 6967, 7979, 8099, 8243, 11631, 6280, 6369, 6424, 6468, 6981, 7076, 8221, 14806, 14255, 14268, 11076, 11157, 11254, 8421, 8457, 8481, 8507, 8857, 8934, 9012, 13716, 15135, 12255, 12465, 12582, 12755, 10976, 10987, 10998, 10222, 10262, 10305, 10363, 8042, 8124, 8213, 7921, 7969, 8024, 8589, 8600, 8611, 10505, 10551, 10590, 10630, 5919, 5930, 5941, 8783, 8864, 9042, 6708, 6744, 6770, 6788, 11064, 11164, 11251, 9816, 9838, 9870, 10213, 10245, 10270, 13192, 13560, 15062, 15476, 6044, 7755, 7812, 7849, 7889, 12535, 8314, 8796, 8965, 9436, 9847, 10081, 10498, 11596, 11633, 10384, 10500, 11713, 11767, 11792, 13425, 13482, 
13538, 7371, 7382, 6094, 6128, 6152, 6178, 6309, 6471, 6541, 6059, 6087, 6115, 12361, 13042, 8914, 9005, 9086, 14422, 10825, 10842, 10859, 7938, 7986, 8034, 10493, 10541, 11082, 11176, 7675, 7696, 7731, 7784, 7484, 7726, 7972, 6578, 6598, 6620, 6645, 8815, 12313, 12732, 8810, 8888, 7919, 7973, 8014, 8092, 9717, 9738, 13068, 13109, 13160, 13202, 8335, 8359, 8395, 14358, 6151, 6233, 6346, 6449, 6167, 6235, 6296, 11022, 11186, 11433, 11873, 9258, 9311, 9349, 9382, 11316, 11375, 11429, 9813, 9837, 9855, 9871, 12864, 6282, 6448, 6536, 6565, 11214, 11298, 11386, 8113, 8189, 8252, 6041, 6076, 6096, 6122, 6452, 7754, 7823, 7866, 7925, 12318, 12429, 12519, 15300, 15365, 15440, 15490, 6618, 6700, 6726, 11172, 11353, 11448, 10343, 10402, 10454, 11602, 11642, 7718, 7750, 7788, 7821, 13954, 14735, 14885, 14949, 13255, 7302, 7328, 7352, 11049, 11313, 11387, 11800, 13167, 13250, 13332, 13384, 9944, 10068, 10182, 10700, 11460, 11502, 9319, 9413, 9450, 10540, 10594, 10641, 10710, 14897, 14959, 15006, 15044, 9918, 10093, 10148, 10539, 12736, 12859, 12980, 7398, 7424, 8088, 8133, 8199, 9948, 9959, 9970, 10320, 10412, 9596, 9624, 9659, 6159, 6231, 6283, 6377, 5999, 6010, 6021, 9709, 10898, 10908, 13208, 13282, 10660, 10737, 10789, 10804, 14245, 8343, 8880, 9163, 9525, 11235, 11318, 11404, 11458, 15234, 15325, 12424, 12531, 9741, 9765, 9786, 9801, 10070, 10520, 14865, 8394, 8438, 11074, 11189, 11286, 11383, 9505, 9523, 9540, 9560, 13781, 13839, 13896, 13936, 14379, 9885, 9909, 9926, 9937, 10416, 10588, 12605, 12693, 12326, 12425, 12521, 13825, 13879, 13920, 13962, 12651, 12838, 7774, 7840, 7899, 14148, 13754, 13794, 13849, 13899, 12031, 12098, 12198, 13301, 13387, 13447, 6420, 6531, 11683, 11728, 12866, 15179, 15198, 15229, 15295, 11941, 12840, 12999, 13934, 13969, 13993, 14024, 11840, 11855, 11866, 11877, 13573, 13639, 13703, 14538, 14550, 14561, 14572, 12081, 12155, 12252, 13413, 13467, 13528, 13585, 11943, 11991, 12029, 13118, 13206, 13289, 13365, 12274, 14924, 14995, 15063, 15099, 
12782, 15186, 15262, 15354, 14627, 14640, 12185, 12627, 12799, 12956, 15042, 15098, 15133, 14272, 14297, 11490, 11520, 11543, 11562, 13385, 13415, 13457, 14388, 14401, 14413, 8307, 9566, 9603, 9679, 11937, 11994, 12054, 13752, 14324, 14338, 12406, 12621, 12810, 14659, 14671, 14682, 13774, 13829, 13888, 13952, 13531, 14248, 14957, 12200, 13643, 13692, 13731, 12842, 8300, 9591, 9631, 9675, 11495, 11527, 11557, 15103, 15166, 14235, 14057, 14133, 14188, 13982, 14019, 14042, 12241, 13635, 14231, 14824, 13066, 13106, 13152, 15559, 14092, 14144, 14184, 14218, 13153, 14047, 14833, 15277, 13654, 14483, 14859, 8625, 8636, 8649, 12219, 15095, 15140, 14765, 14784, 14816, 12060, 12126, 12203, 12339, 12575, 12770, 12926, 12184, 13185, 13266, 13336, 13389, 8743, 8998, 9404, 11088, 13824, 13878, 14926, 15029, 15113, 13189, 13293, 14731, 14744, 15205, 15259, 8353, 8382, 11950, 12641, 12902, 14775, 14896, 14967, 12093, 12158, 12253, 12494, 12640, 12786, 12898, 11972, 15293, 15386, 15469, 11916, 12553, 12655, 14862] +fold 7: +[8652, 8680, 8701, 8724, 14437, 14455, 14484, 6835, 14447, 14513, 15216, 15285, 15368, 15433, 14284, 14307, 15299, 15356, 15414, 13776, 14008, 14041, 14058, 14093, 12019, 12067, 12113, 12694, 12826, 12526, 12705, 12857, 12982, 13593, 13621, 13659, 13701, 13083, 15495, 15529, 6986, 7036, 7079, 8158, 8275, 11927, 13046, 10100, 14090, 14156, 14186, 6701, 6737, 6758, 6778, 6604, 6649, 7299, 7316, 7353, 11820, 11834, 9711, 9737, 9764, 9793, 10653, 10709, 10752, 7141, 7172, 7211, 8440, 8472, 8510, 12180, 12984, 7150, 7190, 7227, 7257, 6652, 6667, 6678, 6695, 11037, 11147, 11260, 11722, 7225, 7263, 7276, 10512, 10607, 10723, 6994, 7069, 7110, 7133, 11102, 11262, 11607, 9124, 9223, 9312, 7516, 7547, 7581, 7614, 5891, 5902, 6901, 6934, 8530, 8553, 8573, 7552, 7615, 7653, 14693, 14704, 14715, 14726, 5952, 14082, 14136, 14788, 9125, 9220, 9341, 11056, 11150, 11445, 10090, 10155, 10202, 10241, 5901, 6393, 6586, 6852, 7597, 7655, 7685, 7717, 7070, 7224, 7393, 12089, 12421, 
12557, 12695, 8930, 9016, 9093, 6872, 6895, 6917, 12998, 14589, 14601, 9014, 9101, 9174, 11693, 11729, 6980, 7055, 8197, 8289, 14458, 14486, 14514, 7015, 7911, 8272, 9138, 9191, 9240, 9300, 12373, 13516, 13563, 10057, 10150, 10206, 15447, 15502, 15531, 8804, 8870, 8936, 8995, 6232, 6344, 6455, 13074, 13623, 14902, 15298, 9498, 9541, 9569, 13230, 13309, 10668, 10715, 10758, 13420, 13485, 13555, 7421, 7453, 7492, 8380, 11638, 13610, 13658, 13702, 10318, 10388, 10451, 9301, 9377, 9428, 9472, 6228, 6272, 6337, 10484, 10580, 10717, 11031, 11163, 11312, 11809, 11699, 11736, 7049, 7111, 8211, 9430, 9465, 9493, 6965, 7037, 8148, 8265, 7408, 7792, 8066, 8261, 6213, 6379, 6486, 13980, 8971, 9028, 9094, 6846, 6857, 6879, 6909, 10923, 11883, 11894, 9989, 10001, 10012, 9708, 10065, 10141, 10428, 8776, 14412, 8304, 8331, 9644, 9691, 10815, 10845, 10876, 10887, 14894, 14963, 15012, 7873, 7970, 8054, 12443, 12783, 10288, 10345, 10409, 10464, 8509, 8543, 8568, 6797, 6808, 6819, 11138, 11269, 11381, 8675, 8695, 8720, 6340, 6437, 6508, 6538, 7578, 7623, 7647, 9887, 8827, 9346, 10686, 10780, 12068, 12338, 12413, 9080, 9165, 9226, 12367, 12504, 12612, 11796, 13063, 15455, 15512, 15557, 14091, 14160, 8741, 8752, 8763, 6989, 8003, 8136, 8280, 11609, 6315, 6397, 6442, 6487, 7016, 8151, 8249, 14801, 14252, 14264, 11028, 11101, 11205, 11707, 8437, 8468, 8494, 8846, 8929, 9006, 14543, 15255, 12323, 12487, 12601, 10978, 10989, 11000, 10244, 10281, 10331, 10390, 8065, 8137, 8248, 7903, 7956, 8008, 8072, 8590, 8601, 8612, 10469, 10530, 10575, 10612, 9546, 5915, 5926, 5937, 8439, 8831, 8896, 9426, 6719, 6753, 6777, 11016, 11111, 11216, 9824, 9848, 9877, 10176, 10228, 10254, 10277, 13326, 13645, 15168, 5933, 6556, 7786, 7832, 7868, 12554, 8712, 8869, 9275, 10051, 10265, 10754, 11585, 11617, 11649, 10400, 10508, 11742, 11779, 13436, 13492, 13548, 7376, 7387, 6116, 6144, 6168, 6225, 6338, 6498, 6550, 6066, 6092, 6121, 12131, 12401, 8931, 9022, 9103, 10819, 10835, 10853, 10869, 7883, 7948, 7993, 
8045, 14355, 10506, 10547, 11136, 7686, 7711, 7751, 7561, 7781, 8028, 6583, 6603, 6623, 6651, 8806, 12009, 12415, 8856, 7955, 7998, 8051, 9729, 13080, 13124, 13175, 13218, 8337, 8364, 8397, 14346, 14362, 6198, 6295, 6403, 6215, 6267, 11077, 11230, 11620, 9272, 9320, 9358, 9391, 11346, 11399, 11459, 9817, 9839, 9856, 9873, 12958, 6387, 6510, 6552, 11075, 11250, 11336, 11409, 8125, 8202, 8263, 6046, 6078, 6097, 6929, 6366, 7801, 7851, 7912, 11938, 12352, 12454, 12835, 15246, 15345, 15407, 15471, 6606, 6697, 6720, 11142, 11347, 11442, 10306, 10370, 10424, 10480, 11586, 11623, 11658, 7710, 7742, 7779, 7813, 14174, 14828, 14905, 13179, 13272, 7289, 7319, 7341, 7361, 11027, 11305, 11379, 11669, 13147, 13234, 13311, 13374, 10027, 10089, 10308, 11456, 11492, 9299, 9399, 9439, 10548, 10599, 10648, 10716, 14903, 14964, 15011, 15052, 10031, 10113, 10168, 10884, 12781, 12893, 13012, 7415, 8032, 8114, 8153, 8218, 9942, 9953, 9964, 9975, 10356, 10448, 9583, 9610, 9643, 9684, 6140, 6195, 6265, 6343, 5991, 6002, 6013, 6025, 9699, 10890, 10901, 10911, 13129, 13231, 13302, 10695, 10767, 10798, 8500, 8976, 9274, 11265, 11344, 11428, 11465, 15283, 15352, 12416, 12527, 9755, 9777, 9794, 9719, 10080, 10688, 14875, 8427, 11129, 11231, 11340, 9516, 9531, 9548, 13821, 13880, 13926, 14387, 9896, 9916, 9930, 10382, 10521, 12552, 12646, 12304, 12403, 12499, 13852, 13904, 13938, 12497, 12719, 12892, 7769, 7838, 7893, 13771, 13826, 13873, 12000, 12070, 12163, 13279, 13378, 13437, 13522, 6349, 6503, 6562, 11680, 11724, 11757, 12895, 15180, 15199, 15236, 15303, 11942, 12850, 13007, 13959, 13984, 14009, 11844, 11856, 11867, 13559, 13630, 13681, 14542, 14555, 14566, 12073, 12149, 12243, 13422, 13479, 13540, 11964, 11999, 12045, 13132, 13226, 13308, 13373, 12251, 14948, 15016, 15075, 15109, 12830, 15173, 15233, 15334, 15422, 14629, 14643, 12254, 12648, 12816, 12969, 15023, 15085, 15124, 14279, 11486, 11517, 11541, 11560, 13381, 13411, 13454, 13504, 14390, 14403, 14414, 8336, 9579, 9635, 9687, 11970, 
12010, 12085, 13695, 14334, 12192, 12516, 12721, 12971, 14647, 14663, 14674, 14685, 13797, 13858, 13922, 13460, 14005, 14734, 12147, 12258, 13648, 13697, 13732, 12897, 9581, 9618, 9663, 11514, 11535, 11569, 15143, 15183, 14228, 14105, 14176, 14216, 14059, 13988, 14022, 14044, 13682, 14295, 14832, 13062, 13104, 13149, 15555, 14086, 14138, 14182, 14217, 13364, 14276, 14912, 13985, 14810, 14888, 8623, 8634, 8646, 12208, 15068, 15127, 15160, 14759, 14771, 14795, 7136, 12069, 12130, 12209, 12195, 12546, 12730, 12896, 13219, 13297, 13358, 8648, 8890, 9356, 11066, 13851, 14817, 14965, 15057, 15126, 13155, 13264, 13354, 14738, 14753, 15154, 15223, 15288, 8368, 11956, 12661, 12951, 14321, 14849, 14932, 15017, 12137, 12226, 12595, 12750, 12861, 11963, 15248, 15349, 15437, 15503, 11955, 12599, 14692, 14901] +fold 8: +[8654, 8682, 8705, 8726, 14440, 14459, 14489, 14494, 15224, 15291, 15374, 15441, 14273, 14296, 15260, 15329, 15388, 15450, 13780, 14014, 14045, 14061, 12028, 12075, 12123, 11662, 12758, 12868, 12663, 12833, 12961, 13588, 13618, 13656, 13698, 13073, 15474, 15523, 15558, 6997, 7045, 7090, 8093, 8231, 13006, 10060, 14143, 14181, 6718, 6747, 6765, 6601, 6642, 7286, 7303, 7325, 11811, 11823, 11838, 9705, 9733, 9761, 9789, 10616, 10675, 10734, 10764, 7120, 7155, 7193, 7240, 8412, 8454, 8484, 8533, 12345, 7187, 7222, 7256, 6656, 6670, 6681, 10939, 11113, 11223, 11314, 7219, 7260, 7275, 10546, 10637, 10768, 7012, 7078, 7116, 11048, 11180, 11455, 11830, 9097, 9203, 9298, 9370, 7534, 7570, 7603, 5899, 6890, 6930, 8522, 8545, 8565, 8586, 7527, 7579, 7630, 7667, 14690, 14701, 14712, 14723, 5950, 14102, 14152, 14776, 8938, 9193, 9308, 11014, 11085, 11188, 11635, 10064, 10139, 10185, 10231, 6006, 6484, 6723, 7587, 7651, 7684, 7712, 7048, 7213, 7392, 7412, 12358, 12512, 12632, 8902, 8992, 9071, 6861, 6886, 6908, 6925, 14578, 14591, 14604, 11776, 8999, 9083, 9162, 11703, 11740, 7003, 8105, 8237, 14467, 14496, 14522, 7659, 8046, 9106, 9172, 9225, 9277, 12389, 13489, 13547, 13590, 
10092, 10162, 10214, 15442, 15499, 15527, 8812, 8879, 8947, 6196, 6269, 6365, 6469, 13136, 13664, 14934, 15362, 9444, 9509, 9549, 9573, 13294, 10626, 10683, 10728, 10772, 13424, 13493, 13561, 7417, 7446, 7485, 8375, 8456, 11630, 13568, 13633, 13684, 13726, 10283, 10353, 10415, 10482, 9347, 9408, 9455, 6199, 6254, 6311, 6380, 10553, 10636, 11073, 11196, 11333, 11721, 6948, 7067, 7118, 8242, 9400, 9443, 9477, 9501, 7017, 7077, 8224, 7528, 7887, 8139, 6218, 6388, 6490, 8935, 8994, 9063, 9122, 6843, 6854, 6871, 6900, 10918, 10930, 11889, 11900, 9985, 9996, 10008, 10018, 10111, 10165, 8772, 14193, 14747, 8294, 8320, 9629, 9672, 10812, 10839, 10874, 10885, 14955, 15003, 15050, 7830, 7923, 8013, 8095, 11987, 12492, 13000, 10299, 10358, 10419, 10479, 8479, 8527, 8556, 8580, 6793, 6804, 6815, 6826, 11068, 11233, 11358, 11431, 8662, 8687, 8711, 8732, 6362, 6453, 6515, 6544, 7586, 7625, 7649, 9912, 8800, 8895, 10746, 11918, 12316, 12391, 9018, 9129, 9199, 9255, 12350, 12490, 12600, 11788, 13085, 15488, 15532, 14126, 14208, 8747, 8758, 7029, 8035, 8171, 11616, 6351, 6413, 6458, 6961, 7041, 8185, 8270, 14794, 14259, 11052, 11127, 11227, 8404, 8446, 8473, 8499, 8794, 8893, 8969, 9038, 14631, 15350, 11959, 12442, 12559, 12725, 10977, 10988, 10999, 10229, 10269, 10307, 10371, 8070, 8144, 8258, 7936, 7980, 8041, 8592, 8603, 8614, 10481, 10534, 10578, 10617, 5914, 5925, 5936, 5948, 8511, 8838, 8906, 9503, 6696, 6739, 6766, 6785, 11057, 11153, 11246, 9821, 9843, 9874, 10143, 10219, 10248, 10271, 13386, 14809, 15232, 6033, 6776, 7789, 7835, 7870, 12456, 12653, 8670, 8826, 9197, 9759, 10069, 10386, 10873, 11599, 11636, 10357, 10483, 10562, 11727, 11773, 13459, 13511, 7372, 7383, 6106, 6135, 6160, 6187, 6262, 6406, 6523, 6069, 6095, 6124, 12283, 12804, 8905, 9000, 9081, 9153, 14419, 10823, 10841, 10857, 10872, 7901, 7965, 8007, 8061, 14508, 10510, 10552, 11093, 11185, 7687, 7716, 7763, 7397, 7670, 7892, 8106, 6594, 6614, 6638, 8626, 12296, 12452, 8819, 8898, 7958, 8001, 8058, 9732, 
13099, 13146, 13193, 8345, 8374, 8405, 14353, 6165, 6243, 6357, 6463, 6201, 6255, 11044, 11207, 11508, 9289, 9335, 9371, 11301, 11364, 11417, 11469, 9808, 9832, 9852, 9867, 12907, 6416, 6519, 6557, 11229, 11322, 11397, 6949, 8138, 8215, 8271, 6036, 6074, 6093, 6120, 6410, 7744, 7815, 7859, 7922, 12059, 12378, 12474, 12938, 15310, 15371, 15444, 6574, 6626, 6704, 6730, 11065, 11292, 11407, 10347, 10405, 10461, 11615, 11650, 7725, 7753, 7795, 14315, 14845, 14919, 13232, 7295, 7324, 7347, 7363, 11010, 11297, 11370, 11452, 13196, 13269, 13350, 9891, 10063, 10138, 10621, 11468, 11509, 9234, 9324, 9418, 9453, 10535, 10589, 10638, 10706, 11499, 11570, 14945, 14993, 15031, 9977, 10099, 10153, 10666, 12757, 12870, 12996, 7413, 7436, 8080, 8127, 8184, 9941, 9952, 9963, 9974, 10309, 10369, 9580, 9609, 9641, 9680, 6189, 6261, 6327, 6001, 6012, 6023, 9707, 10897, 10907, 13157, 13246, 13331, 10647, 10720, 10783, 10802, 14240, 8291, 8847, 9130, 9480, 11283, 11360, 11440, 15306, 15366, 12366, 12479, 9753, 9775, 9791, 10062, 10450, 14860, 8420, 11058, 11178, 11277, 11377, 9491, 9519, 9534, 9553, 13817, 13875, 13921, 14389, 9894, 9915, 9929, 10399, 10574, 12591, 12681, 12108, 12371, 12467, 12824, 13847, 13898, 13932, 12518, 12733, 12927, 7757, 7829, 7877, 13747, 13779, 13837, 13883, 12047, 12125, 13351, 13408, 13503, 6447, 6548, 11671, 11692, 11741, 12841, 15174, 15193, 15222, 15281, 11934, 12821, 12991, 13940, 13971, 13996, 14027, 11832, 11853, 11864, 11875, 13543, 13616, 13674, 13721, 14546, 14557, 14568, 12066, 12142, 12233, 13445, 13506, 13562, 11925, 11978, 12011, 12064, 13110, 13195, 13271, 13357, 12262, 14936, 15000, 15067, 15101, 12798, 15182, 15253, 15346, 14625, 14638, 14655, 12196, 12538, 12740, 12884, 13011, 15033, 15093, 15131, 14291, 11504, 11529, 11548, 13370, 13406, 13446, 13494, 14370, 14393, 14404, 14415, 8302, 9563, 9597, 9665, 9693, 11932, 11990, 12050, 13733, 14325, 14339, 12566, 12779, 14657, 14668, 14679, 13778, 13834, 13893, 13958, 13100, 13811, 14443, 15382, 
12231, 13631, 13683, 13725, 12906, 8308, 9595, 9640, 9689, 11516, 11540, 11573, 15114, 15167, 7342, 14233, 14068, 14147, 14196, 13963, 13998, 14033, 12265, 13125, 13842, 14472, 14851, 13087, 13131, 13176, 14104, 14154, 14190, 14222, 13053, 13923, 14714, 15147, 13290, 14153, 14831, 14904, 8624, 8635, 8647, 12182, 12267, 15110, 15151, 14767, 14786, 12106, 12169, 12271, 12394, 12604, 12806, 13169, 13253, 13325, 13377, 8691, 8954, 9384, 13818, 13872, 14877, 14998, 15091, 13182, 13281, 14732, 14745, 15152, 15218, 15284, 8365, 11960, 12672, 13036, 14827, 14914, 14981, 12104, 12171, 12273, 12560, 12704, 12829, 13020, 10266, 15217, 15326, 15410, 15486, 11946, 12561, 12668, 14886] +fold 9: +[8669, 8694, 8715, 14441, 14461, 14491, 6834, 14444, 14510, 15264, 15355, 15418, 14280, 14303, 15267, 15331, 15394, 15458, 13759, 13795, 11804, 14010, 14043, 14060, 14106, 12037, 12084, 12135, 11659, 12673, 12811, 12615, 12788, 12915, 13601, 13628, 13666, 13064, 13123, 15514, 15551, 6983, 7032, 7073, 8079, 8217, 13027, 10023, 14123, 14172, 6721, 6749, 6767, 6610, 6658, 7293, 7310, 7334, 11817, 11833, 9725, 9754, 9779, 10610, 10665, 10730, 10761, 7137, 7166, 7205, 7251, 8435, 8467, 8505, 12300, 7179, 7216, 7250, 6643, 6664, 6675, 6688, 11081, 11195, 11291, 7244, 7269, 7280, 10466, 10576, 10676, 10794, 7020, 7084, 7117, 11017, 11158, 11400, 11733, 9051, 9171, 9270, 9344, 7521, 7554, 7589, 5895, 5907, 6920, 6939, 8526, 8549, 8569, 7523, 7576, 7627, 7663, 14694, 14705, 14716, 14727, 5959, 14107, 14161, 14772, 14813, 9136, 9229, 9359, 11051, 11139, 11380, 10058, 10133, 10179, 10226, 5986, 6462, 6702, 7646, 7681, 7708, 7114, 7287, 7399, 12387, 12534, 12660, 8918, 9010, 9084, 6878, 6902, 6921, 14586, 14598, 14614, 9007, 9092, 9168, 11700, 11737, 7018, 8135, 8254, 14464, 14493, 14520, 6941, 7814, 8198, 9121, 9180, 9232, 9288, 12356, 13495, 13551, 13596, 10104, 10175, 10223, 15416, 15489, 15518, 15549, 8820, 8886, 8949, 6191, 6264, 6356, 6461, 13334, 13861, 15105, 9460, 9513, 9552, 13248, 13323, 
10640, 10694, 10735, 10778, 13464, 13537, 7419, 7448, 7489, 8367, 8448, 11612, 8868, 13620, 13668, 13718, 10312, 10381, 10446, 9339, 9402, 9451, 6210, 6256, 6317, 6386, 10463, 10577, 10708, 11023, 11152, 11302, 11718, 11710, 11747, 7039, 7109, 8201, 9437, 9471, 9496, 6993, 7060, 8190, 7584, 7902, 8160, 6224, 6414, 6497, 8963, 9021, 9085, 6851, 6866, 6894, 10920, 11880, 11891, 11902, 9982, 9993, 10005, 10016, 10022, 10117, 10170, 8770, 14340, 14830, 8296, 8322, 9633, 9676, 10820, 10852, 10878, 14947, 14999, 15046, 7846, 7940, 8030, 8111, 12355, 12574, 10330, 10392, 10445, 8485, 8532, 8558, 8584, 6800, 6811, 6822, 11155, 11307, 11398, 8653, 8683, 8706, 8729, 6400, 6491, 6527, 7567, 7620, 7643, 7666, 9895, 8816, 9230, 10774, 12120, 12346, 12436, 9037, 9139, 9205, 9259, 12330, 12484, 12589, 13023, 11786, 13058, 15446, 15509, 15553, 14131, 14210, 8746, 8757, 7946, 8062, 8194, 11597, 6361, 6418, 6465, 6951, 7035, 8179, 8262, 14799, 14261, 10991, 11091, 11183, 11266, 11808, 8408, 8452, 8476, 8502, 8811, 8911, 8982, 9050, 14826, 15361, 12370, 12501, 12616, 10975, 10986, 10997, 11008, 10234, 10272, 10315, 10378, 6998, 8102, 8182, 7909, 7960, 8011, 8077, 8599, 8610, 10495, 10544, 10587, 10627, 5912, 5923, 5934, 5946, 8572, 8845, 8909, 9590, 6703, 6742, 6768, 6786, 11030, 11124, 11225, 9823, 9846, 9876, 10209, 10243, 10268, 13265, 13580, 15115, 15515, 6331, 7778, 7828, 7863, 12434, 12606, 8352, 8797, 8987, 9492, 9935, 10118, 10600, 11603, 11639, 10471, 10556, 11735, 11777, 13431, 13486, 13545, 7373, 7384, 6112, 6141, 6163, 6284, 6443, 6533, 6054, 6085, 6114, 6143, 12220, 12417, 8889, 8990, 9072, 9147, 10822, 10838, 10856, 10871, 7890, 7954, 7999, 8052, 14029, 10519, 11125, 7678, 7702, 7739, 7805, 7506, 7759, 7983, 6591, 6611, 6633, 8531, 11907, 12393, 12967, 8789, 8872, 7926, 7975, 8021, 8094, 9724, 13071, 13114, 13165, 13209, 8330, 8354, 8390, 8423, 14351, 14366, 6205, 6303, 6415, 6193, 6250, 10961, 11132, 11325, 11706, 9283, 9327, 9365, 11335, 11391, 11450, 9819, 9840, 
9857, 12923, 6402, 6516, 6555, 11140, 11278, 11365, 11425, 6964, 8152, 8225, 8283, 6058, 6084, 6108, 6467, 7793, 7848, 7906, 12141, 12388, 12483, 12978, 15287, 15359, 15430, 15484, 6602, 6693, 6717, 11100, 11324, 11423, 10323, 10383, 10435, 10490, 11583, 11618, 11654, 7738, 7765, 7807, 13871, 14642, 14878, 14944, 13241, 7292, 7322, 7346, 7362, 11211, 11320, 11394, 13156, 13243, 13324, 13380, 10042, 10101, 10439, 11475, 9269, 9345, 9429, 9463, 10554, 10604, 10650, 10726, 11565, 14884, 14950, 14997, 15034, 10040, 10120, 10174, 12678, 12827, 12942, 13039, 7406, 7431, 8044, 8115, 8159, 8236, 9949, 9960, 9971, 10327, 10425, 9604, 9632, 9671, 6169, 6245, 6297, 6000, 6011, 6022, 9704, 10894, 10905, 10915, 13198, 13274, 10702, 10773, 10799, 14241, 8594, 9031, 9364, 11294, 11376, 11449, 15263, 15344, 12432, 12544, 9735, 9763, 9784, 9800, 10048, 10328, 14852, 8406, 8445, 11117, 11224, 11330, 9511, 9527, 9543, 13809, 13868, 13918, 14377, 9901, 9922, 9933, 10406, 10582, 12573, 12666, 11906, 12359, 12455, 12665, 13857, 13907, 13944, 12701, 12875, 7713, 7798, 7856, 7930, 14155, 13768, 13820, 13869, 12025, 12091, 12190, 13362, 13416, 13509, 6294, 6460, 6551, 11676, 11705, 11750, 12717, 15188, 15215, 15271, 11909, 12761, 12929, 13048, 13945, 13975, 13999, 11846, 11857, 11868, 13519, 13608, 13665, 13714, 14537, 14549, 14560, 14571, 12057, 12136, 12223, 13409, 13462, 13524, 13578, 11911, 11971, 12004, 12052, 13142, 13233, 13314, 13379, 14979, 15047, 15089, 12714, 12879, 15214, 15304, 15404, 14628, 14641, 12218, 12608, 12787, 12950, 14991, 15056, 15108, 15139, 14293, 11482, 11515, 11537, 11558, 13352, 13400, 13435, 13483, 14378, 14397, 14408, 14421, 8293, 9536, 9587, 9648, 11914, 11983, 12038, 13017, 13676, 14323, 14336, 12214, 12459, 12656, 12883, 14649, 14664, 14675, 14686, 13803, 13864, 13931, 13135, 13833, 14492, 15413, 12189, 12291, 13637, 13688, 13730, 12917, 9584, 9622, 9669, 11506, 11532, 11563, 15165, 14219, 14236, 14089, 14168, 14209, 13976, 14013, 14040, 13407, 14018, 
14755, 14882, 13081, 13120, 13166, 14125, 14170, 14207, 13224, 14109, 14842, 15372, 13796, 14681, 14876, 8621, 8632, 8644, 8668, 12188, 12281, 15059, 15123, 15157, 14763, 14780, 14808, 12095, 12157, 12239, 12446, 12649, 12832, 12175, 13194, 13278, 13343, 8419, 8784, 9108, 9568, 13828, 13885, 14840, 14975, 15065, 15130, 13215, 13313, 14736, 14750, 15159, 15230, 15302, 8363, 11912, 11961, 12689, 13045, 14834, 14920, 14987, 12117, 12187, 12290, 12584, 12728, 12851, 11944, 10301, 15308, 15392, 15475, 13040, 12565, 12679, 14872] +fold 10: +[8667, 8690, 8713, 14436, 14454, 14481, 6836, 14506, 15235, 15301, 15387, 15478, 14292, 14320, 15313, 15370, 15427, 13789, 11803, 14004, 14039, 14056, 14088, 12012, 12056, 12102, 12260, 12768, 12877, 12563, 12746, 12881, 13606, 13636, 13680, 13102, 15506, 15538, 6962, 7009, 7058, 7099, 8168, 11945, 10088, 14115, 14169, 14201, 6694, 6729, 6754, 6773, 6577, 6619, 7294, 7312, 7340, 11812, 11824, 11839, 9728, 9756, 9782, 10634, 10691, 10741, 7112, 7151, 7189, 7238, 8422, 8461, 8493, 12372, 7164, 7206, 7239, 7264, 6654, 6668, 6679, 11059, 11177, 11275, 7229, 7267, 7278, 10523, 10613, 10745, 7006, 7074, 7113, 7135, 11078, 11208, 11528, 9088, 9195, 9291, 9363, 7505, 7535, 7574, 7608, 5900, 6897, 6931, 8504, 8536, 8557, 8577, 7518, 7569, 7624, 7660, 14698, 14709, 14720, 5961, 14070, 14124, 14797, 8809, 9157, 9248, 9372, 11069, 11168, 11518, 10073, 10145, 10190, 10233, 5953, 6422, 6650, 6896, 7604, 7661, 7688, 7721, 7092, 7254, 7396, 12328, 12485, 12602, 8854, 8950, 9032, 9104, 6875, 6899, 6919, 12965, 14579, 14592, 14605, 8956, 9052, 9133, 9194, 11690, 11725, 11761, 6996, 7064, 8226, 14473, 14503, 14527, 7782, 8170, 9161, 9211, 9265, 12020, 13500, 13554, 13598, 9913, 10131, 10192, 10646, 15401, 15483, 15516, 15546, 8765, 8843, 8910, 8972, 6227, 6333, 6439, 13213, 13705, 14946, 15424, 9494, 9537, 9567, 13267, 10678, 10724, 10770, 13444, 13514, 13587, 7400, 7432, 7468, 7509, 8350, 8411, 11587, 13604, 13653, 13700, 10294, 10368, 10431, 9333, 
9395, 9448, 6234, 6279, 6339, 10567, 10651, 11063, 11182, 11317, 11715, 7002, 7093, 8157, 8287, 9414, 9454, 9487, 9508, 6984, 7053, 8173, 8285, 6940, 7682, 7984, 8209, 6176, 6325, 6445, 6509, 8951, 9004, 9074, 6845, 6856, 6876, 6905, 10924, 11885, 11896, 9983, 9994, 10006, 10017, 9858, 10078, 10149, 10699, 8773, 14267, 14789, 8299, 8326, 9638, 9681, 10810, 10833, 10865, 10882, 14941, 14994, 15041, 7791, 7907, 7996, 8081, 12476, 12945, 10293, 10351, 10414, 10472, 8497, 8538, 8564, 8587, 6799, 6810, 6821, 11083, 11253, 11366, 11438, 8677, 8697, 8722, 6417, 6499, 6532, 7595, 7631, 7654, 9910, 8703, 8876, 10769, 12030, 12333, 12405, 9055, 9154, 9217, 12186, 12449, 12550, 12683, 11780, 13079, 15482, 15528, 14141, 14214, 8744, 8755, 6978, 7991, 8112, 8253, 11593, 6342, 6408, 6454, 6990, 7082, 8227, 14811, 14258, 11019, 11096, 11199, 11276, 8424, 8460, 8483, 8512, 8829, 8917, 8993, 9058, 14204, 15221, 12305, 12482, 12594, 12795, 10970, 10981, 10993, 11004, 10240, 10278, 10321, 10385, 8018, 8109, 8191, 7896, 7952, 8000, 8069, 8597, 8608, 10487, 10538, 10583, 10623, 5918, 5929, 5940, 8791, 8871, 9141, 6736, 6764, 6784, 11046, 11148, 11242, 9812, 9833, 9862, 9889, 10186, 10235, 10258, 13115, 13491, 14977, 15403, 6038, 7762, 7818, 7855, 7895, 12576, 8792, 8921, 9374, 10043, 10194, 10711, 11589, 11619, 10444, 10543, 11762, 11785, 13442, 13496, 7368, 7379, 7390, 6098, 6130, 6153, 6181, 6206, 6330, 6489, 6545, 6050, 6082, 6110, 6139, 12301, 12846, 8919, 9015, 9095, 14427, 10828, 10846, 10863, 7927, 7977, 8020, 13902, 10514, 11070, 11166, 7690, 7719, 7767, 7539, 7770, 7995, 6589, 6609, 6630, 8325, 8821, 12099, 12428, 8842, 8924, 7934, 7978, 8026, 9722, 9746, 13094, 13140, 13186, 8332, 8356, 8393, 14357, 6171, 6273, 6368, 6472, 6174, 6241, 6307, 11011, 11165, 11389, 11799, 9302, 9343, 9378, 11352, 11405, 11462, 9820, 9842, 9860, 11774, 6370, 6506, 6549, 11240, 11332, 11401, 8083, 8164, 8233, 8288, 5970, 6065, 6089, 6113, 6392, 7809, 7857, 7917, 12229, 12395, 12491, 13032, 15330, 
15390, 15461, 6599, 6689, 6715, 11024, 11220, 11372, 10337, 10395, 10447, 10501, 11591, 11632, 7722, 7752, 7790, 14238, 14835, 14913, 13222, 7304, 7331, 7354, 11257, 11351, 11420, 13122, 13221, 13291, 13366, 10030, 10095, 10355, 11480, 9310, 9409, 9445, 10563, 10609, 10658, 11503, 11564, 14929, 14978, 15021, 10046, 10126, 10178, 6371, 12790, 12901, 13019, 7422, 8107, 8149, 8214, 9947, 9958, 9969, 10314, 10374, 9600, 9630, 9668, 6173, 6249, 6305, 5992, 6003, 6014, 6026, 9700, 10891, 10902, 10912, 13143, 13240, 13316, 10689, 10762, 10796, 14249, 8803, 9053, 9394, 11299, 11385, 11451, 15321, 15381, 12351, 12470, 9745, 9767, 9787, 9802, 9966, 10109, 14829, 14889, 8416, 8450, 11097, 11206, 11310, 9486, 9517, 9533, 9551, 13787, 13846, 13900, 13939, 14384, 9890, 9914, 9928, 10438, 10628, 12562, 12654, 12204, 12381, 12477, 12924, 13830, 13884, 13925, 12567, 12774, 12962, 7729, 7819, 7869, 14139, 13758, 13799, 13854, 13909, 12035, 12103, 12207, 13368, 13433, 13515, 6367, 6512, 6564, 11673, 11698, 11746, 12823, 15175, 15196, 15226, 15286, 11926, 12803, 12973, 13929, 13967, 13991, 14021, 11828, 11852, 11863, 11874, 13553, 13622, 13677, 13727, 14545, 14556, 14567, 12015, 12105, 12183, 12279, 13417, 13473, 13536, 13591, 11917, 11973, 12007, 12058, 13151, 13242, 13322, 14961, 15035, 15084, 12817, 15192, 15270, 15363, 14624, 14636, 14651, 12270, 12590, 12773, 12928, 13025, 15026, 15090, 15128, 14281, 11507, 11531, 11551, 13399, 13432, 13478, 14383, 14399, 14410, 8327, 9575, 9626, 9670, 11908, 11980, 12022, 13008, 13660, 14331, 12539, 12745, 13013, 14654, 14666, 14677, 14689, 13767, 13819, 13876, 13942, 13430, 13943, 14670, 12134, 12247, 13627, 13679, 13724, 12960, 8316, 9602, 9650, 11501, 11530, 11559, 15150, 15190, 7297, 14234, 14062, 14140, 14191, 13990, 14025, 12292, 13786, 14392, 14848, 13084, 13126, 13174, 14135, 14178, 14215, 13595, 14348, 14966, 13717, 14563, 14867, 8619, 8630, 8642, 8663, 12238, 15076, 15129, 14760, 14773, 14798, 12101, 12162, 12250, 12469, 12670, 12848, 
12154, 13229, 13305, 13363, 8361, 8781, 9076, 9514, 11033, 13810, 13866, 14938, 15038, 15118, 13227, 13320, 14729, 14742, 15156, 15227, 15294, 8346, 8373, 11924, 11967, 12735, 14843, 14928, 15010, 12111, 12177, 12278, 12570, 12713, 12839, 11969, 15225, 15335, 15421, 15492, 12993, 12634, 14879] diff --git a/Data/d36_updated.xlsx b/Data/d36_updated.xlsx new file mode 100644 index 0000000..dad3ea2 Binary files /dev/null and b/Data/d36_updated.xlsx differ diff --git a/Data/original/d32_updated.xlsx b/Data/original/d32_updated.xlsx new file mode 100644 index 0000000..0121af1 Binary files /dev/null and b/Data/original/d32_updated.xlsx differ diff --git a/Data/original/d36_updated.xlsx b/Data/original/d36_updated.xlsx new file mode 100644 index 0000000..29558e4 Binary files /dev/null and b/Data/original/d36_updated.xlsx differ diff --git a/Data/original/schram.xlsx b/Data/original/schram.xlsx new file mode 100644 index 0000000..38e6114 Binary files /dev/null and b/Data/original/schram.xlsx differ diff --git a/Data/original/tal.xlsx b/Data/original/tal.xlsx new file mode 100644 index 0000000..e3a9c29 Binary files /dev/null and b/Data/original/tal.xlsx differ diff --git a/Data/scenario_table_N3.csv b/Data/scenario_table_N3.csv new file mode 100644 index 0000000..039627a --- /dev/null +++ b/Data/scenario_table_N3.csv @@ -0,0 +1,7 @@ +Pref1_pos,Pref2_pos,Pref3_pos,scenario,name +1,2,3,1,TRT +1,3,2,2,TRT +2,1,3,3,WLB +2,3,1,4,SLB +3,1,2,5,CMPLB +3,2,1,6,CMP diff --git a/Data/scenario_table_N4.csv b/Data/scenario_table_N4.csv new file mode 100644 index 0000000..00f5322 --- /dev/null +++ b/Data/scenario_table_N4.csv @@ -0,0 +1,25 @@ +Pref1_pos,Pref2_pos,Pref3_pos,Pref4_pos,scenario,name +1,2,3,4,1,TRT +1,2,4,3,2,TRT +1,3,2,4,3,TRT +1,4,2,3,4,TRT +1,3,4,2,5,TRT +1,4,3,2,6,TRT +2,1,3,4,7,LB +2,1,4,3,8,LB +3,1,2,4,9,CMPLB +4,1,2,3,10,CMPLB +3,1,4,2,11,CMPLB +4,1,3,2,12,CMPLB +2,3,1,4,13,LB +2,4,1,3,14,LB +3,2,1,4,15,CMP +4,2,1,3,16,CMP +3,4,1,2,17,LB +4,3,1,2,18,CMPLB 
+2,3,4,1,19,LB +2,4,3,1,20,LB +3,2,4,1,21,CMP +4,2,3,1,22,CMP +3,4,2,1,23,CMP +4,3,2,1,24,CMP diff --git a/Data/schram.xlsx b/Data/schram.xlsx new file mode 100644 index 0000000..38e6114 Binary files /dev/null and b/Data/schram.xlsx differ diff --git a/Data/schram_folds.txt b/Data/schram_folds.txt new file mode 100644 index 0000000..3ef2b69 --- /dev/null +++ b/Data/schram_folds.txt @@ -0,0 +1,20 @@ +fold 1: +[81, 321, 561, 801, 1089, 1329, 1569, 1809, 2193, 2433, 2673, 2913, 3081, 3321, 3561, 3801, 4137, 4377, 4617, 4857, 4977, 5217, 5457, 5697, 130, 370, 610, 850, 1210, 1450, 1690, 1930, 2002, 2242, 2482, 2722, 2962, 3202, 3442, 3682, 3922, 4090, 4330, 4570, 4810, 5002, 5242, 5482, 5722, 11, 251, 491, 731, 971, 995, 1235, 1475, 1715, 1955, 2099, 2339, 2579, 2819, 3155, 3395, 3635, 3875, 4043, 4283, 4523, 4763, 5075, 5315, 5555, 5795, 204, 444, 684, 924, 1188, 1428, 1668, 1908, 2100, 2340, 2580, 2820, 3156, 3396, 3636, 3876, 3996, 4236, 4476, 4716, 5004, 5244, 5484, 5724, 133, 373, 613, 853, 1045, 1285, 1525, 1765, 2029, 2269, 2509, 2749, 3133, 3373, 3613, 3853, 4069, 4309, 4549, 4789, 4981, 5221, 5461, 5701, 86, 326, 566, 806, 1190, 1430, 1670, 1910, 2198, 2438, 2678, 2918, 3110, 3350, 3590, 3830, 3998, 4238, 4478, 4718, 5030, 5270, 5510, 5750, 15, 255, 495, 735, 975, 999, 1239, 1479, 1719, 1959, 2127, 2367, 2607, 2847, 2967, 3207, 3447, 3687, 3927, 4071, 4311, 4551, 4791, 4935, 5175, 5415, 5655, 5895, 208, 448, 688, 928, 1192, 1432, 1672, 1912, 1984, 2224, 2464, 2704, 2944, 3160, 3400, 3640, 3880, 4072, 4312, 4552, 4792, 5104, 5344, 5584, 5824, 89, 329, 569, 809, 1169, 1409, 1649, 1889, 2177, 2417, 2657, 2897, 3137, 3377, 3617, 3857, 4145, 4385, 4625, 4865, 4961, 5201, 5441, 5681, 18, 258, 498, 738, 978, 1146, 1386, 1626, 1866, 2154, 2394, 2634, 2874, 3138, 3378, 3618, 3858, 4170, 4410, 4650, 4890, 5106, 5346, 5586, 5826, 168, 408, 648, 888, 984, 1224, 1464, 1704, 1944, 2088, 2328, 2568, 2808, 3096, 3336, 3576, 3816, 4032, 4272, 4512, 4752, 4944, 5184, 5424, 
5664, 67, 307, 547, 787, 1195, 1435, 1675, 1915, 2179, 2419, 2659, 2899, 3115, 3355, 3595, 3835, 4027, 4267, 4507, 4747, 5011, 5251, 5491, 5731, 116, 356, 596, 836, 1076, 1316, 1556, 1796, 2084, 2324, 2564, 2804, 3020, 3260, 3500, 3740, 4052, 4292, 4532, 4772, 5060, 5300, 5540, 5780, 213, 453, 693, 933, 1101, 1341, 1581, 1821, 1989, 2229, 2469, 2709, 2949, 3189, 3429, 3669, 3909, 3981, 4221, 4461, 4701, 5037, 5277, 5517, 5757, 238, 478, 718, 958, 1150, 1390, 1630, 1870, 2182, 2422, 2662, 2902, 2974, 3214, 3454, 3694, 3934, 4054, 4294, 4534, 4774, 5158, 5398, 5638, 5878, 71, 311, 551, 791, 1199, 1439, 1679, 1919, 2159, 2399, 2639, 2879, 2999, 3239, 3479, 3719, 4175, 4415, 4655, 4895, 5087, 5327, 5567, 5807, 169, 409, 649, 889, 1105, 1345, 1585, 1825, 2017, 2257, 2497, 2737, 2977, 3217, 3457, 3697, 4009, 4249, 4489, 4729, 5113, 5353, 5593, 5833, 194, 434, 674, 914, 1154, 1394, 1634, 1874, 2090, 2330, 2570, 2810, 3170, 3410, 3650, 3890, 4082, 4322, 4562, 4802, 5042, 5282, 5522, 5762, 27, 267, 507, 747, 1035, 1275, 1515, 1755, 1971, 2211, 2451, 2691, 2931, 3171, 3411, 3651, 3891, 4107, 4347, 4587, 4827, 4971, 5211, 5451, 5691, 100, 340, 580, 820, 1084, 1324, 1564, 1804, 2164, 2404, 2644, 2884, 3004, 3244, 3484, 3724, 4036, 4276, 4516, 4756, 5020, 5260, 5500, 5740, 149, 389, 629, 869, 1133, 1373, 1613, 1853, 2045, 2285, 2525, 2765, 2957, 3197, 3437, 3677, 3917, 4157, 4397, 4637, 4877, 4949, 5189, 5429, 5669, 102, 342, 582, 822, 1158, 1398, 1638, 1878, 2070, 2310, 2550, 2790, 3054, 3294, 3534, 3774, 4158, 4398, 4638, 4878, 5046, 5286, 5526, 5766, 223, 463, 703, 943, 1087, 1327, 1567, 1807, 2023, 2263, 2503, 2743, 3055, 3295, 3535, 3775, 3967, 4207, 4447, 4687, 4999, 5239, 5479, 5719, 224, 464, 704, 944, 1112, 1352, 1592, 1832, 2096, 2336, 2576, 2816, 3104, 3344, 3584, 3824, 4088, 4328, 4568, 4808, 5024, 5264, 5504, 5744] +fold 2: +[153, 393, 633, 873, 1209, 1449, 1689, 1929, 2169, 2409, 2649, 2889, 3057, 3297, 3537, 3777, 4065, 4305, 4545, 4785, 5097, 5337, 5577, 5817, 
34, 274, 514, 754, 1186, 1426, 1666, 1906, 2098, 2338, 2578, 2818, 3082, 3322, 3562, 3802, 4066, 4306, 4546, 4786, 4954, 5194, 5434, 5674, 179, 419, 659, 899, 1019, 1259, 1499, 1739, 2027, 2267, 2507, 2747, 3083, 3323, 3563, 3803, 3947, 4187, 4427, 4667, 4907, 5027, 5267, 5507, 5747, 60, 300, 540, 780, 1212, 1452, 1692, 1932, 1980, 2220, 2460, 2700, 2940, 2988, 3228, 3468, 3708, 4140, 4380, 4620, 4860, 5076, 5316, 5556, 5796, 37, 277, 517, 757, 1117, 1357, 1597, 1837, 2197, 2437, 2677, 2917, 3085, 3325, 3565, 3805, 4093, 4333, 4573, 4813, 5077, 5317, 5557, 5797, 110, 350, 590, 830, 1094, 1334, 1574, 1814, 2078, 2318, 2558, 2798, 2990, 3230, 3470, 3710, 3950, 4190, 4430, 4670, 4910, 5150, 5390, 5630, 5870, 135, 375, 615, 855, 1143, 1383, 1623, 1863, 2007, 2247, 2487, 2727, 3111, 3351, 3591, 3831, 4047, 4287, 4527, 4767, 5007, 5247, 5487, 5727, 64, 304, 544, 784, 1216, 1456, 1696, 1936, 2032, 2272, 2512, 2752, 2968, 3208, 3448, 3688, 3928, 4144, 4384, 4624, 4864, 5056, 5296, 5536, 5776, 209, 449, 689, 929, 1145, 1385, 1625, 1865, 2009, 2249, 2489, 2729, 3041, 3281, 3521, 3761, 4121, 4361, 4601, 4841, 5105, 5345, 5585, 5825, 162, 402, 642, 882, 1002, 1242, 1482, 1722, 1962, 2034, 2274, 2514, 2754, 3186, 3426, 3666, 3906, 3978, 4218, 4458, 4698, 5130, 5370, 5610, 5850, 24, 264, 504, 744, 1104, 1344, 1584, 1824, 2064, 2304, 2544, 2784, 3024, 3264, 3504, 3744, 4152, 4392, 4632, 4872, 5040, 5280, 5520, 5760, 19, 259, 499, 739, 979, 1147, 1387, 1627, 1867, 1987, 2227, 2467, 2707, 2947, 3139, 3379, 3619, 3859, 4147, 4387, 4627, 4867, 5059, 5299, 5539, 5779, 164, 404, 644, 884, 1220, 1460, 1700, 1940, 2012, 2252, 2492, 2732, 3092, 3332, 3572, 3812, 4172, 4412, 4652, 4892, 4988, 5228, 5468, 5708, 141, 381, 621, 861, 1077, 1317, 1557, 1797, 2013, 2253, 2493, 2733, 2997, 3237, 3477, 3717, 4053, 4293, 4533, 4773, 5109, 5349, 5589, 5829, 70, 310, 550, 790, 1006, 1246, 1486, 1726, 1966, 1990, 2230, 2470, 2710, 2950, 3094, 3334, 3574, 3814, 4126, 4366, 4606, 4846, 4966, 5206, 5446, 
5686, 239, 479, 719, 959, 1151, 1391, 1631, 1871, 2087, 2327, 2567, 2807, 3143, 3383, 3623, 3863, 4055, 4295, 4535, 4775, 5063, 5303, 5543, 5783, 1, 241, 481, 721, 961, 1177, 1417, 1657, 1897, 2161, 2401, 2641, 2881, 3025, 3265, 3505, 3745, 4153, 4393, 4633, 4873, 5065, 5305, 5545, 5785, 218, 458, 698, 938, 1202, 1442, 1682, 1922, 2162, 2402, 2642, 2882, 2954, 3194, 3434, 3674, 3914, 4130, 4370, 4610, 4850, 4922, 5162, 5402, 5642, 5882, 75, 315, 555, 795, 1155, 1395, 1635, 1875, 2163, 2403, 2643, 2883, 2955, 3195, 3435, 3675, 3915, 3963, 4203, 4443, 4683, 5067, 5307, 5547, 5787, 76, 316, 556, 796, 1156, 1396, 1636, 1876, 1972, 2212, 2452, 2692, 2932, 2956, 3196, 3436, 3676, 3916, 4156, 4396, 4636, 4876, 5068, 5308, 5548, 5788, 5, 245, 485, 725, 965, 989, 1229, 1469, 1709, 1949, 2093, 2333, 2573, 2813, 3149, 3389, 3629, 3869, 4109, 4349, 4589, 4829, 5141, 5381, 5621, 5861, 30, 270, 510, 750, 1062, 1302, 1542, 1782, 1974, 2214, 2454, 2694, 2934, 3006, 3246, 3486, 3726, 3942, 4182, 4422, 4662, 4902, 5142, 5382, 5622, 5862, 175, 415, 655, 895, 1183, 1423, 1663, 1903, 1975, 2215, 2455, 2695, 2935, 3127, 3367, 3607, 3847, 3991, 4231, 4471, 4711, 5095, 5335, 5575, 5815, 176, 416, 656, 896, 1184, 1424, 1664, 1904, 2144, 2384, 2624, 2864, 3008, 3248, 3488, 3728, 4136, 4376, 4616, 4856, 4952, 5192, 5432, 5672] +fold 3: +[177, 417, 657, 897, 1017, 1257, 1497, 1737, 1977, 2217, 2457, 2697, 2937, 3105, 3345, 3585, 3825, 4113, 4353, 4593, 4833, 4953, 5193, 5433, 5673, 82, 322, 562, 802, 994, 1234, 1474, 1714, 1954, 1978, 2218, 2458, 2698, 2938, 2986, 3226, 3466, 3706, 3946, 4186, 4426, 4666, 4906, 5026, 5266, 5506, 5746, 107, 347, 587, 827, 1211, 1451, 1691, 1931, 2075, 2315, 2555, 2795, 2963, 3203, 3443, 3683, 3923, 3971, 4211, 4451, 4691, 5147, 5387, 5627, 5867, 84, 324, 564, 804, 1164, 1404, 1644, 1884, 2196, 2436, 2676, 2916, 2964, 3204, 3444, 3684, 3924, 4092, 4332, 4572, 4812, 4956, 5196, 5436, 5676, 85, 325, 565, 805, 1141, 1381, 1621, 1861, 2149, 2389, 2629, 2869, 2989, 
3229, 3469, 3709, 3973, 4213, 4453, 4693, 4957, 5197, 5437, 5677, 134, 374, 614, 854, 1214, 1454, 1694, 1934, 2102, 2342, 2582, 2822, 3062, 3302, 3542, 3782, 3974, 4214, 4454, 4694, 4934, 5174, 5414, 5654, 5894, 39, 279, 519, 759, 1215, 1455, 1695, 1935, 2031, 2271, 2511, 2751, 3015, 3255, 3495, 3735, 4143, 4383, 4623, 4863, 5127, 5367, 5607, 5847, 184, 424, 664, 904, 1072, 1312, 1552, 1792, 2176, 2416, 2656, 2896, 3184, 3424, 3664, 3904, 3976, 4216, 4456, 4696, 5152, 5392, 5632, 5872, 17, 257, 497, 737, 977, 1121, 1361, 1601, 1841, 2033, 2273, 2513, 2753, 3017, 3257, 3497, 3737, 4073, 4313, 4553, 4793, 5057, 5297, 5537, 5777, 90, 330, 570, 810, 1026, 1266, 1506, 1746, 2178, 2418, 2658, 2898, 2970, 3210, 3450, 3690, 3930, 4146, 4386, 4626, 4866, 5154, 5394, 5634, 5874, 216, 456, 696, 936, 1008, 1248, 1488, 1728, 1968, 2208, 2448, 2688, 2928, 2976, 3216, 3456, 3696, 4080, 4320, 4560, 4800, 5064, 5304, 5544, 5784, 139, 379, 619, 859, 1027, 1267, 1507, 1747, 2083, 2323, 2563, 2803, 3067, 3307, 3547, 3787, 3955, 4195, 4435, 4675, 4915, 4963, 5203, 5443, 5683, 44, 284, 524, 764, 1004, 1244, 1484, 1724, 1964, 1988, 2228, 2468, 2708, 2948, 3116, 3356, 3596, 3836, 4004, 4244, 4484, 4724, 5108, 5348, 5588, 5828, 165, 405, 645, 885, 1149, 1389, 1629, 1869, 2181, 2421, 2661, 2901, 2973, 3213, 3453, 3693, 3933, 4125, 4365, 4605, 4845, 4941, 5181, 5421, 5661, 5901, 22, 262, 502, 742, 982, 1174, 1414, 1654, 1894, 2110, 2350, 2590, 2830, 3022, 3262, 3502, 3742, 4150, 4390, 4630, 4870, 5110, 5350, 5590, 5830, 119, 359, 599, 839, 1127, 1367, 1607, 1847, 2135, 2375, 2615, 2855, 2975, 3215, 3455, 3695, 3935, 4031, 4271, 4511, 4751, 4967, 5207, 5447, 5687, 121, 361, 601, 841, 1033, 1273, 1513, 1753, 2041, 2281, 2521, 2761, 3145, 3385, 3625, 3865, 4129, 4369, 4609, 4849, 4969, 5209, 5449, 5689, 98, 338, 578, 818, 1034, 1274, 1514, 1754, 2138, 2378, 2618, 2858, 3146, 3386, 3626, 3866, 4010, 4250, 4490, 4730, 4970, 5210, 5450, 5690, 51, 291, 531, 771, 1179, 1419, 1659, 1899, 1995, 2235, 
2475, 2715, 3147, 3387, 3627, 3867, 4059, 4299, 4539, 4779, 5115, 5355, 5595, 5835, 148, 388, 628, 868, 988, 1228, 1468, 1708, 1948, 2116, 2356, 2596, 2836, 3124, 3364, 3604, 3844, 4132, 4372, 4612, 4852, 4948, 5188, 5428, 5668, 29, 269, 509, 749, 1085, 1325, 1565, 1805, 2141, 2381, 2621, 2861, 2981, 3221, 3461, 3701, 4133, 4373, 4613, 4853, 5117, 5357, 5597, 5837, 150, 390, 630, 870, 1014, 1254, 1494, 1734, 2046, 2286, 2526, 2766, 3174, 3414, 3654, 3894, 4014, 4254, 4494, 4734, 4950, 5190, 5430, 5670, 7, 247, 487, 727, 967, 1111, 1351, 1591, 1831, 2119, 2359, 2599, 2839, 3007, 3247, 3487, 3727, 4159, 4399, 4639, 4879, 4951, 5191, 5431, 5671, 104, 344, 584, 824, 1016, 1256, 1496, 1736, 2168, 2408, 2648, 2888, 2960, 3200, 3440, 3680, 3920, 4016, 4256, 4496, 4736, 4928, 5168, 5408, 5648, 5888] +fold 4: +[9, 249, 489, 729, 969, 1041, 1281, 1521, 1761, 2001, 2241, 2481, 2721, 3129, 3369, 3609, 3849, 3945, 4185, 4425, 4665, 4905, 5025, 5265, 5505, 5745, 226, 466, 706, 946, 1042, 1282, 1522, 1762, 2122, 2362, 2602, 2842, 3178, 3418, 3658, 3898, 4018, 4258, 4498, 4738, 5074, 5314, 5554, 5794, 155, 395, 635, 875, 1067, 1307, 1547, 1787, 2147, 2387, 2627, 2867, 3131, 3371, 3611, 3851, 4091, 4331, 4571, 4811, 4979, 5219, 5459, 5699, 108, 348, 588, 828, 1140, 1380, 1620, 1860, 2004, 2244, 2484, 2724, 3084, 3324, 3564, 3804, 4020, 4260, 4500, 4740, 5124, 5364, 5604, 5844, 13, 253, 493, 733, 973, 1069, 1309, 1549, 1789, 2125, 2365, 2605, 2845, 3109, 3349, 3589, 3829, 3949, 4189, 4429, 4669, 4909, 5005, 5245, 5485, 5725, 230, 470, 710, 950, 1118, 1358, 1598, 1838, 2126, 2366, 2606, 2846, 3134, 3374, 3614, 3854, 4166, 4406, 4646, 4886, 4958, 5198, 5438, 5678, 159, 399, 639, 879, 1095, 1335, 1575, 1815, 1983, 2223, 2463, 2703, 2943, 3135, 3375, 3615, 3855, 4095, 4335, 4575, 4815, 5055, 5295, 5535, 5775, 232, 472, 712, 952, 1168, 1408, 1648, 1888, 2008, 2248, 2488, 2728, 3136, 3376, 3616, 3856, 4048, 4288, 4528, 4768, 4936, 5176, 5416, 5656, 5896, 161, 401, 641, 881, 1097, 1337, 
1577, 1817, 1985, 2225, 2465, 2705, 2945, 3065, 3305, 3545, 3785, 4169, 4409, 4649, 4889, 4985, 5225, 5465, 5705, 210, 450, 690, 930, 1122, 1362, 1602, 1842, 2202, 2442, 2682, 2922, 3090, 3330, 3570, 3810, 4002, 4242, 4482, 4722, 5034, 5274, 5514, 5754, 72, 312, 552, 792, 1152, 1392, 1632, 1872, 2040, 2280, 2520, 2760, 2952, 3192, 3432, 3672, 3912, 3984, 4224, 4464, 4704, 5088, 5328, 5568, 5808, 187, 427, 667, 907, 1051, 1291, 1531, 1771, 2059, 2299, 2539, 2779, 3187, 3427, 3667, 3907, 4051, 4291, 4531, 4771, 4939, 5179, 5419, 5659, 5899, 20, 260, 500, 740, 980, 1148, 1388, 1628, 1868, 2204, 2444, 2684, 2924, 3044, 3284, 3524, 3764, 3956, 4196, 4436, 4676, 4916, 5036, 5276, 5516, 5756, 189, 429, 669, 909, 1221, 1461, 1701, 1941, 2157, 2397, 2637, 2877, 3045, 3285, 3525, 3765, 4077, 4317, 4557, 4797, 5061, 5301, 5541, 5781, 166, 406, 646, 886, 1030, 1270, 1510, 1750, 2038, 2278, 2518, 2758, 3142, 3382, 3622, 3862, 3958, 4198, 4438, 4678, 4918, 4942, 5182, 5422, 5662, 5902, 23, 263, 503, 743, 983, 1007, 1247, 1487, 1727, 1967, 2063, 2303, 2543, 2783, 3167, 3407, 3647, 3887, 4103, 4343, 4583, 4823, 5015, 5255, 5495, 5735, 193, 433, 673, 913, 1057, 1297, 1537, 1777, 2113, 2353, 2593, 2833, 2953, 3193, 3433, 3673, 3913, 4105, 4345, 4585, 4825, 5017, 5257, 5497, 5737, 2, 242, 482, 722, 962, 1058, 1298, 1538, 1778, 2042, 2282, 2522, 2762, 2978, 3218, 3458, 3698, 4058, 4298, 4538, 4778, 4946, 5186, 5426, 5666, 147, 387, 627, 867, 987, 1227, 1467, 1707, 1947, 2067, 2307, 2547, 2787, 3027, 3267, 3507, 3747, 4011, 4251, 4491, 4731, 4995, 5235, 5475, 5715, 28, 268, 508, 748, 1204, 1444, 1684, 1924, 2188, 2428, 2668, 2908, 3148, 3388, 3628, 3868, 3964, 4204, 4444, 4684, 5044, 5284, 5524, 5764, 221, 461, 701, 941, 1157, 1397, 1637, 1877, 1973, 2213, 2453, 2693, 2933, 3077, 3317, 3557, 3797, 4061, 4301, 4541, 4781, 4925, 5165, 5405, 5645, 5885, 78, 318, 558, 798, 1110, 1350, 1590, 1830, 2022, 2262, 2502, 2742, 3126, 3366, 3606, 3846, 4086, 4326, 4566, 4806, 5070, 5310, 5550, 
5790, 55, 295, 535, 775, 1039, 1279, 1519, 1759, 2047, 2287, 2527, 2767, 3175, 3415, 3655, 3895, 4135, 4375, 4615, 4855, 5047, 5287, 5527, 5767, 56, 296, 536, 776, 1064, 1304, 1544, 1784, 2048, 2288, 2528, 2768, 3152, 3392, 3632, 3872, 4112, 4352, 4592, 4832, 4976, 5216, 5456, 5696] +fold 5: +[33, 273, 513, 753, 1113, 1353, 1593, 1833, 2121, 2361, 2601, 2841, 2961, 3201, 3441, 3681, 3921, 3993, 4233, 4473, 4713, 5145, 5385, 5625, 5865, 106, 346, 586, 826, 1066, 1306, 1546, 1786, 2194, 2434, 2674, 2914, 3010, 3250, 3490, 3730, 4114, 4354, 4594, 4834, 5146, 5386, 5626, 5866, 227, 467, 707, 947, 1187, 1427, 1667, 1907, 2171, 2411, 2651, 2891, 3059, 3299, 3539, 3779, 4067, 4307, 4547, 4787, 4931, 5171, 5411, 5651, 5891, 36, 276, 516, 756, 1068, 1308, 1548, 1788, 2028, 2268, 2508, 2748, 3012, 3252, 3492, 3732, 4044, 4284, 4524, 4764, 5100, 5340, 5580, 5820, 205, 445, 685, 925, 1165, 1405, 1645, 1885, 2053, 2293, 2533, 2773, 3037, 3277, 3517, 3757, 4165, 4405, 4645, 4885, 4933, 5173, 5413, 5653, 5893, 206, 446, 686, 926, 1142, 1382, 1622, 1862, 1982, 2222, 2462, 2702, 2942, 3086, 3326, 3566, 3806, 4022, 4262, 4502, 4742, 5102, 5342, 5582, 5822, 207, 447, 687, 927, 1119, 1359, 1599, 1839, 2079, 2319, 2559, 2799, 3183, 3423, 3663, 3903, 3999, 4239, 4479, 4719, 4959, 5199, 5439, 5679, 40, 280, 520, 760, 1000, 1240, 1480, 1720, 1960, 2152, 2392, 2632, 2872, 3112, 3352, 3592, 3832, 4168, 4408, 4648, 4888, 5080, 5320, 5560, 5800, 233, 473, 713, 953, 1025, 1265, 1505, 1745, 2081, 2321, 2561, 2801, 3161, 3401, 3641, 3881, 3953, 4193, 4433, 4673, 4913, 5129, 5369, 5609, 5849, 66, 306, 546, 786, 1050, 1290, 1530, 1770, 2106, 2346, 2586, 2826, 3042, 3282, 3522, 3762, 4050, 4290, 4530, 4770, 4986, 5226, 5466, 5706, 120, 360, 600, 840, 1056, 1296, 1536, 1776, 2016, 2256, 2496, 2736, 3120, 3360, 3600, 3840, 4008, 4248, 4488, 4728, 5136, 5376, 5616, 5856, 211, 451, 691, 931, 1219, 1459, 1699, 1939, 2107, 2347, 2587, 2827, 3163, 3403, 3643, 3883, 3979, 4219, 4459, 4699, 4987, 5227, 
5467, 5707, 188, 428, 668, 908, 1196, 1436, 1676, 1916, 2156, 2396, 2636, 2876, 3164, 3404, 3644, 3884, 4076, 4316, 4556, 4796, 4940, 5180, 5420, 5660, 5900, 45, 285, 525, 765, 1029, 1269, 1509, 1749, 2205, 2445, 2685, 2925, 3069, 3309, 3549, 3789, 4101, 4341, 4581, 4821, 4965, 5205, 5445, 5685, 94, 334, 574, 814, 1102, 1342, 1582, 1822, 2062, 2302, 2542, 2782, 3070, 3310, 3550, 3790, 4006, 4246, 4486, 4726, 5014, 5254, 5494, 5734, 215, 455, 695, 935, 1031, 1271, 1511, 1751, 2015, 2255, 2495, 2735, 3119, 3359, 3599, 3839, 4079, 4319, 4559, 4799, 5111, 5351, 5591, 5831, 145, 385, 625, 865, 1201, 1441, 1681, 1921, 1969, 2209, 2449, 2689, 2929, 3097, 3337, 3577, 3817, 3937, 4177, 4417, 4657, 4897, 5041, 5281, 5521, 5761, 146, 386, 626, 866, 1106, 1346, 1586, 1826, 2018, 2258, 2498, 2738, 3074, 3314, 3554, 3794, 3962, 4202, 4442, 4682, 5066, 5306, 5546, 5786, 171, 411, 651, 891, 1083, 1323, 1563, 1803, 2043, 2283, 2523, 2763, 3099, 3339, 3579, 3819, 4083, 4323, 4563, 4803, 4947, 5187, 5427, 5667, 220, 460, 700, 940, 1132, 1372, 1612, 1852, 1996, 2236, 2476, 2716, 3100, 3340, 3580, 3820, 3940, 4180, 4420, 4660, 4900, 5140, 5380, 5620, 5860, 173, 413, 653, 893, 1109, 1349, 1589, 1829, 1997, 2237, 2477, 2717, 3173, 3413, 3653, 3893, 4013, 4253, 4493, 4733, 5069, 5309, 5549, 5789, 174, 414, 654, 894, 990, 1230, 1470, 1710, 1950, 2094, 2334, 2574, 2814, 3150, 3390, 3630, 3870, 4062, 4302, 4542, 4782, 4926, 5166, 5406, 5646, 5886, 127, 367, 607, 847, 1135, 1375, 1615, 1855, 2095, 2335, 2575, 2815, 2959, 3199, 3439, 3679, 3919, 4087, 4327, 4567, 4807, 4975, 5215, 5455, 5695, 128, 368, 608, 848, 1088, 1328, 1568, 1808, 2120, 2360, 2600, 2840, 3080, 3320, 3560, 3800, 3944, 4184, 4424, 4664, 4904, 5120, 5360, 5600, 5840] +fold 6: +[105, 345, 585, 825, 1065, 1305, 1545, 1785, 2025, 2265, 2505, 2745, 2985, 3225, 3465, 3705, 4041, 4281, 4521, 4761, 4929, 5169, 5409, 5649, 5889, 58, 298, 538, 778, 1090, 1330, 1570, 1810, 2170, 2410, 2650, 2890, 3034, 3274, 3514, 3754, 3994, 4234, 
4474, 4714, 5122, 5362, 5602, 5842, 59, 299, 539, 779, 1043, 1283, 1523, 1763, 2195, 2435, 2675, 2915, 3107, 3347, 3587, 3827, 4115, 4355, 4595, 4835, 5003, 5243, 5483, 5723, 180, 420, 660, 900, 996, 1236, 1476, 1716, 1956, 2052, 2292, 2532, 2772, 3036, 3276, 3516, 3756, 3972, 4212, 4452, 4692, 5028, 5268, 5508, 5748, 157, 397, 637, 877, 1093, 1333, 1573, 1813, 2077, 2317, 2557, 2797, 3157, 3397, 3637, 3877, 3997, 4237, 4477, 4717, 5101, 5341, 5581, 5821, 38, 278, 518, 758, 1166, 1406, 1646, 1886, 2174, 2414, 2654, 2894, 3014, 3254, 3494, 3734, 4046, 4286, 4526, 4766, 5126, 5366, 5606, 5846, 231, 471, 711, 951, 1167, 1407, 1647, 1887, 2199, 2439, 2679, 2919, 3159, 3399, 3639, 3879, 4119, 4359, 4599, 4839, 4983, 5223, 5463, 5703, 160, 400, 640, 880, 1120, 1360, 1600, 1840, 2056, 2296, 2536, 2776, 2992, 3232, 3472, 3712, 3952, 4192, 4432, 4672, 4912, 5128, 5368, 5608, 5848, 185, 425, 665, 905, 1073, 1313, 1553, 1793, 2129, 2369, 2609, 2849, 3185, 3425, 3665, 3905, 4025, 4265, 4505, 4745, 5081, 5321, 5561, 5801, 186, 426, 666, 906, 1098, 1338, 1578, 1818, 2082, 2322, 2562, 2802, 3162, 3402, 3642, 3882, 4074, 4314, 4554, 4794, 5058, 5298, 5538, 5778, 96, 336, 576, 816, 1200, 1440, 1680, 1920, 1992, 2232, 2472, 2712, 3048, 3288, 3528, 3768, 3960, 4200, 4440, 4680, 5112, 5352, 5592, 5832, 91, 331, 571, 811, 1099, 1339, 1579, 1819, 2035, 2275, 2515, 2755, 3019, 3259, 3499, 3739, 4003, 4243, 4483, 4723, 5083, 5323, 5563, 5803, 68, 308, 548, 788, 1028, 1268, 1508, 1748, 2180, 2420, 2660, 2900, 2972, 3212, 3452, 3692, 3932, 4100, 4340, 4580, 4820, 5156, 5396, 5636, 5876, 93, 333, 573, 813, 1053, 1293, 1533, 1773, 2133, 2373, 2613, 2853, 3117, 3357, 3597, 3837, 4149, 4389, 4629, 4869, 5157, 5397, 5637, 5877, 190, 430, 670, 910, 1126, 1366, 1606, 1846, 2206, 2446, 2686, 2926, 3190, 3430, 3670, 3910, 4078, 4318, 4558, 4798, 5134, 5374, 5614, 5854, 47, 287, 527, 767, 1055, 1295, 1535, 1775, 1991, 2231, 2471, 2711, 2951, 3023, 3263, 3503, 3743, 4007, 4247, 4487, 4727, 5159, 5399, 
5639, 5879, 49, 289, 529, 769, 1153, 1393, 1633, 1873, 2065, 2305, 2545, 2785, 3169, 3409, 3649, 3889, 4081, 4321, 4561, 4801, 4921, 5161, 5401, 5641, 5881, 26, 266, 506, 746, 1010, 1250, 1490, 1730, 2186, 2426, 2666, 2906, 3002, 3242, 3482, 3722, 3986, 4226, 4466, 4706, 5138, 5378, 5618, 5858, 3, 243, 483, 723, 963, 1203, 1443, 1683, 1923, 2091, 2331, 2571, 2811, 3075, 3315, 3555, 3795, 3987, 4227, 4467, 4707, 5043, 5283, 5523, 5763, 196, 436, 676, 916, 1180, 1420, 1660, 1900, 2140, 2380, 2620, 2860, 3052, 3292, 3532, 3772, 4108, 4348, 4588, 4828, 4924, 5164, 5404, 5644, 5884, 53, 293, 533, 773, 1013, 1253, 1493, 1733, 2117, 2357, 2597, 2837, 3029, 3269, 3509, 3749, 3965, 4205, 4445, 4685, 4973, 5213, 5453, 5693, 54, 294, 534, 774, 1086, 1326, 1566, 1806, 2118, 2358, 2598, 2838, 3030, 3270, 3510, 3750, 3990, 4230, 4470, 4710, 4974, 5214, 5454, 5694, 79, 319, 559, 799, 991, 1231, 1471, 1711, 1951, 2143, 2383, 2623, 2863, 3103, 3343, 3583, 3823, 4063, 4303, 4543, 4783, 5143, 5383, 5623, 5863, 80, 320, 560, 800, 1160, 1400, 1640, 1880, 2024, 2264, 2504, 2744, 3032, 3272, 3512, 3752, 4040, 4280, 4520, 4760, 5072, 5312, 5552, 5792] +fold 7: +[129, 369, 609, 849, 1137, 1377, 1617, 1857, 2073, 2313, 2553, 2793, 3177, 3417, 3657, 3897, 4089, 4329, 4569, 4809, 5073, 5313, 5553, 5793, 10, 250, 490, 730, 970, 1114, 1354, 1594, 1834, 2050, 2290, 2530, 2770, 3130, 3370, 3610, 3850, 4162, 4402, 4642, 4882, 4978, 5218, 5458, 5698, 131, 371, 611, 851, 1091, 1331, 1571, 1811, 2051, 2291, 2531, 2771, 3179, 3419, 3659, 3899, 3995, 4235, 4475, 4715, 5099, 5339, 5579, 5819, 156, 396, 636, 876, 1044, 1284, 1524, 1764, 2172, 2412, 2652, 2892, 3180, 3420, 3660, 3900, 4164, 4404, 4644, 4884, 4980, 5220, 5460, 5700, 229, 469, 709, 949, 1021, 1261, 1501, 1741, 2005, 2245, 2485, 2725, 2965, 3205, 3445, 3685, 3925, 4021, 4261, 4501, 4741, 5149, 5389, 5629, 5869, 62, 302, 542, 782, 998, 1238, 1478, 1718, 1958, 2150, 2390, 2630, 2870, 3182, 3422, 3662, 3902, 4070, 4310, 4550, 4790, 5078, 5318, 
5558, 5798, 87, 327, 567, 807, 1023, 1263, 1503, 1743, 2151, 2391, 2631, 2871, 3039, 3279, 3519, 3759, 4023, 4263, 4503, 4743, 5103, 5343, 5583, 5823, 88, 328, 568, 808, 1096, 1336, 1576, 1816, 2104, 2344, 2584, 2824, 3040, 3280, 3520, 3760, 4024, 4264, 4504, 4744, 4984, 5224, 5464, 5704, 137, 377, 617, 857, 1001, 1241, 1481, 1721, 1961, 2201, 2441, 2681, 2921, 3113, 3353, 3593, 3833, 4049, 4289, 4529, 4769, 5009, 5249, 5489, 5729, 42, 282, 522, 762, 1170, 1410, 1650, 1890, 2058, 2298, 2538, 2778, 3066, 3306, 3546, 3786, 4026, 4266, 4506, 4746, 4938, 5178, 5418, 5658, 5898, 0, 240, 480, 720, 960, 1128, 1368, 1608, 1848, 2160, 2400, 2640, 2880, 3144, 3384, 3624, 3864, 3936, 4176, 4416, 4656, 4896, 4992, 5232, 5472, 5712, 43, 283, 523, 763, 1171, 1411, 1651, 1891, 2203, 2443, 2683, 2923, 3043, 3283, 3523, 3763, 4123, 4363, 4603, 4843, 5107, 5347, 5587, 5827, 212, 452, 692, 932, 1172, 1412, 1652, 1892, 2108, 2348, 2588, 2828, 3068, 3308, 3548, 3788, 4148, 4388, 4628, 4868, 4964, 5204, 5444, 5684, 69, 309, 549, 789, 1173, 1413, 1653, 1893, 2085, 2325, 2565, 2805, 3093, 3333, 3573, 3813, 4173, 4413, 4653, 4893, 5013, 5253, 5493, 5733, 46, 286, 526, 766, 1198, 1438, 1678, 1918, 2086, 2326, 2566, 2806, 3046, 3286, 3526, 3766, 4030, 4270, 4510, 4750, 5086, 5326, 5566, 5806, 167, 407, 647, 887, 1079, 1319, 1559, 1799, 2039, 2279, 2519, 2759, 3095, 3335, 3575, 3815, 4151, 4391, 4631, 4871, 5135, 5375, 5615, 5855, 97, 337, 577, 817, 1129, 1369, 1609, 1849, 1993, 2233, 2473, 2713, 3121, 3361, 3601, 3841, 4033, 4273, 4513, 4753, 5137, 5377, 5617, 5857, 74, 314, 554, 794, 1082, 1322, 1562, 1802, 1970, 2210, 2450, 2690, 2930, 3026, 3266, 3506, 3746, 3938, 4178, 4418, 4658, 4898, 4994, 5234, 5474, 5714, 123, 363, 603, 843, 1107, 1347, 1587, 1827, 2019, 2259, 2499, 2739, 2979, 3219, 3459, 3699, 4155, 4395, 4635, 4875, 5091, 5331, 5571, 5811, 52, 292, 532, 772, 1108, 1348, 1588, 1828, 2092, 2332, 2572, 2812, 3028, 3268, 3508, 3748, 4084, 4324, 4564, 4804, 5116, 5356, 5596, 5836, 77, 
317, 557, 797, 1037, 1277, 1517, 1757, 2165, 2405, 2645, 2885, 3005, 3245, 3485, 3725, 4037, 4277, 4517, 4757, 4997, 5237, 5477, 5717, 126, 366, 606, 846, 1038, 1278, 1518, 1758, 2190, 2430, 2670, 2910, 2982, 3222, 3462, 3702, 3966, 4206, 4446, 4686, 5094, 5334, 5574, 5814, 199, 439, 679, 919, 1207, 1447, 1687, 1927, 2071, 2311, 2551, 2791, 3079, 3319, 3559, 3799, 3943, 4183, 4423, 4663, 4903, 4927, 5167, 5407, 5647, 5887, 152, 392, 632, 872, 1208, 1448, 1688, 1928, 2000, 2240, 2480, 2720, 2984, 3224, 3464, 3704, 3968, 4208, 4448, 4688, 5144, 5384, 5624, 5864] +fold 8: +[57, 297, 537, 777, 993, 1233, 1473, 1713, 1953, 2049, 2289, 2529, 2769, 3153, 3393, 3633, 3873, 4017, 4257, 4497, 4737, 5049, 5289, 5529, 5769, 202, 442, 682, 922, 1162, 1402, 1642, 1882, 2146, 2386, 2626, 2866, 3154, 3394, 3634, 3874, 4138, 4378, 4618, 4858, 5098, 5338, 5578, 5818, 203, 443, 683, 923, 1139, 1379, 1619, 1859, 2123, 2363, 2603, 2843, 3035, 3275, 3515, 3755, 4019, 4259, 4499, 4739, 4955, 5195, 5435, 5675, 132, 372, 612, 852, 1116, 1356, 1596, 1836, 2148, 2388, 2628, 2868, 3060, 3300, 3540, 3780, 3948, 4188, 4428, 4668, 4908, 4932, 5172, 5412, 5652, 5892, 109, 349, 589, 829, 1213, 1453, 1693, 1933, 2173, 2413, 2653, 2893, 3181, 3421, 3661, 3901, 4141, 4381, 4621, 4861, 5053, 5293, 5533, 5773, 14, 254, 494, 734, 974, 1046, 1286, 1526, 1766, 2030, 2270, 2510, 2750, 3038, 3278, 3518, 3758, 4118, 4358, 4598, 4838, 4982, 5222, 5462, 5702, 183, 423, 663, 903, 1071, 1311, 1551, 1791, 2055, 2295, 2535, 2775, 2991, 3231, 3471, 3711, 3975, 4215, 4455, 4695, 5031, 5271, 5511, 5751, 136, 376, 616, 856, 1144, 1384, 1624, 1864, 2128, 2368, 2608, 2848, 3064, 3304, 3544, 3784, 4096, 4336, 4576, 4816, 5008, 5248, 5488, 5728, 41, 281, 521, 761, 1217, 1457, 1697, 1937, 2153, 2393, 2633, 2873, 3089, 3329, 3569, 3809, 3977, 4217, 4457, 4697, 4937, 5177, 5417, 5657, 5897, 114, 354, 594, 834, 1218, 1458, 1698, 1938, 2010, 2250, 2490, 2730, 2994, 3234, 3474, 3714, 3954, 4194, 4434, 4674, 4914, 5082, 5322, 
5562, 5802, 144, 384, 624, 864, 1080, 1320, 1560, 1800, 2112, 2352, 2592, 2832, 3168, 3408, 3648, 3888, 4104, 4344, 4584, 4824, 4968, 5208, 5448, 5688, 115, 355, 595, 835, 1123, 1363, 1603, 1843, 2011, 2251, 2491, 2731, 2995, 3235, 3475, 3715, 4075, 4315, 4555, 4795, 5035, 5275, 5515, 5755, 140, 380, 620, 860, 1100, 1340, 1580, 1820, 2036, 2276, 2516, 2756, 2996, 3236, 3476, 3716, 3980, 4220, 4460, 4700, 5012, 5252, 5492, 5732, 117, 357, 597, 837, 1197, 1437, 1677, 1917, 2109, 2349, 2589, 2829, 3021, 3261, 3501, 3741, 3957, 4197, 4437, 4677, 4917, 5085, 5325, 5565, 5805, 214, 454, 694, 934, 1078, 1318, 1558, 1798, 2158, 2398, 2638, 2878, 3118, 3358, 3598, 3838, 4174, 4414, 4654, 4894, 5062, 5302, 5542, 5782, 143, 383, 623, 863, 1175, 1415, 1655, 1895, 2111, 2351, 2591, 2831, 3071, 3311, 3551, 3791, 4127, 4367, 4607, 4847, 4991, 5231, 5471, 5711, 73, 313, 553, 793, 985, 1225, 1465, 1705, 1945, 2089, 2329, 2569, 2809, 3049, 3289, 3529, 3769, 3985, 4225, 4465, 4705, 4993, 5233, 5473, 5713, 170, 410, 650, 890, 1130, 1370, 1610, 1850, 2114, 2354, 2594, 2834, 3050, 3290, 3530, 3770, 4034, 4274, 4514, 4754, 5114, 5354, 5594, 5834, 195, 435, 675, 915, 1011, 1251, 1491, 1731, 2187, 2427, 2667, 2907, 3003, 3243, 3483, 3723, 4131, 4371, 4611, 4851, 5019, 5259, 5499, 5739, 124, 364, 604, 844, 1036, 1276, 1516, 1756, 2020, 2260, 2500, 2740, 3076, 3316, 3556, 3796, 3988, 4228, 4468, 4708, 4996, 5236, 5476, 5716, 125, 365, 605, 845, 1181, 1421, 1661, 1901, 2189, 2429, 2669, 2909, 3101, 3341, 3581, 3821, 3941, 4181, 4421, 4661, 4901, 5045, 5285, 5525, 5765, 6, 246, 486, 726, 966, 1182, 1422, 1662, 1902, 2166, 2406, 2646, 2886, 3078, 3318, 3558, 3798, 4134, 4374, 4614, 4854, 5118, 5358, 5598, 5838, 31, 271, 511, 751, 1063, 1303, 1543, 1783, 2191, 2431, 2671, 2911, 2983, 3223, 3463, 3703, 4015, 4255, 4495, 4735, 5119, 5359, 5599, 5839, 8, 248, 488, 728, 968, 1136, 1376, 1616, 1856, 2192, 2432, 2672, 2912, 3056, 3296, 3536, 3776, 4064, 4304, 4544, 4784, 5000, 5240, 5480, 5720] +fold 
9: +[201, 441, 681, 921, 1161, 1401, 1641, 1881, 2097, 2337, 2577, 2817, 3033, 3273, 3513, 3753, 4161, 4401, 4641, 4881, 5001, 5241, 5481, 5721, 154, 394, 634, 874, 1138, 1378, 1618, 1858, 2026, 2266, 2506, 2746, 3106, 3346, 3586, 3826, 3970, 4210, 4450, 4690, 4930, 5170, 5410, 5650, 5890, 35, 275, 515, 755, 1115, 1355, 1595, 1835, 2003, 2243, 2483, 2723, 2987, 3227, 3467, 3707, 4139, 4379, 4619, 4859, 5051, 5291, 5531, 5771, 228, 468, 708, 948, 1020, 1260, 1500, 1740, 2124, 2364, 2604, 2844, 3132, 3372, 3612, 3852, 4116, 4356, 4596, 4836, 5148, 5388, 5628, 5868, 61, 301, 541, 781, 1189, 1429, 1669, 1909, 2101, 2341, 2581, 2821, 3013, 3253, 3493, 3733, 4117, 4357, 4597, 4837, 5029, 5269, 5509, 5749, 182, 422, 662, 902, 1022, 1262, 1502, 1742, 2054, 2294, 2534, 2774, 3158, 3398, 3638, 3878, 4094, 4334, 4574, 4814, 5054, 5294, 5534, 5774, 63, 303, 543, 783, 1191, 1431, 1671, 1911, 2103, 2343, 2583, 2823, 3063, 3303, 3543, 3783, 3951, 4191, 4431, 4671, 4911, 5151, 5391, 5631, 5871, 112, 352, 592, 832, 1024, 1264, 1504, 1744, 2080, 2320, 2560, 2800, 3088, 3328, 3568, 3808, 4120, 4360, 4600, 4840, 4960, 5200, 5440, 5680, 65, 305, 545, 785, 1049, 1289, 1529, 1769, 2057, 2297, 2537, 2777, 2993, 3233, 3473, 3713, 4097, 4337, 4577, 4817, 5033, 5273, 5513, 5753, 234, 474, 714, 954, 1074, 1314, 1554, 1794, 2130, 2370, 2610, 2850, 3018, 3258, 3498, 3738, 4122, 4362, 4602, 4842, 4962, 5202, 5442, 5682, 192, 432, 672, 912, 1176, 1416, 1656, 1896, 2184, 2424, 2664, 2904, 3072, 3312, 3552, 3792, 4128, 4368, 4608, 4848, 4920, 5160, 5400, 5640, 5880, 235, 475, 715, 955, 1075, 1315, 1555, 1795, 2155, 2395, 2635, 2875, 3091, 3331, 3571, 3811, 4171, 4411, 4651, 4891, 5155, 5395, 5635, 5875, 92, 332, 572, 812, 1052, 1292, 1532, 1772, 2060, 2300, 2540, 2780, 3140, 3380, 3620, 3860, 4028, 4268, 4508, 4748, 5132, 5372, 5612, 5852, 237, 477, 717, 957, 1005, 1245, 1485, 1725, 1965, 2037, 2277, 2517, 2757, 3141, 3381, 3621, 3861, 4005, 4245, 4485, 4725, 4989, 5229, 5469, 5709, 142, 382, 622, 
862, 1054, 1294, 1534, 1774, 2014, 2254, 2494, 2734, 3166, 3406, 3646, 3886, 4102, 4342, 4582, 4822, 5038, 5278, 5518, 5758, 95, 335, 575, 815, 1103, 1343, 1583, 1823, 2207, 2447, 2687, 2927, 3191, 3431, 3671, 3911, 3959, 4199, 4439, 4679, 4919, 5039, 5279, 5519, 5759, 25, 265, 505, 745, 1009, 1249, 1489, 1729, 2185, 2425, 2665, 2905, 3073, 3313, 3553, 3793, 3961, 4201, 4441, 4681, 4945, 5185, 5425, 5665, 50, 290, 530, 770, 986, 1226, 1466, 1706, 1946, 2066, 2306, 2546, 2786, 3122, 3362, 3602, 3842, 4106, 4346, 4586, 4826, 5090, 5330, 5570, 5810, 99, 339, 579, 819, 1059, 1299, 1539, 1779, 2115, 2355, 2595, 2835, 3051, 3291, 3531, 3771, 3939, 4179, 4419, 4659, 4899, 4923, 5163, 5403, 5643, 5883, 172, 412, 652, 892, 1060, 1300, 1540, 1780, 2044, 2284, 2524, 2764, 2980, 3220, 3460, 3700, 4012, 4252, 4492, 4732, 5092, 5332, 5572, 5812, 101, 341, 581, 821, 1061, 1301, 1541, 1781, 2069, 2309, 2549, 2789, 3053, 3293, 3533, 3773, 3989, 4229, 4469, 4709, 5093, 5333, 5573, 5813, 222, 462, 702, 942, 1134, 1374, 1614, 1854, 1998, 2238, 2478, 2718, 2958, 3198, 3438, 3678, 3918, 4110, 4350, 4590, 4830, 5022, 5262, 5502, 5742, 103, 343, 583, 823, 1159, 1399, 1639, 1879, 1999, 2239, 2479, 2719, 3151, 3391, 3631, 3871, 4039, 4279, 4519, 4759, 5071, 5311, 5551, 5791, 32, 272, 512, 752, 1040, 1280, 1520, 1760, 1976, 2216, 2456, 2696, 2936, 3128, 3368, 3608, 3848, 4160, 4400, 4640, 4880, 5096, 5336, 5576, 5816] +fold 10: +[225, 465, 705, 945, 1185, 1425, 1665, 1905, 2145, 2385, 2625, 2865, 3009, 3249, 3489, 3729, 3969, 4209, 4449, 4689, 5121, 5361, 5601, 5841, 178, 418, 658, 898, 1018, 1258, 1498, 1738, 2074, 2314, 2554, 2794, 3058, 3298, 3538, 3778, 4042, 4282, 4522, 4762, 5050, 5290, 5530, 5770, 83, 323, 563, 803, 1163, 1403, 1643, 1883, 1979, 2219, 2459, 2699, 2939, 3011, 3251, 3491, 3731, 4163, 4403, 4643, 4883, 5123, 5363, 5603, 5843, 12, 252, 492, 732, 972, 1092, 1332, 1572, 1812, 2076, 2316, 2556, 2796, 3108, 3348, 3588, 3828, 4068, 4308, 4548, 4788, 5052, 5292, 5532, 5772, 
181, 421, 661, 901, 997, 1237, 1477, 1717, 1957, 1981, 2221, 2461, 2701, 2941, 3061, 3301, 3541, 3781, 4045, 4285, 4525, 4765, 5125, 5365, 5605, 5845, 158, 398, 638, 878, 1070, 1310, 1550, 1790, 2006, 2246, 2486, 2726, 2966, 3206, 3446, 3686, 3926, 4142, 4382, 4622, 4862, 5006, 5246, 5486, 5726, 111, 351, 591, 831, 1047, 1287, 1527, 1767, 2175, 2415, 2655, 2895, 3087, 3327, 3567, 3807, 4167, 4407, 4647, 4887, 5079, 5319, 5559, 5799, 16, 256, 496, 736, 976, 1048, 1288, 1528, 1768, 2200, 2440, 2680, 2920, 3016, 3256, 3496, 3736, 4000, 4240, 4480, 4720, 5032, 5272, 5512, 5752, 113, 353, 593, 833, 1193, 1433, 1673, 1913, 2105, 2345, 2585, 2825, 2969, 3209, 3449, 3689, 3929, 4001, 4241, 4481, 4721, 5153, 5393, 5633, 5873, 138, 378, 618, 858, 1194, 1434, 1674, 1914, 1986, 2226, 2466, 2706, 2946, 3114, 3354, 3594, 3834, 4098, 4338, 4578, 4818, 5010, 5250, 5490, 5730, 48, 288, 528, 768, 1032, 1272, 1512, 1752, 2136, 2376, 2616, 2856, 3000, 3240, 3480, 3720, 4056, 4296, 4536, 4776, 5016, 5256, 5496, 5736, 163, 403, 643, 883, 1003, 1243, 1483, 1723, 1963, 2131, 2371, 2611, 2851, 2971, 3211, 3451, 3691, 3931, 4099, 4339, 4579, 4819, 5131, 5371, 5611, 5851, 236, 476, 716, 956, 1124, 1364, 1604, 1844, 2132, 2372, 2612, 2852, 3188, 3428, 3668, 3908, 4124, 4364, 4604, 4844, 5084, 5324, 5564, 5804, 21, 261, 501, 741, 981, 1125, 1365, 1605, 1845, 2061, 2301, 2541, 2781, 3165, 3405, 3645, 3885, 4029, 4269, 4509, 4749, 5133, 5373, 5613, 5853, 118, 358, 598, 838, 1222, 1462, 1702, 1942, 2134, 2374, 2614, 2854, 2998, 3238, 3478, 3718, 3982, 4222, 4462, 4702, 4990, 5230, 5470, 5710, 191, 431, 671, 911, 1223, 1463, 1703, 1943, 2183, 2423, 2663, 2903, 3047, 3287, 3527, 3767, 3983, 4223, 4463, 4703, 4943, 5183, 5423, 5663, 5903, 217, 457, 697, 937, 1081, 1321, 1561, 1801, 2137, 2377, 2617, 2857, 3001, 3241, 3481, 3721, 4057, 4297, 4537, 4777, 5089, 5329, 5569, 5809, 122, 362, 602, 842, 1178, 1418, 1658, 1898, 1994, 2234, 2474, 2714, 3098, 3338, 3578, 3818, 4154, 4394, 4634, 4874, 5018, 
5258, 5498, 5738, 219, 459, 699, 939, 1131, 1371, 1611, 1851, 2139, 2379, 2619, 2859, 3123, 3363, 3603, 3843, 4035, 4275, 4515, 4755, 5139, 5379, 5619, 5859, 4, 244, 484, 724, 964, 1012, 1252, 1492, 1732, 2068, 2308, 2548, 2788, 3172, 3412, 3652, 3892, 4060, 4300, 4540, 4780, 4972, 5212, 5452, 5692, 197, 437, 677, 917, 1205, 1445, 1685, 1925, 2021, 2261, 2501, 2741, 3125, 3365, 3605, 3845, 4085, 4325, 4565, 4805, 5021, 5261, 5501, 5741, 198, 438, 678, 918, 1206, 1446, 1686, 1926, 2142, 2382, 2622, 2862, 3102, 3342, 3582, 3822, 4038, 4278, 4518, 4758, 4998, 5238, 5478, 5718, 151, 391, 631, 871, 1015, 1255, 1495, 1735, 2167, 2407, 2647, 2887, 3031, 3271, 3511, 3751, 4111, 4351, 4591, 4831, 5023, 5263, 5503, 5743, 200, 440, 680, 920, 992, 1232, 1472, 1712, 1952, 2072, 2312, 2552, 2792, 3176, 3416, 3656, 3896, 3992, 4232, 4472, 4712, 5048, 5288, 5528, 5768] diff --git a/Data/tal.xlsx b/Data/tal.xlsx new file mode 100644 index 0000000..16cfb67 Binary files /dev/null and b/Data/tal.xlsx differ diff --git a/Data/tal_folds.txt b/Data/tal_folds.txt new file mode 100644 index 0000000..79bcf4a --- /dev/null +++ b/Data/tal_folds.txt @@ -0,0 +1,20 @@ +fold 1: +[5825, 10713, 10731, 765, 827, 8038, 9066, 4945, 4956, 5051, 5062, 636, 5696, 5707, 10181, 10206, 10915, 10965, 1366, 731, 3237, 3248, 10786, 10810, 4296, 4308, 7565, 7717, 573, 584, 1116, 1226, 6069, 116, 138, 5469, 10471, 10589, 6135, 6146, 3915, 3926, 4854, 4865, 4203, 4214, 2499, 8691, 8720, 10638, 10683, 857, 868, 8492, 8589, 9527, 9574, 8870, 8884, 10406, 10432, 5200, 5211, 1102, 1117, 3760, 3771, 4133, 4144, 3058, 3069, 4332, 4361, 5745, 5757, 6414, 6650, 6761, 9199, 9244, 6841, 8782, 8819, 10322, 10336, 9754, 9773, 2305, 2337, 2972, 3662, 10149, 10160, 6300, 4071, 5237, 5072, 5083, 5222, 5233, 2784, 2795, 219, 230, 1552, 2376, 2535, 9820, 9832, 1025, 1059, 5107, 5217, 9712, 9731, 7939, 7992, 1404, 1415, 7301, 7479, 10502, 10554, 3281, 3293, 10766, 10809, 2995, 3006, 5002, 5013, 7270, 7398, 1303, 1419, 238, 299, 
4092, 4103, 6945, 6962, 11021, 11040, 11057, 11069, 9150, 1955, 2027, 2754, 2780, 2459, 2473, 9296, 9307, 9036, 9137, 6157, 6168, 6846, 3602, 4409, 4420, 6125, 6283, 449, 1276, 1287, 3177, 3201, 5785, 5800, 659, 672, 7254, 7308, 3657, 3677, 8229, 8290, 3017, 3029, 6805, 680, 714, 4924, 4935, 9482, 9506, 2579, 2605, 4427, 4450, 10975, 11020, 7927, 7986, 6971, 6995, 639, 651, 1884, 2021, 1508, 1519, 9867, 9888, 6454, 6511, 6735, 7336, 7427, 8350, 8594, 10673, 10705, 4966, 4980, 2729, 2871, 10363, 10374, 5958, 5982, 5098, 5109, 7742, 7786, 4607, 4618, 6413, 6486, 6538, 1575, 1586, 422, 442, 10727, 10777, 4179, 4196, 3131, 3152, 4161, 4280, 7034, 7966, 10865, 10884, 6806, 897, 908, 1545, 1564, 10457, 10845, 9573, 9608, 4562, 4591, 3130, 3163, 7056, 4241, 4252, 1294, 1492, 7339, 7502, 7557, 7653, 6, 17, 1371, 1382, 3214, 3228, 1126, 9760, 9790, 7724, 7823, 1300, 1317, 2236, 2247, 10241, 10256, 5381, 5471, 1600, 1621, 7277, 7421, 8668, 8708, 926, 987, 9154, 10259, 1211, 1234, 1729, 1740, 1598, 1627, 60, 71, 9456, 9493, 1137, 7757, 7808, 9861, 9886, 38, 49, 2439, 2450, 3284, 4062, 4550, 4583, 4690, 4701, 5801, 5818, 3823, 3834, 9840, 9852, 153, 168, 10313, 10557, 5297, 8935, 8963, 3325, 3336, 6089, 6115, 1024, 1055, 2299, 2329, 6931, 6943, 1655, 1676, 7265, 7387, 4668, 4679, 6208, 6221, 998, 1009, 10090, 10107, 2040, 2057, 1809, 1830, 5154, 5165, 9238, 9319, 2291, 2327, 10297, 10308, 9314, 9502, 2273, 7815, 7925, 5994, 6082, 10379, 10612, 8252, 8289, 5653, 5664, 6850, 5677, 5688, 4800, 10495, 10560, 6182, 6193, 4023, 4034, 5333, 760, 778, 10701, 10923, 8946, 9000, 10732, 10767, 10986, 11024, 7923, 7959, 596, 608, 3297, 3308, 7190, 3892, 3903, 5492, 5503, 4104, 4115, 9245, 9259, 8294, 8330, 2261, 2277, 6000, 6030, 6040, 6051, 1875, 1907, 8091, 8119, 74, 85, 2411, 2422, 5543, 8900, 8922, 8063, 9110, 8143, 8160, 3341, 3352, 623, 634, 9263, 4049, 4060, 10456, 10544, 10594, 9435, 9464, 1758, 1782, 6419, 6510, 6717, 7494, 7589, 6225, 5892, 5931, 10844, 10856, 8241, 8267, 9724, 
9980, 8128, 8157, 6814, 5728, 5739, 2000, 2036, 920, 931, 8651, 8698, 5408, 5438, 10641, 10682, 10909, 10976, 5121, 5133, 3576, 7182, 7209, 5712, 76, 108, 8485, 8548, 251, 262, 7326, 7461, 8794, 9038, 8458, 8524, 3648, 3666, 4515, 4529, 8144, 8207, 1256, 1267, 2653, 6982, 7007, 7586, 6265, 6276, 6888, 7996, 452, 463, 9180, 9221, 7896, 7948, 8556, 8588, 2831, 2842, 5634, 5645, 8545, 8644, 10292, 11108, 520, 719, 8362, 8420, 6453, 6506, 6757, 10225, 10236, 8171, 9016, 4987, 4998, 8553, 8608, 2643, 2672, 4353, 4908, 2097, 2108, 4731, 4742, 10398, 10425, 9491, 9658, 1942, 4337, 4885, 4996, 3960, 3982, 5303, 7161, 7210, 5565, 6086, 6230, 6241, 2584, 2609, 10547, 10593, 1311, 1327, 4501, 4512, 8209, 8235, 2546, 2868, 2706, 2749, 6626, 6687, 6698, 3956, 3979, 3391, 3403, 6605, 6739, 6772, 7712, 7822, 3108, 3134, 3369, 3380, 7487, 7582, 2, 172, 939, 950, 1141, 1152, 1418, 1455, 2554, 2898, 3129, 3362, 6936, 7036, 2562, 2909, 4380, 4399, 6540, 6660, 6677, 1424, 1469, 5364, 7295, 7416, 1650, 1668, 7062, 7073, 6534, 6582, 6610, 4878, 4889, 382, 9571, 9622, 5394, 5419, 106, 131, 8511, 8562, 10447, 10496, 474, 486, 5387, 5433, 5245, 1920, 1931, 3195, 8574, 8621, 686, 715, 8745, 8767, 3506, 4006, 9305, 10146, 6009, 6025, 2453, 2542, 4541, 4752, 6056, 6067, 8287, 8342, 11072, 11083, 7394, 7496, 5320, 409, 435, 808, 819, 7540, 7692, 9357, 9368, 312, 331, 2060, 2144, 7561, 7669, 2699, 2736, 2946, 2957, 3250, 3261, 4084, 4249, 274, 285, 5406, 5446, 9678, 9689, 10576, 10642, 4149, 4160, 6896, 7998, 4816, 2718, 2879, 5533, 5544, 6911, 7994, 6203, 6266, 8770, 8843, 788, 799, 4581, 4602, 3486, 3497, 10258, 3850, 3861, 9379, 9390, 7550, 7672, 6519, 6598, 6624, 7322, 7454, 4781, 4792, 1885, 2023, 2660, 2681, 8372, 8916, 9906, 9917, 10917, 10946, 7275, 7400, 1597, 1674, 5570, 5592, 8375, 8440, 3734, 3745, 9431, 9476, 4632, 4643, 9030, 9068, 10820, 10831, 6526, 6566, 6595, 8810, 8841, 2155, 2176, 10038, 10049, 6311, 8970, 9011, 10190, 10210, 9932, 9982, 4891, 4902, 6320, 2230, 2355, 8398, 
8477, 1952, 1976, 881, 8971, 9007, 7116, 7133, 1718, 9995, 2921, 2932, 1757, 1785, 2804, 2815, 3690, 3701, 2168, 2185, 8337, 8396, 3538, 2044, 2210, 1960, 1988, 2379, 2394, 3800, 3811, 5865, 5877, 7834, 7895, 4707, 4718, 5352, 8799, 8849, 8014, 9073, 3449, 3468, 1209, 1233, 5327, 3777, 3788, 737, 755, 5172, 5183, 6446, 6505, 6753, 4224, 4235, 2624, 2644, 8284, 8717, 7086, 6853, 7593, 7704, 5606, 3554, 3565, 8041, 9102, 9786, 9811, 519, 3937, 3948, 10882, 10910, 527, 538, 8806, 8032, 8076, 2743, 2120, 2131, 364, 375, 6801, 7511, 7618, 2685, 2713, 4366, 4392, 7780, 7849, 10340, 10351, 10412, 7046, 7235, 290, 301, 5556, 5579, 2551, 2740, 4005, 4016, 2486, 2514, 5142, 5612, 5623, 3425, 3436, 8356, 8473, 3867, 3878, 3624, 3635, 7148, 7174, 8094, 8111, 7681, 7747, 7016, 7027, 5361, 6562, 6631, 6645, 7390, 7623, 1342, 1360, 9507, 10085, 10108, 9990, 10022, 5295, 9707, 9732, 5686, 5794, 9644, 9672, 8402, 10168, 5511, 5522, 9398, 9430, 4386, 4586, 4324, 9934, 9983, 9340, 9353, 2070, 2081, 9163, 9198, 1992, 2013, 2192, 2216, 1192, 1203, 7816, 7885, 10514, 10599, 4434, 4454, 1163, 1174, 6090, 6107, 5555, 8645, 8674, 5880, 5904, 3038, 3049, 9530, 9590, 9605, 9666, 549, 560, 2346, 2362, 10010, 10028, 390, 6257, 1030, 1062, 317, 338, 1699, 1714, 10063, 10075, 9274, 9285, 7211, 7316, 5266, 3406, 3417, 5906, 5935, 1444, 1481, 839, 850, 1076, 1087, 2502, 2524, 9178, 10275, 3603, 3622, 8891, 8913, 3442, 3458, 6316, 5026, 5037, 3516, 3527, 6906, 8003, 1817, 1841, 4470, 4481, 3091, 4061, 6356, 10127, 10138, 6365, 9938, 9973, 10487, 5843, 5854, 3718, 3730, 1749, 4651, 9551, 9632, 8169, 8198, 8050, 9122, 9043, 9103, 3373, 3695, 2968, 2979, 197, 208, 8731, 8748, 5953, 5974, 1526, 189, 4756, 4767, 5770, 963, 974, 7100, 7126, 3101, 3126, 1787, 1801, 9735, 10057, 4831, 4842, 5372, 7575, 7687] +fold 2: +[5830, 10707, 10726, 754, 816, 8060, 9088, 4939, 4950, 5056, 5067, 603, 710, 5700, 6868, 10174, 10198, 10925, 10978, 1363, 726, 892, 3236, 3247, 10765, 10799, 6367, 6785, 4291, 4302, 7604, 
7729, 577, 588, 1160, 1270, 103, 125, 5459, 10516, 10628, 6132, 6143, 3909, 3921, 4848, 4859, 4201, 4212, 8682, 8711, 10630, 10676, 854, 865, 8541, 8623, 9539, 9652, 8871, 8886, 10409, 10434, 5196, 5207, 1093, 1107, 3754, 3765, 4126, 4137, 3063, 3074, 4340, 4367, 5747, 5760, 6409, 6561, 6758, 9210, 9250, 8771, 8807, 10314, 10330, 9756, 9792, 2298, 2332, 2939, 3640, 10148, 10159, 4064, 4075, 5068, 5079, 5215, 5226, 2791, 2802, 229, 240, 1551, 2373, 2533, 9800, 9822, 1029, 1063, 5118, 5228, 9714, 9733, 7924, 7983, 1397, 1409, 7234, 7412, 10518, 10573, 3272, 3283, 10780, 10814, 2991, 3002, 5003, 5014, 7208, 7337, 118, 1347, 1435, 195, 268, 4085, 4096, 6948, 6964, 10961, 11030, 11048, 11059, 8706, 1958, 2038, 2742, 2778, 2462, 2480, 9289, 9300, 9033, 9106, 6153, 6164, 6858, 3604, 4405, 4416, 6122, 6280, 405, 1278, 1289, 3182, 3203, 5787, 5804, 661, 675, 7214, 7276, 3661, 3679, 8210, 8264, 3018, 3030, 6318, 503, 687, 4918, 4929, 9500, 9517, 2589, 2618, 6794, 4439, 4467, 11003, 11029, 7889, 7949, 6983, 7002, 5264, 638, 649, 1863, 1897, 1506, 1517, 9865, 9885, 6458, 6522, 6759, 7296, 7383, 8295, 8539, 10688, 10709, 4967, 4982, 2586, 2854, 10359, 10370, 5951, 5975, 5093, 5104, 7759, 7804, 4603, 4614, 6462, 6513, 6773, 1579, 1590, 406, 425, 10764, 10804, 4180, 4198, 3135, 3155, 4260, 4283, 7041, 7988, 10862, 10881, 9680, 895, 906, 1541, 1561, 10524, 10857, 9554, 9593, 4567, 4595, 3117, 3160, 7049, 4192, 4251, 4262, 1295, 1493, 7448, 7553, 7500, 7620, 12, 23, 1377, 1388, 3208, 3220, 1125, 1136, 9776, 9803, 6345, 7714, 7813, 1301, 1340, 2240, 2251, 10237, 10248, 5373, 5467, 1612, 1631, 7248, 7389, 8675, 8716, 970, 1028, 9159, 10262, 1225, 1246, 1733, 1744, 1622, 1642, 52, 63, 9477, 9505, 7760, 7818, 9873, 9895, 34, 45, 2434, 2445, 3196, 3429, 4535, 4570, 4691, 4702, 5812, 5823, 3819, 3830, 9841, 9853, 155, 174, 10468, 10712, 6303, 8954, 8977, 3321, 3332, 5481, 6105, 1020, 1048, 2292, 2325, 6926, 6937, 1662, 1679, 7231, 7328, 4671, 4682, 6184, 6215, 993, 1004, 10098, 10119, 
2052, 2063, 1808, 1828, 5159, 5170, 9316, 9327, 2280, 2306, 10295, 10306, 9403, 10069, 2266, 7736, 7836, 5905, 6016, 11096, 10324, 10513, 8277, 8314, 5651, 5662, 5672, 5683, 4794, 4806, 10453, 10510, 6178, 6189, 4026, 4037, 751, 772, 10823, 11034, 8910, 8948, 10719, 10751, 10964, 11006, 7935, 7974, 600, 612, 3303, 3314, 7246, 3888, 3899, 5493, 5504, 4107, 4118, 9242, 9258, 8288, 8324, 2271, 6002, 6032, 6042, 6053, 1850, 1888, 8083, 8109, 80, 91, 2406, 2417, 8896, 8918, 8040, 9086, 8129, 8147, 3346, 3357, 622, 633, 11044, 4043, 4054, 10452, 10505, 10588, 10626, 9433, 9462, 1693, 1768, 6391, 6474, 6546, 7459, 7563, 6354, 6224, 5908, 5941, 10842, 10854, 8251, 8278, 9769, 10046, 8136, 8164, 5725, 5736, 1851, 2003, 922, 933, 8642, 8687, 5424, 5449, 10633, 10680, 10939, 11002, 5114, 5125, 3580, 7185, 7215, 5715, 97, 173, 8466, 8540, 250, 261, 7335, 7472, 8705, 8983, 8409, 8507, 3652, 3672, 4522, 4543, 8166, 9050, 1251, 1265, 6777, 2564, 6972, 7001, 7520, 7673, 6272, 6878, 7989, 451, 462, 9207, 9233, 7908, 7954, 8563, 8597, 2830, 2841, 5636, 5647, 8416, 8611, 11101, 11112, 497, 695, 8386, 8441, 6436, 6502, 6749, 10218, 10229, 8077, 8205, 4986, 4997, 8546, 8602, 2650, 2678, 4331, 4797, 2105, 2116, 4733, 4744, 10383, 10404, 9358, 9558, 1941, 4333, 4874, 4985, 3954, 3974, 5292, 7158, 7205, 5588, 6108, 6236, 6247, 2577, 2599, 10530, 10578, 1319, 1335, 4498, 4509, 8193, 8230, 2849, 2902, 2737, 2769, 6611, 6684, 6697, 3967, 3988, 3393, 3405, 6428, 6711, 6748, 7801, 7911, 6821, 3097, 3120, 3371, 3382, 7432, 7560, 1, 171, 938, 949, 1140, 1151, 1403, 1452, 2550, 2897, 3118, 3318, 6914, 7033, 2544, 2566, 4383, 4401, 6649, 6668, 6685, 1420, 1464, 5360, 7250, 7362, 1649, 1667, 7061, 7072, 6496, 6575, 6604, 4879, 4890, 379, 9538, 9603, 5369, 5405, 128, 145, 8508, 8535, 10459, 10509, 357, 480, 5358, 5397, 1915, 1926, 3178, 8536, 8587, 684, 706, 8752, 8780, 2928, 3606, 9780, 10290, 6001, 6019, 2382, 2454, 4685, 4808, 6064, 8338, 8391, 11081, 11092, 7330, 7449, 5324, 419, 443, 812, 823, 
7372, 7609, 9355, 9366, 318, 341, 2136, 2172, 7505, 7636, 2694, 2725, 2944, 2955, 7242, 3260, 3271, 4238, 4278, 271, 282, 5420, 5476, 6849, 9683, 9694, 10563, 10640, 4148, 4159, 5298, 6901, 8009, 4811, 4822, 2575, 2856, 5536, 5547, 6904, 7970, 6195, 6222, 8791, 8846, 780, 791, 4565, 4594, 3491, 3502, 10274, 3853, 3864, 9377, 9388, 7522, 7650, 6466, 6579, 6618, 7298, 7425, 4776, 4787, 1872, 1905, 2651, 2671, 8217, 8839, 9904, 9915, 10911, 10944, 7351, 7476, 1511, 1601, 5580, 5598, 8397, 8446, 3742, 3753, 9426, 9474, 4635, 4646, 9034, 9076, 10826, 10837, 6553, 6578, 6745, 8775, 8822, 2151, 2167, 10043, 10054, 2260, 8987, 9024, 10189, 10209, 9944, 9997, 4895, 4906, 6355, 2343, 2370, 8408, 8482, 1959, 1980, 878, 889, 8974, 9009, 7118, 7134, 9981, 2925, 2936, 1755, 1781, 2808, 2819, 3687, 3698, 2125, 2177, 2189, 8332, 8390, 3537, 2199, 2217, 1949, 1986, 2390, 2401, 3808, 3842, 5864, 5875, 7865, 4706, 4717, 5367, 8847, 8866, 8036, 9113, 3465, 3481, 1210, 1238, 5321, 3781, 3792, 746, 769, 5179, 5190, 6388, 6459, 6530, 4219, 4230, 6293, 2626, 2647, 8339, 8761, 5764, 7081, 7647, 7721, 3558, 3569, 9040, 9135, 9782, 9807, 502, 716, 3931, 3942, 10897, 10929, 533, 544, 8785, 8047, 9100, 2760, 2124, 2135, 368, 490, 6793, 6350, 7548, 7648, 2693, 2722, 4343, 4382, 7832, 7881, 10343, 10354, 10399, 7112, 7900, 295, 306, 5559, 5583, 2559, 2855, 1463, 4001, 4012, 2479, 2503, 2308, 5140, 5613, 5624, 3426, 3437, 8379, 8480, 3875, 3886, 3628, 3639, 7147, 7173, 8089, 8107, 7696, 7750, 7019, 5376, 6573, 6632, 6647, 7490, 7667, 1334, 1356, 9422, 6358, 10088, 10115, 9975, 10015, 5242, 9705, 9730, 5763, 5872, 10444, 9599, 9653, 8368, 5241, 9869, 5512, 5523, 9397, 9427, 4320, 4552, 4330, 9954, 10007, 9331, 9342, 2069, 2080, 9187, 10273, 1856, 2011, 2194, 2220, 1191, 1202, 5283, 7842, 7909, 10532, 10617, 4428, 4449, 1167, 1178, 6088, 6106, 5566, 8643, 8669, 5891, 5921, 3035, 3046, 9544, 9609, 9581, 9649, 552, 563, 2339, 2356, 10016, 10030, 396, 6258, 1044, 1068, 322, 342, 1697, 1711, 10061, 
10073, 9269, 9280, 7198, 7302, 3412, 5900, 5930, 1456, 832, 843, 1078, 1089, 2508, 2526, 9175, 10270, 3593, 3610, 8895, 8920, 3447, 3472, 5034, 5045, 3515, 3526, 6898, 6918, 6319, 1821, 1843, 4477, 4489, 3061, 3828, 6325, 10130, 10141, 9963, 10000, 10448, 5837, 5848, 3711, 3722, 4647, 4658, 9610, 9655, 8167, 8194, 8062, 9136, 9084, 9143, 3651, 3939, 2964, 2975, 204, 215, 8700, 8735, 5962, 5981, 1524, 196, 4757, 4768, 5775, 2428, 964, 975, 7095, 7113, 3084, 3109, 1784, 1799, 9172, 9935, 4833, 4844, 5461, 7527, 7658] +fold 3: +[10720, 10748, 804, 893, 8054, 9075, 4937, 4948, 5055, 5066, 504, 655, 5698, 5709, 6861, 10184, 10208, 10960, 10999, 1364, 725, 891, 3216, 3239, 10770, 10802, 6807, 4290, 4301, 7629, 7737, 575, 586, 1138, 1248, 100, 123, 5349, 10564, 10663, 6133, 6144, 3912, 3923, 4850, 4861, 4207, 4218, 8686, 8715, 10577, 10654, 856, 867, 8498, 8593, 9543, 9656, 8859, 8876, 10396, 10427, 5198, 5209, 1099, 1111, 3757, 3768, 4130, 4141, 3060, 3071, 4338, 4365, 5744, 5756, 6418, 6707, 6767, 9193, 9241, 6859, 8790, 8824, 10319, 10334, 9738, 9757, 2301, 2335, 3551, 4028, 10156, 10169, 4066, 4077, 5069, 5080, 5216, 5227, 2790, 2801, 226, 237, 1546, 2383, 2538, 9815, 9826, 1016, 1049, 5074, 5185, 9706, 9726, 7960, 8006, 1398, 1410, 7379, 7590, 10497, 10545, 3277, 3289, 10791, 10819, 2986, 2997, 5010, 5022, 7299, 7424, 32, 1391, 1442, 249, 310, 4083, 4094, 6956, 6974, 11014, 11037, 11054, 11065, 1891, 1966, 2758, 2781, 2460, 2475, 9291, 9302, 8992, 9055, 6154, 6165, 3611, 4407, 4418, 6124, 6282, 427, 1279, 1290, 3186, 3204, 5788, 5808, 658, 670, 7207, 7269, 3670, 3685, 8200, 8256, 3012, 3023, 6330, 6779, 679, 712, 2005, 4921, 4932, 9494, 9511, 2598, 2625, 6791, 4445, 4469, 10992, 11026, 7846, 7930, 6985, 7004, 5272, 626, 647, 1881, 2015, 1505, 1516, 9872, 9893, 6441, 6487, 6594, 7363, 7450, 8273, 8528, 10645, 10699, 4959, 4970, 2818, 2885, 10366, 10377, 5966, 5986, 5094, 5105, 7732, 7777, 4606, 4617, 6427, 6501, 6683, 1578, 1589, 412, 431, 10753, 10796, 4174, 4187, 
3138, 3158, 4271, 4286, 6947, 7048, 10860, 10880, 9713, 6792, 902, 913, 1554, 1571, 10779, 11023, 9532, 9576, 4571, 4600, 3143, 3175, 7055, 4242, 4253, 1293, 1491, 7415, 7532, 7526, 7630, 11, 22, 1381, 1392, 3215, 3231, 1113, 1130, 9770, 9797, 6383, 7753, 7839, 1302, 1343, 2235, 2246, 10242, 10261, 5365, 5462, 1596, 1618, 7260, 7407, 8638, 8684, 915, 986, 9165, 10269, 1216, 1239, 1724, 1735, 1604, 1632, 56, 67, 9470, 9497, 7779, 7837, 9866, 9890, 30, 41, 2431, 2442, 3140, 3351, 4528, 4558, 4684, 4696, 5798, 5816, 3820, 3831, 9836, 9848, 148, 159, 10390, 10635, 5300, 8947, 8973, 3322, 3333, 5482, 6109, 1022, 1052, 2294, 2328, 6933, 6946, 1673, 1684, 7271, 7404, 4664, 4675, 6197, 6216, 988, 999, 10095, 10114, 2042, 2058, 1819, 1840, 5151, 5162, 9313, 9324, 2295, 2349, 10293, 10304, 9391, 9847, 7793, 7914, 5983, 6081, 11093, 10424, 10846, 8246, 8286, 5658, 5668, 6825, 5681, 5692, 4799, 10470, 10526, 6175, 6186, 4022, 4033, 5335, 747, 768, 10868, 11089, 8931, 8960, 10729, 10763, 10955, 11004, 7918, 7956, 591, 602, 3299, 3310, 7223, 3891, 3902, 5490, 5501, 4113, 4124, 9220, 9253, 8275, 8313, 2275, 5989, 6008, 6043, 6054, 1879, 2025, 8081, 8103, 81, 92, 2412, 2423, 8892, 8915, 8029, 9065, 8135, 8152, 3343, 3354, 621, 632, 11046, 4042, 4053, 10463, 10551, 10598, 9428, 9461, 1748, 1775, 387, 6449, 6529, 6751, 7413, 7537, 6309, 6228, 5899, 5936, 10841, 10853, 8245, 8269, 9880, 10157, 7162, 8124, 8146, 6816, 5722, 5733, 1855, 2006, 917, 928, 8663, 8710, 5421, 5447, 10666, 10694, 10927, 10989, 5122, 5134, 3572, 3583, 7188, 7222, 5710, 43, 102, 8474, 8543, 241, 252, 7255, 7386, 8483, 8850, 8383, 8496, 3644, 3656, 4520, 4538, 8110, 8197, 1231, 1262, 6786, 2851, 6967, 6999, 7665, 6264, 6275, 7422, 8000, 453, 464, 9217, 9240, 7843, 7915, 8568, 8603, 2836, 2847, 5635, 5646, 8531, 8641, 11105, 11116, 518, 717, 8349, 8410, 6398, 6476, 6720, 10219, 10230, 8168, 8961, 4988, 4999, 8567, 8619, 2657, 2684, 4287, 4575, 2102, 2113, 4738, 4749, 10384, 10407, 9380, 9569, 1936, 1947, 4312, 
4348, 4919, 5030, 3958, 3980, 7172, 7224, 5488, 5621, 6235, 6246, 2573, 2593, 10520, 10567, 1320, 1338, 4502, 4513, 8221, 8244, 2863, 2916, 2744, 2775, 6676, 6695, 6706, 3964, 3986, 3389, 3401, 6495, 6730, 6760, 7745, 7855, 6820, 3100, 3122, 3365, 3376, 7546, 7670, 5, 178, 942, 953, 1144, 1155, 1425, 1462, 2869, 2905, 3185, 3451, 6992, 7040, 2548, 2568, 4381, 4400, 6586, 6663, 6678, 1358, 1447, 5337, 7324, 7446, 1643, 1654, 7059, 7070, 6547, 6589, 6613, 4873, 4884, 378, 9564, 9619, 5384, 5407, 126, 144, 8505, 8533, 10440, 10486, 359, 481, 5380, 5400, 5265, 1916, 1927, 3166, 3200, 8578, 8622, 498, 694, 8749, 8774, 2983, 3773, 9194, 9913, 5995, 6014, 2437, 2498, 4663, 4796, 6062, 6077, 8298, 8347, 11076, 11087, 7288, 7429, 5322, 423, 447, 814, 825, 7514, 7646, 9354, 9365, 185, 330, 2049, 2143, 7577, 7675, 2687, 2717, 2942, 2953, 7274, 3252, 3263, 4095, 4263, 270, 281, 5290, 5403, 5441, 6836, 9679, 9690, 5278, 10609, 10661, 4154, 4165, 5279, 6872, 7952, 6788, 4812, 4823, 2608, 2861, 5537, 5548, 6909, 7987, 6198, 6233, 8818, 786, 797, 4542, 4585, 3490, 3501, 10267, 3848, 3859, 9374, 9385, 7607, 7706, 6477, 6585, 6620, 7267, 7395, 4779, 4790, 1887, 2031, 2642, 2663, 8506, 8950, 9905, 9916, 10921, 10951, 7319, 7441, 1577, 1652, 5582, 5601, 8418, 8459, 3732, 3743, 9460, 9488, 4634, 4645, 9025, 9062, 10816, 10830, 6532, 6568, 6617, 8777, 8825, 2152, 2169, 10037, 10048, 6308, 8953, 9001, 10185, 10205, 9927, 9971, 4899, 4911, 2229, 2352, 8345, 8423, 1880, 1967, 874, 885, 8957, 8999, 7102, 7125, 1720, 2923, 2934, 1737, 1779, 2805, 2816, 3692, 3703, 2170, 2186, 8325, 8380, 3533, 3545, 2201, 2219, 1957, 1987, 2389, 2400, 3807, 3841, 5863, 5874, 7840, 7903, 4714, 4725, 5345, 8814, 8853, 8058, 9141, 3454, 3470, 1224, 1254, 5329, 3778, 3789, 750, 773, 5181, 5192, 6406, 6479, 6639, 4225, 4236, 6295, 2621, 2638, 8195, 8561, 7082, 7659, 7728, 3557, 3568, 8059, 9126, 9784, 9808, 521, 3932, 3943, 10892, 10922, 534, 545, 8797, 8068, 9142, 2738, 2117, 2128, 362, 373, 6359, 7559, 7657, 
2673, 2702, 4326, 4376, 7796, 7854, 10320, 10348, 6981, 7124, 298, 309, 5551, 5573, 2572, 2914, 4002, 4013, 7138, 2466, 2491, 2320, 5135, 5146, 5609, 5620, 3422, 3433, 3031, 8463, 8520, 3874, 3885, 3627, 3638, 7153, 7180, 8084, 8104, 7733, 7776, 7023, 5350, 6551, 6629, 6644, 7423, 7634, 1339, 1359, 9471, 6337, 10084, 10106, 9966, 10011, 5243, 9722, 9746, 5730, 5839, 10431, 9646, 9673, 5514, 5526, 9396, 9423, 4419, 4608, 4346, 9937, 9986, 9330, 9341, 2065, 2076, 9173, 9201, 1998, 2022, 2043, 2202, 1190, 1201, 7798, 7864, 10570, 10631, 4443, 4460, 1172, 1184, 6084, 6101, 8649, 8680, 5879, 5901, 3036, 3047, 9540, 9604, 9589, 9654, 557, 568, 2336, 2354, 10021, 10033, 388, 6262, 1023, 1057, 314, 337, 1692, 1703, 10066, 10079, 9270, 9281, 7249, 7391, 5261, 3409, 5887, 5922, 1439, 1478, 833, 844, 1080, 1091, 2515, 2528, 9167, 10264, 3596, 3615, 8893, 8917, 3448, 3476, 5027, 5038, 3513, 3524, 6877, 6908, 6360, 1823, 1844, 4478, 4490, 3086, 3984, 10128, 10139, 6292, 9945, 9979, 10460, 5838, 5849, 3715, 3726, 1747, 4650, 9537, 9627, 8175, 8204, 8023, 9064, 8017, 9097, 3484, 3740, 2970, 2981, 198, 209, 8732, 8751, 5947, 5967, 1529, 192, 4753, 4764, 5767, 5778, 960, 971, 7094, 7110, 3103, 3128, 1765, 1793, 9227, 10002, 4825, 4836, 5339, 7602, 7702] +fold 4: +[10717, 10740, 798, 860, 8015, 8074, 4943, 4954, 5053, 5064, 537, 666, 5704, 6864, 10186, 10211, 10932, 10980, 1370, 722, 733, 3226, 3242, 10778, 10805, 6310, 6840, 4294, 4306, 7614, 7735, 572, 583, 1105, 1215, 6071, 113, 134, 5465, 10523, 10636, 6139, 6150, 3913, 3924, 4851, 4862, 4191, 4209, 2507, 8689, 8718, 10643, 10686, 861, 872, 8534, 8616, 9535, 9648, 8869, 8882, 10392, 10426, 5199, 5210, 1100, 1112, 3758, 3769, 4131, 4142, 3066, 3077, 4315, 4347, 5749, 5762, 6389, 6433, 6729, 9170, 9225, 8766, 8804, 10317, 10333, 9748, 9766, 2282, 2326, 3273, 3706, 10150, 10161, 6341, 4063, 4074, 5070, 5081, 5218, 5229, 2786, 2797, 225, 236, 1537, 1559, 2530, 2541, 9806, 9823, 1021, 1056, 5096, 5206, 9710, 9729, 6893, 7979, 1401, 
1413, 7290, 7456, 6826, 10476, 10529, 3278, 3290, 10772, 10811, 2988, 2999, 5012, 5024, 7325, 7443, 184, 1416, 1449, 260, 321, 4090, 4101, 6941, 6961, 11000, 11035, 11052, 11063, 8928, 1924, 1972, 2748, 2779, 2457, 2470, 9293, 9304, 9021, 9089, 6159, 6170, 3590, 3614, 4406, 4417, 6123, 6281, 398, 471, 1280, 1291, 3188, 3205, 5789, 5810, 656, 667, 7247, 7303, 3674, 3686, 8233, 8300, 3009, 3020, 500, 685, 4920, 4931, 9489, 9509, 2592, 2622, 4425, 4447, 10983, 11022, 7875, 7943, 6979, 7000, 5275, 640, 652, 1867, 1900, 1503, 1514, 9870, 9889, 6448, 6507, 6723, 7343, 7436, 8239, 8495, 10681, 10706, 4964, 4976, 2840, 2889, 10367, 10378, 5960, 5984, 5095, 5106, 7755, 7800, 4609, 4620, 6410, 6481, 6536, 1581, 1592, 418, 436, 10741, 10785, 4176, 4190, 3148, 3176, 4274, 4288, 6925, 7047, 10873, 10899, 9702, 899, 910, 1549, 1568, 10757, 11001, 9550, 9588, 4556, 4584, 3156, 7052, 4245, 4256, 1485, 1498, 7483, 7581, 7578, 7668, 16, 29, 1375, 1386, 3213, 3227, 1115, 1131, 9774, 9799, 7783, 7883, 1304, 1346, 2243, 2254, 10245, 10271, 5354, 5453, 1615, 1634, 7360, 7507, 8647, 8688, 948, 1006, 10251, 10286, 1220, 1243, 1727, 1738, 1613, 1637, 50, 61, 9429, 9484, 7788, 7851, 9875, 9896, 28, 40, 2430, 2441, 3107, 3329, 4526, 4555, 4683, 4695, 5809, 5821, 3815, 3826, 9842, 9854, 154, 170, 10435, 10690, 5309, 8929, 8962, 3319, 3330, 5479, 6100, 1012, 1040, 2285, 2318, 6930, 6942, 1669, 1682, 7253, 7370, 4666, 4677, 6199, 6217, 994, 1005, 10092, 10109, 2053, 2064, 1811, 1832, 5158, 5169, 9309, 9321, 2283, 2309, 10299, 10310, 9369, 9736, 7825, 7929, 5916, 6027, 10402, 10624, 8258, 8293, 5660, 5670, 5676, 5687, 4803, 10484, 10548, 6181, 6192, 4029, 4040, 5331, 741, 764, 10790, 11012, 8924, 8956, 10742, 10773, 10972, 11013, 7928, 7965, 598, 610, 3304, 3315, 7179, 3897, 3910, 5495, 5506, 4105, 4116, 9234, 9256, 8279, 8315, 2276, 6369, 6004, 6033, 6041, 6052, 1870, 1901, 8078, 8093, 82, 93, 2405, 2416, 8911, 8932, 8056, 9101, 8137, 8154, 3344, 3355, 617, 628, 2918, 4051, 10443, 10494, 10582, 
10619, 9401, 9445, 1767, 1836, 6460, 6533, 6768, 7399, 7529, 6296, 6227, 5888, 5926, 10850, 10864, 8248, 8276, 9846, 10135, 7170, 8130, 8161, 5721, 5732, 1991, 2024, 923, 934, 8677, 8724, 5430, 5456, 10672, 10696, 10903, 10971, 5116, 5127, 3574, 3585, 7164, 7194, 5718, 65, 107, 8437, 8502, 245, 256, 7237, 7369, 8583, 8861, 8433, 8516, 3646, 3660, 4514, 4527, 8170, 9094, 1236, 1263, 2763, 6984, 7009, 7616, 6270, 7932, 459, 473, 9211, 9235, 7868, 7921, 8559, 8591, 2833, 2844, 5639, 5650, 8331, 8570, 11103, 11114, 517, 713, 8357, 8414, 6392, 6470, 6715, 10221, 10232, 8178, 9083, 4977, 4992, 8590, 2646, 2676, 4375, 5130, 2104, 2115, 4734, 4745, 10387, 10414, 9513, 9669, 1932, 1943, 2094, 4327, 4852, 4963, 3969, 3991, 7155, 7199, 5532, 5917, 6234, 6245, 2574, 2596, 10528, 10572, 1310, 1326, 4491, 4504, 8199, 8232, 2853, 2908, 2733, 2766, 6662, 6690, 6701, 3961, 3983, 3385, 3397, 6572, 6736, 6766, 7734, 7844, 3088, 3114, 3363, 3374, 7552, 7674, 3, 175, 940, 951, 1142, 1153, 1421, 1457, 2859, 2900, 3162, 3395, 6958, 7037, 2560, 2907, 4385, 4402, 6652, 6669, 6686, 1437, 1480, 5382, 7228, 7349, 1647, 1661, 7067, 7078, 6420, 6569, 6601, 5254, 4872, 4883, 385, 9549, 9607, 5388, 5414, 112, 135, 8453, 8523, 10465, 10517, 467, 483, 5383, 5411, 5256, 1917, 1928, 3184, 8547, 8599, 495, 692, 8742, 8765, 3050, 3895, 9249, 9991, 6007, 6024, 2451, 2520, 4408, 4719, 6059, 6073, 1470, 8304, 8352, 11079, 11090, 7406, 7509, 417, 441, 811, 822, 7564, 7705, 9352, 9364, 316, 339, 2114, 2161, 7608, 7695, 2696, 2732, 2945, 2956, 3255, 3266, 4150, 4269, 276, 287, 5286, 5415, 5470, 9684, 9695, 5293, 10604, 10655, 4152, 4163, 5285, 6894, 7995, 6795, 4818, 2762, 2890, 5535, 5546, 6885, 6922, 6130, 6209, 8832, 790, 801, 4551, 4589, 3493, 3504, 10278, 3847, 3858, 9382, 9393, 7584, 7691, 6443, 6567, 6615, 7333, 7469, 4778, 4789, 1848, 1890, 2649, 2670, 8650, 9027, 9899, 9910, 10905, 10940, 7310, 7426, 1566, 1641, 5581, 5600, 8407, 8452, 3741, 3752, 9407, 9463, 4628, 4639, 9044, 9092, 10828, 10839, 
6539, 6570, 6705, 8768, 8820, 2149, 2166, 10041, 10052, 6306, 8979, 9018, 10178, 10199, 9925, 9962, 4894, 4905, 2233, 2366, 8382, 8462, 1954, 1977, 876, 887, 8964, 9002, 7108, 7128, 1723, 9947, 2922, 2933, 1715, 1776, 2813, 2824, 5315, 3688, 3699, 2164, 2182, 8327, 8385, 3532, 3544, 2047, 2212, 1978, 2377, 2392, 3799, 3810, 5856, 5867, 7870, 4712, 4723, 8845, 8864, 8019, 9085, 3457, 3475, 1207, 1230, 3785, 3796, 742, 763, 5177, 5188, 6440, 6500, 6732, 4220, 4231, 6290, 2620, 2636, 8250, 8672, 7087, 6847, 7554, 7689, 3549, 3560, 8035, 9091, 9788, 9814, 513, 3936, 3947, 10875, 10906, 528, 539, 8027, 8075, 2770, 2127, 2138, 366, 485, 6862, 7521, 7627, 2689, 2715, 4350, 4384, 7810, 7867, 10345, 10356, 10405, 7101, 7790, 291, 302, 5552, 5575, 2563, 2876, 1687, 4003, 4014, 7104, 2474, 2497, 2312, 5139, 5605, 5618, 3420, 3431, 8403, 8490, 3865, 3876, 3626, 3637, 7154, 7184, 8090, 8108, 7716, 7758, 7018, 5463, 6619, 6636, 6655, 7534, 7690, 1330, 1354, 9400, 9518, 6329, 10077, 10099, 9955, 10008, 5251, 9709, 9734, 5719, 5828, 10464, 9650, 9675, 8344, 5306, 10202, 5515, 5528, 9416, 9454, 4298, 4530, 4318, 9943, 9998, 9339, 9351, 2072, 2083, 9158, 9196, 1852, 2008, 2196, 1185, 1196, 7819, 7892, 10485, 10586, 4444, 4461, 1166, 1177, 6091, 6110, 5560, 8637, 8664, 5881, 5907, 3043, 3054, 9515, 9567, 9546, 9623, 551, 562, 2342, 2359, 10012, 10029, 393, 6260, 1026, 1060, 334, 353, 1698, 1712, 10060, 10072, 9273, 9284, 7273, 7430, 5253, 3408, 3419, 5890, 5925, 1446, 1482, 829, 840, 1084, 1096, 2494, 2521, 9153, 9191, 3589, 3607, 8880, 8898, 3453, 3482, 6312, 5025, 5036, 3514, 3525, 6905, 6923, 6349, 1820, 1842, 4479, 4492, 3016, 3684, 10133, 10144, 6297, 9956, 9992, 10473, 5842, 5853, 3713, 3724, 1754, 4657, 9601, 9651, 8172, 8201, 8045, 9114, 9078, 9140, 3595, 3884, 2973, 2984, 200, 211, 8721, 8738, 5952, 5971, 1531, 194, 4755, 4766, 5774, 966, 977, 7103, 7129, 3099, 3123, 1786, 1800, 9858, 10246, 4827, 4838, 5351, 7558, 7677] +fold 5: +[5824, 10710, 10728, 743, 806, 8021, 9028, 
4938, 4949, 5046, 5057, 615, 5703, 6869, 10177, 10204, 10950, 10993, 1367, 724, 735, 3235, 3246, 10759, 10797, 6347, 6818, 4299, 4313, 7518, 7676, 571, 582, 1094, 1194, 6076, 105, 127, 5374, 10580, 10670, 6140, 6151, 3914, 3925, 4853, 4864, 4204, 4215, 2504, 8697, 8725, 10608, 10665, 852, 863, 8537, 8620, 9531, 9643, 8852, 8873, 10400, 10430, 5194, 5205, 1106, 1123, 3764, 3775, 4136, 4147, 3065, 3076, 4319, 4351, 5746, 5758, 6401, 6444, 6744, 9177, 9229, 6867, 8755, 8796, 10315, 10331, 9752, 9768, 2259, 2319, 3407, 3795, 10152, 10163, 4065, 4076, 5071, 5082, 5224, 5235, 2789, 2800, 221, 232, 1548, 2380, 2537, 9793, 9821, 1031, 1064, 5129, 5239, 9716, 9737, 7953, 8002, 1396, 1408, 7268, 7434, 6848, 10469, 10527, 3274, 3286, 10787, 10818, 2992, 3003, 5006, 5017, 7241, 7375, 1314, 1423, 216, 277, 4091, 4102, 6950, 6966, 11017, 11039, 11055, 11066, 8817, 1913, 1970, 2723, 2771, 2461, 2478, 9288, 9299, 9042, 9146, 6162, 6173, 6860, 3599, 4410, 4421, 6126, 6284, 401, 493, 1274, 1285, 3168, 3197, 5782, 5797, 654, 665, 7236, 7291, 3654, 3676, 8066, 8253, 3010, 3021, 681, 718, 984, 4915, 4926, 9492, 9510, 2601, 2627, 4435, 4462, 10924, 11005, 7913, 7968, 6987, 7006, 559, 645, 1871, 1906, 1507, 1518, 9863, 9884, 6445, 6493, 6709, 7352, 7444, 8206, 8472, 10639, 10695, 4968, 4984, 2785, 2880, 10365, 10376, 5948, 5973, 5099, 5110, 7749, 7791, 4611, 4622, 6451, 6512, 6756, 1582, 1593, 415, 434, 10750, 10793, 4177, 4193, 3137, 3157, 4266, 4284, 7038, 7977, 10874, 10904, 9624, 6784, 894, 905, 1539, 1560, 10734, 10979, 9563, 9600, 4553, 4582, 3145, 3179, 7053, 4244, 4255, 1472, 1496, 7473, 7573, 7437, 7585, 13, 25, 1372, 1383, 3217, 3233, 1119, 1132, 9781, 9809, 7730, 7828, 1308, 1353, 2245, 2256, 10243, 10263, 5439, 1606, 1624, 7320, 7478, 8671, 8712, 959, 1017, 9168, 10272, 1223, 1244, 1725, 1736, 1620, 1640, 57, 68, 9475, 9503, 7770, 7826, 9871, 9894, 33, 44, 2433, 2444, 3173, 3396, 4533, 4564, 4693, 4704, 5811, 5822, 3821, 3832, 9829, 9843, 150, 164, 10512, 10957, 5311, 6324, 
8941, 8967, 3327, 3338, 6095, 6118, 1036, 2270, 2310, 6929, 6940, 1501, 1675, 7213, 7304, 4662, 4673, 6206, 6220, 992, 1003, 10104, 10123, 2054, 2218, 1818, 1838, 5155, 5166, 9311, 9322, 2278, 2304, 10294, 10305, 9447, 10291, 7807, 7919, 5928, 6039, 10413, 10668, 8274, 8311, 5659, 5669, 5679, 5690, 4802, 10472, 10533, 6185, 6196, 4025, 4036, 5332, 753, 774, 10723, 10934, 8936, 8989, 10716, 10749, 10947, 10998, 7931, 7969, 593, 605, 3301, 3312, 7201, 3889, 3900, 5491, 5502, 4114, 4125, 9246, 9262, 8292, 8326, 2272, 6378, 5996, 6023, 6036, 6047, 1878, 1909, 8092, 8120, 75, 86, 2410, 2421, 5610, 8909, 8930, 9051, 9148, 8134, 8150, 3349, 3360, 620, 631, 11038, 4046, 4057, 10441, 10488, 10574, 10616, 9442, 9468, 1756, 1778, 6411, 6497, 6694, 7378, 7519, 5914, 5944, 10849, 10863, 8236, 8263, 9957, 10268, 7159, 8139, 8165, 5729, 5740, 1997, 2032, 918, 929, 8646, 8692, 5412, 5440, 10677, 10698, 10877, 10953, 5115, 5126, 3575, 7189, 7226, 5713, 94, 129, 8491, 8554, 248, 259, 7359, 7493, 8406, 8828, 8401, 8500, 3643, 3655, 4517, 4534, 8044, 8181, 1242, 1264, 6787, 2884, 6957, 6991, 7652, 6268, 7894, 8008, 454, 465, 9190, 9223, 7880, 7941, 8576, 8609, 2835, 2846, 5637, 5648, 8348, 8581, 10968, 11109, 509, 703, 8369, 8425, 6464, 6661, 6770, 10217, 10228, 8155, 8939, 4983, 4995, 8560, 8615, 2659, 2688, 4173, 4430, 2101, 2112, 4732, 4743, 10381, 10403, 9436, 9591, 1940, 2091, 4305, 4341, 4896, 5007, 3966, 3989, 5310, 7140, 7181, 5554, 6028, 6240, 6251, 2580, 2603, 10539, 10587, 1315, 1331, 4484, 4503, 8213, 8238, 2664, 2882, 2724, 2759, 6641, 6688, 6699, 3968, 3990, 3387, 3399, 6395, 6708, 6743, 7811, 7922, 6824, 3093, 3116, 3366, 3377, 7531, 7655, 163, 180, 944, 955, 1146, 1157, 1430, 1479, 2878, 2912, 3229, 3729, 7025, 7043, 2549, 2569, 4373, 4391, 6638, 6664, 6679, 1440, 1483, 7340, 7462, 1651, 1670, 7066, 7077, 6400, 6558, 6596, 5244, 4869, 4880, 386, 9528, 9597, 5392, 5417, 124, 143, 8457, 8525, 10455, 10507, 477, 491, 5391, 5466, 5248, 1912, 1923, 3198, 8558, 8610, 506, 
697, 8754, 8787, 3005, 3817, 9183, 9835, 5997, 6015, 2393, 2455, 4597, 4774, 6063, 6079, 1484, 8333, 8376, 11074, 11085, 7380, 7485, 5316, 403, 433, 807, 818, 7530, 7686, 9359, 9370, 315, 336, 2103, 2150, 7619, 7703, 2703, 2745, 2948, 2959, 3259, 3270, 4216, 4275, 267, 280, 5401, 5437, 9677, 9688, 10549, 10637, 4158, 4169, 6899, 8004, 6781, 4813, 4824, 2697, 2873, 5538, 5549, 6897, 7958, 6174, 6214, 8823, 785, 796, 4576, 4599, 3494, 3505, 9151, 3846, 3857, 9383, 9394, 7510, 7638, 6429, 6563, 6614, 7307, 7440, 4772, 4783, 1853, 1893, 2658, 2679, 8439, 8927, 9908, 9919, 10908, 10941, 7251, 7376, 1555, 1630, 5572, 5594, 8432, 8467, 3735, 3746, 9444, 9481, 4627, 4638, 9032, 9070, 10822, 10833, 6499, 6559, 6584, 8803, 8837, 2147, 2160, 10040, 10051, 6343, 8959, 9006, 10182, 10200, 9940, 9993, 4898, 4910, 2232, 2363, 8365, 8448, 1964, 1985, 879, 890, 8986, 9015, 7109, 7130, 1710, 10001, 2927, 2938, 1763, 1794, 2814, 2825, 5304, 3694, 3705, 2071, 2173, 2187, 8296, 8343, 3531, 3543, 2203, 2222, 1968, 1990, 2385, 2397, 3803, 3836, 5857, 5868, 7797, 7886, 4710, 4721, 8826, 8854, 8049, 9125, 3464, 3480, 1219, 1250, 3783, 3794, 740, 761, 5176, 5187, 6450, 6518, 6764, 4226, 4237, 2616, 2634, 8417, 8816, 5759, 7088, 6855, 7606, 7711, 5608, 3539, 3559, 8051, 9118, 9777, 9801, 510, 3934, 3945, 10887, 10918, 525, 536, 8778, 8067, 9128, 2119, 2130, 363, 374, 6789, 6327, 7431, 7587, 2683, 2709, 4362, 4389, 7824, 7873, 10341, 10352, 10394, 7090, 7679, 294, 305, 5571, 5595, 2570, 2893, 1688, 4000, 4011, 2469, 2493, 5144, 5614, 5625, 3423, 3434, 3032, 8445, 8510, 3870, 3881, 3623, 3634, 7139, 7156, 8086, 8105, 7708, 7754, 7015, 7026, 6516, 6623, 6640, 7368, 7601, 1337, 1357, 9411, 9676, 6348, 10083, 10103, 9951, 10004, 9715, 9740, 5675, 5783, 10438, 9628, 9664, 8353, 5336, 10091, 5516, 5529, 9405, 9443, 4342, 4574, 4339, 9941, 9994, 9338, 9350, 2068, 2079, 9152, 9192, 2007, 2037, 2195, 2225, 1186, 1197, 5280, 7812, 7876, 10498, 10592, 4446, 4465, 1162, 1173, 6093, 6112, 8648, 8678, 
5898, 5932, 3044, 3055, 9523, 9579, 9529, 9611, 553, 564, 2351, 2367, 10023, 10034, 389, 6256, 1041, 1067, 324, 345, 1701, 1719, 10059, 10071, 9267, 9278, 7204, 7309, 3411, 5902, 5933, 1429, 1471, 836, 847, 1075, 1086, 2496, 2522, 9162, 10253, 3598, 3619, 8888, 8906, 3443, 3460, 5032, 5043, 3517, 3528, 6889, 6915, 6338, 1829, 1846, 4480, 4493, 3082, 3928, 10131, 10142, 6294, 9942, 9978, 10480, 5834, 5845, 3712, 3723, 1745, 4648, 4659, 9616, 9659, 8180, 8212, 8037, 9093, 9067, 9130, 3540, 3806, 2966, 2977, 199, 210, 8729, 8743, 5949, 5969, 1530, 186, 4750, 4761, 5768, 965, 976, 7097, 7117, 3098, 3121, 1780, 1798, 9824, 10179, 4829, 4840, 5362, 7499, 7644] +fold 6: +[5827, 10714, 10736, 776, 838, 8070, 9124, 4946, 4957, 5048, 5059, 604, 5701, 6856, 10173, 10196, 10938, 10984, 723, 734, 3222, 3241, 10744, 10792, 6302, 6851, 4295, 4307, 7591, 7726, 576, 587, 1149, 1259, 87, 119, 5454, 10492, 10602, 6131, 6142, 3911, 3922, 4849, 4860, 4182, 4208, 2492, 8693, 8723, 10634, 10678, 859, 870, 8504, 8601, 9548, 9661, 8867, 8879, 10388, 10421, 5193, 5204, 1104, 1120, 3763, 3774, 4135, 4146, 3062, 3073, 4335, 4363, 5742, 5753, 6404, 6447, 6754, 9156, 9218, 8744, 8795, 10325, 10338, 9745, 9764, 2279, 2322, 3340, 3762, 10151, 10162, 6336, 4072, 5238, 5078, 5089, 5220, 5231, 2782, 2793, 224, 235, 1538, 1562, 2372, 2532, 9816, 9827, 1010, 1035, 4073, 5141, 9699, 9717, 6871, 7964, 1400, 1412, 7401, 7612, 6839, 10511, 10566, 3279, 3291, 10775, 10813, 2987, 2998, 5005, 5016, 7278, 7411, 1325, 1427, 266, 354, 4086, 4097, 6953, 6970, 10985, 11032, 11050, 11061, 8484, 1858, 1961, 2711, 2765, 2456, 2468, 9290, 9301, 8980, 9047, 6160, 6171, 3588, 3613, 4403, 4414, 6120, 6278, 416, 1272, 1283, 3159, 3192, 5780, 5792, 657, 669, 7244, 7300, 3665, 3681, 8243, 9116, 3011, 3022, 6813, 673, 693, 4916, 4927, 9486, 9508, 2581, 2610, 6800, 4437, 4466, 10966, 11016, 7863, 7937, 6993, 7011, 5268, 592, 646, 1886, 2029, 1502, 1513, 9859, 9877, 6456, 6514, 6747, 7281, 7374, 8428, 8661, 10660, 10702, 
4961, 4972, 2850, 2917, 10369, 10380, 5970, 5988, 5100, 5111, 7725, 7771, 4610, 4621, 6421, 6491, 6544, 1573, 1584, 408, 426, 10769, 10806, 4183, 4199, 3144, 3169, 4139, 4279, 6892, 7045, 10858, 10876, 9647, 6810, 901, 912, 1553, 1570, 10801, 11045, 9545, 9586, 4545, 4573, 3147, 3183, 7051, 4247, 4258, 1487, 1499, 7405, 7525, 7570, 7662, 15, 27, 1376, 1387, 3210, 3223, 1110, 1129, 9750, 9785, 6323, 7769, 7857, 1307, 1351, 2244, 2255, 10238, 10250, 5359, 5457, 1617, 1638, 7377, 7516, 8665, 8703, 981, 1050, 9174, 10284, 1213, 1235, 1730, 1741, 1611, 1636, 55, 66, 9413, 9479, 7799, 7859, 9864, 9887, 24, 39, 2429, 2440, 3085, 3307, 4524, 4554, 4689, 4700, 5796, 5815, 3814, 3825, 9837, 9849, 147, 158, 10535, 11107, 6342, 8955, 8981, 3320, 3331, 5480, 6103, 1027, 1058, 2303, 2334, 6928, 6939, 1659, 1678, 7245, 7356, 4670, 4681, 6202, 6218, 990, 1001, 10093, 10111, 2055, 2221, 1806, 1824, 5153, 5164, 9315, 9326, 2293, 2338, 10301, 10318, 9402, 9958, 7752, 7861, 5961, 6072, 10446, 11068, 8260, 8297, 5652, 5663, 6845, 5674, 5685, 4804, 10477, 10540, 6179, 6190, 4020, 4031, 762, 779, 10768, 10990, 8937, 8991, 10724, 10758, 10982, 11018, 7898, 7946, 594, 606, 3300, 3311, 7157, 3896, 3908, 5486, 5497, 4109, 4120, 9224, 9254, 8283, 8320, 2265, 6366, 5992, 6017, 6037, 6048, 1860, 1894, 8082, 8106, 78, 89, 2403, 2414, 8899, 8919, 8034, 9074, 8142, 8159, 3347, 3358, 618, 629, 11042, 4047, 4058, 10466, 10558, 10603, 9424, 9457, 1764, 1814, 402, 6425, 6517, 6726, 7447, 7555, 6226, 5911, 5943, 10848, 10861, 8231, 8259, 9802, 10068, 8126, 8151, 6817, 5726, 5737, 1861, 2012, 925, 936, 8667, 8713, 5427, 5452, 10647, 10685, 10914, 10981, 5112, 5123, 3570, 3581, 7178, 7206, 5717, 54, 104, 8442, 8538, 242, 253, 7262, 7403, 8750, 9005, 8355, 8478, 3653, 3675, 4521, 4540, 8174, 9127, 1257, 1268, 6775, 2906, 6977, 7005, 7542, 7685, 6267, 6882, 7993, 457, 469, 9197, 9226, 7890, 7945, 8532, 8585, 2832, 2843, 5630, 5641, 8476, 8632, 11056, 11110, 516, 711, 8381, 8436, 6457, 6583, 6762, 10223, 
10234, 8192, 9138, 4989, 5000, 8575, 8627, 2662, 2690, 4265, 4464, 2096, 2107, 4735, 4746, 10397, 10422, 9480, 9636, 1938, 2090, 4310, 4345, 4907, 5018, 3955, 3976, 7176, 7230, 5521, 5806, 6239, 6250, 2578, 2602, 10537, 10583, 1312, 1329, 4494, 4505, 8189, 8226, 2707, 2891, 2719, 2756, 6654, 6689, 6700, 3972, 3994, 3390, 3402, 6606, 6741, 6774, 7756, 7866, 6822, 3105, 3132, 3361, 3372, 7495, 7588, 160, 179, 943, 954, 1145, 1156, 1426, 1468, 2872, 2910, 3207, 3618, 7003, 7042, 2552, 2874, 4374, 4393, 6485, 6659, 6675, 1402, 1453, 5346, 7266, 7373, 1653, 1671, 7065, 7076, 6555, 6593, 6765, 5247, 4875, 4886, 381, 9582, 9629, 5398, 5423, 120, 141, 8429, 8522, 10450, 10503, 476, 489, 5390, 5455, 1911, 1922, 3193, 8549, 8604, 690, 720, 8747, 8769, 2950, 3717, 9308, 10191, 6012, 6031, 2415, 2476, 4563, 4763, 6057, 6068, 1458, 8329, 8367, 11075, 11086, 7418, 7517, 5318, 421, 445, 813, 824, 7551, 7700, 9360, 9371, 313, 335, 2082, 2145, 7598, 7693, 2701, 2739, 2947, 2958, 7261, 3258, 3269, 4205, 4273, 275, 286, 5368, 5429, 6831, 9686, 9697, 5259, 10595, 10651, 4156, 4167, 5294, 6886, 7990, 6803, 4810, 4821, 2553, 2852, 5539, 5550, 6875, 6919, 6205, 6277, 8781, 8844, 783, 794, 4557, 4590, 3496, 3508, 10283, 3845, 3856, 9381, 9392, 7471, 7617, 6408, 6557, 6609, 7283, 7410, 4780, 4791, 1882, 2014, 2645, 2666, 8261, 8872, 9900, 9911, 10928, 10952, 7297, 7409, 1533, 1608, 5586, 5604, 8412, 8454, 3736, 3747, 9437, 9478, 4626, 4637, 9037, 9080, 10824, 10835, 6548, 6576, 6727, 8784, 8827, 2153, 2171, 10045, 10056, 2257, 6291, 8982, 9020, 10187, 10207, 9936, 9988, 4900, 4912, 6363, 2231, 2358, 8374, 8456, 1935, 1971, 880, 8949, 8995, 7119, 7136, 1713, 2929, 2940, 1766, 1803, 2812, 2823, 5308, 3689, 3700, 2159, 2180, 8316, 8366, 3541, 2039, 2206, 1975, 2387, 2398, 3804, 3837, 5855, 5866, 7802, 7891, 4715, 4726, 5357, 8840, 8860, 9057, 9144, 3459, 3477, 1217, 1247, 3780, 3791, 736, 752, 5171, 5182, 6399, 6469, 6550, 4228, 4239, 2613, 2633, 8228, 8606, 7084, 7571, 7697, 5603, 3556, 
3567, 8025, 9063, 9771, 9796, 494, 707, 3940, 3952, 10878, 10907, 535, 546, 8792, 8061, 9120, 2118, 2129, 358, 370, 6782, 6370, 7498, 7610, 2691, 2720, 4334, 4379, 7817, 7869, 10342, 10353, 7035, 7135, 5366, 293, 304, 5567, 5591, 2556, 2829, 4007, 4018, 7091, 2472, 2495, 2324, 5145, 5611, 5622, 3430, 3441, 3034, 8435, 8503, 3869, 3880, 3631, 3642, 7143, 7166, 8096, 8113, 7718, 7763, 7013, 7024, 5338, 6621, 6637, 6657, 7556, 7701, 1328, 1352, 9441, 10096, 10120, 9972, 10014, 9718, 9742, 5772, 5883, 10451, 9630, 9667, 8475, 5284, 5508, 5519, 9419, 9459, 4497, 4674, 4354, 9922, 9961, 9337, 9348, 2075, 2086, 9179, 9208, 2004, 2033, 2191, 2211, 1189, 1200, 5277, 7792, 7858, 10467, 10575, 4436, 4455, 1168, 1179, 6087, 6104, 8633, 8657, 5889, 5918, 3041, 3052, 9526, 9584, 9541, 9618, 550, 561, 2347, 2364, 10005, 10027, 392, 6259, 1051, 1070, 328, 348, 1700, 1717, 10064, 10076, 9268, 9279, 7191, 7285, 3410, 5909, 5937, 1466, 830, 841, 1082, 1095, 2505, 2525, 9182, 10280, 3591, 3609, 8887, 8903, 3450, 3479, 5031, 5042, 3509, 3520, 6883, 6910, 6362, 1815, 1839, 4474, 4487, 3080, 3873, 6373, 10129, 10140, 9933, 9970, 5836, 5847, 3710, 3721, 1753, 4656, 9594, 9645, 8179, 8208, 8057, 9129, 8012, 9090, 3629, 3917, 2971, 2982, 206, 217, 8727, 8740, 5957, 5977, 1534, 191, 4759, 4770, 5773, 2424, 969, 980, 7092, 7106, 3072, 3106, 1769, 1795, 9260, 10024, 4835, 4846, 5473, 7543, 7671] +fold 7: +[5829, 10703, 10721, 732, 805, 8064, 9104, 4936, 4947, 5050, 5061, 581, 699, 5702, 10175, 10201, 10920, 10974, 727, 3232, 3244, 10752, 10795, 4289, 4300, 7660, 7743, 574, 585, 1127, 1237, 6074, 109, 130, 5475, 10504, 10613, 6134, 6145, 3916, 3927, 4855, 4866, 4200, 4211, 2513, 8699, 8726, 10625, 10674, 855, 866, 8530, 8614, 9557, 9671, 8865, 8878, 10410, 10437, 5197, 5208, 1097, 1108, 3755, 3766, 4127, 4138, 3064, 3075, 4314, 4344, 5751, 6397, 6442, 6738, 9184, 9236, 6865, 8776, 8811, 10316, 10332, 9741, 9761, 2311, 2360, 3495, 3906, 10154, 10165, 6346, 4067, 4078, 5075, 5086, 5223, 5234, 
2792, 2803, 223, 234, 1536, 1557, 2386, 2539, 9818, 9830, 1014, 1045, 5063, 5174, 9704, 9723, 7934, 7985, 1395, 1407, 7323, 7523, 10482, 10536, 3276, 3288, 10755, 10794, 2993, 3004, 5008, 5020, 7256, 7385, 1336, 1431, 205, 269, 4088, 4099, 6954, 6973, 10973, 11031, 11049, 11060, 8373, 1951, 1994, 2705, 2761, 2467, 2488, 9287, 9298, 9017, 9077, 6155, 6166, 6837, 3608, 4413, 4424, 6129, 6287, 404, 1273, 1284, 3164, 3194, 5781, 5795, 650, 663, 7221, 7282, 3659, 3678, 8223, 8280, 3015, 3026, 6314, 6829, 496, 682, 4917, 4928, 9498, 9516, 2594, 2623, 6797, 4429, 4453, 10958, 11011, 7920, 7982, 6975, 6998, 5255, 526, 643, 1889, 2034, 1509, 1520, 9856, 9876, 6396, 6468, 6542, 7313, 7402, 8361, 8617, 10684, 10708, 4958, 4969, 2686, 2867, 10362, 10373, 5963, 5985, 5092, 5103, 7751, 7795, 4604, 4615, 6394, 6471, 6523, 1576, 1587, 420, 437, 10737, 10781, 4175, 4189, 3146, 3172, 4117, 4277, 7014, 7944, 10872, 10896, 9547, 896, 907, 1543, 1563, 10679, 10956, 9542, 9583, 4566, 4593, 3150, 7050, 4243, 4254, 1296, 1494, 7367, 7508, 7484, 7613, 8, 19, 1378, 1389, 3212, 3225, 1128, 9765, 9795, 6379, 7744, 7831, 1305, 1349, 2238, 2249, 10244, 10266, 5340, 5448, 1603, 1623, 7348, 7497, 8656, 8695, 982, 1061, 9171, 10281, 1218, 1240, 1728, 1739, 1607, 1635, 58, 69, 9473, 9499, 7774, 7830, 9878, 9897, 31, 42, 2432, 2443, 3151, 3384, 4531, 4561, 4692, 4703, 5793, 5814, 3813, 3824, 9839, 9851, 152, 166, 10335, 10590, 5305, 6352, 8951, 8976, 3323, 3334, 5483, 6111, 1032, 2237, 2307, 6932, 6944, 1672, 1683, 7196, 7284, 4669, 4680, 6204, 6219, 996, 1007, 10100, 10121, 2056, 2224, 1805, 1822, 5157, 5168, 9318, 9329, 2297, 2371, 10300, 10312, 9425, 10180, 7761, 7872, 5950, 6061, 11095, 10479, 11118, 8254, 8291, 5661, 5671, 6835, 5678, 5689, 4795, 4807, 10499, 10565, 6180, 6191, 4030, 4041, 5330, 749, 770, 10657, 10901, 8934, 8985, 10739, 10771, 10987, 11025, 7926, 7962, 599, 611, 3298, 3309, 7146, 7279, 3894, 3907, 5485, 5496, 4111, 4122, 9204, 9248, 8285, 8322, 2267, 6374, 5993, 6020, 6038, 
6049, 1873, 1904, 8087, 8118, 72, 83, 2408, 2419, 8904, 8925, 9035, 9139, 8138, 8156, 3348, 3359, 624, 635, 11047, 2919, 4052, 10449, 10500, 10584, 10622, 9420, 9453, 1762, 1792, 6432, 6520, 6737, 7350, 7504, 5903, 5938, 10852, 10869, 8237, 8265, 9902, 10213, 8132, 8163, 6804, 5723, 5734, 1859, 2009, 916, 927, 8658, 8704, 5435, 5464, 10662, 10693, 10933, 10996, 5117, 5128, 3579, 7186, 7217, 5714, 10, 99, 8489, 8552, 247, 258, 7292, 7442, 8683, 8894, 8370, 8487, 3645, 3658, 4516, 4532, 8055, 8187, 1221, 1261, 6963, 6996, 7576, 6273, 7978, 455, 466, 9202, 9231, 7904, 7951, 8519, 8580, 2834, 2845, 5631, 5642, 8309, 8555, 11104, 11115, 514, 708, 8394, 8447, 6412, 6490, 6733, 10215, 10226, 8133, 8215, 4979, 4993, 8596, 2635, 2665, 4364, 5019, 2099, 2110, 4728, 4739, 10386, 10411, 9414, 9580, 1933, 1944, 2088, 4329, 4863, 4974, 3965, 3987, 5301, 7149, 7187, 5510, 5654, 6231, 6242, 2582, 2607, 10543, 10591, 1318, 1333, 4499, 4510, 8185, 8225, 2807, 2899, 2712, 2753, 6667, 6691, 6702, 3975, 3996, 3383, 3394, 6528, 6731, 6763, 7789, 7899, 3104, 3127, 3364, 3375, 7506, 7594, 167, 182, 946, 957, 1148, 1159, 1436, 2543, 2883, 3028, 3285, 6881, 7030, 2555, 2888, 4377, 4395, 6452, 6656, 6671, 1443, 1486, 7331, 7455, 1648, 1664, 7060, 7071, 6482, 6571, 6603, 5252, 4870, 4881, 380, 9587, 9633, 5399, 5426, 122, 142, 8404, 8515, 10429, 10475, 478, 492, 5393, 5477, 5260, 1914, 1925, 3191, 8526, 8582, 683, 704, 8758, 8812, 3027, 3851, 9216, 9946, 6003, 6021, 2426, 2487, 4508, 4741, 6066, 1488, 8310, 8358, 11073, 11084, 7311, 7438, 413, 440, 810, 821, 7503, 7633, 9362, 9373, 323, 346, 2140, 2193, 7539, 7654, 2708, 2747, 2949, 2960, 3257, 3268, 4184, 4272, 278, 289, 5282, 5396, 5432, 6838, 9685, 9696, 5302, 10615, 10664, 4157, 4168, 6890, 7991, 6808, 4817, 2630, 2866, 5527, 5541, 6916, 8007, 6200, 6244, 8838, 782, 793, 4568, 4596, 3488, 3499, 10249, 3852, 3863, 9384, 9395, 7536, 7661, 6549, 6607, 6771, 7344, 7481, 4777, 4788, 1876, 1908, 2648, 2668, 8572, 8972, 9901, 9912, 10919, 10949, 
7259, 7392, 1595, 1663, 5584, 5602, 8424, 8460, 3737, 3748, 9421, 9472, 4629, 4640, 9041, 9087, 10827, 10838, 6541, 6574, 6716, 8788, 8831, 2157, 2178, 10047, 10058, 6301, 8975, 9013, 10172, 10195, 9921, 9952, 4893, 4904, 6371, 2234, 2368, 8413, 8488, 1946, 1974, 875, 886, 8978, 9012, 7121, 7137, 1716, 2924, 2935, 1761, 1790, 2811, 2822, 5314, 3697, 3708, 2092, 2175, 2188, 8302, 8354, 3536, 3548, 2205, 2223, 1981, 2384, 2396, 3802, 3835, 5862, 5873, 7860, 4713, 4724, 5377, 8833, 8857, 8024, 9099, 3452, 3469, 1214, 1245, 5325, 3786, 3797, 738, 757, 5173, 5184, 6423, 6492, 6714, 4223, 4234, 6299, 2600, 2631, 8384, 8783, 5755, 7083, 6834, 7528, 7682, 3553, 3564, 9049, 9145, 9763, 9794, 507, 3941, 3953, 10894, 10926, 530, 541, 8789, 8053, 9111, 2757, 2122, 2133, 355, 369, 6364, 7486, 7603, 2680, 2704, 4369, 4398, 7838, 7887, 10329, 10350, 10408, 7057, 7346, 297, 308, 5561, 5585, 2547, 2641, 1689, 4004, 4015, 2485, 2512, 2302, 5138, 5617, 5628, 3421, 3432, 8389, 8486, 3868, 3879, 3632, 7141, 7160, 8101, 8116, 7722, 7768, 7020, 6537, 6625, 6642, 7468, 7656, 1345, 1361, 9417, 10087, 10112, 9984, 10020, 5273, 9725, 9749, 5752, 5861, 10458, 9621, 9660, 8421, 5513, 5525, 9408, 9446, 4441, 4619, 4349, 9929, 9968, 9332, 9343, 2073, 2084, 9189, 10282, 2002, 2030, 2200, 1187, 1198, 5281, 7806, 7871, 10542, 10621, 4440, 4458, 1165, 1176, 6094, 6113, 5557, 8630, 8655, 5886, 5913, 3040, 3051, 9520, 9575, 9596, 9662, 547, 558, 2350, 2365, 10017, 10031, 397, 6254, 1034, 1066, 333, 351, 1694, 1705, 10067, 10081, 9266, 9277, 7263, 7414, 3414, 5885, 5920, 1454, 831, 842, 1079, 1090, 2511, 2527, 9157, 9195, 3587, 3605, 8890, 8912, 3445, 3466, 6315, 5033, 5044, 3512, 3523, 6902, 6921, 6334, 1810, 1833, 4476, 4488, 3039, 3751, 6339, 10126, 10137, 6289, 9930, 9967, 5841, 5852, 3719, 3731, 1752, 4655, 9585, 9642, 8190, 8222, 8016, 9052, 9060, 9123, 3673, 3962, 2974, 2985, 201, 212, 8733, 8753, 5946, 5965, 1528, 193, 4754, 4765, 5776, 2425, 961, 972, 7096, 7115, 3087, 3111, 1777, 1797, 9758, 
10113, 4832, 4843, 5379, 7596, 7694] +fold 8: +[5826, 10718, 10743, 802, 871, 8069, 9115, 4940, 4951, 5047, 5058, 570, 688, 5699, 10167, 10192, 10954, 10995, 1368, 729, 3238, 3249, 10774, 10803, 4297, 4311, 7642, 7740, 579, 590, 1182, 1292, 111, 132, 5355, 10550, 10658, 6136, 6147, 3918, 3929, 4856, 4867, 4202, 4213, 2517, 8670, 8707, 10649, 10689, 858, 869, 8479, 8586, 9553, 9665, 8856, 8874, 10385, 10418, 5201, 5212, 1098, 1109, 3756, 3767, 4129, 4140, 3057, 3068, 4325, 4356, 5750, 5765, 6393, 6437, 6734, 9166, 9222, 8779, 8813, 10327, 10339, 9753, 9772, 2289, 2331, 3529, 3973, 10155, 10166, 6384, 4068, 4079, 5076, 5087, 5219, 5230, 2787, 2798, 222, 233, 1535, 1556, 2374, 2534, 9819, 9831, 1011, 1038, 5041, 5152, 9700, 9719, 6879, 7972, 1393, 1405, 7345, 7545, 6833, 10522, 10581, 3282, 3294, 10762, 10808, 2990, 3001, 5011, 5023, 7315, 7435, 1369, 1438, 265, 343, 4087, 4098, 6951, 6968, 10991, 11033, 11051, 11062, 8262, 1869, 1963, 2735, 2776, 2458, 2471, 9295, 9306, 9003, 9059, 6156, 6167, 3594, 4411, 4422, 6127, 6285, 438, 1275, 1286, 3171, 3199, 5784, 5799, 653, 664, 7195, 7258, 3667, 3682, 8033, 8249, 3013, 3024, 6351, 6809, 668, 689, 1039, 4922, 4933, 9501, 9519, 2576, 2604, 6799, 4433, 4459, 10942, 11010, 7907, 7963, 6994, 7012, 515, 642, 1877, 2010, 1510, 1521, 9855, 9874, 6390, 6463, 6535, 7305, 7393, 8122, 8450, 10629, 10692, 4960, 4971, 2652, 2864, 10361, 10372, 5968, 5987, 5090, 5101, 7762, 7809, 4613, 4624, 6402, 6475, 6527, 1574, 1585, 414, 432, 10756, 10798, 4171, 4186, 3142, 3165, 4195, 4281, 7044, 8010, 10871, 10893, 900, 911, 1550, 1569, 10623, 10912, 9559, 9595, 4569, 4598, 3139, 3170, 7054, 4246, 4257, 1489, 1500, 7439, 7541, 7549, 7643, 7, 18, 1374, 1385, 3206, 3219, 1122, 1134, 9755, 9787, 6382, 7764, 7847, 1299, 1316, 2239, 2250, 10247, 10276, 5436, 5474, 1614, 1633, 7306, 7464, 8679, 8722, 904, 985, 9169, 10277, 1227, 1249, 1731, 1742, 1588, 1625, 53, 64, 9448, 9490, 7748, 7803, 9881, 9898, 37, 48, 2438, 2449, 3262, 3840, 4544, 4579, 4694, 
4705, 5803, 5819, 3816, 3827, 9833, 9844, 146, 157, 10490, 10735, 6357, 8958, 8984, 3317, 3328, 5478, 6098, 1018, 1046, 2290, 2323, 6934, 6949, 1666, 1681, 7202, 7294, 4667, 4678, 6141, 6212, 997, 1008, 10097, 10116, 2048, 2061, 1813, 1834, 5149, 5160, 9317, 9328, 2284, 2313, 10303, 10321, 9261, 9469, 2268, 7773, 7882, 5939, 6050, 11097, 10346, 10546, 8266, 8301, 5655, 5665, 6828, 5680, 5691, 4805, 10461, 10521, 6183, 6194, 4021, 4032, 756, 775, 10834, 11067, 8940, 8996, 10745, 10776, 10977, 11015, 7910, 7950, 595, 607, 3295, 3306, 7168, 3887, 3898, 5494, 5505, 4110, 4121, 9230, 9255, 8268, 8308, 2262, 6372, 5991, 6013, 6035, 6046, 1864, 1898, 8079, 8097, 79, 90, 2402, 2413, 8902, 8923, 9023, 9121, 8145, 8162, 3345, 3356, 614, 625, 11043, 4044, 4055, 10428, 10474, 10562, 10607, 9440, 9466, 1760, 1783, 407, 6435, 6525, 6746, 7433, 7547, 6307, 5884, 5923, 10847, 10859, 8255, 8281, 9813, 10102, 7167, 8123, 8141, 5724, 5735, 1868, 2020, 924, 935, 8636, 8681, 5416, 5442, 10652, 10687, 10889, 10962, 5113, 5124, 3571, 3582, 7175, 7203, 5711, 96, 151, 8481, 8544, 244, 255, 7347, 7480, 8628, 8883, 8449, 8521, 3647, 3663, 4518, 4536, 8088, 8191, 1260, 1271, 6790, 6955, 6989, 7640, 6271, 7651, 8005, 456, 468, 9214, 9239, 7820, 7912, 8566, 8600, 2828, 2839, 5629, 5640, 8393, 8598, 11099, 11111, 505, 702, 8377, 8431, 6387, 6467, 6710, 10216, 10227, 8022, 8196, 4975, 4991, 8571, 8624, 2640, 2669, 4397, 5240, 2095, 2106, 4736, 4747, 10393, 10419, 9458, 9613, 1937, 1948, 2089, 4317, 4819, 4930, 3963, 3985, 7152, 7192, 5499, 5643, 6232, 6243, 2591, 2617, 10561, 10606, 1306, 1323, 4496, 4507, 8176, 8224, 2597, 2875, 2730, 2764, 6591, 6680, 6696, 3977, 3997, 3392, 3404, 6461, 6719, 6752, 7767, 7877, 3083, 3112, 3368, 3379, 7538, 7663, 169, 183, 947, 958, 1150, 1161, 1448, 2545, 2887, 3096, 3296, 6903, 7032, 2565, 2913, 4372, 4390, 6643, 6665, 6681, 1417, 1460, 5353, 7314, 7428, 1645, 1658, 7063, 7074, 6415, 6565, 6599, 5250, 4877, 4888, 384, 9556, 9614, 5404, 5431, 110, 133, 8415, 
8518, 10436, 10481, 475, 487, 5389, 5444, 5258, 1910, 1921, 3181, 8564, 8613, 511, 701, 8756, 8809, 3562, 4039, 9310, 10257, 6011, 6029, 2452, 2531, 4463, 4730, 6065, 1474, 8323, 8363, 11077, 11088, 7353, 7466, 410, 439, 809, 820, 7599, 7709, 9361, 9372, 327, 352, 2142, 2215, 7524, 7641, 2714, 2755, 2952, 2963, 7289, 3253, 3264, 4106, 4264, 272, 283, 5276, 5413, 5458, 6843, 9687, 9698, 5271, 10620, 10669, 4153, 4164, 5289, 6876, 7961, 4815, 2796, 2892, 5534, 5545, 6913, 8001, 6201, 6255, 8829, 789, 800, 4578, 4601, 3489, 3500, 10287, 3849, 3860, 9375, 9386, 7597, 7698, 6531, 6600, 6627, 7238, 7366, 4773, 4784, 1857, 1896, 2639, 2661, 8184, 8772, 9903, 9914, 10888, 10935, 7327, 7453, 1522, 1605, 5574, 5596, 8427, 8464, 3733, 3744, 9450, 9483, 4625, 4636, 9014, 9053, 10825, 10836, 6508, 6560, 6588, 8759, 8815, 2146, 2158, 10039, 10050, 2258, 8945, 8998, 10183, 10203, 9928, 9976, 4897, 4909, 6377, 2227, 2345, 8351, 8430, 1902, 1969, 877, 888, 8943, 8990, 7114, 7132, 2930, 2941, 1759, 1788, 2810, 2821, 5291, 3696, 3707, 2165, 2184, 8312, 8364, 3535, 3547, 2050, 2213, 1965, 1989, 2388, 2399, 3805, 3838, 5859, 5870, 7853, 4716, 4727, 8830, 8855, 8043, 9119, 3455, 3474, 1222, 1253, 5319, 3782, 3793, 739, 759, 5175, 5186, 6407, 6488, 6672, 4229, 4240, 6376, 2595, 2629, 8461, 9039, 6827, 7666, 7731, 3550, 3561, 8031, 9079, 9775, 9798, 523, 3938, 3949, 10883, 10913, 531, 542, 8802, 8039, 9082, 2752, 2126, 2137, 360, 371, 6340, 7397, 7572, 2698, 2731, 4368, 4394, 7805, 7862, 10347, 10358, 7079, 7568, 296, 307, 5569, 5593, 2561, 2860, 4008, 4019, 2477, 2501, 5137, 5148, 5616, 5627, 3424, 3435, 8411, 8497, 3872, 3883, 3630, 3641, 7151, 7177, 8102, 8117, 7664, 7741, 7021, 5342, 6597, 6634, 6651, 7334, 7579, 1324, 1350, 9406, 9522, 10089, 10117, 9939, 9999, 9720, 9744, 5708, 5817, 9634, 9668, 8465, 5262, 9161, 5509, 5520, 9410, 9452, 4475, 4652, 4357, 9926, 9965, 9335, 9346, 2067, 2078, 9176, 9206, 1999, 2026, 2197, 1195, 1206, 5287, 7829, 7897, 10553, 10627, 4426, 4448, 1164, 
1175, 6085, 6102, 5558, 8634, 8659, 5882, 5910, 3045, 3056, 9560, 9617, 9561, 9635, 554, 565, 2344, 2361, 10019, 10032, 395, 6253, 1047, 1069, 329, 349, 1696, 1709, 10062, 10074, 9264, 9275, 7232, 7371, 5263, 5876, 3413, 5915, 5942, 1450, 835, 846, 1081, 1092, 2500, 2523, 9164, 10260, 3600, 3620, 8885, 8901, 3456, 3485, 6322, 5035, 3510, 3521, 6900, 6920, 1826, 1845, 4471, 4482, 3089, 4017, 6368, 10132, 10143, 9950, 9985, 5833, 5844, 3709, 3720, 1746, 4649, 4660, 9620, 9663, 8188, 8220, 8026, 9081, 9071, 9132, 3573, 3862, 2965, 2976, 202, 213, 8714, 8736, 5955, 5976, 1532, 187, 4760, 4771, 5771, 962, 973, 7099, 7122, 3094, 3119, 1774, 1796, 9791, 10124, 4834, 4845, 5468, 7632, 7713] +fold 9: +[5832, 10711, 10730, 787, 849, 8072, 9134, 4942, 4953, 5049, 5060, 644, 5694, 5705, 10188, 10214, 10943, 10988, 1365, 728, 3230, 3243, 10733, 10788, 6313, 4293, 4304, 7544, 7688, 578, 589, 1171, 1281, 98, 121, 5341, 10541, 10650, 6138, 6149, 3919, 3930, 4857, 4868, 4206, 4217, 8676, 8709, 10618, 10671, 853, 864, 8551, 8626, 9565, 9674, 8863, 8877, 10382, 10415, 5202, 5213, 1101, 1114, 3759, 3770, 4132, 4143, 3067, 3078, 4328, 4359, 5743, 5754, 6422, 6713, 6769, 9203, 9247, 6863, 8763, 8801, 10311, 10328, 9743, 9762, 2286, 2330, 3584, 4050, 10158, 10170, 6328, 4070, 4081, 5077, 5088, 5221, 5232, 2783, 2794, 228, 239, 1540, 1565, 2529, 2540, 9817, 9828, 1013, 1042, 5052, 5163, 9703, 9721, 7947, 7997, 1394, 1406, 7312, 7501, 6854, 10491, 10538, 3280, 3292, 10784, 10817, 2996, 3007, 5009, 5021, 7220, 7354, 1380, 1441, 263, 332, 4082, 4093, 6959, 6976, 11027, 11041, 11058, 11070, 1953, 2016, 2728, 2773, 2463, 2483, 9292, 9303, 9029, 9096, 6158, 6169, 6830, 3592, 4412, 4423, 6128, 6286, 400, 482, 1277, 1288, 3180, 3202, 5786, 5802, 660, 674, 7227, 7287, 3668, 3683, 8240, 8307, 3008, 3019, 6335, 671, 691, 4914, 4925, 9496, 9512, 2587, 2615, 6783, 4432, 4456, 10970, 11019, 7901, 7957, 6990, 7010, 5269, 637, 648, 1874, 2001, 1512, 1523, 9860, 9879, 6431, 6478, 6554, 7321, 7408, 8328, 
8550, 10667, 10704, 4965, 4978, 2619, 2858, 10360, 10371, 5956, 5980, 5091, 5102, 7766, 7814, 4605, 4616, 6438, 6509, 6740, 1583, 1594, 411, 429, 10760, 10800, 4178, 4194, 3125, 3149, 4227, 4282, 7031, 7955, 10870, 10891, 9602, 6802, 903, 914, 1558, 1572, 10568, 10879, 9566, 9606, 4549, 4580, 3153, 7058, 4181, 4248, 4259, 1477, 1497, 7458, 7562, 7535, 7639, 14, 26, 1373, 1384, 3211, 3224, 1121, 1133, 9783, 9812, 7680, 7787, 1297, 1309, 2242, 2253, 10240, 10254, 5443, 1610, 1628, 7332, 7488, 8660, 8696, 937, 995, 9160, 10265, 1229, 1252, 1721, 1734, 1616, 1639, 59, 70, 9465, 9495, 1139, 7785, 7845, 9868, 9892, 35, 46, 2435, 2446, 3218, 3440, 4537, 4572, 4688, 4699, 5791, 5813, 3822, 3833, 9838, 9850, 149, 161, 10401, 10646, 5288, 8938, 8966, 3326, 3337, 6092, 6117, 1015, 1043, 2288, 2321, 6924, 6935, 1657, 1677, 7225, 7318, 4665, 4676, 6210, 6223, 991, 1002, 10105, 10125, 2051, 2062, 1807, 1827, 5156, 5167, 9312, 9323, 2287, 2317, 10296, 10307, 9336, 9625, 2269, 7784, 7902, 5894, 6005, 11094, 10368, 10579, 8270, 8305, 5657, 5667, 5682, 5693, 4798, 10445, 10506, 6176, 6187, 4027, 4038, 5334, 745, 767, 10890, 11100, 8921, 8952, 10725, 10761, 10967, 11009, 7938, 7976, 597, 609, 3302, 3313, 7212, 3890, 3901, 5489, 5500, 4108, 4119, 9213, 9251, 8303, 8335, 2263, 6375, 5990, 6010, 6034, 6045, 1854, 1892, 8085, 8114, 77, 88, 2407, 2418, 5576, 8907, 8926, 8046, 9095, 8140, 8158, 3339, 3350, 619, 630, 4048, 4059, 10439, 10483, 10571, 10614, 9409, 9449, 1704, 1771, 365, 6403, 6480, 6628, 7470, 7574, 6304, 5897, 5934, 10843, 10855, 8227, 8257, 9924, 10224, 7150, 8127, 8153, 6811, 5720, 5731, 1995, 2028, 921, 932, 8673, 8719, 5434, 5460, 10656, 10691, 10895, 10969, 5119, 5131, 3577, 7193, 7229, 5716, 21, 101, 8422, 8499, 243, 254, 7272, 7419, 8728, 8994, 8419, 8512, 3650, 3671, 4523, 4546, 8177, 9149, 1255, 1266, 6780, 2862, 6965, 6997, 7628, 6274, 6873, 7984, 458, 472, 9219, 9243, 7856, 7917, 8573, 8605, 2827, 2838, 5638, 5649, 8451, 8625, 11106, 11117, 512, 705, 8346, 8405, 
6416, 6498, 6742, 10220, 10231, 8099, 8211, 4981, 4994, 8584, 8631, 2637, 2667, 4309, 4686, 2098, 2109, 4737, 4748, 10391, 10417, 9525, 9691, 1934, 1945, 2087, 4323, 4841, 4952, 3957, 3978, 5313, 7144, 7183, 5587, 6097, 6237, 6248, 2585, 2611, 10552, 10596, 1322, 1344, 4500, 4511, 8203, 8234, 2774, 2896, 2741, 2772, 6673, 6693, 6704, 3970, 3992, 3386, 3398, 6439, 6718, 6750, 7778, 7888, 6812, 3110, 3136, 3370, 3381, 7474, 7569, 4, 177, 941, 952, 1143, 1154, 1422, 1461, 2865, 2904, 3174, 3418, 6969, 7039, 2558, 2901, 4378, 4396, 6465, 6658, 6674, 1428, 1473, 5371, 7280, 7384, 1646, 1660, 7069, 7080, 6515, 6577, 6608, 4871, 4882, 377, 9577, 9626, 5386, 5410, 115, 137, 8469, 8529, 10462, 10515, 470, 484, 5385, 5422, 1918, 1929, 3189, 8569, 8618, 501, 696, 8741, 8762, 3079, 3950, 9283, 10080, 5999, 6018, 2448, 2509, 4352, 4708, 6060, 6075, 8318, 8360, 11080, 11091, 7364, 7477, 5326, 430, 450, 817, 828, 7489, 7622, 9356, 9367, 325, 350, 2141, 2204, 7475, 7626, 2692, 2721, 2943, 2954, 7219, 3254, 3265, 4128, 4267, 273, 284, 5270, 5409, 5450, 6857, 9682, 9693, 5267, 10534, 10632, 4151, 4162, 6880, 7973, 4814, 2675, 2870, 5530, 5542, 6891, 7940, 6152, 6211, 8808, 8851, 781, 792, 4560, 4592, 3487, 3498, 9155, 3844, 3855, 9378, 9389, 7566, 7683, 6489, 6590, 6622, 7252, 7382, 4775, 4786, 1862, 1899, 2656, 2677, 8694, 9072, 9909, 9920, 10898, 10937, 7240, 7361, 1544, 1619, 5568, 5590, 8434, 8468, 3738, 3749, 9415, 9467, 4633, 4644, 9048, 9098, 10821, 10832, 6472, 6556, 6580, 8793, 8834, 2148, 2163, 10044, 10055, 8993, 9026, 10193, 10212, 9948, 10006, 4901, 4913, 2340, 2369, 8359, 8438, 1962, 1982, 873, 884, 8968, 9004, 7105, 7127, 1708, 2920, 2931, 1770, 1825, 2806, 2817, 3693, 3704, 2156, 2179, 2226, 8321, 8371, 3542, 2198, 2214, 1973, 2381, 2395, 3801, 3812, 5858, 5869, 7848, 7906, 4709, 4720, 8842, 8862, 8052, 9133, 3467, 3483, 1212, 1241, 5323, 3776, 3787, 748, 771, 5180, 5191, 6430, 6494, 6725, 4221, 4232, 2590, 2628, 8173, 8517, 7089, 7635, 7719, 3555, 3566, 8048, 9109, 
9759, 9789, 524, 3935, 3946, 10885, 10916, 532, 543, 8773, 8020, 8073, 2767, 2123, 2134, 361, 372, 7533, 7637, 2695, 2727, 4358, 4387, 7827, 7878, 10326, 10349, 7123, 8011, 292, 303, 5553, 5578, 2571, 2903, 1451, 1691, 3999, 4010, 2482, 2506, 2296, 5136, 5147, 5615, 5626, 3427, 3438, 3033, 8426, 8501, 3866, 3877, 3625, 3636, 7142, 7163, 8098, 8115, 7631, 7739, 7017, 7028, 5472, 6581, 6633, 6648, 7512, 7678, 1332, 1355, 9418, 6344, 10094, 10118, 9964, 10009, 9711, 9739, 5741, 5850, 9639, 9670, 8444, 5317, 5517, 5531, 9399, 9434, 4452, 4630, 4360, 9931, 9974, 9333, 9344, 2066, 2077, 9181, 9209, 1996, 2019, 2045, 2207, 1193, 1204, 7775, 7852, 10525, 10610, 4438, 4457, 1170, 1181, 6096, 6116, 5562, 8640, 8666, 5896, 5929, 3037, 3048, 9533, 9598, 9555, 9631, 555, 566, 2341, 2357, 10025, 10036, 391, 6261, 1033, 1065, 319, 340, 1695, 1707, 10070, 10082, 9265, 9276, 7218, 7355, 5246, 3415, 5912, 5940, 1467, 837, 848, 1074, 1085, 2490, 2519, 9186, 10285, 3597, 3616, 8889, 8908, 3444, 3463, 6317, 5029, 5040, 3519, 3530, 6887, 6912, 1812, 1835, 4472, 4483, 2961, 3095, 10134, 10145, 6321, 9953, 9987, 5835, 5846, 3714, 3725, 1751, 4654, 9572, 9640, 8182, 8214, 8042, 9107, 9046, 9112, 3518, 3784, 2967, 2978, 207, 218, 8734, 8757, 5959, 5979, 1527, 190, 4751, 4762, 5766, 5777, 2427, 967, 978, 7098, 7120, 3092, 3115, 1789, 1802, 9205, 9969, 4826, 4837, 5344, 7621, 7710] +fold 10: +[5831, 10715, 10738, 803, 882, 8028, 9045, 4944, 4955, 5054, 5065, 548, 677, 5695, 5706, 6866, 10171, 10194, 10902, 10963, 730, 3234, 3245, 10783, 10807, 6331, 6796, 4292, 4303, 7580, 7720, 569, 580, 1083, 1183, 6078, 114, 136, 5363, 10531, 10644, 6137, 6148, 3904, 3920, 4847, 4858, 4197, 4210, 2510, 8662, 8702, 10597, 10659, 851, 862, 8509, 8607, 9524, 9570, 8868, 8881, 10389, 10423, 5203, 5214, 1103, 1118, 3761, 3772, 4134, 4145, 3059, 3070, 4322, 4355, 5748, 5761, 6386, 6426, 6721, 9215, 9252, 6832, 8786, 8821, 10323, 10337, 9751, 9767, 2315, 3473, 3839, 10153, 10164, 4069, 4080, 5073, 5084, 5225, 
5236, 2788, 2799, 220, 231, 1542, 2378, 2536, 9810, 9825, 1019, 1054, 5085, 5195, 9708, 9727, 6874, 7967, 1399, 1411, 7357, 7567, 6844, 10508, 10559, 3275, 3287, 10782, 10815, 2989, 3000, 5004, 5015, 7233, 7365, 162, 1414, 1445, 227, 288, 4089, 4100, 6960, 6978, 11008, 11036, 11053, 11064, 8595, 1950, 1983, 2716, 2768, 2464, 2484, 9286, 9297, 9010, 9069, 6161, 6172, 6852, 3586, 3612, 4404, 4415, 6121, 6279, 376, 460, 1073, 1282, 3154, 3190, 5779, 5790, 662, 678, 7200, 7264, 3664, 3680, 8219, 8272, 3014, 3025, 6326, 6870, 676, 709, 4923, 4934, 9504, 9521, 2583, 2612, 4442, 4468, 10997, 11028, 7916, 7975, 6988, 7008, 641, 721, 1849, 1895, 1504, 1515, 9862, 9882, 6424, 6473, 6552, 7329, 7417, 8395, 8639, 10653, 10700, 4962, 4973, 2751, 2877, 10364, 10375, 5954, 5978, 5097, 5108, 7738, 7781, 4612, 4623, 6434, 6503, 6722, 1580, 1591, 424, 446, 10747, 10789, 4170, 4185, 3141, 3161, 4268, 4285, 6980, 7933, 10867, 10886, 6778, 898, 909, 1547, 1567, 10812, 11078, 9534, 9578, 4559, 4587, 3133, 3167, 4188, 4250, 4261, 1465, 1495, 7388, 7515, 7465, 7600, 9, 20, 1379, 1390, 3209, 3221, 1124, 1135, 9779, 9805, 6381, 7699, 7794, 1298, 1313, 2241, 2252, 10239, 10252, 5343, 5451, 1609, 1626, 7293, 7452, 8652, 8690, 983, 1072, 10255, 10289, 1208, 1232, 1732, 1743, 1602, 1629, 51, 62, 9438, 9487, 7765, 7821, 9857, 9883, 36, 47, 2436, 2447, 3240, 3507, 4539, 4577, 4687, 4698, 5807, 5820, 3818, 3829, 9834, 9845, 156, 176, 10357, 10601, 6333, 8944, 8969, 3324, 3335, 5484, 6114, 1037, 2281, 2314, 6927, 6938, 1665, 1680, 7239, 7342, 4661, 4672, 6163, 6213, 989, 1000, 10101, 10122, 2046, 2059, 1816, 1837, 5150, 5161, 9294, 9320, 2248, 2300, 10298, 10309, 9325, 9514, 2264, 7746, 7850, 5972, 6080, 11098, 10302, 10501, 8282, 8319, 5656, 5666, 6842, 5673, 5684, 4801, 10489, 10555, 6177, 6188, 4024, 4035, 758, 777, 10746, 10945, 8942, 8997, 10722, 10754, 10930, 10994, 7884, 7942, 601, 613, 3305, 3316, 7257, 3893, 3905, 5487, 5498, 4112, 4123, 9237, 9257, 8299, 8334, 2274, 5998, 6026, 6044, 
6055, 1883, 2035, 8080, 8100, 73, 84, 2409, 2420, 5632, 8914, 8933, 9031, 9131, 8131, 8149, 3342, 3353, 616, 627, 9232, 4045, 4056, 10433, 10478, 10569, 10611, 9412, 9451, 1726, 1773, 399, 6417, 6504, 6712, 7482, 7583, 6298, 6229, 5919, 5945, 10851, 10866, 8247, 8271, 9747, 10013, 8125, 8148, 6819, 5727, 5738, 1865, 2018, 919, 930, 8654, 8701, 5418, 5445, 10675, 10697, 10948, 11007, 5120, 5132, 3578, 7171, 7197, 95, 140, 8493, 8557, 246, 257, 7317, 7451, 8805, 9061, 8471, 8527, 3649, 3669, 4525, 4547, 8121, 8202, 1258, 1269, 6952, 6986, 7605, 6269, 7971, 444, 461, 9200, 9228, 7874, 7936, 8579, 8612, 2826, 2837, 5633, 5644, 8494, 8635, 11102, 11113, 499, 698, 8341, 8400, 6405, 6484, 6728, 10222, 10233, 8183, 9105, 4990, 5001, 8577, 8629, 2655, 2682, 4276, 4486, 2100, 2111, 4729, 4740, 10395, 10420, 9347, 9536, 1939, 2093, 4321, 4830, 4941, 3971, 3993, 5307, 7165, 7216, 5599, 6119, 6238, 6249, 2588, 2614, 10556, 10600, 1321, 1341, 4495, 4506, 8218, 8242, 2857, 2911, 2746, 2777, 6670, 6692, 6703, 3959, 3981, 3388, 3400, 6483, 6724, 6755, 7723, 7833, 6815, 3102, 3124, 3367, 3378, 7513, 7649, 165, 181, 945, 956, 1147, 1158, 1432, 1490, 2881, 2915, 3251, 3951, 7029, 7999, 2557, 2894, 4371, 4388, 6646, 6666, 6682, 1433, 1476, 5375, 7286, 7396, 1644, 1656, 7064, 7075, 6543, 6587, 6612, 5249, 4876, 4887, 383, 9592, 9638, 5402, 5428, 117, 139, 8513, 8565, 10442, 10493, 356, 479, 5347, 5395, 1919, 1930, 3187, 8542, 8592, 508, 700, 8737, 8760, 3081, 3995, 9272, 10035, 6006, 6022, 2404, 2465, 4641, 4785, 6058, 6070, 8336, 8378, 11071, 11082, 7338, 7460, 428, 448, 815, 826, 7420, 7615, 9349, 9363, 320, 344, 2139, 2183, 7592, 7684, 2710, 2750, 2951, 2962, 3256, 3267, 4172, 4270, 264, 279, 5296, 5348, 5425, 9681, 9692, 10585, 10648, 4155, 4166, 5299, 6884, 7981, 6776, 4809, 4820, 2848, 2895, 5524, 5540, 6907, 7980, 6207, 6288, 8800, 8848, 784, 795, 4548, 4588, 3492, 3503, 3843, 3854, 9376, 9387, 7492, 7625, 6545, 6602, 6630, 7358, 7491, 4782, 4793, 1866, 1903, 2654, 2674, 8317, 
8905, 9907, 9918, 10931, 10959, 7341, 7463, 1599, 1685, 5577, 5597, 8387, 8443, 3739, 3750, 9455, 9485, 4631, 4642, 9022, 9058, 10829, 10840, 6521, 6564, 6592, 8798, 8835, 2154, 2174, 10042, 10053, 8965, 9008, 10176, 10197, 9923, 9959, 4892, 4903, 6380, 2228, 2348, 8388, 8470, 1956, 1979, 883, 8988, 9019, 7111, 7131, 1706, 2926, 2937, 1772, 1847, 2809, 2820, 5312, 3691, 3702, 2162, 2181, 8340, 8399, 3534, 3546, 2041, 2208, 1984, 2375, 2391, 3798, 3809, 5860, 5871, 7782, 7879, 4711, 4722, 8836, 8858, 8030, 9108, 3461, 3478, 1205, 1228, 3779, 3790, 744, 766, 5178, 5189, 6385, 6455, 6524, 4222, 4233, 6332, 2606, 2632, 8306, 8739, 7085, 6823, 7624, 7715, 3552, 3563, 8018, 9056, 9778, 9804, 522, 3933, 3944, 10900, 10936, 529, 540, 8764, 8013, 8071, 2726, 2121, 2132, 367, 488, 6798, 7467, 7595, 2700, 2734, 4316, 4370, 7841, 7893, 10344, 10355, 10416, 7068, 7457, 5378, 300, 311, 5564, 5589, 2567, 2886, 1459, 1690, 3998, 4009, 2489, 2516, 2316, 5143, 5607, 5619, 3428, 3439, 8455, 8514, 3871, 3882, 3633, 7145, 7169, 8095, 8112, 7727, 7772, 7022, 5370, 6616, 6635, 6653, 7445, 7645, 1348, 1362, 9432, 6353, 10086, 10110, 9977, 10018, 5328, 9701, 9728, 5697, 5805, 9615, 9657, 8392, 10235, 5507, 5518, 9404, 9439, 4519, 4697, 4336, 9949, 10003, 9334, 9345, 2074, 2085, 9185, 9212, 1993, 2017, 2190, 2209, 1188, 1199, 5274, 7835, 7905, 10519, 10605, 4431, 4451, 1169, 1180, 6083, 6099, 5563, 8653, 8685, 5893, 5924, 3042, 3053, 9552, 9612, 9568, 9641, 556, 567, 2333, 2353, 9989, 10026, 394, 6252, 6263, 1053, 1071, 326, 347, 1702, 1722, 10065, 10078, 9271, 9282, 7243, 7381, 5257, 5878, 3416, 5895, 5927, 1434, 1475, 834, 845, 1077, 1088, 2481, 2518, 9188, 10288, 3601, 3621, 8875, 8897, 3446, 3471, 6305, 5028, 5039, 3511, 3522, 6895, 6917, 1686, 1831, 4473, 4485, 2994, 3617, 6361, 10136, 10147, 9960, 9996, 10454, 5840, 5851, 3716, 3727, 1750, 4653, 9562, 9637, 8186, 8216, 8065, 9147, 9054, 9117, 3462, 3728, 2969, 2980, 203, 214, 8730, 8746, 5964, 1525, 188, 4758, 4769, 5769, 968, 979, 
7093, 7107, 3090, 3113, 1791, 1804, 9891, 10279, 4828, 4839, 5356, 7611, 7707] diff --git a/LikelihoodModel.py b/LikelihoodModel.py new file mode 100644 index 0000000..177d6f5 --- /dev/null +++ b/LikelihoodModel.py @@ -0,0 +1,53 @@ +import numbers +import numpy as np +import sklearn + +from sklearn.utils import check_X_y, check_array, column_or_1d +from sklearn.utils.multiclass import check_classification_targets + +from sklearn.externals.joblib import Parallel, \ + delayed # For parallel computing TODO: check if we need to be parallel or not +from sklearn.utils.validation import has_fit_parameter, check_is_fitted + + +class LHClassifier(): + """Base class for ordinal meta-classifier. + + """ + + def __init__(self): + return self + + def fit(self, X, y, sample_weight=None): + return self + + def _fit(self, X, y, max_samples=None, max_depth=None, sample_weight=None): + return self + + def predict(self, X): + X = check_array(X, accept_sparse=['csr', 'csc']) + + # ---------------------------------------------Our CODE + n_samples = X.shape[0] + prediction = np.zeros((n_samples, 1)) + + for i in range(0, n_samples): + if X[i, "Scenario"] == "C": + if X[i, "VoterType"] == "LB": + prediction[i] = 2 # Q' vote + else: + prediction[i] = 1 # Q vote + else: + if X[i, "Scenario"] in ["E", "F"]: + if X[i, "VoterType"] == "TRT": + prediction[i] = 1 # Q vote + else: + prediction[i] = 2 # Q' vote + + else: + prediction[i] = 1 # Q vote + + return prediction + + + diff --git a/MaximumLikelihoodModel.py b/MaximumLikelihoodModel.py new file mode 100644 index 0000000..21bed7a --- /dev/null +++ b/MaximumLikelihoodModel.py @@ -0,0 +1,53 @@ +import numbers +import numpy as np +import sklearn + +from sklearn.utils import check_X_y, check_array, column_or_1d +from sklearn.utils.multiclass import check_classification_targets + +from sklearn.externals.joblib import Parallel, \ + delayed # For parallel computing TODO: check if we need to be parallel or not +from sklearn.utils.validation import 
has_fit_parameter, check_is_fitted + + +class MLHClassifier(): + """Base class for ordinal meta-classifier. + + """ + + def __init__(self): + return self + + def fit(self, X, y, sample_weight=None): + return self + + def _fit(self, X, y, max_samples=None, max_depth=None, sample_weight=None): + return self + + def predict(self, X): + X = check_array(X, accept_sparse=['csr', 'csc']) + + # ---------------------------------------------Our CODE + n_samples = X.shape[0] + prediction = np.zeros((n_samples, 1)) + + for i in range(0, n_samples): + if X[i, "Scenario"] == "C": + if X[i, "VoterType"] == "LB": + prediction[i] = 2 # Q' vote + else: + prediction[i] = 1 # Q vote + else: + if X[i, "Scenario"] in ["E", "F"]: + if X[i, "VoterType"] == "TRT": + prediction[i] = 1 # Q vote + else: + prediction[i] = 2 # Q' vote + + else: + prediction[i] = 1 # Q vote + + return prediction + + + diff --git a/OneShotDataPreperation.py b/OneShotDataPreperation.py new file mode 100644 index 0000000..407613c --- /dev/null +++ b/OneShotDataPreperation.py @@ -0,0 +1,73 @@ +import numbers +import numpy as np +import pandas as pd +import sklearn +from scipy.stats import kurtosis +from scipy.stats import skew + +from sklearn.ensemble.forest import RandomForestClassifier +from sklearn.tree import DecisionTreeClassifier +from sklearn.neural_network import MLPClassifier +from keras.layers import Input, Dense +from keras.models import Model + +from datetime import datetime +# Model and feature selection +from sklearn.feature_selection import SelectKBest +from sklearn.model_selection import KFold +from sklearn.feature_selection import chi2 +# Classification metrics +from sklearn.metrics import f1_score +from sklearn.metrics import precision_score +from sklearn.metrics import recall_score +from sklearn.metrics import accuracy_score + +from sklearn import preprocessing +from PersonalClassifier import PersonalClassifier +from sklearn.ensemble import AdaBoostClassifier +from sklearn.ensemble import 
VotingClassifier +from sklearn.svm import SVC +from sklearn.linear_model import logistic + + +def _data_conversion(data_df, is_target, le): + if is_target: + data_df = data_df.astype("category") + data_df = le.fit_transform(data_df) + else: + for c in data_df.columns: + if data_df[c].dtype in (object, str, np.object, bool): + if not (data_df[c].dtype in (int, float)): + data_df[c] = le.fit_transform(data_df[c]) + return data_df + +class OneShotDataPreparation(): + """ Class for One Shot data preparation + + """ + @staticmethod + def _prepare_dataset(features_df): + le = sklearn.preprocessing.LabelEncoder() + features_encoded_df = pd.DataFrame( + preprocessing.normalize(preprocessing.scale(_data_conversion(features_df, False, le).as_matrix()), axis=0, + norm='max')) + + # target_le = sklearn.preprocessing.LabelEncoder() + # target_df = _data_conversion(target_df, True, target_le) + + return features_encoded_df#, target_df + + + + + + + + + + + + + + + diff --git a/OneShotFeatureGenerator.py b/OneShotFeatureGenerator.py new file mode 100644 index 0000000..f7788f0 --- /dev/null +++ b/OneShotFeatureGenerator.py @@ -0,0 +1,404 @@ +import numbers +import numpy as np +import pandas as pd +import sklearn +from scipy.stats import kurtosis +from scipy.stats import skew + +from sklearn.ensemble.forest import RandomForestClassifier +from sklearn.tree import DecisionTreeClassifier +from sklearn.neural_network import MLPClassifier +from keras.layers import Input, Dense +from keras.models import Model + +from datetime import datetime +# Model and feature selection +from sklearn.feature_selection import SelectKBest +from sklearn.model_selection import KFold +from sklearn.feature_selection import chi2 +# Classification metrics +from sklearn.metrics import f1_score +from sklearn.metrics import precision_score +from sklearn.metrics import recall_score +from sklearn.metrics import accuracy_score + +from sklearn import preprocessing +from PersonalClassifier import PersonalClassifier 
# NOTE(review): the classifier imports that previously sat here
# (AdaBoostClassifier, VotingClassifier, SVC, sklearn.linear_model.logistic)
# were unused in this module and have been removed.


def _autoencode(features):
    """Compress *features* with a single-hidden-layer autoencoder.

    Trains a Dense autoencoder whose bottleneck is n_columns / 5 wide and
    returns the encoded (bottleneck) representation of *features*.
    Relies on the module-level keras imports (Input, Dense, Model).
    """
    encoding_dim = int(len(features.columns) / 5)
    input_votes = Input(shape=(len(features.columns),))
    encoded = Dense(encoding_dim, activation='relu')(input_votes)
    decoded = Dense(len(features.columns), activation='tanh')(encoded)
    autoencoder = Model(input_votes, decoded)
    encoder = Model(input_votes, encoded)

    autoencoder.compile(optimizer='adadelta', loss='MSE')
    autoencoder.fit(features, features,
                    epochs=150,
                    batch_size=256,
                    shuffle=True, verbose=False)

    return encoder.predict(features)


class OneShotFeatureGenerator():
    """Base class for one-shot voting feature generation.

    actions_df    -- table mapping (scenario, action) -> action_name
    scenarios_df  -- table mapping preference-position patterns -> scenario
    n_candidates  -- 3 or 4 candidates per election
    """

    def __init__(self, actions_df, scenarios_df, n_candidates):
        self.actions_df = actions_df
        self.scenarios_df = scenarios_df
        self.n_candidates = n_candidates

    def _get_actions(self):
        """All known action labels."""
        return ['TRT', 'WLB', 'SLB', 'CMP', 'DOM']

    def _get_strategic_actions(self):
        """Actions representing strategic (non-truthful) votes."""
        return ['WLB', 'SLB', 'CMP']

    def _get_preference_features(self):
        """Names of the preference columns (Pref1..PrefN)."""
        preference_features = ['Pref1', 'Pref2', 'Pref3']
        if self.n_candidates == 4:
            preference_features.append('Pref4')
        return preference_features

    def _get_gap_pref_features(self):
        """Names of the preference-based poll-gap columns."""
        features = ['GAP12_pref_poll', 'GAP23_pref_poll', 'GAP13_pref_poll']
        if self.n_candidates == 4:
            features.extend(['GAP14_pref_poll', 'GAP24_pref_poll', 'GAP34_pref_poll'])
        return features

    def _get_scenarios_by_actions(self, actions):
        """Union of the scenarios in which any of *actions* is available."""
        scenarios = set()
        for action in actions:
            scenarios = scenarios.union(self._get_scenarios_by_action(action))
        return scenarios

    def _get_scenarios_by_action(self, action):
        """Scenarios whose action table contains *action* as a substring of
        the action name (so 'WLB' also matches 'CMP\\WLB')."""
        matching = self.actions_df.iloc[
            [action in str(name) for name in self.actions_df['action_name']], ]
        return set(row[1].scenario for row in matching.iterrows())

    def _generate_action_name(self, df):
        """Add an 'Action_name' column resolved from (Scenario, Action).

        BUG FIX: the original called self._get_action_name(df, x[0]), which
        does not match _get_action_name's (self, vote_row) signature and
        raised TypeError; pass the row itself.
        """
        df['Action_name'] = [self._get_action_name(row[1]) for row in df.iterrows()]
        return df

    def _get_action_name(self, vote_row):
        """Resolve the action name of one vote row from the actions table."""
        return (self.actions_df.loc[
            (self.actions_df.scenario == vote_row['Scenario']) &
            (self.actions_df.action == int(vote_row['Action'])), 'action_name']).values[0]

    def _get_scenario(self, vote_row):
        # Stub in the original (assigned a local and fell through to `pass`);
        # real scenario resolution lives in
        # OneShotStaticFeatureGenerator._generate_scenario.
        pass

    def _convert_prediction(self, df):
        """Map a class prediction (1..N = preference rank) to the candidate
        actually voted for, stored in 'VotePrediction'.

        BUG FIX: the original only handled Prediction == 1 and overwrote the
        result with every preference column in turn; each class k must map to
        PrefK (matching the module-level _convert_prediction elsewhere in
        this project).
        """
        for rank, preference_feature in enumerate(self._get_preference_features(), start=1):
            mask = df['Prediction'] == rank
            df.loc[mask, 'VotePrediction'] = df.loc[mask, preference_feature]
        return df


class OneShotStaticFeatureGenerator(OneShotFeatureGenerator):
    """Features computable from a single vote row (no voter history)."""

    def __init__(self, actions_df, scenarios_df, n_candidates):
        super().__init__(actions_df, scenarios_df, n_candidates)

    def _generate_scenario(self, df):
        """Attach 'Scenario' / 'Scenario_type' by looking up the preference
        position pattern in the scenarios table."""
        if self.n_candidates == 4:
            def get_scenario(vote, scenarios_table, attr):
                return scenarios_table[
                    (scenarios_table["Pref1_pos"] == vote["Pref1_pos"]) &
                    (scenarios_table["Pref2_pos"] == vote["Pref2_pos"]) &
                    (scenarios_table["Pref3_pos"] == vote["Pref3_pos"]) &
                    (scenarios_table["Pref4_pos"] == vote["Pref4_pos"])][attr].values[0]
        else:
            def get_scenario(vote, scenarios_table, attr):
                return scenarios_table[
                    (scenarios_table["Pref1_pos"] == vote["Pref1_pos"]) &
                    (scenarios_table["Pref2_pos"] == vote["Pref2_pos"]) &
                    (scenarios_table["Pref3_pos"] == vote["Pref3_pos"])][attr].values[0]

        df["Scenario"] = [get_scenario(vote[1], self.scenarios_df, "scenario") for vote in df.iterrows()]
        df["Scenario_type"] = [get_scenario(vote[1], self.scenarios_df, "name") for vote in df.iterrows()]
        return df

    def _generate_pref_positions(self, df):
        """Rank each voter's preferred candidates by poll votes (descending)
        and store the rank as PrefK_pos (1 = most poll votes)."""
        for vote in df.iterrows():
            row = vote[1]
            prefs = list(range(1, self.n_candidates + 1))
            pref_votes = [row["VotesCand" + str(row["Pref" + str(p)]) + "PreVote"] for p in prefs]
            combined = pd.DataFrame({'votes': pref_votes, 'pref': prefs})
            combined = combined.sort_values(by="votes", ascending=0).reset_index(drop=True)
            for index in range(0, len(combined)):
                column_name = "Pref" + str(combined["pref"][index]) + "_pos"
                df.loc[df['VoterID'] == row.VoterID, column_name] = int(index + 1)
        return df

    def _generate_pref_gaps(self, df):
        """Add Votes{PrefK}PreVote: poll votes of the voter's K-th preference."""
        for preference_feature in self._get_preference_features():
            df["Votes" + preference_feature + "PreVote"] = [
                row[1]["VotesCand" + str(row[1][preference_feature]) + "PreVote"]
                for row in df.iterrows()]
        return df

    def _generate_gaps(self, df):
        """Add poll-gap features, both position based and preference based."""
        X = df
        # NOTE(review): even with 4 candidates, leader/runnerup/third are
        # computed over candidates 1-3 only, exactly as the original did —
        # confirm this is intended.
        X['VotesLeader_poll'] = X[['VotesCand1PreVote', 'VotesCand2PreVote', 'VotesCand3PreVote']].max(axis=1)
        X['VotesRunnerup_poll'] = X[['VotesCand1PreVote', 'VotesCand2PreVote', 'VotesCand3PreVote']].apply(
            np.median, axis=1)
        X['VotesThird_poll'] = X[['VotesCand1PreVote', 'VotesCand2PreVote', 'VotesCand3PreVote']].min(axis=1)

        X['GAP12_poll'] = X['VotesLeader_poll'] - X['VotesRunnerup_poll']
        X['GAP23_poll'] = X['VotesRunnerup_poll'] - X['VotesThird_poll']
        X['GAP13_poll'] = X['VotesLeader_poll'] - X['VotesThird_poll']

        # Preference-based gaps: synchronized across scenarios, better for ML.
        X['GAP12_pref_poll'] = X['VotesPref1PreVote'] - X['VotesPref2PreVote']
        X['GAP23_pref_poll'] = X['VotesPref2PreVote'] - X['VotesPref3PreVote']
        X['GAP13_pref_poll'] = X['VotesPref1PreVote'] - X['VotesPref3PreVote']

        if self.n_candidates == 4:
            X['VotesFourth_poll'] = X[['VotesCand1PreVote', 'VotesCand2PreVote',
                                       'VotesCand3PreVote', 'VotesCand4PreVote']].min(axis=1)
            X['GAP14_poll'] = X['VotesLeader_poll'] - X['VotesFourth_poll']
            X['GAP24_poll'] = X['VotesRunnerup_poll'] - X['VotesFourth_poll']
            X['GAP34_poll'] = X['VotesThird_poll'] - X['VotesFourth_poll']

            X['GAP14_pref_poll'] = X['VotesPref1PreVote'] - X['VotesPref4PreVote']
            X['GAP24_pref_poll'] = X['VotesPref2PreVote'] - X['VotesPref4PreVote']
            X['GAP34_pref_poll'] = X['VotesPref3PreVote'] - X['VotesPref4PreVote']
        return X

    def _static_feature_generation(self, df):
        """Run all static (history-free) feature generators in order."""
        df = self._generate_pref_gaps(df)
        df = self._generate_gaps(df)
        df = self._generate_pref_positions(df)
        df = self._generate_scenario(df)
        return df


class OneShotDynamicFeatureGenerator(OneShotFeatureGenerator):
    """Features derived from a voter's history in the training folds."""

    def __init__(self, actions_df, scenarios_df, n_candidates):
        super().__init__(actions_df, scenarios_df, n_candidates)

    def _count_action_for_voter(self, action, voter_df):
        """How many of the voter's rows carry *action* (substring match)."""
        return np.count_nonzero(
            [action in self._get_action_name(row[1]) for row in voter_df.iterrows()])

    def _generate_A_ratios(self, df, X_train, y_train, voter):
        """Per-voter action ratios (TRT/WLB/SLB/CMP/DOM) and counters.

        ratio = times chosen / times available; 0 when never available.
        """
        voter_df = pd.concat([X_train.loc[X_train['VoterID'] == voter.VoterID, ], y_train],
                             axis=1, join='inner')
        for action in self._get_actions():
            available = self._get_scenarios_by_action(action)
            availability_counter = np.count_nonzero(
                [row[1].Scenario in available for row in voter_df.iterrows()])
            action_counter = self._count_action_for_voter(action, voter_df)
            df.loc[df['VoterID'] == voter.VoterID, action + '-ratio'] = float(
                action_counter / availability_counter if availability_counter > 0 else 0)
            df.loc[df['VoterID'] == voter.VoterID, action + '-counter'] = float(action_counter)
        return df

    def _generate_is_random_voter(self, df):
        """Flag likely-random voters: chose a dominated action >= 2 times."""
        df['Is_Random'] = [count >= 2 for count in df['DOM-counter']]
        return df

    def _generate_voter_type(self, df):
        """Classify voters by thresholds on their action ratios.

        Assignment order matters: TRT (ratio > 0.9) overrides LB (> 0.8).
        """
        df['VoterType'] = 'Other'
        df.loc[[float(row[1]['WLB-ratio']) > 0.8 for row in df.iterrows()], 'VoterType'] = 'LB'
        df.loc[[float(row[1]['TRT-ratio']) > 0.9 for row in df.iterrows()], 'VoterType'] = 'TRT'
        return df

    def _generate_feature_aggregation_class_dependant(self, df, X_train, y_train, scenarios,
                                                      voter, feature_name, aggregation_func):
        """Aggregate *feature_name* over the voter's training rows, separately
        per chosen action (class), restricted to *scenarios*."""
        X = df
        X_train, y_train = (X_train.loc[X_train['Scenario'].isin(scenarios)],
                            y_train.loc[X_train['Scenario'].isin(scenarios)])
        for action in range(1, self.n_candidates + 1):
            values = [float(row[1][feature_name]) for row in
                      X_train.loc[(X_train['VoterID'] == voter.VoterID) &
                                  (y_train == action)].iterrows()]
            if len(values) > 0:
                X.loc[X['VoterID'] == voter.VoterID,
                      feature_name + '_action' + str(action) + '_' +
                      aggregation_func.__name__] = aggregation_func(values)
        return X

    def _generate_action_aggregation_features(self, df, X_train, y_train, voter):
        """Aggregate the voter's chosen action number over strategic scenarios."""
        X = df
        feature_name = "Action"
        scenarios = self._get_scenarios_by_actions(self._get_strategic_actions())
        X_train, y_train = (X_train.loc[X_train['Scenario'].isin(scenarios)],
                            y_train.loc[X_train['Scenario'].isin(scenarios)])
        voter_train = X_train.loc[(X_train['VoterID'] == voter.VoterID)]
        for aggregation_func in [np.average, np.std, np.median]:
            X.loc[X['VoterID'] == voter.VoterID,
                  feature_name + "_" + aggregation_func.__name__] = aggregation_func(
                [float(y_train[row[0]]) for row in voter_train.iterrows()])
        return X

    def _generate_gaps_features(self, df, X_train, y_train, voter):
        """Class-dependent aggregations of every preference-gap feature."""
        X = df
        features = self._get_gap_pref_features()
        aggregators = [np.average, np.std, np.median, np.min, np.max, skew, kurtosis]
        scenarios = self._get_scenarios_by_actions(self._get_strategic_actions())
        for aggregator in aggregators:
            for feature in features:
                X = self._generate_feature_aggregation_class_dependant(
                    X, X_train, y_train, scenarios, voter, feature, aggregator)
        return X

    def _generate_gap_dif_features(self, df):
        """Current gap minus the voter's per-class aggregated gap."""
        X = df
        features = self._get_gap_pref_features()
        aggregators = [np.average, np.median, np.min, np.max]
        for action in range(1, self.n_candidates + 1):
            for feature in features:
                for aggregator in aggregators:
                    base = feature + '_action' + str(action) + '_' + aggregator.__name__
                    X[base + '_dif'] = X[feature] - X[base]
        return X

    def _dynamic_feature_generation(self, df, X_train, y_train):
        """Full per-voter dynamic pipeline: ratios, gap aggregates, encoding."""
        X = df
        a_ratio_columns, gaps_columns = [], []
        all_voters = pd.DataFrame(X[["VoterID", "SessionIDX"]].drop_duplicates())
        for voter in all_voters.iterrows():
            before_columns = len(X.columns)
            X = self._generate_A_ratios(X, X_train, y_train, voter[1])
            if len(a_ratio_columns) == 0:
                a_ratio_columns = list(range(before_columns, len(X.columns)))

            before_columns = len(X.columns)
            X = self._generate_gaps_features(X, X_train, y_train, voter[1])
            if len(gaps_columns) == 0:
                gaps_columns = list(range(before_columns, len(X.columns)))

            X = self._generate_action_aggregation_features(X, X_train, y_train, voter[1])

        # Voters who never chose some action have NaN aggregates; impute with
        # the column mean before building the difference features.
        X = X.fillna(X.mean())

        before_columns = len(X.columns)
        X = self._generate_gap_dif_features(X)
        gaps_dif_columns = list(range(before_columns, len(X.columns)))

        total_gaps_columns = a_ratio_columns + gaps_columns + gaps_dif_columns
        for gap_pref_feature in self._get_gap_pref_features():
            total_gaps_columns.append(X.columns.get_loc(gap_pref_feature))

        normalized_gap_fs = pd.DataFrame(preprocessing.normalize(X.iloc[:, total_gaps_columns]))
        encoded_gap_fs = pd.DataFrame(_autoencode(normalized_gap_fs))

        X = pd.concat([X, encoded_gap_fs], axis=1, join='inner')
        # Keep the ratio columns; drop the raw + dif gap columns in favor of
        # their autoencoded representation.
        X = X.drop(X.columns[gaps_columns + gaps_dif_columns], axis=1)

        X = self._generate_is_random_voter(X)
        X = self._generate_voter_type(X)
        return X
X.fillna(1000) #fill na with some high value (maybe maximum) because the voters with na values didn't choose the action (say q'', 3) in all gaps they incounterd. + + before_columns = len(X.columns) + X = self._generate_gap_dif_features(X) + gaps_dif_columns = list(range(before_columns, len(X.columns))) + + total_gaps_columns = a_ratio_columns + gaps_columns + gaps_dif_columns + + gap_pref_features = self._get_gap_pref_features() + for gap_pref_feature in gap_pref_features: + total_gaps_columns.append(X.columns.get_loc(gap_pref_feature)) + + normalized_gap_fs = pd.DataFrame(preprocessing.normalize(X.iloc[:, total_gaps_columns])) + encoded_gap_fs = pd.DataFrame(_autoencode(normalized_gap_fs)) + + X = pd.concat([X, encoded_gap_fs], axis=1, join='inner') + + X = X.drop(X.columns[gaps_columns + gaps_dif_columns], axis=1) + + X = self._generate_is_random_voter(X) + X = self._generate_voter_type(X) + + return X + + + + + + + diff --git a/OneShot_NewAnalysis.py b/OneShot_NewAnalysis.py new file mode 100644 index 0000000..c358f78 --- /dev/null +++ b/OneShot_NewAnalysis.py @@ -0,0 +1,731 @@ +# -*- coding: utf-8 -*- +""" +Created on Fri Aug 17 11:16:03 2018 + +@author: Adam +""" +import numbers +import numpy as np +import pandas as pd +import sklearn +from scipy.stats import kurtosis +from scipy.stats import skew + +from sklearn.ensemble.forest import RandomForestClassifier +from sklearn.tree import DecisionTreeClassifier +from sklearn.neural_network import MLPClassifier +from keras.layers import Input, Dense +from keras.models import Model + +from datetime import datetime +# Model and feature selection +from sklearn.feature_selection import SelectKBest +from sklearn.model_selection import KFold +from sklearn.feature_selection import chi2 +# Classification metrics +from sklearn.metrics import f1_score +from sklearn.metrics import precision_score +from sklearn.metrics import recall_score +from sklearn.metrics import accuracy_score + +from sklearn import preprocessing +from 
PersonalClassifier import PersonalClassifier +from sklearn.ensemble import AdaBoostClassifier +from sklearn.ensemble import VotingClassifier +from sklearn.svm import SVC +from sklearn.linear_model import logistic + +def _convert_prediction(X): + X.loc[X['Prediction']==1,"VotePrediction"] = X.loc[X['Prediction']==1,"Pref1"] + X.loc[X['Prediction']==2,"VotePrediction"] = X.loc[X['Prediction']==2,"Pref2"] + X.loc[X['Prediction']==3,"VotePrediction"] = X.loc[X['Prediction']==3,"Pref3"] + + return X + +def _generate_action_name(X): + # Generate action name + # Action mapping table + d = {'scenario': ['A', 'A', 'A', 'B', 'B', 'B', 'C', 'C', 'C', 'D', 'D', 'D', 'E', 'E', 'E', 'F', 'F', 'F'], + 'action': [1, 2, 3, 1, 2, 3, 1, 2, 3, 1, 2, 3, 1, 2, 3, 1, 2, 3], + 'action_name': ['TRT', 'DOM', 'DOM', 'TRT', 'DOM', 'DOM', 'TRT', 'WLB', 'DOM', 'TRT', 'DOM', 'SLB', 'TRT', + 'CMP\WLB', 'DOM', 'TRT', 'CMP', 'SLB']} + action_map_df = pd.DataFrame(data=d) + + X['Action_name'] = [(action_map_df.loc[(action_map_df.scenario == str(x[1]['Scenario'])) & ( + action_map_df.action == int(X.loc[x[0], 'Action'])), 'action_name']).values[0] for x in + X.iterrows()] + + return X + +def _data_cleaning(data_df,is_target ,le): + if is_target: + data_df = data_df.astype("category") + data_df = le.fit_transform(data_df) + else: + for c in data_df.columns: + if data_df[c].dtype in (object, str, np.object, bool): + if not (data_df[c].dtype in (int, float)): + data_df[c] = le.fit_transform(data_df[c]) + return data_df + +def _generate_A_ratios(X, X_train, y_train, voter): + """Generate A ratios - That is TRT-ratio, CMP-ratio, WLB-ratio, SLB-ratio, DOM-ratio + Action is in {TRT,DLB,SLB,WLB,CMP,DOM} + Scenario is in {A,B,C,D,E,F} + """ + start_ind_col = len(X.columns) + + availability_counter = np.count_nonzero([x[1].Scenario in ['A','B','C','D','E','F'] for x in X_train.loc[X_train['VoterID'] == voter.VoterID].iterrows()]) + X.loc[X['VoterID'] == voter.VoterID, 'TRT-ratio'] = np.count_nonzero((['TRT' in 
x[1]['Action_name'] for x in X_train.loc[X_train['VoterID'] == voter.VoterID].iterrows()]))/availability_counter if availability_counter>0 else 0 + + availability_counter = np.count_nonzero([x[1].Scenario in ['C','E'] for x in X_train.loc[X_train['VoterID'] == voter.VoterID].iterrows()]) + X.loc[X['VoterID'] == voter.VoterID, 'WLB-ratio'] = np.count_nonzero((['WLB' in x[1]['Action_name'] for x in X_train.loc[X_train['VoterID'] == voter.VoterID].iterrows()]))/availability_counter if availability_counter>0 else 0 + + availability_counter = np.count_nonzero([x[1].Scenario in ['D','F'] for x in X_train.loc[X_train['VoterID'] == voter.VoterID].iterrows()]) + X.loc[X['VoterID'] == voter.VoterID, 'SLB-ratio'] = np.count_nonzero((['SLB' in x[1]['Action_name'] for x in X_train.loc[X_train['VoterID'] == voter.VoterID].iterrows()]))/availability_counter if availability_counter>0 else 0 + #X.loc[X['VoterID'] == voter.VoterID, 'LB-ratio'] = np.count_nonzero((['LB' in x[1]['Action_name'] for x in X_train.loc[X_train['VoterID'] == voter.VoterID].iterrows()]))/np.count_nonzero([x[1].Scenario in ['C','D','E','F'] for x in X_train.loc[X_train['VoterID'] == voter.VoterID].iterrows()]) + availability_counter = np.count_nonzero([x[1].Scenario in ['E','F'] for x in X_train.loc[X_train['VoterID'] == voter.VoterID].iterrows()]) + X.loc[X['VoterID'] == voter.VoterID, 'CMP-ratio'] = np.count_nonzero((['CMP' in x[1]['Action_name'] for x in X_train.loc[X_train['VoterID'] == voter.VoterID].iterrows()]))/availability_counter if availability_counter>0 else 0 + + availability_counter = np.count_nonzero([x[1].Scenario in ['A','B','C','D','E','F'] for x in X_train.loc[X_train['VoterID'] == voter.VoterID].iterrows()]) + X.loc[X['VoterID'] == voter.VoterID, 'DOM-ratio'] = np.count_nonzero((['DOM' in x[1]['Action_name'] for x in X_train.loc[X_train['VoterID'] == voter.VoterID].iterrows()]))/availability_counter if availability_counter>0 else 0 + + X.loc[X['VoterID'] == voter.VoterID, 'DOM-counter'] = 
np.count_nonzero((['DOM' in x[1]['Action_name'] for x in X_train.loc[X_train['VoterID'] == voter.VoterID].iterrows()])) + + X['TRT-ratio'] = (X['TRT-ratio']).astype(float) + X['WLB-ratio'] = (X['WLB-ratio']).astype(float) + X['SLB-ratio'] = (X['SLB-ratio']).astype(float) + X['CMP-ratio'] = (X['CMP-ratio']).astype(float) + X['DOM-ratio'] = (X['DOM-ratio']).astype(float) + X['DOM-counter'] = (X['DOM-counter']).astype(float) + + end_ind_col = len(X.columns) + return X, list(range(start_ind_col, end_ind_col)) + +def _generate_is_random_voter(X): + """Identify random voters using the rule of DOM-counter >= 2 (excluding SLB actions)""" + X['Is_Random'] = [x >= 2 for x in X['DOM-counter']] + + return X + +def _generate_voter_type(X): + """Generate Voter Type using thresholds over the A-ratio values""" + X['VoterType'] = 'Other' + #X.loc[ [int(x[1]['CMP-ratio'])>=0.7 for x in X.iterrows()], 'VoterType'] = 'CMP' + X.loc[ [float(x[1]['WLB-ratio'])>0.8 for x in X.iterrows()], 'VoterType'] = 'LB' + X.loc[ [float(x[1]['TRT-ratio'])>0.9 for x in X.iterrows()], 'VoterType'] = 'TRT' + + return X + +def _generate_pref_gaps(X): + X["VotesPref1PreVote"] = [x[1]["VotesCand"+str(x[1]["Pref1"])+"PreVote"] for x in X.iterrows()] + X["VotesPref2PreVote"] = [x[1]["VotesCand"+str(x[1]["Pref2"])+"PreVote"] for x in X.iterrows()] + X["VotesPref3PreVote"] = [x[1]["VotesCand"+str(x[1]["Pref3"])+"PreVote"] for x in X.iterrows()] + + return X + +def _generate_gaps(X): + """Generate Gaps features""" + + X['VotesLeader_poll'] = X[['VotesCand1PreVote','VotesCand2PreVote','VotesCand3PreVote']].max(axis = 1) + X['VotesRunnerup_poll'] = X[['VotesCand1PreVote','VotesCand2PreVote','VotesCand3PreVote']].apply(np.median, axis=1) + X['VotesLoser_poll'] = X[['VotesCand1PreVote','VotesCand2PreVote','VotesCand3PreVote']].min(axis = 1) + + + X['GAP12_poll'] = X['VotesLeader_poll'] - X['VotesRunnerup_poll'] + X['GAP23_poll'] = X['VotesRunnerup_poll'] - X['VotesLoser_poll'] + X['GAP13_poll'] = 
X['VotesLeader_poll'] - X['VotesLoser_poll'] + + #Preference based gaps - I think more suitable for ML for it's more synchronized across the scenarios + + # for x in X.iterrows(): + # vote = x[1] + # candPref1 = 'VotesCand' + str(vote.Pref1) + 'PreVote' + # candPref2 = 'VotesCand' + str(vote.Pref2) + 'PreVote' + # candPref3 = 'VotesCand' + str(vote.Pref3) + 'PreVote' + # X.loc[x[0], 'VotesPref1_poll'] = X.loc[x[0], candPref1] + # X.loc[x[0], 'VotesPref2_poll'] = X.loc[x[0], candPref2] + # X.loc[x[0], 'VotesPref3_poll'] = X.loc[x[0], candPref3] + # + # + X['GAP12_pref_poll'] = X['VotesPref1PreVote'] - X['VotesPref2PreVote'] + X['GAP23_pref_poll'] = X['VotesPref2PreVote'] - X['VotesPref3PreVote'] + X['GAP13_pref_poll'] = X['VotesPref1PreVote'] - X['VotesPref3PreVote'] + + + return X + +def _generate_scenario_type(X): + #initialize + X['TRT'] = 0 + X['WLB'] = 0 + X['SLB'] = 0 + X['CMP'] = 0 + + X.loc[[x[1].Scenario in ['A','B','C','D','E','F'] for x in X],'TRT'] = 1 + X.loc[[x[1].Scenario in ['C','E'] for x in X],'WLB'] = 1 + X.loc[[x[1].Scenario in ['D','F'] for x in X],'SLB'] = 1 + X.loc[[x[1].Scenario in ['E','F'] for x in X],'CMP'] = 1 + + return X + +def _generate_feature_aggregation_class_dependant(X, X_train, y_train, voter, feature_name, aggregation_func): + action1_list = [ float(x[1][feature_name]) for x in X_train.loc[(X_train['VoterID'] == voter.VoterID) & (y_train == 1)].iterrows()] + if len(action1_list)>0: + X.loc[X['VoterID'] == voter.VoterID, feature_name + '_action1_' + aggregation_func.__name__] = aggregation_func(action1_list) + + action2_list = [ float(x[1][feature_name]) for x in X_train.loc[(X_train['VoterID'] == voter.VoterID) & (y_train == 2)].iterrows()] + if len(action2_list)>0: + X.loc[X['VoterID'] == voter.VoterID, feature_name + '_action2_' + aggregation_func.__name__] = aggregation_func(action2_list) + + action3_list = [ float(x[1][feature_name]) for x in X_train.loc[(X_train['VoterID'] == voter.VoterID) & (y_train == 3)].iterrows()] + 
if len(action3_list)>0: + X.loc[X['VoterID'] == voter.VoterID, feature_name + '_action3_' + aggregation_func.__name__] = aggregation_func(action3_list) + + + return X + +def _generate_action_aggregation_features(X, X_train, y_train, voter): + aggregators = [np.average, np.std, np.median] + feature_name = "Action" + + for aggregation_func in aggregators: + X.loc[X['VoterID'] == voter.VoterID, feature_name + "_" + aggregation_func.__name__] = aggregation_func([float(y_train[x[0]]) for x in X_train.loc[(X_train['VoterID'] == voter.VoterID) & (X_train['Scenario'].isin(['C','D','E','F']))].iterrows()]) + + return X + + +def _generate_gaps_features(X, X_train, y_train, voter): + start_ind_col = len(X.columns) + + feature12 = 'GAP12_pref_poll' + feature23 = 'GAP23_pref_poll' + feature13 = 'GAP13_pref_poll' + + features = [feature12, feature23, feature13] + aggregators = [np.average, np.std, np.median, np.min, np.max, skew, kurtosis] + scenarios = ['C','D','E','F'] + + for aggregator in aggregators: + for feature in features: + X = _generate_feature_aggregation_class_dependant(X, X_train.loc[X_train['Scenario'].isin(scenarios)], y_train, voter, feature, aggregator) + + + end_ind_col = len(X.columns) + + return X, list(range(start_ind_col, end_ind_col)) + +def _generate_gap_dif_features(X): + + start_ind_col = len(X.columns) + + feature12 = 'GAP12_pref_poll' + feature23 = 'GAP23_pref_poll' + feature13 = 'GAP13_pref_poll' + + + X[feature12 + '_action1_' + np.average.__name__ + '_dif'] = X[feature12] - X[feature12 + '_action1_' + np.average.__name__ ] + X[feature12 + '_action2_' + np.average.__name__ + '_dif'] = X[feature12] - X[feature12 + '_action2_' + np.average.__name__ ] + X[feature12 + '_action3_' + np.average.__name__ + '_dif'] = X[feature12] - X[feature12 + '_action3_' + np.average.__name__ ] + + X[feature23 + '_action1_' + np.average.__name__ + '_dif'] = X[feature23] - X[feature23 + '_action1_' + np.average.__name__ ] + X[feature23 + '_action2_' + np.average.__name__ 
+ '_dif'] = X[feature23] - X[feature23 + '_action2_' + np.average.__name__ ] + X[feature23 + '_action3_' + np.average.__name__ + '_dif'] = X[feature23] - X[feature23 + '_action3_' + np.average.__name__ ] + + X[feature13 + '_action1_' + np.average.__name__ + '_dif'] = X[feature13] - X[feature13 + '_action1_' + np.average.__name__ ] + X[feature13 + '_action2_' + np.average.__name__ + '_dif'] = X[feature13] - X[feature13 + '_action2_' + np.average.__name__ ] + X[feature13 + '_action3_' + np.average.__name__ + '_dif'] = X[feature13] - X[feature13 + '_action3_' + np.average.__name__ ] + + X[feature12 + '_action1_' + np.median.__name__ + '_dif'] = X[feature12] - X[feature12 + '_action1_' + np.median.__name__ ] + X[feature12 + '_action2_' + np.median.__name__ + '_dif'] = X[feature12] - X[feature12 + '_action2_' + np.median.__name__ ] + X[feature12 + '_action3_' + np.median.__name__ + '_dif'] = X[feature12] - X[feature12 + '_action3_' + np.median.__name__ ] + + X[feature23 + '_action1_' + np.median.__name__ + '_dif'] = X[feature23] - X[feature23 + '_action1_' + np.median.__name__ ] + X[feature23 + '_action2_' + np.median.__name__ + '_dif'] = X[feature23] - X[feature23 + '_action2_' + np.median.__name__ ] + X[feature23 + '_action3_' + np.median.__name__ + '_dif'] = X[feature23] - X[feature23 + '_action3_' + np.median.__name__ ] + + X[feature13 + '_action1_' + np.median.__name__ + '_dif'] = X[feature13] - X[feature13 + '_action1_' + np.median.__name__ ] + X[feature13 + '_action2_' + np.median.__name__ + '_dif'] = X[feature13] - X[feature13 + '_action2_' + np.median.__name__ ] + X[feature13 + '_action3_' + np.median.__name__ + '_dif'] = X[feature13] - X[feature13 + '_action3_' + np.median.__name__ ] + + X[feature12 + '_action1_' + np.max.__name__ + '_dif'] = X[feature12] - X[feature12 + '_action1_' + np.max.__name__ ] + X[feature12 + '_action2_' + np.max.__name__ + '_dif'] = X[feature12] - X[feature12 + '_action2_' + np.max.__name__ ] + X[feature12 + '_action3_' + 
np.max.__name__ + '_dif'] = X[feature12] - X[feature12 + '_action3_' + np.max.__name__ ] + + X[feature23 + '_action1_' + np.max.__name__ + '_dif'] = X[feature23] - X[feature23 + '_action1_' + np.max.__name__ ] + X[feature23 + '_action2_' + np.max.__name__ + '_dif'] = X[feature23] - X[feature23 + '_action2_' + np.max.__name__ ] + X[feature23 + '_action3_' + np.max.__name__ + '_dif'] = X[feature23] - X[feature23 + '_action3_' + np.max.__name__ ] + + X[feature13 + '_action1_' + np.max.__name__ + '_dif'] = X[feature13] - X[feature13 + '_action1_' + np.max.__name__ ] + X[feature13 + '_action2_' + np.max.__name__ + '_dif'] = X[feature13] - X[feature13 + '_action2_' + np.max.__name__ ] + X[feature13 + '_action3_' + np.max.__name__ + '_dif'] = X[feature13] - X[feature13 + '_action3_' + np.max.__name__ ] + + X[feature12 + '_action1_' + np.min.__name__ + '_dif'] = X[feature12] - X[feature12 + '_action1_' + np.min.__name__ ] + X[feature12 + '_action2_' + np.min.__name__ + '_dif'] = X[feature12] - X[feature12 + '_action2_' + np.min.__name__ ] + X[feature12 + '_action3_' + np.min.__name__ + '_dif'] = X[feature12] - X[feature12 + '_action3_' + np.min.__name__ ] + + X[feature23 + '_action1_' + np.min.__name__ + '_dif'] = X[feature23] - X[feature23 + '_action1_' + np.min.__name__ ] + X[feature23 + '_action2_' + np.min.__name__ + '_dif'] = X[feature23] - X[feature23 + '_action2_' + np.min.__name__ ] + X[feature23 + '_action3_' + np.min.__name__ + '_dif'] = X[feature23] - X[feature23 + '_action3_' + np.min.__name__ ] + + X[feature13 + '_action1_' + np.min.__name__ + '_dif'] = X[feature13] - X[feature13 + '_action1_' + np.min.__name__ ] + X[feature13 + '_action2_' + np.min.__name__ + '_dif'] = X[feature13] - X[feature13 + '_action2_' + np.min.__name__ ] + X[feature13 + '_action3_' + np.min.__name__ + '_dif'] = X[feature13] - X[feature13 + '_action3_' + np.min.__name__ ] + + + end_ind_col = len(X.columns) + + return X, list(range(start_ind_col, end_ind_col)) + +def 
_static_feature_generation(X): + X = _generate_pref_gaps(X) + X = _generate_gaps(X) + #X = _generate_scenario_type(X) + + return X + +def _dynamic_feature_generation(X, X_train, y_train): + all_voters = pd.DataFrame(X[["VoterID", "SessionIDX"]].drop_duplicates()) + for voter in all_voters.iterrows(): + X, added_columns = _generate_A_ratios(X, X_train, y_train, voter[1]) + if len(added_columns)>0: + a_ratio_columns = added_columns + X, added_columns = _generate_gaps_features(X, X_train, y_train, voter[1]) + if len(added_columns)>0: + gaps_columns = added_columns + X = _generate_action_aggregation_features(X, X_train, y_train, voter[1]) + + #Gaps features encoding + X = X.fillna(X.mean()) #X.fillna(1000) #fill na with some high value (maybe maximum) because the voters with na values didn't choose the action (say q'', 3) in all gaps they incounterd. + X, gaps_dif_columns = _generate_gap_dif_features(X) + total_gaps_columns = a_ratio_columns + gaps_columns + gaps_dif_columns + total_gaps_columns.append(X.columns.get_loc("GAP12_pref_poll")) + total_gaps_columns.append(X.columns.get_loc("GAP23_pref_poll")) + total_gaps_columns.append(X.columns.get_loc("GAP13_pref_poll")) + + normalized_gap_fs = pd.DataFrame(preprocessing.normalize(X.iloc[:,total_gaps_columns])) + encoded_gap_fs = pd.DataFrame(_autoencode(normalized_gap_fs)) + + X = pd.concat([X, encoded_gap_fs], axis=1, join='inner') + + X = X.drop(X.columns[gaps_columns + gaps_dif_columns], axis=1) + + X = _generate_is_random_voter(X) + X = _generate_voter_type(X) + + + + return X + +def _evaluation(X, clf, target): + #tests + + #static features generation + X = _static_feature_generation(X) + # Encoders definitions + le = sklearn.preprocessing.LabelEncoder() + target_le = sklearn.preprocessing.LabelEncoder() + + # Split into features and target + features_df, target_df = X.drop([target], axis=1),X[target] + + + n_folds = 10 + results_df = pd.DataFrame(columns=['Measure', 'Result']) + # Initialize metrics: + 
results_df.loc[0] = ['PRECISION', 0] + results_df.loc[1] = ['RECALL', 0] + results_df.loc[2] = ['F_MEASURE', 0] + + + # 10 fold cross validation + kf = KFold(n_folds, shuffle=True, random_state=1) # 10 fold cross validation + for train_indices, test_indices in kf.split(features_df, target_df): + # Feature Generation + features_train = features_df.loc[[ii for ii in train_indices],] + targets_train = target_df[[ii for ii in train_indices]] + features_ext_df = _dynamic_feature_generation(features_df, features_train, targets_train) + features_ext_df = features_ext_df.drop(["Action_name"], axis=1) + + # encoding the dataframes + features_encoded_df = pd.DataFrame(preprocessing.normalize(preprocessing.scale(_data_cleaning(features_ext_df, False, le)))) + target_encoded_df = _data_cleaning(target_df, True, target_le) + + + # make training and testing datasets + features_train = features_encoded_df.loc[[ii for ii in train_indices],] + features_test = features_encoded_df.loc[[ii for ii in test_indices],] + targets_train = target_encoded_df[[ii for ii in train_indices]] + targets_test = target_encoded_df[[ii for ii in test_indices]] + + # Train + clf.fit(X = features_train.as_matrix(), y = targets_train) + # Test + predicated = clf.predict(features_test.as_matrix()) + + # Measures + results_df.iloc[0, 1] = results_df.iloc[0, 1] + precision_score(targets_test, predicated, average='weighted') + results_df.iloc[1, 1] = results_df.iloc[1, 1] + recall_score(targets_test, predicated, average='weighted') + results_df.iloc[2, 1] = results_df.iloc[2, 1] + f1_score(targets_test, predicated, average='weighted') + + results_df.Result = results_df.Result.apply(lambda x: x / n_folds) + return results_df + +def intersection(lst1, lst2): + lst3 = [value for value in lst1 if value in lst2] + return lst3 + +def _read_roy_folds(folds_file): + lines = folds_file.read().split('\n') + folds = list() + for index in range(0,len(lines)): + line = lines[index] + if not ('fold' in line) and line != 
'': + folds.append([int(ii) for ii in line[1:len(line)-1].split(',')]) + + + return folds + +def _get_loo_folds(X): + folds = list() + for x in X.iterrows(): + fold = [x[0]] + folds.append(fold) + + return folds + + +def _autoencode(features): + #test + + encoding_dim = int(len(features.columns)/5) + input_votes = Input(shape = (len(features.columns),)) + encoded = Dense(encoding_dim, activation='relu')(input_votes) + decoded = Dense(len(features.columns), activation='tanh')(encoded) + autoencoder = Model(input_votes, decoded) + encoder = Model(input_votes, encoded) + + # encoded_input = Input(shape=(encoding_dim,)) +# decoder_layer = autoencoder.layers[-1] + # decoder = Model(encoded_input, decoder_layer(encoded_input)) + + autoencoder.compile(optimizer='adadelta', loss='MSE') + + autoencoder.fit(features, features, + epochs=150, + batch_size=256, + shuffle=True,verbose=False) + + encoded_votes = encoder.predict(features) + + return encoded_votes + + +def _evaluation_roy_splits(raw_data, clfs, target, folds, scenario_filter): + data = raw_data.copy() + #static features generation + data = _static_feature_generation(data) + # Encoders definitions + le = sklearn.preprocessing.LabelEncoder() + target_le = sklearn.preprocessing.LabelEncoder() + + + n_folds = 10 + + + results_df = pd.DataFrame(columns=['Classifier', 'PRECISION', 'RECALL','F_MEASURE','ACCURACY']) + + prediction = pd.DataFrame(np.matrix([])) + feature_importances = pd.DataFrame() + features_train = pd.DataFrame() + # 10 fold cross validation + for i in range(0,len(folds)): + # Split into features and target + features_df, target_df = data.drop([target], axis=1),data[target] + + test_indices = data.index[[x[1].RoundIndex in folds[i] for x in data.iterrows()]].tolist() + train_indices = data.index[[not (x[1].RoundIndex in folds[i] or x[1].Scenario == scenario_filter) for x in data.iterrows()]].tolist() + # Feature Generation + features_train = features_df.loc[[ii for ii in train_indices],] + targets_train 
= target_df[[ii for ii in train_indices]] + features_ext_df = _dynamic_feature_generation(features_df, features_train , targets_train) + features_ext_df = features_ext_df.drop(["Action_name"], axis=1) + features_ext_df = features_ext_df.drop(["Vote"], axis=1) + # encoding the dataframes + features_encoded_df = pd.DataFrame(preprocessing.normalize(preprocessing.scale(_data_cleaning(features_ext_df, False, le).as_matrix()), axis=0, norm='max'))# + target_encoded_df = target_df #_data_cleaning(target_df, True, target_le) + + + # make training and testing datasets + features_train = features_encoded_df.loc[[ii for ii in train_indices],] + features_test = features_encoded_df.loc[[ii for ii in test_indices],] + targets_train = target_encoded_df[[ii for ii in train_indices]] + targets_test = target_encoded_df[[ii for ii in test_indices]] + + +# training = pd.concat([features_train, pd.DataFrame(targets_train)], axis=1, join='inner') +# testing = pd.concat([features_test, pd.DataFrame(targets_test)], axis=1, join='inner') +# training.to_csv("training_fold_"+str(i)+".csv") +# testing.to_csv("testing_fold_"+str(i)+".csv") + + for j in range(0,len(clfs)): + clf = clfs[j] + clf_name = str(clf).split("(")[0] + if i == 0: + #Initialize metrics + results_df.loc[j] = [str(clf), 0, 0, 0, 0] + + if clf != "baseline": + + # Train + clf.fit(X = features_train.as_matrix(), y = targets_train) + # Test + predicated = clf.predict(features_test.as_matrix()) + + # #feature importance + # current_feature_importances = pd.DataFrame(clf.feature_importances_, + # index=features_ext_df.columns, + # columns=['importance']).sort_values('importance', + # ascending=False) + # if len(feature_importances) == 0: + # feature_importances = current_feature_importances + # else: + # feature_importances['importance'] = feature_importances['importance'] + current_feature_importances['importance'] + # + # print(feature_importances) + + else: + features_ext_df_test = features_ext_df.loc[[ii for ii in 
test_indices],] + n_samples = len(features_ext_df_test) + predicated = np.zeros((n_samples),dtype=int) + + features_ext_df_test["Prediction"] = 1 + features_ext_df_test.loc[(features_ext_df_test["Scenario"] == "C") & (features_ext_df_test["VoterType"] == "LB"), "Prediction"] = 2 + features_ext_df_test.loc[(features_ext_df_test['Scenario'].isin(["E","F"])) & (features_ext_df_test["VoterType"] != "TRT"), "Prediction"] = 2 + + predicated = features_ext_df_test["Prediction"] + + features_ext_df_test = pd.concat([features_ext_df_test, pd.DataFrame(predicated)], axis=1) + + + + + #aggregate results + if len(prediction) == 0: + prediction = pd.DataFrame(predicated) + else: + prediction = pd.concat([prediction, pd.DataFrame(predicated)]) + + raw_data.loc[[ii for ii in test_indices],"Prediction" + "_" + clf_name] = predicated + + print(str(clf) +": F_score = " + str(f1_score(targets_test, predicated, average='weighted'))) + # Measures + results_df.iloc[j, 1] = results_df.iloc[j, 1] + precision_score(targets_test, predicated, average='weighted') + results_df.iloc[j, 2] = results_df.iloc[j, 2] + recall_score(targets_test, predicated, average='weighted') + results_df.iloc[j, 3] = results_df.iloc[j, 3] + f1_score(targets_test, predicated, average='weighted') + results_df.iloc[j, 4] = results_df.iloc[j, 4] + accuracy_score(targets_test, predicated) + + results_df.Result = results_df.Result.apply(lambda x: x / n_folds) + return results_df, prediction, raw_data, feature_importances, features_train + + +def _build_data_by_folds(data, folds): + transformed_data = pd.DataFrame() + for i in range(0,len(folds)): + # Split into features and target + fold_indices = data.index[[x[1].RoundIndex in folds[i] for x in data.iterrows()]].tolist() + fold_df = data.iloc[fold_indices,:] + + if len(transformed_data) == 0: + transformed_data = fold_df + else: + transformed_data = pd.concat([transformed_data, fold_df]) + return transformed_data + +# +# data = 
pd.read_excel("datasets/oneshot/full_data_no_timeout_no_first_round.xlsx") +# +# data_with_action_name = _generate_action_name(data) +# data_with_action_name.to_csv("datasets\\oneshot\\full_data_no_timeout_no_first_round_with_action_name.csv") +# + +# #Load rawdata +# oneshot_df = pd.read_csv("datasets/oneshot/full_data_no_timeout_no_first_round_with_action_name.csv")#read_excel("datasets/oneshot/NewData.xlsx") +# +# #sorted_data = _build_data_by_folds(oneshot_df) +# #sorted_data.to_csv("Results\\sorted_fulldata.csv") +# # +# #print("F_score = " + str(f1_score(sorted_data.Action, nn_prediction[1:], average='weighted'))) +# # +# +# raw_data = oneshot_df +# +# +# +# raw_data = raw_data.drop(['GameConfiguration','FileName','PlayerID','DateTime','CandName1', 'CandName2','CandName3','WinnerPostVote', 'VotesCand1', 'VotesCand2', 'VotesCan3', 'PointsPostVote','ResponseTime'], axis=1) + + +classifier = RandomForestClassifier(n_estimators=100)#MLPClassifier(hidden_layer_sizes = (92), max_iter = 500) + +# +# #Model Phase +# #rf_results_df = _evaluation_roy_splits(data, RandomForestClassifier(n_estimators=100), 'Action') +# +# +# nn_results_df, nn_prediction, data_with_pred = _evaluation_roy_splits(raw_data, classifier, 'Action') +# +# nn_results_df.to_csv("Results\\rf_new_performance_df.csv") +# nn_prediction.to_csv("Results\\rf_new_prediction.csv") +# data_with_pred.to_csv("Results\\rf_new_data_with_pred.csv") +# + +#baseline_results_df, baseline_prediction, baseline_full_pred = _evaluation_roy_splits(data, "baseline", 'Action') +# +# +#baseline_results_df.to_csv("Results\\baseline_performance_df.csv") +#baseline_prediction.to_csv("Results\\baseline_prediction.csv") +#baseline_full_pred.to_csv("Results\\baseline_full_pred.csv") + + +# +# # #other data sets +# data = pd.read_excel("datasets/oneshot/tal.xlsx") +# data_with_action_name = _generate_action_name(data) +# data_with_action_name.to_csv("datasets\\oneshot\\tal_no_timeout_with_action_name.csv") +# folds = 
_read_roy_folds(open("datasets/oneshot/tal_folds.txt", "r")) +# # +# +# oneshot_df = pd.read_csv("datasets/oneshot/tal_no_timeout_with_action_name.csv")#read_excel("datasets/oneshot/NewData.xlsx") +# oneshot_df = oneshot_df.fillna(oneshot_df.mean()) +# +# +# raw_data = oneshot_df +# +# raw_data = raw_data.drop(['DateTime','Util4','LabExperiment','ExperimentComments','CandName1', 'CandName2','CandName3','CanName4','GameID', 'PrefsObserved','Pref4', 'VotesCand1','ResponseTime', 'VotesCand2', 'VotesCand3'], axis=1) +# +# #Model Phase +# #rf_results_df = _evaluation_roy_splits(data, RandomForestClassifier(n_estimators=100), 'Action') +# +# +# tal_nn_performance_df, tal_nn_prediction, tal_nn_pred = _evaluation_roy_splits(raw_data, classifier, 'Action', folds) +# +# tal_nn_performance_df.to_csv("Results\\tal_rf_performance_df.csv") +# tal_nn_prediction.to_csv("Results\\tal_rf_prediction.csv") +# tal_nn_pred.to_csv("Results\\tal_rf_pred.csv") +# +# +# print("F_score = " + str(f1_score(tal_nn_pred.Action, tal_nn_pred.nn_predction.astype(int), average='weighted'))) +# +# #scharm +# scharm_data = pd.read_excel("datasets/oneshot/schram.xlsx") +# +# data_with_action_name = _generate_action_name(scharm_data) +# data_with_action_name.to_csv("datasets\\oneshot\\scharm_data_with_action_name.csv") +# #other data sets +# scharm_df = pd.read_csv("datasets/oneshot/scharm_data_with_action_name.csv")#read_excel("datasets/oneshot/NewData.xlsx") +# scharm_df = scharm_df.fillna(scharm_df.mean()) +# folds = _read_roy_folds(open("datasets/oneshot/schram_folds.txt", "r")) +# +# raw_data = scharm_df +# +# #raw_data = raw_data.drop(['DateTime','Util4','LabExperiment','ExperimentComments','CandName1', 'CandName2','CandName3','CanName4','GameID', 'PrefsObserved','Pref4', 'VotesCand1','ResponseTime', 'VotesCand2', 'VotesCand3'], axis=1) +# +# #Model Phase +# #rf_results_df = _evaluation_roy_splits(data, RandomForestClassifier(n_estimators=100), 'Action') +# +# +# scharm_nn_performance_df, 
scharm_nn_prediction, scharm_nn_pred = _evaluation_roy_splits(raw_data, classifier, 'Action', folds) +# +# scharm_nn_performance_df.to_csv("Results\\scharm_rf_performance_df.csv") +# scharm_nn_prediction.to_csv("Results\\scharm_rf_prediction.csv") +# scharm_nn_pred.to_csv("Results\\scharm_rf_pred.csv") +# +# print("F_score = " + str(f1_score(scharm_nn_pred.Action, scharm_nn_pred.nn_predction.astype(int), average='weighted'))) + + + +# # d32 +# d32_data = pd.read_excel("datasets/oneshot/d32_updated.xlsx") +# +# +# data_with_action_name = _generate_action_name(d32_data) +# data_with_action_name.to_csv("datasets\\oneshot\\d32_data_with_action_name.csv") +# #other data sets +d32_df = pd.read_csv("datasets/oneshot/d32_data_with_action_name.csv")#read_excel("datasets/oneshot/NewData.xlsx") +d32_df = d32_df.fillna(d32_df.mean()) +folds = _read_roy_folds(open("datasets/oneshot/d32_folds.txt", "r")) + + +#Model Phase +#rf_results_df = _evaluation_roy_splits(data, RandomForestClassifier(n_estimators=100), 'Action') + +for scenario in ['NONE']:#['A','B','C','D','E','F','NONE']: + raw_data = d32_df.copy() + raw_data = raw_data.drop( + ['DateTime', 'CandName1', 'CandName2', 'CandName3', 'VotesCand1', 'ResponseTime', 'VotesCand2', 'VotesCan3', + 'PointsPostVote', 'WinnerPostVote', 'AU', 'KP', 'LD', 'LDLB', 'CV', 'BW', 'HF', 'heuristic', 'parameter'], + axis=1) + +# loo_folds = _get_loo_folds(raw_data) + personal_rf_clf = PersonalClassifier(id_index=raw_data.columns.get_loc("VoterID"), n_upsample=3)#RandomForestClassifier(n_estimators=100) # MLPClassifier(hidden_layer_sizes = (92), max_iter = 500) + personal_nn_clf = PersonalClassifier(id_index=raw_data.columns.get_loc("VoterID"), base_classifier=MLPClassifier(hidden_layer_sizes = (92), max_iter = 500), n_upsample=10, general_base_classifier=True)#RandomForestClassifier(n_estimators=100) # MLPClassifier(hidden_layer_sizes = (92), max_iter = 500) + neural_net_cf = MLPClassifier(hidden_layer_sizes = (92), max_iter = 500) + rf_clf = 
RandomForestClassifier(n_estimators=100) + dt_clf = DecisionTreeClassifier() + adaboost_clf = AdaBoostClassifier(n_estimators=200) + svm_clf = SVC() + logistics_clf = logistic.LogisticRegression() + + classifiers = [personal_rf_clf, personal_nn_clf, neural_net_cf, rf_clf, dt_clf, adaboost_clf, svm_clf, logistics_clf] + + d32_performance_df,d32_prediction, d32_pred, feature_importances, features_train = _evaluation_roy_splits(raw_data, classifiers, 'Action', folds, scenario) + + d32_performance_df.to_csv("Results\\d32_performance_df_"+scenario+".csv") +# d32_prediction.to_csv("Results\\d32_rf_prediction_"+scenario+".csv") + d32_pred.to_csv("Results\\d32_pred_"+scenario+".csv") +# feature_importances.to_csv("Results\\d32_feature_importance_"+scenario+".csv") +# features_train.to_csv("Results\\d32_feature_train_"+scenario+".csv") + + print("F_score = " + str(f1_score(d32_pred.Action, d32_pred.Prediction.astype(int), average='weighted'))) + + +#d36 +# d36_data = pd.read_excel("datasets/oneshot/d36_updated.xlsx") +# +# data_with_action_name = _generate_action_name(d36_data) +# data_with_action_name.to_csv("datasets\\oneshot\\d36_data_with_action_name.csv") +# #other data sets +# d36_df = pd.read_csv("datasets/oneshot/d36_data_with_action_name.csv")#read_excel("datasets/oneshot/NewData.xlsx") +# d36_df = d36_df.fillna(d36_df.mean()) +# folds = _read_roy_folds(open("datasets/oneshot/d36_folds.txt", "r")) +# +# +# +# #Model Phase +# #rf_results_df = _evaluation_roy_splits(data, RandomForestClassifier(n_estimators=100), 'Action') +# +# for scenario in ['A','B','C','D','E','F','NONE']: +# classifier = RandomForestClassifier(n_estimators=100) # MLPClassifier(hidden_layer_sizes = (92), max_iter = 500) +# raw_data = d36_df.copy() +# +# raw_data = raw_data.drop(['DateTime', 'CandName1', 'CandName2', 'CandName3', 'VotesCand1', 'ResponseTime', 'VotesCand2', 'VotesCan3', +# 'PointsPostVote', 'WinnerPostVote', 'AU', 'KP', 'LD', 'LDLB', 'CV', 'BW', 'HF', 'heuristic', 'parameter'], +# 
axis=1) +# +# d36_nn_performance_df,d36_nn_prediction, d36_nn_pred,feature_importances,features_train = _evaluation_roy_splits(raw_data, classifier, 'Action', folds,scenario) +# +# d36_nn_performance_df.to_csv("Results\\d36_rf_performance_df_"+scenario+".csv") +# d36_nn_prediction.to_csv("Results\\d36_rf_prediction_"+scenario+".csv") +# d36_nn_pred.to_csv("Results\\d36_rf_pred_"+scenario+".csv") +# feature_importances.to_csv("Results\\d36_feature_importance_"+scenario+".csv") +# features_train.to_csv("Results\\d36_feature_train_"+scenario+".csv") +# +# d36_nn_pred = _convert_prediction(d36_nn_pred) +# +# d36_nn_pred.to_csv("Results\\V_d36_rf_pred_"+scenario+".csv") +# +# +# print("F_score = " + str(f1_score(d36_nn_pred.Action, d36_nn_pred.Prediction.astype(int), average='weighted'))) +# +# + diff --git a/OneShot_NewAnalysis_N4.py b/OneShot_NewAnalysis_N4.py new file mode 100644 index 0000000..2cdea1c --- /dev/null +++ b/OneShot_NewAnalysis_N4.py @@ -0,0 +1,238 @@ +# -*- coding: utf-8 -*- +""" +Created on Fri Aug 17 11:16:03 2018 + +@author: Adam +""" +import numbers +import numpy as np +import pandas as pd +import sklearn +from scipy.stats import kurtosis +from scipy.stats import skew +from keras.layers import Input, Dense +from keras.models import Model + +from sklearn.ensemble.forest import RandomForestClassifier +from sklearn.tree import DecisionTreeClassifier +from sklearn.neural_network import MLPClassifier + +from datetime import datetime +# Model and feature selection +from sklearn.feature_selection import SelectKBest +from sklearn.model_selection import KFold +from sklearn.feature_selection import chi2 +# Classification metrics +from sklearn.metrics import f1_score +from sklearn.metrics import precision_score +from sklearn.metrics import recall_score +from sklearn.metrics import accuracy_score + +from sklearn import preprocessing +from PersonalClassifier import PersonalClassifier +from sklearn.ensemble import AdaBoostClassifier +from sklearn.ensemble import 
VotingClassifier +from sklearn.svm import SVC +from sklearn.linear_model import logistic +from OneShotFeatureGenerator import OneShotStaticFeatureGenerator +from OneShotFeatureGenerator import OneShotDynamicFeatureGenerator +from OneShotDataPreperation import OneShotDataPreparation +from OrdinalClassifier import OrdinalClassifier +from BaselineModel import DecisionTreeBaseline +from BayesRuleModel import BayesRuleClassifier +from LikelihoodModel import LHClassifier +from MaximumLikelihoodModel import MLHClassifier + +def _convert_prediction(X, column_name, n_candidates): + X.loc[X[column_name]==1,"Vote_"+column_name] = X.loc[X[column_name]==1,"Pref1"] + X.loc[X[column_name]==2,"Vote_"+column_name] = X.loc[X[column_name]==2,"Pref2"] + X.loc[X[column_name]==3,"Vote_"+column_name] = X.loc[X[column_name]==3,"Pref3"] + if n_candidates == 4: + X.loc[X[column_name] ==4, "Vote_" + column_name] = X.loc[X[column_name] == 4, "Pref4"] + + return X + +def _read_roy_folds(folds_file): + lines = folds_file.read().split('\n') + folds = list() + for index in range(0,len(lines)): + line = lines[index] + if not ('fold' in line) and line != '': + folds.append([int(ii) for ii in line[1:len(line)-1].split(',')]) + + + return folds + +def _get_loo_folds(X): + folds = list() + for x in X.iterrows(): + fold = [x[1].RoundIndex] + folds.append(fold) + + return folds + +def _get_k_folds(X,k): + folds = list() + kf = KFold(k, shuffle=True, random_state=1) # 10 fold cross validation + for train_indices, test_indices in kf.split(X): + folds.append(test_indices) + return folds + +def _evaluation(raw_data, clfs, target, folds, scenario_filter, action_table_df, scenarios_df, n_candidates = 3): + data = raw_data.copy() + + oneshot_static_fg = OneShotStaticFeatureGenerator(action_table_df, scenarios_df, n_candidates) + oneshot_dyn_fg = OneShotDynamicFeatureGenerator(action_table_df, scenarios_df, n_candidates) + + #static features generation + data = oneshot_static_fg._static_feature_generation(data) 
+ + n_folds = len(folds) + + + results_df = pd.DataFrame(columns=['Classifier','FOLD','PRECISION','RECALL','F_MEASURE','ACCURACY']) + + prediction = pd.DataFrame(np.matrix([])) + feature_importances = pd.DataFrame() + features_train = pd.DataFrame() + # 10 fold cross validation + for i in range(0,len(folds)): + + print(str(100*(i/len(folds)))+"%") + # Split into features and target + features_df, target_df = data.drop([target], axis=1),data[target] + + test_indices = data.index[[x[1].RoundIndex in folds[i] for x in data.iterrows()]].tolist() + train_indices = data.index[[not (x[1].RoundIndex in folds[i] or x[1].Scenario == scenario_filter) for x in data.iterrows()]].tolist() + # Feature Generation + features_train = features_df.loc[[ii for ii in train_indices],] + targets_train = target_df[[ii for ii in train_indices]] + features_ext_df = oneshot_dyn_fg._dynamic_feature_generation(features_df, features_train , targets_train) + features_ext_df = features_ext_df.drop(["Vote"], axis=1) + # encoding the dataframes + features_encoded_df = OneShotDataPreparation._prepare_dataset(features_ext_df) + target_encoded_df = target_df + # make training and testing datasets + features_train = features_encoded_df.loc[[ii for ii in train_indices],] + features_test = features_encoded_df.loc[[ii for ii in test_indices],] + targets_train = target_encoded_df[[ii for ii in train_indices]] + targets_test = target_encoded_df[[ii for ii in test_indices]] + + for j in range(0,len(clfs)): + clf = clfs[j] + clf_name = str(clf).split("(")[0] + if i == 0: + #Initialize metrics + results_df.loc[j] = [str(clf), i + 1,0, 0, 0, 0] + + # Train + clf.fit(X = features_train.as_matrix(), y = targets_train) + # Test + predicated = clf.predict(features_test.as_matrix()) + + # #feature importance + # current_feature_importances = pd.DataFrame(clf.feature_importances_, + # index=features_ext_df.columns, + # columns=['importance']).sort_values('importance', + # ascending=False) + # if 
len(feature_importances) == 0: + # feature_importances = current_feature_importances + # else: + # feature_importances['importance'] = feature_importances['importance'] + current_feature_importances['importance'] + # + # print(feature_importances) + + #aggregate results + if len(prediction) == 0: + prediction = pd.DataFrame(predicated) + else: + prediction = pd.concat([prediction, pd.DataFrame(predicated)]) + + raw_data.loc[[ii for ii in test_indices],"Prediction" + "_" + clf_name] = predicated + + raw_data = _convert_prediction(raw_data, "Prediction" + "_" + clf_name, n_candidates) + + print(str(clf) +": F_score = " + str(f1_score(targets_test, predicated, average='weighted'))) + # Measures + results_df.iloc[j + i, 1] = results_df.iloc[j + i, 1] + precision_score(targets_test, predicated, average='weighted') + results_df.iloc[j + i, 2] = results_df.iloc[j + i, 2] + recall_score(targets_test, predicated, average='weighted') + results_df.iloc[j + i, 3] = results_df.iloc[j + i, 3] + f1_score(targets_test, predicated, average='weighted') + results_df.iloc[j + i, 4] = results_df.iloc[j + i, 4] + accuracy_score(targets_test, predicated) + + # if i == n_folds - 1: + # results_df.iloc[j, 1] = results_df.iloc[j, 1]/n_folds + # results_df.iloc[j, 2] = results_df.iloc[j, 2]/n_folds + # results_df.iloc[j, 3] = results_df.iloc[j, 3]/n_folds + # results_df.iloc[j, 4] = results_df.iloc[j, 4]/n_folds + + + #results_df.Result = results_df.Result.apply(lambda x: x / n_folds) + return results_df, raw_data#, feature_importances + +def _build_data_by_folds(data, folds): + transformed_data = pd.DataFrame() + for i in range(0,len(folds)): + # Split into features and target + fold_indices = data.index[[x[1].RoundIndex in folds[i] for x in data.iterrows()]].tolist() + fold_df = data.iloc[fold_indices,:] + + if len(transformed_data) == 0: + transformed_data = fold_df + else: + transformed_data = pd.concat([transformed_data, fold_df]) + return transformed_data + +def _load_and_run(datasets, 
load_folds, classifiers, n_candidates, scenarios = ['NONE'], is_loo = False, n_folds = 10): + actions_table = pd.read_csv("datasets/oneshot/action_table_N"+str(n_candidates)+".csv") + scenarios_table = pd.read_csv("datasets/oneshot/scenario_table_N"+str(n_candidates)+".csv") + + for dataset in datasets: + file_path = "datasets/oneshot/" + dataset + ".xlsx" + xls = pd.ExcelFile(file_path) + for sheet in xls.sheet_names: + #Get sheet from xlsx + data = pd.read_excel(file_path, sheet_name=sheet) + d_df = data.fillna(data.mean()) + + #Prepare folds + if load_folds == True: + folds = _read_roy_folds(open("datasets/oneshot/"+dataset+"_folds.txt", "r")) + else: + if is_loo == True: + folds = _get_loo_folds(d_df) + else: + folds = _get_k_folds(d_df, n_folds) + + for scenario in scenarios: # ['A','B','C','D','E','F','NONE']: + raw_data = d_df.copy() + + d_performance_df, d_pred = _evaluation(raw_data, classifiers, 'Action', folds, scenario, actions_table, scenarios_table, n_candidates) + d_performance_df.to_csv("Results\\" + dataset + "_" + sheet + "_performance_df_" + scenario + ".csv") + d_pred.to_csv("Results\\" + dataset + "_" + sheet + "_pred_" + scenario + ".csv") + pass + + +#---------------------------------- Classifiers Definition ------------------------------------# +# personal_rf_clf = PersonalClassifier(id_index=raw_data.columns.get_loc("VoterID"), n_upsample=3)#RandomForestClassifier(n_estimators=100) # MLPClassifier(hidden_layer_sizes = (92), max_iter = 500) +# personal_nn_clf = PersonalClassifier(id_index=raw_data.columns.get_loc("VoterID"), base_classifier=MLPClassifier(hidden_layer_sizes = (92), max_iter = 500), n_upsample=10, general_base_classifier=True)#RandomForestClassifier(n_estimators=100) # MLPClassifier(hidden_layer_sizes = (92), max_iter = 500) +# neural_net_cf = MLPClassifier(hidden_layer_sizes = (92), max_iter = 500) +rf_clf = RandomForestClassifier(n_estimators=100) +# dt_clf = DecisionTreeClassifier() +# adaboost_clf = 
AdaBoostClassifier(n_estimators=200) +# svm_clf = SVC() +# logistics_clf = logistic.LogisticRegression() +#ordinal_clf = OrdinalClassifier(base_classifier = RandomForestClassifier(n_estimators=100)) +#baseline_clf = DecisionTreeBaseline() +# bayesrule_clf = BayesRuleClassifier() +# likelihood_clf = LHClassifier() +# maxlikelihood_clf = MLHClassifier() + +classifiers = [rf_clf] # ,personal_nn_clf,neural_net_cf, rf_clf,dt_clf,adaboost_clf, svm_clf,logistics_clf] +#---------------------------------- Classifiers Definition ------------------------------------# +#----------------------------------- Dataset definition ---------------------------------------# +# datasets: ["schram"]#["d36_2_folds","d36_4_folds","d36_6_folds","d32_2_folds","d32_4_folds","d32_6_folds"] +datasets = ["schram"] +n_candidates = 3 + +_load_and_run(datasets=datasets, load_folds=True, classifiers=classifiers, n_candidates=n_candidates) + diff --git a/OneShot_NewAnalysis_rows_removed.py b/OneShot_NewAnalysis_rows_removed.py new file mode 100644 index 0000000..d2bcce7 --- /dev/null +++ b/OneShot_NewAnalysis_rows_removed.py @@ -0,0 +1,756 @@ +# -*- coding: utf-8 -*- +""" +Created on Fri Aug 17 11:16:03 2018 + +@author: Adam +""" +import numbers +import numpy as np +import pandas as pd +import sklearn +from scipy.stats import kurtosis +from scipy.stats import skew +from keras.layers import Input, Dense +from keras.models import Model + +from sklearn.ensemble.forest import RandomForestClassifier +from sklearn.tree import DecisionTreeClassifier +from sklearn.neural_network import MLPClassifier + +from datetime import datetime +# Model and feature selection +from sklearn.feature_selection import SelectKBest +from sklearn.model_selection import KFold +from sklearn.feature_selection import chi2 +# Classification metrics +from sklearn.metrics import f1_score +from sklearn.metrics import precision_score +from sklearn.metrics import recall_score +from sklearn.metrics import accuracy_score + +from sklearn 
import preprocessing +from PersonalClassifier import PersonalClassifier +from sklearn.ensemble import AdaBoostClassifier +from sklearn.ensemble import VotingClassifier +from sklearn.svm import SVC +from sklearn.linear_model import logistic + +def _convert_prediction(X, column_name): + X.loc[X[column_name]==1,"Vote_"+column_name] = X.loc[X[column_name]==1,"Pref1"] + X.loc[X[column_name]==2,"Vote_"+column_name] = X.loc[X[column_name]==2,"Pref2"] + X.loc[X[column_name]==3,"Vote_"+column_name] = X.loc[X[column_name]==3,"Pref3"] + + return X + +def _generate_action_name(X): + # Generate action name + # Action mapping table + d = {'scenario': ['A', 'A', 'A', 'B', 'B', 'B', 'C', 'C', 'C', 'D', 'D', 'D', 'E', 'E', 'E', 'F', 'F', 'F'], + 'action': [1, 2, 3, 1, 2, 3, 1, 2, 3, 1, 2, 3, 1, 2, 3, 1, 2, 3], + 'action_name': ['TRT', 'DOM', 'DOM', 'TRT', 'DOM', 'DOM', 'TRT', 'WLB', 'DOM', 'TRT', 'DOM', 'SLB', 'TRT', + 'CMP\WLB', 'DOM', 'TRT', 'CMP', 'SLB']} + action_map_df = pd.DataFrame(data=d) + + X['Action_name'] = [(action_map_df.loc[(action_map_df.scenario == str(x[1]['Scenario'])) & ( + action_map_df.action == int(X.loc[x[0], 'Action'])), 'action_name']).values[0] for x in + X.iterrows()] + + return X + +def _data_cleaning(data_df,is_target ,le): + if is_target: + data_df = data_df.astype("category") + data_df = le.fit_transform(data_df) + else: + for c in data_df.columns: + if data_df[c].dtype in (object, str, np.object, bool): + if not (data_df[c].dtype in (int, float)): + data_df[c] = le.fit_transform(data_df[c]) + return data_df + +def _generate_A_ratios(X, X_train, y_train, voter): + """Generate A ratios - That is TRT-ratio, CMP-ratio, WLB-ratio, SLB-ratio, DOM-ratio + Action is in {TRT,DLB,SLB,WLB,CMP,DOM} + Scenario is in {A,B,C,D,E,F} + """ + start_ind_col = len(X.columns) + + availability_counter = np.count_nonzero([x[1].Scenario in ['A','B','C','D','E','F'] for x in X_train.loc[X_train['VoterID'] == voter.VoterID].iterrows()]) + X.loc[X['VoterID'] == voter.VoterID, 
'TRT-ratio'] = np.count_nonzero((['TRT' in x[1]['Action_name'] for x in X_train.loc[X_train['VoterID'] == voter.VoterID].iterrows()]))/availability_counter if availability_counter>0 else 0 + + availability_counter = np.count_nonzero([x[1].Scenario in ['C','E'] for x in X_train.loc[X_train['VoterID'] == voter.VoterID].iterrows()]) + X.loc[X['VoterID'] == voter.VoterID, 'WLB-ratio'] = np.count_nonzero((['WLB' in x[1]['Action_name'] for x in X_train.loc[X_train['VoterID'] == voter.VoterID].iterrows()]))/availability_counter if availability_counter>0 else 0 + + availability_counter = np.count_nonzero([x[1].Scenario in ['D','F'] for x in X_train.loc[X_train['VoterID'] == voter.VoterID].iterrows()]) + X.loc[X['VoterID'] == voter.VoterID, 'SLB-ratio'] = np.count_nonzero((['SLB' in x[1]['Action_name'] for x in X_train.loc[X_train['VoterID'] == voter.VoterID].iterrows()]))/availability_counter if availability_counter>0 else 0 + #X.loc[X['VoterID'] == voter.VoterID, 'LB-ratio'] = np.count_nonzero((['LB' in x[1]['Action_name'] for x in X_train.loc[X_train['VoterID'] == voter.VoterID].iterrows()]))/np.count_nonzero([x[1].Scenario in ['C','D','E','F'] for x in X_train.loc[X_train['VoterID'] == voter.VoterID].iterrows()]) + availability_counter = np.count_nonzero([x[1].Scenario in ['E','F'] for x in X_train.loc[X_train['VoterID'] == voter.VoterID].iterrows()]) + X.loc[X['VoterID'] == voter.VoterID, 'CMP-ratio'] = np.count_nonzero((['CMP' in x[1]['Action_name'] for x in X_train.loc[X_train['VoterID'] == voter.VoterID].iterrows()]))/availability_counter if availability_counter>0 else 0 + + availability_counter = np.count_nonzero([x[1].Scenario in ['A','B','C','D','E','F'] for x in X_train.loc[X_train['VoterID'] == voter.VoterID].iterrows()]) + X.loc[X['VoterID'] == voter.VoterID, 'DOM-ratio'] = np.count_nonzero((['DOM' in x[1]['Action_name'] for x in X_train.loc[X_train['VoterID'] == voter.VoterID].iterrows()]))/availability_counter if availability_counter>0 else 0 + + 
X.loc[X['VoterID'] == voter.VoterID, 'DOM-counter'] = np.count_nonzero((['DOM' in x[1]['Action_name'] for x in X_train.loc[X_train['VoterID'] == voter.VoterID].iterrows()])) + + X['TRT-ratio'] = (X['TRT-ratio']).astype(float) + X['WLB-ratio'] = (X['WLB-ratio']).astype(float) + X['SLB-ratio'] = (X['SLB-ratio']).astype(float) + X['CMP-ratio'] = (X['CMP-ratio']).astype(float) + X['DOM-ratio'] = (X['DOM-ratio']).astype(float) + X['DOM-counter'] = (X['DOM-counter']).astype(float) + + end_ind_col = len(X.columns) + return X, list(range(start_ind_col, end_ind_col)) + +def _generate_is_random_voter(X): + """Identify random voters using the rule of DOM-counter >= 2 (excluding SLB actions)""" + X['Is_Random'] = [x >= 2 for x in X['DOM-counter']] + + return X + +def _generate_voter_type(X): + """Generate Voter Type using thresholds over the A-ratio values""" + X['VoterType'] = 'Other' + #X.loc[ [int(x[1]['CMP-ratio'])>=0.7 for x in X.iterrows()], 'VoterType'] = 'CMP' + X.loc[ [float(x[1]['WLB-ratio'])>0.8 for x in X.iterrows()], 'VoterType'] = 'LB' + X.loc[ [float(x[1]['TRT-ratio'])>0.9 for x in X.iterrows()], 'VoterType'] = 'TRT' + + return X + +def _generate_pref_gaps(X): + X["VotesPref1PreVote"] = [x[1]["VotesCand"+str(x[1]["Pref1"])+"PreVote"] for x in X.iterrows()] + X["VotesPref2PreVote"] = [x[1]["VotesCand"+str(x[1]["Pref2"])+"PreVote"] for x in X.iterrows()] + X["VotesPref3PreVote"] = [x[1]["VotesCand"+str(x[1]["Pref3"])+"PreVote"] for x in X.iterrows()] + + return X + +def _generate_gaps(X): + """Generate Gaps features""" + + X['VotesLeader_poll'] = X[['VotesCand1PreVote','VotesCand2PreVote','VotesCand3PreVote']].max(axis = 1) + X['VotesRunnerup_poll'] = X[['VotesCand1PreVote','VotesCand2PreVote','VotesCand3PreVote']].apply(np.median, axis=1) + X['VotesLoser_poll'] = X[['VotesCand1PreVote','VotesCand2PreVote','VotesCand3PreVote']].min(axis = 1) + + + X['GAP12_poll'] = X['VotesLeader_poll'] - X['VotesRunnerup_poll'] + X['GAP23_poll'] = X['VotesRunnerup_poll'] - 
X['VotesLoser_poll'] + X['GAP13_poll'] = X['VotesLeader_poll'] - X['VotesLoser_poll'] + + #Preference based gaps - I think more suitable for ML for it's more synchronized across the scenarios + + # for x in X.iterrows(): + # vote = x[1] + # candPref1 = 'VotesCand' + str(vote.Pref1) + 'PreVote' + # candPref2 = 'VotesCand' + str(vote.Pref2) + 'PreVote' + # candPref3 = 'VotesCand' + str(vote.Pref3) + 'PreVote' + # X.loc[x[0], 'VotesPref1_poll'] = X.loc[x[0], candPref1] + # X.loc[x[0], 'VotesPref2_poll'] = X.loc[x[0], candPref2] + # X.loc[x[0], 'VotesPref3_poll'] = X.loc[x[0], candPref3] + # + # + X['GAP12_pref_poll'] = X['VotesPref1PreVote'] - X['VotesPref2PreVote'] + X['GAP23_pref_poll'] = X['VotesPref2PreVote'] - X['VotesPref3PreVote'] + X['GAP13_pref_poll'] = X['VotesPref1PreVote'] - X['VotesPref3PreVote'] + + + return X + +def _generate_scenario_type(X): + #initialize + X['TRT'] = 0 + X['WLB'] = 0 + X['SLB'] = 0 + X['CMP'] = 0 + + X.loc[[x[1].Scenario in ['A','B','C','D','E','F'] for x in X],'TRT'] = 1 + X.loc[[x[1].Scenario in ['C','E'] for x in X],'WLB'] = 1 + X.loc[[x[1].Scenario in ['D','F'] for x in X],'SLB'] = 1 + X.loc[[x[1].Scenario in ['C','E','D','F'] for x in X], 'LB'] = 1 + X.loc[[x[1].Scenario in ['E','F'] for x in X],'CMP'] = 1 + + return X + +def _generate_feature_aggregation_class_dependant(X, X_train, y_train, voter, feature_name, aggregation_func): + action1_list = [ float(x[1][feature_name]) for x in X_train.loc[(X_train['VoterID'] == voter.VoterID) & (y_train == 1)].iterrows()] + if len(action1_list)>0: + X.loc[X['VoterID'] == voter.VoterID, feature_name + '_action1_' + aggregation_func.__name__] = aggregation_func(action1_list) + + action2_list = [ float(x[1][feature_name]) for x in X_train.loc[(X_train['VoterID'] == voter.VoterID) & (y_train == 2)].iterrows()] + if len(action2_list)>0: + X.loc[X['VoterID'] == voter.VoterID, feature_name + '_action2_' + aggregation_func.__name__] = aggregation_func(action2_list) + + action3_list = [ 
# NOTE: a fragment (the action-3 tail) of _generate_feature_aggregation_class_dependant,
# whose definition starts earlier in the file, occupied the top of this chunk.


def _generate_action_aggregation_features(X, X_train, y_train, voter):
    """Add per-voter aggregates (mean/std/median) of the voter's past actions.

    Only training rounds from the strategic scenarios C-F are aggregated; the
    result is written into new ``Action_<agg>`` columns on every row of ``X``
    that belongs to this voter.
    """
    aggregators = [np.average, np.std, np.median]
    feature_name = "Action"

    # Hoisted out of the aggregator loop: the action list is identical for
    # every aggregator, so build it once instead of three times.
    mask = (X_train['VoterID'] == voter.VoterID) & (X_train['Scenario'].isin(['C', 'D', 'E', 'F']))
    actions = [float(y_train[row[0]]) for row in X_train.loc[mask].iterrows()]

    for aggregation_func in aggregators:
        X.loc[X['VoterID'] == voter.VoterID,
              feature_name + "_" + aggregation_func.__name__] = aggregation_func(actions)

    return X


def _generate_gaps_features(X, X_train, y_train, voter):
    """Add class-dependent aggregates of the three poll-gap features.

    For every (gap feature, aggregator) pair, delegates to
    ``_generate_feature_aggregation_class_dependant`` restricted to the
    strategic scenarios C-F.

    Returns
    -------
    (X, added_columns) : the augmented frame and the positional indices of
    the columns appended by this call.
    """
    start_ind_col = len(X.columns)

    features = ['GAP12_pref_poll', 'GAP23_pref_poll', 'GAP13_pref_poll']
    aggregators = [np.average, np.std, np.median, np.min, np.max, skew, kurtosis]
    scenarios = ['C', 'D', 'E', 'F']

    # Hoisted: the scenario restriction is loop-invariant.
    scenario_rows = X_train.loc[X_train['Scenario'].isin(scenarios)]

    for aggregator in aggregators:
        for feature in features:
            X = _generate_feature_aggregation_class_dependant(
                X, scenario_rows, y_train, voter, feature, aggregator)

    end_ind_col = len(X.columns)
    return X, list(range(start_ind_col, end_ind_col))


def _generate_gap_dif_features(X):
    """Add <gap>_<action>_<agg>_dif columns: current gap minus the voter's
    historical aggregate of that gap for that action.

    Replaces the original 36 hand-written assignments with loops; the column
    creation order (aggregator-major, then feature, then action) is preserved.

    Returns
    -------
    (X, added_columns) : the augmented frame and the positional indices of
    the columns appended by this call.
    """
    start_ind_col = len(X.columns)

    features = ['GAP12_pref_poll', 'GAP23_pref_poll', 'GAP13_pref_poll']
    # Order matches the original expansion: average, median, max, min.
    aggregators = [np.average, np.median, np.max, np.min]

    for aggregator in aggregators:
        for feature in features:
            for action in ('action1', 'action2', 'action3'):
                base = feature + '_' + action + '_' + aggregator.__name__
                X[base + '_dif'] = X[feature] - X[base]

    end_ind_col = len(X.columns)
    return X, list(range(start_ind_col, end_ind_col))


def _static_feature_generation(X):
    """Features that depend only on the row itself (no training history)."""
    X = _generate_pref_gaps(X)
    X = _generate_gaps(X)
    # X = _generate_scenario_type(X)  # disabled in the original pipeline
    return X


def _dynamic_feature_generation(X, X_train, y_train):
    """Per-voter history features + autoencoded compression of the gap block.

    For every distinct (VoterID, SessionIDX) pair, adds A-ratio, gap-aggregate
    and action-aggregate features computed from the training portion only, then
    normalizes the gap-feature block, compresses it with an autoencoder, and
    drops the raw gap-aggregate columns in favour of the encoded ones.
    """
    all_voters = pd.DataFrame(X[["VoterID", "SessionIDX"]].drop_duplicates())
    for voter in all_voters.iterrows():
        X, added_columns = _generate_A_ratios(X, X_train, y_train, voter[1])
        if len(added_columns) > 0:
            a_ratio_columns = added_columns
        X, added_columns = _generate_gaps_features(X, X_train, y_train, voter[1])
        if len(added_columns) > 0:
            gaps_columns = added_columns
        X = _generate_action_aggregation_features(X, X_train, y_train, voter[1])

    # Gaps features encoding. NaNs appear for voters who never chose a given
    # action (e.g. q'', 3) in any encountered gap; fill with the column mean.
    X = X.fillna(X.mean())
    X, gaps_dif_columns = _generate_gap_dif_features(X)
    total_gaps_columns = a_ratio_columns + gaps_columns + gaps_dif_columns
    total_gaps_columns.append(X.columns.get_loc("GAP12_pref_poll"))
    total_gaps_columns.append(X.columns.get_loc("GAP23_pref_poll"))
    total_gaps_columns.append(X.columns.get_loc("GAP13_pref_poll"))

    normalized_gap_fs = pd.DataFrame(preprocessing.normalize(X.iloc[:, total_gaps_columns]))
    encoded_gap_fs = pd.DataFrame(_autoencode(normalized_gap_fs))

    X = pd.concat([X, encoded_gap_fs], axis=1, join='inner')

    # Keep only the encoded representation of the gap aggregates.
    X = X.drop(X.columns[gaps_columns + gaps_dif_columns], axis=1)

    X = _generate_is_random_voter(X)
    X = _generate_voter_type(X)

    return X


def _evaluation(X, clf, target):
    """10-fold cross-validated evaluation of ``clf`` on ``X``.

    Returns a frame with averaged weighted PRECISION / RECALL / F_MEASURE.
    """
    # static features generation
    X = _static_feature_generation(X)
    # Encoders definitions
    le = sklearn.preprocessing.LabelEncoder()
    target_le = sklearn.preprocessing.LabelEncoder()

    # Split into features and target
    features_df, target_df = X.drop([target], axis=1), X[target]

    n_folds = 10
    results_df = pd.DataFrame(columns=['Measure', 'Result'])
    # Initialize metrics:
    results_df.loc[0] = ['PRECISION', 0]
    results_df.loc[1] = ['RECALL', 0]
    results_df.loc[2] = ['F_MEASURE', 0]

    kf = KFold(n_folds, shuffle=True, random_state=1)
    for train_indices, test_indices in kf.split(features_df, target_df):
        # Dynamic features must be generated from the training rows only.
        features_train = features_df.loc[list(train_indices), ]
        targets_train = target_df[list(train_indices)]
        features_ext_df = _dynamic_feature_generation(features_df, features_train, targets_train)
        features_ext_df = features_ext_df.drop(["Action_name"], axis=1)

        # encoding the dataframes
        features_encoded_df = pd.DataFrame(
            preprocessing.normalize(preprocessing.scale(_data_cleaning(features_ext_df, False, le))))
        target_encoded_df = _data_cleaning(target_df, True, target_le)

        # make training and testing datasets
        features_train = features_encoded_df.loc[list(train_indices), ]
        features_test = features_encoded_df.loc[list(test_indices), ]
        targets_train = target_encoded_df[list(train_indices)]
        targets_test = target_encoded_df[list(test_indices)]

        # Train / test (.values replaces the removed DataFrame.as_matrix()).
        clf.fit(X=features_train.values, y=targets_train)
        predicated = clf.predict(features_test.values)

        # Measures (accumulated over folds, averaged below)
        results_df.iloc[0, 1] = results_df.iloc[0, 1] + precision_score(targets_test, predicated, average='weighted')
        results_df.iloc[1, 1] = results_df.iloc[1, 1] + recall_score(targets_test, predicated, average='weighted')
        results_df.iloc[2, 1] = results_df.iloc[2, 1] + f1_score(targets_test, predicated, average='weighted')

    results_df.Result = results_df.Result.apply(lambda x: x / n_folds)

    return results_df


def intersection(lst1, lst2):
    """Return elements of ``lst1`` also present in ``lst2`` (order and
    duplicates of lst1 preserved). Uses a set for O(1) membership tests."""
    lst2_set = set(lst2)
    return [value for value in lst1 if value in lst2_set]
list() + for index in range(0,len(lines)): + line = lines[index] + if not ('fold' in line) and line != '': + folds.append([int(ii) for ii in line[1:len(line)-1].split(',')]) + + + return folds + +def _get_loo_folds(X): + folds = list() + for x in X.iterrows(): + fold = [x[1].RoundIndex] + folds.append(fold) + + return folds + +def _get_k_folds(X,k): + folds = list() + kf = KFold(k, shuffle=True, random_state=1) # 10 fold cross validation + for train_indices, test_indices in kf.split(X): + folds.append(test_indices) + return folds + +def _autoencode(features): + #test + + encoding_dim = int(len(features.columns)/5) + input_votes = Input(shape = (len(features.columns),)) + encoded = Dense(encoding_dim, activation='relu')(input_votes) + decoded = Dense(len(features.columns), activation='tanh')(encoded) + autoencoder = Model(input_votes, decoded) + encoder = Model(input_votes, encoded) + + # encoded_input = Input(shape=(encoding_dim,)) +# decoder_layer = autoencoder.layers[-1] + # decoder = Model(encoded_input, decoder_layer(encoded_input)) + + autoencoder.compile(optimizer='adadelta', loss='MSE') + + autoencoder.fit(features, features, + epochs=150, + batch_size=256, + shuffle=True,verbose=False) + + encoded_votes = encoder.predict(features) + + return encoded_votes + + +def _evaluation_roy_splits(raw_data, clfs, target, scenario_filter): + data = raw_data.copy() + #static features generation + data = _static_feature_generation(data) + # Encoders definitions + le = sklearn.preprocessing.LabelEncoder() + target_le = sklearn.preprocessing.LabelEncoder() + + + #n_folds = len(folds) + i=0 + + results_df = pd.DataFrame(columns=['Classifier', 'PRECISION', 'RECALL','F_MEASURE','ACCURACY']) + + prediction = pd.DataFrame(np.matrix([])) + feature_importances = pd.DataFrame() + features_train = pd.DataFrame() + # 10 fold cross validation + #for i in range(0,len(folds)): + + #print(str(100*(i/len(folds)))+"%") + # Split into features and target + features_df, target_df = 
data.drop([target], axis=1),data[target] + + test_indices = data.index.tolist()#data.index[[x[1].RoundIndex in folds[i] for x in data.iterrows()]].tolist() + train_indices = data.index.tolist()#data.index[[not (x[1].RoundIndex in folds[i] or x[1].Scenario == scenario_filter) for x in data.iterrows()]].tolist() + # Feature Generation + features_train = features_df.loc[[ii for ii in train_indices],] + targets_train = target_df[[ii for ii in train_indices]] + features_ext_df = _dynamic_feature_generation(features_df, features_train , targets_train) + features_ext_df = features_ext_df.drop(["Action_name"], axis=1) + features_ext_df = features_ext_df.drop(["Vote"], axis=1) + # encoding the dataframes + features_encoded_df = pd.DataFrame(preprocessing.normalize(preprocessing.scale(_data_cleaning(features_ext_df, False, le).as_matrix()), axis=0, norm='max'))# + target_encoded_df = target_df #_data_cleaning(target_df, True, target_le) + + + # make training and testing datasets + features_train = features_encoded_df.loc[[ii for ii in train_indices],] + features_test = features_encoded_df.loc[[ii for ii in test_indices],] + targets_train = target_encoded_df[[ii for ii in train_indices]] + targets_test = target_encoded_df[[ii for ii in test_indices]] + + +# training = pd.concat([features_train, pd.DataFrame(targets_train)], axis=1, join='inner') +# testing = pd.concat([features_test, pd.DataFrame(targets_test)], axis=1, join='inner') +# training.to_csv("training_fold_"+str(i)+".csv") +# testing.to_csv("testing_fold_"+str(i)+".csv") + + for j in range(0,len(clfs)): + clf = clfs[j] + clf_name = str(clf).split("(")[0] + if i == 0: + #Initialize metrics + results_df.loc[j] = [str(clf), 0, 0, 0, 0] + + if clf != "baseline": + + # Train + clf.fit(X = features_train.as_matrix(), y = targets_train) + # Test + predicated = clf.predict(features_test.as_matrix()) + + # #feature importance + # current_feature_importances = pd.DataFrame(clf.feature_importances_, + # 
index=features_ext_df.columns, + # columns=['importance']).sort_values('importance', + # ascending=False) + # if len(feature_importances) == 0: + # feature_importances = current_feature_importances + # else: + # feature_importances['importance'] = feature_importances['importance'] + current_feature_importances['importance'] + # + # print(feature_importances) + + else: + features_ext_df_test = features_ext_df.loc[[ii for ii in test_indices],] + n_samples = len(features_ext_df_test) + predicated = np.zeros((n_samples),dtype=int) + + features_ext_df_test["Prediction"] = 1 + features_ext_df_test.loc[(features_ext_df_test["Scenario"] == "C") & (features_ext_df_test["VoterType"] == "LB"), "Prediction"] = 2 + features_ext_df_test.loc[(features_ext_df_test['Scenario'].isin(["E","F"])) & (features_ext_df_test["VoterType"] != "TRT"), "Prediction"] = 2 + + predicated = features_ext_df_test["Prediction"] + + features_ext_df_test = pd.concat([features_ext_df_test, pd.DataFrame(predicated)], axis=1) + + + + + #aggregate results + if len(prediction) == 0: + prediction = pd.DataFrame(predicated) + else: + prediction = pd.concat([prediction, pd.DataFrame(predicated)]) + + raw_data.loc[[ii for ii in test_indices],"Prediction" + "_" + clf_name] = predicated + + raw_data = _convert_prediction(raw_data, "Prediction" + "_" + clf_name) + + print(str(clf) +": F_score = " + str(f1_score(targets_test, predicated, average='weighted'))) + # Measures + results_df.iloc[j, 1] = results_df.iloc[j, 1] + precision_score(targets_test, predicated, average='weighted') + results_df.iloc[j, 2] = results_df.iloc[j, 2] + recall_score(targets_test, predicated, average='weighted') + results_df.iloc[j, 3] = results_df.iloc[j, 3] + f1_score(targets_test, predicated, average='weighted') + results_df.iloc[j, 4] = results_df.iloc[j, 4] + accuracy_score(targets_test, predicated) + + # if i == n_folds - 1: + # results_df.iloc[j, 1] = results_df.iloc[j, 1]/n_folds + # results_df.iloc[j, 2] = results_df.iloc[j, 
2]/n_folds + # results_df.iloc[j, 3] = results_df.iloc[j, 3]/n_folds + # results_df.iloc[j, 4] = results_df.iloc[j, 4]/n_folds + + + #results_df.Result = results_df.Result.apply(lambda x: x / n_folds) + return results_df, prediction, raw_data, feature_importances, features_train + + +def _build_data_by_folds(data, folds): + transformed_data = pd.DataFrame() + for i in range(0,len(folds)): + # Split into features and target + fold_indices = data.index[[x[1].RoundIndex in folds[i] for x in data.iterrows()]].tolist() + fold_df = data.iloc[fold_indices,:] + + if len(transformed_data) == 0: + transformed_data = fold_df + else: + transformed_data = pd.concat([transformed_data, fold_df]) + return transformed_data + +# +# data = pd.read_excel("datasets/oneshot/full_data_no_timeout_no_first_round.xlsx") +# +# data_with_action_name = _generate_action_name(data) +# data_with_action_name.to_csv("datasets\\oneshot\\full_data_no_timeout_no_first_round_with_action_name.csv") +# + +# #Load rawdata +# oneshot_df = pd.read_csv("datasets/oneshot/full_data_no_timeout_no_first_round_with_action_name.csv")#read_excel("datasets/oneshot/NewData.xlsx") +# +# #sorted_data = _build_data_by_folds(oneshot_df) +# #sorted_data.to_csv("Results\\sorted_fulldata.csv") +# # +# #print("F_score = " + str(f1_score(sorted_data.Action, nn_prediction[1:], average='weighted'))) +# # +# +# raw_data = oneshot_df +# +# +# +# raw_data = raw_data.drop(['GameConfiguration','FileName','PlayerID','DateTime','CandName1', 'CandName2','CandName3','WinnerPostVote', 'VotesCand1', 'VotesCand2', 'VotesCan3', 'PointsPostVote','ResponseTime'], axis=1) + + +#classifier = RandomForestClassifier(n_estimators=100)#MLPClassifier(hidden_layer_sizes = (92), max_iter = 500) + +# +# #Model Phase +# #rf_results_df = _evaluation_roy_splits(data, RandomForestClassifier(n_estimators=100), 'Action') +# +# +# nn_results_df, nn_prediction, data_with_pred = _evaluation_roy_splits(raw_data, classifier, 'Action') +# +# 
# (historical commented-out experiment runs for the tal/schram/d36 datasets
#  removed; the live pipeline below supersedes them)

# Datasets to evaluate; every sheet of every workbook is processed.
datasets = ["schram", "tal", "d32_updated", "d36_updated"]

for dataset in datasets:
    file_path = "datasets/oneshot/" + dataset + ".xlsx"
    xls = pd.ExcelFile(file_path)

    for sheet in xls.sheet_names:
        data = pd.read_excel(file_path, sheet_name=sheet)

        # Materialize the action-name-augmented dataset, then reload it so the
        # pipeline always consumes the same CSV representation.
        data_with_action_name = _generate_action_name(data)
        data_with_action_name.to_csv("datasets\\oneshot\\" + dataset + "_with_action_name.csv")
        d_df = pd.read_csv("datasets/oneshot/" + dataset + "_with_action_name.csv")
        d_df = d_df.fillna(d_df.mean())

        # Model phase. 'NONE' means no scenario is filtered out of training.
        for scenario in ['NONE']:
            raw_data = d_df.copy()
            # Per-voter ensemble (3x upsampling) and a plain random forest.
            personal_rf_clf = PersonalClassifier(id_index=raw_data.columns.get_loc("VoterID"), n_upsample=3)
            rf_clf = RandomForestClassifier(n_estimators=100)

            classifiers = [rf_clf, personal_rf_clf]

            d_performance_df, d_prediction, d_pred, feature_importances, features_train = \
                _evaluation_roy_splits(raw_data, classifiers, 'Action', scenario)

            d_performance_df.to_csv("Results\\" + dataset + "_" + sheet + "_performance_df_" + scenario + ".csv")
            d_pred.to_csv("Results\\" + dataset + "_" + sheet + "_pred_" + scenario + ".csv")
# (historical commented-out d36 experiment run removed)

# ==== OrdinalClassifier.py ====
import numbers
import numpy as np
import sklearn

from six import with_metaclass
from abc import ABCMeta
from sklearn.base import ClassifierMixin
from sklearn.ensemble.base import BaseEnsemble
from sklearn.utils import check_X_y, check_array, column_or_1d
from sklearn.utils.multiclass import check_classification_targets

from sklearn.externals.joblib import Parallel, delayed  # For parallel computing TODO: check if we need to be parallel or not
from sklearn.utils.validation import has_fit_parameter, check_is_fitted


def _transform_data(ordered_class, class_value, y):
    """Binarize ``y`` for one threshold of the ordinal decomposition.

    For every label in ``y``, emit 1 if the label is ordered strictly after
    ``class_value`` in ``ordered_class``, else 0 (Frank & Hall scheme).
    """
    ordered_class = ordered_class.tolist()
    pivot = ordered_class.index(class_value)  # hoisted: invariant over y
    y_derived = [int(ordered_class.index(label) > pivot) for label in y]

    return y_derived


def _build_classifier(binary_classifier, X, y_derived):
    """Fit a single binary classifier on one derived target."""
    return binary_classifier.fit(X, np.array(y_derived))


def _build_classifiers(binary_classifier, ordered_class, X, y):
    """Fit len(ordered_class)-1 threshold classifiers, in class order.

    ``binary_classifier`` is a factory (class or callable) producing a fresh
    estimator per threshold. The last class needs no classifier: its
    probability is derived from the previous threshold at prediction time.
    """
    classifiers = []
    for i in range(0, len(ordered_class) - 1):
        y_derived = _transform_data(ordered_class, ordered_class[i], y)
        classifier = _build_classifier(binary_classifier(), X, y_derived)
        classifiers.append(classifier)  # order matters: index i = threshold i

    return classifiers


def _predict_proba(binary_classifiers, n_classes, X):
    """Combine threshold classifiers into per-class probabilities.

    P(first) = 1 - P(y > c0); P(middle_i) = P(y > c_{i-1}) - P(y > c_i);
    P(last) = P(y > c_{n-2}). Each classifier's predict_proba is computed
    once (the original recomputed it for adjacent classes).
    """
    n_samples = X.shape[0]
    final_proba = np.zeros((n_samples, n_classes))

    # P(y > class_k) for each of the n_classes-1 threshold classifiers.
    greater = []
    for clf in binary_classifiers:
        positive_index = clf.classes_.tolist().index(1)
        greater.append(clf.predict_proba(X)[:, positive_index])

    final_proba[:, 0] = 1 - greater[0]
    for i in range(1, n_classes - 1):
        final_proba[:, i] = greater[i - 1] - greater[i]
    final_proba[:, n_classes - 1] = greater[n_classes - 2]

    return final_proba


def _decision_function(estimators, estimators_features, X):
    """Private function used to compute decisions within a job."""
    # TODO: Check if we need this!
    pass


class OrdinalClassifier(with_metaclass(ABCMeta, BaseEnsemble, ClassifierMixin)):
    """Ordinal meta-classifier: one binary classifier per class threshold.

    Decomposes a K-class ordinal problem into K-1 binary "is y greater than
    class k" problems (Frank & Hall) and recombines their probabilities.
    """

    def __init__(self,
                 base_classifier=None,
                 ordered_class=None,
                 max_samples=1.0,
                 max_features=1.0,
                 warm_start=False,
                 n_jobs=1,
                 verbose=0):

        self.base_classifier = base_classifier
        self.ordered_class = ordered_class
        self.max_samples = max_samples
        self.max_features = max_features
        self.warm_start = warm_start
        self.n_jobs = n_jobs
        self.verbose = verbose

        # NOTE(review): with the default ordered_class=None this raises
        # TypeError (len(None)); both arguments are effectively required.
        super(OrdinalClassifier, self).__init__(
            base_estimator=base_classifier,
            n_estimators=len(ordered_class))

    def _validate_estimator(self):
        """Check the estimator and set the base_estimator_ attribute."""
        pass

    def fit(self, X, y, sample_weight=None):
        """Fit the ordinal ensemble on (X, y).

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape = [n_samples, n_features]
        y : array-like, shape = [n_samples] — labels drawn from ordered_class.
        sample_weight : array-like or None — only if the base estimator
            supports sample weighting.

        Returns
        -------
        self
        """
        return self._fit(X, y, self.max_samples, sample_weight=sample_weight)

    def _fit(self, X, y, max_samples=None, max_depth=None, sample_weight=None):
        """Validate inputs/parameters, then train the threshold classifiers.

        max_samples/max_features are validated (fraction -> absolute count)
        and stored but not otherwise used by the ordinal decomposition.
        """
        # Convert data
        X, y = check_X_y(X, y, ['csr', 'csc'])

        # Remap output
        n_samples, self.n_features_ = X.shape
        self._n_samples = n_samples
        y = self._validate_y(y)

        # Check parameters
        self._validate_estimator()

        if max_depth is not None:
            self.base_estimator_.max_depth = max_depth

        # Validate max_samples (float fraction -> absolute row count)
        if max_samples is None:
            max_samples = self.max_samples
        elif not isinstance(max_samples, (numbers.Integral, np.integer)):
            max_samples = int(max_samples * X.shape[0])

        if not (0 < max_samples <= X.shape[0]):
            raise ValueError("max_samples must be in (0, n_samples]")

        # Store validated integer row sampling value
        self._max_samples = max_samples

        # Validate max_features (float fraction -> absolute column count)
        if isinstance(self.max_features, (numbers.Integral, np.integer)):
            max_features = self.max_features
        else:  # float
            max_features = int(self.max_features * self.n_features_)

        if not (0 < max_features <= self.n_features_):
            raise ValueError("max_features must be in (0, n_features]")

        # Store validated integer feature sampling value
        self._max_features = max_features

        # Train one binary sub-classifier per class threshold; each sees a
        # differently binarized y (see _transform_data).
        self.classifiers_ = _build_classifiers(self.base_classifier, self.ordered_class, X, y)

        return self

    def _validate_y(self, y):
        y = column_or_1d(y, warn=True)
        check_classification_targets(y)
        self.classes_ = np.array(self.ordered_class)
        self.n_classes_ = len(self.classes_)

        return y

    def predict(self, X):
        """Predict the class with the highest recombined probability.

        Returns
        -------
        y : array of shape = [n_samples]
        """
        predicted_probability = self.predict_proba(X)
        return self.classes_.take((np.argmax(predicted_probability, axis=1)),
                                  axis=0)

    def predict_proba(self, X):
        """Per-class probabilities recombined from the threshold classifiers.

        Returns
        -------
        p : array of shape = [n_samples, n_classes], columns ordered as
            ``classes_`` (i.e. ``ordered_class``).
        """
        # check_is_fitted(self, "classes_")
        # Check data
        X = check_array(X, accept_sparse=['csr', 'csc'])
        if self.n_features_ != X.shape[1]:
            raise ValueError("Number of features of the model must "
                             "match the input. Model n_features is {0} and "
                             "input n_features is {1}."
                             "".format(self.n_features_, X.shape[1]))

        proba = _predict_proba(self.classifiers_, self.n_classes_, X)

        return proba

    def decision_function(self, X):
        pass
        # TODO: check if we need this!
+ + def print_tree(self,index,out_file='self.classifiers_[index]'): + sklearn.tree.export_graphviz(self.classifiers_[index], out_file=out_file) + # return self.classifiers_[index] \ No newline at end of file diff --git a/PersonalClassifier.py b/PersonalClassifier.py new file mode 100644 index 0000000..3dd9a1f --- /dev/null +++ b/PersonalClassifier.py @@ -0,0 +1,68 @@ +import numbers +import numpy as np +import sklearn +import pandas as pd + +from six import with_metaclass +from abc import ABCMeta +from sklearn.base import ClassifierMixin +from sklearn.ensemble.base import BaseEnsemble +from sklearn.base import clone +from copy import deepcopy +from sklearn.ensemble import RandomForestClassifier +from sklearn.utils import resample + +class PersonalClassifier(with_metaclass(ABCMeta, BaseEnsemble, ClassifierMixin)): + """Base class for oneshot personal clasifier ensemble + + """ + + def __init__(self, + id_index, + base_classifier = RandomForestClassifier(n_estimators=40), + n_upsample = 1, + general_base_classifier = False): + self.base_classifier = base_classifier + self.id_index = id_index + self.personal_classifiers = dict() + self.n_upsample = n_upsample + self.general_base_classifier = general_base_classifier + + + def _make_classifier(self): + classifier = deepcopy(self.base_classifier)#clone(self.base_classifier) + return classifier + + def fit(self, X, y, sample_weight=None): + #Fit general population classifier + if self.general_base_classifier == True: + self.base_classifier.fit(X,y) + + #Fit persinal classifiers + all_voters = pd.DataFrame(X[:,self.id_index]).drop_duplicates() + for voter in all_voters.iterrows(): + voter_classifier = self._make_classifier() + X_v = X[X[:,self.id_index] == voter[1][0]] + y_v = y[X[:,self.id_index] == voter[1][0]] + combined = np.c_[X_v,y_v] + combined_upsample = resample(combined, replace=True, n_samples=self.n_upsample*X_v.shape[0], random_state=0) + X_v = combined_upsample[:,0:X_v.shape[1]] + y_v = 
combined_upsample[:,-1] + if self.general_base_classifier == True: + voter_classifier.partial_fit(X_v, y_v, [1,2,3]) + else: + voter_classifier.fit(X_v, y_v) + self.personal_classifiers[voter[1][0]] = voter_classifier + return self + + def predict(self, X): + n_samples = X.shape[0] + prediction = np.zeros(n_samples) + for i in range(0, n_samples): + voterID = X[i,self.id_index] + if self.personal_classifiers.keys().__contains__(voterID): + voter_classifier = self.personal_classifiers[voterID] + prediction[i] = voter_classifier.predict(np.reshape(X[i,:],(1,X.shape[1])))[0] + else: + prediction[i] = 1 #predict q if we didn't had the voter is training set. + return prediction \ No newline at end of file