diff --git a/README.md b/README.md index 74809d7..38ff4fd 100644 --- a/README.md +++ b/README.md @@ -20,7 +20,7 @@ Now training models.. Plz wait. |*backbone* |*resolution*|*VOCmAP* |*COCOmAP*|*Inference[ms]*|*model*| |:------: |:------------:|:----------:|:-------:|:-------------:|:-----:| |EfficientnetB0(wo/BiFPN)|512 |77.0 |TBD | | | -|EfficientnetB0(w/BiFPN) |512 |79.0 |TBD | | | +|EfficientnetB0(w/BiFPN) |512 |77.2 |TBD | | | |EfficientnetB2(wo/BiFPN)|768 |TBD |TBD | | | |EfficientnetB2(w/BiFPN) |768 |TBD |TBD | | | |EfficientnetB4(wo/BiFPN)|1024 |TBD |TBD | | | diff --git a/eval.ipynb b/eval.ipynb index 8124e25..790b0fb 100644 --- a/eval.ipynb +++ b/eval.ipynb @@ -49,8 +49,8 @@ "train_img_list, train_anno_list, val_img_list, val_anno_list = make_datapath_list(vocpath)\n", "\n", "model=\"efficientdet\"\n", - "backbone = \"efficientnet-b2\"\n", - "scale = 2" + "backbone = \"efficientnet-b0\"\n", + "scale = 1" ] }, { @@ -148,18 +148,18 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": 11, "metadata": {}, "outputs": [ { - "ename": "NameError", - "evalue": "name 'DATASET' is not defined", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0mDATASET\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;34m\"COCO\"\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2\u001b[0m \u001b[0mnum_class\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;36m21\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0mnum_class\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;36m81\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;31mNameError\u001b[0m: name 'DATASET' is not defined" + "name": "stdout", + "output_type": "stream", + "text": [ + "Loaded pretrained weights for efficientnet-b0\n", + "320\n", + "use BiFPN\n", + "loaded the trained weights\n", + "using: cuda:0\n" ] } ], @@ -212,7 +212,7 @@ " map_location={'cuda:0': 'cpu'})\n", "else:\n", " net = EfficientDet(phase=\"inference\", cfg=ssd_cfg, verbose=False, backbone=backbone, useBiFPN=True)\n", - " net_weights = torch.load('./weights/VOC_efficientnet-b2_600_BiFPN_100.pth',\n", + " net_weights = torch.load('./weights/VOC_efficientnet-b0_300_BiFPN_200.pth',\n", " map_location={'cuda:0': 'cpu'})\n", "\n", "net.load_state_dict(net_weights)\n", @@ -227,7 +227,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 12, "metadata": {}, "outputs": [], "source": [ @@ -243,9 +243,17 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 13, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "cuda:0\n" + ] + } + ], "source": [ "from utils.ssd_predict_show import SSDPredictShow\n", "ssd = SSDPredictShow(eval_categories=voc_classes, net=net, device=device, image_size=input_size)" @@ -260,7 +268,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 14, "metadata": { "scrolled": true }, @@ -271,18 +279,65 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 15, "metadata": {}, - "outputs": 
[], + "outputs": [ + { + "data": { + "text/plain": [ + "['../VOCdevkit/VOC2007/JPEGImages/000001.jpg',\n", + " '../VOCdevkit/VOC2007/JPEGImages/000002.jpg',\n", + " '../VOCdevkit/VOC2007/JPEGImages/000003.jpg',\n", + " '../VOCdevkit/VOC2007/JPEGImages/000004.jpg',\n", + " '../VOCdevkit/VOC2007/JPEGImages/000006.jpg',\n", + " '../VOCdevkit/VOC2007/JPEGImages/000008.jpg',\n", + " '../VOCdevkit/VOC2007/JPEGImages/000010.jpg',\n", + " '../VOCdevkit/VOC2007/JPEGImages/000011.jpg',\n", + " '../VOCdevkit/VOC2007/JPEGImages/000013.jpg',\n", + " '../VOCdevkit/VOC2007/JPEGImages/000014.jpg']" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "val_img_list[0:10]" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 16, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "iter: 0\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/home/ubuntu/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages/torch/nn/functional.py:2457: UserWarning: nn.functional.upsample is deprecated. Use nn.functional.interpolate instead.\n", + " warnings.warn(\"nn.functional.upsample is deprecated. Use nn.functional.interpolate instead.\")\n", + "/home/ubuntu/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages/torch/nn/functional.py:2539: UserWarning: Default upsampling behavior when mode=bilinear is changed to align_corners=False since 0.4.0. Please specify align_corners=True if the old behavior is desired. See the documentation of nn.Upsample for details.\n", + " \"See the documentation of nn.Upsample for details.\".format(mode))\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "iter: 1000\n", + "iter: 3000\n", + "iter: 4000\n" + ] + } + ], "source": [ "for i, imp in enumerate(val_img_list):\n", " detections, pre_dict_label_index = ssd.ssd_predict2(imp, data_confidence_level=0.05)\n", @@ -302,9 +357,30 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 17, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "[array([], shape=(0, 5), dtype=float64),\n", + " array([], shape=(0, 5), dtype=float64),\n", + " array([], shape=(0, 5), dtype=float64),\n", + " array([], shape=(0, 5), dtype=float64),\n", + " array([], shape=(0, 5), dtype=float64),\n", + " array([], shape=(0, 5), dtype=float64),\n", + " array([], shape=(0, 5), dtype=float64),\n", + " [array([ 0.7535214, 137.66829 , 84.52406 , 326.27124 , 302.4893 ],\n", + " dtype=float32)],\n", + " array([], shape=(0, 5), dtype=float64),\n", + " array([], shape=(0, 5), dtype=float64)]" + ] + }, + "execution_count": 17, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "all_boxes[7][0:10]" ] @@ -318,7 +394,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 18, "metadata": {}, "outputs": [], "source": [ @@ -476,7 +552,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 19, "metadata": {}, "outputs": [], "source": [ @@ -497,7 +573,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 20, "metadata": {}, "outputs": [], "source": [ @@ -585,7 +661,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 21, "metadata": {}, "outputs": [], "source": [ @@ -594,9 +670,49 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 22, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", 
+ "output_type": "stream", + "text": [ + "Writing aeroplane VOC results file\n", + "Writing bicycle VOC results file\n", + "Writing bird VOC results file\n", + "Writing boat VOC results file\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/home/ubuntu/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages/ipykernel_launcher.py:21: DeprecationWarning: elementwise == comparison failed; this will raise an error in the future.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Writing bottle VOC results file\n", + "Writing bus VOC results file\n", + "Writing car VOC results file\n", + "Writing cat VOC results file\n", + "Writing chair VOC results file\n", + "Writing cow VOC results file\n", + "Writing diningtable VOC results file\n", + "Writing dog VOC results file\n", + "Writing horse VOC results file\n", + "Writing motorbike VOC results file\n", + "Writing person VOC results file\n", + "Writing pottedplant VOC results file\n", + "Writing sheep VOC results file\n", + "Writing sofa VOC results file\n", + "Writing train VOC results file\n", + "Writing tvmonitor VOC results file\n" + ] + } + ], "source": [ "write_voc_results_file(pascal_classes, all_boxes, val_img_list)" ] @@ -610,7 +726,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 23, "metadata": {}, "outputs": [], "source": [ @@ -663,9 +779,69 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 24, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "VOC07 metric? No\n", + "AP for aeroplane = 0.8093\n", + "AP for bicycle = 0.8631\n", + "AP for bird = 0.7861\n", + "AP for boat = 0.7077\n", + "AP for bottle = 0.4031\n", + "AP for bus = 0.8470\n", + "AP for car = 0.8519\n", + "AP for cat = 0.9062\n", + "AP for chair = 0.5554\n", + "AP for cow = 0.8457\n", + "AP for diningtable = 0.7194\n", + "AP for dog = 0.9009\n", + "AP for horse = 0.8980\n", + "AP for motorbike = 0.8618\n", + "AP for person = 0.7593\n", + "AP for pottedplant = 0.4591\n", + "AP for sheep = 0.8089\n", + "AP for sofa = 0.8020\n", + "AP for train = 0.8793\n", + "AP for tvmonitor = 0.7795\n", + "Mean AP = 0.7722\n", + "~~~~~~~~\n", + "Results:\n", + "0.809\n", + "0.863\n", + "0.786\n", + "0.708\n", + "0.403\n", + "0.847\n", + "0.852\n", + "0.906\n", + "0.555\n", + "0.846\n", + "0.719\n", + "0.901\n", + "0.898\n", + "0.862\n", + "0.759\n", + "0.459\n", + "0.809\n", + "0.802\n", + "0.879\n", + "0.779\n", + "0.772\n", + "~~~~~~~~\n", + "\n", + "--------------------------------------------------------------\n", + "Results computed with the **unofficial** Python eval code.\n", + "Results should be very close to the official MATLAB eval code.\n", + "Recompute with `./tools/reval.py --matlab ...` for your paper.\n", + "-- Thanks, The Management\n", + "--------------------------------------------------------------\n" + ] + } + ], "source": [ "# evaluate detections\n", "python_eval()" diff --git a/log/VOC_efficientnet-b0_300log_output.csv b/log/VOC_efficientnet-b0_300log_output.csv new file mode 100644 index 0000000..3329d41 --- /dev/null +++ b/log/VOC_efficientnet-b0_300log_output.csv @@ -0,0 +1,202 @@ +,epoch,train_loss,val_loss +0,1,4191.018894672394,0.0 +1,2,3208.291733264923,0.0 +2,3,2891.90118598938,0.0 +3,4,2695.5930576324463,0.0 +4,5,2559.603127479553,0.0 +5,6,2462.9198610782623,0.0 +6,7,2391.164759874344,0.0 +7,8,2328.399730205536,0.0 +8,9,2275.6866767406464,0.0 +9,10,2233.376097202301,635.859982252121 
+10,11,2191.5712382793427,0.0 +11,12,2162.7914848327637,0.0 +12,13,2140.0701899528503,0.0 +13,14,2097.4003031253815,0.0 +14,15,2082.4217534065247,0.0 +15,16,2063.780701160431,0.0 +16,17,2029.6269252300262,0.0 +17,18,2012.1134586334229,0.0 +18,19,1989.255601644516,0.0 +19,20,1983.725335597992,562.1011707782745 +20,21,1968.0882499217987,0.0 +21,22,1942.0924394130707,0.0 +22,23,1936.5521280765533,0.0 +23,24,1906.5480768680573,0.0 +24,25,1898.1282017230988,0.0 +25,26,1891.6207494735718,0.0 +26,27,1880.703584909439,0.0 +27,28,1860.4774129390717,0.0 +28,29,1852.2044532299042,0.0 +29,30,1837.552724123001,533.4152498245239 +30,31,1839.8368878364563,0.0 +31,32,1822.616930961609,0.0 +32,33,1812.2739923000336,0.0 +33,34,1802.1683847904205,0.0 +34,35,1785.6982197761536,0.0 +35,36,1787.7535145282745,0.0 +36,37,1774.204342842102,0.0 +37,38,1768.1708118915558,0.0 +38,39,1758.2914674282074,0.0 +39,40,1752.1981842517853,527.7736353874207 +40,41,1756.8646051883698,0.0 +41,42,1741.9479384422302,0.0 +42,43,1728.1174912452698,0.0 +43,44,1716.4083013534546,0.0 +44,45,1715.0774536132812,0.0 +45,46,1710.8392703533173,0.0 +46,47,1708.3408176898956,0.0 +47,48,1693.5347616672516,0.0 +48,49,1684.4491548538208,0.0 +49,50,1681.7612891197205,513.7225489616394 +50,51,1680.8716859817505,0.0 +51,52,1662.2139685153961,0.0 +52,53,1663.960535287857,0.0 +53,54,1656.665378332138,0.0 +54,55,1651.2554976940155,0.0 +55,56,1647.0551013946533,0.0 +56,57,1632.1256108283997,0.0 +57,58,1632.6223022937775,0.0 +58,59,1628.840321302414,0.0 +59,60,1628.2087075710297,491.5274729728699 +60,61,1620.1345798969269,0.0 +61,62,1613.4061210155487,0.0 +62,63,1606.9150323867798,0.0 +63,64,1606.5417530536652,0.0 +64,65,1601.3724563121796,0.0 +65,66,1596.4282891750336,0.0 +66,67,1586.320986032486,0.0 +67,68,1579.0204982757568,0.0 +68,69,1579.5337283611298,0.0 +69,70,1571.4263033866882,480.57652759552 +70,71,1569.4709496498108,0.0 +71,72,1571.8604443073273,0.0 +72,73,1566.6611363887787,0.0 +73,74,1560.4627485275269,0.0 +74,75,1552.3694615364075,0.0 +75,76,1547.2304124832153,0.0 +76,77,1541.1446621418,0.0 +77,78,1539.9733655452728,0.0 +78,79,1541.0307025909424,0.0 +79,80,1534.8119056224823,478.9340636730194 +80,81,1528.048803806305,0.0 +81,82,1526.3381695747375,0.0 +82,83,1523.4687705039978,0.0 +83,84,1522.2154624462128,0.0 +84,85,1519.2776083946228,0.0 +85,86,1520.1784255504608,0.0 +86,87,1510.9549901485443,0.0 +87,88,1498.7584223747253,0.0 +88,89,1507.0955646038055,0.0 +89,90,1494.6895451545715,467.9532964229584 +90,91,1492.367690563202,0.0 +91,92,1489.5928783416748,0.0 +92,93,1492.7413516044617,0.0 +93,94,1487.6735711097717,0.0 +94,95,1483.4847152233124,0.0 +95,96,1478.8455131053925,0.0 +96,97,1481.0820325613022,0.0 +97,98,1470.2935826778412,0.0 +98,99,1476.2892144918442,0.0 +99,100,1464.997254371643,479.021919965744 +100,101,1465.6706683635712,0.0 +101,102,1462.3130745887756,0.0 +102,103,1464.0673986673355,0.0 +103,104,1454.5691764354706,0.0 +104,105,1454.6508476734161,0.0 +105,106,1455.331818819046,0.0 +106,107,1457.416954278946,0.0 +107,108,1451.2384159564972,0.0 +108,109,1444.4895584583282,0.0 +109,110,1437.1545176506042,465.4479022026062 +110,111,1430.6126811504364,0.0 +111,112,1425.2806899547577,0.0 +112,113,1437.3635444641113,0.0 +113,114,1424.916005373001,0.0 +114,115,1426.710844874382,0.0 +115,116,1428.7636272907257,0.0 +116,117,1422.9889876842499,0.0 +117,118,1415.9637871980667,0.0 +118,119,1423.1539200544357,0.0 +119,120,1410.3804932832718,466.18494987487793 +120,121,1386.7728679180145,0.0 +121,122,1374.5213080644608,0.0 
+122,123,1379.3579845428467,0.0 +123,124,1370.7275975942612,0.0 +124,125,1369.4099550247192,0.0 +125,126,1365.8796694278717,0.0 +126,127,1367.217402935028,0.0 +127,128,1363.7538449764252,0.0 +128,129,1364.0667176246643,0.0 +129,130,1362.1786986589432,462.15464210510254 +130,131,1362.9910786151886,0.0 +131,132,1361.8042430877686,0.0 +132,133,1352.655747294426,0.0 +133,134,1351.003624200821,0.0 +134,135,1356.0517230033875,0.0 +135,136,1353.475989818573,0.0 +136,137,1351.127161026001,0.0 +137,138,1347.939881682396,0.0 +138,139,1355.146633386612,0.0 +139,140,1352.7669841051102,454.6317472457886 +140,141,1355.9812222719193,0.0 +141,142,1356.605939745903,0.0 +142,143,1350.6262387037277,0.0 +143,144,1346.3498272895813,0.0 +144,145,1355.0814954042435,0.0 +145,146,1345.863077044487,0.0 +146,147,1343.675259232521,0.0 +147,148,1345.7298988103867,0.0 +148,149,1343.2370958328247,0.0 +149,150,1345.3904041051865,464.66852378845215 +150,151,1349.5347521305084,0.0 +151,152,1345.9551899433136,0.0 +152,153,1346.0116629600525,0.0 +153,154,1346.0393829345703,0.0 +154,155,1349.127351641655,0.0 +155,156,1344.2950706481934,0.0 +156,157,1335.4670921564102,0.0 +157,158,1346.1436307430267,0.0 +158,159,1340.7762662172318,0.0 +159,160,1342.7488824129105,450.3588137626648 +160,161,1335.0512247085571,0.0 +161,162,1338.2180701494217,0.0 +162,163,1336.874949812889,0.0 +163,164,1339.440555214882,0.0 +164,165,1337.0111018419266,0.0 +165,166,1333.1809544563293,0.0 +166,167,1332.588035106659,0.0 +167,168,1337.4649494886398,0.0 +168,169,1334.1348651647568,0.0 +169,170,1335.2681194543839,454.15646481513977 +170,171,1333.7099833488464,0.0 +171,172,1334.1965676546097,0.0 +172,173,1332.8124705553055,0.0 +173,174,1333.2414445877075,0.0 +174,175,1336.148027896881,0.0 +175,176,1341.7645715475082,0.0 +176,177,1332.1233954429626,0.0 +177,178,1330.6264842748642,0.0 +178,179,1333.9608628749847,0.0 +179,180,1324.8475515842438,451.2165684700012 +180,181,1331.4648576974869,0.0 +181,182,1327.3915095329285,0.0 +182,183,1327.0214451551437,0.0 +183,184,1328.280256986618,0.0 +184,185,1322.4660276174545,0.0 +185,186,1326.7145321369171,0.0 +186,187,1324.261370897293,0.0 +187,188,1330.1805911064148,0.0 +188,189,1326.5842658281326,0.0 +189,190,1328.1316194534302,456.16129446029663 +190,191,1332.8988428115845,0.0 +191,192,1329.4629385471344,0.0 +192,193,1322.6057691574097,0.0 +193,194,1333.1927734613419,0.0 +194,195,1320.952076435089,0.0 +195,196,1328.0999420881271,0.0 +196,197,1317.3528127670288,0.0 +197,198,1319.3398444652557,0.0 +198,199,1323.5927567481995,0.0 +199,200,1326.5833152532578,450.3784935474396 +200,201,1327.549081325531,0.0 diff --git a/log/VOC_efficientnet-b2_300log_output.csv b/log/VOC_efficientnet-b2_300log_output.csv new file mode 100644 index 0000000..89fe025 --- /dev/null +++ b/log/VOC_efficientnet-b2_300log_output.csv @@ -0,0 +1,123 @@ +,epoch,train_loss,val_loss +0,1,4080.546498775482,0.0 +1,2,3357.0347900390625,0.0 +2,3,3100.452305316925,0.0 +3,4,2915.817193031311,0.0 +4,5,2787.773383617401,0.0 +5,6,2710.0842905044556,0.0 +6,7,2621.9838876724243,0.0 +7,8,2555.2802696228027,0.0 +8,9,2498.0551686286926,0.0 +9,10,2481.6380763053894,721.794195652008 +10,11,2457.6362590789795,0.0 +11,12,2397.594162940979,0.0 +12,13,2372.768909215927,0.0 +13,14,2349.8119065761566,0.0 +14,15,2316.371333360672,0.0 +15,16,2296.465172767639,0.0 +16,17,2273.3958065509796,0.0 +17,18,2342.7093346118927,0.0 +18,19,2388.3940556049347,0.0 +19,20,2278.9284472465515,651.411762714386 +20,21,2236.573585510254,0.0 +21,22,2206.304221868515,0.0 
+22,23,2176.7432882785797,0.0 +23,24,2157.8942172527313,0.0 +24,25,2140.7692694664,0.0 +25,26,2121.6442954540253,0.0 +26,27,2109.5080564022064,0.0 +27,28,2082.154736995697,0.0 +28,29,2074.0109679698944,0.0 +29,30,2055.57546544075,591.7532317638397 +30,31,2049.73934841156,0.0 +31,32,2042.176113128662,0.0 +32,33,2013.1486585140228,0.0 +33,34,2021.4329919815063,0.0 +34,35,2004.6909937858582,0.0 +35,36,1987.8399419784546,0.0 +36,37,1981.9751126766205,0.0 +37,38,1978.881615638733,0.0 +38,39,1970.5005178451538,0.0 +39,40,1970.4495074748993,567.8454167842865 +40,41,1953.169471502304,0.0 +41,42,1944.5477933883667,0.0 +42,43,1943.5476791858673,0.0 +43,44,1930.6251966953278,0.0 +44,45,1937.883210659027,0.0 +45,46,1928.1097190380096,0.0 +46,47,1920.1050381660461,0.0 +47,48,1914.3854622840881,0.0 +48,49,1920.1651501655579,0.0 +49,50,1912.8063135147095,554.1709237098694 +50,51,1916.3988749980927,0.0 +51,52,1907.5016129016876,0.0 +52,53,1893.7956924438477,0.0 +53,54,1903.0384695529938,0.0 +54,55,1907.509554862976,0.0 +55,56,1881.4611706733704,0.0 +56,57,1880.284513950348,0.0 +57,58,1888.3071002960205,0.0 +58,59,1875.4002630710602,0.0 +59,60,1877.9063906669617,545.7643139362335 +60,61,1864.0616507530212,0.0 +61,62,1869.4361650943756,0.0 +62,63,1866.822259426117,0.0 +63,64,1854.8717441558838,0.0 +64,65,1843.2391996383667,0.0 +65,66,1825.6913306713104,0.0 +66,67,1826.346867799759,0.0 +67,68,1824.1016137599945,0.0 +68,69,1812.414344072342,0.0 +69,70,1804.1277635097504,530.7630858421326 +70,71,1807.6722190380096,0.0 +71,72,1804.6604704856873,0.0 +72,73,1816.3653264045715,0.0 +73,74,1814.3766193389893,0.0 +74,75,1807.037192106247,0.0 +75,76,1791.4761283397675,0.0 +76,77,1786.2714273929596,0.0 +77,78,1785.2875399589539,0.0 +78,79,1766.2459998130798,0.0 +79,80,1756.0367708206177,523.8822541236877 +80,81,1762.1240818500519,0.0 +81,82,1752.2986652851105,0.0 +82,83,1748.511078596115,0.0 +83,84,1740.7923076152802,0.0 +84,85,1715.833832502365,0.0 +85,86,1723.006133556366,0.0 +86,87,1733.6477625370026,0.0 +87,88,1728.6577792167664,0.0 +88,89,1731.19802236557,0.0 +89,90,1727.2793402671814,508.0979163646698 +90,91,1730.4382228851318,0.0 +91,92,1720.0883026123047,0.0 +92,93,1707.6173305511475,0.0 +93,94,1712.9860563278198,0.0 +94,95,1713.4753861427307,0.0 +95,96,1700.3397679328918,0.0 +96,97,1687.8762998580933,0.0 +97,98,1693.1143724918365,0.0 +98,99,1685.6889843940735,0.0 +99,100,1672.838994026184,500.3957133293152 +100,101,1681.1322660446167,0.0 +101,102,1670.6560609340668,0.0 +102,103,1672.5301184654236,0.0 +103,104,1673.8641622066498,0.0 +104,105,1674.050901889801,0.0 +105,106,1663.4111099243164,0.0 +106,107,1659.8907821178436,0.0 +107,108,1670.6921529769897,0.0 +108,109,1657.5689063072205,0.0 +109,110,1650.6779477596283,499.2176616191864 +110,111,1639.6299970149994,0.0 +111,112,1643.811627626419,0.0 +112,113,1649.5680134296417,0.0 +113,114,1647.450050830841,0.0 +114,115,1635.5424242019653,0.0 +115,116,1632.5112507343292,0.0 +116,117,1635.2805507183075,0.0 +117,118,1625.9529983997345,0.0 +118,119,1629.5350003242493,0.0 +119,120,1619.2213261127472,489.54677057266235 +120,121,1589.59574842453,0.0 +121,122,1586.3031919002533,0.0 diff --git a/log/freeze_b0.csv b/log/freeze_b0.csv new file mode 100644 index 0000000..3ba72c5 --- /dev/null +++ b/log/freeze_b0.csv @@ -0,0 +1,90 @@ +,epoch,train_loss,val_loss +0,1,4267.480539798737,0.0 +1,2,3352.640256881714,0.0 +2,3,3108.117500782013,0.0 +3,4,2929.919186115265,0.0 +4,5,2822.293752670288,0.0 +5,6,2732.765166759491,0.0 +6,7,2666.690396308899,0.0 +7,8,2600.083775997162,0.0 
+8,9,2562.0891723632812,0.0 +9,10,2522.9051456451416,719.8962407112122 +10,11,2495.27193069458,0.0 +11,12,2461.446991443634,0.0 +12,13,2440.2641773223877,0.0 +13,14,2429.7539319992065,0.0 +14,15,2401.8078305721283,0.0 +15,16,2372.8990461826324,0.0 +16,17,2357.4049956798553,0.0 +17,18,2346.725809812546,0.0 +18,19,2324.845602989197,0.0 +19,20,2332.188576936722,658.3294279575348 +20,21,2307.5619056224823,0.0 +21,22,2293.461701631546,0.0 +22,23,2281.30326962471,0.0 +23,24,2272.2050976753235,0.0 +24,25,2265.5049970149994,0.0 +25,26,2255.9774856567383,0.0 +26,27,2246.4105837345123,0.0 +27,28,2238.7843277454376,0.0 +28,29,2226.619653940201,0.0 +29,30,2222.0341572761536,635.9737462997437 +30,31,2214.860984325409,0.0 +31,32,2195.5047976970673,0.0 +32,33,2190.4324781894684,0.0 +33,34,2190.2079117298126,0.0 +34,35,2187.931234359741,0.0 +35,36,2178.6109960079193,0.0 +36,37,2171.890043258667,0.0 +37,38,2161.689619064331,0.0 +38,39,2160.78453540802,0.0 +39,40,2153.3485515117645,622.700211763382 +40,41,2154.636126756668,0.0 +41,42,2141.4525010585785,0.0 +42,43,2143.406218290329,0.0 +43,44,2138.6465327739716,0.0 +44,45,2133.728410720825,0.0 +45,46,2131.1718697547913,0.0 +46,47,2120.5101442337036,0.0 +47,48,2115.8950040340424,0.0 +48,49,2119.922224998474,0.0 +49,50,2110.409837961197,599.7404828071594 +50,51,2111.5042457580566,0.0 +51,52,2099.0852568149567,0.0 +52,53,2103.0030879974365,0.0 +53,54,2101.872190475464,0.0 +54,55,2092.919868707657,0.0 +55,56,2088.959501504898,0.0 +56,57,2083.9934573173523,0.0 +57,58,2086.0683255195618,0.0 +58,59,2087.5189938545227,0.0 +59,60,2074.1816816329956,588.3181626796722 +60,61,2071.6009800434113,0.0 +61,62,2070.8576834201813,0.0 +62,63,2069.9420306682587,0.0 +63,64,2056.761828184128,0.0 +64,65,2068.212695121765,0.0 +65,66,2055.835328102112,0.0 +66,67,2056.2286257743835,0.0 +67,68,2050.788646221161,0.0 +68,69,2057.1840374469757,0.0 +69,70,2051.821357727051,583.698784828186 +70,71,2041.6475853919983,0.0 +71,72,2041.1317002773285,0.0 +72,73,2042.514305114746,0.0 +73,74,2037.2027022838593,0.0 +74,75,2031.3412942886353,0.0 +75,76,2038.504406452179,0.0 +76,77,2030.4000315666199,0.0 +77,78,2028.791092634201,0.0 +78,79,2028.162209033966,0.0 +79,80,2026.697705745697,577.7652583122253 +80,81,2028.729036808014,0.0 +81,82,2021.037141084671,0.0 +82,83,2015.1633405685425,0.0 +83,84,2020.8585810661316,0.0 +84,85,2017.1045317649841,0.0 +85,86,2007.5159180164337,0.0 +86,87,2001.9440939426422,0.0 +87,88,2007.5213589668274,0.0 +88,89,1999.7022178173065,0.0 diff --git a/log_output.csv b/log_output.csv index 9185cfb..3ba72c5 100644 --- a/log_output.csv +++ b/log_output.csv @@ -1,17 +1,90 @@ ,epoch,train_loss,val_loss -0,1,8116.442349433899,0.0 -1,2,6614.853562831879,0.0 -2,3,6204.870180130005,0.0 -3,4,5950.069442749023,0.0 -4,5,5790.543559074402,0.0 -5,6,5678.689483642578,0.0 -6,7,5571.751577377319,0.0 -7,8,5503.650438308716,0.0 -8,9,5476.726466655731,0.0 -9,10,5406.832299232483,1519.4685802459717 -10,11,5344.693814754486,0.0 -11,12,5323.968416690826,0.0 -12,13,5291.520055770874,0.0 -13,14,5254.536097288132,0.0 -14,15,5214.717974662781,0.0 -15,16,5194.190457344055,0.0 +0,1,4267.480539798737,0.0 +1,2,3352.640256881714,0.0 +2,3,3108.117500782013,0.0 +3,4,2929.919186115265,0.0 +4,5,2822.293752670288,0.0 +5,6,2732.765166759491,0.0 +6,7,2666.690396308899,0.0 +7,8,2600.083775997162,0.0 +8,9,2562.0891723632812,0.0 +9,10,2522.9051456451416,719.8962407112122 +10,11,2495.27193069458,0.0 +11,12,2461.446991443634,0.0 +12,13,2440.2641773223877,0.0 +13,14,2429.7539319992065,0.0 
+14,15,2401.8078305721283,0.0 +15,16,2372.8990461826324,0.0 +16,17,2357.4049956798553,0.0 +17,18,2346.725809812546,0.0 +18,19,2324.845602989197,0.0 +19,20,2332.188576936722,658.3294279575348 +20,21,2307.5619056224823,0.0 +21,22,2293.461701631546,0.0 +22,23,2281.30326962471,0.0 +23,24,2272.2050976753235,0.0 +24,25,2265.5049970149994,0.0 +25,26,2255.9774856567383,0.0 +26,27,2246.4105837345123,0.0 +27,28,2238.7843277454376,0.0 +28,29,2226.619653940201,0.0 +29,30,2222.0341572761536,635.9737462997437 +30,31,2214.860984325409,0.0 +31,32,2195.5047976970673,0.0 +32,33,2190.4324781894684,0.0 +33,34,2190.2079117298126,0.0 +34,35,2187.931234359741,0.0 +35,36,2178.6109960079193,0.0 +36,37,2171.890043258667,0.0 +37,38,2161.689619064331,0.0 +38,39,2160.78453540802,0.0 +39,40,2153.3485515117645,622.700211763382 +40,41,2154.636126756668,0.0 +41,42,2141.4525010585785,0.0 +42,43,2143.406218290329,0.0 +43,44,2138.6465327739716,0.0 +44,45,2133.728410720825,0.0 +45,46,2131.1718697547913,0.0 +46,47,2120.5101442337036,0.0 +47,48,2115.8950040340424,0.0 +48,49,2119.922224998474,0.0 +49,50,2110.409837961197,599.7404828071594 +50,51,2111.5042457580566,0.0 +51,52,2099.0852568149567,0.0 +52,53,2103.0030879974365,0.0 +53,54,2101.872190475464,0.0 +54,55,2092.919868707657,0.0 +55,56,2088.959501504898,0.0 +56,57,2083.9934573173523,0.0 +57,58,2086.0683255195618,0.0 +58,59,2087.5189938545227,0.0 +59,60,2074.1816816329956,588.3181626796722 +60,61,2071.6009800434113,0.0 +61,62,2070.8576834201813,0.0 +62,63,2069.9420306682587,0.0 +63,64,2056.761828184128,0.0 +64,65,2068.212695121765,0.0 +65,66,2055.835328102112,0.0 +66,67,2056.2286257743835,0.0 +67,68,2050.788646221161,0.0 +68,69,2057.1840374469757,0.0 +69,70,2051.821357727051,583.698784828186 +70,71,2041.6475853919983,0.0 +71,72,2041.1317002773285,0.0 +72,73,2042.514305114746,0.0 +73,74,2037.2027022838593,0.0 +74,75,2031.3412942886353,0.0 +75,76,2038.504406452179,0.0 +76,77,2030.4000315666199,0.0 +77,78,2028.791092634201,0.0 +78,79,2028.162209033966,0.0 +79,80,2026.697705745697,577.7652583122253 +80,81,2028.729036808014,0.0 +81,82,2021.037141084671,0.0 +82,83,2015.1633405685425,0.0 +83,84,2020.8585810661316,0.0 +84,85,2017.1045317649841,0.0 +85,86,2007.5159180164337,0.0 +86,87,2001.9440939426422,0.0 +87,88,2007.5213589668274,0.0 +88,89,1999.7022178173065,0.0 diff --git a/train_efficientdet.ipynb b/train_efficientdet.ipynb index 147e386..a1b0ff3 100644 --- a/train_efficientdet.ipynb +++ b/train_efficientdet.ipynb @@ -50,8 +50,8 @@ "## meta settings\n", "\n", "# select from efficientnet backbone or resnet backbone\n", - "backbone = \"efficientnet-b0\"\n", - "scale = 2\n", + "backbone = \"efficientnet-b2\"\n", + "scale = 1\n", "# scale==1: resolution 300\n", "# scale==2: resolution 600\n", "useBiFPN = True\n", @@ -157,8 +157,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "torch.Size([16, 3, 512, 512])\n", - "16\n", + "torch.Size([32, 3, 300, 300])\n", + "32\n", "torch.Size([1, 5])\n" ] } @@ -197,465 +197,10 @@ "name": "stdout", "output_type": "stream", "text": [ - "Loaded pretrained weights for efficientnet-b0\n", - "EfficientNet(\n", - " (_conv_stem): Conv2dStaticSamePadding(\n", - " 3, 32, kernel_size=(3, 3), stride=(2, 2), bias=False\n", - " (static_padding): ZeroPad2d(padding=(0, 1, 0, 1), value=0.0)\n", - " )\n", - " (_bn0): BatchNorm2d(32, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_blocks): ModuleList(\n", - " (0): MBConvBlock(\n", - " (_depthwise_conv): Conv2dStaticSamePadding(\n", - " 32, 32, kernel_size=(3, 
3), stride=[1, 1], groups=32, bias=False\n", - " (static_padding): ZeroPad2d(padding=(1, 1, 1, 1), value=0.0)\n", - " )\n", - " (_bn1): BatchNorm2d(32, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_se_reduce): Conv2dStaticSamePadding(\n", - " 32, 8, kernel_size=(1, 1), stride=(1, 1)\n", - " (static_padding): Identity()\n", - " )\n", - " (_se_expand): Conv2dStaticSamePadding(\n", - " 8, 32, kernel_size=(1, 1), stride=(1, 1)\n", - " (static_padding): Identity()\n", - " )\n", - " (_project_conv): Conv2dStaticSamePadding(\n", - " 32, 16, kernel_size=(1, 1), stride=(1, 1), bias=False\n", - " (static_padding): Identity()\n", - " )\n", - " (_bn2): BatchNorm2d(16, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_swish): MemoryEfficientSwish()\n", - " )\n", - " (1): MBConvBlock(\n", - " (_expand_conv): Conv2dStaticSamePadding(\n", - " 16, 96, kernel_size=(1, 1), stride=(1, 1), bias=False\n", - " (static_padding): Identity()\n", - " )\n", - " (_bn0): BatchNorm2d(96, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_depthwise_conv): Conv2dStaticSamePadding(\n", - " 96, 96, kernel_size=(3, 3), stride=[2, 2], groups=96, bias=False\n", - " (static_padding): ZeroPad2d(padding=(0, 1, 0, 1), value=0.0)\n", - " )\n", - " (_bn1): BatchNorm2d(96, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_se_reduce): Conv2dStaticSamePadding(\n", - " 96, 4, kernel_size=(1, 1), stride=(1, 1)\n", - " (static_padding): Identity()\n", - " )\n", - " (_se_expand): Conv2dStaticSamePadding(\n", - " 4, 96, kernel_size=(1, 1), stride=(1, 1)\n", - " (static_padding): Identity()\n", - " )\n", - " (_project_conv): Conv2dStaticSamePadding(\n", - " 96, 24, kernel_size=(1, 1), stride=(1, 1), bias=False\n", - " (static_padding): Identity()\n", - " )\n", - " (_bn2): BatchNorm2d(24, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_swish): MemoryEfficientSwish()\n", - " )\n", - " (2): MBConvBlock(\n", - " (_expand_conv): Conv2dStaticSamePadding(\n", - " 24, 144, kernel_size=(1, 1), stride=(1, 1), bias=False\n", - " (static_padding): Identity()\n", - " )\n", - " (_bn0): BatchNorm2d(144, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_depthwise_conv): Conv2dStaticSamePadding(\n", - " 144, 144, kernel_size=(3, 3), stride=(1, 1), groups=144, bias=False\n", - " (static_padding): ZeroPad2d(padding=(1, 1, 1, 1), value=0.0)\n", - " )\n", - " (_bn1): BatchNorm2d(144, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_se_reduce): Conv2dStaticSamePadding(\n", - " 144, 6, kernel_size=(1, 1), stride=(1, 1)\n", - " (static_padding): Identity()\n", - " )\n", - " (_se_expand): Conv2dStaticSamePadding(\n", - " 6, 144, kernel_size=(1, 1), stride=(1, 1)\n", - " (static_padding): Identity()\n", - " )\n", - " (_project_conv): Conv2dStaticSamePadding(\n", - " 144, 24, kernel_size=(1, 1), stride=(1, 1), bias=False\n", - " (static_padding): Identity()\n", - " )\n", - " (_bn2): BatchNorm2d(24, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_swish): MemoryEfficientSwish()\n", - " )\n", - " (3): MBConvBlock(\n", - " (_expand_conv): Conv2dStaticSamePadding(\n", - " 24, 144, kernel_size=(1, 1), stride=(1, 1), bias=False\n", - " (static_padding): Identity()\n", - " )\n", - " (_bn0): BatchNorm2d(144, eps=0.001, 
momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_depthwise_conv): Conv2dStaticSamePadding(\n", - " 144, 144, kernel_size=(5, 5), stride=[2, 2], groups=144, bias=False\n", - " (static_padding): ZeroPad2d(padding=(1, 2, 1, 2), value=0.0)\n", - " )\n", - " (_bn1): BatchNorm2d(144, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_se_reduce): Conv2dStaticSamePadding(\n", - " 144, 6, kernel_size=(1, 1), stride=(1, 1)\n", - " (static_padding): Identity()\n", - " )\n", - " (_se_expand): Conv2dStaticSamePadding(\n", - " 6, 144, kernel_size=(1, 1), stride=(1, 1)\n", - " (static_padding): Identity()\n", - " )\n", - " (_project_conv): Conv2dStaticSamePadding(\n", - " 144, 40, kernel_size=(1, 1), stride=(1, 1), bias=False\n", - " (static_padding): Identity()\n", - " )\n", - " (_bn2): BatchNorm2d(40, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_swish): MemoryEfficientSwish()\n", - " )\n", - " (4): MBConvBlock(\n", - " (_expand_conv): Conv2dStaticSamePadding(\n", - " 40, 240, kernel_size=(1, 1), stride=(1, 1), bias=False\n", - " (static_padding): Identity()\n", - " )\n", - " (_bn0): BatchNorm2d(240, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_depthwise_conv): Conv2dStaticSamePadding(\n", - " 240, 240, kernel_size=(5, 5), stride=(1, 1), groups=240, bias=False\n", - " (static_padding): ZeroPad2d(padding=(2, 2, 2, 2), value=0.0)\n", - " )\n", - " (_bn1): BatchNorm2d(240, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_se_reduce): Conv2dStaticSamePadding(\n", - " 240, 10, kernel_size=(1, 1), stride=(1, 1)\n", - " (static_padding): Identity()\n", - " )\n", - " (_se_expand): Conv2dStaticSamePadding(\n", - " 10, 240, kernel_size=(1, 1), stride=(1, 1)\n", - " (static_padding): Identity()\n", - " )\n", - " (_project_conv): Conv2dStaticSamePadding(\n", - " 240, 40, kernel_size=(1, 1), stride=(1, 1), bias=False\n", - " (static_padding): Identity()\n", - " )\n", - " (_bn2): BatchNorm2d(40, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_swish): MemoryEfficientSwish()\n", - " )\n", - " (5): MBConvBlock(\n", - " (_expand_conv): Conv2dStaticSamePadding(\n", - " 40, 240, kernel_size=(1, 1), stride=(1, 1), bias=False\n", - " (static_padding): Identity()\n", - " )\n", - " (_bn0): BatchNorm2d(240, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_depthwise_conv): Conv2dStaticSamePadding(\n", - " 240, 240, kernel_size=(3, 3), stride=[2, 2], groups=240, bias=False\n", - " (static_padding): ZeroPad2d(padding=(0, 1, 0, 1), value=0.0)\n", - " )\n", - " (_bn1): BatchNorm2d(240, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_se_reduce): Conv2dStaticSamePadding(\n", - " 240, 10, kernel_size=(1, 1), stride=(1, 1)\n", - " (static_padding): Identity()\n", - " )\n", - " (_se_expand): Conv2dStaticSamePadding(\n", - " 10, 240, kernel_size=(1, 1), stride=(1, 1)\n", - " (static_padding): Identity()\n", - " )\n", - " (_project_conv): Conv2dStaticSamePadding(\n", - " 240, 80, kernel_size=(1, 1), stride=(1, 1), bias=False\n", - " (static_padding): Identity()\n", - " )\n", - " (_bn2): BatchNorm2d(80, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_swish): MemoryEfficientSwish()\n", - " )\n", - " (6): MBConvBlock(\n", - " (_expand_conv): Conv2dStaticSamePadding(\n", - " 
80, 480, kernel_size=(1, 1), stride=(1, 1), bias=False\n", - " (static_padding): Identity()\n", - " )\n", - " (_bn0): BatchNorm2d(480, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_depthwise_conv): Conv2dStaticSamePadding(\n", - " 480, 480, kernel_size=(3, 3), stride=(1, 1), groups=480, bias=False\n", - " (static_padding): ZeroPad2d(padding=(1, 1, 1, 1), value=0.0)\n", - " )\n", - " (_bn1): BatchNorm2d(480, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_se_reduce): Conv2dStaticSamePadding(\n", - " 480, 20, kernel_size=(1, 1), stride=(1, 1)\n", - " (static_padding): Identity()\n", - " )\n", - " (_se_expand): Conv2dStaticSamePadding(\n", - " 20, 480, kernel_size=(1, 1), stride=(1, 1)\n", - " (static_padding): Identity()\n", - " )\n", - " (_project_conv): Conv2dStaticSamePadding(\n", - " 480, 80, kernel_size=(1, 1), stride=(1, 1), bias=False\n", - " (static_padding): Identity()\n", - " )\n", - " (_bn2): BatchNorm2d(80, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_swish): MemoryEfficientSwish()\n", - " )\n", - " (7): MBConvBlock(\n", - " (_expand_conv): Conv2dStaticSamePadding(\n", - " 80, 480, kernel_size=(1, 1), stride=(1, 1), bias=False\n", - " (static_padding): Identity()\n", - " )\n", - " (_bn0): BatchNorm2d(480, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_depthwise_conv): Conv2dStaticSamePadding(\n", - " 480, 480, kernel_size=(3, 3), stride=(1, 1), groups=480, bias=False\n", - " (static_padding): ZeroPad2d(padding=(1, 1, 1, 1), value=0.0)\n", - " )\n", - " (_bn1): BatchNorm2d(480, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_se_reduce): Conv2dStaticSamePadding(\n", - " 480, 20, kernel_size=(1, 1), stride=(1, 1)\n", - " (static_padding): Identity()\n", - " )\n", - " (_se_expand): Conv2dStaticSamePadding(\n", - " 20, 480, kernel_size=(1, 1), stride=(1, 1)\n", - " (static_padding): Identity()\n", - " )\n", - " (_project_conv): Conv2dStaticSamePadding(\n", - " 480, 80, kernel_size=(1, 1), stride=(1, 1), bias=False\n", - " (static_padding): Identity()\n", - " )\n", - " (_bn2): BatchNorm2d(80, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_swish): MemoryEfficientSwish()\n", - " )\n", - " (8): MBConvBlock(\n", - " (_expand_conv): Conv2dStaticSamePadding(\n", - " 80, 480, kernel_size=(1, 1), stride=(1, 1), bias=False\n", - " (static_padding): Identity()\n", - " )\n", - " (_bn0): BatchNorm2d(480, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_depthwise_conv): Conv2dStaticSamePadding(\n", - " 480, 480, kernel_size=(5, 5), stride=[1, 1], groups=480, bias=False\n", - " (static_padding): ZeroPad2d(padding=(2, 2, 2, 2), value=0.0)\n", - " )\n", - " (_bn1): BatchNorm2d(480, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_se_reduce): Conv2dStaticSamePadding(\n", - " 480, 20, kernel_size=(1, 1), stride=(1, 1)\n", - " (static_padding): Identity()\n", - " )\n", - " (_se_expand): Conv2dStaticSamePadding(\n", - " 20, 480, kernel_size=(1, 1), stride=(1, 1)\n", - " (static_padding): Identity()\n", - " )\n", - " (_project_conv): Conv2dStaticSamePadding(\n", - " 480, 112, kernel_size=(1, 1), stride=(1, 1), bias=False\n", - " (static_padding): Identity()\n", - " )\n", - " (_bn2): BatchNorm2d(112, eps=0.001, momentum=0.010000000000000009, affine=True, 
track_running_stats=True)\n", - " (_swish): MemoryEfficientSwish()\n", - " )\n", - " (9): MBConvBlock(\n", - " (_expand_conv): Conv2dStaticSamePadding(\n", - " 112, 672, kernel_size=(1, 1), stride=(1, 1), bias=False\n", - " (static_padding): Identity()\n", - " )\n", - " (_bn0): BatchNorm2d(672, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_depthwise_conv): Conv2dStaticSamePadding(\n", - " 672, 672, kernel_size=(5, 5), stride=(1, 1), groups=672, bias=False\n", - " (static_padding): ZeroPad2d(padding=(2, 2, 2, 2), value=0.0)\n", - " )\n", - " (_bn1): BatchNorm2d(672, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_se_reduce): Conv2dStaticSamePadding(\n", - " 672, 28, kernel_size=(1, 1), stride=(1, 1)\n", - " (static_padding): Identity()\n", - " )\n", - " (_se_expand): Conv2dStaticSamePadding(\n", - " 28, 672, kernel_size=(1, 1), stride=(1, 1)\n", - " (static_padding): Identity()\n", - " )\n", - " (_project_conv): Conv2dStaticSamePadding(\n", - " 672, 112, kernel_size=(1, 1), stride=(1, 1), bias=False\n", - " (static_padding): Identity()\n", - " )\n", - " (_bn2): BatchNorm2d(112, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_swish): MemoryEfficientSwish()\n", - " )\n", - " (10): MBConvBlock(\n", - " (_expand_conv): Conv2dStaticSamePadding(\n", - " 112, 672, kernel_size=(1, 1), stride=(1, 1), bias=False\n", - " (static_padding): Identity()\n", - " )\n", - " (_bn0): BatchNorm2d(672, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_depthwise_conv): Conv2dStaticSamePadding(\n", - " 672, 672, kernel_size=(5, 5), stride=(1, 1), groups=672, bias=False\n", - " (static_padding): ZeroPad2d(padding=(2, 2, 2, 2), value=0.0)\n", - " )\n", - " (_bn1): BatchNorm2d(672, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_se_reduce): Conv2dStaticSamePadding(\n", - " 672, 28, kernel_size=(1, 1), stride=(1, 1)\n", - " (static_padding): Identity()\n", - " )\n", - " (_se_expand): Conv2dStaticSamePadding(\n", - " 28, 672, kernel_size=(1, 1), stride=(1, 1)\n", - " (static_padding): Identity()\n", - " )\n", - " (_project_conv): Conv2dStaticSamePadding(\n", - " 672, 112, kernel_size=(1, 1), stride=(1, 1), bias=False\n", - " (static_padding): Identity()\n", - " )\n", - " (_bn2): BatchNorm2d(112, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_swish): MemoryEfficientSwish()\n", - " )\n", - " (11): MBConvBlock(\n", - " (_expand_conv): Conv2dStaticSamePadding(\n", - " 112, 672, kernel_size=(1, 1), stride=(1, 1), bias=False\n", - " (static_padding): Identity()\n", - " )\n", - " (_bn0): BatchNorm2d(672, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_depthwise_conv): Conv2dStaticSamePadding(\n", - " 672, 672, kernel_size=(5, 5), stride=[2, 2], groups=672, bias=False\n", - " (static_padding): ZeroPad2d(padding=(1, 2, 1, 2), value=0.0)\n", - " )\n", - " (_bn1): BatchNorm2d(672, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_se_reduce): Conv2dStaticSamePadding(\n", - " 672, 28, kernel_size=(1, 1), stride=(1, 1)\n", - " (static_padding): Identity()\n", - " )\n", - " (_se_expand): Conv2dStaticSamePadding(\n", - " 28, 672, kernel_size=(1, 1), stride=(1, 1)\n", - " (static_padding): Identity()\n", - " )\n", - " (_project_conv): Conv2dStaticSamePadding(\n", - " 672, 192, kernel_size=(1, 1), 
stride=(1, 1), bias=False\n", - " (static_padding): Identity()\n", - " )\n", - " (_bn2): BatchNorm2d(192, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_swish): MemoryEfficientSwish()\n", - " )\n", - " (12): MBConvBlock(\n", - " (_expand_conv): Conv2dStaticSamePadding(\n", - " 192, 1152, kernel_size=(1, 1), stride=(1, 1), bias=False\n", - " (static_padding): Identity()\n", - " )\n", - " (_bn0): BatchNorm2d(1152, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_depthwise_conv): Conv2dStaticSamePadding(\n", - " 1152, 1152, kernel_size=(5, 5), stride=(1, 1), groups=1152, bias=False\n", - " (static_padding): ZeroPad2d(padding=(2, 2, 2, 2), value=0.0)\n", - " )\n", - " (_bn1): BatchNorm2d(1152, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_se_reduce): Conv2dStaticSamePadding(\n", - " 1152, 48, kernel_size=(1, 1), stride=(1, 1)\n", - " (static_padding): Identity()\n", - " )\n", - " (_se_expand): Conv2dStaticSamePadding(\n", - " 48, 1152, kernel_size=(1, 1), stride=(1, 1)\n", - " (static_padding): Identity()\n", - " )\n", - " (_project_conv): Conv2dStaticSamePadding(\n", - " 1152, 192, kernel_size=(1, 1), stride=(1, 1), bias=False\n", - " (static_padding): Identity()\n", - " )\n", - " (_bn2): BatchNorm2d(192, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_swish): MemoryEfficientSwish()\n", - " )\n", - " (13): MBConvBlock(\n", - " (_expand_conv): Conv2dStaticSamePadding(\n", - " 192, 1152, kernel_size=(1, 1), stride=(1, 1), bias=False\n", - " (static_padding): Identity()\n", - " )\n", - " (_bn0): BatchNorm2d(1152, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_depthwise_conv): Conv2dStaticSamePadding(\n", - " 1152, 1152, kernel_size=(5, 5), stride=(1, 1), groups=1152, bias=False\n", - " (static_padding): ZeroPad2d(padding=(2, 2, 2, 2), value=0.0)\n", - " )\n", - " (_bn1): BatchNorm2d(1152, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_se_reduce): Conv2dStaticSamePadding(\n", - " 1152, 48, kernel_size=(1, 1), stride=(1, 1)\n", - " (static_padding): Identity()\n", - " )\n", - " (_se_expand): Conv2dStaticSamePadding(\n", - " 48, 1152, kernel_size=(1, 1), stride=(1, 1)\n", - " (static_padding): Identity()\n", - " )\n", - " (_project_conv): Conv2dStaticSamePadding(\n", - " 1152, 192, kernel_size=(1, 1), stride=(1, 1), bias=False\n", - " (static_padding): Identity()\n", - " )\n", - " (_bn2): BatchNorm2d(192, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_swish): MemoryEfficientSwish()\n", - " )\n", - " (14): MBConvBlock(\n", - " (_expand_conv): Conv2dStaticSamePadding(\n", - " 192, 1152, kernel_size=(1, 1), stride=(1, 1), bias=False\n", - " (static_padding): Identity()\n", - " )\n", - " (_bn0): BatchNorm2d(1152, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_depthwise_conv): Conv2dStaticSamePadding(\n", - " 1152, 1152, kernel_size=(5, 5), stride=(1, 1), groups=1152, bias=False\n", - " (static_padding): ZeroPad2d(padding=(2, 2, 2, 2), value=0.0)\n", - " )\n", - " (_bn1): BatchNorm2d(1152, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_se_reduce): Conv2dStaticSamePadding(\n", - " 1152, 48, kernel_size=(1, 1), stride=(1, 1)\n", - " (static_padding): Identity()\n", - " )\n", - " (_se_expand): 
Conv2dStaticSamePadding(\n", - " 48, 1152, kernel_size=(1, 1), stride=(1, 1)\n", - " (static_padding): Identity()\n", - " )\n", - " (_project_conv): Conv2dStaticSamePadding(\n", - " 1152, 192, kernel_size=(1, 1), stride=(1, 1), bias=False\n", - " (static_padding): Identity()\n", - " )\n", - " (_bn2): BatchNorm2d(192, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_swish): MemoryEfficientSwish()\n", - " )\n", - " (15): MBConvBlock(\n", - " (_expand_conv): Conv2dStaticSamePadding(\n", - " 192, 1152, kernel_size=(1, 1), stride=(1, 1), bias=False\n", - " (static_padding): Identity()\n", - " )\n", - " (_bn0): BatchNorm2d(1152, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_depthwise_conv): Conv2dStaticSamePadding(\n", - " 1152, 1152, kernel_size=(3, 3), stride=[1, 1], groups=1152, bias=False\n", - " (static_padding): ZeroPad2d(padding=(1, 1, 1, 1), value=0.0)\n", - " )\n", - " (_bn1): BatchNorm2d(1152, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_se_reduce): Conv2dStaticSamePadding(\n", - " 1152, 48, kernel_size=(1, 1), stride=(1, 1)\n", - " (static_padding): Identity()\n", - " )\n", - " (_se_expand): Conv2dStaticSamePadding(\n", - " 48, 1152, kernel_size=(1, 1), stride=(1, 1)\n", - " (static_padding): Identity()\n", - " )\n", - " (_project_conv): Conv2dStaticSamePadding(\n", - " 1152, 320, kernel_size=(1, 1), stride=(1, 1), bias=False\n", - " (static_padding): Identity()\n", - " )\n", - " (_bn2): BatchNorm2d(320, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_swish): MemoryEfficientSwish()\n", - " )\n", - " )\n", - " (_conv_head): Conv2dStaticSamePadding(\n", - " 320, 1280, kernel_size=(1, 1), stride=(1, 1), bias=False\n", - " (static_padding): Identity()\n", - " )\n", - " (_bn1): BatchNorm2d(1280, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_avg_pooling): AdaptiveAvgPool2d(output_size=1)\n", - " (_dropout): Dropout(p=0.2)\n", - " (_fc): Linear(in_features=1280, out_features=1000, bias=True)\n", - " (_swish): MemoryEfficientSwish()\n", - ")\n", - "320\n", + "Loaded pretrained weights for efficientnet-b2\n", + "208\n", "use BiFPN\n" ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "layerc3: torch.Size([1, 40, 64, 64])\n", - "layerc4: torch.Size([1, 80, 32, 32])\n", - "layerc5: torch.Size([1, 320, 16, 16])\n", - "layer size: torch.Size([1, 256, 64, 64])\n", - "layer size: torch.Size([1, 256, 32, 32])\n", - "layer size: torch.Size([1, 256, 16, 16])\n", - "layer size: torch.Size([1, 256, 8, 8])\n", - "layer size: torch.Size([1, 256, 4, 4])\n", - "layer size: torch.Size([1, 256, 2, 2])\n", - "torch.Size([1, 24528, 4])\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/home/ubuntu/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages/torch/nn/functional.py:2457: UserWarning: nn.functional.upsample is deprecated. Use nn.functional.interpolate instead.\n", - " warnings.warn(\"nn.functional.upsample is deprecated. Use nn.functional.interpolate instead.\")\n", - "/home/ubuntu/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages/torch/nn/functional.py:2539: UserWarning: Default upsampling behavior when mode=bilinear is changed to align_corners=False since 0.4.0. Please specify align_corners=True if the old behavior is desired. 
See the documentation of nn.Upsample for details.\n", - " \"See the documentation of nn.Upsample for details.\".format(mode))\n" - ] } ], "source": [ @@ -689,8 +234,8 @@ "\n", "# test if net works\n", "net = EfficientDet(phase=\"train\", cfg=ssd_cfg, verbose=True, backbone=backbone, useBiFPN=useBiFPN)\n", - "out = net(torch.rand([1,3,input_size,input_size]))\n", - "print(out[0].size())" + "#out = net(torch.rand([1,3,input_size,input_size]))\n", + "#print(out[0].size())" ] }, { @@ -702,437 +247,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "Loaded pretrained weights for efficientnet-b0\n", - "EfficientNet(\n", - " (_conv_stem): Conv2dStaticSamePadding(\n", - " 3, 32, kernel_size=(3, 3), stride=(2, 2), bias=False\n", - " (static_padding): ZeroPad2d(padding=(0, 1, 0, 1), value=0.0)\n", - " )\n", - " (_bn0): BatchNorm2d(32, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_blocks): ModuleList(\n", - " (0): MBConvBlock(\n", - " (_depthwise_conv): Conv2dStaticSamePadding(\n", - " 32, 32, kernel_size=(3, 3), stride=[1, 1], groups=32, bias=False\n", - " (static_padding): ZeroPad2d(padding=(1, 1, 1, 1), value=0.0)\n", - " )\n", - " (_bn1): BatchNorm2d(32, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_se_reduce): Conv2dStaticSamePadding(\n", - " 32, 8, kernel_size=(1, 1), stride=(1, 1)\n", - " (static_padding): Identity()\n", - " )\n", - " (_se_expand): Conv2dStaticSamePadding(\n", - " 8, 32, kernel_size=(1, 1), stride=(1, 1)\n", - " (static_padding): Identity()\n", - " )\n", - " (_project_conv): Conv2dStaticSamePadding(\n", - " 32, 16, kernel_size=(1, 1), stride=(1, 1), bias=False\n", - " (static_padding): Identity()\n", - " )\n", - " (_bn2): BatchNorm2d(16, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_swish): MemoryEfficientSwish()\n", - " )\n", - " (1): MBConvBlock(\n", - " (_expand_conv): Conv2dStaticSamePadding(\n", - " 16, 96, kernel_size=(1, 1), stride=(1, 1), bias=False\n", - " (static_padding): Identity()\n", - " )\n", - " (_bn0): BatchNorm2d(96, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_depthwise_conv): Conv2dStaticSamePadding(\n", - " 96, 96, kernel_size=(3, 3), stride=[2, 2], groups=96, bias=False\n", - " (static_padding): ZeroPad2d(padding=(0, 1, 0, 1), value=0.0)\n", - " )\n", - " (_bn1): BatchNorm2d(96, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_se_reduce): Conv2dStaticSamePadding(\n", - " 96, 4, kernel_size=(1, 1), stride=(1, 1)\n", - " (static_padding): Identity()\n", - " )\n", - " (_se_expand): Conv2dStaticSamePadding(\n", - " 4, 96, kernel_size=(1, 1), stride=(1, 1)\n", - " (static_padding): Identity()\n", - " )\n", - " (_project_conv): Conv2dStaticSamePadding(\n", - " 96, 24, kernel_size=(1, 1), stride=(1, 1), bias=False\n", - " (static_padding): Identity()\n", - " )\n", - " (_bn2): BatchNorm2d(24, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_swish): MemoryEfficientSwish()\n", - " )\n", - " (2): MBConvBlock(\n", - " (_expand_conv): Conv2dStaticSamePadding(\n", - " 24, 144, kernel_size=(1, 1), stride=(1, 1), bias=False\n", - " (static_padding): Identity()\n", - " )\n", - " (_bn0): BatchNorm2d(144, eps=0.001, momentum=0.010000000000000009, affine=True, track_running_stats=True)\n", - " (_depthwise_conv): Conv2dStaticSamePadding(\n", - " 144, 144, kernel_size=(3, 3), stride=(1, 1), groups=144, 
bias=False\n",
[... ~1,100 further deleted lines of the EfficientNet-B0 module printout elided: MBConvBlocks 2-15 repeat the same expand / depthwise / squeeze-excite / project pattern with growing widths (24 -> 40 -> 80 -> 112 -> 192 -> 320), followed by _conv_head (320 -> 1280), _bn1, _avg_pooling, _dropout(p=0.2), _fc (1280 -> 1000) and _swish ...]
-    ")\n",
-    "320\n",
+    "Loaded pretrained weights for efficientnet-b2\n",
+    "208\n",
     "use BiFPN\n",
     "using: cuda:0\n",
     "set weights!\n"
    ]
   },
@@ -1182,16 +298,16 @@
    "outputs": [],
    "source": [
     "# Freeze backbone layers\n",
-    "for param in net.layer0.parameters():\n",
-    " param.requires_grad = False\n",
-    "for param in net.layer2.parameters():\n",
-    " param.requires_grad = False\n",
-    "for param in net.layer3.parameters():\n",
-    " param.requires_grad = False\n",
-    "for param in net.layer4.parameters():\n",
-    " param.requires_grad = False\n",
-    "for param in net.layer5.parameters():\n",
-    " param.requires_grad = False"
+    "#for param in net.layer0.parameters():\n",
+    "# param.requires_grad = False\n",
+    "#for param in net.layer2.parameters():\n",
+    "# param.requires_grad = False\n",
+    "#for param in net.layer3.parameters():\n",
+    "# param.requires_grad = False\n",
+    "#for param in net.layer4.parameters():\n",
+    "# param.requires_grad = False\n",
+    "#for param in net.layer5.parameters():\n",
+    "# param.requires_grad = False"
    ]
   },
  {
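The hunk above switches the run from a frozen to a fully trainable backbone by commenting the freeze loop out. For reference, a minimal sketch of the same switch as a reusable toggle, assuming a `net` that exposes the `layer0`/`layer2`-`layer5` sub-modules these cells address (the helper name is hypothetical):

```python
import torch.nn as nn

def set_backbone_trainable(net: nn.Module, trainable: bool) -> None:
    # Hypothetical helper mirroring the notebook cell above: it toggles
    # requires_grad on the backbone stages instead of commenting code in/out.
    for name in ("layer0", "layer2", "layer3", "layer4", "layer5"):
        layer = getattr(net, name, None)
        if layer is None:  # tolerate models that lack one of the stages
            continue
        for param in layer.parameters():
            param.requires_grad = trainable  # False freezes the stage
```

If the backbone stays frozen, it also helps to build the optimizer over `filter(lambda p: p.requires_grad, net.parameters())` so the frozen weights are excluded from updates and weight decay.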
@@ -1348,7 +464,7 @@
     " 'train_loss': epoch_train_loss, 'val_loss': epoch_val_loss}\n",
     " logs.append(log_epoch)\n",
     " df = pd.DataFrame(logs)\n",
-    " df.to_csv(\"log_output.csv\")\n",
+    " df.to_csv(\"log/\"+DATASET+\"_\"+backbone+\"_\" + str(300*scale) +\"log_output.csv\")\n",
     "\n",
     " epoch_train_loss = 0.0 # running sum of the loss over this epoch\n",
     " epoch_val_loss = 0.0 # running sum of the loss over this epoch\n",
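The retargeted `to_csv` call now writes one CSV per run under `log/`, keyed by dataset, backbone, and input resolution. A sketch of an equivalent construction with the notebook's `DATASET`, `backbone`, `scale`, and `df` globals; the main practical addition is creating `log/` first, since `to_csv` fails if the directory is missing:

```python
import os

# Same filename the concatenation above produces, e.g.
# log/VOC_efficientnet-b0_300log_output.csv for backbone=b0, scale=1
# (300 * scale is the input resolution: 300 for b0, 600 for b2).
log_dir = "log"
os.makedirs(log_dir, exist_ok=True)  # to_csv does not create directories
log_path = os.path.join(log_dir, f"{DATASET}_{backbone}_{300 * scale}log_output.csv")
df.to_csv(log_path)
```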
@@ -1386,9 +502,9 @@
     "name": "stderr",
     "output_type": "stream",
     "text": [
-     "/home/ubuntu/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages/torch/nn/functional.py:2457: UserWarning: nn.functional.upsample is deprecated. Use nn.functional.interpolate instead.\n",
+     "/home/ubuntu/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages/torch/nn/functional.py:2390: UserWarning: nn.functional.upsample is deprecated. Use nn.functional.interpolate instead.\n",
      "  warnings.warn(\"nn.functional.upsample is deprecated. Use nn.functional.interpolate instead.\")\n",
-     "/home/ubuntu/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages/torch/nn/functional.py:2539: UserWarning: Default upsampling behavior when mode=bilinear is changed to align_corners=False since 0.4.0. Please specify align_corners=True if the old behavior is desired. See the documentation of nn.Upsample for details.\n",
+     "/home/ubuntu/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages/torch/nn/functional.py:2479: UserWarning: Default upsampling behavior when mode=bilinear is changed to align_corners=False since 0.4.0. Please specify align_corners=True if the old behavior is desired. See the documentation of nn.Upsample for details.\n",
      "  \"See the documentation of nn.Upsample for details.\".format(mode))\n"
     ]
    },
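Both warnings come from bilinear upsampling inside the network's feature fusion and are harmless, but the fix they describe is a one-liner. A minimal standalone sketch (the tensor shape is arbitrary; the real call site is in the model code, not in this diff):

```python
import torch
import torch.nn.functional as F

x = torch.randn(1, 64, 16, 16)  # any NCHW feature map

# Deprecated spelling that triggers the first warning:
#   y = F.upsample(x, scale_factor=2, mode="bilinear")

# Current spelling; passing align_corners explicitly silences the second
# warning (PyTorch 0.4.0 changed the default to False for mode="bilinear").
y = F.interpolate(x, scale_factor=2, mode="bilinear", align_corners=True)
```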
@@ -1396,53 +512,60 @@
     "name": "stdout",
     "output_type": "stream",
     "text": [
-     "Iter 10 || Loss: 15.7518 || 10iter: 4.7307 sec.\n",
[... Iter 20-430 of the old efficientnet-b0 run elided (deleted lines); loss drifts down from 14.4 to ~7.6 at roughly 1.9 sec per 10 iterations ...]
-     "Iter 440 || Loss: 7.6132 || 10iter: 1.9623 sec.\n",
+     "Iter 10 || Loss: 14.6762 || 10iter: 7.9883 sec.\n",
[... Iter 20-500 of the new efficientnet-b2 run elided (added lines); loss drifts down from 12.6 to ~6.4 at roughly 3.4 sec per 10 iterations ...]
+     "Iter 510 || Loss: 6.7486 || 10iter: 3.3512 sec.\n",
      "-------------\n",
-     "epoch 1 || Epoch_TRAIN_Loss:8116.4423 ||Epoch_VAL_Loss:0.0000\n",
-     "timer: 205.5965 sec.\n",
+     "epoch 1 || Epoch_TRAIN_Loss:4080.5465 ||Epoch_VAL_Loss:0.0000\n",
+     "timer: 182.0846 sec.\n",
      "lr is: 0.001\n",
      "-------------\n",
      "Epoch 2/200\n",
@@ -1454,9 +577,9 @@
[... identical stderr hunk to the one above: the same upsample / align_corners deprecation warnings are re-emitted at the start of epoch 2, with the same 2457 -> 2390 and 2539 -> 2479 path updates ...]
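Because the training loop appends one row per epoch to the CSV set up earlier, the per-epoch losses that follow (b2 run: 4080.5 -> 3357.0 -> 3100.5 -> 2915.8 -> 2787.8 -> 2710.1 over epochs 1-6) are easier to read back from the file than from the raw output. A sketch, assuming `DATASET == "VOC"` and that the logged dict carries `epoch`, `train_loss`, and `val_loss` columns (only the latter two are visible in this diff):

```python
import pandas as pd
import matplotlib.pyplot as plt

# Log written by the to_csv call above for the efficientnet-b2 / 600 px run.
logs = pd.read_csv("log/VOC_efficientnet-b2_600log_output.csv")

plt.plot(logs["epoch"], logs["train_loss"], label="train")
plt.plot(logs["epoch"], logs["val_loss"], label="val")  # 0.0 on epochs without a val pass
plt.xlabel("epoch")
plt.ylabel("summed loss")
plt.legend()
plt.show()
```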
@@ -1464,1722 +587,585 @@
     "name": "stdout",
     "output_type": "stream",
     "text": [
[... epochs 2-5 condensed below: the deleted (-) lines are the old efficientnet-b0 run at ~1.9 sec per 10 iterations, the added (+) lines the new efficientnet-b2 run at ~3.4 sec per 10 iterations; the per-iteration "Iter N || Loss: ... || 10iter: ... sec." lines and the notebook's stream-chunk breaks are elided, the per-epoch summaries are kept verbatim ...]
-     [old epoch 2: Iter 1040-2070 elided]
+     [new epoch 2: Iter 520-1030 elided]
      "-------------\n",
-     "epoch 2 || Epoch_TRAIN_Loss:6614.8536 ||Epoch_VAL_Loss:0.0000\n",
-     "timer: 203.4751 sec.\n",
+     "epoch 2 || Epoch_TRAIN_Loss:3357.0348 ||Epoch_VAL_Loss:0.0000\n",
+     "timer: 177.4286 sec.\n",
      "lr is: 0.001\n",
      "-------------\n",
      "Epoch 3/200\n",
      "-------------\n",
      "(train)\n",
-     [old epoch 3: Iter 2080-3100 elided]
+     [new epoch 3: Iter 1040-1550 elided]
      "-------------\n",
-     "epoch 3 || Epoch_TRAIN_Loss:6204.8702 ||Epoch_VAL_Loss:0.0000\n",
-     "timer: 203.5415 sec.\n",
+     "epoch 3 || Epoch_TRAIN_Loss:3100.4523 ||Epoch_VAL_Loss:0.0000\n",
+     "timer: 177.5168 sec.\n",
      "lr is: 0.001\n",
      "-------------\n",
      "Epoch 4/200\n",
      "-------------\n",
      "(train)\n",
-     [old epoch 4: Iter 3110-4140 elided]
+     [new epoch 4: Iter 1560-2070 elided]
      "-------------\n",
-     "epoch 4 || Epoch_TRAIN_Loss:5950.0694 ||Epoch_VAL_Loss:0.0000\n",
-     "timer: 203.6692 sec.\n",
+     "epoch 4 || Epoch_TRAIN_Loss:2915.8172 ||Epoch_VAL_Loss:0.0000\n",
+     "timer: 177.4296 sec.\n",
      "lr is: 0.001\n",
      "-------------\n",
      "Epoch 5/200\n",
      "-------------\n",
      "(train)\n",
-     [old epoch 5: Iter 4150-5170 elided]
+     [new epoch 5: Iter 2080-2590 elided]
      "-------------\n",
-     "epoch 5 || Epoch_TRAIN_Loss:5790.5436 ||Epoch_VAL_Loss:0.0000\n",
-     "timer: 203.5833 sec.\n",
+     "epoch 5 || Epoch_TRAIN_Loss:2787.7734 ||Epoch_VAL_Loss:0.0000\n",
+     "timer: 176.9936 sec.\n",
      "lr is: 0.001\n",
      "-------------\n",
      "Epoch 6/200\n",
      "-------------\n",
      "(train)\n",
10iter: 3.4016 sec.\n", + "Iter 2680 || Loss: 5.9544 || 10iter: 3.3901 sec.\n", + "Iter 2690 || Loss: 5.0868 || 10iter: 3.3688 sec.\n", + "Iter 2700 || Loss: 5.2073 || 10iter: 3.3698 sec.\n", + "Iter 2710 || Loss: 5.1464 || 10iter: 3.3585 sec.\n", + "Iter 2720 || Loss: 5.2350 || 10iter: 3.3562 sec.\n", + "Iter 2730 || Loss: 4.8155 || 10iter: 3.3503 sec.\n", + "Iter 2740 || Loss: 5.3389 || 10iter: 3.4014 sec.\n", + "Iter 2750 || Loss: 5.5505 || 10iter: 3.4290 sec.\n", + "Iter 2760 || Loss: 4.8003 || 10iter: 3.3623 sec.\n", + "Iter 2770 || Loss: 4.9201 || 10iter: 3.3576 sec.\n", + "Iter 2780 || Loss: 5.3539 || 10iter: 3.3448 sec.\n", + "Iter 2790 || Loss: 5.2739 || 10iter: 3.4221 sec.\n", + "Iter 2800 || Loss: 5.2493 || 10iter: 3.3922 sec.\n", + "Iter 2810 || Loss: 5.0645 || 10iter: 3.3891 sec.\n", + "Iter 2820 || Loss: 5.4998 || 10iter: 3.3893 sec.\n", + "Iter 2830 || Loss: 4.9218 || 10iter: 3.3521 sec.\n", + "Iter 2840 || Loss: 5.4572 || 10iter: 3.3764 sec.\n", + "Iter 2850 || Loss: 5.2022 || 10iter: 3.3731 sec.\n", + "Iter 2860 || Loss: 5.0886 || 10iter: 3.3638 sec.\n", + "Iter 2870 || Loss: 5.3664 || 10iter: 3.3613 sec.\n", + "Iter 2880 || Loss: 5.6510 || 10iter: 3.3509 sec.\n", + "Iter 2890 || Loss: 4.9461 || 10iter: 3.3517 sec.\n", + "Iter 2900 || Loss: 5.2534 || 10iter: 3.3601 sec.\n", + "Iter 2910 || Loss: 5.4036 || 10iter: 3.3772 sec.\n", + "Iter 2920 || Loss: 5.1080 || 10iter: 3.4180 sec.\n", + "Iter 2930 || Loss: 5.0302 || 10iter: 3.3692 sec.\n", + "Iter 2940 || Loss: 5.7639 || 10iter: 3.4611 sec.\n", + "Iter 2950 || Loss: 5.0709 || 10iter: 3.3403 sec.\n", + "Iter 2960 || Loss: 5.1821 || 10iter: 3.4102 sec.\n", + "Iter 2970 || Loss: 5.1731 || 10iter: 3.4250 sec.\n", + "Iter 2980 || Loss: 4.6264 || 10iter: 3.4121 sec.\n", + "Iter 2990 || Loss: 5.0477 || 10iter: 3.3859 sec.\n", + "Iter 3000 || Loss: 4.9625 || 10iter: 3.3554 sec.\n", + "Iter 3010 || Loss: 5.1828 || 10iter: 3.3571 sec.\n", + "Iter 3020 || Loss: 5.3519 || 10iter: 3.3411 sec.\n", + "Iter 3030 || Loss: 5.2634 || 10iter: 3.3441 sec.\n", + "Iter 3040 || Loss: 5.1069 || 10iter: 3.3460 sec.\n", + "Iter 3050 || Loss: 5.0603 || 10iter: 3.3598 sec.\n", + "Iter 3060 || Loss: 5.0679 || 10iter: 3.3526 sec.\n", + "Iter 3070 || Loss: 4.7272 || 10iter: 3.3563 sec.\n", + "Iter 3080 || Loss: 5.0513 || 10iter: 3.3924 sec.\n", + "Iter 3090 || Loss: 4.8833 || 10iter: 3.4150 sec.\n", + "Iter 3100 || Loss: 4.5966 || 10iter: 3.3033 sec.\n", "-------------\n", - "epoch 6 || Epoch_TRAIN_Loss:5678.6895 ||Epoch_VAL_Loss:0.0000\n", - "timer: 203.5752 sec.\n", + "epoch 6 || Epoch_TRAIN_Loss:2710.0843 ||Epoch_VAL_Loss:0.0000\n", + "timer: 176.7840 sec.\n", "lr is: 0.001\n", "-------------\n", "Epoch 7/200\n", "-------------\n", "(train)\n", - "Iter 6220 || Loss: 5.3276 || 10iter: 3.6976 sec.\n", - "Iter 6230 || Loss: 5.9927 || 10iter: 1.8670 sec.\n", - "Iter 6240 || Loss: 5.3716 || 10iter: 1.9228 sec.\n", - "Iter 6250 || Loss: 5.0341 || 10iter: 1.8773 sec.\n", - "Iter 6260 || Loss: 4.8785 || 10iter: 1.9555 sec.\n", - "Iter 6270 || Loss: 5.1969 || 10iter: 1.9145 sec.\n", - "Iter 6280 || Loss: 5.5633 || 10iter: 1.9388 sec.\n", - "Iter 6290 || Loss: 5.0515 || 10iter: 2.0314 sec.\n", - "Iter 6300 || Loss: 6.0010 || 10iter: 1.9420 sec.\n", - "Iter 6310 || Loss: 5.4549 || 10iter: 1.8755 sec.\n", - "Iter 6320 || Loss: 5.0633 || 10iter: 1.8809 sec.\n", - "Iter 6330 || Loss: 5.2250 || 10iter: 1.9092 sec.\n", - "Iter 6340 || Loss: 5.6289 || 10iter: 1.9018 sec.\n", - "Iter 6350 || Loss: 5.7008 || 10iter: 1.9331 sec.\n", - "Iter 6360 || Loss: 5.1054 || 10iter: 
1.9022 sec.\n", - "Iter 6370 || Loss: 5.1999 || 10iter: 1.8995 sec.\n", - "Iter 6380 || Loss: 5.8094 || 10iter: 1.9219 sec.\n", - "Iter 6390 || Loss: 5.2512 || 10iter: 1.9247 sec.\n", - "Iter 6400 || Loss: 5.7110 || 10iter: 1.8957 sec.\n", - "Iter 6410 || Loss: 5.1866 || 10iter: 1.9235 sec.\n", - "Iter 6420 || Loss: 6.3414 || 10iter: 1.9359 sec.\n", - "Iter 6430 || Loss: 5.3007 || 10iter: 1.8936 sec.\n", - "Iter 6440 || Loss: 4.9592 || 10iter: 1.8694 sec.\n", - "Iter 6450 || Loss: 5.3950 || 10iter: 1.9272 sec.\n", - "Iter 6460 || Loss: 5.0373 || 10iter: 1.9082 sec.\n", - "Iter 6470 || Loss: 5.9786 || 10iter: 1.9228 sec.\n", - "Iter 6480 || Loss: 5.1692 || 10iter: 1.9840 sec.\n", - "Iter 6490 || Loss: 5.0207 || 10iter: 1.9623 sec.\n", - "Iter 6500 || Loss: 5.4802 || 10iter: 1.9513 sec.\n", - "Iter 6510 || Loss: 4.8867 || 10iter: 1.8955 sec.\n", - "Iter 6520 || Loss: 5.3232 || 10iter: 1.9491 sec.\n", - "Iter 6530 || Loss: 5.6932 || 10iter: 1.9145 sec.\n", - "Iter 6540 || Loss: 5.0516 || 10iter: 1.9212 sec.\n", - "Iter 6550 || Loss: 6.3195 || 10iter: 1.9464 sec.\n", - "Iter 6560 || Loss: 5.4216 || 10iter: 1.9459 sec.\n", - "Iter 6570 || Loss: 6.1527 || 10iter: 1.9326 sec.\n", - "Iter 6580 || Loss: 4.9233 || 10iter: 1.9031 sec.\n", - "Iter 6590 || Loss: 5.2672 || 10iter: 1.9241 sec.\n", - "Iter 6600 || Loss: 5.7909 || 10iter: 1.8963 sec.\n", - "Iter 6610 || Loss: 5.0991 || 10iter: 1.9190 sec.\n", - "Iter 6620 || Loss: 5.9675 || 10iter: 1.9053 sec.\n", - "Iter 6630 || Loss: 4.9585 || 10iter: 1.9257 sec.\n", - "Iter 6640 || Loss: 5.8158 || 10iter: 1.9156 sec.\n", - "Iter 6650 || Loss: 4.7083 || 10iter: 1.9064 sec.\n", - "Iter 6660 || Loss: 4.8529 || 10iter: 1.8876 sec.\n", - "Iter 6670 || Loss: 6.0777 || 10iter: 1.8866 sec.\n", - "Iter 6680 || Loss: 5.0549 || 10iter: 1.8917 sec.\n", - "Iter 6690 || Loss: 4.9110 || 10iter: 1.9456 sec.\n", - "Iter 6700 || Loss: 5.4523 || 10iter: 1.8810 sec.\n", - "Iter 6710 || Loss: 5.3366 || 10iter: 1.9179 sec.\n", - "Iter 6720 || Loss: 5.4078 || 10iter: 1.9461 sec.\n", - "Iter 6730 || Loss: 5.6472 || 10iter: 1.9237 sec.\n", - "Iter 6740 || Loss: 5.5324 || 10iter: 1.9232 sec.\n", - "Iter 6750 || Loss: 5.4342 || 10iter: 1.9088 sec.\n", - "Iter 6760 || Loss: 5.2852 || 10iter: 1.9094 sec.\n", - "Iter 6770 || Loss: 4.8751 || 10iter: 1.9237 sec.\n", - "Iter 6780 || Loss: 5.8178 || 10iter: 1.9029 sec.\n", - "Iter 6790 || Loss: 5.0503 || 10iter: 1.8887 sec.\n", - "Iter 6800 || Loss: 5.0628 || 10iter: 1.9535 sec.\n", - "Iter 6810 || Loss: 5.8114 || 10iter: 1.8843 sec.\n", - "Iter 6820 || Loss: 5.5054 || 10iter: 1.9081 sec.\n", - "Iter 6830 || Loss: 5.5460 || 10iter: 1.8877 sec.\n", - "Iter 6840 || Loss: 5.2072 || 10iter: 1.8973 sec.\n", - "Iter 6850 || Loss: 5.4865 || 10iter: 1.9405 sec.\n", - "Iter 6860 || Loss: 5.4291 || 10iter: 1.9139 sec.\n", - "Iter 6870 || Loss: 5.6141 || 10iter: 1.9551 sec.\n", - "Iter 6880 || Loss: 5.6275 || 10iter: 1.9194 sec.\n", - "Iter 6890 || Loss: 5.3716 || 10iter: 1.9544 sec.\n", - "Iter 6900 || Loss: 5.0124 || 10iter: 1.9088 sec.\n", - "Iter 6910 || Loss: 5.3573 || 10iter: 1.8962 sec.\n", - "Iter 6920 || Loss: 5.3543 || 10iter: 1.8975 sec.\n", - "Iter 6930 || Loss: 4.9271 || 10iter: 1.9240 sec.\n", - "Iter 6940 || Loss: 4.9061 || 10iter: 1.8924 sec.\n", - "Iter 6950 || Loss: 4.7066 || 10iter: 1.9618 sec.\n", - "Iter 6960 || Loss: 5.5158 || 10iter: 1.9264 sec.\n", - "Iter 6970 || Loss: 5.0725 || 10iter: 1.9455 sec.\n", - "Iter 6980 || Loss: 5.2451 || 10iter: 1.9331 sec.\n", - "Iter 6990 || Loss: 5.0387 || 10iter: 1.9163 sec.\n", - "Iter 
7000 || Loss: 4.8452 || 10iter: 1.9324 sec.\n", - "Iter 7010 || Loss: 5.0454 || 10iter: 1.9326 sec.\n", - "Iter 7020 || Loss: 5.4362 || 10iter: 1.9154 sec.\n", - "Iter 7030 || Loss: 4.9035 || 10iter: 1.8707 sec.\n", - "Iter 7040 || Loss: 5.4696 || 10iter: 1.9546 sec.\n", - "Iter 7050 || Loss: 4.7480 || 10iter: 1.9047 sec.\n", - "Iter 7060 || Loss: 5.3677 || 10iter: 1.9549 sec.\n", - "Iter 7070 || Loss: 5.9861 || 10iter: 1.9150 sec.\n", - "Iter 7080 || Loss: 5.5432 || 10iter: 1.9120 sec.\n", - "Iter 7090 || Loss: 5.1011 || 10iter: 1.9121 sec.\n", - "Iter 7100 || Loss: 6.3403 || 10iter: 1.9217 sec.\n", - "Iter 7110 || Loss: 5.1173 || 10iter: 1.9612 sec.\n", - "Iter 7120 || Loss: 5.1864 || 10iter: 1.9630 sec.\n", - "Iter 7130 || Loss: 4.8266 || 10iter: 1.9308 sec.\n", - "Iter 7140 || Loss: 5.2680 || 10iter: 1.9285 sec.\n", - "Iter 7150 || Loss: 5.1111 || 10iter: 1.8982 sec.\n", - "Iter 7160 || Loss: 5.4961 || 10iter: 1.9047 sec.\n", - "Iter 7170 || Loss: 5.3933 || 10iter: 1.9529 sec.\n", - "Iter 7180 || Loss: 5.0901 || 10iter: 1.9352 sec.\n", - "Iter 7190 || Loss: 5.2337 || 10iter: 1.9372 sec.\n", - "Iter 7200 || Loss: 5.0882 || 10iter: 1.9437 sec.\n", - "Iter 7210 || Loss: 5.6472 || 10iter: 1.9388 sec.\n", - "Iter 7220 || Loss: 6.3728 || 10iter: 1.8769 sec.\n", - "Iter 7230 || Loss: 4.6770 || 10iter: 1.9422 sec.\n", - "Iter 7240 || Loss: 4.9421 || 10iter: 1.8021 sec.\n", + "Iter 3110 || Loss: 4.7860 || 10iter: 2.5910 sec.\n", + "Iter 3120 || Loss: 5.5041 || 10iter: 3.3748 sec.\n", + "Iter 3130 || Loss: 5.3920 || 10iter: 3.3158 sec.\n", + "Iter 3140 || Loss: 5.3774 || 10iter: 3.4849 sec.\n", + "Iter 3150 || Loss: 5.1896 || 10iter: 3.3832 sec.\n", + "Iter 3160 || Loss: 4.9054 || 10iter: 3.3577 sec.\n", + "Iter 3170 || Loss: 4.7008 || 10iter: 3.3493 sec.\n", + "Iter 3180 || Loss: 5.2452 || 10iter: 3.3764 sec.\n", + "Iter 3190 || Loss: 5.2865 || 10iter: 3.4220 sec.\n", + "Iter 3200 || Loss: 5.0380 || 10iter: 3.3539 sec.\n", + "Iter 3210 || Loss: 5.0281 || 10iter: 3.4014 sec.\n", + "Iter 3220 || Loss: 5.6139 || 10iter: 3.3525 sec.\n", + "Iter 3230 || Loss: 4.9781 || 10iter: 3.3728 sec.\n", + "Iter 3240 || Loss: 5.3442 || 10iter: 3.4350 sec.\n", + "Iter 3250 || Loss: 5.5887 || 10iter: 3.3930 sec.\n", + "Iter 3260 || Loss: 5.2225 || 10iter: 3.3867 sec.\n", + "Iter 3270 || Loss: 5.1588 || 10iter: 3.4158 sec.\n", + "Iter 3280 || Loss: 5.1048 || 10iter: 3.3779 sec.\n", + "Iter 3290 || Loss: 5.3939 || 10iter: 3.3705 sec.\n", + "Iter 3300 || Loss: 5.2658 || 10iter: 3.3673 sec.\n", + "Iter 3310 || Loss: 5.2850 || 10iter: 3.3665 sec.\n", + "Iter 3320 || Loss: 4.6721 || 10iter: 3.4389 sec.\n", + "Iter 3330 || Loss: 5.1712 || 10iter: 3.3641 sec.\n", + "Iter 3340 || Loss: 4.8982 || 10iter: 3.4887 sec.\n", + "Iter 3350 || Loss: 4.7286 || 10iter: 3.3784 sec.\n", + "Iter 3360 || Loss: 5.0570 || 10iter: 3.3560 sec.\n", + "Iter 3370 || Loss: 5.1286 || 10iter: 3.3831 sec.\n", + "Iter 3380 || Loss: 4.9128 || 10iter: 3.3658 sec.\n", + "Iter 3390 || Loss: 5.0205 || 10iter: 3.3868 sec.\n", + "Iter 3400 || Loss: 5.1961 || 10iter: 3.3624 sec.\n", + "Iter 3410 || Loss: 5.1354 || 10iter: 3.3665 sec.\n", + "Iter 3420 || Loss: 4.7186 || 10iter: 3.3691 sec.\n", + "Iter 3430 || Loss: 5.0709 || 10iter: 3.3734 sec.\n", + "Iter 3440 || Loss: 5.2659 || 10iter: 3.4313 sec.\n", + "Iter 3450 || Loss: 4.7667 || 10iter: 3.3649 sec.\n", + "Iter 3460 || Loss: 5.1038 || 10iter: 3.4342 sec.\n", + "Iter 3470 || Loss: 4.8508 || 10iter: 3.3645 sec.\n", + "Iter 3480 || Loss: 5.0479 || 10iter: 3.3485 sec.\n", + "Iter 3490 || Loss: 4.9161 || 
10iter: 3.3587 sec.\n", + "Iter 3500 || Loss: 4.8759 || 10iter: 3.3661 sec.\n", + "Iter 3510 || Loss: 4.8505 || 10iter: 3.3777 sec.\n", + "Iter 3520 || Loss: 5.3492 || 10iter: 3.3906 sec.\n", + "Iter 3530 || Loss: 5.1807 || 10iter: 3.4205 sec.\n", + "Iter 3540 || Loss: 4.8861 || 10iter: 3.3694 sec.\n", + "Iter 3550 || Loss: 4.8535 || 10iter: 3.4204 sec.\n", + "Iter 3560 || Loss: 5.2251 || 10iter: 3.3683 sec.\n", + "Iter 3570 || Loss: 5.7424 || 10iter: 3.3468 sec.\n", + "Iter 3580 || Loss: 5.1441 || 10iter: 3.4248 sec.\n", + "Iter 3590 || Loss: 5.1995 || 10iter: 3.3633 sec.\n", + "Iter 3600 || Loss: 5.0320 || 10iter: 3.3571 sec.\n", + "Iter 3610 || Loss: 4.6265 || 10iter: 3.3839 sec.\n", + "Iter 3620 || Loss: 5.3315 || 10iter: 3.2952 sec.\n", "-------------\n", - "epoch 7 || Epoch_TRAIN_Loss:5571.7516 ||Epoch_VAL_Loss:0.0000\n", - "timer: 203.3650 sec.\n", + "epoch 7 || Epoch_TRAIN_Loss:2621.9839 ||Epoch_VAL_Loss:0.0000\n", + "timer: 176.9218 sec.\n", "lr is: 0.001\n", "-------------\n", "Epoch 8/200\n", "-------------\n", "(train)\n", - "Iter 7250 || Loss: 5.7486 || 10iter: 2.7172 sec.\n", - "Iter 7260 || Loss: 6.0077 || 10iter: 2.0038 sec.\n", - "Iter 7270 || Loss: 5.9509 || 10iter: 1.9195 sec.\n", - "Iter 7280 || Loss: 5.1916 || 10iter: 1.9556 sec.\n", - "Iter 7290 || Loss: 4.2582 || 10iter: 1.8916 sec.\n", - "Iter 7300 || Loss: 5.0374 || 10iter: 1.9302 sec.\n", - "Iter 7310 || Loss: 5.2894 || 10iter: 1.8731 sec.\n", - "Iter 7320 || Loss: 4.8249 || 10iter: 1.9153 sec.\n", - "Iter 7330 || Loss: 5.1845 || 10iter: 1.8910 sec.\n", - "Iter 7340 || Loss: 4.7425 || 10iter: 1.8678 sec.\n", - "Iter 7350 || Loss: 5.2149 || 10iter: 1.9193 sec.\n", - "Iter 7360 || Loss: 5.3914 || 10iter: 1.8853 sec.\n", - "Iter 7370 || Loss: 5.0954 || 10iter: 1.9086 sec.\n", - "Iter 7380 || Loss: 4.9616 || 10iter: 1.9579 sec.\n", - "Iter 7390 || Loss: 5.0336 || 10iter: 1.9258 sec.\n", - "Iter 7400 || Loss: 5.4775 || 10iter: 1.8878 sec.\n", - "Iter 7410 || Loss: 5.3904 || 10iter: 1.9070 sec.\n", - "Iter 7420 || Loss: 5.0947 || 10iter: 1.9831 sec.\n", - "Iter 7430 || Loss: 5.4183 || 10iter: 1.9790 sec.\n", - "Iter 7440 || Loss: 6.2816 || 10iter: 2.0177 sec.\n", - "Iter 7450 || Loss: 5.1575 || 10iter: 2.0295 sec.\n", - "Iter 7460 || Loss: 5.8364 || 10iter: 1.9251 sec.\n", - "Iter 7470 || Loss: 5.1171 || 10iter: 1.9054 sec.\n", - "Iter 7480 || Loss: 4.6392 || 10iter: 1.9062 sec.\n", - "Iter 7490 || Loss: 5.1518 || 10iter: 1.9199 sec.\n", - "Iter 7500 || Loss: 5.3727 || 10iter: 1.9499 sec.\n", - "Iter 7510 || Loss: 5.2105 || 10iter: 1.9581 sec.\n", - "Iter 7520 || Loss: 4.8140 || 10iter: 1.9574 sec.\n", - "Iter 7530 || Loss: 5.9450 || 10iter: 1.9052 sec.\n" + "Iter 3630 || Loss: 4.8520 || 10iter: 3.1964 sec.\n", + "Iter 3640 || Loss: 5.0182 || 10iter: 3.3516 sec.\n", + "Iter 3650 || Loss: 5.1061 || 10iter: 3.3968 sec.\n", + "Iter 3660 || Loss: 5.0838 || 10iter: 3.4812 sec.\n", + "Iter 3670 || Loss: 4.5199 || 10iter: 3.3855 sec.\n", + "Iter 3680 || Loss: 5.0928 || 10iter: 3.4722 sec.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ - "Iter 7540 || Loss: 5.4203 || 10iter: 1.9277 sec.\n", - "Iter 7550 || Loss: 5.6426 || 10iter: 1.9230 sec.\n", - "Iter 7560 || Loss: 5.5743 || 10iter: 1.9535 sec.\n", - "Iter 7570 || Loss: 4.9691 || 10iter: 1.9032 sec.\n", - "Iter 7580 || Loss: 5.3590 || 10iter: 1.8872 sec.\n", - "Iter 7590 || Loss: 5.3064 || 10iter: 1.9940 sec.\n", - "Iter 7600 || Loss: 5.4071 || 10iter: 1.9078 sec.\n", - "Iter 7610 || Loss: 5.5328 || 10iter: 1.9019 sec.\n", - "Iter 7620 || Loss: 4.5438 || 
10iter: 1.9248 sec.\n", - "Iter 7630 || Loss: 4.6554 || 10iter: 1.8960 sec.\n", - "Iter 7640 || Loss: 5.7452 || 10iter: 1.8856 sec.\n", - "Iter 7650 || Loss: 5.5052 || 10iter: 2.0103 sec.\n", - "Iter 7660 || Loss: 4.3756 || 10iter: 1.9704 sec.\n", - "Iter 7670 || Loss: 5.0586 || 10iter: 2.0165 sec.\n", - "Iter 7680 || Loss: 5.7227 || 10iter: 2.0719 sec.\n", - "Iter 7690 || Loss: 5.6114 || 10iter: 2.0187 sec.\n", - "Iter 7700 || Loss: 4.8371 || 10iter: 1.8732 sec.\n", - "Iter 7710 || Loss: 5.3792 || 10iter: 1.9124 sec.\n", - "Iter 7720 || Loss: 5.2324 || 10iter: 1.9028 sec.\n", - "Iter 7730 || Loss: 5.2070 || 10iter: 1.9346 sec.\n", - "Iter 7740 || Loss: 5.8493 || 10iter: 2.0196 sec.\n", - "Iter 7750 || Loss: 5.4353 || 10iter: 1.9612 sec.\n", - "Iter 7760 || Loss: 5.5819 || 10iter: 1.9216 sec.\n", - "Iter 7770 || Loss: 6.0865 || 10iter: 1.9445 sec.\n", - "Iter 7780 || Loss: 4.5834 || 10iter: 1.9246 sec.\n", - "Iter 7790 || Loss: 5.7070 || 10iter: 1.9063 sec.\n", - "Iter 7800 || Loss: 5.9228 || 10iter: 1.9155 sec.\n", - "Iter 7810 || Loss: 4.8464 || 10iter: 1.9417 sec.\n", - "Iter 7820 || Loss: 4.9128 || 10iter: 1.9260 sec.\n", - "Iter 7830 || Loss: 5.6045 || 10iter: 1.9303 sec.\n", - "Iter 7840 || Loss: 4.9797 || 10iter: 1.9510 sec.\n", - "Iter 7850 || Loss: 5.5633 || 10iter: 1.9208 sec.\n", - "Iter 7860 || Loss: 4.9747 || 10iter: 1.9230 sec.\n", - "Iter 7870 || Loss: 5.2380 || 10iter: 1.9581 sec.\n", - "Iter 7880 || Loss: 5.4359 || 10iter: 1.9057 sec.\n", - "Iter 7890 || Loss: 4.9200 || 10iter: 1.9079 sec.\n", - "Iter 7900 || Loss: 5.1683 || 10iter: 1.8994 sec.\n", - "Iter 7910 || Loss: 5.9024 || 10iter: 1.9141 sec.\n", - "Iter 7920 || Loss: 5.3544 || 10iter: 1.9035 sec.\n", - "Iter 7930 || Loss: 5.1927 || 10iter: 1.9399 sec.\n", - "Iter 7940 || Loss: 4.9489 || 10iter: 1.9159 sec.\n", - "Iter 7950 || Loss: 4.5147 || 10iter: 1.9056 sec.\n", - "Iter 7960 || Loss: 4.7378 || 10iter: 1.9057 sec.\n", - "Iter 7970 || Loss: 4.8943 || 10iter: 1.8957 sec.\n", - "Iter 7980 || Loss: 4.5592 || 10iter: 1.9570 sec.\n", - "Iter 7990 || Loss: 5.1942 || 10iter: 1.9208 sec.\n", - "Iter 8000 || Loss: 4.5264 || 10iter: 1.9165 sec.\n", - "Iter 8010 || Loss: 4.4643 || 10iter: 1.9514 sec.\n", - "Iter 8020 || Loss: 5.2241 || 10iter: 1.9444 sec.\n", - "Iter 8030 || Loss: 5.5478 || 10iter: 1.9940 sec.\n", - "Iter 8040 || Loss: 4.9554 || 10iter: 1.9710 sec.\n", - "Iter 8050 || Loss: 5.0335 || 10iter: 1.9586 sec.\n", - "Iter 8060 || Loss: 5.0360 || 10iter: 1.9746 sec.\n", - "Iter 8070 || Loss: 5.0443 || 10iter: 1.9244 sec.\n", - "Iter 8080 || Loss: 4.8811 || 10iter: 1.9076 sec.\n", - "Iter 8090 || Loss: 4.9864 || 10iter: 1.9287 sec.\n", - "Iter 8100 || Loss: 5.0268 || 10iter: 1.9837 sec.\n", - "Iter 8110 || Loss: 4.8533 || 10iter: 1.8789 sec.\n", - "Iter 8120 || Loss: 4.6960 || 10iter: 1.8951 sec.\n", - "Iter 8130 || Loss: 5.2413 || 10iter: 1.9460 sec.\n", - "Iter 8140 || Loss: 4.2581 || 10iter: 1.9307 sec.\n", - "Iter 8150 || Loss: 4.8521 || 10iter: 1.9101 sec.\n", - "Iter 8160 || Loss: 5.7603 || 10iter: 1.9492 sec.\n", - "Iter 8170 || Loss: 5.2972 || 10iter: 1.9435 sec.\n", - "Iter 8180 || Loss: 5.8701 || 10iter: 1.8901 sec.\n", - "Iter 8190 || Loss: 5.1242 || 10iter: 1.9074 sec.\n", - "Iter 8200 || Loss: 5.3602 || 10iter: 1.9090 sec.\n", - "Iter 8210 || Loss: 5.3887 || 10iter: 1.9065 sec.\n", - "Iter 8220 || Loss: 4.5722 || 10iter: 1.9665 sec.\n", - "Iter 8230 || Loss: 5.3554 || 10iter: 1.9102 sec.\n", - "Iter 8240 || Loss: 5.6532 || 10iter: 1.9284 sec.\n", - "Iter 8250 || Loss: 6.0951 || 10iter: 1.9134 sec.\n", - 
"Iter 8260 || Loss: 5.0124 || 10iter: 1.9398 sec.\n", - "Iter 8270 || Loss: 5.8652 || 10iter: 1.8623 sec.\n", - "Iter 8280 || Loss: 4.6907 || 10iter: 1.7396 sec.\n", + "Iter 3690 || Loss: 4.8675 || 10iter: 3.3999 sec.\n", + "Iter 3700 || Loss: 4.7384 || 10iter: 3.4340 sec.\n", + "Iter 3710 || Loss: 4.6953 || 10iter: 3.3850 sec.\n", + "Iter 3720 || Loss: 5.2084 || 10iter: 3.3636 sec.\n", + "Iter 3730 || Loss: 4.3088 || 10iter: 3.4519 sec.\n", + "Iter 3740 || Loss: 4.9228 || 10iter: 3.4815 sec.\n", + "Iter 3750 || Loss: 4.7602 || 10iter: 3.4038 sec.\n", + "Iter 3760 || Loss: 4.9588 || 10iter: 3.4107 sec.\n", + "Iter 3770 || Loss: 4.8657 || 10iter: 3.4762 sec.\n", + "Iter 3780 || Loss: 5.0428 || 10iter: 3.5091 sec.\n", + "Iter 3790 || Loss: 5.7787 || 10iter: 3.4346 sec.\n", + "Iter 3800 || Loss: 4.7304 || 10iter: 3.3827 sec.\n", + "Iter 3810 || Loss: 4.7113 || 10iter: 3.3482 sec.\n", + "Iter 3820 || Loss: 4.7078 || 10iter: 3.3526 sec.\n", + "Iter 3830 || Loss: 4.8722 || 10iter: 3.4191 sec.\n", + "Iter 3840 || Loss: 5.0261 || 10iter: 3.3982 sec.\n", + "Iter 3850 || Loss: 5.0775 || 10iter: 3.3621 sec.\n", + "Iter 3860 || Loss: 5.1993 || 10iter: 3.3771 sec.\n", + "Iter 3870 || Loss: 4.8228 || 10iter: 3.3633 sec.\n", + "Iter 3880 || Loss: 5.2044 || 10iter: 3.4146 sec.\n", + "Iter 3890 || Loss: 4.4160 || 10iter: 3.3567 sec.\n", + "Iter 3900 || Loss: 5.4897 || 10iter: 3.3647 sec.\n", + "Iter 3910 || Loss: 4.9229 || 10iter: 3.3357 sec.\n", + "Iter 3920 || Loss: 5.0649 || 10iter: 3.3550 sec.\n", + "Iter 3930 || Loss: 4.7691 || 10iter: 3.3685 sec.\n", + "Iter 3940 || Loss: 4.6749 || 10iter: 3.3633 sec.\n", + "Iter 3950 || Loss: 4.9752 || 10iter: 3.4146 sec.\n", + "Iter 3960 || Loss: 4.4758 || 10iter: 3.4739 sec.\n", + "Iter 3970 || Loss: 4.6764 || 10iter: 3.3567 sec.\n", + "Iter 3980 || Loss: 4.8096 || 10iter: 3.3532 sec.\n", + "Iter 3990 || Loss: 4.2373 || 10iter: 3.3972 sec.\n", + "Iter 4000 || Loss: 5.0002 || 10iter: 3.4392 sec.\n", + "Iter 4010 || Loss: 4.8334 || 10iter: 3.3937 sec.\n", + "Iter 4020 || Loss: 4.8937 || 10iter: 3.4781 sec.\n", + "Iter 4030 || Loss: 5.1886 || 10iter: 3.4792 sec.\n", + "Iter 4040 || Loss: 4.8998 || 10iter: 3.5177 sec.\n", + "Iter 4050 || Loss: 4.9248 || 10iter: 3.3835 sec.\n", + "Iter 4060 || Loss: 4.8018 || 10iter: 3.3820 sec.\n", + "Iter 4070 || Loss: 4.8926 || 10iter: 3.3714 sec.\n", + "Iter 4080 || Loss: 4.6250 || 10iter: 3.3960 sec.\n", + "Iter 4090 || Loss: 4.6695 || 10iter: 3.4329 sec.\n", + "Iter 4100 || Loss: 4.7882 || 10iter: 3.4402 sec.\n", + "Iter 4110 || Loss: 5.0476 || 10iter: 3.3635 sec.\n", + "Iter 4120 || Loss: 5.2872 || 10iter: 3.4194 sec.\n", + "Iter 4130 || Loss: 5.1095 || 10iter: 3.3922 sec.\n", + "Iter 4140 || Loss: 4.7579 || 10iter: 3.3162 sec.\n", "-------------\n", - "epoch 8 || Epoch_TRAIN_Loss:5503.6504 ||Epoch_VAL_Loss:0.0000\n", - "timer: 204.5813 sec.\n", + "epoch 8 || Epoch_TRAIN_Loss:2555.2803 ||Epoch_VAL_Loss:0.0000\n", + "timer: 177.9926 sec.\n", "lr is: 0.001\n", "-------------\n", "Epoch 9/200\n", "-------------\n", "(train)\n", - "Iter 8290 || Loss: 5.3625 || 10iter: 3.8027 sec.\n", - "Iter 8300 || Loss: 5.0152 || 10iter: 1.8864 sec.\n", - "Iter 8310 || Loss: 4.8168 || 10iter: 1.9177 sec.\n", - "Iter 8320 || Loss: 5.1531 || 10iter: 1.9298 sec.\n", - "Iter 8330 || Loss: 4.9626 || 10iter: 1.9128 sec.\n", - "Iter 8340 || Loss: 4.5365 || 10iter: 1.8878 sec.\n", - "Iter 8350 || Loss: 5.6752 || 10iter: 1.8943 sec.\n", - "Iter 8360 || Loss: 5.3261 || 10iter: 1.9358 sec.\n", - "Iter 8370 || Loss: 5.9367 || 10iter: 1.9033 sec.\n", - "Iter 
8380 || Loss: 4.9739 || 10iter: 1.9126 sec.\n", - "Iter 8390 || Loss: 5.3735 || 10iter: 1.8837 sec.\n", - "Iter 8400 || Loss: 4.7848 || 10iter: 1.9055 sec.\n", - "Iter 8410 || Loss: 5.3975 || 10iter: 1.9068 sec.\n", - "Iter 8420 || Loss: 5.9356 || 10iter: 1.8961 sec.\n", - "Iter 8430 || Loss: 5.3209 || 10iter: 1.8821 sec.\n", - "Iter 8440 || Loss: 4.8257 || 10iter: 1.9466 sec.\n", - "Iter 8450 || Loss: 5.5585 || 10iter: 1.9185 sec.\n", - "Iter 8460 || Loss: 4.7140 || 10iter: 1.9316 sec.\n", - "Iter 8470 || Loss: 5.7380 || 10iter: 1.8995 sec.\n", - "Iter 8480 || Loss: 5.0984 || 10iter: 1.9489 sec.\n", - "Iter 8490 || Loss: 5.5181 || 10iter: 1.9049 sec.\n", - "Iter 8500 || Loss: 4.7479 || 10iter: 1.9184 sec.\n", - "Iter 8510 || Loss: 5.4506 || 10iter: 1.8964 sec.\n", - "Iter 8520 || Loss: 4.9063 || 10iter: 1.8915 sec.\n", - "Iter 8530 || Loss: 4.9865 || 10iter: 1.9343 sec.\n", - "Iter 8540 || Loss: 5.4177 || 10iter: 1.9502 sec.\n", - "Iter 8550 || Loss: 5.2765 || 10iter: 1.8809 sec.\n", - "Iter 8560 || Loss: 5.7412 || 10iter: 1.9401 sec.\n", - "Iter 8570 || Loss: 4.7722 || 10iter: 1.8989 sec.\n", - "Iter 8580 || Loss: 4.9497 || 10iter: 1.9543 sec.\n", - "Iter 8590 || Loss: 5.2122 || 10iter: 1.9294 sec.\n", - "Iter 8600 || Loss: 5.4184 || 10iter: 1.9395 sec.\n", - "Iter 8610 || Loss: 5.0879 || 10iter: 1.9035 sec.\n", - "Iter 8620 || Loss: 6.2657 || 10iter: 1.9152 sec.\n", - "Iter 8630 || Loss: 4.9063 || 10iter: 1.9894 sec.\n", - "Iter 8640 || Loss: 4.7548 || 10iter: 1.9495 sec.\n", - "Iter 8650 || Loss: 5.0629 || 10iter: 1.9096 sec.\n", - "Iter 8660 || Loss: 5.5581 || 10iter: 1.8905 sec.\n", - "Iter 8670 || Loss: 5.7950 || 10iter: 1.9159 sec.\n", - "Iter 8680 || Loss: 6.2267 || 10iter: 1.8859 sec.\n", - "Iter 8690 || Loss: 5.6308 || 10iter: 1.9473 sec.\n", - "Iter 8700 || Loss: 5.5183 || 10iter: 1.9767 sec.\n", - "Iter 8710 || Loss: 5.3372 || 10iter: 1.9917 sec.\n", - "Iter 8720 || Loss: 5.3697 || 10iter: 2.0109 sec.\n", - "Iter 8730 || Loss: 5.4175 || 10iter: 2.0299 sec.\n", - "Iter 8740 || Loss: 5.4704 || 10iter: 2.0782 sec.\n", - "Iter 8750 || Loss: 5.6216 || 10iter: 1.9273 sec.\n", - "Iter 8760 || Loss: 5.3787 || 10iter: 1.8960 sec.\n", - "Iter 8770 || Loss: 5.4913 || 10iter: 1.9225 sec.\n", - "Iter 8780 || Loss: 5.2316 || 10iter: 1.9473 sec.\n", - "Iter 8790 || Loss: 5.2420 || 10iter: 1.8999 sec.\n", - "Iter 8800 || Loss: 5.1001 || 10iter: 1.9128 sec.\n", - "Iter 8810 || Loss: 4.9831 || 10iter: 1.8909 sec.\n", - "Iter 8820 || Loss: 5.0436 || 10iter: 1.9328 sec.\n", - "Iter 8830 || Loss: 4.8579 || 10iter: 1.9036 sec.\n", - "Iter 8840 || Loss: 5.5833 || 10iter: 1.9194 sec.\n", - "Iter 8850 || Loss: 4.9346 || 10iter: 1.9502 sec.\n", - "Iter 8860 || Loss: 4.9270 || 10iter: 1.9145 sec.\n", - "Iter 8870 || Loss: 5.9153 || 10iter: 1.9583 sec.\n", - "Iter 8880 || Loss: 5.7150 || 10iter: 1.9245 sec.\n", - "Iter 8890 || Loss: 5.5443 || 10iter: 1.8980 sec.\n", - "Iter 8900 || Loss: 5.8026 || 10iter: 1.9310 sec.\n", - "Iter 8910 || Loss: 5.1202 || 10iter: 1.9230 sec.\n", - "Iter 8920 || Loss: 5.3887 || 10iter: 1.8711 sec.\n", - "Iter 8930 || Loss: 5.9049 || 10iter: 1.9401 sec.\n", - "Iter 8940 || Loss: 5.4065 || 10iter: 2.0016 sec.\n", - "Iter 8950 || Loss: 5.6403 || 10iter: 1.9294 sec.\n", - "Iter 8960 || Loss: 5.2463 || 10iter: 1.9699 sec.\n", - "Iter 8970 || Loss: 5.9570 || 10iter: 1.9660 sec.\n", - "Iter 8980 || Loss: 5.6819 || 10iter: 1.9002 sec.\n", - "Iter 8990 || Loss: 5.6900 || 10iter: 1.9016 sec.\n", - "Iter 9000 || Loss: 5.0954 || 10iter: 1.9744 sec.\n", - "Iter 9010 || Loss: 5.2382 || 
10iter: 1.9220 sec.\n", - "Iter 9020 || Loss: 5.3536 || 10iter: 1.9449 sec.\n", - "Iter 9030 || Loss: 5.2050 || 10iter: 1.9437 sec.\n", - "Iter 9040 || Loss: 5.7883 || 10iter: 1.9095 sec.\n", - "Iter 9050 || Loss: 4.9560 || 10iter: 1.8664 sec.\n", - "Iter 9060 || Loss: 5.5300 || 10iter: 1.8751 sec.\n", - "Iter 9070 || Loss: 5.4661 || 10iter: 1.8976 sec.\n", - "Iter 9080 || Loss: 5.0177 || 10iter: 1.8901 sec.\n", - "Iter 9090 || Loss: 5.7886 || 10iter: 1.9078 sec.\n", - "Iter 9100 || Loss: 5.9473 || 10iter: 1.9171 sec.\n", - "Iter 9110 || Loss: 5.7017 || 10iter: 1.9635 sec.\n", - "Iter 9120 || Loss: 5.3289 || 10iter: 1.9278 sec.\n", - "Iter 9130 || Loss: 4.7557 || 10iter: 1.9191 sec.\n", - "Iter 9140 || Loss: 4.7393 || 10iter: 1.9058 sec.\n", - "Iter 9150 || Loss: 5.0956 || 10iter: 1.9135 sec.\n", - "Iter 9160 || Loss: 5.1558 || 10iter: 2.1368 sec.\n", - "Iter 9170 || Loss: 5.5507 || 10iter: 1.9152 sec.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Iter 9180 || Loss: 5.2094 || 10iter: 1.8939 sec.\n", - "Iter 9190 || Loss: 5.9387 || 10iter: 1.9174 sec.\n", - "Iter 9200 || Loss: 4.8921 || 10iter: 1.9552 sec.\n", - "Iter 9210 || Loss: 5.4270 || 10iter: 2.0010 sec.\n", - "Iter 9220 || Loss: 4.9059 || 10iter: 1.8728 sec.\n", - "Iter 9230 || Loss: 5.6045 || 10iter: 1.8955 sec.\n", - "Iter 9240 || Loss: 5.0029 || 10iter: 1.9200 sec.\n", - "Iter 9250 || Loss: 5.0002 || 10iter: 1.9235 sec.\n", - "Iter 9260 || Loss: 5.1014 || 10iter: 1.9489 sec.\n", - "Iter 9270 || Loss: 4.9806 || 10iter: 1.9019 sec.\n", - "Iter 9280 || Loss: 6.4522 || 10iter: 1.9158 sec.\n", - "Iter 9290 || Loss: 4.9896 || 10iter: 1.9013 sec.\n", - "Iter 9300 || Loss: 5.3658 || 10iter: 1.9329 sec.\n", - "Iter 9310 || Loss: 5.1430 || 10iter: 1.8043 sec.\n", + "Iter 4150 || Loss: 5.3729 || 10iter: 3.8096 sec.\n", + "Iter 4160 || Loss: 4.6862 || 10iter: 3.4390 sec.\n", + "Iter 4170 || Loss: 5.0589 || 10iter: 3.3859 sec.\n", + "Iter 4180 || Loss: 5.2166 || 10iter: 3.3430 sec.\n", + "Iter 4190 || Loss: 4.7616 || 10iter: 3.4508 sec.\n", + "Iter 4200 || Loss: 5.0148 || 10iter: 3.4321 sec.\n", + "Iter 4210 || Loss: 5.1099 || 10iter: 3.3958 sec.\n", + "Iter 4220 || Loss: 5.6618 || 10iter: 3.4176 sec.\n", + "Iter 4230 || Loss: 4.9446 || 10iter: 3.4774 sec.\n", + "Iter 4240 || Loss: 4.3385 || 10iter: 3.5152 sec.\n", + "Iter 4250 || Loss: 4.5135 || 10iter: 3.4295 sec.\n", + "Iter 4260 || Loss: 4.9414 || 10iter: 3.3857 sec.\n", + "Iter 4270 || Loss: 4.5580 || 10iter: 3.3841 sec.\n", + "Iter 4280 || Loss: 4.9216 || 10iter: 3.3791 sec.\n", + "Iter 4290 || Loss: 5.3744 || 10iter: 3.3603 sec.\n", + "Iter 4300 || Loss: 5.1480 || 10iter: 3.3985 sec.\n", + "Iter 4310 || Loss: 4.5511 || 10iter: 3.3961 sec.\n", + "Iter 4320 || Loss: 4.4567 || 10iter: 3.3377 sec.\n", + "Iter 4330 || Loss: 4.4962 || 10iter: 3.3639 sec.\n", + "Iter 4340 || Loss: 4.5858 || 10iter: 3.4235 sec.\n", + "Iter 4350 || Loss: 4.7921 || 10iter: 3.4280 sec.\n", + "Iter 4360 || Loss: 5.0798 || 10iter: 3.4183 sec.\n", + "Iter 4370 || Loss: 4.7491 || 10iter: 3.3676 sec.\n", + "Iter 4380 || Loss: 4.3596 || 10iter: 3.3812 sec.\n", + "Iter 4390 || Loss: 4.9253 || 10iter: 3.4043 sec.\n", + "Iter 4400 || Loss: 4.6998 || 10iter: 3.4040 sec.\n", + "Iter 4410 || Loss: 4.8110 || 10iter: 3.4000 sec.\n", + "Iter 4420 || Loss: 4.7903 || 10iter: 3.3720 sec.\n", + "Iter 4430 || Loss: 5.0314 || 10iter: 3.3699 sec.\n", + "Iter 4440 || Loss: 4.5039 || 10iter: 3.3740 sec.\n", + "Iter 4450 || Loss: 4.9875 || 10iter: 3.4682 sec.\n", + "Iter 4460 || Loss: 4.7288 || 10iter: 
3.5049 sec.\n", + "Iter 4470 || Loss: 4.3252 || 10iter: 3.3860 sec.\n", + "Iter 4480 || Loss: 4.1238 || 10iter: 3.3769 sec.\n", + "Iter 4490 || Loss: 4.5631 || 10iter: 3.4242 sec.\n", + "Iter 4500 || Loss: 4.6952 || 10iter: 3.4846 sec.\n", + "Iter 4510 || Loss: 4.5824 || 10iter: 3.5772 sec.\n", + "Iter 4520 || Loss: 5.2866 || 10iter: 3.5147 sec.\n", + "Iter 4530 || Loss: 5.3828 || 10iter: 3.3726 sec.\n", + "Iter 4540 || Loss: 4.5647 || 10iter: 3.4417 sec.\n", + "Iter 4550 || Loss: 4.7673 || 10iter: 3.3703 sec.\n", + "Iter 4560 || Loss: 5.1230 || 10iter: 3.3770 sec.\n", + "Iter 4570 || Loss: 4.2401 || 10iter: 3.4479 sec.\n", + "Iter 4580 || Loss: 4.5470 || 10iter: 3.3797 sec.\n", + "Iter 4590 || Loss: 5.0054 || 10iter: 3.3656 sec.\n", + "Iter 4600 || Loss: 4.7521 || 10iter: 3.3538 sec.\n", + "Iter 4610 || Loss: 5.0759 || 10iter: 3.3792 sec.\n", + "Iter 4620 || Loss: 5.0964 || 10iter: 3.4048 sec.\n", + "Iter 4630 || Loss: 4.9847 || 10iter: 3.3483 sec.\n", + "Iter 4640 || Loss: 4.5138 || 10iter: 3.3515 sec.\n", + "Iter 4650 || Loss: 4.9079 || 10iter: 3.3414 sec.\n", + "Iter 4660 || Loss: 4.3615 || 10iter: 3.3065 sec.\n", "-------------\n", - "epoch 9 || Epoch_TRAIN_Loss:5476.7265 ||Epoch_VAL_Loss:0.0000\n", - "timer: 204.0673 sec.\n", + "epoch 9 || Epoch_TRAIN_Loss:2498.0552 ||Epoch_VAL_Loss:0.0000\n", + "timer: 177.9608 sec.\n", "lr is: 0.001\n", "-------------\n", "Epoch 10/200\n", "-------------\n", "(train)\n", - "Iter 9320 || Loss: 4.7028 || 10iter: 2.7259 sec.\n", - "Iter 9330 || Loss: 5.4410 || 10iter: 1.9497 sec.\n", - "Iter 9340 || Loss: 4.3508 || 10iter: 1.8773 sec.\n", - "Iter 9350 || Loss: 5.1644 || 10iter: 1.9103 sec.\n", - "Iter 9360 || Loss: 5.2258 || 10iter: 1.8987 sec.\n", - "Iter 9370 || Loss: 5.3748 || 10iter: 1.9440 sec.\n", - "Iter 9380 || Loss: 5.4821 || 10iter: 1.8942 sec.\n", - "Iter 9390 || Loss: 5.1075 || 10iter: 1.9149 sec.\n", - "Iter 9400 || Loss: 5.4030 || 10iter: 1.9310 sec.\n", - "Iter 9410 || Loss: 5.3791 || 10iter: 1.8964 sec.\n", - "Iter 9420 || Loss: 5.7231 || 10iter: 1.9056 sec.\n", - "Iter 9430 || Loss: 5.6963 || 10iter: 1.9259 sec.\n", - "Iter 9440 || Loss: 5.1640 || 10iter: 1.9196 sec.\n", - "Iter 9450 || Loss: 6.2628 || 10iter: 1.9256 sec.\n", - "Iter 9460 || Loss: 4.9614 || 10iter: 1.8854 sec.\n", - "Iter 9470 || Loss: 5.7793 || 10iter: 1.8860 sec.\n", - "Iter 9480 || Loss: 5.5716 || 10iter: 1.9562 sec.\n", - "Iter 9490 || Loss: 5.4174 || 10iter: 1.9272 sec.\n", - "Iter 9500 || Loss: 4.6934 || 10iter: 1.9241 sec.\n", - "Iter 9510 || Loss: 5.0595 || 10iter: 1.9233 sec.\n", - "Iter 9520 || Loss: 5.0582 || 10iter: 1.9650 sec.\n", - "Iter 9530 || Loss: 5.7955 || 10iter: 1.9038 sec.\n", - "Iter 9540 || Loss: 5.2041 || 10iter: 1.9198 sec.\n", - "Iter 9550 || Loss: 5.5626 || 10iter: 1.8656 sec.\n", - "Iter 9560 || Loss: 5.0851 || 10iter: 1.9419 sec.\n", - "Iter 9570 || Loss: 4.7103 || 10iter: 1.9589 sec.\n", - "Iter 9580 || Loss: 4.9856 || 10iter: 1.9357 sec.\n", - "Iter 9590 || Loss: 5.2953 || 10iter: 1.9591 sec.\n", - "Iter 9600 || Loss: 4.1966 || 10iter: 1.9259 sec.\n", - "Iter 9610 || Loss: 4.3204 || 10iter: 1.9321 sec.\n", - "Iter 9620 || Loss: 5.0818 || 10iter: 1.9283 sec.\n", - "Iter 9630 || Loss: 4.7996 || 10iter: 1.9095 sec.\n", - "Iter 9640 || Loss: 5.2652 || 10iter: 1.9129 sec.\n", - "Iter 9650 || Loss: 4.7649 || 10iter: 1.9602 sec.\n", - "Iter 9660 || Loss: 5.5118 || 10iter: 1.9069 sec.\n", - "Iter 9670 || Loss: 5.5286 || 10iter: 1.9708 sec.\n", - "Iter 9680 || Loss: 4.9230 || 10iter: 1.9152 sec.\n", - "Iter 9690 || Loss: 5.1051 || 10iter: 1.9374 
sec.\n", - "Iter 9700 || Loss: 4.8454 || 10iter: 1.9184 sec.\n", - "Iter 9710 || Loss: 5.3123 || 10iter: 1.9110 sec.\n", - "Iter 9720 || Loss: 5.2429 || 10iter: 1.9261 sec.\n", - "Iter 9730 || Loss: 5.1681 || 10iter: 1.9605 sec.\n", - "Iter 9740 || Loss: 5.2179 || 10iter: 1.9391 sec.\n", - "Iter 9750 || Loss: 4.7723 || 10iter: 1.9348 sec.\n", - "Iter 9760 || Loss: 5.6249 || 10iter: 1.9151 sec.\n", - "Iter 9770 || Loss: 4.1575 || 10iter: 1.8844 sec.\n", - "Iter 9780 || Loss: 5.4961 || 10iter: 1.9222 sec.\n", - "Iter 9790 || Loss: 5.4147 || 10iter: 1.9126 sec.\n", - "Iter 9800 || Loss: 4.6546 || 10iter: 1.8747 sec.\n", - "Iter 9810 || Loss: 4.8900 || 10iter: 1.9449 sec.\n", - "Iter 9820 || Loss: 4.7227 || 10iter: 1.8999 sec.\n", - "Iter 9830 || Loss: 4.9963 || 10iter: 1.9162 sec.\n", - "Iter 9840 || Loss: 5.8973 || 10iter: 1.9929 sec.\n", - "Iter 9850 || Loss: 6.1090 || 10iter: 1.9585 sec.\n", - "Iter 9860 || Loss: 5.4087 || 10iter: 1.9296 sec.\n", - "Iter 9870 || Loss: 5.4700 || 10iter: 1.8836 sec.\n", - "Iter 9880 || Loss: 4.4542 || 10iter: 1.8907 sec.\n", - "Iter 9890 || Loss: 5.9678 || 10iter: 1.8996 sec.\n", - "Iter 9900 || Loss: 5.3585 || 10iter: 1.9307 sec.\n", - "Iter 9910 || Loss: 4.8997 || 10iter: 1.8838 sec.\n", - "Iter 9920 || Loss: 5.1049 || 10iter: 1.9326 sec.\n", - "Iter 9930 || Loss: 5.0847 || 10iter: 1.9255 sec.\n", - "Iter 9940 || Loss: 5.1202 || 10iter: 1.9232 sec.\n", - "Iter 9950 || Loss: 5.3619 || 10iter: 1.9409 sec.\n", - "Iter 9960 || Loss: 5.1533 || 10iter: 1.9413 sec.\n", - "Iter 9970 || Loss: 5.3760 || 10iter: 1.9130 sec.\n", - "Iter 9980 || Loss: 5.1678 || 10iter: 1.8976 sec.\n", - "Iter 9990 || Loss: 5.4373 || 10iter: 1.8803 sec.\n", - "Iter 10000 || Loss: 5.5037 || 10iter: 1.9289 sec.\n", - "Iter 10010 || Loss: 5.3892 || 10iter: 1.9339 sec.\n", - "Iter 10020 || Loss: 4.6527 || 10iter: 1.9281 sec.\n", - "Iter 10030 || Loss: 4.7520 || 10iter: 1.8943 sec.\n", - "Iter 10040 || Loss: 5.0529 || 10iter: 1.8898 sec.\n", - "Iter 10050 || Loss: 5.4313 || 10iter: 1.9363 sec.\n", - "Iter 10060 || Loss: 5.7829 || 10iter: 1.9450 sec.\n", - "Iter 10070 || Loss: 5.6492 || 10iter: 1.9231 sec.\n", - "Iter 10080 || Loss: 4.9842 || 10iter: 1.9129 sec.\n", - "Iter 10090 || Loss: 4.3759 || 10iter: 1.8937 sec.\n", - "Iter 10100 || Loss: 4.7426 || 10iter: 1.9129 sec.\n", - "Iter 10110 || Loss: 5.2795 || 10iter: 1.8920 sec.\n", - "Iter 10120 || Loss: 4.7472 || 10iter: 1.9368 sec.\n", - "Iter 10130 || Loss: 5.5929 || 10iter: 1.9025 sec.\n", - "Iter 10140 || Loss: 6.0711 || 10iter: 1.9054 sec.\n", - "Iter 10150 || Loss: 4.7826 || 10iter: 1.9033 sec.\n", - "Iter 10160 || Loss: 5.3375 || 10iter: 1.9155 sec.\n", - "Iter 10170 || Loss: 6.0965 || 10iter: 1.9222 sec.\n", - "Iter 10180 || Loss: 4.8344 || 10iter: 1.9137 sec.\n", - "Iter 10190 || Loss: 6.2410 || 10iter: 1.8879 sec.\n", - "Iter 10200 || Loss: 4.9539 || 10iter: 1.9397 sec.\n", - "Iter 10210 || Loss: 5.1718 || 10iter: 1.9444 sec.\n", - "Iter 10220 || Loss: 6.1866 || 10iter: 1.9404 sec.\n", - "Iter 10230 || Loss: 5.8270 || 10iter: 2.0026 sec.\n", - "Iter 10240 || Loss: 5.0525 || 10iter: 1.9925 sec.\n", - "Iter 10250 || Loss: 4.9629 || 10iter: 2.0084 sec.\n", - "Iter 10260 || Loss: 4.3650 || 10iter: 1.9383 sec.\n", - "Iter 10270 || Loss: 5.9446 || 10iter: 1.8766 sec.\n", - "Iter 10280 || Loss: 4.8732 || 10iter: 1.9404 sec.\n", - "Iter 10290 || Loss: 5.8198 || 10iter: 1.8935 sec.\n", - "Iter 10300 || Loss: 5.3013 || 10iter: 1.8946 sec.\n", - "Iter 10310 || Loss: 5.9169 || 10iter: 1.9248 sec.\n", - "Iter 10320 || Loss: 4.9155 || 10iter: 
1.9570 sec.\n", - "Iter 10330 || Loss: 4.4008 || 10iter: 1.9125 sec.\n", - "Iter 10340 || Loss: 5.0415 || 10iter: 1.9073 sec.\n", - "Iter 10350 || Loss: 4.6114 || 10iter: 1.7326 sec.\n", + "Iter 4670 || Loss: 4.9244 || 10iter: 4.6045 sec.\n", + "Iter 4680 || Loss: 4.8562 || 10iter: 3.5092 sec.\n", + "Iter 4690 || Loss: 5.1253 || 10iter: 3.4212 sec.\n", + "Iter 4700 || Loss: 5.1765 || 10iter: 3.3747 sec.\n", + "Iter 4710 || Loss: 4.3773 || 10iter: 3.3938 sec.\n", + "Iter 4720 || Loss: 4.5292 || 10iter: 3.4678 sec.\n", + "Iter 4730 || Loss: 4.7601 || 10iter: 3.3888 sec.\n", + "Iter 4740 || Loss: 4.6672 || 10iter: 3.3640 sec.\n", + "Iter 4750 || Loss: 4.8667 || 10iter: 3.4133 sec.\n", + "Iter 4760 || Loss: 4.7417 || 10iter: 3.3557 sec.\n", + "Iter 4770 || Loss: 4.9857 || 10iter: 3.3633 sec.\n", + "Iter 4780 || Loss: 4.7186 || 10iter: 3.3536 sec.\n", + "Iter 4790 || Loss: 4.6322 || 10iter: 3.3493 sec.\n", + "Iter 4800 || Loss: 5.1384 || 10iter: 3.3689 sec.\n", + "Iter 4810 || Loss: 4.6204 || 10iter: 3.3918 sec.\n", + "Iter 4820 || Loss: 4.2610 || 10iter: 3.3528 sec.\n", + "Iter 4830 || Loss: 4.2542 || 10iter: 3.3506 sec.\n", + "Iter 4840 || Loss: 4.6505 || 10iter: 3.3628 sec.\n", + "Iter 4850 || Loss: 4.8179 || 10iter: 3.3586 sec.\n", + "Iter 4860 || Loss: 5.1188 || 10iter: 3.3525 sec.\n", + "Iter 4870 || Loss: 4.8651 || 10iter: 3.3730 sec.\n", + "Iter 4880 || Loss: 4.5491 || 10iter: 3.4543 sec.\n", + "Iter 4890 || Loss: 4.9177 || 10iter: 3.3561 sec.\n", + "Iter 4900 || Loss: 4.7518 || 10iter: 3.3450 sec.\n", + "Iter 4910 || Loss: 4.7989 || 10iter: 3.3465 sec.\n", + "Iter 4920 || Loss: 4.4850 || 10iter: 3.3628 sec.\n", + "Iter 4930 || Loss: 4.9113 || 10iter: 3.3797 sec.\n", + "Iter 4940 || Loss: 5.1727 || 10iter: 3.3825 sec.\n", + "Iter 4950 || Loss: 4.1367 || 10iter: 3.3871 sec.\n", + "Iter 4960 || Loss: 4.7088 || 10iter: 3.4134 sec.\n", + "Iter 4970 || Loss: 4.6547 || 10iter: 3.3420 sec.\n", + "Iter 4980 || Loss: 5.4347 || 10iter: 3.3918 sec.\n", + "Iter 4990 || Loss: 5.0373 || 10iter: 3.3766 sec.\n", + "Iter 5000 || Loss: 4.9320 || 10iter: 3.4926 sec.\n", + "Iter 5010 || Loss: 4.8331 || 10iter: 3.4114 sec.\n", + "Iter 5020 || Loss: 4.8247 || 10iter: 3.3540 sec.\n", + "Iter 5030 || Loss: 4.6116 || 10iter: 3.3679 sec.\n", + "Iter 5040 || Loss: 4.7980 || 10iter: 3.3538 sec.\n", + "Iter 5050 || Loss: 4.8942 || 10iter: 3.3990 sec.\n", + "Iter 5060 || Loss: 5.2979 || 10iter: 3.3871 sec.\n", + "Iter 5070 || Loss: 4.9388 || 10iter: 3.3548 sec.\n", + "Iter 5080 || Loss: 4.8695 || 10iter: 3.4696 sec.\n", + "Iter 5090 || Loss: 4.5920 || 10iter: 3.3879 sec.\n", + "Iter 5100 || Loss: 5.0763 || 10iter: 3.3522 sec.\n", + "Iter 5110 || Loss: 4.5290 || 10iter: 3.3437 sec.\n", + "Iter 5120 || Loss: 4.7130 || 10iter: 3.3729 sec.\n", + "Iter 5130 || Loss: 4.8790 || 10iter: 3.4069 sec.\n", + "Iter 5140 || Loss: 5.0792 || 10iter: 3.3834 sec.\n", + "Iter 5150 || Loss: 5.0664 || 10iter: 3.3826 sec.\n", + "Iter 5160 || Loss: 5.2099 || 10iter: 3.3813 sec.\n", + "Iter 5170 || Loss: 4.9749 || 10iter: 3.3163 sec.\n", + "Iter 5180 || Loss: 6.1031 || 10iter: 3.0821 sec.\n", "-------------\n", "(val)\n", "-------------\n", - "epoch 10 || Epoch_TRAIN_Loss:5406.8323 ||Epoch_VAL_Loss:1519.4686\n", - "timer: 246.5118 sec.\n", + "epoch 10 || Epoch_TRAIN_Loss:2481.6381 ||Epoch_VAL_Loss:721.7942\n", + "timer: 205.4946 sec.\n", "lr is: 0.001\n", "-------------\n", "Epoch 11/200\n", "-------------\n", "(train)\n", - "Iter 10360 || Loss: 5.4187 || 10iter: 3.7286 sec.\n", - "Iter 10370 || Loss: 5.0149 || 10iter: 1.9014 sec.\n", - 
"Iter 10380 || Loss: 5.4323 || 10iter: 1.9052 sec.\n", - "Iter 10390 || Loss: 5.5029 || 10iter: 1.9177 sec.\n", - "Iter 10400 || Loss: 4.9864 || 10iter: 1.9046 sec.\n", - "Iter 10410 || Loss: 5.1180 || 10iter: 1.9087 sec.\n", - "Iter 10420 || Loss: 5.6769 || 10iter: 1.9221 sec.\n", - "Iter 10430 || Loss: 5.4431 || 10iter: 1.9562 sec.\n", - "Iter 10440 || Loss: 4.9671 || 10iter: 1.9256 sec.\n", - "Iter 10450 || Loss: 5.2796 || 10iter: 1.9773 sec.\n", - "Iter 10460 || Loss: 4.1618 || 10iter: 1.9207 sec.\n", - "Iter 10470 || Loss: 5.3567 || 10iter: 1.9164 sec.\n", - "Iter 10480 || Loss: 5.0847 || 10iter: 1.9085 sec.\n", - "Iter 10490 || Loss: 5.0676 || 10iter: 1.8860 sec.\n", - "Iter 10500 || Loss: 5.1934 || 10iter: 1.8841 sec.\n", - "Iter 10510 || Loss: 4.8118 || 10iter: 1.8974 sec.\n", - "Iter 10520 || Loss: 5.3521 || 10iter: 1.9189 sec.\n", - "Iter 10530 || Loss: 5.2803 || 10iter: 1.9148 sec.\n", - "Iter 10540 || Loss: 5.0059 || 10iter: 1.9575 sec.\n", - "Iter 10550 || Loss: 5.0449 || 10iter: 2.0083 sec.\n", - "Iter 10560 || Loss: 5.2641 || 10iter: 1.9689 sec.\n", - "Iter 10570 || Loss: 5.2289 || 10iter: 1.9147 sec.\n", - "Iter 10580 || Loss: 4.6226 || 10iter: 1.9335 sec.\n", - "Iter 10590 || Loss: 5.4674 || 10iter: 1.9716 sec.\n", - "Iter 10600 || Loss: 4.9209 || 10iter: 1.9779 sec.\n", - "Iter 10610 || Loss: 5.2473 || 10iter: 1.9933 sec.\n", - "Iter 10620 || Loss: 4.9255 || 10iter: 2.0157 sec.\n", - "Iter 10630 || Loss: 5.9055 || 10iter: 1.9298 sec.\n", - "Iter 10640 || Loss: 4.8934 || 10iter: 1.9595 sec.\n", - "Iter 10650 || Loss: 4.8569 || 10iter: 1.9006 sec.\n", - "Iter 10660 || Loss: 4.8509 || 10iter: 1.8770 sec.\n", - "Iter 10670 || Loss: 4.4943 || 10iter: 1.9015 sec.\n", - "Iter 10680 || Loss: 5.6717 || 10iter: 1.9218 sec.\n", - "Iter 10690 || Loss: 4.9989 || 10iter: 1.9242 sec.\n", - "Iter 10700 || Loss: 5.0211 || 10iter: 1.9570 sec.\n", - "Iter 10710 || Loss: 4.9693 || 10iter: 1.8873 sec.\n", - "Iter 10720 || Loss: 4.7547 || 10iter: 1.9414 sec.\n", - "Iter 10730 || Loss: 5.1898 || 10iter: 1.9379 sec.\n", - "Iter 10740 || Loss: 4.6290 || 10iter: 1.9163 sec.\n", - "Iter 10750 || Loss: 5.0557 || 10iter: 1.9205 sec.\n", - "Iter 10760 || Loss: 5.4432 || 10iter: 1.9438 sec.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Iter 10770 || Loss: 5.3237 || 10iter: 1.9322 sec.\n", - "Iter 10780 || Loss: 5.6913 || 10iter: 1.9236 sec.\n", - "Iter 10790 || Loss: 5.4656 || 10iter: 1.9038 sec.\n", - "Iter 10800 || Loss: 5.8244 || 10iter: 1.9645 sec.\n", - "Iter 10810 || Loss: 5.4652 || 10iter: 1.9265 sec.\n", - "Iter 10820 || Loss: 5.6042 || 10iter: 1.8911 sec.\n", - "Iter 10830 || Loss: 4.7095 || 10iter: 1.9338 sec.\n", - "Iter 10840 || Loss: 5.3835 || 10iter: 1.9390 sec.\n", - "Iter 10850 || Loss: 4.8674 || 10iter: 1.9307 sec.\n", - "Iter 10860 || Loss: 4.8937 || 10iter: 1.9208 sec.\n", - "Iter 10870 || Loss: 5.0901 || 10iter: 1.9088 sec.\n", - "Iter 10880 || Loss: 5.0186 || 10iter: 1.9463 sec.\n", - "Iter 10890 || Loss: 4.7500 || 10iter: 1.9539 sec.\n", - "Iter 10900 || Loss: 5.4548 || 10iter: 1.9261 sec.\n", - "Iter 10910 || Loss: 4.4846 || 10iter: 1.9562 sec.\n", - "Iter 10920 || Loss: 5.3497 || 10iter: 1.9009 sec.\n", - "Iter 10930 || Loss: 5.1473 || 10iter: 1.8944 sec.\n", - "Iter 10940 || Loss: 5.4784 || 10iter: 1.9549 sec.\n", - "Iter 10950 || Loss: 5.2840 || 10iter: 1.9256 sec.\n", - "Iter 10960 || Loss: 4.5664 || 10iter: 1.9380 sec.\n", - "Iter 10970 || Loss: 4.7914 || 10iter: 1.9279 sec.\n", - "Iter 10980 || Loss: 5.0464 || 10iter: 1.9278 sec.\n", - "Iter 
10990 || Loss: 4.8586 || 10iter: 1.9597 sec.\n", - "Iter 11000 || Loss: 5.4742 || 10iter: 2.0104 sec.\n", - "Iter 11010 || Loss: 5.4618 || 10iter: 2.0265 sec.\n", - "Iter 11020 || Loss: 4.9040 || 10iter: 2.0371 sec.\n", - "Iter 11030 || Loss: 4.7851 || 10iter: 1.9326 sec.\n", - "Iter 11040 || Loss: 5.1874 || 10iter: 1.9177 sec.\n", - "Iter 11050 || Loss: 5.1790 || 10iter: 1.9333 sec.\n", - "Iter 11060 || Loss: 5.3363 || 10iter: 1.9155 sec.\n", - "Iter 11070 || Loss: 4.8506 || 10iter: 1.9328 sec.\n", - "Iter 11080 || Loss: 4.9219 || 10iter: 1.9123 sec.\n", - "Iter 11090 || Loss: 4.7631 || 10iter: 1.9077 sec.\n", - "Iter 11100 || Loss: 5.0066 || 10iter: 1.9680 sec.\n", - "Iter 11110 || Loss: 5.0048 || 10iter: 1.9300 sec.\n", - "Iter 11120 || Loss: 4.8208 || 10iter: 1.9256 sec.\n", - "Iter 11130 || Loss: 4.4022 || 10iter: 1.9171 sec.\n", - "Iter 11140 || Loss: 4.9707 || 10iter: 1.9492 sec.\n", - "Iter 11150 || Loss: 5.6035 || 10iter: 1.9628 sec.\n", - "Iter 11160 || Loss: 5.4060 || 10iter: 1.9106 sec.\n", - "Iter 11170 || Loss: 5.5603 || 10iter: 1.9156 sec.\n", - "Iter 11180 || Loss: 5.1362 || 10iter: 1.8818 sec.\n", - "Iter 11190 || Loss: 5.0182 || 10iter: 1.9177 sec.\n", - "Iter 11200 || Loss: 5.2954 || 10iter: 1.9387 sec.\n", - "Iter 11210 || Loss: 5.8631 || 10iter: 1.9486 sec.\n", - "Iter 11220 || Loss: 4.9044 || 10iter: 1.9193 sec.\n", - "Iter 11230 || Loss: 4.7383 || 10iter: 1.9050 sec.\n", - "Iter 11240 || Loss: 5.1080 || 10iter: 1.9415 sec.\n", - "Iter 11250 || Loss: 5.0176 || 10iter: 1.9356 sec.\n", - "Iter 11260 || Loss: 4.7281 || 10iter: 1.9036 sec.\n", - "Iter 11270 || Loss: 6.6153 || 10iter: 1.9473 sec.\n", - "Iter 11280 || Loss: 5.5868 || 10iter: 1.9594 sec.\n", - "Iter 11290 || Loss: 5.5104 || 10iter: 1.9231 sec.\n", - "Iter 11300 || Loss: 4.9528 || 10iter: 1.9140 sec.\n", - "Iter 11310 || Loss: 4.9470 || 10iter: 1.9025 sec.\n", - "Iter 11320 || Loss: 5.0940 || 10iter: 1.8850 sec.\n", - "Iter 11330 || Loss: 5.5952 || 10iter: 1.9191 sec.\n", - "Iter 11340 || Loss: 5.4968 || 10iter: 1.8884 sec.\n", - "Iter 11350 || Loss: 4.9171 || 10iter: 1.9275 sec.\n", - "Iter 11360 || Loss: 5.1741 || 10iter: 1.9124 sec.\n", - "Iter 11370 || Loss: 5.0552 || 10iter: 1.9286 sec.\n", - "Iter 11380 || Loss: 5.3198 || 10iter: 1.8371 sec.\n", - "-------------\n", - "epoch 11 || Epoch_TRAIN_Loss:5344.6938 ||Epoch_VAL_Loss:0.0000\n", - "timer: 204.5119 sec.\n", - "lr is: 0.001\n", - "-------------\n", - "Epoch 12/200\n", - "-------------\n", - "(train)\n", - "Iter 11390 || Loss: 4.7750 || 10iter: 3.0098 sec.\n", - "Iter 11400 || Loss: 4.8311 || 10iter: 2.0108 sec.\n", - "Iter 11410 || Loss: 5.3834 || 10iter: 1.9322 sec.\n", - "Iter 11420 || Loss: 5.5810 || 10iter: 1.9297 sec.\n", - "Iter 11430 || Loss: 5.1715 || 10iter: 1.9153 sec.\n", - "Iter 11440 || Loss: 4.5261 || 10iter: 1.9059 sec.\n", - "Iter 11450 || Loss: 5.4182 || 10iter: 1.9161 sec.\n", - "Iter 11460 || Loss: 4.7574 || 10iter: 1.9953 sec.\n", - "Iter 11470 || Loss: 4.8330 || 10iter: 1.9152 sec.\n", - "Iter 11480 || Loss: 5.4270 || 10iter: 1.8767 sec.\n", - "Iter 11490 || Loss: 4.2749 || 10iter: 1.9411 sec.\n", - "Iter 11500 || Loss: 5.3731 || 10iter: 1.9138 sec.\n", - "Iter 11510 || Loss: 4.7400 || 10iter: 1.9435 sec.\n", - "Iter 11520 || Loss: 5.6541 || 10iter: 1.8850 sec.\n", - "Iter 11530 || Loss: 5.3755 || 10iter: 1.9359 sec.\n", - "Iter 11540 || Loss: 5.1963 || 10iter: 1.9131 sec.\n", - "Iter 11550 || Loss: 4.9724 || 10iter: 1.9296 sec.\n", - "Iter 11560 || Loss: 4.6669 || 10iter: 1.9225 sec.\n", - "Iter 11570 || Loss: 4.5498 || 10iter: 
1.9462 sec.\n", - "Iter 11580 || Loss: 5.9594 || 10iter: 1.9228 sec.\n", - "Iter 11590 || Loss: 4.3686 || 10iter: 1.9198 sec.\n", - "Iter 11600 || Loss: 5.7748 || 10iter: 1.9111 sec.\n", - "Iter 11610 || Loss: 5.4146 || 10iter: 1.9216 sec.\n", - "Iter 11620 || Loss: 4.8277 || 10iter: 1.9221 sec.\n", - "Iter 11630 || Loss: 5.7600 || 10iter: 1.9104 sec.\n", - "Iter 11640 || Loss: 5.0438 || 10iter: 1.9202 sec.\n", - "Iter 11650 || Loss: 5.0459 || 10iter: 1.9479 sec.\n", - "Iter 11660 || Loss: 5.1793 || 10iter: 1.9397 sec.\n", - "Iter 11670 || Loss: 5.1987 || 10iter: 1.9597 sec.\n", - "Iter 11680 || Loss: 4.2431 || 10iter: 1.9195 sec.\n", - "Iter 11690 || Loss: 5.9989 || 10iter: 1.9323 sec.\n", - "Iter 11700 || Loss: 5.0867 || 10iter: 1.8725 sec.\n", - "Iter 11710 || Loss: 4.4389 || 10iter: 1.9450 sec.\n", - "Iter 11720 || Loss: 4.8250 || 10iter: 1.9484 sec.\n", - "Iter 11730 || Loss: 5.1996 || 10iter: 1.9209 sec.\n", - "Iter 11740 || Loss: 5.2472 || 10iter: 1.9066 sec.\n", - "Iter 11750 || Loss: 4.3301 || 10iter: 1.9058 sec.\n", - "Iter 11760 || Loss: 5.5556 || 10iter: 1.9063 sec.\n", - "Iter 11770 || Loss: 5.7064 || 10iter: 1.8996 sec.\n", - "Iter 11780 || Loss: 5.0104 || 10iter: 1.9183 sec.\n", - "Iter 11790 || Loss: 4.8303 || 10iter: 1.9166 sec.\n", - "Iter 11800 || Loss: 5.1812 || 10iter: 1.9357 sec.\n", - "Iter 11810 || Loss: 5.3757 || 10iter: 1.9032 sec.\n", - "Iter 11820 || Loss: 5.1290 || 10iter: 1.9491 sec.\n", - "Iter 11830 || Loss: 5.5458 || 10iter: 1.9147 sec.\n", - "Iter 11840 || Loss: 4.7665 || 10iter: 1.9126 sec.\n", - "Iter 11850 || Loss: 4.1972 || 10iter: 1.9072 sec.\n", - "Iter 11860 || Loss: 5.3758 || 10iter: 1.8922 sec.\n", - "Iter 11870 || Loss: 5.2149 || 10iter: 1.8934 sec.\n", - "Iter 11880 || Loss: 5.8153 || 10iter: 1.9030 sec.\n", - "Iter 11890 || Loss: 5.0704 || 10iter: 1.9385 sec.\n", - "Iter 11900 || Loss: 5.3109 || 10iter: 1.9570 sec.\n", - "Iter 11910 || Loss: 5.0736 || 10iter: 1.9586 sec.\n", - "Iter 11920 || Loss: 5.2753 || 10iter: 1.9265 sec.\n", - "Iter 11930 || Loss: 5.0960 || 10iter: 1.9017 sec.\n", - "Iter 11940 || Loss: 5.0338 || 10iter: 1.8801 sec.\n", - "Iter 11950 || Loss: 5.4490 || 10iter: 1.9291 sec.\n", - "Iter 11960 || Loss: 4.1153 || 10iter: 1.9147 sec.\n", - "Iter 11970 || Loss: 4.5397 || 10iter: 1.9123 sec.\n", - "Iter 11980 || Loss: 4.8204 || 10iter: 1.9154 sec.\n", - "Iter 11990 || Loss: 5.1229 || 10iter: 1.9676 sec.\n", - "Iter 12000 || Loss: 4.6447 || 10iter: 1.9459 sec.\n", - "Iter 12010 || Loss: 5.6929 || 10iter: 1.9278 sec.\n", - "Iter 12020 || Loss: 5.6893 || 10iter: 1.9237 sec.\n", - "Iter 12030 || Loss: 4.8732 || 10iter: 1.9119 sec.\n", - "Iter 12040 || Loss: 5.1677 || 10iter: 1.9736 sec.\n", - "Iter 12050 || Loss: 4.8761 || 10iter: 1.9572 sec.\n", - "Iter 12060 || Loss: 5.2687 || 10iter: 1.9500 sec.\n", - "Iter 12070 || Loss: 5.3635 || 10iter: 1.9120 sec.\n", - "Iter 12080 || Loss: 5.4838 || 10iter: 1.9132 sec.\n", - "Iter 12090 || Loss: 5.2798 || 10iter: 1.9313 sec.\n", - "Iter 12100 || Loss: 5.0127 || 10iter: 1.8947 sec.\n", - "Iter 12110 || Loss: 4.7587 || 10iter: 1.8684 sec.\n", - "Iter 12120 || Loss: 4.9004 || 10iter: 1.8883 sec.\n", - "Iter 12130 || Loss: 4.9088 || 10iter: 1.9433 sec.\n", - "Iter 12140 || Loss: 5.6535 || 10iter: 1.9837 sec.\n", - "Iter 12150 || Loss: 4.6956 || 10iter: 1.9380 sec.\n", - "Iter 12160 || Loss: 5.7253 || 10iter: 1.9108 sec.\n", - "Iter 12170 || Loss: 5.0420 || 10iter: 1.9182 sec.\n", - "Iter 12180 || Loss: 4.3406 || 10iter: 1.9114 sec.\n", - "Iter 12190 || Loss: 5.4743 || 10iter: 1.9224 sec.\n", - 
"Iter 12200 || Loss: 5.9723 || 10iter: 1.9161 sec.\n", - "Iter 12210 || Loss: 5.2955 || 10iter: 1.9055 sec.\n", - "Iter 12220 || Loss: 4.8915 || 10iter: 1.8855 sec.\n", - "Iter 12230 || Loss: 5.0431 || 10iter: 1.9473 sec.\n", - "Iter 12240 || Loss: 4.7737 || 10iter: 1.9236 sec.\n", - "Iter 12250 || Loss: 5.4055 || 10iter: 1.8997 sec.\n", - "Iter 12260 || Loss: 4.8885 || 10iter: 1.9283 sec.\n", - "Iter 12270 || Loss: 5.8617 || 10iter: 1.9610 sec.\n", - "Iter 12280 || Loss: 5.8804 || 10iter: 1.9146 sec.\n", - "Iter 12290 || Loss: 4.9682 || 10iter: 1.9191 sec.\n", - "Iter 12300 || Loss: 5.4565 || 10iter: 1.8959 sec.\n", - "Iter 12310 || Loss: 4.9438 || 10iter: 1.8897 sec.\n", - "Iter 12320 || Loss: 5.0288 || 10iter: 1.9383 sec.\n", - "Iter 12330 || Loss: 5.4460 || 10iter: 1.9326 sec.\n", - "Iter 12340 || Loss: 4.8275 || 10iter: 1.9392 sec.\n", - "Iter 12350 || Loss: 5.2651 || 10iter: 1.9351 sec.\n", - "Iter 12360 || Loss: 6.1065 || 10iter: 1.9240 sec.\n", - "Iter 12370 || Loss: 5.1523 || 10iter: 1.9446 sec.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Iter 12380 || Loss: 5.7821 || 10iter: 1.9876 sec.\n", - "Iter 12390 || Loss: 5.6177 || 10iter: 1.9172 sec.\n", - "Iter 12400 || Loss: 4.6723 || 10iter: 1.9027 sec.\n", - "Iter 12410 || Loss: 5.2864 || 10iter: 1.8680 sec.\n", - "Iter 12420 || Loss: 5.6567 || 10iter: 1.7462 sec.\n", - "-------------\n", - "epoch 12 || Epoch_TRAIN_Loss:5323.9684 ||Epoch_VAL_Loss:0.0000\n", - "timer: 203.9996 sec.\n", - "lr is: 0.001\n", - "-------------\n", - "Epoch 13/200\n", - "-------------\n", - "(train)\n", - "Iter 12430 || Loss: 5.2301 || 10iter: 3.8338 sec.\n", - "Iter 12440 || Loss: 5.2184 || 10iter: 1.8786 sec.\n", - "Iter 12450 || Loss: 5.0316 || 10iter: 1.8974 sec.\n", - "Iter 12460 || Loss: 4.6722 || 10iter: 1.9490 sec.\n", - "Iter 12470 || Loss: 5.5091 || 10iter: 1.9512 sec.\n", - "Iter 12480 || Loss: 5.0956 || 10iter: 1.9118 sec.\n", - "Iter 12490 || Loss: 6.1115 || 10iter: 1.9043 sec.\n", - "Iter 12500 || Loss: 4.3868 || 10iter: 1.9399 sec.\n", - "Iter 12510 || Loss: 5.1611 || 10iter: 1.9371 sec.\n", - "Iter 12520 || Loss: 5.1821 || 10iter: 1.9531 sec.\n", - "Iter 12530 || Loss: 4.9484 || 10iter: 1.8945 sec.\n", - "Iter 12540 || Loss: 4.9977 || 10iter: 1.9271 sec.\n", - "Iter 12550 || Loss: 4.9142 || 10iter: 1.8628 sec.\n", - "Iter 12560 || Loss: 4.3997 || 10iter: 1.8815 sec.\n", - "Iter 12570 || Loss: 5.4001 || 10iter: 1.9232 sec.\n", - "Iter 12580 || Loss: 5.1149 || 10iter: 1.8984 sec.\n", - "Iter 12590 || Loss: 5.1816 || 10iter: 1.8941 sec.\n", - "Iter 12600 || Loss: 4.3929 || 10iter: 1.9418 sec.\n", - "Iter 12610 || Loss: 5.1714 || 10iter: 1.9078 sec.\n", - "Iter 12620 || Loss: 5.5324 || 10iter: 1.8891 sec.\n", - "Iter 12630 || Loss: 5.0689 || 10iter: 1.9393 sec.\n", - "Iter 12640 || Loss: 4.9728 || 10iter: 1.9461 sec.\n", - "Iter 12650 || Loss: 4.9129 || 10iter: 1.8995 sec.\n", - "Iter 12660 || Loss: 5.1512 || 10iter: 1.8970 sec.\n", - "Iter 12670 || Loss: 5.5214 || 10iter: 1.9203 sec.\n", - "Iter 12680 || Loss: 4.4159 || 10iter: 1.9222 sec.\n", - "Iter 12690 || Loss: 4.8964 || 10iter: 1.9823 sec.\n", - "Iter 12700 || Loss: 5.7488 || 10iter: 1.9428 sec.\n", - "Iter 12710 || Loss: 5.0433 || 10iter: 1.9251 sec.\n", - "Iter 12720 || Loss: 5.5926 || 10iter: 1.9433 sec.\n", - "Iter 12730 || Loss: 4.6116 || 10iter: 1.9337 sec.\n", - "Iter 12740 || Loss: 4.4592 || 10iter: 1.9689 sec.\n", - "Iter 12750 || Loss: 4.6413 || 10iter: 1.9124 sec.\n", - "Iter 12760 || Loss: 4.7018 || 10iter: 1.9268 sec.\n", - "Iter 12770 || 
Loss: 4.9697 || 10iter: 1.9432 sec.\n", - "Iter 12780 || Loss: 5.3603 || 10iter: 1.8884 sec.\n", - "Iter 12790 || Loss: 4.9755 || 10iter: 1.9113 sec.\n", - "Iter 12800 || Loss: 5.4964 || 10iter: 1.8915 sec.\n", - "Iter 12810 || Loss: 5.0532 || 10iter: 1.9204 sec.\n", - "Iter 12820 || Loss: 4.4793 || 10iter: 1.8983 sec.\n", - "Iter 12830 || Loss: 5.5627 || 10iter: 1.8912 sec.\n", - "Iter 12840 || Loss: 5.8465 || 10iter: 1.9371 sec.\n", - "Iter 12850 || Loss: 4.7378 || 10iter: 1.9209 sec.\n", - "Iter 12860 || Loss: 5.0794 || 10iter: 1.9230 sec.\n", - "Iter 12870 || Loss: 5.0669 || 10iter: 1.9100 sec.\n", - "Iter 12880 || Loss: 5.1008 || 10iter: 1.9086 sec.\n", - "Iter 12890 || Loss: 4.8866 || 10iter: 1.9292 sec.\n", - "Iter 12900 || Loss: 4.9822 || 10iter: 1.8927 sec.\n", - "Iter 12910 || Loss: 5.3962 || 10iter: 1.9076 sec.\n", - "Iter 12920 || Loss: 4.9047 || 10iter: 1.9157 sec.\n", - "Iter 12930 || Loss: 5.3013 || 10iter: 1.9158 sec.\n", - "Iter 12940 || Loss: 4.8607 || 10iter: 1.9111 sec.\n", - "Iter 12950 || Loss: 5.0855 || 10iter: 1.9306 sec.\n", - "Iter 12960 || Loss: 4.5123 || 10iter: 1.8773 sec.\n", - "Iter 12970 || Loss: 5.2036 || 10iter: 1.8715 sec.\n", - "Iter 12980 || Loss: 4.3877 || 10iter: 1.9267 sec.\n", - "Iter 12990 || Loss: 4.9904 || 10iter: 1.9027 sec.\n", - "Iter 13000 || Loss: 4.7755 || 10iter: 1.9159 sec.\n", - "Iter 13010 || Loss: 4.8058 || 10iter: 1.9041 sec.\n", - "Iter 13020 || Loss: 5.2457 || 10iter: 1.9061 sec.\n", - "Iter 13030 || Loss: 4.6844 || 10iter: 1.8939 sec.\n", - "Iter 13040 || Loss: 4.8132 || 10iter: 1.8844 sec.\n", - "Iter 13050 || Loss: 5.0010 || 10iter: 1.8933 sec.\n", - "Iter 13060 || Loss: 5.1245 || 10iter: 1.9875 sec.\n", - "Iter 13070 || Loss: 4.5701 || 10iter: 1.9182 sec.\n", - "Iter 13080 || Loss: 5.6142 || 10iter: 1.9013 sec.\n", - "Iter 13090 || Loss: 3.7525 || 10iter: 1.9071 sec.\n", - "Iter 13100 || Loss: 5.2722 || 10iter: 1.9548 sec.\n", - "Iter 13110 || Loss: 6.1519 || 10iter: 1.9009 sec.\n", - "Iter 13120 || Loss: 4.9032 || 10iter: 1.9004 sec.\n", - "Iter 13130 || Loss: 4.4008 || 10iter: 1.9444 sec.\n", - "Iter 13140 || Loss: 5.2507 || 10iter: 1.8855 sec.\n", - "Iter 13150 || Loss: 4.8242 || 10iter: 1.9162 sec.\n", - "Iter 13160 || Loss: 5.1528 || 10iter: 1.9256 sec.\n", - "Iter 13170 || Loss: 5.7173 || 10iter: 1.8879 sec.\n", - "Iter 13180 || Loss: 6.0553 || 10iter: 1.9058 sec.\n", - "Iter 13190 || Loss: 4.8135 || 10iter: 1.8840 sec.\n", - "Iter 13200 || Loss: 5.3619 || 10iter: 1.9079 sec.\n", - "Iter 13210 || Loss: 4.9571 || 10iter: 1.9264 sec.\n", - "Iter 13220 || Loss: 4.6518 || 10iter: 1.9454 sec.\n", - "Iter 13230 || Loss: 5.4219 || 10iter: 1.8946 sec.\n", - "Iter 13240 || Loss: 6.4885 || 10iter: 1.9716 sec.\n", - "Iter 13250 || Loss: 5.1057 || 10iter: 1.8921 sec.\n", - "Iter 13260 || Loss: 4.5243 || 10iter: 1.9594 sec.\n", - "Iter 13270 || Loss: 5.2861 || 10iter: 1.9383 sec.\n", - "Iter 13280 || Loss: 5.0121 || 10iter: 1.8985 sec.\n", - "Iter 13290 || Loss: 4.9412 || 10iter: 1.9665 sec.\n", - "Iter 13300 || Loss: 5.8559 || 10iter: 1.9757 sec.\n", - "Iter 13310 || Loss: 5.3053 || 10iter: 1.9246 sec.\n", - "Iter 13320 || Loss: 5.6057 || 10iter: 1.8850 sec.\n", - "Iter 13330 || Loss: 5.1894 || 10iter: 1.9516 sec.\n", - "Iter 13340 || Loss: 5.7093 || 10iter: 1.9000 sec.\n", - "Iter 13350 || Loss: 5.6248 || 10iter: 1.9327 sec.\n", - "Iter 13360 || Loss: 4.6371 || 10iter: 1.9053 sec.\n", - "Iter 13370 || Loss: 5.5010 || 10iter: 1.8888 sec.\n", - "Iter 13380 || Loss: 4.8812 || 10iter: 1.8967 sec.\n", - "Iter 13390 || Loss: 5.2612 || 
10iter: 1.9440 sec.\n", - "Iter 13400 || Loss: 5.6705 || 10iter: 1.9615 sec.\n", - "Iter 13410 || Loss: 4.6866 || 10iter: 1.9157 sec.\n", - "Iter 13420 || Loss: 5.5697 || 10iter: 1.9359 sec.\n", - "Iter 13430 || Loss: 5.5386 || 10iter: 1.9511 sec.\n", - "Iter 13440 || Loss: 6.1821 || 10iter: 1.9140 sec.\n", - "Iter 13450 || Loss: 5.0993 || 10iter: 1.8032 sec.\n", - "-------------\n", - "epoch 13 || Epoch_TRAIN_Loss:5291.5201 ||Epoch_VAL_Loss:0.0000\n", - "timer: 203.3208 sec.\n", - "lr is: 0.001\n", - "-------------\n", - "Epoch 14/200\n", - "-------------\n", - "(train)\n", - "Iter 13460 || Loss: 4.5210 || 10iter: 2.6290 sec.\n", - "Iter 13470 || Loss: 5.4727 || 10iter: 1.9444 sec.\n", - "Iter 13480 || Loss: 5.0352 || 10iter: 1.8964 sec.\n", - "Iter 13490 || Loss: 5.6213 || 10iter: 1.8886 sec.\n", - "Iter 13500 || Loss: 4.9960 || 10iter: 1.8904 sec.\n", - "Iter 13510 || Loss: 5.4474 || 10iter: 1.9474 sec.\n", - "Iter 13520 || Loss: 4.3910 || 10iter: 1.9055 sec.\n", - "Iter 13530 || Loss: 4.4784 || 10iter: 1.8923 sec.\n", - "Iter 13540 || Loss: 5.3017 || 10iter: 1.9216 sec.\n", - "Iter 13550 || Loss: 4.9129 || 10iter: 1.8835 sec.\n", - "Iter 13560 || Loss: 5.2320 || 10iter: 1.9027 sec.\n", - "Iter 13570 || Loss: 4.0755 || 10iter: 1.8992 sec.\n", - "Iter 13580 || Loss: 5.4922 || 10iter: 1.9236 sec.\n", - "Iter 13590 || Loss: 4.4459 || 10iter: 1.9332 sec.\n", - "Iter 13600 || Loss: 4.3499 || 10iter: 1.8899 sec.\n", - "Iter 13610 || Loss: 5.6240 || 10iter: 1.8911 sec.\n", - "Iter 13620 || Loss: 4.7290 || 10iter: 1.9117 sec.\n", - "Iter 13630 || Loss: 5.6805 || 10iter: 1.9328 sec.\n", - "Iter 13640 || Loss: 4.9943 || 10iter: 1.9196 sec.\n", - "Iter 13650 || Loss: 5.4547 || 10iter: 1.9546 sec.\n", - "Iter 13660 || Loss: 4.7711 || 10iter: 1.9491 sec.\n", - "Iter 13670 || Loss: 5.3370 || 10iter: 1.9140 sec.\n", - "Iter 13680 || Loss: 5.1131 || 10iter: 1.9134 sec.\n", - "Iter 13690 || Loss: 5.0109 || 10iter: 1.9250 sec.\n", - "Iter 13700 || Loss: 4.9752 || 10iter: 1.9224 sec.\n", - "Iter 13710 || Loss: 5.6353 || 10iter: 1.9001 sec.\n", - "Iter 13720 || Loss: 5.0625 || 10iter: 1.9325 sec.\n", - "Iter 13730 || Loss: 5.5450 || 10iter: 1.9251 sec.\n", - "Iter 13740 || Loss: 5.6375 || 10iter: 1.9629 sec.\n", - "Iter 13750 || Loss: 5.3169 || 10iter: 1.9374 sec.\n", - "Iter 13760 || Loss: 5.4010 || 10iter: 1.9465 sec.\n", - "Iter 13770 || Loss: 5.2134 || 10iter: 1.9510 sec.\n", - "Iter 13780 || Loss: 4.8756 || 10iter: 1.9500 sec.\n", - "Iter 13790 || Loss: 4.9692 || 10iter: 1.9252 sec.\n", - "Iter 13800 || Loss: 5.4179 || 10iter: 1.9378 sec.\n", - "Iter 13810 || Loss: 5.1914 || 10iter: 1.9168 sec.\n", - "Iter 13820 || Loss: 4.9806 || 10iter: 1.9203 sec.\n", - "Iter 13830 || Loss: 5.6334 || 10iter: 1.8896 sec.\n", - "Iter 13840 || Loss: 4.4186 || 10iter: 1.9193 sec.\n", - "Iter 13850 || Loss: 4.7319 || 10iter: 1.9135 sec.\n", - "Iter 13860 || Loss: 4.7129 || 10iter: 1.9089 sec.\n", - "Iter 13870 || Loss: 4.4372 || 10iter: 1.9218 sec.\n", - "Iter 13880 || Loss: 4.5038 || 10iter: 1.9074 sec.\n", - "Iter 13890 || Loss: 5.5476 || 10iter: 1.9621 sec.\n", - "Iter 13900 || Loss: 5.3331 || 10iter: 1.9066 sec.\n", - "Iter 13910 || Loss: 4.9898 || 10iter: 1.9182 sec.\n", - "Iter 13920 || Loss: 4.8255 || 10iter: 1.8930 sec.\n", - "Iter 13930 || Loss: 4.8232 || 10iter: 1.9245 sec.\n", - "Iter 13940 || Loss: 4.5329 || 10iter: 1.9046 sec.\n", - "Iter 13950 || Loss: 4.5911 || 10iter: 1.8953 sec.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Iter 13960 || Loss: 5.2317 || 10iter: 1.9100 
sec.\n", - "Iter 13970 || Loss: 5.4712 || 10iter: 1.9166 sec.\n", - "Iter 13980 || Loss: 5.2736 || 10iter: 1.9241 sec.\n", - "Iter 13990 || Loss: 5.4525 || 10iter: 1.9305 sec.\n", - "Iter 14000 || Loss: 6.0101 || 10iter: 1.9083 sec.\n", - "Iter 14010 || Loss: 5.4147 || 10iter: 1.9020 sec.\n", - "Iter 14020 || Loss: 5.1907 || 10iter: 1.9243 sec.\n", - "Iter 14030 || Loss: 5.1204 || 10iter: 1.9381 sec.\n", - "Iter 14040 || Loss: 4.4785 || 10iter: 1.9198 sec.\n", - "Iter 14050 || Loss: 4.7081 || 10iter: 1.9138 sec.\n", - "Iter 14060 || Loss: 4.9156 || 10iter: 1.9636 sec.\n", - "Iter 14070 || Loss: 4.7927 || 10iter: 1.9209 sec.\n", - "Iter 14080 || Loss: 5.0901 || 10iter: 1.9288 sec.\n", - "Iter 14090 || Loss: 5.2445 || 10iter: 1.9286 sec.\n", - "Iter 14100 || Loss: 5.1526 || 10iter: 1.9376 sec.\n", - "Iter 14110 || Loss: 5.8268 || 10iter: 1.9312 sec.\n", - "Iter 14120 || Loss: 4.9961 || 10iter: 1.9255 sec.\n", - "Iter 14130 || Loss: 5.0759 || 10iter: 1.9575 sec.\n", - "Iter 14140 || Loss: 4.2812 || 10iter: 1.9595 sec.\n", - "Iter 14150 || Loss: 5.6307 || 10iter: 1.9874 sec.\n", - "Iter 14160 || Loss: 5.0764 || 10iter: 1.9451 sec.\n", - "Iter 14170 || Loss: 5.4706 || 10iter: 1.9340 sec.\n", - "Iter 14180 || Loss: 5.0539 || 10iter: 1.9384 sec.\n", - "Iter 14190 || Loss: 4.9099 || 10iter: 1.9077 sec.\n", - "Iter 14200 || Loss: 5.6264 || 10iter: 1.9492 sec.\n", - "Iter 14210 || Loss: 5.3213 || 10iter: 2.0122 sec.\n", - "Iter 14220 || Loss: 5.1273 || 10iter: 1.9023 sec.\n", - "Iter 14230 || Loss: 5.1503 || 10iter: 1.8928 sec.\n", - "Iter 14240 || Loss: 5.1845 || 10iter: 1.9076 sec.\n", - "Iter 14250 || Loss: 4.9728 || 10iter: 1.9614 sec.\n", - "Iter 14260 || Loss: 5.2138 || 10iter: 1.9493 sec.\n", - "Iter 14270 || Loss: 5.2054 || 10iter: 1.9851 sec.\n", - "Iter 14280 || Loss: 4.8065 || 10iter: 1.9160 sec.\n", - "Iter 14290 || Loss: 4.6658 || 10iter: 1.8953 sec.\n", - "Iter 14300 || Loss: 5.0286 || 10iter: 1.9217 sec.\n", - "Iter 14310 || Loss: 5.5786 || 10iter: 1.8748 sec.\n", - "Iter 14320 || Loss: 5.2729 || 10iter: 1.9038 sec.\n", - "Iter 14330 || Loss: 5.8730 || 10iter: 1.9190 sec.\n", - "Iter 14340 || Loss: 4.8313 || 10iter: 1.9179 sec.\n", - "Iter 14350 || Loss: 5.4018 || 10iter: 1.9032 sec.\n", - "Iter 14360 || Loss: 4.6700 || 10iter: 1.9147 sec.\n", - "Iter 14370 || Loss: 5.5133 || 10iter: 1.8806 sec.\n", - "Iter 14380 || Loss: 4.8291 || 10iter: 1.9056 sec.\n", - "Iter 14390 || Loss: 4.5910 || 10iter: 1.9114 sec.\n", - "Iter 14400 || Loss: 5.0783 || 10iter: 1.8987 sec.\n", - "Iter 14410 || Loss: 4.3804 || 10iter: 1.8842 sec.\n", - "Iter 14420 || Loss: 5.1877 || 10iter: 1.9600 sec.\n", - "Iter 14430 || Loss: 5.1031 || 10iter: 1.9088 sec.\n", - "Iter 14440 || Loss: 4.8108 || 10iter: 1.9229 sec.\n", - "Iter 14450 || Loss: 4.6017 || 10iter: 1.9129 sec.\n", - "Iter 14460 || Loss: 5.0334 || 10iter: 1.9134 sec.\n", - "Iter 14470 || Loss: 4.8651 || 10iter: 1.9371 sec.\n", - "Iter 14480 || Loss: 4.9189 || 10iter: 1.8544 sec.\n", - "Iter 14490 || Loss: 4.5236 || 10iter: 1.7392 sec.\n", - "-------------\n", - "epoch 14 || Epoch_TRAIN_Loss:5254.5361 ||Epoch_VAL_Loss:0.0000\n", - "timer: 203.4716 sec.\n", - "lr is: 0.001\n", - "-------------\n", - "Epoch 15/200\n", - "-------------\n", - "(train)\n", - "Iter 14500 || Loss: 4.2958 || 10iter: 3.6537 sec.\n", - "Iter 14510 || Loss: 5.4762 || 10iter: 1.9064 sec.\n", - "Iter 14520 || Loss: 4.6766 || 10iter: 1.9025 sec.\n", - "Iter 14530 || Loss: 6.0038 || 10iter: 1.9602 sec.\n", - "Iter 14540 || Loss: 4.9355 || 10iter: 1.9260 sec.\n", - "Iter 14550 || Loss: 
4.4211 || 10iter: 1.8979 sec.\n", - "Iter 14560 || Loss: 4.8367 || 10iter: 1.9109 sec.\n", - "Iter 14570 || Loss: 4.4004 || 10iter: 1.8890 sec.\n", - "Iter 14580 || Loss: 3.7890 || 10iter: 1.9330 sec.\n", - "Iter 14590 || Loss: 4.6837 || 10iter: 1.9198 sec.\n", - "Iter 14600 || Loss: 5.3433 || 10iter: 1.9299 sec.\n", - "Iter 14610 || Loss: 5.1091 || 10iter: 1.9408 sec.\n", - "Iter 14620 || Loss: 5.2660 || 10iter: 1.8961 sec.\n", - "Iter 14630 || Loss: 5.2327 || 10iter: 1.9279 sec.\n", - "Iter 14640 || Loss: 4.4804 || 10iter: 1.9333 sec.\n", - "Iter 14650 || Loss: 4.5865 || 10iter: 1.8844 sec.\n", - "Iter 14660 || Loss: 5.3314 || 10iter: 1.9578 sec.\n", - "Iter 14670 || Loss: 4.4987 || 10iter: 1.9213 sec.\n", - "Iter 14680 || Loss: 6.4284 || 10iter: 1.9419 sec.\n", - "Iter 14690 || Loss: 4.8119 || 10iter: 1.9075 sec.\n", - "Iter 14700 || Loss: 5.1161 || 10iter: 1.9020 sec.\n", - "Iter 14710 || Loss: 5.2958 || 10iter: 1.9227 sec.\n", - "Iter 14720 || Loss: 4.5182 || 10iter: 1.9682 sec.\n", - "Iter 14730 || Loss: 5.4464 || 10iter: 1.9210 sec.\n", - "Iter 14740 || Loss: 5.2420 || 10iter: 1.9182 sec.\n", - "Iter 14750 || Loss: 5.4516 || 10iter: 1.9241 sec.\n", - "Iter 14760 || Loss: 4.9256 || 10iter: 1.9287 sec.\n", - "Iter 14770 || Loss: 5.2563 || 10iter: 1.9332 sec.\n", - "Iter 14780 || Loss: 5.2164 || 10iter: 1.9717 sec.\n", - "Iter 14790 || Loss: 4.7350 || 10iter: 1.9272 sec.\n", - "Iter 14800 || Loss: 5.0903 || 10iter: 1.8851 sec.\n", - "Iter 14810 || Loss: 5.1602 || 10iter: 1.9280 sec.\n", - "Iter 14820 || Loss: 5.8497 || 10iter: 1.9156 sec.\n", - "Iter 14830 || Loss: 4.8598 || 10iter: 1.9032 sec.\n", - "Iter 14840 || Loss: 5.1419 || 10iter: 1.9459 sec.\n", - "Iter 14850 || Loss: 4.8418 || 10iter: 1.9331 sec.\n", - "Iter 14860 || Loss: 5.1535 || 10iter: 1.9320 sec.\n", - "Iter 14870 || Loss: 4.8725 || 10iter: 1.9019 sec.\n", - "Iter 14880 || Loss: 4.5627 || 10iter: 1.9206 sec.\n", - "Iter 14890 || Loss: 4.8348 || 10iter: 1.9818 sec.\n", - "Iter 14900 || Loss: 4.7991 || 10iter: 1.8950 sec.\n", - "Iter 14910 || Loss: 5.3418 || 10iter: 1.9406 sec.\n", - "Iter 14920 || Loss: 5.2596 || 10iter: 1.9358 sec.\n", - "Iter 14930 || Loss: 4.7718 || 10iter: 1.9231 sec.\n", - "Iter 14940 || Loss: 5.1979 || 10iter: 1.9216 sec.\n", - "Iter 14950 || Loss: 5.2286 || 10iter: 1.9045 sec.\n", - "Iter 14960 || Loss: 5.3072 || 10iter: 1.8796 sec.\n", - "Iter 14970 || Loss: 4.6527 || 10iter: 1.9409 sec.\n", - "Iter 14980 || Loss: 5.4916 || 10iter: 1.9171 sec.\n", - "Iter 14990 || Loss: 5.2987 || 10iter: 1.9225 sec.\n", - "Iter 15000 || Loss: 5.1279 || 10iter: 1.8996 sec.\n", - "Iter 15010 || Loss: 5.0290 || 10iter: 1.9094 sec.\n", - "Iter 15020 || Loss: 5.1419 || 10iter: 1.9421 sec.\n", - "Iter 15030 || Loss: 5.0679 || 10iter: 1.8805 sec.\n", - "Iter 15040 || Loss: 4.3048 || 10iter: 1.9042 sec.\n", - "Iter 15050 || Loss: 4.8955 || 10iter: 1.9439 sec.\n", - "Iter 15060 || Loss: 4.4525 || 10iter: 1.8989 sec.\n", - "Iter 15070 || Loss: 5.0268 || 10iter: 1.9108 sec.\n", - "Iter 15080 || Loss: 5.2244 || 10iter: 1.9512 sec.\n", - "Iter 15090 || Loss: 4.9594 || 10iter: 1.9051 sec.\n", - "Iter 15100 || Loss: 4.3387 || 10iter: 1.9290 sec.\n", - "Iter 15110 || Loss: 3.9875 || 10iter: 1.9812 sec.\n", - "Iter 15120 || Loss: 4.5900 || 10iter: 1.9518 sec.\n", - "Iter 15130 || Loss: 5.3459 || 10iter: 1.9837 sec.\n", - "Iter 15140 || Loss: 5.1805 || 10iter: 1.9558 sec.\n", - "Iter 15150 || Loss: 5.4880 || 10iter: 1.8802 sec.\n", - "Iter 15160 || Loss: 5.1238 || 10iter: 1.9036 sec.\n", - "Iter 15170 || Loss: 5.3673 || 10iter: 
1.9646 sec.\n", - "Iter 15180 || Loss: 4.9127 || 10iter: 1.9405 sec.\n", - "Iter 15190 || Loss: 4.8398 || 10iter: 1.9170 sec.\n", - "Iter 15200 || Loss: 4.5170 || 10iter: 1.8948 sec.\n", - "Iter 15210 || Loss: 4.9435 || 10iter: 1.9327 sec.\n", - "Iter 15220 || Loss: 5.9581 || 10iter: 1.9365 sec.\n", - "Iter 15230 || Loss: 4.4164 || 10iter: 1.9041 sec.\n", - "Iter 15240 || Loss: 5.7761 || 10iter: 1.9012 sec.\n", - "Iter 15250 || Loss: 5.1189 || 10iter: 1.8908 sec.\n", - "Iter 15260 || Loss: 5.6582 || 10iter: 1.9139 sec.\n", - "Iter 15270 || Loss: 4.8810 || 10iter: 1.9107 sec.\n", - "Iter 15280 || Loss: 4.7088 || 10iter: 1.9598 sec.\n", - "Iter 15290 || Loss: 5.9506 || 10iter: 1.9530 sec.\n", - "Iter 15300 || Loss: 4.7556 || 10iter: 1.9034 sec.\n", - "Iter 15310 || Loss: 5.3522 || 10iter: 1.9082 sec.\n", - "Iter 15320 || Loss: 4.8143 || 10iter: 1.8930 sec.\n", - "Iter 15330 || Loss: 4.7779 || 10iter: 1.8796 sec.\n", - "Iter 15340 || Loss: 5.3515 || 10iter: 1.9420 sec.\n", - "Iter 15350 || Loss: 4.8562 || 10iter: 1.9202 sec.\n", - "Iter 15360 || Loss: 4.5858 || 10iter: 1.9345 sec.\n", - "Iter 15370 || Loss: 4.2171 || 10iter: 1.9174 sec.\n", - "Iter 15380 || Loss: 4.0410 || 10iter: 1.8697 sec.\n", - "Iter 15390 || Loss: 4.6586 || 10iter: 1.9074 sec.\n", - "Iter 15400 || Loss: 4.7783 || 10iter: 1.9659 sec.\n", - "Iter 15410 || Loss: 4.7335 || 10iter: 1.9520 sec.\n", - "Iter 15420 || Loss: 5.4327 || 10iter: 1.9403 sec.\n", - "Iter 15430 || Loss: 4.8151 || 10iter: 1.9240 sec.\n", - "Iter 15440 || Loss: 5.5808 || 10iter: 1.9359 sec.\n", - "Iter 15450 || Loss: 5.2550 || 10iter: 1.8985 sec.\n", - "Iter 15460 || Loss: 4.3371 || 10iter: 1.9137 sec.\n", - "Iter 15470 || Loss: 5.9032 || 10iter: 1.8851 sec.\n", - "Iter 15480 || Loss: 4.9175 || 10iter: 1.9058 sec.\n", - "Iter 15490 || Loss: 5.4471 || 10iter: 1.9238 sec.\n", - "Iter 15500 || Loss: 5.1351 || 10iter: 1.9067 sec.\n", - "Iter 15510 || Loss: 4.6564 || 10iter: 1.9125 sec.\n", - "Iter 15520 || Loss: 4.9512 || 10iter: 1.8063 sec.\n", - "-------------\n", - "epoch 15 || Epoch_TRAIN_Loss:5214.7180 ||Epoch_VAL_Loss:0.0000\n", - "timer: 203.5020 sec.\n", - "lr is: 0.001\n", - "-------------\n", - "Epoch 16/200\n", - "-------------\n", - "(train)\n", - "Iter 15530 || Loss: 4.8589 || 10iter: 2.6975 sec.\n" + "Iter 5190 || Loss: 4.4229 || 10iter: 5.4113 sec.\n", + "Iter 5200 || Loss: 5.0464 || 10iter: 3.3802 sec.\n", + "Iter 5210 || Loss: 4.7616 || 10iter: 3.3827 sec.\n", + "Iter 5220 || Loss: 4.9551 || 10iter: 3.3709 sec.\n", + "Iter 5230 || Loss: 5.0204 || 10iter: 3.3988 sec.\n", + "Iter 5240 || Loss: 4.4892 || 10iter: 3.3844 sec.\n", + "Iter 5250 || Loss: 5.0293 || 10iter: 3.4037 sec.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ - "Iter 15540 || Loss: 5.8025 || 10iter: 1.9934 sec.\n", - "Iter 15550 || Loss: 5.0796 || 10iter: 1.8539 sec.\n", - "Iter 15560 || Loss: 5.5387 || 10iter: 1.9182 sec.\n", - "Iter 15570 || Loss: 4.8442 || 10iter: 1.9398 sec.\n", - "Iter 15580 || Loss: 4.8590 || 10iter: 1.9199 sec.\n", - "Iter 15590 || Loss: 5.2256 || 10iter: 1.9237 sec.\n", - "Iter 15600 || Loss: 4.4678 || 10iter: 1.8979 sec.\n", - "Iter 15610 || Loss: 5.6006 || 10iter: 1.9012 sec.\n", - "Iter 15620 || Loss: 5.3900 || 10iter: 1.9227 sec.\n", - "Iter 15630 || Loss: 5.2369 || 10iter: 1.8937 sec.\n", - "Iter 15640 || Loss: 5.2440 || 10iter: 1.8926 sec.\n", - "Iter 15650 || Loss: 5.4751 || 10iter: 1.8861 sec.\n", - "Iter 15660 || Loss: 5.3793 || 10iter: 1.9427 sec.\n", - "Iter 15670 || Loss: 5.2494 || 10iter: 1.9208 sec.\n", - "Iter 15680 || Loss: 
5.7264 || 10iter: 1.9400 sec.\n", - "Iter 15690 || Loss: 4.5855 || 10iter: 1.9523 sec.\n", - "Iter 15700 || Loss: 5.5591 || 10iter: 1.9249 sec.\n", - "Iter 15710 || Loss: 5.4837 || 10iter: 1.9512 sec.\n", - "Iter 15720 || Loss: 4.9216 || 10iter: 1.9012 sec.\n", - "Iter 15730 || Loss: 4.4263 || 10iter: 1.9071 sec.\n", - "Iter 15740 || Loss: 5.4240 || 10iter: 1.8928 sec.\n", - "Iter 15750 || Loss: 5.0460 || 10iter: 1.9402 sec.\n", - "Iter 15760 || Loss: 5.2160 || 10iter: 1.8890 sec.\n", - "Iter 15770 || Loss: 5.1441 || 10iter: 1.9188 sec.\n", - "Iter 15780 || Loss: 5.5111 || 10iter: 1.9352 sec.\n", - "Iter 15790 || Loss: 5.2516 || 10iter: 1.9114 sec.\n", - "Iter 15800 || Loss: 4.8033 || 10iter: 1.9041 sec.\n", - "Iter 15810 || Loss: 4.9286 || 10iter: 1.9282 sec.\n", - "Iter 15820 || Loss: 4.9610 || 10iter: 1.9341 sec.\n", - "Iter 15830 || Loss: 4.9275 || 10iter: 1.9478 sec.\n", - "Iter 15840 || Loss: 5.0074 || 10iter: 1.9153 sec.\n", - "Iter 15850 || Loss: 5.0793 || 10iter: 1.9203 sec.\n", - "Iter 15860 || Loss: 4.4727 || 10iter: 1.9199 sec.\n", - "Iter 15870 || Loss: 4.9073 || 10iter: 1.9862 sec.\n", - "Iter 15880 || Loss: 4.9151 || 10iter: 1.9075 sec.\n", - "Iter 15890 || Loss: 4.9718 || 10iter: 1.8930 sec.\n", - "Iter 15900 || Loss: 5.2034 || 10iter: 1.9030 sec.\n", - "Iter 15910 || Loss: 4.7498 || 10iter: 1.9010 sec.\n", - "Iter 15920 || Loss: 5.5919 || 10iter: 1.9774 sec.\n", - "Iter 15930 || Loss: 5.3474 || 10iter: 1.8928 sec.\n", - "Iter 15940 || Loss: 5.4458 || 10iter: 1.8798 sec.\n", - "Iter 15950 || Loss: 5.2783 || 10iter: 1.8855 sec.\n", - "Iter 15960 || Loss: 5.1126 || 10iter: 1.9281 sec.\n", - "Iter 15970 || Loss: 5.0929 || 10iter: 1.9443 sec.\n", - "Iter 15980 || Loss: 5.4044 || 10iter: 1.9452 sec.\n", - "Iter 15990 || Loss: 5.1864 || 10iter: 1.9404 sec.\n", - "Iter 16000 || Loss: 4.6108 || 10iter: 1.8571 sec.\n", - "Iter 16010 || Loss: 5.4976 || 10iter: 1.8860 sec.\n", - "Iter 16020 || Loss: 5.3181 || 10iter: 1.9184 sec.\n", - "Iter 16030 || Loss: 5.0945 || 10iter: 1.9540 sec.\n", - "Iter 16040 || Loss: 4.9498 || 10iter: 1.9753 sec.\n", - "Iter 16050 || Loss: 5.0888 || 10iter: 1.9905 sec.\n", - "Iter 16060 || Loss: 5.1403 || 10iter: 1.9833 sec.\n", - "Iter 16070 || Loss: 5.3758 || 10iter: 1.9911 sec.\n", - "Iter 16080 || Loss: 4.4210 || 10iter: 2.0281 sec.\n", - "Iter 16090 || Loss: 3.8988 || 10iter: 1.9181 sec.\n", - "Iter 16100 || Loss: 5.1507 || 10iter: 1.9322 sec.\n", - "Iter 16110 || Loss: 5.8649 || 10iter: 1.8971 sec.\n", - "Iter 16120 || Loss: 4.6396 || 10iter: 1.8969 sec.\n", - "Iter 16130 || Loss: 4.2593 || 10iter: 1.8912 sec.\n", - "Iter 16140 || Loss: 4.6350 || 10iter: 1.9270 sec.\n", - "Iter 16150 || Loss: 5.0824 || 10iter: 2.0418 sec.\n", - "Iter 16160 || Loss: 5.3986 || 10iter: 1.9170 sec.\n", - "Iter 16170 || Loss: 5.4058 || 10iter: 1.9110 sec.\n", - "Iter 16180 || Loss: 5.1361 || 10iter: 1.9350 sec.\n", - "Iter 16190 || Loss: 4.5617 || 10iter: 1.8979 sec.\n", - "Iter 16200 || Loss: 4.7005 || 10iter: 1.9658 sec.\n", - "Iter 16210 || Loss: 5.0582 || 10iter: 1.9073 sec.\n", - "Iter 16220 || Loss: 5.3389 || 10iter: 1.9294 sec.\n", - "Iter 16230 || Loss: 4.6105 || 10iter: 1.9419 sec.\n", - "Iter 16240 || Loss: 5.7215 || 10iter: 1.9468 sec.\n", - "Iter 16250 || Loss: 5.0735 || 10iter: 1.9289 sec.\n", - "Iter 16260 || Loss: 4.6622 || 10iter: 1.9304 sec.\n", - "Iter 16270 || Loss: 4.5883 || 10iter: 1.9470 sec.\n", - "Iter 16280 || Loss: 4.7803 || 10iter: 1.9388 sec.\n", - "Iter 16290 || Loss: 4.6865 || 10iter: 1.8859 sec.\n", - "Iter 16300 || Loss: 4.6144 || 10iter: 
1.9257 sec.\n", - "Iter 16310 || Loss: 5.0478 || 10iter: 1.9122 sec.\n", - "Iter 16320 || Loss: 5.5735 || 10iter: 1.9097 sec.\n", - "Iter 16330 || Loss: 4.9333 || 10iter: 1.9100 sec.\n", - "Iter 16340 || Loss: 4.7054 || 10iter: 1.8936 sec.\n", - "Iter 16350 || Loss: 5.0866 || 10iter: 1.9577 sec.\n", - "Iter 16360 || Loss: 4.8149 || 10iter: 1.9928 sec.\n", - "Iter 16370 || Loss: 4.2935 || 10iter: 1.9782 sec.\n", - "Iter 16380 || Loss: 4.7538 || 10iter: 2.0578 sec.\n", - "Iter 16390 || Loss: 5.2114 || 10iter: 2.1233 sec.\n", - "Iter 16400 || Loss: 4.7524 || 10iter: 2.0172 sec.\n", - "Iter 16410 || Loss: 4.8717 || 10iter: 2.0131 sec.\n", - "Iter 16420 || Loss: 4.8024 || 10iter: 1.9762 sec.\n", - "Iter 16430 || Loss: 6.0655 || 10iter: 2.0855 sec.\n", - "Iter 16440 || Loss: 4.5962 || 10iter: 2.0473 sec.\n", - "Iter 16450 || Loss: 5.3304 || 10iter: 2.1144 sec.\n", - "Iter 16460 || Loss: 5.3420 || 10iter: 2.1251 sec.\n", - "Iter 16470 || Loss: 4.8643 || 10iter: 1.9269 sec.\n", - "Iter 16480 || Loss: 5.1812 || 10iter: 1.9320 sec.\n", - "Iter 16490 || Loss: 4.8109 || 10iter: 1.9089 sec.\n", - "Iter 16500 || Loss: 5.2851 || 10iter: 1.8870 sec.\n", - "Iter 16510 || Loss: 5.0556 || 10iter: 1.9395 sec.\n" + "Iter 5260 || Loss: 4.4406 || 10iter: 3.4452 sec.\n", + "Iter 5270 || Loss: 4.4373 || 10iter: 3.3466 sec.\n", + "Iter 5280 || Loss: 4.5616 || 10iter: 3.4416 sec.\n", + "Iter 5290 || Loss: 4.5757 || 10iter: 3.3746 sec.\n", + "Iter 5300 || Loss: 4.7119 || 10iter: 3.3633 sec.\n", + "Iter 5310 || Loss: 4.4746 || 10iter: 3.4697 sec.\n", + "Iter 5320 || Loss: 4.2691 || 10iter: 3.4707 sec.\n", + "Iter 5330 || Loss: 4.8476 || 10iter: 3.4119 sec.\n", + "Iter 5340 || Loss: 5.0027 || 10iter: 3.3624 sec.\n", + "Iter 5350 || Loss: 4.7545 || 10iter: 3.3863 sec.\n", + "Iter 5360 || Loss: 4.8939 || 10iter: 3.4075 sec.\n", + "Iter 5370 || Loss: 4.5954 || 10iter: 3.3554 sec.\n", + "Iter 5380 || Loss: 4.3597 || 10iter: 3.3572 sec.\n" ] } ], diff --git a/train_fp16_apex.ipynb b/train_fp16_apex.ipynb new file mode 100644 index 0000000..3ddc849 --- /dev/null +++ b/train_fp16_apex.ipynb @@ -0,0 +1,2041 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# setup dataset" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "# import stuff\n", + "import os\n", + "import numpy as np\n", + "import time\n", + "import pandas as pd\n", + "\n", + "import torch\n", + "import torch.utils.data as data\n", + "from itertools import product as product\n", + "\n", + "import torch\n", + "import torch.nn as nn\n", + "import torch.nn.init as init\n", + "import torch.nn.functional as F\n", + "from torch.autograd import Function\n", + "from utils.to_fp16 import network_to_half" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "# import dataset\n", + "from utils.dataset import VOCDataset, DatasetTransform, make_datapath_list, Anno_xml2list, od_collate_fn" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "## meta settings\n", + "\n", + "# select from efficientnet backbone or resnet backbone\n", + "backbone = \"efficientnet-b0\"\n", + "scale = 2\n", + "# scale==1: resolution 300\n", + "# scale==2: resolution 600\n", + "useBiFPN = True\n", + "HALF = True # enable FP16\n", + "DATASET = \"VOC\"\n", + "retina = False # for trying retinanets" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## make data.Dataset 
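The meta settings above turn on FP16 training via `HALF`, and the first cell imports `network_to_half` from `utils.to_fp16`. As a rough sketch of what such a helper conventionally does (an assumption; the repository's actual implementation is not shown here), it casts the network to half precision while keeping BatchNorm layers in fp32 for numerical stability:

```python
import torch.nn as nn

def network_to_half(network: nn.Module) -> nn.Module:
    """Cast a network to fp16, keeping BatchNorm layers in fp32.

    A minimal sketch of the conventional fp16 conversion pattern; the
    repository's utils.to_fp16.network_to_half may differ in detail.
    """
    network.half()  # cast all parameters and buffers to fp16
    for module in network.modules():
        if isinstance(module, nn.modules.batchnorm._BatchNorm):
            module.float()  # BN statistics and affine params stay in fp32
    return network
```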
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## make data.Dataset for training"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "trainlist: 16551\n",
+      "vallist: 4952\n"
+     ]
+    }
+   ],
+   "source": [
+    "if not DATASET == \"COCO\":\n",
+    "    # load files\n",
+    "    # set your VOCdevkit path here.\n",
+    "    vocpath = \"../VOCdevkit/VOC2007\"\n",
+    "    train_img_list, train_anno_list, val_img_list, val_anno_list = make_datapath_list(vocpath)\n",
+    "\n",
+    "    vocpath = \"../VOCdevkit/VOC2012\"\n",
+    "    train_img_list2, train_anno_list2, _, _ = make_datapath_list(vocpath)\n",
+    "\n",
+    "    train_img_list.extend(train_img_list2)\n",
+    "    train_anno_list.extend(train_anno_list2)\n",
+    "\n",
+    "    print(\"trainlist: \", len(train_img_list))\n",
+    "    print(\"vallist: \", len(val_img_list))\n",
+    "\n",
+    "    # make Dataset\n",
+    "    voc_classes = ['aeroplane', 'bicycle', 'bird', 'boat',\n",
+    "                   'bottle', 'bus', 'car', 'cat', 'chair',\n",
+    "                   'cow', 'diningtable', 'dog', 'horse',\n",
+    "                   'motorbike', 'person', 'pottedplant',\n",
+    "                   'sheep', 'sofa', 'train', 'tvmonitor']\n",
+    "\n",
+    "    color_mean = (104, 117, 123)  # mean color values (BGR)\n",
+    "    if scale == 1:\n",
+    "        input_size = 300  # use a 300x300 input image\n",
+    "    else:\n",
+    "        input_size = 512\n",
+    "\n",
+    "    ## apply DatasetTransform\n",
+    "    transform = DatasetTransform(input_size, color_mean)\n",
+    "    transform_anno = Anno_xml2list(voc_classes)\n",
+    "\n",
+    "    # Dataset objects fed to the DataLoader;\n",
+    "    # indexing one returns a preprocessed image and its ground truth.\n",
+    "    train_dataset = VOCDataset(train_img_list, train_anno_list, phase=\"train\", transform=transform, transform_anno=transform_anno)\n",
+    "    val_dataset = VOCDataset(val_img_list, val_anno_list, phase=\"val\", transform=DatasetTransform(\n",
+    "        input_size, color_mean), transform_anno=Anno_xml2list(voc_classes))\n",
+    "\n",
+    "else:\n",
+    "    from dataset.coco import COCODetection\n",
+    "    import torch.utils.data as data\n",
+    "    from utils.dataset import VOCDataset, COCODatasetTransform, make_datapath_list, Anno_xml2list, od_collate_fn\n",
+    "\n",
+    "    color_mean = (104, 117, 123)  # mean color values (BGR)\n",
+    "    if scale == 1:\n",
+    "        input_size = 300  # use a 300x300 input image\n",
+    "    else:\n",
+    "        input_size = 512\n",
+    "\n",
+    "    ## apply COCODatasetTransform\n",
+    "    transform = COCODatasetTransform(input_size, color_mean)\n",
+    "    train_dataset = COCODetection(\"../data/coco/\", image_set=\"train2014\", phase=\"train\", transform=transform)\n",
+    "    val_dataset = COCODetection(\"../data/coco/\", image_set=\"val2014\", phase=\"val\", transform=transform)\n",
+    "\n",
+    "batch_size = 32\n",
+    "\n",
+    "train_dataloader = data.DataLoader(\n",
+    "    train_dataset, batch_size=batch_size, shuffle=True, collate_fn=od_collate_fn, num_workers=8)\n",
+    "\n",
+    "val_dataloader = data.DataLoader(\n",
+    "    val_dataset, batch_size=batch_size, shuffle=False, collate_fn=od_collate_fn, num_workers=8)\n",
+    "\n",
+    "# bundle the loaders into a dict\n",
+    "dataloaders_dict = {\"train\": train_dataloader, \"val\": val_dataloader}"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 5,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "torch.Size([32, 3, 512, 512])\n",
+      "32\n",
+      "torch.Size([1, 5])\n"
+     ]
+    }
+   ],
+   "source": [
+    "# sanity check\n",
+    "batch_iterator = iter(dataloaders_dict[\"val\"])  # turn the loader into an iterator\n",
+    "images, targets = next(batch_iterator)  # pull the first batch\n",
+    "print(images.size())  # e.g. torch.Size([32, 3, 512, 512])\n",
+    "print(len(targets))\n",
+    "print(targets[1].shape)  # targets is a list of minibatch length; each element is [n, 5], n = number of objects"
+   ]
+  },
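The sanity check above prints a stacked `[32, 3, 512, 512]` image batch next to a length-32 list of `[n, 5]` target tensors, which is why detection needs a custom collate function: each image carries a different number of boxes, so the targets cannot be stacked into one tensor. A minimal sketch of such a function, assuming `od_collate_fn` follows the usual pattern (the actual implementation lives in `utils.dataset` and may differ):

```python
import numpy as np
import torch

def od_collate_fn(batch):
    """Collate (image, annotation) pairs whose box counts vary per image."""
    images, targets = [], []
    for img, anno in batch:
        images.append(img)                                    # [3, H, W] tensor
        targets.append(torch.FloatTensor(np.asarray(anno)))   # [n, 5]: xmin, ymin, xmax, ymax, label
    return torch.stack(images, dim=0), targets                # [B, 3, H, W], list of [n, 5]
```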
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# define EfficientDet model"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 6,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from utils.efficientdet import EfficientDet"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 7,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Loaded pretrained weights for efficientnet-b0\n",
+      "use BiFPN\n",
+      "layerc3: torch.Size([1, 40, 37, 37])\n",
+      "layerc4: torch.Size([1, 80, 18, 18])\n",
+      "layerc5: torch.Size([1, 320, 9, 9])\n",
+      "layer size: torch.Size([1, 256, 37, 37])\n",
+      "layer size: torch.Size([1, 256, 18, 18])\n",
+      "layer size: torch.Size([1, 256, 9, 9])\n",
+      "layer size: torch.Size([1, 256, 5, 5])\n",
+      "layer size: torch.Size([1, 256, 3, 3])\n",
+      "layer size: torch.Size([1, 256, 1, 1])\n",
+      "torch.Size([1, 8096, 4])\n"
+     ]
+    },
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "/home/ubuntu/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages/torch/nn/functional.py:2457: UserWarning: nn.functional.upsample is deprecated. Use nn.functional.interpolate instead.\n",
+      "  warnings.warn(\"nn.functional.upsample is deprecated. Use nn.functional.interpolate instead.\")\n",
+      "/home/ubuntu/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages/torch/nn/functional.py:2539: UserWarning: Default upsampling behavior when mode=bilinear is changed to align_corners=False since 0.4.0. Please specify align_corners=True if the old behavior is desired. See the documentation of nn.Upsample for details.\n",
+      "  \"See the documentation of nn.Upsample for details.\".format(mode))\n"
+     ]
+    }
+   ],
+   "source": [
+    "if not DATASET == \"COCO\":\n",
+    "    num_class = 21\n",
+    "else:\n",
+    "    num_class = 81\n",
+    "\n",
+    "if scale == 1:\n",
+    "    ssd_cfg = {\n",
+    "        'num_classes': num_class,  # total number of classes, including background\n",
+    "        'input_size': 300*scale,  # input image size\n",
+    "        'bbox_aspect_num': [4, 6, 6, 6, 4, 4],  # number of DBox aspect ratios per source\n",
+    "        'feature_maps': [37, 18, 9, 5, 3, 1],  # feature map size of each source\n",
+    "        'steps': [8, 16, 32, 64, 100, 300],  # determines the DBox sizes\n",
+    "        'min_sizes': [30, 60, 111, 162, 213, 264],  # determines the DBox sizes\n",
+    "        'max_sizes': [60, 111, 162, 213, 264, 315],  # determines the DBox sizes\n",
+    "        'aspect_ratios': [[2], [2, 3], [2, 3], [2, 3], [2], [2]],\n",
+    "    }\n",
+    "elif scale == 2:\n",
+    "    ssd_cfg = {\n",
+    "        'num_classes': num_class,  # total number of classes, including background\n",
+    "        'input_size': 512,  # input image size\n",
+    "        'bbox_aspect_num': [4, 6, 6, 6, 4, 4],  # number of DBox aspect ratios per source\n",
+    "        'feature_maps': [64, 32, 16, 8, 4, 2],  # feature map size of each source\n",
+    "        'steps': [8, 16, 32, 64, 100, 300],  # determines the DBox sizes\n",
+    "        'min_sizes': [s * scale for s in [30, 60, 111, 162, 213, 264]],  # scaled element-wise (list * int would repeat the list)\n",
+    "        'max_sizes': [s * scale for s in [60, 111, 162, 213, 264, 315]],  # scaled element-wise\n",
+    "        'aspect_ratios': [[2], [2, 3], [2, 3], [2, 3], [2], [2]],\n",
+    "    }\n",
+    "\n",
+    "# test if net works\n",
+    "net = EfficientDet(phase=\"train\", cfg=ssd_cfg, verbose=True, backbone=backbone, useBiFPN=useBiFPN)\n",
+    "out = net(torch.rand([1, 3, 300, 300]))\n",
+    "print(out[0].size())"
+   ]
+  },
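One pitfall fixed in the `scale==2` config above: the original wrote `min_sizes` and `max_sizes` as `[30, 60, ...]*scale`, but multiplying a Python list by an integer repeats the list rather than scaling its values, handing the DBox generator twelve sizes for six feature maps. A quick demonstration:

```python
sizes = [30, 60, 111, 162, 213, 264]
print(sizes * 2)               # [30, 60, 111, 162, 213, 264, 30, 60, 111, 162, 213, 264]
print([s * 2 for s in sizes])  # [60, 120, 222, 324, 426, 528], one size per feature map
```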
+  {
+   "cell_type": "code",
+   "execution_count": 8,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Loaded pretrained weights for efficientnet-b0\n",
+      "use BiFPN\n",
+      "using: cuda:0\n",
+      "set weights!\n"
+     ]
+    }
+   ],
+   "source": [
+    "net = EfficientDet(phase=\"train\", cfg=ssd_cfg, verbose=False, backbone=backbone, useBiFPN=useBiFPN)\n",
+    "\n",
+    "if retina:\n",
+    "    from utils.retinanet import RetinaFPN\n",
+    "    ssd_cfg = {\n",
+    "        'num_classes': num_class,  # total number of classes, including background\n",
+    "        'input_size': 300*scale,  # input image size\n",
+    "        'bbox_aspect_num': [4, 6, 6, 6, 4, 4],  # number of DBox aspect ratios per source\n",
+    "        'feature_maps': [38, 19, 10, 5, 3, 1],  # feature map size of each source\n",
+    "        'steps': [8, 16, 32, 64, 100, 300],  # determines the DBox sizes\n",
+    "        'min_sizes': [30, 60, 111, 162, 213, 264],  # determines the DBox sizes\n",
+    "        'max_sizes': [60, 111, 162, 213, 264, 315],  # determines the DBox sizes\n",
+    "        'aspect_ratios': [[2], [2, 3], [2, 3], [2, 3], [2], [2]],\n",
+    "    }\n",
+    "    net = RetinaFPN(\"train\", ssd_cfg)\n",
+    "\n",
+    "# check whether a GPU is available\n",
+    "device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")\n",
+    "print(\"using:\", device)\n",
+    "net = net.to(device)\n",
+    "print(\"set weights!\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 9,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "EfficientDet(\n",
+      "  [full module printout elided: (layer0)-(layer5) are the EfficientNet-B0 MBConv stages\n",
+      "   (Conv2dStaticSamePadding + BatchNorm2d + SE reduce/expand + MemoryEfficientSwish blocks),\n",
+      "   followed by:]\n",
+      "  (conv6): Conv2d(320, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))\n",
+      "  (conv7): Conv2d(256, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))\n",
+      "  (conv8): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1))\n",
+      "  (toplayer): Conv2d(320, 256, kernel_size=(1, 1), stride=(1, 1))\n",
+      "  (smooth1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n",
+      "  (smooth2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n",
+      "  (latlayer1): Conv2d(80, 256, kernel_size=(1, 1), stride=(1, 1))\n",
+      "  (latlayer2): Conv2d(40, 256, kernel_size=(1, 1), stride=(1, 1))\n",
+      "  (loc): ModuleList of six 3x3 convs, 256 -> 16/24/24/24/16/16 channels\n",
+      "  (conf): ModuleList of six 3x3 convs, 256 -> 84/126/126/126/84/84 channels\n",
+      "  (BiFPN1), (BiFPN2): BiFPN blocks; each up/down path (conv3up-conv7up, conv4dw-conv7dw)\n",
+      "   is Conv2d(256, 256, kernel_size=(1, 1), groups=256) + BatchNorm2d(256) + ReLU()\n",
+      ")\n"
+     ]
+    }
+   ],
+   "source": [
+    "print(net)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 10,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from utils.ssd_model import MultiBoxLoss\n",
+    "\n",
+    "# define loss\n",
+    "criterion = MultiBoxLoss(jaccard_thresh=0.5, neg_pos=3, device=device, half=HALF)\n",
+    "\n",
+    "# optim\n",
+    "import torch.optim as optim\n",
+    "optimizer = optim.SGD(net.parameters(), lr=1e-3, momentum=0.9, weight_decay=5e-4)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 11,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Selected optimization level O1: Insert automatic casts around Pytorch functions and Tensor methods.\n",
+      "\n",
+      "Defaults for this optimization level are:\n",
+      "enabled : True\n",
+      "opt_level : O1\n",
+      "cast_model_type : None\n",
+      "patch_torch_functions : True\n",
+      "keep_batchnorm_fp32 : None\n",
+      "master_weights : None\n",
+      "loss_scale : dynamic\n",
+      "Processing user overrides (additional kwargs that are not None)...\n",
+      "After processing overrides, optimization options are:\n",
+      "enabled : True\n",
+      "opt_level : O1\n",
+      "cast_model_type : None\n",
+      "patch_torch_functions : True\n",
+      "keep_batchnorm_fp32 : None\n",
+      "master_weights : None\n",
+      "loss_scale : dynamic\n"
+     ]
+    }
+   ],
+   "source": [
+    "if HALF:\n",
+    "    from apex import amp, optimizers\n",
+    "    # Initialization\n",
+    "    opt_level = 'O1'\n",
+    "    net, optimizer = amp.initialize(net, optimizer, opt_level=opt_level)"
+   ]
+  },
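The cell above initializes apex at optimization level O1 (automatic casts plus dynamic loss scaling; the "Gradient overflow. Skipping step" messages later in the log are the dynamic scaler backing off). On PyTorch 1.6 and later the same behaviour is available without apex through the native `torch.cuda.amp` API; a sketch of the equivalent setup, not what this notebook actually runs:

```python
import torch
from torch.cuda.amp import GradScaler, autocast

scaler = GradScaler()  # dynamic loss scaling, like apex O1

def train_step(net, images, targets, criterion, optimizer):
    optimizer.zero_grad()
    with autocast():  # fp16 autocasting around the forward pass
        outputs = net(images)
        loss_l, loss_c = criterion(outputs, targets)
        loss = loss_l + loss_c
    scaler.scale(loss).backward()  # scaled backward, like amp.scale_loss
    scaler.step(optimizer)         # unscales grads; skips the step on overflow
    scaler.update()                # adjusts the loss scale
    return loss.item()
```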
+  {
+   "cell_type": "code",
+   "execution_count": 12,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def get_current_lr(epoch):\n",
+    "\n",
+    "    if DATASET == \"COCO\":\n",
+    "        reduce = [20, 40]\n",
+    "        # warmup\n",
+    "        if epoch < 1:\n",
+    "            lr = 1e-4\n",
+    "        else:\n",
+    "            lr = 1e-3\n",
+    "    else:\n",
+    "        reduce = [120, 180]\n",
+    "        lr = 1e-3\n",
+    "\n",
+    "    for i, lr_decay_epoch in enumerate(reduce):\n",
+    "        if epoch >= lr_decay_epoch:\n",
+    "            lr *= 0.1\n",
+    "    return lr\n",
+    "\n",
+    "def adjust_learning_rate(optimizer, epoch):\n",
+    "    lr = get_current_lr(epoch)\n",
+    "    print(\"lr is:\", lr)\n",
+    "    for param_group in optimizer.param_groups:\n",
+    "        param_group['lr'] = lr"
+   ]
+  },
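For VOC the schedule above holds the learning rate at 1e-3 and decays it tenfold at epochs 120 and 180 (for COCO: a one-epoch 1e-4 warmup, then decays at 20 and 40). A quick sanity check of `get_current_lr`:

```python
for epoch in [0, 119, 120, 180]:
    print(epoch, get_current_lr(epoch))
# 0   0.001
# 119 0.001
# 120 0.0001  (approximately, after one x0.1 decay)
# 180 1e-05   (approximately, after both decays)
```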
+  {
+   "cell_type": "code",
+   "execution_count": 13,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Function that trains the model\n",
+    "batcht = []  # running record of per-batch forward times\n",
+    "def train_model(net, dataloaders_dict, criterion, optimizer, num_epochs):\n",
+    "\n",
+    "    # Check whether a GPU is available\n",
+    "    device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")\n",
+    "    print(\"used device:\", device)\n",
+    "\n",
+    "    # Move the network to the GPU\n",
+    "    net.to(device)\n",
+    "\n",
+    "    # The cudnn autotuner speeds things up when input sizes are fixed\n",
+    "    torch.backends.cudnn.benchmark = True\n",
+    "\n",
+    "    # Iteration counter and per-epoch loss accumulators\n",
+    "    iteration = 1\n",
+    "    epoch_train_loss = 0.0  # sum of training losses over the epoch\n",
+    "    epoch_val_loss = 0.0  # sum of validation losses over the epoch\n",
+    "    logs = []\n",
+    "\n",
+    "    # Epoch loop\n",
+    "    for epoch in range(num_epochs):\n",
+    "\n",
+    "        adjust_learning_rate(optimizer, epoch)\n",
+    "\n",
+    "        # Record the start times\n",
+    "        t_epoch_start = time.time()\n",
+    "        t_iter_start = time.time()\n",
+    "\n",
+    "        print('-------------')\n",
+    "        print('Epoch {}/{}'.format(epoch+1, num_epochs))\n",
+    "        print('-------------')\n",
+    "\n",
+    "        # Training and validation phases within each epoch\n",
+    "        for phase in ['train', 'val']:\n",
+    "            if phase == 'train':\n",
+    "                net.train()  # put the model in training mode\n",
+    "                print('(train)')\n",
+    "            else:\n",
+    "                if((epoch+1) % 10 == 0):\n",
+    "                    net.eval()  # put the model in evaluation mode\n",
+    "                    print('-------------')\n",
+    "                    print('(val)')\n",
+    "                else:\n",
+    "                    # validate only once every 10 epochs\n",
+    "                    continue\n",
+    "\n",
+    "            # Loop over minibatches from the dataloader\n",
+    "            for images, targets in dataloaders_dict[phase]:\n",
+    "\n",
+    "                # Send the data to the GPU if one is available\n",
+    "                images = images.to(device)\n",
+    "                targets = [ann.to(device)\n",
+    "                           for ann in targets]  # each annotation tensor in the list\n",
+    "                if HALF:\n",
+    "                    images = images.half()\n",
+    "                    targets = [ann.half() for ann in targets]\n",
+    "                # Reset the accumulated gradients\n",
+    "                optimizer.zero_grad()\n",
+    "\n",
+    "                # Forward pass (timed to track throughput)\n",
+    "                with torch.set_grad_enabled(phase == 'train'):\n",
+    "                    tick = time.time()\n",
+    "                    outputs = net(images)\n",
+    "                    tock = time.time()\n",
+    "                    batcht.append(tock-tick)\n",
+    "                    print(\"batch time:\", np.mean(batcht))\n",
+    "\n",
+    "                    # Compute the loss\n",
+    "                    loss_l, loss_c = criterion(outputs, targets)\n",
+    "                    loss = loss_l + loss_c\n",
+    "\n",
+    "                    # Backpropagate during training\n",
+    "                    if phase == 'train':\n",
+    "                        if HALF:\n",
+    "                            with amp.scale_loss(loss, optimizer) as scaled_loss:\n",
+    "                                scaled_loss.backward()\n",
+    "                        else:\n",
+    "                            loss.backward()  # compute gradients\n",
+    "\n",
+    "                        # Clip each gradient element at 2.0; overly large gradients make training unstable\n",
+    "                        nn.utils.clip_grad_value_(\n",
+    "                            net.parameters(), clip_value=2.0)\n",
+    "\n",
+    "                        optimizer.step()  # update the parameters\n",
+    "\n",
+    "                        if (iteration % 10 == 0):  # print the loss once every 10 iterations\n",
+    "                            t_iter_finish = time.time()\n",
+    "                            duration = t_iter_finish - t_iter_start\n",
+    "                            print('Iter {} || Loss: {:.4f} || 10iter: {:.4f} sec.'.format(\n",
+    "                                iteration, loss.item(), duration))\n",
+    "                            t_iter_start = time.time()\n",
+    "\n",
+    "                        epoch_train_loss += loss.item()\n",
+    "                        iteration += 1\n",
+    "\n",
+    "                    # Validation phase\n",
+    "                    else:\n",
+    "                        epoch_val_loss += loss.item()\n",
+    "\n",
+    "        # Per-epoch loss summary\n",
+    "        t_epoch_finish = time.time()\n",
+    "        print('-------------')\n",
+    "        print('epoch {} || Epoch_TRAIN_Loss:{:.4f} ||Epoch_VAL_Loss:{:.4f}'.format(\n",
+    "            epoch+1, epoch_train_loss, epoch_val_loss))\n",
+    "        print('timer: {:.4f} sec.'.format(t_epoch_finish - t_epoch_start))\n",
+    "        t_epoch_start = time.time()\n",
+    "\n",
+    "        # Save the training log as CSV\n",
+    "        log_epoch = {'epoch': epoch+1,\n",
+    "                     'train_loss': epoch_train_loss, 'val_loss': epoch_val_loss}\n",
+    "        logs.append(log_epoch)\n",
+    "        df = pd.DataFrame(logs)\n",
+    "        df.to_csv(\"log_output.csv\")\n",
+    "\n",
+    "        epoch_train_loss = 0.0  # reset the epoch loss sums\n",
+    "        epoch_val_loss = 0.0\n",
+    "\n",
+    "        # Save the network weights every 10 epochs\n",
+    "        if ((epoch+1) % 10 == 0):\n",
+    "            if useBiFPN:\n",
+    "                word = \"BiFPN\"\n",
+    "            else:\n",
+    "                word = \"FPN\"\n",
+    "            torch.save(net.state_dict(), 'weights/'+DATASET+\"_\"+backbone+\"_\" + str(300*scale) + \"_\" + word + \"_\" +\n",
+    "                       str(epoch+1) + '.pth')\n"
+   ]
+  },
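+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Two notes before launching training. First, with `HALF` enabled, apex's dynamic loss scaler occasionally prints `Gradient overflow. Skipping step, loss scaler 0 reducing loss scale to ...`; that step is skipped and the loss scale lowered, which is expected early in mixed-precision training. Second, `torch.save` does not create the output directory, so `weights/` should exist before the loop reaches its first checkpoint at epoch 10. A small optional guard (an illustrative addition, not part of the original notebook):"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import os\n",
+    "\n",
+    "# Checkpoints are written as weights/<DATASET>_<backbone>_<300*scale>_<BiFPN|FPN>_<epoch>.pth,\n",
+    "# e.g. weights/VOC_efficientnet-b0_300_BiFPN_200.pth for this configuration.\n",
+    "os.makedirs('weights', exist_ok=True)"
+   ]
+  },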
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "scrolled": true
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "used device: cuda:0\n",
+      "lr is: 0.001\n",
+      "-------------\n",
+      "Epoch 1/200\n",
+      "-------------\n",
+      "(train)\n"
+     ]
+    },
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "/home/ubuntu/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages/torch/nn/functional.py:2457: UserWarning: nn.functional.upsample is deprecated. Use nn.functional.interpolate instead.\n",
+      "  warnings.warn(\"nn.functional.upsample is deprecated. Use nn.functional.interpolate instead.\")\n",
+      "/home/ubuntu/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages/torch/nn/functional.py:2539: UserWarning: Default upsampling behavior when mode=bilinear is changed to align_corners=False since 0.4.0. Please specify align_corners=True if the old behavior is desired. See the documentation of nn.Upsample for details.\n",
+      "  \"See the documentation of nn.Upsample for details.\".format(mode))\n"
+     ]
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "batch time: 1.4208438396453857\n",
+      "Gradient overflow. Skipping step, loss scaler 0 reducing loss scale to 32768.0\n",
+      "batch time: 0.749925971031189\n",
+      "Gradient overflow. Skipping step, loss scaler 0 reducing loss scale to 16384.0\n",
+      "batch time: 0.5173380374908447\n",
+      "Gradient overflow. Skipping step, loss scaler 0 reducing loss scale to 8192.0\n",
+      "batch time: 0.19361252784729005\n",
+      "Iter 10 || Loss: 16.3804 || 10iter: 10.8001 sec.\n",
+      "Iter 20 || Loss: 14.7553 || 10iter: 5.4631 sec.\n",
+      "Iter 30 || Loss: 13.8155 || 10iter: 5.5794 sec.\n",
+      "Iter 40 || Loss: 12.4186 || 10iter: 5.7278 sec.\n",
+      "Iter 50 || Loss: 12.3549 || 10iter: 5.6079 sec.\n",
+      "Iter 60 || Loss: 11.4274 || 10iter: 5.6148 sec.\n",
+      "Iter 70 || Loss: 11.1522 || 10iter: 5.6541 sec.\n",
+      "Iter 80 || Loss: 10.4597 || 10iter: 5.5704 sec.\n",
+      "Iter 90 || Loss: 9.9863 || 10iter: 5.6744 sec.\n",
+      "Iter 100 || Loss: 9.1614 || 10iter: 5.6141 sec.\n",
+      "Iter 110 || Loss: 9.1399 || 10iter: 5.6615 sec.\n",
+      "Iter 120 || Loss: 8.7496 || 10iter: 5.6069 sec.\n",
+      "Iter 130 || Loss: 8.4081 || 10iter: 5.5237 sec.\n",
+      "Iter 140 || Loss: 8.4660 || 10iter: 5.5505 sec.\n",
+      "Iter 150 || Loss: 8.1069 || 10iter: 5.5379 sec.\n",
+      "Iter 160 || Loss: 8.4378 || 10iter: 5.6182 sec.\n",
+      "Iter 170 || Loss: 8.3321 || 10iter: 5.6247 sec.\n",
+      "Iter 180 || Loss: 8.5638 || 10iter: 5.5763 sec.\n",
+      "Iter 190 || Loss: 8.2222 || 10iter: 5.5282 sec.\n",
+      "Iter 200 || Loss: 7.9824 || 10iter: 5.6508 sec.\n",
+      "Gradient overflow. Skipping step, loss scaler 0 reducing loss scale to 4096.0\n",
+      "Iter 210 || Loss: 7.5960 || 10iter: 5.4994 sec.\n",
+      "Iter 220 || Loss: 7.7707 || 10iter: 5.5319 sec.\n",
+      "Iter 230 || Loss: 7.5936 || 10iter: 5.5751 sec.\n",
+      "Iter 240 || Loss: 7.0318 || 10iter: 5.5524 sec.\n",
+      "Iter 250 || Loss: 7.8670 || 10iter: 5.5420 sec.\n",
+      "Iter 260 || Loss: 7.6694 || 10iter: 5.6340 sec.\n",
+      "Iter 270 || Loss: 7.7616 || 10iter: 5.6440 sec.\n",
+      "Iter 280 || Loss: 7.3697 || 10iter: 5.5369 sec.\n",
+      "Iter 290 || Loss: 7.1903 || 10iter: 5.5623 sec.\n",
+      "Iter 300 || Loss: 7.6110 || 10iter: 5.5918 sec.\n",
+      "Iter 310 || Loss: 7.5151 || 10iter: 5.5303 sec.\n",
+      "Iter 320 || Loss: 7.3837 || 10iter: 5.6028 sec.\n",
+      "Iter 330 || Loss: 6.7227 || 10iter: 5.6218 sec.\n",
+      "Iter 340 || Loss: 7.1228 || 10iter: 5.6160 sec.\n",
+      "Iter 350 || Loss: 7.5340 || 10iter: 5.5440 sec.\n",
+      "Iter 360 || Loss: 7.0870 || 10iter: 5.5493 sec.\n",
+      "Iter 370 || Loss: 7.2454 || 10iter: 5.5724 sec.\n",
+      "Iter 380 || Loss: 7.2290 || 10iter: 5.6370 sec.\n",
+      "Iter 390 || Loss: 7.0981 || 10iter: 5.6176 sec.\n",
+      "Iter 400 || Loss: 7.5046 || 10iter: 5.5368 sec.\n",
+      "Iter 410 || Loss: 7.1489 || 10iter: 5.5729 sec.\n",
+      "Iter 420 || Loss: 7.4090 || 10iter: 5.6094 sec.\n",
+      "Iter 430 || Loss: 7.1384 || 10iter: 5.6170 sec.\n",
+      "Iter 440 || Loss: 6.4678 || 10iter: 5.6182 sec.\n",
+      "Iter 450 || Loss: 6.9035 || 10iter: 5.5495 sec.\n",
+      "Iter 460 || Loss: 6.8679 || 10iter: 5.6673 sec.\n",
+      "Iter 470 || Loss: 7.3142 || 10iter: 5.6519 sec.\n",
+      "Iter 480 || Loss: 7.3180 || 10iter: 5.5717 sec.\n",
+      "Iter 490 || Loss: 7.1327 || 10iter: 5.6494 sec.\n",
+      "Iter 500 || Loss: 7.0576 || 10iter: 5.5983 sec.\n",
+      "Iter 510 || Loss: 7.0183 || 10iter: 5.4891 sec.\n",
+      "batch time: 0.056772855257895924\n",
+      "-------------\n",
+      "epoch 1 || Epoch_TRAIN_Loss:4433.5437 ||Epoch_VAL_Loss:0.0000\n",
+      "timer: 295.8544 sec.\n",
+      "lr is: 0.001\n",
+      "-------------\n",
+      "Epoch 2/200\n",
+      "-------------\n",
+      "(train)\n"
+     ]
+    },
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "/home/ubuntu/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages/torch/nn/functional.py:2457: UserWarning: nn.functional.upsample is deprecated. Use nn.functional.interpolate instead.\n",
+      "  warnings.warn(\"nn.functional.upsample is deprecated. Use nn.functional.interpolate instead.\")\n",
+      "/home/ubuntu/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages/torch/nn/functional.py:2539: UserWarning: Default upsampling behavior when mode=bilinear is changed to align_corners=False since 0.4.0. Please specify align_corners=True if the old behavior is desired. See the documentation of nn.Upsample for details.\n",
+      "  \"See the documentation of nn.Upsample for details.\".format(mode))\n"
+     ]
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Iter 520 || Loss: 6.7143 || 10iter: 4.0647 sec.\n",
+      "Iter 530 || Loss: 6.9429 || 10iter: 5.8179 sec.\n",
+      "Iter 540 || Loss: 7.1472 || 10iter: 5.5745 sec.\n",
+      "Iter 550 || Loss: 6.5566 || 10iter: 5.5586 sec.\n",
+      "Iter 560 || Loss: 6.6824 || 10iter: 5.5716 sec.\n",
+      "Iter 570 || Loss: 6.3188 || 10iter: 5.5748 sec.\n",
+      "Iter 580 || Loss: 7.5331 || 10iter: 5.5319 sec.\n",
+      "Iter 590 || Loss: 7.1742 || 10iter: 5.6381 sec.\n",
+      "Iter 600 || Loss: 6.7491 || 10iter: 5.5343 sec.\n",
+      "Iter 610 || Loss: 6.7304 || 10iter: 5.5826 sec.\n",
+      "Iter 620 || Loss: 6.9430 || 10iter: 5.5852 sec.\n",
+      "Iter 630 || Loss: 6.5889 || 10iter: 5.6172 sec.\n",
+      "Iter 640 || Loss: 6.7310 || 10iter: 5.5789 sec.\n",
+      "Iter 650 || Loss: 7.0232 || 10iter: 5.5130 sec.\n",
+      "Iter 660 || Loss: 6.9646 || 10iter: 5.5654 sec.\n",
+      "Iter 670 || Loss: 6.5057 || 10iter: 5.5739 sec.\n",
+      "Iter 680 || Loss: 6.4517 || 10iter: 5.6385 sec.\n",
+      "Iter 690 || Loss: 6.1203 || 10iter: 5.5578 sec.\n",
+      "Iter 700 || Loss: 7.0602 || 10iter: 5.5554 sec.\n",
+      "Iter 710 || Loss: 6.5331 || 10iter: 5.7286 sec.\n",
+      "Iter 720 || Loss: 6.2941 || 10iter: 5.5942 sec.\n",
+      "Iter 730 || Loss: 7.2483 || 10iter: 5.5579 sec.\n",
+      "Iter 740 || Loss: 6.8518 || 10iter: 5.5964 sec.\n",
+      "Iter 750 || Loss: 6.2037 || 10iter: 5.6235 sec.\n",
+      "Iter 760 || Loss: 6.4748 || 10iter: 5.6492 sec.\n",
+      "batch time: 0.055700014517905676\n",
+      "batch time: 0.055693958687970016\n"
+     ]
+    }
+   ],
+   "source": [
+    "if DATASET == \"COCO\":\n",
+    "    num_epochs = 50\n",
+    "else:\n",
+    "    num_epochs = 200\n",
+    "\n",
+    "train_model(net, dataloaders_dict, criterion, optimizer, num_epochs=num_epochs)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.6.5"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}