{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Training MLP-models\n",
"\n",
"Linn Alexandra Emhjellen, 2021. "
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"import numpy as np\n",
"import pandas as pd\n",
"import matplotlib.pyplot as plt\n",
"import joblib"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"# Model, preprocessing and evaluation imports (deduplicated).\n",
"from sklearn.neural_network import MLPClassifier\n",
"from sklearn.preprocessing import StandardScaler\n",
"from sklearn import metrics\n",
"from sklearn.metrics import (\n",
"    roc_auc_score, accuracy_score, confusion_matrix, recall_score,\n",
"    roc_curve, auc, f1_score, r2_score, precision_score,\n",
")\n",
"\n",
"# NOTE(review): plot_confusion_matrix / plot_roc_curve were deprecated in\n",
"# scikit-learn 1.0 and removed in 1.2 — these imports fail on modern\n",
"# versions. Migrate to ConfusionMatrixDisplay / RocCurveDisplay.\n",
"from sklearn.metrics import plot_confusion_matrix\n",
"from sklearn.metrics import plot_roc_curve\n",
"\n",
"from sklearn.model_selection import cross_val_score\n",
"from sklearn.model_selection import cross_val_predict\n",
"from pandas.plotting import scatter_matrix\n",
"import seaborn as sns"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Loading calibration set"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"# Calibration (training) set exported from the GIS preprocessing workflow:\n",
"# train_X holds the terrain/lithology feature columns, train_y the target table.\n",
"train_X = pd.read_excel('ML_training_features.xlsx')\n",
"train_y = pd.read_excel('ML_training_target.xlsx')"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Loading validation set"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"# Hold-out validation set, same column layout as the calibration files above.\n",
"validation_X = pd.read_excel('ML_validation_features.xlsx')\n",
"validation_y = pd.read_excel('ML_validation_target.xlsx')"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
"# Reduce the single-column target frames to Series.\n",
"# Guarded with isinstance so the cell is idempotent: re-running it once the\n",
"# variables are already Series would otherwise raise a KeyError (the original\n",
"# unconditional indexing only worked on a fresh kernel).\n",
"if isinstance(validation_y, pd.DataFrame):\n",
"    validation_y = validation_y['ReleaseArea']\n",
"if isinstance(train_y, pd.DataFrame):\n",
"    train_y = train_y['ReleaseArea']"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### The optimized hyperparameters from RandomSearch with cross-validation"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [],
"source": [
"# Tuned MLP hyperparameters from the RandomSearch notebook.\n",
"# index_col=0 absorbs the index column the spreadsheet export created\n",
"# (previously dropped by name as 'Unnamed: 0', which is brittle: it raises\n",
"# KeyError if the file was saved without that artifact column).\n",
"best_params_MLP = pd.read_excel('MLP_best_params_RandomSearch.xlsx', index_col = 0)\n",
"# One dict per row -> list of parameter dicts usable as MLPClassifier(**params).\n",
"best_param_grid = best_params_MLP.to_dict(orient = 'records')"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### Defining feature combinations"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [],
"source": [
"# Feature combinations p1..p8, ordered roughly from simplest to richest.\n",
"# Each list selects columns from the training/validation feature tables.\n",
"# p1: slope only (baseline)\n",
"p1 = ['Slope']\n",
"\n",
"# p2: slope + elevation\n",
"p2 = ['Slope','Elevation']\n",
"\n",
"# p3: slope + one-hot aspect (compass direction) classes\n",
"p3 = ['Slope','North','East','North East','North West','South','South East','South West','West']\n",
"\n",
"# p4: terrain derivatives + distance to roads\n",
"p4 = ['Slope','Elevation','Plan_curv','Profile_curv','TRI','Distance_to_roads']\n",
"\n",
"# p5: p4 + flow direction / flow accumulation\n",
"p5 = ['Slope','Elevation','Plan_curv','Profile_curv','TRI','Flow_dir','Flow_acc','Distance_to_roads']\n",
"\n",
"# p6: terrain derivatives only (no road distance)\n",
"p6 = ['Slope','Elevation','Plan_curv','Profile_curv','TRI']\n",
"\n",
"# p7: all terrain/hydrology features EXCEPT slope\n",
"p7 = ['Elevation','North','East','North East','North West','South','South East','South West','West','Plan_curv','Profile_curv','TRI','Flow_dir','Flow_acc','Distance_to_roads']\n",
"\n",
"# p8: every feature, including the one-hot bedrock lithology classes\n",
"p8 = ['Slope','Elevation','North','East','North East','North West','South','South East','South West','West','Plan_curv','Profile_curv','TRI','Flow_dir','Flow_acc','Distance_to_roads',\n",
" 'Granite','Granodiorite','Tonalite','Trondhjemite','Syenite','Monzonite','Monzodiorite','Quartz diorite','Diorite','Gabbro','Norite','Peridotite','Pyroksenite','Charnockite','Mangerite','Anorthosite','Mafic dyke (Diabase, Dolerite)','Pegmatite/aplite','Felsic volcanic rock','Rhyolite','Dacite','Intermediate volcanic rock','Andesite','Mafic volcanic rock','Basalt',\n",
" 'Pyroclastic rock','Volcanic breccia','Siltstone','Sandstone','Greywacke','Arkose','Konglomerate','Sedimentary breccia','Limestone','Tuffite','Shale','Phyllite','Mica schist','Garnet mica schist','Calcareous phyllite','Calcareous mica schist','Amphibole schist','Graphitic schist','Calcite marble',\n",
" 'Metasandstone','Metagreywacke','Meta-arkose','Quartzite','Quartz schist','Mica gneiss','Calc-silicate rock','Amphibole gneiss','Granitic gneiss','Granodioritic gneiss','Tonalitic gneiss','Quartz dioritic gneiss','Monzonitic gneiss','Dioritic gneis','Orthopyroxene gneiss','Migmatite','Augengneiss',\n",
" 'Banded gneiss','Greenschist','Greenstone','Amphibolite','Metagabbro','Eclogite','Serpentinite','Mylonite/Phyllonite','Cataclasite']\n",
"\n",
"# Iterated over when training one MLP per feature combination.\n",
"feature_combinations = [p1,p2,p3,p4,p5,p6,p7,p8]"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Training of models"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Iteration 1, loss = 0.49329313\n",
"Iteration 2, loss = 0.25705141\n",
"Iteration 3, loss = 0.18584060\n",
"Iteration 4, loss = 0.16627930\n",
"Iteration 5, loss = 0.16033727\n",
"Iteration 6, loss = 0.15839359\n",
"Iteration 7, loss = 0.15758800\n",
"Iteration 8, loss = 0.15718740\n",
"Iteration 9, loss = 0.15686088\n",
"Iteration 10, loss = 0.15666376\n",
"Iteration 11, loss = 0.15652130\n",
"Iteration 12, loss = 0.15639366\n",
"Iteration 13, loss = 0.15629016\n",
"Iteration 14, loss = 0.15629755\n",
"Iteration 15, loss = 0.15615804\n",
"Iteration 16, loss = 0.15619507\n",
"Iteration 17, loss = 0.15616823\n",
"Iteration 18, loss = 0.15612967\n",
"Iteration 19, loss = 0.15618076\n",
"Iteration 20, loss = 0.15615905\n",
"Iteration 21, loss = 0.15602008\n",
"Iteration 22, loss = 0.15617861\n",
"Iteration 23, loss = 0.15607845\n",
"Iteration 24, loss = 0.15613335\n",
"Iteration 25, loss = 0.15618914\n",
"Iteration 26, loss = 0.15611393\n",
"Iteration 27, loss = 0.15606942\n",
"Iteration 28, loss = 0.15602103\n",
"Iteration 29, loss = 0.15609924\n",
"Iteration 30, loss = 0.15611314\n",
"Iteration 31, loss = 0.15608958\n",
"Iteration 32, loss = 0.15614109\n",
"Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
"Iteration 1, loss = 0.52433151\n",
"Iteration 2, loss = 0.28399034\n",
"Iteration 3, loss = 0.19399312\n",
"Iteration 4, loss = 0.16984064\n",
"Iteration 5, loss = 0.16222169\n",
"Iteration 6, loss = 0.15942585\n",
"Iteration 7, loss = 0.15808849\n",
"Iteration 8, loss = 0.15728959\n",
"Iteration 9, loss = 0.15671450\n",
"Iteration 10, loss = 0.15626527\n",
"Iteration 11, loss = 0.15588161\n",
"Iteration 12, loss = 0.15564939\n",
"Iteration 13, loss = 0.15536143\n",
"Iteration 14, loss = 0.15513520\n",
"Iteration 15, loss = 0.15509916\n",
"Iteration 16, loss = 0.15490460\n",
"Iteration 17, loss = 0.15484107\n",
"Iteration 18, loss = 0.15476605\n",
"Iteration 19, loss = 0.15474231\n",
"Iteration 20, loss = 0.15467232\n",
"Iteration 21, loss = 0.15463524\n",
"Iteration 22, loss = 0.15463161\n",
"Iteration 23, loss = 0.15456403\n",
"Iteration 24, loss = 0.15455025\n",
"Iteration 25, loss = 0.15461401\n",
"Iteration 26, loss = 0.15451101\n",
"Iteration 27, loss = 0.15449671\n",
"Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
"Iteration 1, loss = 0.50586640\n",
"Iteration 2, loss = 0.27903880\n",
"Iteration 3, loss = 0.20341648\n",
"Iteration 4, loss = 0.17662696\n",
"Iteration 5, loss = 0.16576619\n",
"Iteration 6, loss = 0.16104670\n",
"Iteration 7, loss = 0.15898948\n",
"Iteration 8, loss = 0.15793319\n",
"Iteration 9, loss = 0.15734918\n",
"Iteration 10, loss = 0.15697323\n",
"Iteration 11, loss = 0.15684745\n",
"Iteration 12, loss = 0.15661901\n",
"Iteration 13, loss = 0.15650445\n",
"Iteration 14, loss = 0.15641246\n",
"Iteration 15, loss = 0.15636716\n",
"Iteration 16, loss = 0.15630972\n",
"Iteration 17, loss = 0.15620840\n",
"Iteration 18, loss = 0.15621011\n",
"Iteration 19, loss = 0.15616230\n",
"Iteration 20, loss = 0.15616164\n",
"Iteration 21, loss = 0.15624596\n",
"Iteration 22, loss = 0.15613144\n",
"Iteration 23, loss = 0.15619193\n",
"Iteration 24, loss = 0.15610064\n",
"Iteration 25, loss = 0.15626796\n",
"Iteration 26, loss = 0.15619315\n",
"Iteration 27, loss = 0.15613447\n",
"Iteration 28, loss = 0.15605806\n",
"Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
"Iteration 1, loss = 0.34460575\n",
"Iteration 2, loss = 0.20628313\n",
"Iteration 3, loss = 0.17168116\n",
"Iteration 4, loss = 0.16187290\n",
"Iteration 5, loss = 0.15846432\n",
"Iteration 6, loss = 0.15723423\n",
"Iteration 7, loss = 0.15655288\n",
"Iteration 8, loss = 0.15616655\n",
"Iteration 9, loss = 0.15592711\n",
"Iteration 10, loss = 0.15573754\n",
"Iteration 11, loss = 0.15566896\n",
"Iteration 12, loss = 0.15553550\n",
"Iteration 13, loss = 0.15549204\n",
"Iteration 14, loss = 0.15544292\n",
"Iteration 15, loss = 0.15542823\n",
"Iteration 16, loss = 0.15541292\n",
"Iteration 17, loss = 0.15538259\n",
"Iteration 18, loss = 0.15535990\n",
"Iteration 19, loss = 0.15537308\n",
"Iteration 20, loss = 0.15534541\n",
"Iteration 21, loss = 0.15531703\n",
"Iteration 22, loss = 0.15528286\n",
"Iteration 23, loss = 0.15526436\n",
"Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
"Iteration 1, loss = 0.40462022\n",
"Iteration 2, loss = 0.24108111\n",
"Iteration 3, loss = 0.18803139\n",
"Iteration 4, loss = 0.16925378\n",
"Iteration 5, loss = 0.16186674\n",
"Iteration 6, loss = 0.15878432\n",
"Iteration 7, loss = 0.15728547\n",
"Iteration 8, loss = 0.15653268\n",
"Iteration 9, loss = 0.15604185\n",
"Iteration 10, loss = 0.15578172\n",
"Iteration 11, loss = 0.15560608\n",
"Iteration 12, loss = 0.15540907\n",
"Iteration 13, loss = 0.15526325\n",
"Iteration 14, loss = 0.15518721\n",
"Iteration 15, loss = 0.15510786\n",
"Iteration 16, loss = 0.15504654\n",
"Iteration 17, loss = 0.15504654\n",
"Iteration 18, loss = 0.15501531\n",
"Iteration 19, loss = 0.15494255\n",
"Iteration 20, loss = 0.15498039\n",
"Iteration 21, loss = 0.15495413\n",
"Iteration 22, loss = 0.15495857\n",
"Iteration 23, loss = 0.15492054\n",
"Iteration 24, loss = 0.15494339\n",
"Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
"Iteration 1, loss = 0.51492758\n",
"Iteration 2, loss = 0.29019487\n",
"Iteration 3, loss = 0.21036998\n",
"Iteration 4, loss = 0.18099555\n",
"Iteration 5, loss = 0.16889842\n",
"Iteration 6, loss = 0.16369961\n",
"Iteration 7, loss = 0.16156390\n",
"Iteration 8, loss = 0.16055444\n",
"Iteration 9, loss = 0.16017478\n",
"Iteration 10, loss = 0.15997587\n",
"Iteration 11, loss = 0.15985139\n",
"Iteration 12, loss = 0.15978691\n",
"Iteration 13, loss = 0.15973534\n",
"Iteration 14, loss = 0.15970043\n",
"Iteration 15, loss = 0.15965589\n",
"Iteration 16, loss = 0.15964621\n",
"Iteration 17, loss = 0.15961846\n",
"Iteration 18, loss = 0.15961136\n",
"Iteration 19, loss = 0.15957377\n",
"Iteration 20, loss = 0.15962471\n",
"Iteration 21, loss = 0.15956044\n",
"Iteration 22, loss = 0.15952746\n",
"Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"C:\\Users\\LEm\\Anaconda3\\lib\\site-packages\\sklearn\\neural_network\\_multilayer_perceptron.py:500: ConvergenceWarning: lbfgs failed to converge (status=1):\n",
"STOP: TOTAL NO. of ITERATIONS REACHED LIMIT.\n",
"\n",
"Increase the number of iterations (max_iter) or scale the data as shown in:\n",
" https://scikit-learn.org/stable/modules/preprocessing.html\n",
" self.n_iter_ = _check_optimize_result(\"lbfgs\", opt_res, self.max_iter)\n",
"C:\\Users\\LEm\\Anaconda3\\lib\\site-packages\\sklearn\\neural_network\\_multilayer_perceptron.py:500: ConvergenceWarning: lbfgs failed to converge (status=1):\n",
"STOP: TOTAL NO. of ITERATIONS REACHED LIMIT.\n",
"\n",
"Increase the number of iterations (max_iter) or scale the data as shown in:\n",
" https://scikit-learn.org/stable/modules/preprocessing.html\n",
" self.n_iter_ = _check_optimize_result(\"lbfgs\", opt_res, self.max_iter)\n",
"C:\\Users\\LEm\\Anaconda3\\lib\\site-packages\\sklearn\\neural_network\\_multilayer_perceptron.py:500: ConvergenceWarning: lbfgs failed to converge (status=1):\n",
"STOP: TOTAL NO. of ITERATIONS REACHED LIMIT.\n",
"\n",
"Increase the number of iterations (max_iter) or scale the data as shown in:\n",
" https://scikit-learn.org/stable/modules/preprocessing.html\n",
" self.n_iter_ = _check_optimize_result(\"lbfgs\", opt_res, self.max_iter)\n",
"C:\\Users\\LEm\\Anaconda3\\lib\\site-packages\\sklearn\\neural_network\\_multilayer_perceptron.py:500: ConvergenceWarning: lbfgs failed to converge (status=1):\n",
"STOP: TOTAL NO. of ITERATIONS REACHED LIMIT.\n",
"\n",
"Increase the number of iterations (max_iter) or scale the data as shown in:\n",
" https://scikit-learn.org/stable/modules/preprocessing.html\n",
" self.n_iter_ = _check_optimize_result(\"lbfgs\", opt_res, self.max_iter)\n",
"C:\\Users\\LEm\\Anaconda3\\lib\\site-packages\\sklearn\\neural_network\\_multilayer_perceptron.py:500: ConvergenceWarning: lbfgs failed to converge (status=1):\n",
"STOP: TOTAL NO. of ITERATIONS REACHED LIMIT.\n",
"\n",
"Increase the number of iterations (max_iter) or scale the data as shown in:\n",
" https://scikit-learn.org/stable/modules/preprocessing.html\n",
" self.n_iter_ = _check_optimize_result(\"lbfgs\", opt_res, self.max_iter)\n",
"C:\\Users\\LEm\\Anaconda3\\lib\\site-packages\\sklearn\\neural_network\\_multilayer_perceptron.py:500: ConvergenceWarning: lbfgs failed to converge (status=1):\n",
"STOP: TOTAL NO. of ITERATIONS REACHED LIMIT.\n",
"\n",
"Increase the number of iterations (max_iter) or scale the data as shown in:\n",
" https://scikit-learn.org/stable/modules/preprocessing.html\n",
" self.n_iter_ = _check_optimize_result(\"lbfgs\", opt_res, self.max_iter)\n",
"C:\\Users\\LEm\\Anaconda3\\lib\\site-packages\\sklearn\\neural_network\\_multilayer_perceptron.py:500: ConvergenceWarning: lbfgs failed to converge (status=1):\n",
"STOP: TOTAL NO. of ITERATIONS REACHED LIMIT.\n",
"\n",
"Increase the number of iterations (max_iter) or scale the data as shown in:\n",
" https://scikit-learn.org/stable/modules/preprocessing.html\n",
" self.n_iter_ = _check_optimize_result(\"lbfgs\", opt_res, self.max_iter)\n",
"C:\\Users\\LEm\\Anaconda3\\lib\\site-packages\\sklearn\\neural_network\\_multilayer_perceptron.py:500: ConvergenceWarning: lbfgs failed to converge (status=1):\n",
"STOP: TOTAL NO. of ITERATIONS REACHED LIMIT.\n",
"\n",
"Increase the number of iterations (max_iter) or scale the data as shown in:\n",
" https://scikit-learn.org/stable/modules/preprocessing.html\n",
" self.n_iter_ = _check_optimize_result(\"lbfgs\", opt_res, self.max_iter)\n",
"C:\\Users\\LEm\\Anaconda3\\lib\\site-packages\\sklearn\\neural_network\\_multilayer_perceptron.py:500: ConvergenceWarning: lbfgs failed to converge (status=1):\n",
"STOP: TOTAL NO. of ITERATIONS REACHED LIMIT.\n",
"\n",
"Increase the number of iterations (max_iter) or scale the data as shown in:\n",
" https://scikit-learn.org/stable/modules/preprocessing.html\n",
" self.n_iter_ = _check_optimize_result(\"lbfgs\", opt_res, self.max_iter)\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Iteration 1, loss = 0.48892907\n",
"Iteration 2, loss = 0.25166553\n",
"Iteration 3, loss = 0.20244590\n",
"Iteration 4, loss = 0.18799445\n",
"Iteration 5, loss = 0.18136986\n",
"Iteration 6, loss = 0.17754311\n",
"Iteration 7, loss = 0.17484031\n",
"Iteration 8, loss = 0.17321192\n",
"Iteration 9, loss = 0.17226888\n",
"Iteration 10, loss = 0.17119937\n",
"Iteration 11, loss = 0.17055765\n",
"Iteration 12, loss = 0.17006394\n",
"Iteration 13, loss = 0.16953533\n",
"Iteration 14, loss = 0.16930679\n",
"Iteration 15, loss = 0.16900752\n",
"Iteration 16, loss = 0.16901378\n",
"Iteration 17, loss = 0.16877523\n",
"Iteration 18, loss = 0.16883850\n",
"Iteration 19, loss = 0.16863304\n",
"Iteration 20, loss = 0.16852365\n",
"Iteration 21, loss = 0.16863493\n",
"Iteration 22, loss = 0.16836198\n",
"Iteration 23, loss = 0.16844024\n",
"Iteration 24, loss = 0.16832229\n",
"Iteration 25, loss = 0.16826388\n",
"Iteration 26, loss = 0.16828224\n",
"Iteration 27, loss = 0.16816954\n",
"Iteration 28, loss = 0.16823874\n",
"Iteration 29, loss = 0.16823795\n",
"Iteration 30, loss = 0.16802659\n",
"Iteration 31, loss = 0.16816288\n",
"Iteration 32, loss = 0.16821842\n",
"Iteration 33, loss = 0.16837137\n",
"Iteration 34, loss = 0.16810136\n",
"Iteration 35, loss = 0.16819344\n",
"Iteration 36, loss = 0.16820974\n",
"Iteration 37, loss = 0.16829844\n",
"Iteration 38, loss = 0.16799826\n",
"Iteration 39, loss = 0.16811486\n",
"Iteration 40, loss = 0.16802784\n",
"Iteration 41, loss = 0.16802602\n",
"Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
"Iteration 1, loss = 0.40774467\n",
"Iteration 2, loss = 0.23575661\n",
"Iteration 3, loss = 0.19632384\n",
"Iteration 4, loss = 0.18356234\n",
"Iteration 5, loss = 0.17780920\n",
"Iteration 6, loss = 0.17459158\n",
"Iteration 7, loss = 0.17301083\n",
"Iteration 8, loss = 0.17175527\n",
"Iteration 9, loss = 0.17060201\n",
"Iteration 10, loss = 0.16976347\n",
"Iteration 11, loss = 0.16916187\n",
"Iteration 12, loss = 0.16880093\n",
"Iteration 13, loss = 0.16834109\n",
"Iteration 14, loss = 0.16824384\n",
"Iteration 15, loss = 0.16811713\n",
"Iteration 16, loss = 0.16786555\n",
"Iteration 17, loss = 0.16782666\n",
"Iteration 18, loss = 0.16776854\n",
"Iteration 19, loss = 0.16760872\n",
"Iteration 20, loss = 0.16748182\n",
"Iteration 21, loss = 0.16754239\n",
"Iteration 22, loss = 0.16738183\n",
"Iteration 23, loss = 0.16741445\n",
"Iteration 24, loss = 0.16729324\n",
"Iteration 25, loss = 0.16731677\n",
"Iteration 26, loss = 0.16721602\n",
"Iteration 27, loss = 0.16723583\n",
"Iteration 28, loss = 0.16729458\n",
"Iteration 29, loss = 0.16732686\n",
"Iteration 30, loss = 0.16721191\n",
"Iteration 31, loss = 0.16710469\n",
"Iteration 32, loss = 0.16706350\n",
"Iteration 33, loss = 0.16709587\n",
"Iteration 34, loss = 0.16727938\n",
"Iteration 35, loss = 0.16714783\n",
"Iteration 36, loss = 0.16701478\n",
"Iteration 37, loss = 0.16709416\n",
"Iteration 38, loss = 0.16699268\n",
"Iteration 39, loss = 0.16706550\n",
"Iteration 40, loss = 0.16707734\n",
"Iteration 41, loss = 0.16708940\n",
"Iteration 42, loss = 0.16697216\n",
"Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
"Iteration 1, loss = 0.37119238\n",
"Iteration 2, loss = 0.23325188\n",
"Iteration 3, loss = 0.19707503\n",
"Iteration 4, loss = 0.18503992\n",
"Iteration 5, loss = 0.17977969\n",
"Iteration 6, loss = 0.17661953\n",
"Iteration 7, loss = 0.17479405\n",
"Iteration 8, loss = 0.17342150\n",
"Iteration 9, loss = 0.17236340\n",
"Iteration 10, loss = 0.17147184\n",
"Iteration 11, loss = 0.17099156\n",
"Iteration 12, loss = 0.17051620\n",
"Iteration 13, loss = 0.17034034\n",
"Iteration 14, loss = 0.17006215\n",
"Iteration 15, loss = 0.16997636\n",
"Iteration 16, loss = 0.16966370\n",
"Iteration 17, loss = 0.16963735\n",
"Iteration 18, loss = 0.16951474\n",
"Iteration 19, loss = 0.16957178\n",
"Iteration 20, loss = 0.16950931\n",
"Iteration 21, loss = 0.16932333\n",
"Iteration 22, loss = 0.16918855\n",
"Iteration 23, loss = 0.16920544\n",
"Iteration 24, loss = 0.16901588\n",
"Iteration 25, loss = 0.16911986\n",
"Iteration 26, loss = 0.16906293\n",
"Iteration 27, loss = 0.16905580\n",
"Iteration 28, loss = 0.16892609\n",
"Iteration 29, loss = 0.16892789\n",
"Iteration 30, loss = 0.16899630\n",
"Iteration 31, loss = 0.16892414\n",
"Iteration 32, loss = 0.16894479\n",
"Iteration 33, loss = 0.16889730\n",
"Iteration 34, loss = 0.16878126\n",
"Iteration 35, loss = 0.16887357\n",
"Iteration 36, loss = 0.16877341\n",
"Iteration 37, loss = 0.16880745\n",
"Iteration 38, loss = 0.16882094\n",
"Iteration 39, loss = 0.16877995\n",
"Iteration 40, loss = 0.16863914\n",
"Iteration 41, loss = 0.16875288\n",
"Iteration 42, loss = 0.16871678\n",
"Iteration 43, loss = 0.16881006\n",
"Iteration 44, loss = 0.16877048\n",
"Iteration 45, loss = 0.16879067\n",
"Iteration 46, loss = 0.16867355\n",
"Iteration 47, loss = 0.16868093\n",
"Iteration 48, loss = 0.16882907\n",
"Iteration 49, loss = 0.16864801\n",
"Iteration 50, loss = 0.16869462\n",
"Iteration 51, loss = 0.16871026\n",
"Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
"Iteration 1, loss = 0.48485259\n",
"Iteration 2, loss = 0.26705667\n",
"Iteration 3, loss = 0.20833848\n",
"Iteration 4, loss = 0.18948665\n",
"Iteration 5, loss = 0.18154200\n",
"Iteration 6, loss = 0.17740234\n",
"Iteration 7, loss = 0.17486414\n",
"Iteration 8, loss = 0.17284289\n",
"Iteration 9, loss = 0.17176974\n",
"Iteration 10, loss = 0.17065829\n",
"Iteration 11, loss = 0.16998781\n",
"Iteration 12, loss = 0.16946284\n",
"Iteration 13, loss = 0.16921698\n",
"Iteration 14, loss = 0.16889212\n",
"Iteration 15, loss = 0.16859596\n",
"Iteration 16, loss = 0.16845974\n",
"Iteration 17, loss = 0.16840697\n",
"Iteration 18, loss = 0.16836409\n",
"Iteration 19, loss = 0.16828213\n",
"Iteration 20, loss = 0.16813119\n",
"Iteration 21, loss = 0.16800244\n",
"Iteration 22, loss = 0.16796954\n",
"Iteration 23, loss = 0.16800481\n",
"Iteration 24, loss = 0.16802519\n",
"Iteration 25, loss = 0.16794227\n",
"Iteration 26, loss = 0.16790702\n",
"Iteration 27, loss = 0.16773063\n",
"Iteration 28, loss = 0.16785001\n",
"Iteration 29, loss = 0.16769915\n",
"Iteration 30, loss = 0.16767352\n",
"Iteration 31, loss = 0.16763481\n",
"Iteration 32, loss = 0.16758830\n",
"Iteration 33, loss = 0.16782005\n",
"Iteration 34, loss = 0.16766232\n",
"Iteration 35, loss = 0.16758715\n",
"Iteration 36, loss = 0.16763142\n",
"Iteration 37, loss = 0.16759924\n",
"Iteration 38, loss = 0.16750812\n",
"Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
"Iteration 1, loss = 0.45184388\n",
"Iteration 2, loss = 0.26075837\n",
"Iteration 3, loss = 0.21181514\n",
"Iteration 4, loss = 0.19401054\n",
"Iteration 5, loss = 0.18497037\n",
"Iteration 6, loss = 0.18006047\n",
"Iteration 7, loss = 0.17676484\n",
"Iteration 8, loss = 0.17447415\n",
"Iteration 9, loss = 0.17289412\n",
"Iteration 10, loss = 0.17177751\n",
"Iteration 11, loss = 0.17072472\n",
"Iteration 12, loss = 0.17022291\n",
"Iteration 13, loss = 0.16962331\n",
"Iteration 14, loss = 0.16930474\n",
"Iteration 15, loss = 0.16886634\n",
"Iteration 16, loss = 0.16859347\n",
"Iteration 17, loss = 0.16836746\n",
"Iteration 18, loss = 0.16830134\n",
"Iteration 19, loss = 0.16808344\n",
"Iteration 20, loss = 0.16803525\n",
"Iteration 21, loss = 0.16797420\n",
"Iteration 22, loss = 0.16775684\n",
"Iteration 23, loss = 0.16771924\n",
"Iteration 24, loss = 0.16758794\n",
"Iteration 25, loss = 0.16749703\n",
"Iteration 26, loss = 0.16728664\n",
"Iteration 27, loss = 0.16752444\n",
"Iteration 28, loss = 0.16737712\n",
"Iteration 29, loss = 0.16739390\n",
"Iteration 30, loss = 0.16715306\n",
"Iteration 31, loss = 0.16717241\n",
"Iteration 32, loss = 0.16727108\n",
"Iteration 33, loss = 0.16715481\n",
"Iteration 34, loss = 0.16718309\n",
"Iteration 35, loss = 0.16699755\n",
"Iteration 36, loss = 0.16693060\n",
"Iteration 37, loss = 0.16698798\n",
"Iteration 38, loss = 0.16713292\n",
"Iteration 39, loss = 0.16690967\n",
"Iteration 40, loss = 0.16704721\n",
"Iteration 41, loss = 0.16698122\n",
"Iteration 42, loss = 0.16699685\n",
"Iteration 43, loss = 0.16680964\n",
"Iteration 44, loss = 0.16698114\n",
"Iteration 45, loss = 0.16687054\n",
"Iteration 46, loss = 0.16678852\n",
"Iteration 47, loss = 0.16672873\n",
"Iteration 48, loss = 0.16691552\n",
"Iteration 49, loss = 0.16681188\n",
"Iteration 50, loss = 0.16694322\n",
"Iteration 51, loss = 0.16666273\n",
"Iteration 52, loss = 0.16681396\n",
"Iteration 53, loss = 0.16673067\n",
"Iteration 54, loss = 0.16677288\n",
"Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
"Iteration 1, loss = 0.39442659\n",
"Iteration 2, loss = 0.24122247\n",
"Iteration 3, loss = 0.20297112\n",
"Iteration 4, loss = 0.19050488\n",
"Iteration 5, loss = 0.18456645\n",
"Iteration 6, loss = 0.18087264\n",
"Iteration 7, loss = 0.17854079\n",
"Iteration 8, loss = 0.17677251\n",
"Iteration 9, loss = 0.17568654\n",
"Iteration 10, loss = 0.17477822\n",
"Iteration 11, loss = 0.17415676\n",
"Iteration 12, loss = 0.17363624\n",
"Iteration 13, loss = 0.17319183\n",
"Iteration 14, loss = 0.17272385\n",
"Iteration 15, loss = 0.17269493\n",
"Iteration 16, loss = 0.17256386\n",
"Iteration 17, loss = 0.17227955\n",
"Iteration 18, loss = 0.17235145\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Iteration 19, loss = 0.17211451\n",
"Iteration 20, loss = 0.17177975\n",
"Iteration 21, loss = 0.17190084\n",
"Iteration 22, loss = 0.17168785\n",
"Iteration 23, loss = 0.17158649\n",
"Iteration 24, loss = 0.17164491\n",
"Iteration 25, loss = 0.17143937\n",
"Iteration 26, loss = 0.17148690\n",
"Iteration 27, loss = 0.17157937\n",
"Iteration 28, loss = 0.17126049\n",
"Iteration 29, loss = 0.17128867\n",
"Iteration 30, loss = 0.17128537\n",
"Iteration 31, loss = 0.17120853\n",
"Iteration 32, loss = 0.17125408\n",
"Iteration 33, loss = 0.17126612\n",
"Iteration 34, loss = 0.17104033\n",
"Iteration 35, loss = 0.17128975\n",
"Iteration 36, loss = 0.17105420\n",
"Iteration 37, loss = 0.17100883\n",
"Iteration 38, loss = 0.17103718\n",
"Iteration 39, loss = 0.17107349\n",
"Iteration 40, loss = 0.17110511\n",
"Iteration 41, loss = 0.17100747\n",
"Iteration 42, loss = 0.17124864\n",
"Iteration 43, loss = 0.17091055\n",
"Iteration 44, loss = 0.17116297\n",
"Iteration 45, loss = 0.17099877\n",
"Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
"Iteration 1, loss = 0.40897591\n",
"Iteration 2, loss = 0.20491489\n",
"Iteration 3, loss = 0.16061433\n",
"Iteration 4, loss = 0.14736129\n",
"Iteration 5, loss = 0.14157138\n",
"Iteration 6, loss = 0.13868293\n",
"Iteration 7, loss = 0.13655832\n",
"Iteration 8, loss = 0.13509309\n",
"Iteration 9, loss = 0.13391833\n",
"Iteration 10, loss = 0.13316457\n",
"Iteration 11, loss = 0.13213131\n",
"Iteration 12, loss = 0.13124864\n",
"Iteration 13, loss = 0.13053236\n",
"Iteration 14, loss = 0.12978765\n",
"Iteration 15, loss = 0.12925183\n",
"Iteration 16, loss = 0.12840206\n",
"Iteration 17, loss = 0.12771913\n",
"Iteration 18, loss = 0.12717259\n",
"Iteration 19, loss = 0.12665013\n",
"Iteration 20, loss = 0.12594822\n",
"Iteration 21, loss = 0.12540389\n",
"Iteration 22, loss = 0.12457004\n",
"Iteration 23, loss = 0.12425874\n",
"Iteration 24, loss = 0.12360686\n",
"Iteration 25, loss = 0.12308933\n",
"Iteration 26, loss = 0.12256678\n",
"Iteration 27, loss = 0.12186461\n",
"Iteration 28, loss = 0.12140155\n",
"Iteration 29, loss = 0.12092938\n",
"Iteration 30, loss = 0.12063933\n",
"Iteration 31, loss = 0.11993412\n",
"Iteration 32, loss = 0.11956444\n",
"Iteration 33, loss = 0.11917599\n",
"Iteration 34, loss = 0.11883062\n",
"Iteration 35, loss = 0.11825976\n",
"Iteration 36, loss = 0.11807117\n",
"Iteration 37, loss = 0.11757102\n",
"Iteration 38, loss = 0.11741869\n",
"Iteration 39, loss = 0.11712029\n",
"Iteration 40, loss = 0.11693058\n",
"Iteration 41, loss = 0.11679223\n",
"Iteration 42, loss = 0.11649232\n",
"Iteration 43, loss = 0.11580996\n",
"Iteration 44, loss = 0.11604052\n",
"Iteration 45, loss = 0.11552174\n",
"Iteration 46, loss = 0.11539918\n",
"Iteration 47, loss = 0.11529089\n",
"Iteration 48, loss = 0.11488205\n",
"Iteration 49, loss = 0.11458887\n",
"Iteration 50, loss = 0.11463176\n",
"Iteration 51, loss = 0.11425597\n",
"Iteration 52, loss = 0.11419263\n",
"Iteration 53, loss = 0.11393306\n",
"Iteration 54, loss = 0.11379477\n",
"Iteration 55, loss = 0.11351327\n",
"Iteration 56, loss = 0.11350753\n",
"Iteration 57, loss = 0.11329965\n",
"Iteration 58, loss = 0.11305880\n",
"Iteration 59, loss = 0.11287142\n",
"Iteration 60, loss = 0.11276404\n",
"Iteration 61, loss = 0.11247335\n",
"Iteration 62, loss = 0.11242985\n",
"Iteration 63, loss = 0.11234262\n",
"Iteration 64, loss = 0.11209710\n",
"Iteration 65, loss = 0.11207833\n",
"Iteration 66, loss = 0.11188356\n",
"Iteration 67, loss = 0.11179314\n",
"Iteration 68, loss = 0.11176692\n",
"Iteration 69, loss = 0.11156732\n",
"Iteration 70, loss = 0.11123122\n",
"Iteration 71, loss = 0.11127696\n",
"Iteration 72, loss = 0.11094830\n",
"Iteration 73, loss = 0.11081313\n",
"Iteration 74, loss = 0.11056474\n",
"Iteration 75, loss = 0.11045096\n",
"Iteration 76, loss = 0.11048276\n",
"Iteration 77, loss = 0.11023710\n",
"Iteration 78, loss = 0.11020770\n",
"Iteration 79, loss = 0.10999733\n",
"Iteration 80, loss = 0.10977313\n",
"Iteration 81, loss = 0.10965476\n",
"Iteration 82, loss = 0.10962659\n",
"Iteration 83, loss = 0.10937868\n",
"Iteration 84, loss = 0.10947231\n",
"Iteration 85, loss = 0.10909863\n",
"Iteration 86, loss = 0.10911457\n",
"Iteration 87, loss = 0.10906718\n",
"Iteration 88, loss = 0.10868876\n",
"Iteration 89, loss = 0.10864203\n",
"Iteration 90, loss = 0.10883751\n",
"Iteration 91, loss = 0.10840840\n",
"Iteration 92, loss = 0.10842025\n",
"Iteration 93, loss = 0.10816441\n",
"Iteration 94, loss = 0.10789247\n",
"Iteration 95, loss = 0.10785869\n",
"Iteration 96, loss = 0.10765956\n",
"Iteration 97, loss = 0.10786537\n",
"Iteration 98, loss = 0.10761741\n",
"Iteration 99, loss = 0.10751390\n",
"Iteration 100, loss = 0.10725950\n",
"Iteration 101, loss = 0.10712434\n",
"Iteration 102, loss = 0.10713767\n",
"Iteration 103, loss = 0.10713726\n",
"Iteration 104, loss = 0.10699971\n",
"Iteration 105, loss = 0.10672956\n",
"Iteration 106, loss = 0.10635144\n",
"Iteration 107, loss = 0.10668503\n",
"Iteration 108, loss = 0.10650188\n",
"Iteration 109, loss = 0.10630787\n",
"Iteration 110, loss = 0.10606272\n",
"Iteration 111, loss = 0.10621902\n",
"Iteration 112, loss = 0.10624197\n",
"Iteration 113, loss = 0.10606449\n",
"Iteration 114, loss = 0.10586322\n",
"Iteration 115, loss = 0.10569439\n",
"Iteration 116, loss = 0.10566697\n",
"Iteration 117, loss = 0.10537140\n",
"Iteration 118, loss = 0.10539460\n",
"Iteration 119, loss = 0.10537168\n",
"Iteration 120, loss = 0.10532533\n",
"Iteration 121, loss = 0.10509790\n",
"Iteration 122, loss = 0.10517885\n",
"Iteration 123, loss = 0.10491513\n",
"Iteration 124, loss = 0.10496717\n",
"Iteration 125, loss = 0.10462477\n",
"Iteration 126, loss = 0.10434515\n",
"Iteration 127, loss = 0.10453951\n",
"Iteration 128, loss = 0.10476719\n",
"Iteration 129, loss = 0.10431263\n",
"Iteration 130, loss = 0.10438334\n",
"Iteration 131, loss = 0.10427270\n",
"Iteration 132, loss = 0.10427452\n",
"Iteration 133, loss = 0.10391870\n",
"Iteration 134, loss = 0.10394357\n",
"Iteration 135, loss = 0.10370141\n",
"Iteration 136, loss = 0.10377483\n",
"Iteration 137, loss = 0.10378036\n",
"Iteration 138, loss = 0.10368599\n",
"Iteration 139, loss = 0.10348320\n",
"Iteration 140, loss = 0.10327463\n",
"Iteration 141, loss = 0.10351367\n",
"Iteration 142, loss = 0.10315743\n",
"Iteration 143, loss = 0.10320340\n",
"Iteration 144, loss = 0.10306656\n",
"Iteration 145, loss = 0.10280504\n",
"Iteration 146, loss = 0.10308853\n",
"Iteration 147, loss = 0.10289381\n",
"Iteration 148, loss = 0.10286253\n",
"Iteration 149, loss = 0.10285425\n",
"Iteration 150, loss = 0.10243944\n",
"Iteration 151, loss = 0.10247167\n",
"Iteration 152, loss = 0.10264604\n",
"Iteration 153, loss = 0.10267425\n",
"Iteration 154, loss = 0.10220715\n",
"Iteration 155, loss = 0.10220601\n",
"Iteration 156, loss = 0.10215097\n",
"Iteration 157, loss = 0.10219035\n",
"Iteration 158, loss = 0.10194229\n",
"Iteration 159, loss = 0.10213069\n",
"Iteration 160, loss = 0.10197322\n",
"Iteration 161, loss = 0.10178664\n",
"Iteration 162, loss = 0.10163663\n",
"Iteration 163, loss = 0.10146818\n",
"Iteration 164, loss = 0.10165625\n",
"Iteration 165, loss = 0.10164161\n",
"Iteration 166, loss = 0.10161015\n",
"Iteration 167, loss = 0.10124129\n",
"Iteration 168, loss = 0.10138870\n",
"Iteration 169, loss = 0.10150982\n",
"Iteration 170, loss = 0.10121665\n",
"Iteration 171, loss = 0.10111842\n",
"Iteration 172, loss = 0.10098036\n",
"Iteration 173, loss = 0.10106474\n",
"Iteration 174, loss = 0.10115379\n",
"Iteration 175, loss = 0.10083411\n",
"Iteration 176, loss = 0.10086979\n",
"Iteration 177, loss = 0.10089412\n",
"Iteration 178, loss = 0.10126172\n",
"Iteration 179, loss = 0.10065022\n",
"Iteration 180, loss = 0.10050054\n",
"Iteration 181, loss = 0.10060114\n",
"Iteration 182, loss = 0.10031970\n",
"Iteration 183, loss = 0.10063114\n",
"Iteration 184, loss = 0.10026481\n",
"Iteration 185, loss = 0.10034847\n",
"Iteration 186, loss = 0.10029146\n",
"Iteration 187, loss = 0.10031472\n",
"Iteration 188, loss = 0.10032889\n",
"Iteration 189, loss = 0.10007487\n",
"Iteration 190, loss = 0.10025459\n",
"Iteration 191, loss = 0.10006748\n",
"Iteration 192, loss = 0.10000433\n",
"Iteration 193, loss = 0.09973964\n",
"Iteration 194, loss = 0.10005653\n",
"Iteration 195, loss = 0.09960111\n",
"Iteration 196, loss = 0.09993995\n",
"Iteration 197, loss = 0.09970206\n",
"Iteration 198, loss = 0.09951138\n",
"Iteration 199, loss = 0.09940953\n",
"Iteration 200, loss = 0.09954660\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"C:\\Users\\LEm\\Anaconda3\\lib\\site-packages\\sklearn\\neural_network\\_multilayer_perceptron.py:614: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn't converged yet.\n",
" warnings.warn(\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Iteration 1, loss = 0.43356551\n",
"Iteration 2, loss = 0.23849093\n",
"Iteration 3, loss = 0.17866213\n",
"Iteration 4, loss = 0.15609459\n",
"Iteration 5, loss = 0.14613023\n",
"Iteration 6, loss = 0.14082568\n",
"Iteration 7, loss = 0.13746849\n",
"Iteration 8, loss = 0.13516803\n",
"Iteration 9, loss = 0.13347502\n",
"Iteration 10, loss = 0.13197186\n",
"Iteration 11, loss = 0.13087770\n",
"Iteration 12, loss = 0.12977505\n",
"Iteration 13, loss = 0.12873746\n",
"Iteration 14, loss = 0.12810431\n",
"Iteration 15, loss = 0.12716588\n",
"Iteration 16, loss = 0.12635059\n",
"Iteration 17, loss = 0.12572446\n",
"Iteration 18, loss = 0.12501772\n",
"Iteration 19, loss = 0.12452491\n",
"Iteration 20, loss = 0.12384954\n",
"Iteration 21, loss = 0.12320323\n",
"Iteration 22, loss = 0.12283299\n",
"Iteration 23, loss = 0.12246799\n",
"Iteration 24, loss = 0.12175186\n",
"Iteration 25, loss = 0.12144506\n",
"Iteration 26, loss = 0.12079248\n",
"Iteration 27, loss = 0.12056707\n",
"Iteration 28, loss = 0.11995714\n",
"Iteration 29, loss = 0.11969717\n",
"Iteration 30, loss = 0.11928999\n",
"Iteration 31, loss = 0.11882624\n",
"Iteration 32, loss = 0.11835425\n",
"Iteration 33, loss = 0.11813705\n",
"Iteration 34, loss = 0.11799517\n",
"Iteration 35, loss = 0.11748253\n",
"Iteration 36, loss = 0.11717669\n",
"Iteration 37, loss = 0.11686940\n",
"Iteration 38, loss = 0.11628080\n",
"Iteration 39, loss = 0.11611221\n",
"Iteration 40, loss = 0.11587689\n",
"Iteration 41, loss = 0.11568098\n",
"Iteration 42, loss = 0.11533341\n",
"Iteration 43, loss = 0.11497885\n",
"Iteration 44, loss = 0.11466665\n",
"Iteration 45, loss = 0.11439837\n",
"Iteration 46, loss = 0.11426870\n",
"Iteration 47, loss = 0.11389005\n",
"Iteration 48, loss = 0.11358778\n",
"Iteration 49, loss = 0.11324126\n",
"Iteration 50, loss = 0.11301279\n",
"Iteration 51, loss = 0.11281221\n",
"Iteration 52, loss = 0.11254379\n",
"Iteration 53, loss = 0.11233526\n",
"Iteration 54, loss = 0.11236511\n",
"Iteration 55, loss = 0.11200612\n",
"Iteration 56, loss = 0.11176785\n",
"Iteration 57, loss = 0.11176140\n",
"Iteration 58, loss = 0.11137866\n",
"Iteration 59, loss = 0.11120599\n",
"Iteration 60, loss = 0.11075047\n",
"Iteration 61, loss = 0.11054427\n",
"Iteration 62, loss = 0.11057425\n",
"Iteration 63, loss = 0.11021192\n",
"Iteration 64, loss = 0.11031803\n",
"Iteration 65, loss = 0.11023544\n",
"Iteration 66, loss = 0.10992164\n",
"Iteration 67, loss = 0.10978261\n",
"Iteration 68, loss = 0.10951049\n",
"Iteration 69, loss = 0.10923910\n",
"Iteration 70, loss = 0.10920416\n",
"Iteration 71, loss = 0.10902912\n",
"Iteration 72, loss = 0.10875610\n",
"Iteration 73, loss = 0.10867936\n",
"Iteration 74, loss = 0.10840947\n",
"Iteration 75, loss = 0.10831088\n",
"Iteration 76, loss = 0.10831381\n",
"Iteration 77, loss = 0.10801277\n",
"Iteration 78, loss = 0.10778362\n",
"Iteration 79, loss = 0.10781692\n",
"Iteration 80, loss = 0.10757740\n",
"Iteration 81, loss = 0.10736303\n",
"Iteration 82, loss = 0.10740343\n",
"Iteration 83, loss = 0.10725969\n",
"Iteration 84, loss = 0.10705835\n",
"Iteration 85, loss = 0.10683921\n",
"Iteration 86, loss = 0.10696825\n",
"Iteration 87, loss = 0.10681899\n",
"Iteration 88, loss = 0.10664998\n",
"Iteration 89, loss = 0.10629189\n",
"Iteration 90, loss = 0.10631343\n",
"Iteration 91, loss = 0.10604428\n",
"Iteration 92, loss = 0.10612370\n",
"Iteration 93, loss = 0.10592695\n",
"Iteration 94, loss = 0.10569485\n",
"Iteration 95, loss = 0.10574030\n",
"Iteration 96, loss = 0.10561443\n",
"Iteration 97, loss = 0.10534353\n",
"Iteration 98, loss = 0.10527700\n",
"Iteration 99, loss = 0.10517242\n",
"Iteration 100, loss = 0.10511396\n",
"Iteration 101, loss = 0.10480883\n",
"Iteration 102, loss = 0.10496312\n",
"Iteration 103, loss = 0.10478879\n",
"Iteration 104, loss = 0.10453314\n",
"Iteration 105, loss = 0.10435692\n",
"Iteration 106, loss = 0.10459372\n",
"Iteration 107, loss = 0.10436960\n",
"Iteration 108, loss = 0.10433628\n",
"Iteration 109, loss = 0.10427136\n",
"Iteration 110, loss = 0.10406141\n",
"Iteration 111, loss = 0.10387250\n",
"Iteration 112, loss = 0.10365716\n",
"Iteration 113, loss = 0.10382640\n",
"Iteration 114, loss = 0.10387294\n",
"Iteration 115, loss = 0.10328275\n",
"Iteration 116, loss = 0.10346167\n",
"Iteration 117, loss = 0.10322722\n",
"Iteration 118, loss = 0.10309399\n",
"Iteration 119, loss = 0.10309391\n",
"Iteration 120, loss = 0.10278257\n",
"Iteration 121, loss = 0.10307586\n",
"Iteration 122, loss = 0.10278153\n",
"Iteration 123, loss = 0.10264387\n",
"Iteration 124, loss = 0.10249687\n",
"Iteration 125, loss = 0.10240546\n",
"Iteration 126, loss = 0.10259213\n",
"Iteration 127, loss = 0.10213349\n",
"Iteration 128, loss = 0.10208293\n",
"Iteration 129, loss = 0.10189678\n",
"Iteration 130, loss = 0.10197179\n",
"Iteration 131, loss = 0.10171915\n",
"Iteration 132, loss = 0.10194582\n",
"Iteration 133, loss = 0.10167121\n",
"Iteration 134, loss = 0.10148496\n",
"Iteration 135, loss = 0.10157192\n",
"Iteration 136, loss = 0.10149305\n",
"Iteration 137, loss = 0.10108589\n",
"Iteration 138, loss = 0.10143215\n",
"Iteration 139, loss = 0.10112576\n",
"Iteration 140, loss = 0.10092738\n",
"Iteration 141, loss = 0.10099689\n",
"Iteration 142, loss = 0.10069907\n",
"Iteration 143, loss = 0.10103510\n",
"Iteration 144, loss = 0.10079576\n",
"Iteration 145, loss = 0.10059007\n",
"Iteration 146, loss = 0.10063062\n",
"Iteration 147, loss = 0.10046514\n",
"Iteration 148, loss = 0.10006055\n",
"Iteration 149, loss = 0.10007459\n",
"Iteration 150, loss = 0.09981699\n",
"Iteration 151, loss = 0.09994971\n",
"Iteration 152, loss = 0.09986407\n",
"Iteration 153, loss = 0.09998886\n",
"Iteration 154, loss = 0.09983301\n",
"Iteration 155, loss = 0.09970378\n",
"Iteration 156, loss = 0.09942919\n",
"Iteration 157, loss = 0.09959601\n",
"Iteration 158, loss = 0.09908986\n",
"Iteration 159, loss = 0.09936984\n",
"Iteration 160, loss = 0.09903119\n",
"Iteration 161, loss = 0.09932123\n",
"Iteration 162, loss = 0.09882719\n",
"Iteration 163, loss = 0.09880950\n",
"Iteration 164, loss = 0.09872081\n",
"Iteration 165, loss = 0.09859784\n",
"Iteration 166, loss = 0.09851851\n",
"Iteration 167, loss = 0.09861369\n",
"Iteration 168, loss = 0.09829350\n",
"Iteration 169, loss = 0.09819791\n",
"Iteration 170, loss = 0.09841358\n",
"Iteration 171, loss = 0.09829872\n",
"Iteration 172, loss = 0.09811978\n",
"Iteration 173, loss = 0.09809051\n",
"Iteration 174, loss = 0.09793465\n",
"Iteration 175, loss = 0.09814784\n",
"Iteration 176, loss = 0.09790551\n",
"Iteration 177, loss = 0.09768974\n",
"Iteration 178, loss = 0.09770476\n",
"Iteration 179, loss = 0.09750356\n",
"Iteration 180, loss = 0.09761159\n",
"Iteration 181, loss = 0.09779108\n",
"Iteration 182, loss = 0.09709539\n",
"Iteration 183, loss = 0.09743122\n",
"Iteration 184, loss = 0.09702215\n",
"Iteration 185, loss = 0.09728595\n",
"Iteration 186, loss = 0.09701341\n",
"Iteration 187, loss = 0.09732841\n",
"Iteration 188, loss = 0.09701476\n",
"Iteration 189, loss = 0.09740518\n",
"Iteration 190, loss = 0.09692234\n",
"Iteration 191, loss = 0.09711267\n",
"Iteration 192, loss = 0.09679956\n",
"Iteration 193, loss = 0.09676010\n",
"Iteration 194, loss = 0.09670037\n",
"Iteration 195, loss = 0.09677735\n",
"Iteration 196, loss = 0.09666983\n",
"Iteration 197, loss = 0.09646177\n",
"Iteration 198, loss = 0.09664060\n",
"Iteration 199, loss = 0.09651875\n",
"Iteration 200, loss = 0.09635682\n",
"Iteration 1, loss = 0.46810587\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"C:\\Users\\LEm\\Anaconda3\\lib\\site-packages\\sklearn\\neural_network\\_multilayer_perceptron.py:614: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn't converged yet.\n",
" warnings.warn(\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Iteration 2, loss = 0.23961161\n",
"Iteration 3, loss = 0.17553127\n",
"Iteration 4, loss = 0.15373956\n",
"Iteration 5, loss = 0.14481658\n",
"Iteration 6, loss = 0.13981566\n",
"Iteration 7, loss = 0.13697285\n",
"Iteration 8, loss = 0.13506786\n",
"Iteration 9, loss = 0.13341086\n",
"Iteration 10, loss = 0.13230645\n",
"Iteration 11, loss = 0.13127388\n",
"Iteration 12, loss = 0.13005548\n",
"Iteration 13, loss = 0.12942309\n",
"Iteration 14, loss = 0.12867178\n",
"Iteration 15, loss = 0.12805260\n",
"Iteration 16, loss = 0.12706528\n",
"Iteration 17, loss = 0.12653286\n",
"Iteration 18, loss = 0.12573598\n",
"Iteration 19, loss = 0.12525984\n",
"Iteration 20, loss = 0.12436652\n",
"Iteration 21, loss = 0.12384305\n",
"Iteration 22, loss = 0.12326031\n",
"Iteration 23, loss = 0.12255092\n",
"Iteration 24, loss = 0.12203784\n",
"Iteration 25, loss = 0.12167608\n",
"Iteration 26, loss = 0.12114012\n",
"Iteration 27, loss = 0.12068833\n",
"Iteration 28, loss = 0.12007659\n",
"Iteration 29, loss = 0.11968818\n",
"Iteration 30, loss = 0.11933996\n",
"Iteration 31, loss = 0.11908276\n",
"Iteration 32, loss = 0.11860323\n",
"Iteration 33, loss = 0.11837083\n",
"Iteration 34, loss = 0.11819309\n",
"Iteration 35, loss = 0.11767230\n",
"Iteration 36, loss = 0.11757445\n",
"Iteration 37, loss = 0.11726435\n",
"Iteration 38, loss = 0.11716501\n",
"Iteration 39, loss = 0.11693307\n",
"Iteration 40, loss = 0.11656083\n",
"Iteration 41, loss = 0.11619019\n",
"Iteration 42, loss = 0.11632885\n",
"Iteration 43, loss = 0.11615666\n",
"Iteration 44, loss = 0.11605944\n",
"Iteration 45, loss = 0.11581778\n",
"Iteration 46, loss = 0.11549254\n",
"Iteration 47, loss = 0.11533195\n",
"Iteration 48, loss = 0.11502465\n",
"Iteration 49, loss = 0.11501509\n",
"Iteration 50, loss = 0.11483031\n",
"Iteration 51, loss = 0.11478859\n",
"Iteration 52, loss = 0.11469542\n",
"Iteration 53, loss = 0.11448910\n",
"Iteration 54, loss = 0.11441578\n",
"Iteration 55, loss = 0.11413495\n",
"Iteration 56, loss = 0.11401310\n",
"Iteration 57, loss = 0.11405234\n",
"Iteration 58, loss = 0.11390248\n",
"Iteration 59, loss = 0.11345772\n",
"Iteration 60, loss = 0.11356341\n",
"Iteration 61, loss = 0.11327183\n",
"Iteration 62, loss = 0.11305218\n",
"Iteration 63, loss = 0.11290451\n",
"Iteration 64, loss = 0.11301087\n",
"Iteration 65, loss = 0.11287765\n",
"Iteration 66, loss = 0.11264418\n",
"Iteration 67, loss = 0.11249902\n",
"Iteration 68, loss = 0.11253580\n",
"Iteration 69, loss = 0.11227510\n",
"Iteration 70, loss = 0.11228525\n",
"Iteration 71, loss = 0.11217056\n",
"Iteration 72, loss = 0.11200893\n",
"Iteration 73, loss = 0.11188258\n",
"Iteration 74, loss = 0.11159650\n",
"Iteration 75, loss = 0.11127731\n",
"Iteration 76, loss = 0.11136566\n",
"Iteration 77, loss = 0.11134541\n",
"Iteration 78, loss = 0.11117831\n",
"Iteration 79, loss = 0.11093649\n",
"Iteration 80, loss = 0.11091664\n",
"Iteration 81, loss = 0.11083578\n",
"Iteration 82, loss = 0.11064341\n",
"Iteration 83, loss = 0.11062689\n",
"Iteration 84, loss = 0.11042099\n",
"Iteration 85, loss = 0.11050290\n",
"Iteration 86, loss = 0.11028784\n",
"Iteration 87, loss = 0.11015249\n",
"Iteration 88, loss = 0.10981871\n",
"Iteration 89, loss = 0.10996263\n",
"Iteration 90, loss = 0.10979890\n",
"Iteration 91, loss = 0.10963595\n",
"Iteration 92, loss = 0.10976104\n",
"Iteration 93, loss = 0.10960753\n",
"Iteration 94, loss = 0.10962654\n",
"Iteration 95, loss = 0.10917045\n",
"Iteration 96, loss = 0.10931792\n",
"Iteration 97, loss = 0.10920386\n",
"Iteration 98, loss = 0.10911543\n",
"Iteration 99, loss = 0.10894554\n",
"Iteration 100, loss = 0.10888608\n",
"Iteration 101, loss = 0.10870336\n",
"Iteration 102, loss = 0.10881075\n",
"Iteration 103, loss = 0.10849141\n",
"Iteration 104, loss = 0.10878905\n",
"Iteration 105, loss = 0.10828928\n",
"Iteration 106, loss = 0.10843542\n",
"Iteration 107, loss = 0.10805449\n",
"Iteration 108, loss = 0.10809105\n",
"Iteration 109, loss = 0.10792479\n",
"Iteration 110, loss = 0.10772959\n",
"Iteration 111, loss = 0.10786475\n",
"Iteration 112, loss = 0.10777426\n",
"Iteration 113, loss = 0.10756720\n",
"Iteration 114, loss = 0.10739802\n",
"Iteration 115, loss = 0.10754857\n",
"Iteration 116, loss = 0.10756324\n",
"Iteration 117, loss = 0.10733551\n",
"Iteration 118, loss = 0.10719033\n",
"Iteration 119, loss = 0.10714336\n",
"Iteration 120, loss = 0.10694764\n",
"Iteration 121, loss = 0.10697101\n",
"Iteration 122, loss = 0.10680657\n",
"Iteration 123, loss = 0.10707832\n",
"Iteration 124, loss = 0.10701797\n",
"Iteration 125, loss = 0.10650862\n",
"Iteration 126, loss = 0.10650050\n",
"Iteration 127, loss = 0.10628593\n",
"Iteration 128, loss = 0.10621985\n",
"Iteration 129, loss = 0.10622946\n",
"Iteration 130, loss = 0.10601904\n",
"Iteration 131, loss = 0.10594210\n",
"Iteration 132, loss = 0.10566752\n",
"Iteration 133, loss = 0.10606996\n",
"Iteration 134, loss = 0.10556498\n",
"Iteration 135, loss = 0.10540973\n",
"Iteration 136, loss = 0.10577654\n",
"Iteration 137, loss = 0.10539632\n",
"Iteration 138, loss = 0.10531305\n",
"Iteration 139, loss = 0.10533044\n",
"Iteration 140, loss = 0.10536485\n",
"Iteration 141, loss = 0.10515185\n",
"Iteration 142, loss = 0.10510210\n",
"Iteration 143, loss = 0.10513083\n",
"Iteration 144, loss = 0.10485209\n",
"Iteration 145, loss = 0.10477501\n",
"Iteration 146, loss = 0.10471374\n",
"Iteration 147, loss = 0.10461789\n",
"Iteration 148, loss = 0.10460988\n",
"Iteration 149, loss = 0.10431695\n",
"Iteration 150, loss = 0.10450363\n",
"Iteration 151, loss = 0.10433268\n",
"Iteration 152, loss = 0.10412841\n",
"Iteration 153, loss = 0.10411640\n",
"Iteration 154, loss = 0.10403214\n",
"Iteration 155, loss = 0.10384206\n",
"Iteration 156, loss = 0.10384894\n",
"Iteration 157, loss = 0.10412294\n",
"Iteration 158, loss = 0.10374834\n",
"Iteration 159, loss = 0.10386583\n",
"Iteration 160, loss = 0.10372079\n",
"Iteration 161, loss = 0.10355907\n",
"Iteration 162, loss = 0.10333719\n",
"Iteration 163, loss = 0.10313314\n",
"Iteration 164, loss = 0.10356427\n",
"Iteration 165, loss = 0.10319252\n",
"Iteration 166, loss = 0.10317775\n",
"Iteration 167, loss = 0.10305032\n",
"Iteration 168, loss = 0.10313587\n",
"Iteration 169, loss = 0.10283570\n",
"Iteration 170, loss = 0.10310101\n",
"Iteration 171, loss = 0.10284818\n",
"Iteration 172, loss = 0.10282677\n",
"Iteration 173, loss = 0.10298462\n",
"Iteration 174, loss = 0.10249216\n",
"Iteration 175, loss = 0.10270290\n",
"Iteration 176, loss = 0.10257433\n",
"Iteration 177, loss = 0.10291119\n",
"Iteration 178, loss = 0.10261223\n",
"Iteration 179, loss = 0.10239474\n",
"Iteration 180, loss = 0.10210798\n",
"Iteration 181, loss = 0.10240403\n",
"Iteration 182, loss = 0.10223679\n",
"Iteration 183, loss = 0.10220652\n",
"Iteration 184, loss = 0.10194008\n",
"Iteration 185, loss = 0.10192381\n",
"Iteration 186, loss = 0.10167848\n",
"Iteration 187, loss = 0.10181912\n",
"Iteration 188, loss = 0.10173835\n",
"Iteration 189, loss = 0.10169405\n",
"Iteration 190, loss = 0.10172427\n",
"Iteration 191, loss = 0.10178386\n",
"Iteration 192, loss = 0.10144322\n",
"Iteration 193, loss = 0.10167920\n",
"Iteration 194, loss = 0.10153498\n",
"Iteration 195, loss = 0.10153680\n",
"Iteration 196, loss = 0.10126247\n",
"Iteration 197, loss = 0.10135103\n",
"Iteration 198, loss = 0.10137176\n",
"Iteration 199, loss = 0.10140997\n",
"Iteration 200, loss = 0.10073853\n",
"Iteration 1, loss = 0.49903541\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"C:\\Users\\LEm\\Anaconda3\\lib\\site-packages\\sklearn\\neural_network\\_multilayer_perceptron.py:614: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn't converged yet.\n",
" warnings.warn(\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Iteration 2, loss = 0.23996454\n",
"Iteration 3, loss = 0.17680339\n",
"Iteration 4, loss = 0.15445503\n",
"Iteration 5, loss = 0.14503697\n",
"Iteration 6, loss = 0.14007051\n",
"Iteration 7, loss = 0.13723031\n",
"Iteration 8, loss = 0.13536892\n",
"Iteration 9, loss = 0.13412958\n",
"Iteration 10, loss = 0.13311274\n",
"Iteration 11, loss = 0.13231649\n",
"Iteration 12, loss = 0.13140123\n",
"Iteration 13, loss = 0.13076611\n",
"Iteration 14, loss = 0.12998822\n",
"Iteration 15, loss = 0.12917700\n",
"Iteration 16, loss = 0.12884207\n",
"Iteration 17, loss = 0.12835663\n",
"Iteration 18, loss = 0.12762448\n",
"Iteration 19, loss = 0.12703817\n",
"Iteration 20, loss = 0.12653163\n",
"Iteration 21, loss = 0.12611982\n",
"Iteration 22, loss = 0.12566805\n",
"Iteration 23, loss = 0.12496474\n",
"Iteration 24, loss = 0.12453627\n",
"Iteration 25, loss = 0.12408784\n",
"Iteration 26, loss = 0.12378943\n",
"Iteration 27, loss = 0.12320687\n",
"Iteration 28, loss = 0.12279574\n",
"Iteration 29, loss = 0.12240849\n",
"Iteration 30, loss = 0.12187302\n",
"Iteration 31, loss = 0.12163900\n",
"Iteration 32, loss = 0.12111030\n",
"Iteration 33, loss = 0.12078810\n",
"Iteration 34, loss = 0.12039864\n",
"Iteration 35, loss = 0.12012527\n",
"Iteration 36, loss = 0.11980026\n",
"Iteration 37, loss = 0.11944755\n",
"Iteration 38, loss = 0.11906718\n",
"Iteration 39, loss = 0.11872786\n",
"Iteration 40, loss = 0.11834345\n",
"Iteration 41, loss = 0.11800940\n",
"Iteration 42, loss = 0.11783475\n",
"Iteration 43, loss = 0.11749065\n",
"Iteration 44, loss = 0.11708042\n",
"Iteration 45, loss = 0.11703027\n",
"Iteration 46, loss = 0.11669500\n",
"Iteration 47, loss = 0.11639282\n",
"Iteration 48, loss = 0.11591065\n",
"Iteration 49, loss = 0.11607052\n",
"Iteration 50, loss = 0.11554729\n",
"Iteration 51, loss = 0.11524573\n",
"Iteration 52, loss = 0.11512003\n",
"Iteration 53, loss = 0.11480720\n",
"Iteration 54, loss = 0.11462497\n",
"Iteration 55, loss = 0.11436304\n",
"Iteration 56, loss = 0.11424428\n",
"Iteration 57, loss = 0.11377559\n",
"Iteration 58, loss = 0.11355711\n",
"Iteration 59, loss = 0.11355153\n",
"Iteration 60, loss = 0.11325172\n",
"Iteration 61, loss = 0.11314749\n",
"Iteration 62, loss = 0.11288669\n",
"Iteration 63, loss = 0.11280660\n",
"Iteration 64, loss = 0.11236056\n",
"Iteration 65, loss = 0.11221430\n",
"Iteration 66, loss = 0.11218288\n",
"Iteration 67, loss = 0.11185788\n",
"Iteration 68, loss = 0.11155435\n",
"Iteration 69, loss = 0.11151649\n",
"Iteration 70, loss = 0.11132172\n",
"Iteration 71, loss = 0.11097190\n",
"Iteration 72, loss = 0.11091300\n",
"Iteration 73, loss = 0.11062156\n",
"Iteration 74, loss = 0.11045231\n",
"Iteration 75, loss = 0.11023499\n",
"Iteration 76, loss = 0.11003374\n",
"Iteration 77, loss = 0.10999038\n",
"Iteration 78, loss = 0.10998158\n",
"Iteration 79, loss = 0.10974181\n",
"Iteration 80, loss = 0.10950072\n",
"Iteration 81, loss = 0.10924894\n",
"Iteration 82, loss = 0.10960257\n",
"Iteration 83, loss = 0.10914394\n",
"Iteration 84, loss = 0.10892495\n",
"Iteration 85, loss = 0.10875858\n",
"Iteration 86, loss = 0.10887258\n",
"Iteration 87, loss = 0.10851379\n",
"Iteration 88, loss = 0.10848456\n",
"Iteration 89, loss = 0.10823022\n",
"Iteration 90, loss = 0.10823072\n",
"Iteration 91, loss = 0.10798504\n",
"Iteration 92, loss = 0.10787693\n",
"Iteration 93, loss = 0.10772077\n",
"Iteration 94, loss = 0.10780026\n",
"Iteration 95, loss = 0.10766036\n",
"Iteration 96, loss = 0.10764297\n",
"Iteration 97, loss = 0.10725940\n",
"Iteration 98, loss = 0.10727274\n",
"Iteration 99, loss = 0.10709354\n",
"Iteration 100, loss = 0.10693753\n",
"Iteration 101, loss = 0.10685597\n",
"Iteration 102, loss = 0.10700852\n",
"Iteration 103, loss = 0.10671622\n",
"Iteration 104, loss = 0.10678261\n",
"Iteration 105, loss = 0.10680162\n",
"Iteration 106, loss = 0.10646614\n",
"Iteration 107, loss = 0.10650351\n",
"Iteration 108, loss = 0.10612507\n",
"Iteration 109, loss = 0.10616295\n",
"Iteration 110, loss = 0.10606954\n",
"Iteration 111, loss = 0.10603717\n",
"Iteration 112, loss = 0.10591302\n",
"Iteration 113, loss = 0.10587398\n",
"Iteration 114, loss = 0.10565493\n",
"Iteration 115, loss = 0.10568921\n",
"Iteration 116, loss = 0.10556568\n",
"Iteration 117, loss = 0.10554644\n",
"Iteration 118, loss = 0.10534658\n",
"Iteration 119, loss = 0.10528447\n",
"Iteration 120, loss = 0.10511684\n",
"Iteration 121, loss = 0.10508625\n",
"Iteration 122, loss = 0.10517991\n",
"Iteration 123, loss = 0.10498714\n",
"Iteration 124, loss = 0.10507368\n",
"Iteration 125, loss = 0.10511432\n",
"Iteration 126, loss = 0.10462353\n",
"Iteration 127, loss = 0.10495194\n",
"Iteration 128, loss = 0.10466122\n",
"Iteration 129, loss = 0.10435736\n",
"Iteration 130, loss = 0.10447508\n",
"Iteration 131, loss = 0.10439724\n",
"Iteration 132, loss = 0.10437094\n",
"Iteration 133, loss = 0.10453890\n",
"Iteration 134, loss = 0.10427113\n",
"Iteration 135, loss = 0.10426377\n",
"Iteration 136, loss = 0.10403779\n",
"Iteration 137, loss = 0.10401880\n",
"Iteration 138, loss = 0.10386571\n",
"Iteration 139, loss = 0.10399371\n",
"Iteration 140, loss = 0.10371884\n",
"Iteration 141, loss = 0.10367987\n",
"Iteration 142, loss = 0.10364555\n",
"Iteration 143, loss = 0.10365988\n",
"Iteration 144, loss = 0.10347037\n",
"Iteration 145, loss = 0.10333334\n",
"Iteration 146, loss = 0.10331486\n",
"Iteration 147, loss = 0.10327337\n",
"Iteration 148, loss = 0.10318770\n",
"Iteration 149, loss = 0.10324227\n",
"Iteration 150, loss = 0.10309424\n",
"Iteration 151, loss = 0.10314748\n",
"Iteration 152, loss = 0.10300073\n",
"Iteration 153, loss = 0.10304719\n",
"Iteration 154, loss = 0.10284292\n",
"Iteration 155, loss = 0.10305204\n",
"Iteration 156, loss = 0.10268886\n",
"Iteration 157, loss = 0.10265096\n",
"Iteration 158, loss = 0.10261053\n",
"Iteration 159, loss = 0.10242947\n",
"Iteration 160, loss = 0.10230153\n",
"Iteration 161, loss = 0.10233106\n",
"Iteration 162, loss = 0.10247030\n",
"Iteration 163, loss = 0.10221419\n",
"Iteration 164, loss = 0.10227879\n",
"Iteration 165, loss = 0.10232013\n",
"Iteration 166, loss = 0.10223487\n",
"Iteration 167, loss = 0.10204514\n",
"Iteration 168, loss = 0.10203022\n",
"Iteration 169, loss = 0.10182232\n",
"Iteration 170, loss = 0.10204997\n",
"Iteration 171, loss = 0.10185811\n",
"Iteration 172, loss = 0.10200771\n",
"Iteration 173, loss = 0.10174880\n",
"Iteration 174, loss = 0.10154825\n",
"Iteration 175, loss = 0.10164454\n",
"Iteration 176, loss = 0.10136641\n",
"Iteration 177, loss = 0.10138466\n",
"Iteration 178, loss = 0.10198603\n",
"Iteration 179, loss = 0.10117199\n",
"Iteration 180, loss = 0.10131854\n",
"Iteration 181, loss = 0.10121302\n",
"Iteration 182, loss = 0.10113391\n",
"Iteration 183, loss = 0.10119944\n",
"Iteration 184, loss = 0.10108123\n",
"Iteration 185, loss = 0.10105866\n",
"Iteration 186, loss = 0.10096500\n",
"Iteration 187, loss = 0.10105116\n",
"Iteration 188, loss = 0.10095365\n",
"Iteration 189, loss = 0.10067831\n",
"Iteration 190, loss = 0.10073863\n",
"Iteration 191, loss = 0.10071440\n",
"Iteration 192, loss = 0.10059405\n",
"Iteration 193, loss = 0.10082122\n",
"Iteration 194, loss = 0.10073720\n",
"Iteration 195, loss = 0.10062846\n",
"Iteration 196, loss = 0.10038727\n",
"Iteration 197, loss = 0.10057445\n",
"Iteration 198, loss = 0.10038241\n",
"Iteration 199, loss = 0.10046695\n",
"Iteration 200, loss = 0.10040110\n",
"Iteration 1, loss = 0.42725107\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"C:\\Users\\LEm\\Anaconda3\\lib\\site-packages\\sklearn\\neural_network\\_multilayer_perceptron.py:614: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn't converged yet.\n",
" warnings.warn(\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Iteration 2, loss = 0.21965303\n",
"Iteration 3, loss = 0.16567842\n",
"Iteration 4, loss = 0.14848574\n",
"Iteration 5, loss = 0.14138165\n",
"Iteration 6, loss = 0.13729175\n",
"Iteration 7, loss = 0.13508107\n",
"Iteration 8, loss = 0.13344559\n",
"Iteration 9, loss = 0.13208670\n",
"Iteration 10, loss = 0.13125669\n",
"Iteration 11, loss = 0.13045483\n",
"Iteration 12, loss = 0.12969200\n",
"Iteration 13, loss = 0.12873765\n",
"Iteration 14, loss = 0.12833032\n",
"Iteration 15, loss = 0.12757306\n",
"Iteration 16, loss = 0.12698184\n",
"Iteration 17, loss = 0.12635309\n",
"Iteration 18, loss = 0.12592885\n",
"Iteration 19, loss = 0.12539991\n",
"Iteration 20, loss = 0.12486567\n",
"Iteration 21, loss = 0.12436105\n",
"Iteration 22, loss = 0.12391219\n",
"Iteration 23, loss = 0.12369810\n",
"Iteration 24, loss = 0.12305622\n",
"Iteration 25, loss = 0.12256565\n",
"Iteration 26, loss = 0.12225405\n",
"Iteration 27, loss = 0.12174719\n",
"Iteration 28, loss = 0.12141148\n",
"Iteration 29, loss = 0.12103588\n",
"Iteration 30, loss = 0.12070892\n",
"Iteration 31, loss = 0.12035546\n",
"Iteration 32, loss = 0.12019287\n",
"Iteration 33, loss = 0.11987410\n",
"Iteration 34, loss = 0.11933051\n",
"Iteration 35, loss = 0.11922098\n",
"Iteration 36, loss = 0.11883649\n",
"Iteration 37, loss = 0.11836893\n",
"Iteration 38, loss = 0.11814827\n",
"Iteration 39, loss = 0.11784496\n",
"Iteration 40, loss = 0.11748284\n",
"Iteration 41, loss = 0.11698308\n",
"Iteration 42, loss = 0.11665089\n",
"Iteration 43, loss = 0.11653356\n",
"Iteration 44, loss = 0.11613117\n",
"Iteration 45, loss = 0.11594333\n",
"Iteration 46, loss = 0.11550943\n",
"Iteration 47, loss = 0.11539540\n",
"Iteration 48, loss = 0.11504744\n",
"Iteration 49, loss = 0.11472733\n",
"Iteration 50, loss = 0.11463653\n",
"Iteration 51, loss = 0.11431882\n",
"Iteration 52, loss = 0.11400867\n",
"Iteration 53, loss = 0.11399473\n",
"Iteration 54, loss = 0.11372458\n",
"Iteration 55, loss = 0.11345120\n",
"Iteration 56, loss = 0.11325951\n",
"Iteration 57, loss = 0.11301004\n",
"Iteration 58, loss = 0.11282524\n",
"Iteration 59, loss = 0.11282984\n",
"Iteration 60, loss = 0.11243261\n",
"Iteration 61, loss = 0.11231212\n",
"Iteration 62, loss = 0.11208028\n",
"Iteration 63, loss = 0.11179889\n",
"Iteration 64, loss = 0.11215111\n",
"Iteration 65, loss = 0.11153720\n",
"Iteration 66, loss = 0.11158923\n",
"Iteration 67, loss = 0.11134108\n",
"Iteration 68, loss = 0.11125873\n",
"Iteration 69, loss = 0.11094723\n",
"Iteration 70, loss = 0.11095575\n",
"Iteration 71, loss = 0.11098098\n",
"Iteration 72, loss = 0.11051653\n",
"Iteration 73, loss = 0.11060979\n",
"Iteration 74, loss = 0.11045427\n",
"Iteration 75, loss = 0.11022856\n",
"Iteration 76, loss = 0.11017579\n",
"Iteration 77, loss = 0.11000173\n",
"Iteration 78, loss = 0.10997037\n",
"Iteration 79, loss = 0.10994698\n",
"Iteration 80, loss = 0.10955548\n",
"Iteration 81, loss = 0.10963869\n",
"Iteration 82, loss = 0.10934290\n",
"Iteration 83, loss = 0.10927121\n",
"Iteration 84, loss = 0.10923054\n",
"Iteration 85, loss = 0.10916855\n",
"Iteration 86, loss = 0.10878040\n",
"Iteration 87, loss = 0.10892163\n",
"Iteration 88, loss = 0.10859224\n",
"Iteration 89, loss = 0.10854045\n",
"Iteration 90, loss = 0.10859080\n",
"Iteration 91, loss = 0.10832929\n",
"Iteration 92, loss = 0.10835963\n",
"Iteration 93, loss = 0.10819046\n",
"Iteration 94, loss = 0.10801177\n",
"Iteration 95, loss = 0.10839254\n",
"Iteration 96, loss = 0.10800848\n",
"Iteration 97, loss = 0.10798283\n",
"Iteration 98, loss = 0.10796854\n",
"Iteration 99, loss = 0.10761439\n",
"Iteration 100, loss = 0.10766013\n",
"Iteration 101, loss = 0.10755678\n",
"Iteration 102, loss = 0.10745228\n",
"Iteration 103, loss = 0.10747062\n",
"Iteration 104, loss = 0.10730348\n",
"Iteration 105, loss = 0.10735146\n",
"Iteration 106, loss = 0.10709983\n",
"Iteration 107, loss = 0.10690282\n",
"Iteration 108, loss = 0.10696780\n",
"Iteration 109, loss = 0.10669789\n",
"Iteration 110, loss = 0.10683009\n",
"Iteration 111, loss = 0.10669112\n",
"Iteration 112, loss = 0.10649826\n",
"Iteration 113, loss = 0.10647949\n",
"Iteration 114, loss = 0.10624972\n",
"Iteration 115, loss = 0.10623930\n",
"Iteration 116, loss = 0.10638907\n",
"Iteration 117, loss = 0.10602239\n",
"Iteration 118, loss = 0.10603819\n",
"Iteration 119, loss = 0.10600842\n",
"Iteration 120, loss = 0.10618583\n",
"Iteration 121, loss = 0.10566127\n",
"Iteration 122, loss = 0.10552875\n",
"Iteration 123, loss = 0.10553511\n",
"Iteration 124, loss = 0.10560901\n",
"Iteration 125, loss = 0.10537515\n",
"Iteration 126, loss = 0.10518448\n",
"Iteration 127, loss = 0.10516007\n",
"Iteration 128, loss = 0.10518988\n",
"Iteration 129, loss = 0.10520706\n",
"Iteration 130, loss = 0.10526302\n",
"Iteration 131, loss = 0.10490768\n",
"Iteration 132, loss = 0.10476072\n",
"Iteration 133, loss = 0.10502915\n",
"Iteration 134, loss = 0.10480382\n",
"Iteration 135, loss = 0.10479302\n",
"Iteration 136, loss = 0.10496805\n",
"Iteration 137, loss = 0.10434425\n",
"Iteration 138, loss = 0.10428548\n",
"Iteration 139, loss = 0.10429293\n",
"Iteration 140, loss = 0.10434061\n",
"Iteration 141, loss = 0.10409690\n",
"Iteration 142, loss = 0.10405744\n",
"Iteration 143, loss = 0.10398062\n",
"Iteration 144, loss = 0.10392563\n",
"Iteration 145, loss = 0.10404086\n",
"Iteration 146, loss = 0.10385322\n",
"Iteration 147, loss = 0.10381676\n",
"Iteration 148, loss = 0.10384175\n",
"Iteration 149, loss = 0.10375628\n",
"Iteration 150, loss = 0.10351330\n",
"Iteration 151, loss = 0.10358989\n",
"Iteration 152, loss = 0.10358375\n",
"Iteration 153, loss = 0.10330788\n",
"Iteration 154, loss = 0.10317744\n",
"Iteration 155, loss = 0.10321736\n",
"Iteration 156, loss = 0.10322781\n",
"Iteration 157, loss = 0.10299226\n",
"Iteration 158, loss = 0.10283386\n",
"Iteration 159, loss = 0.10303166\n",
"Iteration 160, loss = 0.10291080\n",
"Iteration 161, loss = 0.10246824\n",
"Iteration 162, loss = 0.10276931\n",
"Iteration 163, loss = 0.10263680\n",
"Iteration 164, loss = 0.10251354\n",
"Iteration 165, loss = 0.10244561\n",
"Iteration 166, loss = 0.10228693\n",
"Iteration 167, loss = 0.10222647\n",
"Iteration 168, loss = 0.10237940\n",
"Iteration 169, loss = 0.10251393\n",
"Iteration 170, loss = 0.10211877\n",
"Iteration 171, loss = 0.10220381\n",
"Iteration 172, loss = 0.10191486\n",
"Iteration 173, loss = 0.10190969\n",
"Iteration 174, loss = 0.10176447\n",
"Iteration 175, loss = 0.10195639\n",
"Iteration 176, loss = 0.10164751\n",
"Iteration 177, loss = 0.10158887\n",
"Iteration 178, loss = 0.10166718\n",
"Iteration 179, loss = 0.10171958\n",
"Iteration 180, loss = 0.10145396\n",
"Iteration 181, loss = 0.10142363\n",
"Iteration 182, loss = 0.10148908\n",
"Iteration 183, loss = 0.10137506\n",
"Iteration 184, loss = 0.10125377\n",
"Iteration 185, loss = 0.10117549\n",
"Iteration 186, loss = 0.10134435\n",
"Iteration 187, loss = 0.10096922\n",
"Iteration 188, loss = 0.10122052\n",
"Iteration 189, loss = 0.10099875\n",
"Iteration 190, loss = 0.10093662\n",
"Iteration 191, loss = 0.10116570\n",
"Iteration 192, loss = 0.10086976\n",
"Iteration 193, loss = 0.10086621\n",
"Iteration 194, loss = 0.10061784\n",
"Iteration 195, loss = 0.10088369\n",
"Iteration 196, loss = 0.10068848\n",
"Iteration 197, loss = 0.10026625\n",
"Iteration 198, loss = 0.10052452\n",
"Iteration 199, loss = 0.10031676\n",
"Iteration 200, loss = 0.10053073\n",
"Iteration 1, loss = 0.45624088\n",
"Iteration 2, loss = 0.23613385"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"C:\\Users\\LEm\\Anaconda3\\lib\\site-packages\\sklearn\\neural_network\\_multilayer_perceptron.py:614: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn't converged yet.\n",
" warnings.warn(\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"\n",
"Iteration 3, loss = 0.17808264\n",
"Iteration 4, loss = 0.15643869\n",
"Iteration 5, loss = 0.14727684\n",
"Iteration 6, loss = 0.14235755\n",
"Iteration 7, loss = 0.13987850\n",
"Iteration 8, loss = 0.13789349\n",
"Iteration 9, loss = 0.13685355\n",
"Iteration 10, loss = 0.13594695\n",
"Iteration 11, loss = 0.13494085\n",
"Iteration 12, loss = 0.13432384\n",
"Iteration 13, loss = 0.13356547\n",
"Iteration 14, loss = 0.13294589\n",
"Iteration 15, loss = 0.13236772\n",
"Iteration 16, loss = 0.13155841\n",
"Iteration 17, loss = 0.13125595\n",
"Iteration 18, loss = 0.13068431\n",
"Iteration 19, loss = 0.13011792\n",
"Iteration 20, loss = 0.12942175\n",
"Iteration 21, loss = 0.12872247\n",
"Iteration 22, loss = 0.12804403\n",
"Iteration 23, loss = 0.12740422\n",
"Iteration 24, loss = 0.12693347\n",
"Iteration 25, loss = 0.12624293\n",
"Iteration 26, loss = 0.12586076\n",
"Iteration 27, loss = 0.12559844\n",
"Iteration 28, loss = 0.12492624\n",
"Iteration 29, loss = 0.12405949\n",
"Iteration 30, loss = 0.12341299\n",
"Iteration 31, loss = 0.12320420\n",
"Iteration 32, loss = 0.12245905\n",
"Iteration 33, loss = 0.12227073\n",
"Iteration 34, loss = 0.12170180\n",
"Iteration 35, loss = 0.12117563\n",
"Iteration 36, loss = 0.12085984\n",
"Iteration 37, loss = 0.12044647\n",
"Iteration 38, loss = 0.12008314\n",
"Iteration 39, loss = 0.11984854\n",
"Iteration 40, loss = 0.11937541\n",
"Iteration 41, loss = 0.11910114\n",
"Iteration 42, loss = 0.11877178\n",
"Iteration 43, loss = 0.11867497\n",
"Iteration 44, loss = 0.11809863\n",
"Iteration 45, loss = 0.11774683\n",
"Iteration 46, loss = 0.11764573\n",
"Iteration 47, loss = 0.11718948\n",
"Iteration 48, loss = 0.11680777\n",
"Iteration 49, loss = 0.11656798\n",
"Iteration 50, loss = 0.11643747\n",
"Iteration 51, loss = 0.11599365\n",
"Iteration 52, loss = 0.11578449\n",
"Iteration 53, loss = 0.11599910\n",
"Iteration 54, loss = 0.11536137\n",
"Iteration 55, loss = 0.11528907\n",
"Iteration 56, loss = 0.11498588\n",
"Iteration 57, loss = 0.11478338\n",
"Iteration 58, loss = 0.11447574\n",
"Iteration 59, loss = 0.11445555\n",
"Iteration 60, loss = 0.11402412\n",
"Iteration 61, loss = 0.11406010\n",
"Iteration 62, loss = 0.11407303\n",
"Iteration 63, loss = 0.11371682\n",
"Iteration 64, loss = 0.11359855\n",
"Iteration 65, loss = 0.11333223\n",
"Iteration 66, loss = 0.11302841\n",
"Iteration 67, loss = 0.11294500\n",
"Iteration 68, loss = 0.11262687\n",
"Iteration 69, loss = 0.11278930\n",
"Iteration 70, loss = 0.11250827\n",
"Iteration 71, loss = 0.11265011\n",
"Iteration 72, loss = 0.11216074\n",
"Iteration 73, loss = 0.11188915\n",
"Iteration 74, loss = 0.11189894\n",
"Iteration 75, loss = 0.11203713\n",
"Iteration 76, loss = 0.11153670\n",
"Iteration 77, loss = 0.11154944\n",
"Iteration 78, loss = 0.11108303\n",
"Iteration 79, loss = 0.11152084\n",
"Iteration 80, loss = 0.11146934\n",
"Iteration 81, loss = 0.11097977\n",
"Iteration 82, loss = 0.11096172\n",
"Iteration 83, loss = 0.11069210\n",
"Iteration 84, loss = 0.11071194\n",
"Iteration 85, loss = 0.11052698\n",
"Iteration 86, loss = 0.11034170\n",
"Iteration 87, loss = 0.11017601\n",
"Iteration 88, loss = 0.10984103\n",
"Iteration 89, loss = 0.11021266\n",
"Iteration 90, loss = 0.10982900\n",
"Iteration 91, loss = 0.10992741\n",
"Iteration 92, loss = 0.10949949\n",
"Iteration 93, loss = 0.10951534\n",
"Iteration 94, loss = 0.10941513\n",
"Iteration 95, loss = 0.10908687\n",
"Iteration 96, loss = 0.10920607\n",
"Iteration 97, loss = 0.10898631\n",
"Iteration 98, loss = 0.10885518\n",
"Iteration 99, loss = 0.10892484\n",
"Iteration 100, loss = 0.10874612\n",
"Iteration 101, loss = 0.10842318\n",
"Iteration 102, loss = 0.10834895\n",
"Iteration 103, loss = 0.10862791\n",
"Iteration 104, loss = 0.10824203\n",
"Iteration 105, loss = 0.10821538\n",
"Iteration 106, loss = 0.10794684\n",
"Iteration 107, loss = 0.10806617\n",
"Iteration 108, loss = 0.10796174\n",
"Iteration 109, loss = 0.10784005\n",
"Iteration 110, loss = 0.10751872\n",
"Iteration 111, loss = 0.10739524\n",
"Iteration 112, loss = 0.10741894\n",
"Iteration 113, loss = 0.10716807\n",
"Iteration 114, loss = 0.10733838\n",
"Iteration 115, loss = 0.10727876\n",
"Iteration 116, loss = 0.10703398\n",
"Iteration 117, loss = 0.10688295\n",
"Iteration 118, loss = 0.10713796\n",
"Iteration 119, loss = 0.10704491\n",
"Iteration 120, loss = 0.10643432\n",
"Iteration 121, loss = 0.10679367\n",
"Iteration 122, loss = 0.10655070\n",
"Iteration 123, loss = 0.10656408\n",
"Iteration 124, loss = 0.10645786\n",
"Iteration 125, loss = 0.10622166\n",
"Iteration 126, loss = 0.10588558\n",
"Iteration 127, loss = 0.10613460\n",
"Iteration 128, loss = 0.10590946\n",
"Iteration 129, loss = 0.10594030\n",
"Iteration 130, loss = 0.10620713\n",
"Iteration 131, loss = 0.10601305\n",
"Iteration 132, loss = 0.10561698\n",
"Iteration 133, loss = 0.10568444\n",
"Iteration 134, loss = 0.10536442\n",
"Iteration 135, loss = 0.10576129\n",
"Iteration 136, loss = 0.10546863\n",
"Iteration 137, loss = 0.10587200\n",
"Iteration 138, loss = 0.10544890\n",
"Iteration 139, loss = 0.10522864\n",
"Iteration 140, loss = 0.10519635\n",
"Iteration 141, loss = 0.10521940\n",
"Iteration 142, loss = 0.10514110\n",
"Iteration 143, loss = 0.10531487\n",
"Iteration 144, loss = 0.10521347\n",
"Iteration 145, loss = 0.10489354\n",
"Iteration 146, loss = 0.10491786\n",
"Iteration 147, loss = 0.10470662\n",
"Iteration 148, loss = 0.10475416\n",
"Iteration 149, loss = 0.10472472\n",
"Iteration 150, loss = 0.10485926\n",
"Iteration 151, loss = 0.10428786\n",
"Iteration 152, loss = 0.10465196\n",
"Iteration 153, loss = 0.10457015\n",
"Iteration 154, loss = 0.10461047\n",
"Iteration 155, loss = 0.10415365\n",
"Iteration 156, loss = 0.10434362\n",
"Iteration 157, loss = 0.10418601\n",
"Iteration 158, loss = 0.10411519\n",
"Iteration 159, loss = 0.10407272\n",
"Iteration 160, loss = 0.10415704\n",
"Iteration 161, loss = 0.10388930\n",
"Iteration 162, loss = 0.10387576\n",
"Iteration 163, loss = 0.10385620\n",
"Iteration 164, loss = 0.10362085\n",
"Iteration 165, loss = 0.10393964\n",
"Iteration 166, loss = 0.10384072\n",
"Iteration 167, loss = 0.10377718\n",
"Iteration 168, loss = 0.10340822\n",
"Iteration 169, loss = 0.10341844\n",
"Iteration 170, loss = 0.10347638\n",
"Iteration 171, loss = 0.10337466\n",
"Iteration 172, loss = 0.10329675\n",
"Iteration 173, loss = 0.10387646\n",
"Iteration 174, loss = 0.10345848\n",
"Iteration 175, loss = 0.10302318\n",
"Iteration 176, loss = 0.10310969\n",
"Iteration 177, loss = 0.10290600\n",
"Iteration 178, loss = 0.10327319\n",
"Iteration 179, loss = 0.10294585\n",
"Iteration 180, loss = 0.10304405\n",
"Iteration 181, loss = 0.10296616\n",
"Iteration 182, loss = 0.10296369\n",
"Iteration 183, loss = 0.10299179\n",
"Iteration 184, loss = 0.10273305\n",
"Iteration 185, loss = 0.10307473\n",
"Iteration 186, loss = 0.10277699\n",
"Iteration 187, loss = 0.10288533\n",
"Iteration 188, loss = 0.10292227\n",
"Iteration 189, loss = 0.10284483\n",
"Iteration 190, loss = 0.10247452\n",
"Iteration 191, loss = 0.10218677\n",
"Iteration 192, loss = 0.10229900\n",
"Iteration 193, loss = 0.10226368\n",
"Iteration 194, loss = 0.10239503\n",
"Iteration 195, loss = 0.10237874\n",
"Iteration 196, loss = 0.10213965\n",
"Iteration 197, loss = 0.10241386\n",
"Iteration 198, loss = 0.10207845\n",
"Iteration 199, loss = 0.10205797\n",
"Iteration 200, loss = 0.10180838\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"C:\\Users\\LEm\\Anaconda3\\lib\\site-packages\\sklearn\\neural_network\\_multilayer_perceptron.py:614: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn't converged yet.\n",
" warnings.warn(\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Iteration 1, loss = 0.41668715\n",
"Iteration 2, loss = 0.21422159\n",
"Iteration 3, loss = 0.18225380\n",
"Iteration 4, loss = 0.17422062\n",
"Iteration 5, loss = 0.17071201\n",
"Iteration 6, loss = 0.16882231\n",
"Iteration 7, loss = 0.16738579\n",
"Iteration 8, loss = 0.16643921\n",
"Iteration 9, loss = 0.16572551\n",
"Iteration 10, loss = 0.16501167\n",
"Iteration 11, loss = 0.16457999\n",
"Iteration 12, loss = 0.16411704\n",
"Iteration 13, loss = 0.16399043\n",
"Iteration 14, loss = 0.16353731\n",
"Iteration 15, loss = 0.16353906\n",
"Iteration 16, loss = 0.16319336\n",
"Iteration 17, loss = 0.16311658\n",
"Iteration 18, loss = 0.16300894\n",
"Iteration 19, loss = 0.16282886\n",
"Iteration 20, loss = 0.16273838\n",
"Iteration 21, loss = 0.16280690\n",
"Iteration 22, loss = 0.16257304\n",
"Iteration 23, loss = 0.16256361\n",
"Iteration 24, loss = 0.16261487\n",
"Iteration 25, loss = 0.16254518\n",
"Iteration 26, loss = 0.16251474\n",
"Iteration 27, loss = 0.16237040\n",
"Iteration 28, loss = 0.16243855\n",
"Iteration 29, loss = 0.16246395\n",
"Iteration 30, loss = 0.16250733\n",
"Iteration 31, loss = 0.16235950\n",
"Iteration 32, loss = 0.16228198\n",
"Iteration 33, loss = 0.16254085\n",
"Iteration 34, loss = 0.16239217\n",
"Iteration 35, loss = 0.16232061\n",
"Iteration 36, loss = 0.16220906\n",
"Iteration 37, loss = 0.16226030\n",
"Iteration 38, loss = 0.16234926\n",
"Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
"Iteration 1, loss = 0.48191507\n",
"Iteration 2, loss = 0.24339277\n",
"Iteration 3, loss = 0.19199365\n",
"Iteration 4, loss = 0.17663471\n",
"Iteration 5, loss = 0.17130948\n",
"Iteration 6, loss = 0.16874798\n",
"Iteration 7, loss = 0.16725312\n",
"Iteration 8, loss = 0.16598919\n",
"Iteration 9, loss = 0.16500331\n",
"Iteration 10, loss = 0.16443391\n",
"Iteration 11, loss = 0.16398254\n",
"Iteration 12, loss = 0.16340552\n",
"Iteration 13, loss = 0.16294988\n",
"Iteration 14, loss = 0.16264468\n",
"Iteration 15, loss = 0.16231155\n",
"Iteration 16, loss = 0.16223229\n",
"Iteration 17, loss = 0.16196864\n",
"Iteration 18, loss = 0.16195079\n",
"Iteration 19, loss = 0.16170315\n",
"Iteration 20, loss = 0.16161418\n",
"Iteration 21, loss = 0.16162198\n",
"Iteration 22, loss = 0.16149451\n",
"Iteration 23, loss = 0.16140167\n",
"Iteration 24, loss = 0.16111060\n",
"Iteration 25, loss = 0.16132198\n",
"Iteration 26, loss = 0.16125118\n",
"Iteration 27, loss = 0.16131307\n",
"Iteration 28, loss = 0.16111395\n",
"Iteration 29, loss = 0.16117957\n",
"Iteration 30, loss = 0.16102824\n",
"Iteration 31, loss = 0.16121408\n",
"Iteration 32, loss = 0.16098524\n",
"Iteration 33, loss = 0.16102547\n",
"Iteration 34, loss = 0.16078069\n",
"Iteration 35, loss = 0.16101862\n",
"Iteration 36, loss = 0.16094243\n",
"Iteration 37, loss = 0.16089207\n",
"Iteration 38, loss = 0.16095081\n",
"Iteration 39, loss = 0.16111794\n",
"Iteration 40, loss = 0.16086674\n",
"Iteration 41, loss = 0.16087162\n",
"Iteration 42, loss = 0.16080879\n",
"Iteration 43, loss = 0.16093887\n",
"Iteration 44, loss = 0.16089855\n",
"Iteration 45, loss = 0.16081631\n",
"Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
"Iteration 1, loss = 0.39195143\n",
"Iteration 2, loss = 0.22013538\n",
"Iteration 3, loss = 0.18478643\n",
"Iteration 4, loss = 0.17509033\n",
"Iteration 5, loss = 0.17107185\n",
"Iteration 6, loss = 0.16891805\n",
"Iteration 7, loss = 0.16736273\n",
"Iteration 8, loss = 0.16645330\n",
"Iteration 9, loss = 0.16570925\n",
"Iteration 10, loss = 0.16530029\n",
"Iteration 11, loss = 0.16485672\n",
"Iteration 12, loss = 0.16460481\n",
"Iteration 13, loss = 0.16413911\n",
"Iteration 14, loss = 0.16390994\n",
"Iteration 15, loss = 0.16372876\n",
"Iteration 16, loss = 0.16361571\n",
"Iteration 17, loss = 0.16345507\n",
"Iteration 18, loss = 0.16327471\n",
"Iteration 19, loss = 0.16308704\n",
"Iteration 20, loss = 0.16310740\n",
"Iteration 21, loss = 0.16308848\n",
"Iteration 22, loss = 0.16305184\n",
"Iteration 23, loss = 0.16297524\n",
"Iteration 24, loss = 0.16296901\n",
"Iteration 25, loss = 0.16277372\n",
"Iteration 26, loss = 0.16278920\n",
"Iteration 27, loss = 0.16275881\n",
"Iteration 28, loss = 0.16288016\n",
"Iteration 29, loss = 0.16261763\n",
"Iteration 30, loss = 0.16263472\n",
"Iteration 31, loss = 0.16270044\n",
"Iteration 32, loss = 0.16255276\n",
"Iteration 33, loss = 0.16264847\n",
"Iteration 34, loss = 0.16254975\n",
"Iteration 35, loss = 0.16266075\n",
"Iteration 36, loss = 0.16263657\n",
"Iteration 37, loss = 0.16247620\n",
"Iteration 38, loss = 0.16264803\n",
"Iteration 39, loss = 0.16260800\n",
"Iteration 40, loss = 0.16248178\n",
"Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
"Iteration 1, loss = 0.44637612\n",
"Iteration 2, loss = 0.23840662\n",
"Iteration 3, loss = 0.19061938\n",
"Iteration 4, loss = 0.17645486\n",
"Iteration 5, loss = 0.17132244\n",
"Iteration 6, loss = 0.16862601\n",
"Iteration 7, loss = 0.16706197\n",
"Iteration 8, loss = 0.16603102\n",
"Iteration 9, loss = 0.16511959\n",
"Iteration 10, loss = 0.16452488\n",
"Iteration 11, loss = 0.16391228\n",
"Iteration 12, loss = 0.16372109\n",
"Iteration 13, loss = 0.16342660\n",
"Iteration 14, loss = 0.16302596\n",
"Iteration 15, loss = 0.16277043\n",
"Iteration 16, loss = 0.16255968\n",
"Iteration 17, loss = 0.16241725\n",
"Iteration 18, loss = 0.16239325\n",
"Iteration 19, loss = 0.16210642\n",
"Iteration 20, loss = 0.16194240\n",
"Iteration 21, loss = 0.16181839\n",
"Iteration 22, loss = 0.16176964\n",
"Iteration 23, loss = 0.16189447\n",
"Iteration 24, loss = 0.16171833\n",
"Iteration 25, loss = 0.16177361\n",
"Iteration 26, loss = 0.16153627\n",
"Iteration 27, loss = 0.16143051\n",
"Iteration 28, loss = 0.16158241\n",
"Iteration 29, loss = 0.16139649\n",
"Iteration 30, loss = 0.16143275\n",
"Iteration 31, loss = 0.16131403\n",
"Iteration 32, loss = 0.16138104\n",
"Iteration 33, loss = 0.16125152\n",
"Iteration 34, loss = 0.16137472\n",
"Iteration 35, loss = 0.16137051\n",
"Iteration 36, loss = 0.16128633\n",
"Iteration 37, loss = 0.16150957\n",
"Iteration 38, loss = 0.16117982\n",
"Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
"Iteration 1, loss = 0.43257671\n",
"Iteration 2, loss = 0.23497622\n",
"Iteration 3, loss = 0.18966289\n",
"Iteration 4, loss = 0.17642859\n",
"Iteration 5, loss = 0.17125170\n",
"Iteration 6, loss = 0.16885540\n",
"Iteration 7, loss = 0.16732939\n",
"Iteration 8, loss = 0.16658797\n",
"Iteration 9, loss = 0.16553894\n",
"Iteration 10, loss = 0.16489846\n",
"Iteration 11, loss = 0.16429564\n",
"Iteration 12, loss = 0.16384642\n",
"Iteration 13, loss = 0.16345855\n",
"Iteration 14, loss = 0.16315985\n",
"Iteration 15, loss = 0.16297821\n",
"Iteration 16, loss = 0.16268160\n",
"Iteration 17, loss = 0.16259865\n",
"Iteration 18, loss = 0.16248087\n",
"Iteration 19, loss = 0.16224051\n",
"Iteration 20, loss = 0.16220238\n",
"Iteration 21, loss = 0.16211735\n",
"Iteration 22, loss = 0.16187715\n",
"Iteration 23, loss = 0.16195890\n",
"Iteration 24, loss = 0.16184642\n",
"Iteration 25, loss = 0.16178602\n",
"Iteration 26, loss = 0.16167510\n",
"Iteration 27, loss = 0.16165048\n",
"Iteration 28, loss = 0.16167353\n",
"Iteration 29, loss = 0.16168451\n",
"Iteration 30, loss = 0.16147980\n",
"Iteration 31, loss = 0.16169261\n",
"Iteration 32, loss = 0.16155695\n",
"Iteration 33, loss = 0.16153449\n",
"Iteration 34, loss = 0.16143803\n",
"Iteration 35, loss = 0.16151093\n",
"Iteration 36, loss = 0.16164765\n",
"Iteration 37, loss = 0.16145608\n",
"Iteration 38, loss = 0.16127253\n",
"Iteration 39, loss = 0.16138978\n",
"Iteration 40, loss = 0.16133498\n",
"Iteration 41, loss = 0.16115082\n",
"Iteration 42, loss = 0.16133886\n",
"Iteration 43, loss = 0.16131283\n",
"Iteration 44, loss = 0.16133345\n",
"Iteration 45, loss = 0.16140229\n",
"Iteration 46, loss = 0.16128470\n",
"Iteration 47, loss = 0.16134754\n",
"Iteration 48, loss = 0.16130554\n",
"Iteration 49, loss = 0.16118600\n",
"Iteration 50, loss = 0.16125287\n",
"Iteration 51, loss = 0.16126849\n",
"Iteration 52, loss = 0.16121503\n",
"Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
"Iteration 1, loss = 0.50038568\n",
"Iteration 2, loss = 0.24124079\n",
"Iteration 3, loss = 0.19313945\n",
"Iteration 4, loss = 0.18028138\n",
"Iteration 5, loss = 0.17532989\n",
"Iteration 6, loss = 0.17282660\n",
"Iteration 7, loss = 0.17124103\n",
"Iteration 8, loss = 0.17009378\n",
"Iteration 9, loss = 0.16926937\n",
"Iteration 10, loss = 0.16858584\n",
"Iteration 11, loss = 0.16809702\n",
"Iteration 12, loss = 0.16767360\n",
"Iteration 13, loss = 0.16734716\n",
"Iteration 14, loss = 0.16706340\n",
"Iteration 15, loss = 0.16678703\n",
"Iteration 16, loss = 0.16648413\n",
"Iteration 17, loss = 0.16642396\n",
"Iteration 18, loss = 0.16631550\n",
"Iteration 19, loss = 0.16602951\n",
"Iteration 20, loss = 0.16601701\n",
"Iteration 21, loss = 0.16596066\n",
"Iteration 22, loss = 0.16580818\n",
"Iteration 23, loss = 0.16565391\n",
"Iteration 24, loss = 0.16554454\n",
"Iteration 25, loss = 0.16551129\n",
"Iteration 26, loss = 0.16552920\n",
"Iteration 27, loss = 0.16536093\n",
"Iteration 28, loss = 0.16526235\n",
"Iteration 29, loss = 0.16532681\n",
"Iteration 30, loss = 0.16484120\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Iteration 31, loss = 0.16510676\n",
"Iteration 32, loss = 0.16506776\n",
"Iteration 33, loss = 0.16511343\n",
"Iteration 34, loss = 0.16521618\n",
"Iteration 35, loss = 0.16512188\n",
"Iteration 36, loss = 0.16499861\n",
"Iteration 37, loss = 0.16492762\n",
"Iteration 38, loss = 0.16520707\n",
"Iteration 39, loss = 0.16493184\n",
"Iteration 40, loss = 0.16503004\n",
"Iteration 41, loss = 0.16510923\n",
"Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
"Iteration 1, loss = 0.52093640\n",
"Iteration 2, loss = 0.39700210\n",
"Iteration 3, loss = 0.35313258\n",
"Iteration 4, loss = 0.32855498\n",
"Iteration 5, loss = 0.31514881\n",
"Iteration 6, loss = 0.30696557\n",
"Iteration 7, loss = 0.30083152\n",
"Iteration 8, loss = 0.29639204\n",
"Iteration 9, loss = 0.29245396\n",
"Iteration 10, loss = 0.28950530\n",
"Iteration 11, loss = 0.28666998\n",
"Iteration 12, loss = 0.28443175\n",
"Iteration 13, loss = 0.28206755\n",
"Iteration 14, loss = 0.28051059\n",
"Iteration 15, loss = 0.27895386\n",
"Iteration 16, loss = 0.27774882\n",
"Iteration 17, loss = 0.27647275\n",
"Iteration 18, loss = 0.27574693\n",
"Iteration 19, loss = 0.27492674\n",
"Iteration 20, loss = 0.27427292\n",
"Iteration 21, loss = 0.27370726\n",
"Iteration 22, loss = 0.27331637\n",
"Iteration 23, loss = 0.27278330\n",
"Iteration 24, loss = 0.27243648\n",
"Iteration 25, loss = 0.27232131\n",
"Iteration 26, loss = 0.27189985\n",
"Iteration 27, loss = 0.27167756\n",
"Iteration 28, loss = 0.27143644\n",
"Iteration 29, loss = 0.27148855\n",
"Iteration 30, loss = 0.27106595\n",
"Iteration 31, loss = 0.27087852\n",
"Iteration 32, loss = 0.27112215\n",
"Iteration 33, loss = 0.27061141\n",
"Iteration 34, loss = 0.27033452\n",
"Iteration 35, loss = 0.27033536\n",
"Iteration 36, loss = 0.27027519\n",
"Iteration 37, loss = 0.27012210\n",
"Iteration 38, loss = 0.26990245\n",
"Iteration 39, loss = 0.26985257\n",
"Iteration 40, loss = 0.26967335\n",
"Iteration 41, loss = 0.26979807\n",
"Iteration 42, loss = 0.26973970\n",
"Iteration 43, loss = 0.26959121\n",
"Iteration 44, loss = 0.26939396\n",
"Iteration 45, loss = 0.26943711\n",
"Iteration 46, loss = 0.26942787\n",
"Iteration 47, loss = 0.26946772\n",
"Iteration 48, loss = 0.26920807\n",
"Iteration 49, loss = 0.26926303\n",
"Iteration 50, loss = 0.26918730\n",
"Iteration 51, loss = 0.26921458\n",
"Iteration 52, loss = 0.26891725\n",
"Iteration 53, loss = 0.26916443\n",
"Iteration 54, loss = 0.26906710\n",
"Iteration 55, loss = 0.26896423\n",
"Iteration 56, loss = 0.26908252\n",
"Iteration 57, loss = 0.26932604\n",
"Iteration 58, loss = 0.26891101\n",
"Iteration 59, loss = 0.26874092\n",
"Iteration 60, loss = 0.26883267\n",
"Iteration 61, loss = 0.26857231\n",
"Iteration 62, loss = 0.26869525\n",
"Iteration 63, loss = 0.26894700\n",
"Iteration 64, loss = 0.26870360\n",
"Iteration 65, loss = 0.26872992\n",
"Iteration 66, loss = 0.26894839\n",
"Iteration 67, loss = 0.26857116\n",
"Iteration 68, loss = 0.26870451\n",
"Iteration 69, loss = 0.26839867\n",
"Iteration 70, loss = 0.26862330\n",
"Iteration 71, loss = 0.26861923\n",
"Iteration 72, loss = 0.26859351\n",
"Iteration 73, loss = 0.26858364\n",
"Iteration 74, loss = 0.26856694\n",
"Iteration 75, loss = 0.26863330\n",
"Iteration 76, loss = 0.26847346\n",
"Iteration 77, loss = 0.26864300\n",
"Iteration 78, loss = 0.26865532\n",
"Iteration 79, loss = 0.26840727\n",
"Iteration 80, loss = 0.26846196\n",
"Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
"Iteration 1, loss = 0.52067013\n",
"Iteration 2, loss = 0.40697868\n",
"Iteration 3, loss = 0.36426572\n",
"Iteration 4, loss = 0.33799498\n",
"Iteration 5, loss = 0.32272211\n",
"Iteration 6, loss = 0.31311660\n",
"Iteration 7, loss = 0.30608583\n",
"Iteration 8, loss = 0.30038479\n",
"Iteration 9, loss = 0.29584438\n",
"Iteration 10, loss = 0.29184458\n",
"Iteration 11, loss = 0.28885828\n",
"Iteration 12, loss = 0.28640208\n",
"Iteration 13, loss = 0.28406330\n",
"Iteration 14, loss = 0.28237673\n",
"Iteration 15, loss = 0.28071173\n",
"Iteration 16, loss = 0.27941938\n",
"Iteration 17, loss = 0.27802882\n",
"Iteration 18, loss = 0.27710255\n",
"Iteration 19, loss = 0.27585926\n",
"Iteration 20, loss = 0.27512096\n",
"Iteration 21, loss = 0.27422429\n",
"Iteration 22, loss = 0.27379237\n",
"Iteration 23, loss = 0.27316543\n",
"Iteration 24, loss = 0.27263224\n",
"Iteration 25, loss = 0.27237537\n",
"Iteration 26, loss = 0.27178889\n",
"Iteration 27, loss = 0.27137835\n",
"Iteration 28, loss = 0.27128723\n",
"Iteration 29, loss = 0.27079612\n",
"Iteration 30, loss = 0.27073328\n",
"Iteration 31, loss = 0.27020564\n",
"Iteration 32, loss = 0.27011678\n",
"Iteration 33, loss = 0.26967891\n",
"Iteration 34, loss = 0.26963686\n",
"Iteration 35, loss = 0.26935036\n",
"Iteration 36, loss = 0.26915845\n",
"Iteration 37, loss = 0.26904494\n",
"Iteration 38, loss = 0.26887802\n",
"Iteration 39, loss = 0.26878128\n",
"Iteration 40, loss = 0.26864049\n",
"Iteration 41, loss = 0.26879000\n",
"Iteration 42, loss = 0.26858464\n",
"Iteration 43, loss = 0.26846842\n",
"Iteration 44, loss = 0.26819494\n",
"Iteration 45, loss = 0.26837004\n",
"Iteration 46, loss = 0.26786325\n",
"Iteration 47, loss = 0.26816642\n",
"Iteration 48, loss = 0.26793579\n",
"Iteration 49, loss = 0.26794743\n",
"Iteration 50, loss = 0.26772333\n",
"Iteration 51, loss = 0.26764264\n",
"Iteration 52, loss = 0.26775127\n",
"Iteration 53, loss = 0.26768398\n",
"Iteration 54, loss = 0.26754814\n",
"Iteration 55, loss = 0.26763908\n",
"Iteration 56, loss = 0.26745192\n",
"Iteration 57, loss = 0.26754566\n",
"Iteration 58, loss = 0.26743864\n",
"Iteration 59, loss = 0.26757689\n",
"Iteration 60, loss = 0.26737223\n",
"Iteration 61, loss = 0.26732396\n",
"Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
"Iteration 1, loss = 0.51418760\n",
"Iteration 2, loss = 0.40834220\n",
"Iteration 3, loss = 0.36505554\n",
"Iteration 4, loss = 0.33912323\n",
"Iteration 5, loss = 0.32477426\n",
"Iteration 6, loss = 0.31616076\n",
"Iteration 7, loss = 0.30973773\n",
"Iteration 8, loss = 0.30470289\n",
"Iteration 9, loss = 0.30049739\n",
"Iteration 10, loss = 0.29673963\n",
"Iteration 11, loss = 0.29357917\n",
"Iteration 12, loss = 0.29040712\n",
"Iteration 13, loss = 0.28793065\n",
"Iteration 14, loss = 0.28581276\n",
"Iteration 15, loss = 0.28387012\n",
"Iteration 16, loss = 0.28241637\n",
"Iteration 17, loss = 0.28112889\n",
"Iteration 18, loss = 0.28011969\n",
"Iteration 19, loss = 0.27911679\n",
"Iteration 20, loss = 0.27809597\n",
"Iteration 21, loss = 0.27784176\n",
"Iteration 22, loss = 0.27681030\n",
"Iteration 23, loss = 0.27614690\n",
"Iteration 24, loss = 0.27584316\n",
"Iteration 25, loss = 0.27518541\n",
"Iteration 26, loss = 0.27477774\n",
"Iteration 27, loss = 0.27453454\n",
"Iteration 28, loss = 0.27443223\n",
"Iteration 29, loss = 0.27398992\n",
"Iteration 30, loss = 0.27353748\n",
"Iteration 31, loss = 0.27348071\n",
"Iteration 32, loss = 0.27313194\n",
"Iteration 33, loss = 0.27294346\n",
"Iteration 34, loss = 0.27285727\n",
"Iteration 35, loss = 0.27262266\n",
"Iteration 36, loss = 0.27256004\n",
"Iteration 37, loss = 0.27241038\n",
"Iteration 38, loss = 0.27228605\n",
"Iteration 39, loss = 0.27196102\n",
"Iteration 40, loss = 0.27209869\n",
"Iteration 41, loss = 0.27179865\n",
"Iteration 42, loss = 0.27154534\n",
"Iteration 43, loss = 0.27174073\n",
"Iteration 44, loss = 0.27157187\n",
"Iteration 45, loss = 0.27136242\n",
"Iteration 46, loss = 0.27150045\n",
"Iteration 47, loss = 0.27103564\n",
"Iteration 48, loss = 0.27109806\n",
"Iteration 49, loss = 0.27093214\n",
"Iteration 50, loss = 0.27054657\n",
"Iteration 51, loss = 0.27094162\n",
"Iteration 52, loss = 0.27083812\n",
"Iteration 53, loss = 0.27063764\n",
"Iteration 54, loss = 0.27068103\n",
"Iteration 55, loss = 0.27068143\n",
"Iteration 56, loss = 0.27035990\n",
"Iteration 57, loss = 0.27045392\n",
"Iteration 58, loss = 0.27020297\n",
"Iteration 59, loss = 0.27018254\n",
"Iteration 60, loss = 0.27006469\n",
"Iteration 61, loss = 0.27034893\n",
"Iteration 62, loss = 0.26999106\n",
"Iteration 63, loss = 0.26991218\n",
"Iteration 64, loss = 0.27004346\n",
"Iteration 65, loss = 0.27004470\n",
"Iteration 66, loss = 0.26996833\n",
"Iteration 67, loss = 0.26966791\n",
"Iteration 68, loss = 0.26994083\n",
"Iteration 69, loss = 0.26949067\n",
"Iteration 70, loss = 0.26958276\n",
"Iteration 71, loss = 0.26977801\n",
"Iteration 72, loss = 0.26958607\n",
"Iteration 73, loss = 0.26957722\n",
"Iteration 74, loss = 0.26948411\n",
"Iteration 75, loss = 0.26959180\n",
"Iteration 76, loss = 0.26919934\n",
"Iteration 77, loss = 0.26930252\n",
"Iteration 78, loss = 0.26968473\n",
"Iteration 79, loss = 0.26960664\n",
"Iteration 80, loss = 0.26944008\n",
"Iteration 81, loss = 0.26928488\n",
"Iteration 82, loss = 0.26944148\n",
"Iteration 83, loss = 0.26952216\n",
"Iteration 84, loss = 0.26894826\n",
"Iteration 85, loss = 0.26932900\n",
"Iteration 86, loss = 0.26895570\n",
"Iteration 87, loss = 0.26913112\n",
"Iteration 88, loss = 0.26901640\n",
"Iteration 89, loss = 0.26894835\n",
"Iteration 90, loss = 0.26896113\n",
"Iteration 91, loss = 0.26869196\n",
"Iteration 92, loss = 0.26885189\n",
"Iteration 93, loss = 0.26876988\n",
"Iteration 94, loss = 0.26886942\n",
"Iteration 95, loss = 0.26878706\n",
"Iteration 96, loss = 0.26881235\n",
"Iteration 97, loss = 0.26869494\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Iteration 98, loss = 0.26864100\n",
"Iteration 99, loss = 0.26868262\n",
"Iteration 100, loss = 0.26856699\n",
"Iteration 101, loss = 0.26852363\n",
"Iteration 102, loss = 0.26871888\n",
"Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
"Iteration 1, loss = 0.53752755\n",
"Iteration 2, loss = 0.41545320\n",
"Iteration 3, loss = 0.37091737\n",
"Iteration 4, loss = 0.34163133\n",
"Iteration 5, loss = 0.32329972\n",
"Iteration 6, loss = 0.31183491\n",
"Iteration 7, loss = 0.30428276\n",
"Iteration 8, loss = 0.29845512\n",
"Iteration 9, loss = 0.29408311\n",
"Iteration 10, loss = 0.29052246\n",
"Iteration 11, loss = 0.28740330\n",
"Iteration 12, loss = 0.28506283\n",
"Iteration 13, loss = 0.28296474\n",
"Iteration 14, loss = 0.28157030\n",
"Iteration 15, loss = 0.27999693\n",
"Iteration 16, loss = 0.27873502\n",
"Iteration 17, loss = 0.27782493\n",
"Iteration 18, loss = 0.27670022\n",
"Iteration 19, loss = 0.27614347\n",
"Iteration 20, loss = 0.27513076\n",
"Iteration 21, loss = 0.27449171\n",
"Iteration 22, loss = 0.27392166\n",
"Iteration 23, loss = 0.27301937\n",
"Iteration 24, loss = 0.27286107\n",
"Iteration 25, loss = 0.27236645\n",
"Iteration 26, loss = 0.27164440\n",
"Iteration 27, loss = 0.27137294\n",
"Iteration 28, loss = 0.27076386\n",
"Iteration 29, loss = 0.27061742\n",
"Iteration 30, loss = 0.27036802\n",
"Iteration 31, loss = 0.27004843\n",
"Iteration 32, loss = 0.26952848\n",
"Iteration 33, loss = 0.26949673\n",
"Iteration 34, loss = 0.26930769\n",
"Iteration 35, loss = 0.26913095\n",
"Iteration 36, loss = 0.26914683\n",
"Iteration 37, loss = 0.26883077\n",
"Iteration 38, loss = 0.26853603\n",
"Iteration 39, loss = 0.26851900\n",
"Iteration 40, loss = 0.26829958\n",
"Iteration 41, loss = 0.26797448\n",
"Iteration 42, loss = 0.26799799\n",
"Iteration 43, loss = 0.26780602\n",
"Iteration 44, loss = 0.26767868\n",
"Iteration 45, loss = 0.26817084\n",
"Iteration 46, loss = 0.26781227\n",
"Iteration 47, loss = 0.26738656\n",
"Iteration 48, loss = 0.26737326\n",
"Iteration 49, loss = 0.26754243\n",
"Iteration 50, loss = 0.26711539\n",
"Iteration 51, loss = 0.26738396\n",
"Iteration 52, loss = 0.26736290\n",
"Iteration 53, loss = 0.26713062\n",
"Iteration 54, loss = 0.26691386\n",
"Iteration 55, loss = 0.26744284\n",
"Iteration 56, loss = 0.26692013\n",
"Iteration 57, loss = 0.26696260\n",
"Iteration 58, loss = 0.26694817\n",
"Iteration 59, loss = 0.26663657\n",
"Iteration 60, loss = 0.26691641\n",
"Iteration 61, loss = 0.26681619\n",
"Iteration 62, loss = 0.26668981\n",
"Iteration 63, loss = 0.26668220\n",
"Iteration 64, loss = 0.26662580\n",
"Iteration 65, loss = 0.26681476\n",
"Iteration 66, loss = 0.26655722\n",
"Iteration 67, loss = 0.26637047\n",
"Iteration 68, loss = 0.26669173\n",
"Iteration 69, loss = 0.26675649\n",
"Iteration 70, loss = 0.26659882\n",
"Iteration 71, loss = 0.26670800\n",
"Iteration 72, loss = 0.26667066\n",
"Iteration 73, loss = 0.26615916\n",
"Iteration 74, loss = 0.26640426\n",
"Iteration 75, loss = 0.26637163\n",
"Iteration 76, loss = 0.26646324\n",
"Iteration 77, loss = 0.26641424\n",
"Iteration 78, loss = 0.26635593\n",
"Iteration 79, loss = 0.26646604\n",
"Iteration 80, loss = 0.26636665\n",
"Iteration 81, loss = 0.26634987\n",
"Iteration 82, loss = 0.26634673\n",
"Iteration 83, loss = 0.26614262\n",
"Iteration 84, loss = 0.26611153\n",
"Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
"Iteration 1, loss = 0.55417646\n",
"Iteration 2, loss = 0.41786244\n",
"Iteration 3, loss = 0.37736534\n",
"Iteration 4, loss = 0.35180022\n",
"Iteration 5, loss = 0.33476454\n",
"Iteration 6, loss = 0.32286512\n",
"Iteration 7, loss = 0.31466176\n",
"Iteration 8, loss = 0.30892521\n",
"Iteration 9, loss = 0.30411340\n",
"Iteration 10, loss = 0.30013856\n",
"Iteration 11, loss = 0.29644017\n",
"Iteration 12, loss = 0.29319311\n",
"Iteration 13, loss = 0.29000098\n",
"Iteration 14, loss = 0.28780967\n",
"Iteration 15, loss = 0.28585107\n",
"Iteration 16, loss = 0.28375700\n",
"Iteration 17, loss = 0.28250352\n",
"Iteration 18, loss = 0.28085560\n",
"Iteration 19, loss = 0.27969141\n",
"Iteration 20, loss = 0.27859331\n",
"Iteration 21, loss = 0.27741268\n",
"Iteration 22, loss = 0.27666013\n",
"Iteration 23, loss = 0.27593755\n",
"Iteration 24, loss = 0.27500432\n",
"Iteration 25, loss = 0.27442587\n",
"Iteration 26, loss = 0.27398568\n",
"Iteration 27, loss = 0.27374676\n",
"Iteration 28, loss = 0.27333176\n",
"Iteration 29, loss = 0.27290932\n",
"Iteration 30, loss = 0.27261504\n",
"Iteration 31, loss = 0.27238331\n",
"Iteration 32, loss = 0.27206329\n",
"Iteration 33, loss = 0.27160029\n",
"Iteration 34, loss = 0.27127566\n",
"Iteration 35, loss = 0.27123725\n",
"Iteration 36, loss = 0.27119248\n",
"Iteration 37, loss = 0.27107824\n",
"Iteration 38, loss = 0.27062394\n",
"Iteration 39, loss = 0.27051303\n",
"Iteration 40, loss = 0.27031007\n",
"Iteration 41, loss = 0.27019152\n",
"Iteration 42, loss = 0.27007273\n",
"Iteration 43, loss = 0.26998717\n",
"Iteration 44, loss = 0.26990951\n",
"Iteration 45, loss = 0.26974781\n",
"Iteration 46, loss = 0.26967376\n",
"Iteration 47, loss = 0.26969748\n",
"Iteration 48, loss = 0.26933373\n",
"Iteration 49, loss = 0.26952601\n",
"Iteration 50, loss = 0.26959888\n",
"Iteration 51, loss = 0.26910809\n",
"Iteration 52, loss = 0.26944121\n",
"Iteration 53, loss = 0.26916637\n",
"Iteration 54, loss = 0.26913956\n",
"Iteration 55, loss = 0.26883693\n",
"Iteration 56, loss = 0.26906499\n",
"Iteration 57, loss = 0.26858486\n",
"Iteration 58, loss = 0.26870281\n",
"Iteration 59, loss = 0.26886687\n",
"Iteration 60, loss = 0.26873953\n",
"Iteration 61, loss = 0.26872467\n",
"Iteration 62, loss = 0.26868937\n",
"Iteration 63, loss = 0.26847032\n",
"Iteration 64, loss = 0.26843159\n",
"Iteration 65, loss = 0.26868601\n",
"Iteration 66, loss = 0.26865940\n",
"Iteration 67, loss = 0.26833320\n",
"Iteration 68, loss = 0.26845477\n",
"Iteration 69, loss = 0.26823761\n",
"Iteration 70, loss = 0.26856816\n",
"Iteration 71, loss = 0.26866468\n",
"Iteration 72, loss = 0.26824227\n",
"Iteration 73, loss = 0.26812448\n",
"Iteration 74, loss = 0.26820210\n",
"Iteration 75, loss = 0.26793508\n",
"Iteration 76, loss = 0.26855352\n",
"Iteration 77, loss = 0.26862895\n",
"Iteration 78, loss = 0.26828224\n",
"Iteration 79, loss = 0.26817133\n",
"Iteration 80, loss = 0.26814755\n",
"Iteration 81, loss = 0.26814589\n",
"Iteration 82, loss = 0.26782163\n",
"Iteration 83, loss = 0.26794180\n",
"Iteration 84, loss = 0.26802679\n",
"Iteration 85, loss = 0.26799739\n",
"Iteration 86, loss = 0.26800251\n",
"Iteration 87, loss = 0.26800991\n",
"Iteration 88, loss = 0.26801807\n",
"Iteration 89, loss = 0.26789143\n",
"Iteration 90, loss = 0.26812628\n",
"Iteration 91, loss = 0.26779371\n",
"Iteration 92, loss = 0.26785574\n",
"Iteration 93, loss = 0.26786495\n",
"Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
"Iteration 1, loss = 0.58171500\n",
"Iteration 2, loss = 0.42442445\n",
"Iteration 3, loss = 0.37789934\n",
"Iteration 4, loss = 0.34864796\n",
"Iteration 5, loss = 0.33221159\n",
"Iteration 6, loss = 0.32231752\n",
"Iteration 7, loss = 0.31550616\n",
"Iteration 8, loss = 0.31005109\n",
"Iteration 9, loss = 0.30518409\n",
"Iteration 10, loss = 0.30101056\n",
"Iteration 11, loss = 0.29737004\n",
"Iteration 12, loss = 0.29413970\n",
"Iteration 13, loss = 0.29139466\n",
"Iteration 14, loss = 0.28853391\n",
"Iteration 15, loss = 0.28630816\n",
"Iteration 16, loss = 0.28471428\n",
"Iteration 17, loss = 0.28292136\n",
"Iteration 18, loss = 0.28163754\n",
"Iteration 19, loss = 0.28054637\n",
"Iteration 20, loss = 0.27964451\n",
"Iteration 21, loss = 0.27882086\n",
"Iteration 22, loss = 0.27818478\n",
"Iteration 23, loss = 0.27754944\n",
"Iteration 24, loss = 0.27716734\n",
"Iteration 25, loss = 0.27657184\n",
"Iteration 26, loss = 0.27620022\n",
"Iteration 27, loss = 0.27570704\n",
"Iteration 28, loss = 0.27582507\n",
"Iteration 29, loss = 0.27517021\n",
"Iteration 30, loss = 0.27511293\n",
"Iteration 31, loss = 0.27488943\n",
"Iteration 32, loss = 0.27469501\n",
"Iteration 33, loss = 0.27437342\n",
"Iteration 34, loss = 0.27412561\n",
"Iteration 35, loss = 0.27389195\n",
"Iteration 36, loss = 0.27415465\n",
"Iteration 37, loss = 0.27360635\n",
"Iteration 38, loss = 0.27359353\n",
"Iteration 39, loss = 0.27343403\n",
"Iteration 40, loss = 0.27322265\n",
"Iteration 41, loss = 0.27315087\n",
"Iteration 42, loss = 0.27339929\n",
"Iteration 43, loss = 0.27300211\n",
"Iteration 44, loss = 0.27275049\n",
"Iteration 45, loss = 0.27271094\n",
"Iteration 46, loss = 0.27253825\n",
"Iteration 47, loss = 0.27242856\n",
"Iteration 48, loss = 0.27278881\n",
"Iteration 49, loss = 0.27243925\n",
"Iteration 50, loss = 0.27230319\n",
"Iteration 51, loss = 0.27230457\n",
"Iteration 52, loss = 0.27212067\n",
"Iteration 53, loss = 0.27202717\n",
"Iteration 54, loss = 0.27217929\n",
"Iteration 55, loss = 0.27211784\n",
"Iteration 56, loss = 0.27188087\n",
"Iteration 57, loss = 0.27181987\n",
"Iteration 58, loss = 0.27158335\n",
"Iteration 59, loss = 0.27160395\n",
"Iteration 60, loss = 0.27166815\n",
"Iteration 61, loss = 0.27158767\n",
"Iteration 62, loss = 0.27154343\n",
"Iteration 63, loss = 0.27145855\n",
"Iteration 64, loss = 0.27158621\n",
"Iteration 65, loss = 0.27106787\n",
"Iteration 66, loss = 0.27155718\n",
"Iteration 67, loss = 0.27114228\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Iteration 68, loss = 0.27119385\n",
"Iteration 69, loss = 0.27104202\n",
"Iteration 70, loss = 0.27104666\n",
"Iteration 71, loss = 0.27128146\n",
"Iteration 72, loss = 0.27088960\n",
"Iteration 73, loss = 0.27092205\n",
"Iteration 74, loss = 0.27107855\n",
"Iteration 75, loss = 0.27099819\n",
"Iteration 76, loss = 0.27093377\n",
"Iteration 77, loss = 0.27082212\n",
"Iteration 78, loss = 0.27072426\n",
"Iteration 79, loss = 0.27068367\n",
"Iteration 80, loss = 0.27055109\n",
"Iteration 81, loss = 0.27069769\n",
"Iteration 82, loss = 0.27070335\n",
"Iteration 83, loss = 0.27050640\n",
"Iteration 84, loss = 0.27046128\n",
"Iteration 85, loss = 0.27083913\n",
"Iteration 86, loss = 0.27055725\n",
"Iteration 87, loss = 0.27076234\n",
"Iteration 88, loss = 0.27049103\n",
"Iteration 89, loss = 0.27044280\n",
"Iteration 90, loss = 0.27044328\n",
"Iteration 91, loss = 0.27020252\n",
"Iteration 92, loss = 0.27037096\n",
"Iteration 93, loss = 0.27063893\n",
"Iteration 94, loss = 0.27014957\n",
"Iteration 95, loss = 0.27033600\n",
"Iteration 96, loss = 0.27005636\n",
"Iteration 97, loss = 0.27031448\n",
"Iteration 98, loss = 0.27037434\n",
"Iteration 99, loss = 0.27022980\n",
"Iteration 100, loss = 0.27017648\n",
"Iteration 101, loss = 0.27018302\n",
"Iteration 102, loss = 0.26998546\n",
"Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
"Iteration 1, loss = 0.39590029\n",
"Iteration 2, loss = 0.18863312\n",
"Iteration 3, loss = 0.13277092\n",
"Iteration 4, loss = 0.11125892\n",
"Iteration 5, loss = 0.10189994\n",
"Iteration 6, loss = 0.09676659\n",
"Iteration 7, loss = 0.09327884\n",
"Iteration 8, loss = 0.09048151\n",
"Iteration 9, loss = 0.08873792\n",
"Iteration 10, loss = 0.08671951\n",
"Iteration 11, loss = 0.08516523\n",
"Iteration 12, loss = 0.08428249\n",
"Iteration 13, loss = 0.08271297\n",
"Iteration 14, loss = 0.08155716\n",
"Iteration 15, loss = 0.08057971\n",
"Iteration 16, loss = 0.07951212\n",
"Iteration 17, loss = 0.07884609\n",
"Iteration 18, loss = 0.07773810\n",
"Iteration 19, loss = 0.07731524\n",
"Iteration 20, loss = 0.07637070\n",
"Iteration 21, loss = 0.07573916\n",
"Iteration 22, loss = 0.07530208\n",
"Iteration 23, loss = 0.07452215\n",
"Iteration 24, loss = 0.07392904\n",
"Iteration 25, loss = 0.07319351\n",
"Iteration 26, loss = 0.07256449\n",
"Iteration 27, loss = 0.07222149\n",
"Iteration 28, loss = 0.07155472\n",
"Iteration 29, loss = 0.07120264\n",
"Iteration 30, loss = 0.07098505\n",
"Iteration 31, loss = 0.07035310\n",
"Iteration 32, loss = 0.06986932\n",
"Iteration 33, loss = 0.06940201\n",
"Iteration 34, loss = 0.06896999\n",
"Iteration 35, loss = 0.06873327\n",
"Iteration 36, loss = 0.06814476\n",
"Iteration 37, loss = 0.06787436\n",
"Iteration 38, loss = 0.06738872\n",
"Iteration 39, loss = 0.06728036\n",
"Iteration 40, loss = 0.06653823\n",
"Iteration 41, loss = 0.06623632\n",
"Iteration 42, loss = 0.06581946\n",
"Iteration 43, loss = 0.06566018\n",
"Iteration 44, loss = 0.06498917\n",
"Iteration 45, loss = 0.06524690\n",
"Iteration 46, loss = 0.06447934\n",
"Iteration 47, loss = 0.06437922\n",
"Iteration 48, loss = 0.06409584\n",
"Iteration 49, loss = 0.06387085\n",
"Iteration 50, loss = 0.06358697\n",
"Iteration 51, loss = 0.06360456\n",
"Iteration 52, loss = 0.06315178\n",
"Iteration 53, loss = 0.06297928\n",
"Iteration 54, loss = 0.06271723\n",
"Iteration 55, loss = 0.06247720\n",
"Iteration 56, loss = 0.06199295\n",
"Iteration 57, loss = 0.06190375\n",
"Iteration 58, loss = 0.06164999\n",
"Iteration 59, loss = 0.06142939\n",
"Iteration 60, loss = 0.06126205\n",
"Iteration 61, loss = 0.06077590\n",
"Iteration 62, loss = 0.06106484\n",
"Iteration 63, loss = 0.06083536\n",
"Iteration 64, loss = 0.06009930\n",
"Iteration 65, loss = 0.05987173\n",
"Iteration 66, loss = 0.05986637\n",
"Iteration 67, loss = 0.05952085\n",
"Iteration 68, loss = 0.05966259\n",
"Iteration 69, loss = 0.05932986\n",
"Iteration 70, loss = 0.05924548\n",
"Iteration 71, loss = 0.05876374\n",
"Iteration 72, loss = 0.05856255\n",
"Iteration 73, loss = 0.05850577\n",
"Iteration 74, loss = 0.05859679\n",
"Iteration 75, loss = 0.05824544\n",
"Iteration 76, loss = 0.05802151\n",
"Iteration 77, loss = 0.05787359\n",
"Iteration 78, loss = 0.05758499\n",
"Iteration 79, loss = 0.05760042\n",
"Iteration 80, loss = 0.05733676\n",
"Iteration 81, loss = 0.05709747\n",
"Iteration 82, loss = 0.05692796\n",
"Iteration 83, loss = 0.05705119\n",
"Iteration 84, loss = 0.05662592\n",
"Iteration 85, loss = 0.05662278\n",
"Iteration 86, loss = 0.05635352\n",
"Iteration 87, loss = 0.05613229\n",
"Iteration 88, loss = 0.05590855\n",
"Iteration 89, loss = 0.05598730\n",
"Iteration 90, loss = 0.05597062\n",
"Iteration 91, loss = 0.05549360\n",
"Iteration 92, loss = 0.05540894\n",
"Iteration 93, loss = 0.05534499\n",
"Iteration 94, loss = 0.05523423\n",
"Iteration 95, loss = 0.05486142\n",
"Iteration 96, loss = 0.05478608\n",
"Iteration 97, loss = 0.05456750\n",
"Iteration 98, loss = 0.05451122\n",
"Iteration 99, loss = 0.05435233\n",
"Iteration 100, loss = 0.05415197\n",
"Iteration 101, loss = 0.05426642\n",
"Iteration 102, loss = 0.05367404\n",
"Iteration 103, loss = 0.05379074\n",
"Iteration 104, loss = 0.05354833\n",
"Iteration 105, loss = 0.05350301\n",
"Iteration 106, loss = 0.05332024\n",
"Iteration 107, loss = 0.05320479\n",
"Iteration 108, loss = 0.05289300\n",
"Iteration 109, loss = 0.05302873\n",
"Iteration 110, loss = 0.05294650\n",
"Iteration 111, loss = 0.05291062\n",
"Iteration 112, loss = 0.05271167\n",
"Iteration 113, loss = 0.05250064\n",
"Iteration 114, loss = 0.05263058\n",
"Iteration 115, loss = 0.05256499\n",
"Iteration 116, loss = 0.05181527\n",
"Iteration 117, loss = 0.05193478\n",
"Iteration 118, loss = 0.05176231\n",
"Iteration 119, loss = 0.05168413\n",
"Iteration 120, loss = 0.05148817\n",
"Iteration 121, loss = 0.05166713\n",
"Iteration 122, loss = 0.05144654\n",
"Iteration 123, loss = 0.05116291\n",
"Iteration 124, loss = 0.05105069\n",
"Iteration 125, loss = 0.05112675\n",
"Iteration 126, loss = 0.05086759\n",
"Iteration 127, loss = 0.05103457\n",
"Iteration 128, loss = 0.05088414\n",
"Iteration 129, loss = 0.05053898\n",
"Iteration 130, loss = 0.05055001\n",
"Iteration 131, loss = 0.05013423\n",
"Iteration 132, loss = 0.05036718\n",
"Iteration 133, loss = 0.05017071\n",
"Iteration 134, loss = 0.05010427\n",
"Iteration 135, loss = 0.04994620\n",
"Iteration 136, loss = 0.04969368\n",
"Iteration 137, loss = 0.04991139\n",
"Iteration 138, loss = 0.04953138\n",
"Iteration 139, loss = 0.04935083\n",
"Iteration 140, loss = 0.04974703\n",
"Iteration 141, loss = 0.04928881\n",
"Iteration 142, loss = 0.04929383\n",
"Iteration 143, loss = 0.04940985\n",
"Iteration 144, loss = 0.04913182\n",
"Iteration 145, loss = 0.04883544\n",
"Iteration 146, loss = 0.04906627\n",
"Iteration 147, loss = 0.04882401\n",
"Iteration 148, loss = 0.04892710\n",
"Iteration 149, loss = 0.04853422\n",
"Iteration 150, loss = 0.04856419\n",
"Iteration 151, loss = 0.04831959\n",
"Iteration 152, loss = 0.04835006\n",
"Iteration 153, loss = 0.04827142\n",
"Iteration 154, loss = 0.04834663\n",
"Iteration 155, loss = 0.04818857\n",
"Iteration 156, loss = 0.04822901\n",
"Iteration 157, loss = 0.04792393\n",
"Iteration 158, loss = 0.04783616\n",
"Iteration 159, loss = 0.04755049\n",
"Iteration 160, loss = 0.04794776\n",
"Iteration 161, loss = 0.04801173\n",
"Iteration 162, loss = 0.04769086\n",
"Iteration 163, loss = 0.04773415\n",
"Iteration 164, loss = 0.04750882\n",
"Iteration 165, loss = 0.04730343\n",
"Iteration 166, loss = 0.04725641\n",
"Iteration 167, loss = 0.04748860\n",
"Iteration 168, loss = 0.04714239\n",
"Iteration 169, loss = 0.04701922\n",
"Iteration 170, loss = 0.04705075\n",
"Iteration 171, loss = 0.04694375\n",
"Iteration 172, loss = 0.04688782\n",
"Iteration 173, loss = 0.04667484\n",
"Iteration 174, loss = 0.04647743\n",
"Iteration 175, loss = 0.04658409\n",
"Iteration 176, loss = 0.04681823\n",
"Iteration 177, loss = 0.04664139\n",
"Iteration 178, loss = 0.04661159\n",
"Iteration 179, loss = 0.04640368\n",
"Iteration 180, loss = 0.04602194\n",
"Iteration 181, loss = 0.04590572\n",
"Iteration 182, loss = 0.04619191\n",
"Iteration 183, loss = 0.04599217\n",
"Iteration 184, loss = 0.04651761\n",
"Iteration 185, loss = 0.04570888\n",
"Iteration 186, loss = 0.04586085\n",
"Iteration 187, loss = 0.04560051\n",
"Iteration 188, loss = 0.04552305\n",
"Iteration 189, loss = 0.04545501\n",
"Iteration 190, loss = 0.04548932\n",
"Iteration 191, loss = 0.04573657\n",
"Iteration 192, loss = 0.04539304\n",
"Iteration 193, loss = 0.04553453\n",
"Iteration 194, loss = 0.04509784\n",
"Iteration 195, loss = 0.04530722\n",
"Iteration 196, loss = 0.04548491\n",
"Iteration 197, loss = 0.04509598\n",
"Iteration 198, loss = 0.04524755\n",
"Iteration 199, loss = 0.04474322\n",
"Iteration 200, loss = 0.04471551\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"C:\\Users\\LEm\\Anaconda3\\lib\\site-packages\\sklearn\\neural_network\\_multilayer_perceptron.py:614: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn't converged yet.\n",
" warnings.warn(\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Iteration 1, loss = 0.48212501\n",
"Iteration 2, loss = 0.22842854\n",
"Iteration 3, loss = 0.16204802\n",
"Iteration 4, loss = 0.12986090\n",
"Iteration 5, loss = 0.11436321\n",
"Iteration 6, loss = 0.10600109\n",
"Iteration 7, loss = 0.10073173\n",
"Iteration 8, loss = 0.09715988\n",
"Iteration 9, loss = 0.09451988\n",
"Iteration 10, loss = 0.09242401\n",
"Iteration 11, loss = 0.09070378\n",
"Iteration 12, loss = 0.08908321\n",
"Iteration 13, loss = 0.08750574\n",
"Iteration 14, loss = 0.08626040\n",
"Iteration 15, loss = 0.08526902\n",
"Iteration 16, loss = 0.08428190\n",
"Iteration 17, loss = 0.08329741\n",
"Iteration 18, loss = 0.08235424\n",
"Iteration 19, loss = 0.08148749\n",
"Iteration 20, loss = 0.08066290\n",
"Iteration 21, loss = 0.08016988\n",
"Iteration 22, loss = 0.07916488\n",
"Iteration 23, loss = 0.07870636\n",
"Iteration 24, loss = 0.07782235\n",
"Iteration 25, loss = 0.07709920\n",
"Iteration 26, loss = 0.07664958\n",
"Iteration 27, loss = 0.07587525\n",
"Iteration 28, loss = 0.07520873\n",
"Iteration 29, loss = 0.07493461\n",
"Iteration 30, loss = 0.07462935\n",
"Iteration 31, loss = 0.07376339\n",
"Iteration 32, loss = 0.07345402\n",
"Iteration 33, loss = 0.07288577\n",
"Iteration 34, loss = 0.07259694\n",
"Iteration 35, loss = 0.07197112\n",
"Iteration 36, loss = 0.07176977\n",
"Iteration 37, loss = 0.07120564\n",
"Iteration 38, loss = 0.07054563\n",
"Iteration 39, loss = 0.07048928\n",
"Iteration 40, loss = 0.06990533\n",
"Iteration 41, loss = 0.06971343\n",
"Iteration 42, loss = 0.06905220\n",
"Iteration 43, loss = 0.06869902\n",
"Iteration 44, loss = 0.06843779\n",
"Iteration 45, loss = 0.06782924\n",
"Iteration 46, loss = 0.06758349\n",
"Iteration 47, loss = 0.06712490\n",
"Iteration 48, loss = 0.06646760\n",
"Iteration 49, loss = 0.06644069\n",
"Iteration 50, loss = 0.06585782\n",
"Iteration 51, loss = 0.06565405\n",
"Iteration 52, loss = 0.06518123\n",
"Iteration 53, loss = 0.06497363\n",
"Iteration 54, loss = 0.06446891\n",
"Iteration 55, loss = 0.06443585\n",
"Iteration 56, loss = 0.06398980\n",
"Iteration 57, loss = 0.06380958\n",
"Iteration 58, loss = 0.06352114\n",
"Iteration 59, loss = 0.06296348\n",
"Iteration 60, loss = 0.06269697\n",
"Iteration 61, loss = 0.06270422\n",
"Iteration 62, loss = 0.06220218\n",
"Iteration 63, loss = 0.06227101\n",
"Iteration 64, loss = 0.06197206\n",
"Iteration 65, loss = 0.06155305\n",
"Iteration 66, loss = 0.06102245\n",
"Iteration 67, loss = 0.06100293\n",
"Iteration 68, loss = 0.06079766\n",
"Iteration 69, loss = 0.06079623\n",
"Iteration 70, loss = 0.06041452\n",
"Iteration 71, loss = 0.06020864\n",
"Iteration 72, loss = 0.05994378\n",
"Iteration 73, loss = 0.06001418\n",
"Iteration 74, loss = 0.05976533\n",
"Iteration 75, loss = 0.05910787\n",
"Iteration 76, loss = 0.05898876\n",
"Iteration 77, loss = 0.05895571\n",
"Iteration 78, loss = 0.05886236\n",
"Iteration 79, loss = 0.05842050\n",
"Iteration 80, loss = 0.05821009\n",
"Iteration 81, loss = 0.05818499\n",
"Iteration 82, loss = 0.05824889\n",
"Iteration 83, loss = 0.05787416\n",
"Iteration 84, loss = 0.05759005\n",
"Iteration 85, loss = 0.05753758\n",
"Iteration 86, loss = 0.05744667\n",
"Iteration 87, loss = 0.05715393\n",
"Iteration 88, loss = 0.05693934\n",
"Iteration 89, loss = 0.05702597\n",
"Iteration 90, loss = 0.05691223\n",
"Iteration 91, loss = 0.05675927\n",
"Iteration 92, loss = 0.05661034\n",
"Iteration 93, loss = 0.05644515\n",
"Iteration 94, loss = 0.05621995\n",
"Iteration 95, loss = 0.05594208\n",
"Iteration 96, loss = 0.05593262\n",
"Iteration 97, loss = 0.05575672\n",
"Iteration 98, loss = 0.05573100\n",
"Iteration 99, loss = 0.05543104\n",
"Iteration 100, loss = 0.05531084\n",
"Iteration 101, loss = 0.05530188\n",
"Iteration 102, loss = 0.05498001\n",
"Iteration 103, loss = 0.05553224\n",
"Iteration 104, loss = 0.05474272\n",
"Iteration 105, loss = 0.05478189\n",
"Iteration 106, loss = 0.05450521\n",
"Iteration 107, loss = 0.05473619\n",
"Iteration 108, loss = 0.05423005\n",
"Iteration 109, loss = 0.05422905\n",
"Iteration 110, loss = 0.05409723\n",
"Iteration 111, loss = 0.05406946\n",
"Iteration 112, loss = 0.05382760\n",
"Iteration 113, loss = 0.05400166\n",
"Iteration 114, loss = 0.05373316\n",
"Iteration 115, loss = 0.05344977\n",
"Iteration 116, loss = 0.05334509\n",
"Iteration 117, loss = 0.05323020\n",
"Iteration 118, loss = 0.05289277\n",
"Iteration 119, loss = 0.05317960\n",
"Iteration 120, loss = 0.05263621\n",
"Iteration 121, loss = 0.05266286\n",
"Iteration 122, loss = 0.05270315\n",
"Iteration 123, loss = 0.05269189\n",
"Iteration 124, loss = 0.05219864\n",
"Iteration 125, loss = 0.05227782\n",
"Iteration 126, loss = 0.05234365\n",
"Iteration 127, loss = 0.05233565\n",
"Iteration 128, loss = 0.05204155\n",
"Iteration 129, loss = 0.05200735\n",
"Iteration 130, loss = 0.05194880\n",
"Iteration 131, loss = 0.05165784\n",
"Iteration 132, loss = 0.05168876\n",
"Iteration 133, loss = 0.05185820\n",
"Iteration 134, loss = 0.05159532\n",
"Iteration 135, loss = 0.05147734\n",
"Iteration 136, loss = 0.05150328\n",
"Iteration 137, loss = 0.05140154\n",
"Iteration 138, loss = 0.05125129\n",
"Iteration 139, loss = 0.05124399\n",
"Iteration 140, loss = 0.05093618\n",
"Iteration 141, loss = 0.05094033\n",
"Iteration 142, loss = 0.05086147\n",
"Iteration 143, loss = 0.05102048\n",
"Iteration 144, loss = 0.05064925\n",
"Iteration 145, loss = 0.05030770\n",
"Iteration 146, loss = 0.05056102\n",
"Iteration 147, loss = 0.05052691\n",
"Iteration 148, loss = 0.05029872\n",
"Iteration 149, loss = 0.05037908\n",
"Iteration 150, loss = 0.05047879\n",
"Iteration 151, loss = 0.05004585\n",
"Iteration 152, loss = 0.05022317\n",
"Iteration 153, loss = 0.05006842\n",
"Iteration 154, loss = 0.05010162\n",
"Iteration 155, loss = 0.04977364\n",
"Iteration 156, loss = 0.04991908\n",
"Iteration 157, loss = 0.04978074\n",
"Iteration 158, loss = 0.04965633\n",
"Iteration 159, loss = 0.04942391\n",
"Iteration 160, loss = 0.04951644\n",
"Iteration 161, loss = 0.04942528\n",
"Iteration 162, loss = 0.04944414\n",
"Iteration 163, loss = 0.04954526\n",
"Iteration 164, loss = 0.04910196\n",
"Iteration 165, loss = 0.04917940\n",
"Iteration 166, loss = 0.04881670\n",
"Iteration 167, loss = 0.04911593\n",
"Iteration 168, loss = 0.04913562\n",
"Iteration 169, loss = 0.04877716\n",
"Iteration 170, loss = 0.04893211\n",
"Iteration 171, loss = 0.04883045\n",
"Iteration 172, loss = 0.04871298\n",
"Iteration 173, loss = 0.04877370\n",
"Iteration 174, loss = 0.04850161\n",
"Iteration 175, loss = 0.04867969\n",
"Iteration 176, loss = 0.04845659\n",
"Iteration 177, loss = 0.04838591\n",
"Iteration 178, loss = 0.04821319\n",
"Iteration 179, loss = 0.04811848\n",
"Iteration 180, loss = 0.04830034\n",
"Iteration 181, loss = 0.04808769\n",
"Iteration 182, loss = 0.04778584\n",
"Iteration 183, loss = 0.04782036\n",
"Iteration 184, loss = 0.04784204\n",
"Iteration 185, loss = 0.04775500\n",
"Iteration 186, loss = 0.04755319\n",
"Iteration 187, loss = 0.04756281\n",
"Iteration 188, loss = 0.04755785\n",
"Iteration 189, loss = 0.04740299\n",
"Iteration 190, loss = 0.04731355\n",
"Iteration 191, loss = 0.04781663\n",
"Iteration 192, loss = 0.04730905\n",
"Iteration 193, loss = 0.04719818\n",
"Iteration 194, loss = 0.04707709\n",
"Iteration 195, loss = 0.04719967\n",
"Iteration 196, loss = 0.04732472\n",
"Iteration 197, loss = 0.04719449\n",
"Iteration 198, loss = 0.04680581\n",
"Iteration 199, loss = 0.04683860\n",
"Iteration 200, loss = 0.04689883\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"C:\\Users\\LEm\\Anaconda3\\lib\\site-packages\\sklearn\\neural_network\\_multilayer_perceptron.py:614: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn't converged yet.\n",
" warnings.warn(\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Iteration 1, loss = 0.37943942\n",
"Iteration 2, loss = 0.20806206\n",
"Iteration 3, loss = 0.15098417\n",
"Iteration 4, loss = 0.12369687\n",
"Iteration 5, loss = 0.11059555\n",
"Iteration 6, loss = 0.10352855\n",
"Iteration 7, loss = 0.09942393\n",
"Iteration 8, loss = 0.09629028\n",
"Iteration 9, loss = 0.09364729\n",
"Iteration 10, loss = 0.09169521\n",
"Iteration 11, loss = 0.09029445\n",
"Iteration 12, loss = 0.08854437\n",
"Iteration 13, loss = 0.08741754\n",
"Iteration 14, loss = 0.08604385\n",
"Iteration 15, loss = 0.08556160\n",
"Iteration 16, loss = 0.08434203\n",
"Iteration 17, loss = 0.08327443\n",
"Iteration 18, loss = 0.08247679\n",
"Iteration 19, loss = 0.08174518\n",
"Iteration 20, loss = 0.08079751\n",
"Iteration 21, loss = 0.08007539\n",
"Iteration 22, loss = 0.07926548\n",
"Iteration 23, loss = 0.07870902\n",
"Iteration 24, loss = 0.07825367\n",
"Iteration 25, loss = 0.07754818\n",
"Iteration 26, loss = 0.07663220\n",
"Iteration 27, loss = 0.07621753\n",
"Iteration 28, loss = 0.07601508\n",
"Iteration 29, loss = 0.07511778\n",
"Iteration 30, loss = 0.07466639\n",
"Iteration 31, loss = 0.07416390\n",
"Iteration 32, loss = 0.07338360\n",
"Iteration 33, loss = 0.07279922\n",
"Iteration 34, loss = 0.07249319\n",
"Iteration 35, loss = 0.07225566\n",
"Iteration 36, loss = 0.07171004\n",
"Iteration 37, loss = 0.07119569\n",
"Iteration 38, loss = 0.07081552\n",
"Iteration 39, loss = 0.07048187\n",
"Iteration 40, loss = 0.07023135\n",
"Iteration 41, loss = 0.06982503\n",
"Iteration 42, loss = 0.06939560\n",
"Iteration 43, loss = 0.06881906\n",
"Iteration 44, loss = 0.06839603\n",
"Iteration 45, loss = 0.06812088\n",
"Iteration 46, loss = 0.06827815\n",
"Iteration 47, loss = 0.06747240\n",
"Iteration 48, loss = 0.06703114\n",
"Iteration 49, loss = 0.06664174\n",
"Iteration 50, loss = 0.06642281\n",
"Iteration 51, loss = 0.06622606\n",
"Iteration 52, loss = 0.06562847\n",
"Iteration 53, loss = 0.06525019\n",
"Iteration 54, loss = 0.06481738\n",
"Iteration 55, loss = 0.06469767\n",
"Iteration 56, loss = 0.06424328\n",
"Iteration 57, loss = 0.06385907\n",
"Iteration 58, loss = 0.06361867\n",
"Iteration 59, loss = 0.06347175\n",
"Iteration 60, loss = 0.06340379\n",
"Iteration 61, loss = 0.06300927\n",
"Iteration 62, loss = 0.06278506\n",
"Iteration 63, loss = 0.06264564\n",
"Iteration 64, loss = 0.06215270\n",
"Iteration 65, loss = 0.06195676\n",
"Iteration 66, loss = 0.06170632\n",
"Iteration 67, loss = 0.06133012\n",
"Iteration 68, loss = 0.06104533\n",
"Iteration 69, loss = 0.06094511\n",
"Iteration 70, loss = 0.06079928\n",
"Iteration 71, loss = 0.06036632\n",
"Iteration 72, loss = 0.06033194\n",
"Iteration 73, loss = 0.06039852\n",
"Iteration 74, loss = 0.06023046\n",
"Iteration 75, loss = 0.05961952\n",
"Iteration 76, loss = 0.05968864\n",
"Iteration 77, loss = 0.05940752\n",
"Iteration 78, loss = 0.05936053\n",
"Iteration 79, loss = 0.05924548\n",
"Iteration 80, loss = 0.05929704\n",
"Iteration 81, loss = 0.05887601\n",
"Iteration 82, loss = 0.05863248\n",
"Iteration 83, loss = 0.05868327\n",
"Iteration 84, loss = 0.05804576\n",
"Iteration 85, loss = 0.05832471\n",
"Iteration 86, loss = 0.05804650\n",
"Iteration 87, loss = 0.05789907\n",
"Iteration 88, loss = 0.05783301\n",
"Iteration 89, loss = 0.05745192\n",
"Iteration 90, loss = 0.05740573\n",
"Iteration 91, loss = 0.05727617\n",
"Iteration 92, loss = 0.05693507\n",
"Iteration 93, loss = 0.05699981\n",
"Iteration 94, loss = 0.05708543\n",
"Iteration 95, loss = 0.05645184\n",
"Iteration 96, loss = 0.05635523\n",
"Iteration 97, loss = 0.05645710\n",
"Iteration 98, loss = 0.05600539\n",
"Iteration 99, loss = 0.05586940\n",
"Iteration 100, loss = 0.05600612\n",
"Iteration 101, loss = 0.05568491\n",
"Iteration 102, loss = 0.05579413\n",
"Iteration 103, loss = 0.05574786\n",
"Iteration 104, loss = 0.05532345\n",
"Iteration 105, loss = 0.05531401\n",
"Iteration 106, loss = 0.05495304\n",
"Iteration 107, loss = 0.05520870\n",
"Iteration 108, loss = 0.05459983\n",
"Iteration 109, loss = 0.05462952\n",
"Iteration 110, loss = 0.05436476\n",
"Iteration 111, loss = 0.05426531\n",
"Iteration 112, loss = 0.05425437\n",
"Iteration 113, loss = 0.05411738\n",
"Iteration 114, loss = 0.05417683\n",
"Iteration 115, loss = 0.05401941\n",
"Iteration 116, loss = 0.05380220\n",
"Iteration 117, loss = 0.05343444\n",
"Iteration 118, loss = 0.05324554\n",
"Iteration 119, loss = 0.05340904\n",
"Iteration 120, loss = 0.05324087\n",
"Iteration 121, loss = 0.05328846\n",
"Iteration 122, loss = 0.05318582\n",
"Iteration 123, loss = 0.05321005\n",
"Iteration 124, loss = 0.05284122\n",
"Iteration 125, loss = 0.05261106\n",
"Iteration 126, loss = 0.05293804\n",
"Iteration 127, loss = 0.05272780\n",
"Iteration 128, loss = 0.05258968\n",
"Iteration 129, loss = 0.05250857\n",
"Iteration 130, loss = 0.05222535\n",
"Iteration 131, loss = 0.05264670\n",
"Iteration 132, loss = 0.05208602\n",
"Iteration 133, loss = 0.05212828\n",
"Iteration 134, loss = 0.05197158\n",
"Iteration 135, loss = 0.05207633\n",
"Iteration 136, loss = 0.05184954\n",
"Iteration 137, loss = 0.05192564\n",
"Iteration 138, loss = 0.05170677\n",
"Iteration 139, loss = 0.05168538\n",
"Iteration 140, loss = 0.05114597\n",
"Iteration 141, loss = 0.05144036\n",
"Iteration 142, loss = 0.05117491\n",
"Iteration 143, loss = 0.05112412\n",
"Iteration 144, loss = 0.05127602\n",
"Iteration 145, loss = 0.05133182\n",
"Iteration 146, loss = 0.05084342\n",
"Iteration 147, loss = 0.05133077\n",
"Iteration 148, loss = 0.05098878\n",
"Iteration 149, loss = 0.05076359\n",
"Iteration 150, loss = 0.05061357\n",
"Iteration 151, loss = 0.05091599\n",
"Iteration 152, loss = 0.05049878\n",
"Iteration 153, loss = 0.05058291\n",
"Iteration 154, loss = 0.05031191\n",
"Iteration 155, loss = 0.05042442\n",
"Iteration 156, loss = 0.05020025\n",
"Iteration 157, loss = 0.05043656\n",
"Iteration 158, loss = 0.05021777\n",
"Iteration 159, loss = 0.05009158\n",
"Iteration 160, loss = 0.05008618\n",
"Iteration 161, loss = 0.04988413\n",
"Iteration 162, loss = 0.04999682\n",
"Iteration 163, loss = 0.04951518\n",
"Iteration 164, loss = 0.04969529\n",
"Iteration 165, loss = 0.04955588\n",
"Iteration 166, loss = 0.04958786\n",
"Iteration 167, loss = 0.04944134\n",
"Iteration 168, loss = 0.04962714\n",
"Iteration 169, loss = 0.04950143\n",
"Iteration 170, loss = 0.04942597\n",
"Iteration 171, loss = 0.04914458\n",
"Iteration 172, loss = 0.04888210\n",
"Iteration 173, loss = 0.04903786\n",
"Iteration 174, loss = 0.04911281\n",
"Iteration 175, loss = 0.04885105\n",
"Iteration 176, loss = 0.04878095\n",
"Iteration 177, loss = 0.04886371\n",
"Iteration 178, loss = 0.04884604\n",
"Iteration 179, loss = 0.04870992\n",
"Iteration 180, loss = 0.04851010\n",
"Iteration 181, loss = 0.04825052\n",
"Iteration 182, loss = 0.04829006\n",
"Iteration 183, loss = 0.04834937\n",
"Iteration 184, loss = 0.04840736\n",
"Iteration 185, loss = 0.04797534\n",
"Iteration 186, loss = 0.04821260\n",
"Iteration 187, loss = 0.04834322\n",
"Iteration 188, loss = 0.04824567\n",
"Iteration 189, loss = 0.04782136\n",
"Iteration 190, loss = 0.04772285\n",
"Iteration 191, loss = 0.04774229\n",
"Iteration 192, loss = 0.04771768\n",
"Iteration 193, loss = 0.04782771\n",
"Iteration 194, loss = 0.04790038\n",
"Iteration 195, loss = 0.04773461\n",
"Iteration 196, loss = 0.04740332\n",
"Iteration 197, loss = 0.04759773\n",
"Iteration 198, loss = 0.04750071\n",
"Iteration 199, loss = 0.04746371\n",
"Iteration 200, loss = 0.04712230\n",
"Iteration 1, loss = 0.38610709\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"C:\\Users\\LEm\\Anaconda3\\lib\\site-packages\\sklearn\\neural_network\\_multilayer_perceptron.py:614: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn't converged yet.\n",
" warnings.warn(\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Iteration 2, loss = 0.20631124\n",
"Iteration 3, loss = 0.14978593\n",
"Iteration 4, loss = 0.12212459\n",
"Iteration 5, loss = 0.10826330\n",
"Iteration 6, loss = 0.10078256\n",
"Iteration 7, loss = 0.09627726\n",
"Iteration 8, loss = 0.09273307\n",
"Iteration 9, loss = 0.09029515\n",
"Iteration 10, loss = 0.08863501\n",
"Iteration 11, loss = 0.08684797\n",
"Iteration 12, loss = 0.08555614\n",
"Iteration 13, loss = 0.08429833\n",
"Iteration 14, loss = 0.08371180\n",
"Iteration 15, loss = 0.08270833\n",
"Iteration 16, loss = 0.08180590\n",
"Iteration 17, loss = 0.08104160\n",
"Iteration 18, loss = 0.08021181\n",
"Iteration 19, loss = 0.07959178\n",
"Iteration 20, loss = 0.07883852\n",
"Iteration 21, loss = 0.07832383\n",
"Iteration 22, loss = 0.07763030\n",
"Iteration 23, loss = 0.07719975\n",
"Iteration 24, loss = 0.07649467\n",
"Iteration 25, loss = 0.07595465\n",
"Iteration 26, loss = 0.07550824\n",
"Iteration 27, loss = 0.07498409\n",
"Iteration 28, loss = 0.07463806\n",
"Iteration 29, loss = 0.07418467\n",
"Iteration 30, loss = 0.07352996\n",
"Iteration 31, loss = 0.07320213\n",
"Iteration 32, loss = 0.07270886\n",
"Iteration 33, loss = 0.07245362\n",
"Iteration 34, loss = 0.07185088\n",
"Iteration 35, loss = 0.07126980\n",
"Iteration 36, loss = 0.07094893\n",
"Iteration 37, loss = 0.07069578\n",
"Iteration 38, loss = 0.07014417\n",
"Iteration 39, loss = 0.06999321\n",
"Iteration 40, loss = 0.06990622\n",
"Iteration 41, loss = 0.06936937\n",
"Iteration 42, loss = 0.06892723\n",
"Iteration 43, loss = 0.06869372\n",
"Iteration 44, loss = 0.06843412\n",
"Iteration 45, loss = 0.06818705\n",
"Iteration 46, loss = 0.06770266\n",
"Iteration 47, loss = 0.06739975\n",
"Iteration 48, loss = 0.06711456\n",
"Iteration 49, loss = 0.06695963\n",
"Iteration 50, loss = 0.06662729\n",
"Iteration 51, loss = 0.06672306\n",
"Iteration 52, loss = 0.06601954\n",
"Iteration 53, loss = 0.06602024\n",
"Iteration 54, loss = 0.06554471\n",
"Iteration 55, loss = 0.06529730\n",
"Iteration 56, loss = 0.06492318\n",
"Iteration 57, loss = 0.06482332\n",
"Iteration 58, loss = 0.06474161\n",
"Iteration 59, loss = 0.06433515\n",
"Iteration 60, loss = 0.06410066\n",
"Iteration 61, loss = 0.06383569\n",
"Iteration 62, loss = 0.06350947\n",
"Iteration 63, loss = 0.06347626\n",
"Iteration 64, loss = 0.06297270\n",
"Iteration 65, loss = 0.06275839\n",
"Iteration 66, loss = 0.06252553\n",
"Iteration 67, loss = 0.06246693\n",
"Iteration 68, loss = 0.06237110\n",
"Iteration 69, loss = 0.06196209\n",
"Iteration 70, loss = 0.06157997\n",
"Iteration 71, loss = 0.06178561\n",
"Iteration 72, loss = 0.06151413\n",
"Iteration 73, loss = 0.06131294\n",
"Iteration 74, loss = 0.06082534\n",
"Iteration 75, loss = 0.06073260\n",
"Iteration 76, loss = 0.06046999\n",
"Iteration 77, loss = 0.06045185\n",
"Iteration 78, loss = 0.06003108\n",
"Iteration 79, loss = 0.05983998\n",
"Iteration 80, loss = 0.05985606\n",
"Iteration 81, loss = 0.05951395\n",
"Iteration 82, loss = 0.05935613\n",
"Iteration 83, loss = 0.05896466\n",
"Iteration 84, loss = 0.05934146\n",
"Iteration 85, loss = 0.05877339\n",
"Iteration 86, loss = 0.05860741\n",
"Iteration 87, loss = 0.05839712\n",
"Iteration 88, loss = 0.05820550\n",
"Iteration 89, loss = 0.05814696\n",
"Iteration 90, loss = 0.05783034\n",
"Iteration 91, loss = 0.05763866\n",
"Iteration 92, loss = 0.05758353\n",
"Iteration 93, loss = 0.05725340\n",
"Iteration 94, loss = 0.05741273\n",
"Iteration 95, loss = 0.05728585\n",
"Iteration 96, loss = 0.05710706\n",
"Iteration 97, loss = 0.05695834\n",
"Iteration 98, loss = 0.05661708\n",
"Iteration 99, loss = 0.05678703\n",
"Iteration 100, loss = 0.05656219\n",
"Iteration 101, loss = 0.05621174\n",
"Iteration 102, loss = 0.05605857\n",
"Iteration 103, loss = 0.05616572\n",
"Iteration 104, loss = 0.05586810\n",
"Iteration 105, loss = 0.05562434\n",
"Iteration 106, loss = 0.05567383\n",
"Iteration 107, loss = 0.05559941\n",
"Iteration 108, loss = 0.05571109\n",
"Iteration 109, loss = 0.05527591\n",
"Iteration 110, loss = 0.05506806\n",
"Iteration 111, loss = 0.05486208\n",
"Iteration 112, loss = 0.05516884\n",
"Iteration 113, loss = 0.05467999\n",
"Iteration 114, loss = 0.05472470\n",
"Iteration 115, loss = 0.05462689\n",
"Iteration 116, loss = 0.05437919\n",
"Iteration 117, loss = 0.05423364\n",
"Iteration 118, loss = 0.05438195\n",
"Iteration 119, loss = 0.05401502\n",
"Iteration 120, loss = 0.05411001\n",
"Iteration 121, loss = 0.05401750\n",
"Iteration 122, loss = 0.05398402\n",
"Iteration 123, loss = 0.05349591\n",
"Iteration 124, loss = 0.05376797\n",
"Iteration 125, loss = 0.05341087\n",
"Iteration 126, loss = 0.05340789\n",
"Iteration 127, loss = 0.05335498\n",
"Iteration 128, loss = 0.05317400\n",
"Iteration 129, loss = 0.05307438\n",
"Iteration 130, loss = 0.05295175\n",
"Iteration 131, loss = 0.05270006\n",
"Iteration 132, loss = 0.05286072\n",
"Iteration 133, loss = 0.05257944\n",
"Iteration 134, loss = 0.05248976\n",
"Iteration 135, loss = 0.05244947\n",
"Iteration 136, loss = 0.05234321\n",
"Iteration 137, loss = 0.05235668\n",
"Iteration 138, loss = 0.05222683\n",
"Iteration 139, loss = 0.05207978\n",
"Iteration 140, loss = 0.05205838\n",
"Iteration 141, loss = 0.05168882\n",
"Iteration 142, loss = 0.05155320\n",
"Iteration 143, loss = 0.05175820\n",
"Iteration 144, loss = 0.05129259\n",
"Iteration 145, loss = 0.05134503\n",
"Iteration 146, loss = 0.05104506\n",
"Iteration 147, loss = 0.05137557\n",
"Iteration 148, loss = 0.05148959\n",
"Iteration 149, loss = 0.05113721\n",
"Iteration 150, loss = 0.05094116\n",
"Iteration 151, loss = 0.05096299\n",
"Iteration 152, loss = 0.05073916\n",
"Iteration 153, loss = 0.05086014\n",
"Iteration 154, loss = 0.05080643\n",
"Iteration 155, loss = 0.05037717\n",
"Iteration 156, loss = 0.05076547\n",
"Iteration 157, loss = 0.05016537\n",
"Iteration 158, loss = 0.05011568\n",
"Iteration 159, loss = 0.05022352\n",
"Iteration 160, loss = 0.05016536\n",
"Iteration 161, loss = 0.04999359\n",
"Iteration 162, loss = 0.05021019\n",
"Iteration 163, loss = 0.04988607\n",
"Iteration 164, loss = 0.04993375\n",
"Iteration 165, loss = 0.04961864\n",
"Iteration 166, loss = 0.04988696\n",
"Iteration 167, loss = 0.04934576\n",
"Iteration 168, loss = 0.04934463\n",
"Iteration 169, loss = 0.04910839\n",
"Iteration 170, loss = 0.04909532\n",
"Iteration 171, loss = 0.04909694\n",
"Iteration 172, loss = 0.04940782\n",
"Iteration 173, loss = 0.04896245\n",
"Iteration 174, loss = 0.04934584\n",
"Iteration 175, loss = 0.04874653\n",
"Iteration 176, loss = 0.04867029\n",
"Iteration 177, loss = 0.04871833\n",
"Iteration 178, loss = 0.04821192\n",
"Iteration 179, loss = 0.04835259\n",
"Iteration 180, loss = 0.04860616\n",
"Iteration 181, loss = 0.04841212\n",
"Iteration 182, loss = 0.04818756\n",
"Iteration 183, loss = 0.04830424\n",
"Iteration 184, loss = 0.04817383\n",
"Iteration 185, loss = 0.04793472\n",
"Iteration 186, loss = 0.04811334\n",
"Iteration 187, loss = 0.04775278\n",
"Iteration 188, loss = 0.04786197\n",
"Iteration 189, loss = 0.04772416\n",
"Iteration 190, loss = 0.04790139\n",
"Iteration 191, loss = 0.04758699\n",
"Iteration 192, loss = 0.04732677\n",
"Iteration 193, loss = 0.04721701\n",
"Iteration 194, loss = 0.04736381\n",
"Iteration 195, loss = 0.04715509\n",
"Iteration 196, loss = 0.04719952\n",
"Iteration 197, loss = 0.04679874\n",
"Iteration 198, loss = 0.04681496\n",
"Iteration 199, loss = 0.04703293\n",
"Iteration 200, loss = 0.04691492\n",
"Iteration 1, loss = 0.37966026"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"C:\\Users\\LEm\\Anaconda3\\lib\\site-packages\\sklearn\\neural_network\\_multilayer_perceptron.py:614: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn't converged yet.\n",
" warnings.warn(\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"\n",
"Iteration 2, loss = 0.19510873\n",
"Iteration 3, loss = 0.13968976\n",
"Iteration 4, loss = 0.11629324\n",
"Iteration 5, loss = 0.10544188\n",
"Iteration 6, loss = 0.09921809\n",
"Iteration 7, loss = 0.09539963\n",
"Iteration 8, loss = 0.09212459\n",
"Iteration 9, loss = 0.08998614\n",
"Iteration 10, loss = 0.08829643\n",
"Iteration 11, loss = 0.08676704\n",
"Iteration 12, loss = 0.08542030\n",
"Iteration 13, loss = 0.08418513\n",
"Iteration 14, loss = 0.08310253\n",
"Iteration 15, loss = 0.08220080\n",
"Iteration 16, loss = 0.08140781\n",
"Iteration 17, loss = 0.08033545\n",
"Iteration 18, loss = 0.07985753\n",
"Iteration 19, loss = 0.07882425\n",
"Iteration 20, loss = 0.07804243\n",
"Iteration 21, loss = 0.07774270\n",
"Iteration 22, loss = 0.07660338\n",
"Iteration 23, loss = 0.07649369\n",
"Iteration 24, loss = 0.07567666\n",
"Iteration 25, loss = 0.07471757\n",
"Iteration 26, loss = 0.07436113\n",
"Iteration 27, loss = 0.07387757\n",
"Iteration 28, loss = 0.07322582\n",
"Iteration 29, loss = 0.07299754\n",
"Iteration 30, loss = 0.07237097\n",
"Iteration 31, loss = 0.07168846\n",
"Iteration 32, loss = 0.07143571\n",
"Iteration 33, loss = 0.07081118\n",
"Iteration 34, loss = 0.07036429\n",
"Iteration 35, loss = 0.07003325\n",
"Iteration 36, loss = 0.06954744\n",
"Iteration 37, loss = 0.06921027\n",
"Iteration 38, loss = 0.06884096\n",
"Iteration 39, loss = 0.06863495\n",
"Iteration 40, loss = 0.06789479\n",
"Iteration 41, loss = 0.06786304\n",
"Iteration 42, loss = 0.06744743\n",
"Iteration 43, loss = 0.06667983\n",
"Iteration 44, loss = 0.06654802\n",
"Iteration 45, loss = 0.06612996\n",
"Iteration 46, loss = 0.06575285\n",
"Iteration 47, loss = 0.06582786\n",
"Iteration 48, loss = 0.06576068\n",
"Iteration 49, loss = 0.06492765\n",
"Iteration 50, loss = 0.06473190\n",
"Iteration 51, loss = 0.06450597\n",
"Iteration 52, loss = 0.06418974\n",
"Iteration 53, loss = 0.06387653\n",
"Iteration 54, loss = 0.06361331\n",
"Iteration 55, loss = 0.06319182\n",
"Iteration 56, loss = 0.06293079\n",
"Iteration 57, loss = 0.06293099\n",
"Iteration 58, loss = 0.06263078\n",
"Iteration 59, loss = 0.06260593\n",
"Iteration 60, loss = 0.06247136\n",
"Iteration 61, loss = 0.06203098\n",
"Iteration 62, loss = 0.06160651\n",
"Iteration 63, loss = 0.06178685\n",
"Iteration 64, loss = 0.06117512\n",
"Iteration 65, loss = 0.06102986\n",
"Iteration 66, loss = 0.06115128\n",
"Iteration 67, loss = 0.06061920\n",
"Iteration 68, loss = 0.06065823\n",
"Iteration 69, loss = 0.06041326\n",
"Iteration 70, loss = 0.06006181\n",
"Iteration 71, loss = 0.05992340\n",
"Iteration 72, loss = 0.06010363\n",
"Iteration 73, loss = 0.05978305\n",
"Iteration 74, loss = 0.05940861\n",
"Iteration 75, loss = 0.05937296\n",
"Iteration 76, loss = 0.05908606\n",
"Iteration 77, loss = 0.05879494\n",
"Iteration 78, loss = 0.05855177\n",
"Iteration 79, loss = 0.05872237\n",
"Iteration 80, loss = 0.05841329\n",
"Iteration 81, loss = 0.05833065\n",
"Iteration 82, loss = 0.05800683\n",
"Iteration 83, loss = 0.05782144\n",
"Iteration 84, loss = 0.05791543\n",
"Iteration 85, loss = 0.05765450\n",
"Iteration 86, loss = 0.05728952\n",
"Iteration 87, loss = 0.05769576\n",
"Iteration 88, loss = 0.05740453\n",
"Iteration 89, loss = 0.05706751\n",
"Iteration 90, loss = 0.05696424\n",
"Iteration 91, loss = 0.05689613\n",
"Iteration 92, loss = 0.05661444\n",
"Iteration 93, loss = 0.05648038\n",
"Iteration 94, loss = 0.05658747\n",
"Iteration 95, loss = 0.05612649\n",
"Iteration 96, loss = 0.05616252\n",
"Iteration 97, loss = 0.05629954\n",
"Iteration 98, loss = 0.05593407\n",
"Iteration 99, loss = 0.05582771\n",
"Iteration 100, loss = 0.05570667\n",
"Iteration 101, loss = 0.05596150\n",
"Iteration 102, loss = 0.05579157\n",
"Iteration 103, loss = 0.05560141\n",
"Iteration 104, loss = 0.05534161\n",
"Iteration 105, loss = 0.05504403\n",
"Iteration 106, loss = 0.05496695\n",
"Iteration 107, loss = 0.05483803\n",
"Iteration 108, loss = 0.05482981\n",
"Iteration 109, loss = 0.05481655\n",
"Iteration 110, loss = 0.05467513\n",
"Iteration 111, loss = 0.05472299\n",
"Iteration 112, loss = 0.05431178\n",
"Iteration 113, loss = 0.05417762\n",
"Iteration 114, loss = 0.05420704\n",
"Iteration 115, loss = 0.05394043\n",
"Iteration 116, loss = 0.05419107\n",
"Iteration 117, loss = 0.05420573\n",
"Iteration 118, loss = 0.05383757\n",
"Iteration 119, loss = 0.05364336\n",
"Iteration 120, loss = 0.05332304\n",
"Iteration 121, loss = 0.05339298\n",
"Iteration 122, loss = 0.05320431\n",
"Iteration 123, loss = 0.05299003\n",
"Iteration 124, loss = 0.05334819\n",
"Iteration 125, loss = 0.05290650\n",
"Iteration 126, loss = 0.05287907\n",
"Iteration 127, loss = 0.05319072\n",
"Iteration 128, loss = 0.05255972\n",
"Iteration 129, loss = 0.05241910\n",
"Iteration 130, loss = 0.05232325\n",
"Iteration 131, loss = 0.05230118\n",
"Iteration 132, loss = 0.05229073\n",
"Iteration 133, loss = 0.05206907\n",
"Iteration 134, loss = 0.05151464\n",
"Iteration 135, loss = 0.05196920\n",
"Iteration 136, loss = 0.05176741\n",
"Iteration 137, loss = 0.05213491\n",
"Iteration 138, loss = 0.05148447\n",
"Iteration 139, loss = 0.05136122\n",
"Iteration 140, loss = 0.05149192\n",
"Iteration 141, loss = 0.05125375\n",
"Iteration 142, loss = 0.05099373\n",
"Iteration 143, loss = 0.05121339\n",
"Iteration 144, loss = 0.05153282\n",
"Iteration 145, loss = 0.05095864\n",
"Iteration 146, loss = 0.05085749\n",
"Iteration 147, loss = 0.05065342\n",
"Iteration 148, loss = 0.05047797\n",
"Iteration 149, loss = 0.05071063\n",
"Iteration 150, loss = 0.05049794\n",
"Iteration 151, loss = 0.05019508\n",
"Iteration 152, loss = 0.05047577\n",
"Iteration 153, loss = 0.05001072\n",
"Iteration 154, loss = 0.05036206\n",
"Iteration 155, loss = 0.05003186\n",
"Iteration 156, loss = 0.04973609\n",
"Iteration 157, loss = 0.04971568\n",
"Iteration 158, loss = 0.04972818\n",
"Iteration 159, loss = 0.04979825\n",
"Iteration 160, loss = 0.04944791\n",
"Iteration 161, loss = 0.04959055\n",
"Iteration 162, loss = 0.04948935\n",
"Iteration 163, loss = 0.04930123\n",
"Iteration 164, loss = 0.04911748\n",
"Iteration 165, loss = 0.04922480\n",
"Iteration 166, loss = 0.04916710\n",
"Iteration 167, loss = 0.04886509\n",
"Iteration 168, loss = 0.04883919\n",
"Iteration 169, loss = 0.04902636\n",
"Iteration 170, loss = 0.04879574\n",
"Iteration 171, loss = 0.04867555\n",
"Iteration 172, loss = 0.04851970\n",
"Iteration 173, loss = 0.04883596\n",
"Iteration 174, loss = 0.04840196\n",
"Iteration 175, loss = 0.04845407\n",
"Iteration 176, loss = 0.04821864\n",
"Iteration 177, loss = 0.04835167\n",
"Iteration 178, loss = 0.04812957\n",
"Iteration 179, loss = 0.04829312\n",
"Iteration 180, loss = 0.04777937\n",
"Iteration 181, loss = 0.04777601\n",
"Iteration 182, loss = 0.04802745\n",
"Iteration 183, loss = 0.04762981\n",
"Iteration 184, loss = 0.04827828\n",
"Iteration 185, loss = 0.04778391\n",
"Iteration 186, loss = 0.04745273\n",
"Iteration 187, loss = 0.04775234\n",
"Iteration 188, loss = 0.04755774\n",
"Iteration 189, loss = 0.04749823\n",
"Iteration 190, loss = 0.04729748\n",
"Iteration 191, loss = 0.04730926\n",
"Iteration 192, loss = 0.04704027\n",
"Iteration 193, loss = 0.04750907\n",
"Iteration 194, loss = 0.04740521\n",
"Iteration 195, loss = 0.04679165\n",
"Iteration 196, loss = 0.04709482\n",
"Iteration 197, loss = 0.04699889\n",
"Iteration 198, loss = 0.04713056\n",
"Iteration 199, loss = 0.04673973\n",
"Iteration 200, loss = 0.04703738\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"C:\\Users\\LEm\\Anaconda3\\lib\\site-packages\\sklearn\\neural_network\\_multilayer_perceptron.py:614: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn't converged yet.\n",
" warnings.warn(\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Iteration 1, loss = 0.42728881\n",
"Iteration 2, loss = 0.20737837\n",
"Iteration 3, loss = 0.14680170\n",
"Iteration 4, loss = 0.12104095\n",
"Iteration 5, loss = 0.10924771\n",
"Iteration 6, loss = 0.10276014\n",
"Iteration 7, loss = 0.09900136\n",
"Iteration 8, loss = 0.09611647\n",
"Iteration 9, loss = 0.09375229\n",
"Iteration 10, loss = 0.09169115\n",
"Iteration 11, loss = 0.09013673\n",
"Iteration 12, loss = 0.08846108\n",
"Iteration 13, loss = 0.08723971\n",
"Iteration 14, loss = 0.08617476\n",
"Iteration 15, loss = 0.08492058\n",
"Iteration 16, loss = 0.08402201\n",
"Iteration 17, loss = 0.08316931\n",
"Iteration 18, loss = 0.08217149\n",
"Iteration 19, loss = 0.08150453\n",
"Iteration 20, loss = 0.08085485\n",
"Iteration 21, loss = 0.08006636\n",
"Iteration 22, loss = 0.07933372\n",
"Iteration 23, loss = 0.07874024\n",
"Iteration 24, loss = 0.07820013\n",
"Iteration 25, loss = 0.07744751\n",
"Iteration 26, loss = 0.07704384\n",
"Iteration 27, loss = 0.07635740\n",
"Iteration 28, loss = 0.07594688\n",
"Iteration 29, loss = 0.07518443\n",
"Iteration 30, loss = 0.07488528\n",
"Iteration 31, loss = 0.07430738\n",
"Iteration 32, loss = 0.07361673\n",
"Iteration 33, loss = 0.07323256\n",
"Iteration 34, loss = 0.07293546\n",
"Iteration 35, loss = 0.07242827\n",
"Iteration 36, loss = 0.07194569\n",
"Iteration 37, loss = 0.07159768\n",
"Iteration 38, loss = 0.07145351\n",
"Iteration 39, loss = 0.07082245\n",
"Iteration 40, loss = 0.07028868\n",
"Iteration 41, loss = 0.07010466\n",
"Iteration 42, loss = 0.06956913\n",
"Iteration 43, loss = 0.06921367\n",
"Iteration 44, loss = 0.06896719\n",
"Iteration 45, loss = 0.06852744\n",
"Iteration 46, loss = 0.06815941\n",
"Iteration 47, loss = 0.06809569\n",
"Iteration 48, loss = 0.06783369\n",
"Iteration 49, loss = 0.06741844\n",
"Iteration 50, loss = 0.06684315\n",
"Iteration 51, loss = 0.06689995\n",
"Iteration 52, loss = 0.06661513\n",
"Iteration 53, loss = 0.06623696\n",
"Iteration 54, loss = 0.06595329\n",
"Iteration 55, loss = 0.06561512\n",
"Iteration 56, loss = 0.06538217\n",
"Iteration 57, loss = 0.06489595\n",
"Iteration 58, loss = 0.06455352\n",
"Iteration 59, loss = 0.06468051\n",
"Iteration 60, loss = 0.06410818\n",
"Iteration 61, loss = 0.06404122\n",
"Iteration 62, loss = 0.06371188\n",
"Iteration 63, loss = 0.06385662\n",
"Iteration 64, loss = 0.06329727\n",
"Iteration 65, loss = 0.06341061\n",
"Iteration 66, loss = 0.06257491\n",
"Iteration 67, loss = 0.06264566\n",
"Iteration 68, loss = 0.06261164\n",
"Iteration 69, loss = 0.06228283\n",
"Iteration 70, loss = 0.06236965\n",
"Iteration 71, loss = 0.06202707\n",
"Iteration 72, loss = 0.06181086\n",
"Iteration 73, loss = 0.06149269\n",
"Iteration 74, loss = 0.06147154\n",
"Iteration 75, loss = 0.06111803\n",
"Iteration 76, loss = 0.06112275\n",
"Iteration 77, loss = 0.06099216\n",
"Iteration 78, loss = 0.06050979\n",
"Iteration 79, loss = 0.06046110\n",
"Iteration 80, loss = 0.06036993\n",
"Iteration 81, loss = 0.06001674\n",
"Iteration 82, loss = 0.05999709\n",
"Iteration 83, loss = 0.05991087\n",
"Iteration 84, loss = 0.05971376\n",
"Iteration 85, loss = 0.05940788\n",
"Iteration 86, loss = 0.05950297\n",
"Iteration 87, loss = 0.05941499\n",
"Iteration 88, loss = 0.05904563\n",
"Iteration 89, loss = 0.05915871\n",
"Iteration 90, loss = 0.05905603\n",
"Iteration 91, loss = 0.05885479\n",
"Iteration 92, loss = 0.05864558\n",
"Iteration 93, loss = 0.05872547\n",
"Iteration 94, loss = 0.05832375\n",
"Iteration 95, loss = 0.05818101\n",
"Iteration 96, loss = 0.05811152\n",
"Iteration 97, loss = 0.05804963\n",
"Iteration 98, loss = 0.05794116\n",
"Iteration 99, loss = 0.05778351\n",
"Iteration 100, loss = 0.05770579\n",
"Iteration 101, loss = 0.05768799\n",
"Iteration 102, loss = 0.05736933\n",
"Iteration 103, loss = 0.05721705\n",
"Iteration 104, loss = 0.05727666\n",
"Iteration 105, loss = 0.05688117\n",
"Iteration 106, loss = 0.05701743\n",
"Iteration 107, loss = 0.05667701\n",
"Iteration 108, loss = 0.05669901\n",
"Iteration 109, loss = 0.05679904\n",
"Iteration 110, loss = 0.05637473\n",
"Iteration 111, loss = 0.05644102\n",
"Iteration 112, loss = 0.05621998\n",
"Iteration 113, loss = 0.05595889\n",
"Iteration 114, loss = 0.05587823\n",
"Iteration 115, loss = 0.05585216\n",
"Iteration 116, loss = 0.05580943\n",
"Iteration 117, loss = 0.05572017\n",
"Iteration 118, loss = 0.05566929\n",
"Iteration 119, loss = 0.05550706\n",
"Iteration 120, loss = 0.05528472\n",
"Iteration 121, loss = 0.05493993\n",
"Iteration 122, loss = 0.05509832\n",
"Iteration 123, loss = 0.05483538\n",
"Iteration 124, loss = 0.05479258\n",
"Iteration 125, loss = 0.05473503\n",
"Iteration 126, loss = 0.05500879\n",
"Iteration 127, loss = 0.05490314\n",
"Iteration 128, loss = 0.05463694\n",
"Iteration 129, loss = 0.05450835\n",
"Iteration 130, loss = 0.05413266\n",
"Iteration 131, loss = 0.05443587\n",
"Iteration 132, loss = 0.05423373\n",
"Iteration 133, loss = 0.05393571\n",
"Iteration 134, loss = 0.05395506\n",
"Iteration 135, loss = 0.05395936\n",
"Iteration 136, loss = 0.05397807\n",
"Iteration 137, loss = 0.05393931\n",
"Iteration 138, loss = 0.05360539\n",
"Iteration 139, loss = 0.05382749\n",
"Iteration 140, loss = 0.05353748\n",
"Iteration 141, loss = 0.05376538\n",
"Iteration 142, loss = 0.05321059\n",
"Iteration 143, loss = 0.05325428\n",
"Iteration 144, loss = 0.05346526\n",
"Iteration 145, loss = 0.05283696\n",
"Iteration 146, loss = 0.05297058\n",
"Iteration 147, loss = 0.05272621\n",
"Iteration 148, loss = 0.05298462\n",
"Iteration 149, loss = 0.05272633\n",
"Iteration 150, loss = 0.05229418\n",
"Iteration 151, loss = 0.05259358\n",
"Iteration 152, loss = 0.05221134\n",
"Iteration 153, loss = 0.05209465\n",
"Iteration 154, loss = 0.05259057\n",
"Iteration 155, loss = 0.05238139\n",
"Iteration 156, loss = 0.05230145\n",
"Iteration 157, loss = 0.05220427\n",
"Iteration 158, loss = 0.05216628\n",
"Iteration 159, loss = 0.05190173\n",
"Iteration 160, loss = 0.05193352\n",
"Iteration 161, loss = 0.05181292\n",
"Iteration 162, loss = 0.05150185\n",
"Iteration 163, loss = 0.05169466\n",
"Iteration 164, loss = 0.05184823\n",
"Iteration 165, loss = 0.05152065\n",
"Iteration 166, loss = 0.05154715\n",
"Iteration 167, loss = 0.05132617\n",
"Iteration 168, loss = 0.05110693\n",
"Iteration 169, loss = 0.05152028\n",
"Iteration 170, loss = 0.05133709\n",
"Iteration 171, loss = 0.05106591\n",
"Iteration 172, loss = 0.05123417\n",
"Iteration 173, loss = 0.05102209\n",
"Iteration 174, loss = 0.05082637\n",
"Iteration 175, loss = 0.05065979\n",
"Iteration 176, loss = 0.05099141\n",
"Iteration 177, loss = 0.05066517\n",
"Iteration 178, loss = 0.05056538\n",
"Iteration 179, loss = 0.05050501\n",
"Iteration 180, loss = 0.05047372\n",
"Iteration 181, loss = 0.05025837\n",
"Iteration 182, loss = 0.05045177\n",
"Iteration 183, loss = 0.05022535\n",
"Iteration 184, loss = 0.05035113\n",
"Iteration 185, loss = 0.04982998\n",
"Iteration 186, loss = 0.04968224\n",
"Iteration 187, loss = 0.04988712\n",
"Iteration 188, loss = 0.04968413\n",
"Iteration 189, loss = 0.04907887\n",
"Iteration 190, loss = 0.04976068\n",
"Iteration 191, loss = 0.04946017\n",
"Iteration 192, loss = 0.04938650\n",
"Iteration 193, loss = 0.04923168\n",
"Iteration 194, loss = 0.04923091\n",
"Iteration 195, loss = 0.04924102\n",
"Iteration 196, loss = 0.04914028\n",
"Iteration 197, loss = 0.04937449\n",
"Iteration 198, loss = 0.04876657\n",
"Iteration 199, loss = 0.04902934\n",
"Iteration 200, loss = 0.04878121\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"C:\\Users\\LEm\\Anaconda3\\lib\\site-packages\\sklearn\\neural_network\\_multilayer_perceptron.py:614: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn't converged yet.\n",
" warnings.warn(\n"
]
}
],
"source": [
"# Train one MLP per feature combination using the best hyperparameters\n",
"# found in the preceding grid search (best_param_grid), then collect the\n",
"# fitted models, their predictions, and evaluation metrics for comparison.\n",
"clf_mlp_models = []\n",
"pred_release_training_MLP = []\n",
"pred_release_testing_MLP = []\n",
"pred_release_prob_MLP = []\n",
"\n",
"confusion_matrixes_training = []\n",
"confusion_matrixes_test = []\n",
"\n",
"f1_scores = []\n",
"ROC_curves = []\n",
"CV_scores = []\n",
"R2_scores = []\n",
"accuracy_scores = []\n",
"precision_scores = []\n",
"recall_scores = []\n",
"roc_auc_scores = []\n",
"\n",
"AUTO_SCALING = True  # standardize features before fitting the MLP\n",
"\n",
"for n, features in enumerate(feature_combinations):\n",
"    X_train = train_X[features]\n",
"    X_test = validation_X[features]\n",
"\n",
"    if AUTO_SCALING:\n",
"        # Fit the scaler on the training split only to avoid data leakage.\n",
"        scaler = StandardScaler()\n",
"        scaler.fit(X_train)\n",
"        X_training = scaler.transform(X_train)\n",
"        X_testing = scaler.transform(X_test)\n",
"    else:\n",
"        # Bug fix: without this branch X_training/X_testing were undefined\n",
"        # (NameError) whenever AUTO_SCALING was set to False.\n",
"        X_training = X_train\n",
"        X_testing = X_test\n",
"\n",
"    clf_mlp = MLPClassifier(solver=best_param_grid[n][\"solver\"],\n",
"                            hidden_layer_sizes=best_param_grid[n][\"hidden_layer_sizes\"],\n",
"                            alpha=best_param_grid[n][\"alpha\"],\n",
"                            max_iter=200, verbose=3)\n",
"    clf_mlp.fit(X_training, train_y)\n",
"    clf_mlp_models.append(clf_mlp)\n",
"\n",
"    # Predictions on the (scaled) training and validation sets.\n",
"    pred_release_at_training_MLP = clf_mlp.predict(X_training)\n",
"    pred_release_training_MLP.append(pred_release_at_training_MLP)\n",
"\n",
"    pred_prob_MLP = clf_mlp.predict_proba(X_testing)\n",
"    pred_release_prob_MLP.append(pred_prob_MLP)\n",
"\n",
"    pred_release_at_validation_MLP = clf_mlp.predict(X_testing)\n",
"    pred_release_testing_MLP.append(pred_release_at_validation_MLP)\n",
"\n",
"    confusion_matrixes_training.append(confusion_matrix(train_y, pred_release_at_training_MLP))\n",
"    confusion_matrixes_test.append(confusion_matrix(validation_y, pred_release_at_validation_MLP))\n",
"\n",
"    f1_scores.append(f1_score(validation_y, pred_release_at_validation_MLP, average='macro'))\n",
"    CV_scores.append(cross_val_score(estimator=clf_mlp, X=X_training, y=train_y))\n",
"\n",
"    # plot_roc_curve draws a figure as a side effect; close it immediately\n",
"    # so only the stored curve objects are kept.\n",
"    CLF_ROC = plot_roc_curve(clf_mlp, X_testing, validation_y, color='r')\n",
"    ROC_curves.append(CLF_ROC)\n",
"    plt.close()\n",
"\n",
"    R2_scores.append(r2_score(validation_y, pred_release_at_validation_MLP))\n",
"    accuracy_scores.append(accuracy_score(validation_y, pred_release_at_validation_MLP))\n",
"    precision_scores.append(precision_score(validation_y, pred_release_at_validation_MLP))\n",
"    recall_scores.append(recall_score(validation_y, pred_release_at_validation_MLP))\n",
"    roc_auc_scores.append(roc_auc_score(validation_y, pred_release_at_validation_MLP))"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Save final models and metrics"
]
},
{
"cell_type": "code",
"execution_count": 13,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"['MLP_p8_Emhjellen2.joblib']"
]
},
"execution_count": 13,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Persist the trained MLP models, one joblib archive per feature\n",
"# combination (p1..pN). compress=3 keeps the files reasonably small.\n",
"# Replaces eight copy-pasted joblib.dump calls and generalizes to any\n",
"# number of trained models.\n",
"for idx, model in enumerate(clf_mlp_models, start=1):\n",
"    joblib.dump(model, f\"MLP_p{idx}_Emhjellen2.joblib\", compress=3)"
]
},
{
"cell_type": "code",
"execution_count": 16,
"metadata": {},
"outputs": [],
"source": [
"# One annotated confusion-matrix heatmap per model, on the TRAINING set.\n",
"# sklearn's binary confusion_matrix is laid out [[TN, FP], [FN, TP]],\n",
"# which matches the order of group_names below.\n",
"for n, cm in enumerate(confusion_matrixes_training, start=1):\n",
"    group_names = ['True Neg', 'False Pos', 'False Neg', 'True Pos']\n",
"    group_counts = ['{0:0.0f}'.format(value) for value in cm.flatten()]\n",
"    group_percentages = ['{0:.2%}'.format(value) for value in cm.flatten() / np.sum(cm)]\n",
"    # Each cell shows its label, absolute count and share of all samples.\n",
"    labels = [f'{v1}\\n{v2}\\n{v3}' for v1, v2, v3 in\n",
"              zip(group_names, group_counts, group_percentages)]\n",
"    labels = np.asarray(labels).reshape(2, 2)\n",
"    plt.figure()\n",
"    sns.heatmap(cm, annot=labels, fmt='', cmap='Blues')\n",
"    plt.savefig('saved_figures_2/confusion_matrix_MLP_feature_combination_training' + 'p' + str(n) + '.png')\n",
"    plt.close()  # free the figure; only the saved PNG is needed"
]
},
{
"cell_type": "code",
"execution_count": 17,
"metadata": {},
"outputs": [],
"source": [
"# One annotated confusion-matrix heatmap per model, on the VALIDATION set.\n",
"# sklearn's binary confusion_matrix is laid out [[TN, FP], [FN, TP]],\n",
"# which matches the order of group_names below.\n",
"for n, cm in enumerate(confusion_matrixes_test, start=1):\n",
"    group_names = ['True Neg', 'False Pos', 'False Neg', 'True Pos']\n",
"    group_counts = ['{0:0.0f}'.format(value) for value in cm.flatten()]\n",
"    group_percentages = ['{0:.2%}'.format(value) for value in cm.flatten() / np.sum(cm)]\n",
"    # Each cell shows its label, absolute count and share of all samples.\n",
"    labels = [f'{v1}\\n{v2}\\n{v3}' for v1, v2, v3 in\n",
"              zip(group_names, group_counts, group_percentages)]\n",
"    labels = np.asarray(labels).reshape(2, 2)\n",
"    plt.figure()\n",
"    sns.heatmap(cm, annot=labels, fmt='', cmap='Blues')\n",
"    plt.savefig('saved_figures_2/confusion_matrix_MLP_feature_combination_validation' + 'p' + str(n) + '.png')\n",
"    plt.close()  # free the figure; only the saved PNG is needed"
]
},
{
"cell_type": "code",
"execution_count": 19,
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAYIAAAEGCAYAAABo25JHAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAbXUlEQVR4nO3dfZRU9Z3n8feHbnkGEVG0AQE9iEEiiAiixMdV0cSYnOhGk+jRxHDcRDcZd3binN2d2T3JZB6cye7GaNiMMW5OMppEjUGH8WFUQEY0PERQUJQFhQYUFcJD8yDdfPePe0nKorso7Hupau7ndU4f6tb91a0vVd31rfvw/f4UEZiZWXF1q3UAZmZWW04EZmYF50RgZlZwTgRmZgXnRGBmVnCNtQ7gYA0aNChGjBhR6zDMzLqURYsWvRcRx7S3rsslghEjRrBw4cJah2Fm1qVIequjdT40ZGZWcLklAkn3Stoo6ZUO1kvS9yWtlLRU0oS8YjEzs47luUdwHzCtwvrLgFHpz3TghznGYmZmHcgtEUTEXGBThSFXAj+NxAvAAEnH5xWPmZm1r5Yni4cAa0uWm9P7NpQPlDSdZK+BpqYmZs+eDcCJJ55Iv379WLJkCQBHH300p556KnPnzgWgsbGRqVOnsnjxYrZu3QrAxIkTeeedd1i7NnnqUaNG0aNHD155JTmCdeyxx3LyySczb948AHr06MGUKVNYuHAh27dvB2Dy5Mk0Nzezbt06AEaPHk1DQwPLly8H4LjjjmPkyJHMnz8fgF69ejF58mRefPFFdu7cCcCUKVNYvXo1b7/9NgBjxoyhra2NFStWJC/OkCEMHTqUF198EYC+ffsyceJE5s+fz+7duwGYOnUqr7/+Ohs3bgRg7Nix7N69mzfeeAOAYcOGMXjw4D+cXO/fvz8TJkxg3rx5tLa2AnDuueeybNky3n//fQDGjRvHtm3bWLVqFZCcnB84cCCLFy8G4KijjmLcuHHMmTOHiEAS5513HkuWLGHz5s0ATJgwgU2bNvHmm2/6ffL75PepTt6nSpRn0zlJI4DHImJsO+v+GfjriJiXLj8N/FlELKq0zYkTJ4avGjIzOziSFkVEuxmhllcNNQPDSpaHAutrFIuZWWHVMhHMBK5Prx46C9gSEfsdFjIzs3zldo5A0v3A+cAgSc3AXwJHAETEDGAWcDmwEtgB3JhXLGZm1rHcEkFEXHuA9QF8Pa/nNzOrhSmXXFnrEACY/+Rvqh7rymIzs4JzIjAzKzgnAjOzguty3UfNupJL/+tPax0CT3zn+lqHYHXOewRmZgXXZfYIJF0BXDF8+HC3mChISfzh8D5dNaYPj7zWwsSmHgztn/y5/duaXfTp3o3xx3UHYMV7e1izpZWLT+oFwKade3lm9U6uHN2HIxoA4KHlLZw9rCfH90vumPvWLo7q2Y2PD062sfzdD3h7exsXjky2sbGljblv7eJzH+vD7Nmz/T4dwr+ni86ZxNCm4wB4cs7zHH3UAM44bQwALy1bwfq3N3L5RZ8AYMPGd3lyznyuv+oKJBER/PTBR7nkvCkcf2wyh8ysp5+j6bhjGX/qaAAWLV3O+5t/zyXnnQ1A8/q3eeb5BVx/1RUA7Nmzh3965F/qp8VEHrpqi4m/+vWCWocAwH/57Jm1DiETV39vVq1D4Fe3XX7AMV3l0NCFX/vrQxBJZc/c/ee1DiET9Xr5aKUWE11mj8AOjVvvnV3rEAC488vn1zoEs8Lo8ongnmeX1ToEAG664NRah2Bm9pH4ZLGZWcE5EZiZFZwTgZlZwTkRmJkVXJc/WWxmxfCJa+qjWfFzD9xV6xAy5z0CM7OC6zJ7BB1VFje27aLvzncB2NPYk5YegxjQ0gxASGzpM5R+O9+hoe0DALb1Gkz31h302LMNgJ09BrBXjfTZ9V66jV609BjIgJZ16TYa2NKniX473qZh7x4AtvY+jh57ttNjT1
IZuaPHQDZu3FixEhK6cYI2cQRtALwZAxnIDvprFwDvRD8EHKskri3Rky305gRtAmA3DTTHQIbrfRrZC8DqOJpjtJ2+JJWRG6I/jezlGCVxbY5ebKcnw5RUku5K3+5KlcX9G1vp07CXYT2T12vdru78vrWBU/smFZxbWxt4raUXZ/bfjgQRsGBrX07ps5P+jcn/bdn2XgxobGNIuo21u7rT0taNU/ok/9ff72ngjR09OfPIFgDaAhZt7cuYPjvo25j837Zv316xYnX8wDZe2dyNi5uS59y9F57Z0MjUY1vpd0TyOzPn7QZG9NvL8D5J0eTLm7vRFjB+YPIczTvE61u6ceHxyTZ2tMGctxs577hWeqcVvc9saODkI/cytHeyjZc2daNB8PGj9jJ79uzDprL4mrNGsmTNJja17OaCjx0PwPrNO3ju9Xf4/OSRAOxp28tDC97i4rFNHN23BwCPL13H8EF9+VjTkQAsevN9Wna3cu7owQCseb+FBave5XNnjgBg5542frNoDdNOG8KA3knsj720ltHHHfmHv+uOKou/dMlkALbt3M1vnnuJKz8xnn69kjgemrOY8aOGcVJTUo373NKVNDZ0Y8qpJyav8dp3eO2tDVw5dXzyO7h9B489/zKfO38CvbonvzC/eGYhZ506kuGDjwZg9u9ep0+v7px5ShL7sjc3sHr9e3+I05XFNVReWdxV6gi6SmVxVykoc2Vx9Q6XyuKucmioK1YW+9CQmVnBORGYmRWcE4GZWcE5EZiZFZwTgZlZwTkRmJkVnBOBmVnBORGYmRWcE4GZWcG5xYRbTLjFhFtMuMUEbjFRcUC9cYuJznGLiey4xUS23GIiW24xYWZmVXMiMDMrOCcCM7OCcyIwMys4JwIzs4LLNRFImiZphaSVkm5vZ/2Rkh6VtETSMkk35hmPmZntL7dEIKkBuAu4DBgDXCtpTNmwrwPLI2IccD7wD5K65xWTmZntL889gknAyohYFREfAA8A5RfYBtBPkoC+wCagNceYzMysTJ6VxUOAtSXLzcDksjE/AGYC64F+wOcjYm/5hiRNB6YDNDU1ubLYlcWuLHZlsSuLu0JlsaSrgUsj4qZ0+TpgUkTcWjLmKuAc4DbgJOApYFxEbO1ou64s7hxXFmfHlcXZcmVxtuqlsrgZGFayPJTkm3+pG4GHI7ESWA2ckmNMZmZWJs9EsAAYJWlkegL4GpLDQKXWABcBSBoMjAZW5RiTmZmVye0cQUS0SroFeAJoAO6NiGWSbk7XzwC+Ddwn6WVAwLci4r28YjIzs/3l2oY6ImYBs8rum1Fyez1wSZ4xmJlZZa4sNjMrOCcCM7OCcyIwMys4JwIzs4JzIjAzKzhPXu8WE24x4RYTbjGBW0xUHFBv3GKic9xiIjtuMZEtt5jIVr20mDAzsy7AicDMrOCcCMzMCs6JwMys4JwIzMwKzonAzKzgnAjMzArOicDMrOCcCMzMCs4tJtxiwi0m3GLCLSZwi4mKA+qNW0x0jltMZMctJrLlFhPZcosJMzOrmhOBmVnBORGYmRWcE4GZWcE5EZiZFZwTgZlZwTkRmJkVnBOBmVnBubLYlcWuLHZlsSuLcWVxxQH1xpXFnePK4uy4sjhbrizOliuLzcysak4EZmYF50RgZlZwTgRmZgXnRGBmVnBOBGZmBZdrIpA0TdIKSSsl3d7BmPMlvSRpmaQ5ecZjZmb7y62gTFIDcBdwMdAMLJA0MyKWl4wZANwNTIuINZKOzSseMzNrX1V7BJKmSroxvX2MpJFVPGwSsDIiVkXEB8ADQHmlxReAhyNiDUBEbKw+dDMzy8IB9wgk/SUwERgN/AQ4AvgZcM4BHjoEWFuy3AxMLhtzMnCEpNlAP+B/R8R+pZiSpgPTAZqamtxiwi0m3GLCLSbcYuJQtpiQ9BJwOrA4Ik5P71saEacd4HFXA5dGxE3p8nXApIi4tWTMD0iSzEVAL2A+8MmIeL2j7brFROe4xUR23GIiW2
4xka2DaTFRzTmCDyIiJEW6sT5VxtEMDCtZHgqsb2fMexHRArRImguMAzpMBGZmlq1qzhH8UtL/AQZI+irwr8A/VvG4BcAoSSMldQeuAWaWjfkN8AlJjZJ6kxw6erX68M3MrLMq7hFIEvAL4BRgK8l5gr+IiKcOtOGIaJV0C/AE0ADcGxHLJN2crp8REa9KehxYCuwF7omIVzr1PzIzs4NSMRGkh4QeiYgzgAN++Lfz+FnArLL7ZpQt3wHccbDbNjOzbFRzaOgFSZXPMJqZWZdVzcniC4CbJb0JtAAi2VmoeNWQmZl1DdUkgstyj8LMzGrmgIeGIuItYABwRfozIL3PzMwOA9VUFn8D+CrwcHrXzyT9KCLuzDWy/ePw5PWuLHZlsSuLXVlco8ripcCUtOhrX0HZ/FqdI3Blcee4sjg7rizOliuLs5X15PWC9Gtsoi29z8zMDgPVnCz+CfCipF+ny58BfpxfSGZmdigdMBFExPfS7qBTSfYEboyI3+UdmJmZHRrVnCw+C1gWEYvT5X6SJkfEi7lHZ2ZmuavmHMEPge0lyy3pfWZmdhio6mRxlFxaFBF7yXGKSzMzO7SqSQSrJP1HSUekP98AVuUdmJmZHRrVJIKbgbOBdfxxusnpeQZlZmaHTjVXDW0kmVTGzMwOQ9VcNfR3wHeAncDjJFNJfjMifpZzbOVxuMWEW0y4xYRbTLjFRK0mr4+I8ZI+S1JM9ifAsxExruIDc+IWE53jFhPZcYuJbLnFRLaybjGRfr/icuD+iNjUufDMzKyeVHMZ6KOSXiM5NPQ1SccAu/INy8zMDpVq5iO4HZgCTIyIPcAOoD72fczMrNOqKgyLiM0lt1tIqovNzOwwUM05AjMzO4w5EZiZFdxHSgSSTsk6EDMzq42PukfwZKZRmJlZzXR4sljS9ztaBQzIJ5yOubLYlcWuLHZlMbiy+JBWFkvaBvwnSD9lPuwfImJQxS3nxJXFnePK4uy4sjhbrizO1sFUFle6fHQB8EpEPF++QtJ/70yAZmZWPyolgqvooII4IkbmE46ZmR1qlU4W942IHYcsEjMzq4lKieCRfTckPXQIYjEzsxqolAhUcvvEvAMxM7PaqJQIooPbZmZ2GKmUCMZJ2ppeRnpaenurpG2StlazcUnTJK2QtFLS7RXGnSmpTdJVB/sfMDOzzunwqqGIaOjMhiU1AHcBF5NMer9A0syIWN7OuL8FnujM85mZ2UeTZ9O5ScDKiFgVER8AD9D+PAa3Ag8BG3OMxczMOlDVfAQf0RBgbclyMzC5dICkIcBngQuBDkteJU0HpgM0NTW5xYRbTLjFhFtMuMXEoZy8/qOSdDVwaUTclC5fB0yKiFtLxvyKpF3FC5LuAx6LiAcrbdctJjrHLSay4xYT2XKLiWxl1WKis5qBYSXLQ4H1ZWMmAg9IAhgEXC6pNSIewczMDok8E8ECYJSkkcA64BrgC6UDSltVlOwROAmYmR1CuSWCiGiVdAvJ1UANwL0RsUzSzen6GXk9t5mZVS/PPQIiYhYwq+y+dhNARNyQZyxmZtY+z1lsZlZwTgRmZgXnRGBmVnBOBGZmBedEYGZWcLleNZQlSVcAVwwfPtwtJtxiwi0m3GLCLSa6QouJvLjFROe4xUR23GIiW24xka2DaTHhQ0NmZgXnRGBmVnBOBGZmBedEYGZWcE4EZmYF50RgZlZwTgRmZgXnRGBmVnCuLHZlsSuLXVnsymJcWVxxQL1xZXHnuLI4O64szpYri7PlymIzM6uaE4GZWcE5EZiZFZwTgZlZwTkRmJkVnBOBmVnBORGYmRWcE4GZWcE5EZiZFZxbTLjFhFtMuMWEW0zgFhMVB9Qbt5joHLeYyI5bTGTLLSay5RYTZmZWNScCM7OCcyIwMys4JwIzs4JzIjAzK7hcE4GkaZJWSFop6fZ21n9R0tL053lJ4/KMx8zM9pdbIpDUANwFXAaMAa6VNKZs2GrgvIg4Dfg28KO84jEzs/bluU
cwCVgZEasi4gPgAeBDF9hGxPMRsTldfAEYmmM8ZmbWjjwri4cAa0uWm4HJFcZ/BfiX9lZImg5MB2hqanJlsSuLXVnsymJXFneFymJJVwOXRsRN6fJ1wKSIuLWdsRcAdwNTI+L9Stt1ZXHnuLI4O64szpYri7N1MJXFee4RNAPDSpaHAuvLB0k6DbgHuOxAScDMzLKX5zmCBcAoSSMldQeuAWaWDpB0AvAwcF1EvJ5jLGZm1oHc9ggiolXSLcATQANwb0Qsk3Rzun4G8BfA0cDdkgBaO9p1MTOzfOTahjoiZgGzyu6bUXL7JuCmPGMwM7PKXFlsZlZwTgRmZgXnRGBmVnBOBGZmBedEYGZWcJ683i0m3GLCLSbcYgK3mKg4oN64xUTnuMVEdtxiIltuMZEtT15vZmZVcyIwMys4JwIzs4JzIjAzKzgnAjOzgnMiMDMrOCcCM7OCcyIwMys4JwIzs4Jziwm3mHCLCbeYcIsJ3GKi4oB64xYTneMWE9lxi4lsucVEttxiwszMquZEYGZWcE4EZmYF50RgZlZwTgRmZgXnRGBmVnBOBGZmBedEYGZWcK4sdmWxK4tdWezKYlxZXHFAvXFlcee4sjg7rizOliuLs+XKYjMzq5oTgZlZwTkRmJkVnBOBmVnBORGYmRWcE4GZWcHlmggkTZO0QtJKSbe3s16Svp+uXyppQp7xmJnZ/nJLBJIagLuAy4AxwLWSxpQNuwwYlf5MB36YVzxmZta+PPcIJgErI2JVRHwAPACUV1pcCfw0Ei8AAyQdn2NMZmZWJrfKYklXAdMi4qZ0+TpgckTcUjLmMeBvImJeuvw08K2IWFi2rekkewwAo4EVGYc7CHgv423mwXFmy3FmpyvECMWOc3hEHNPeijx7Damd+8qzTjVjiIgfAT/KIqj2SFrYUel1PXGc2XKc2ekKMYLj7Eieh4aagWEly0OB9R9hjJmZ5SjPRLAAGCVppKTuwDXAzLIxM4Hr06uHzgK2RMSGHGMyM7MyuR0aiohWSbcATwANwL0RsUzSzen6GcAs4HJgJbADuDGveA4gt8NOGXOc2XKc2ekKMYLjbFeXa0NtZmbZcmWxmVnBORGYmRVcoROBpHslbZT0Sq1jqUTSMEnPSnpV0jJJ36h1TOUk9ZT0W0lL0hj/R61jqkRSg6TfpbUsdUnSm5JelvSSpIUHfkRtSBog6UFJr6W/o1NqHVM5SaPT13Hfz1ZJ36x1XO2R9Cfp39Arku6X1DP35yzyOQJJ5wLbSaqbx9Y6no6k1dbHR8RiSf2ARcBnImJ5jUP7A0kC+kTEdklHAPOAb6QV43VH0m3ARKB/RHyq1vG0R9KbwMSIqOsCKEn/F3guIu5JrxDsHRG/r3VcHUnb36wjKXB9q9bxlJI0hORvZ0xE7JT0S2BWRNyX5/MWeo8gIuYCm2odx4FExIaIWJze3ga8CgypbVQflrYJ2Z4uHpH+1OW3DElDgU8C99Q6lq5OUn/gXODHABHxQT0ngdRFwP+rtyRQohHoJakR6M0hqK0qdCLoiiSNAE4HXqxtJPtLD7e8BGwEnoqIuosx9b+APwP21jqQAwjgSUmL0jYr9ehE4F3gJ+mhtnsk9al1UAdwDXB/rYNoT0SsA/4eWANsIKmtejLv53Ui6EIk9QUeAr4ZEVtrHU+5iGiLiPEkFeKTJNXd4TZJnwI2RsSiWsdShXMiYgJJl96vp4cy600jMAH4YUScDrQA+7WcrxfpoatPA7+qdSztkXQUSTPOkUAT0EfSl/J+XieCLiI97v4Q8POIeLjW8VSSHhqYDUyrcSjtOQf4dHr8/QHgQkk/q21I7YuI9em/G4Ffk3T0rTfNQHPJ3t+DJImhXl0GLI6Id2odSAf+HbA6It6NiD3Aw8DZeT+pE0EXkJ6I/THwakR8r9bxtEfSMZIGpLd7kfxCv1bbqPYXEX8eEUMjYgTJIYJnIiL3b1wHS1Kf9MIA0kMtlwB1d3VbRLwNrJU0Or3rIq
BuLmJox7XU6WGh1BrgLEm907/7i0jOCeaq0IlA0v3AfGC0pGZJX6l1TB04B7iO5NvrvsvfLq91UGWOB56VtJSkz9RTEVG3l2Z2AYOBeZKWAL8F/jkiHq9xTB25Ffh5+t6PB75b43jaJak3cDHJt+y6lO5ZPQgsBl4m+YzOvd1EoS8fNTOzgu8RmJmZE4GZWeE5EZiZFZwTgZlZwTkRmJkVnBOB1ZyktrLOkCM+wjY+I2lM9tH9YfsnS5olaWXaYfOXkgZnsN3ZkvabpFzSpyV9pArdtBvo10qWmyQ92Jk47fDmy0et5iRtj4i+ndzGfcBjEVH1B56kxohorWJcT5Jrum+LiEfT+y4A3o2IThV5SZoN/GlEZNZmOk2kj9VzR12rL94jsLok6QxJc9KGa0+krbiR9FVJC9J5Dx5KKzDPJukfc0e6R3FS6TdtSYPSlhJIukHSryQ9StLQrY+SeSkWpE3TrmwnnC8A8/clAYCIeDYiXlEyD8NPlMwb8Ls0Qex7nkckPSpptaRbJN2WjnlB0sCS7X9J0vNp//lJJY//QXr7PknfT8esknRVen9fSU9LWpw+/77Y/wY4KX0t7pA0QumcGweI92FJj0t6Q9LfZfJGWpeQ2+T1Zgehl5KupQCrgX8P3AlcGRHvSvo88FfAl4GHI+IfASR9B/hKRNwpaSYlewRJdX6HpgCnRcQmSd8laTPx5bRFxm8l/WtEtJSMH0syB0R7vg4QER+XdApJcjm55HGnAz2BlcC3IuJ0Sf8TuJ6kCyok8zicraSp3L3p48odD0wFTgFmklSf7gI+GxFbJQ0CXkhfh9uBsWkDwH17CNXEOz6NdzewQtKdEbG2w1fRDhtOBFYPdu770AJQ0rV0LPBU+oHeQNKSF2BsmgAGAH2BJz7C8z0VEfvmobiEpAndn6bLPYETqL6/y1SSpEVEvCbpLWDfB+uz6fwR2yRtAfbtUbwMnFayjfvTx8+V1D9NSOUeiYi9wPKScxMCvpsmkL0kc1Qc6LxFpXifjogtAJKWA8MBJ4ICcCKweiRgWUS0N+XhfSSzsy2RdANwfgfbaOWPhz7Lp/or/bYv4HMRsaJCPMuA8yrE2pHdJbf3lizv5cN/e+Un6to7cVe6rX3P+UXgGOCMiNiTHv460LSG1cbbhj8fCsPnCKwerQCOUTr3raQjJJ2arusHbFDSlvuLJY/Zlq7b503gjPT2VRWe6wngVqW7HpJOb2fMPwFnS/rkvjskTZP0cWDuvjjSQywnpPEfjM+nj59KMhHJliofdyTJ3Ap70mP9w9P7y1+LUlnEa4cZJwKrOxHxAcmH998q6b75En/syf7fSGZne4oPt7l+APjP6QnQk0hmefoPkp4HBlV4um+TTKu5ND2h+u124tkJfIokYbyRHja5gWQmtruBBkkvA78AboiI3eXbOIDNaZwzgIPpgPtzYKKSie2/SPp6RMT7wL+lJ5/vKHtMFvHaYcaXj5qZFZz3CMzMCs6JwMys4JwIzMwKzonAzKzgnAjMzArOicDMrOCcCMzMCu7/A5N/FeM0BlVTAAAAAElFTkSuQmCC\n",
"text/plain": [
"