Skip to content
Snippets Groups Projects
ocr_project.ipynb 245 KiB
Newer Older
2001 2002 2003 2004 2005 2006 2007 2008 2009 2010 2011 2012 2013 2014 2015 2016 2017 2018 2019 2020 2021 2022 2023 2024 2025 2026 2027 2028 2029 2030 2031 2032 2033 2034 2035 2036 2037 2038 2039 2040 2041 2042 2043 2044 2045 2046 2047 2048 2049 2050 2051 2052 2053 2054 2055 2056 2057 2058 2059 2060 2061 2062 2063 2064 2065 2066 2067 2068 2069 2070 2071 2072 2073 2074 2075 2076 2077 2078 2079 2080 2081 2082 2083 2084 2085 2086 2087 2088 2089 2090 2091 2092 2093 2094 2095 2096 2097 2098 2099 2100 2101 2102 2103 2104 2105 2106 2107 2108 2109 2110 2111 2112 2113 2114 2115 2116 2117 2118 2119 2120 2121 2122 2123 2124 2125 2126 2127 2128 2129 2130 2131 2132 2133 2134 2135 2136 2137 2138 2139 2140 2141 2142 2143 2144 2145 2146 2147 2148 2149 2150 2151 2152 2153 2154 2155 2156 2157 2158 2159 2160 2161 2162 2163 2164 2165 2166 2167 2168 2169 2170 2171 2172 2173 2174 2175 2176 2177 2178 2179 2180 2181 2182 2183 2184 2185 2186 2187 2188 2189 2190 2191 2192 2193 2194 2195 2196 2197 2198 2199 2200 2201 2202 2203 2204 2205 2206 2207 2208 2209 2210 2211 2212 2213 2214 2215 2216 2217 2218 2219 2220 2221 2222 2223 2224 2225 2226 2227 2228 2229 2230 2231 2232 2233 2234 2235 2236 2237 2238 2239 2240 2241 2242 2243 2244 2245 2246 2247 2248 2249 2250 2251 2252 2253 2254 2255 2256 2257 2258 2259 2260 2261 2262 2263 2264 2265 2266 2267 2268 2269 2270 2271 2272 2273 2274 2275 2276 2277 2278 2279 2280 2281 2282 2283 2284 2285 2286 2287 2288 2289 2290 2291 2292 2293 2294 2295 2296 2297 2298 2299 2300 2301 2302 2303 2304 2305 2306 2307 2308 2309 2310 2311 2312 2313 2314 2315 2316 2317 2318 2319 2320 2321 2322 2323 2324 2325 2326 2327 2328 2329 2330 2331 2332 2333 2334 2335 2336 2337 2338 2339 2340 2341 2342 2343 2344 2345 2346 2347 2348 2349 2350 2351
      "62/62 [==============================] - 1s 20ms/step - loss: 0.0418 - accuracy: 0.9876 - val_loss: 1.9749 - val_accuracy: 0.6921\n",
      "Epoch 37/80\n",
      "62/62 [==============================] - 1s 23ms/step - loss: 0.0465 - accuracy: 0.9853 - val_loss: 1.9971 - val_accuracy: 0.6716\n",
      "Epoch 38/80\n",
      "62/62 [==============================] - 1s 22ms/step - loss: 0.0718 - accuracy: 0.9775 - val_loss: 2.0241 - val_accuracy: 0.6862\n",
      "Epoch 39/80\n",
      "62/62 [==============================] - 2s 24ms/step - loss: 0.0584 - accuracy: 0.9801 - val_loss: 1.9995 - val_accuracy: 0.6833\n",
      "Epoch 40/80\n",
      "62/62 [==============================] - 1s 21ms/step - loss: 0.0342 - accuracy: 0.9899 - val_loss: 2.1054 - val_accuracy: 0.6657\n",
      "Epoch 41/80\n",
      "62/62 [==============================] - 1s 23ms/step - loss: 0.0263 - accuracy: 0.9925 - val_loss: 2.1175 - val_accuracy: 0.6833\n",
      "Epoch 42/80\n",
      "62/62 [==============================] - 1s 21ms/step - loss: 0.0124 - accuracy: 0.9964 - val_loss: 2.0999 - val_accuracy: 0.6862\n",
      "Epoch 43/80\n",
      "62/62 [==============================] - 1s 20ms/step - loss: 0.0403 - accuracy: 0.9889 - val_loss: 2.2080 - val_accuracy: 0.6716\n",
      "Epoch 44/80\n",
      "62/62 [==============================] - 1s 19ms/step - loss: 0.0590 - accuracy: 0.9837 - val_loss: 2.2236 - val_accuracy: 0.6774\n",
      "Epoch 45/80\n",
      "62/62 [==============================] - 1s 20ms/step - loss: 0.0852 - accuracy: 0.9775 - val_loss: 1.9087 - val_accuracy: 0.6686\n",
      "Epoch 46/80\n",
      "62/62 [==============================] - 1s 21ms/step - loss: 0.0298 - accuracy: 0.9932 - val_loss: 1.9759 - val_accuracy: 0.6804\n",
      "Epoch 47/80\n",
      "62/62 [==============================] - 1s 21ms/step - loss: 0.0225 - accuracy: 0.9935 - val_loss: 2.0122 - val_accuracy: 0.6950\n",
      "Epoch 48/80\n",
      "62/62 [==============================] - 1s 21ms/step - loss: 0.0143 - accuracy: 0.9961 - val_loss: 1.9644 - val_accuracy: 0.6950\n",
      "Epoch 49/80\n",
      "62/62 [==============================] - 1s 21ms/step - loss: 0.0059 - accuracy: 0.9980 - val_loss: 2.0185 - val_accuracy: 0.6921\n",
      "Epoch 50/80\n",
      "62/62 [==============================] - 1s 21ms/step - loss: 0.0058 - accuracy: 0.9984 - val_loss: 2.0507 - val_accuracy: 0.6979\n",
      "Epoch 51/80\n",
      "62/62 [==============================] - 1s 21ms/step - loss: 0.0055 - accuracy: 0.9984 - val_loss: 2.1037 - val_accuracy: 0.6979\n",
      "Epoch 52/80\n",
      "62/62 [==============================] - 1s 22ms/step - loss: 0.0065 - accuracy: 0.9984 - val_loss: 2.1036 - val_accuracy: 0.6950\n",
      "Epoch 53/80\n",
      "62/62 [==============================] - 1s 21ms/step - loss: 0.0042 - accuracy: 0.9987 - val_loss: 2.1285 - val_accuracy: 0.6921\n",
      "Epoch 54/80\n",
      "62/62 [==============================] - 1s 21ms/step - loss: 0.0044 - accuracy: 0.9990 - val_loss: 2.1856 - val_accuracy: 0.6950\n",
      "Epoch 55/80\n",
      "62/62 [==============================] - 1s 21ms/step - loss: 0.0044 - accuracy: 0.9984 - val_loss: 2.2159 - val_accuracy: 0.6950\n",
      "Epoch 56/80\n",
      "62/62 [==============================] - 1s 23ms/step - loss: 0.0050 - accuracy: 0.9987 - val_loss: 2.2115 - val_accuracy: 0.6921\n",
      "Epoch 57/80\n",
      "62/62 [==============================] - 1s 21ms/step - loss: 0.0054 - accuracy: 0.9984 - val_loss: 2.2214 - val_accuracy: 0.6891\n",
      "Epoch 58/80\n",
      "62/62 [==============================] - 1s 20ms/step - loss: 0.0054 - accuracy: 0.9984 - val_loss: 2.1961 - val_accuracy: 0.7067\n",
      "Epoch 59/80\n",
      "62/62 [==============================] - 1s 21ms/step - loss: 0.0056 - accuracy: 0.9984 - val_loss: 2.1579 - val_accuracy: 0.6950\n",
      "Epoch 60/80\n",
      "62/62 [==============================] - 1s 22ms/step - loss: 0.0050 - accuracy: 0.9987 - val_loss: 2.2094 - val_accuracy: 0.6833\n",
      "Epoch 61/80\n",
      "62/62 [==============================] - 2s 25ms/step - loss: 0.0063 - accuracy: 0.9980 - val_loss: 2.2343 - val_accuracy: 0.6891\n",
      "Epoch 62/80\n",
      "62/62 [==============================] - 2s 26ms/step - loss: 0.0042 - accuracy: 0.9984 - val_loss: 2.2284 - val_accuracy: 0.7038\n",
      "Epoch 63/80\n",
      "62/62 [==============================] - 1s 24ms/step - loss: 0.0049 - accuracy: 0.9980 - val_loss: 2.2338 - val_accuracy: 0.6979\n",
      "Epoch 64/80\n",
      "62/62 [==============================] - 1s 21ms/step - loss: 0.0091 - accuracy: 0.9974 - val_loss: 2.6446 - val_accuracy: 0.6686\n",
      "Epoch 65/80\n",
      "62/62 [==============================] - 1s 19ms/step - loss: 0.2005 - accuracy: 0.9342 - val_loss: 2.0209 - val_accuracy: 0.6481\n",
      "Epoch 66/80\n",
      "62/62 [==============================] - 1s 20ms/step - loss: 0.2535 - accuracy: 0.9185 - val_loss: 2.1616 - val_accuracy: 0.6305\n",
      "Epoch 67/80\n",
      "62/62 [==============================] - 1s 21ms/step - loss: 0.1401 - accuracy: 0.9508 - val_loss: 1.9564 - val_accuracy: 0.6979\n",
      "Epoch 68/80\n",
      "62/62 [==============================] - 1s 21ms/step - loss: 0.0759 - accuracy: 0.9788 - val_loss: 1.9610 - val_accuracy: 0.6833\n",
      "Epoch 69/80\n",
      "62/62 [==============================] - 1s 22ms/step - loss: 0.0220 - accuracy: 0.9935 - val_loss: 1.8584 - val_accuracy: 0.7243\n",
      "Epoch 70/80\n",
      "62/62 [==============================] - 1s 20ms/step - loss: 0.0138 - accuracy: 0.9971 - val_loss: 1.9887 - val_accuracy: 0.7273\n",
      "Epoch 71/80\n",
      "62/62 [==============================] - 2s 24ms/step - loss: 0.0078 - accuracy: 0.9977 - val_loss: 2.0536 - val_accuracy: 0.7126\n",
      "Epoch 72/80\n",
      "62/62 [==============================] - 1s 24ms/step - loss: 0.0107 - accuracy: 0.9980 - val_loss: 2.0608 - val_accuracy: 0.7009\n",
      "Epoch 73/80\n",
      "62/62 [==============================] - 1s 20ms/step - loss: 0.0021 - accuracy: 1.0000 - val_loss: 2.0809 - val_accuracy: 0.7038\n",
      "Epoch 74/80\n",
      "62/62 [==============================] - 1s 20ms/step - loss: 0.0020 - accuracy: 0.9997 - val_loss: 2.1431 - val_accuracy: 0.7214\n",
      "Epoch 75/80\n",
      "62/62 [==============================] - 1s 21ms/step - loss: 0.0027 - accuracy: 0.9993 - val_loss: 2.1357 - val_accuracy: 0.7155\n",
      "Epoch 76/80\n",
      "62/62 [==============================] - 1s 21ms/step - loss: 0.0018 - accuracy: 0.9997 - val_loss: 2.1865 - val_accuracy: 0.7155\n",
      "Epoch 77/80\n",
      "62/62 [==============================] - 1s 21ms/step - loss: 0.0013 - accuracy: 1.0000 - val_loss: 2.2132 - val_accuracy: 0.7155\n",
      "Epoch 78/80\n",
      "62/62 [==============================] - 1s 20ms/step - loss: 0.0021 - accuracy: 0.9990 - val_loss: 2.2929 - val_accuracy: 0.7273\n",
      "Epoch 79/80\n",
      "62/62 [==============================] - 2s 25ms/step - loss: 0.0037 - accuracy: 0.9987 - val_loss: 2.2580 - val_accuracy: 0.7038\n",
      "Epoch 80/80\n",
      "62/62 [==============================] - 1s 22ms/step - loss: 0.0137 - accuracy: 0.9958 - val_loss: 2.1615 - val_accuracy: 0.7214\n",
      "Score for fold 9: loss of 2.161499500274658; accuracy of 72.14076519012451%\n",
      "------------------------------------------------------------------------\n",
      "Training for fold 10 ...\n",
      "Epoch 1/80\n",
      "62/62 [==============================] - 3s 32ms/step - loss: 4.0988 - accuracy: 0.0316 - val_loss: 3.9318 - val_accuracy: 0.0850\n",
      "Epoch 2/80\n",
      "62/62 [==============================] - 2s 27ms/step - loss: 3.2523 - accuracy: 0.1965 - val_loss: 2.8015 - val_accuracy: 0.2551\n",
      "Epoch 3/80\n",
      "62/62 [==============================] - 2s 25ms/step - loss: 2.1609 - accuracy: 0.4174 - val_loss: 2.1322 - val_accuracy: 0.3988\n",
      "Epoch 4/80\n",
      "62/62 [==============================] - 2s 25ms/step - loss: 1.6546 - accuracy: 0.5419 - val_loss: 1.7082 - val_accuracy: 0.5367\n",
      "Epoch 5/80\n",
      "62/62 [==============================] - 1s 21ms/step - loss: 1.2903 - accuracy: 0.6344 - val_loss: 1.5384 - val_accuracy: 0.5630\n",
      "Epoch 6/80\n",
      "62/62 [==============================] - 1s 20ms/step - loss: 1.0680 - accuracy: 0.6830 - val_loss: 1.5125 - val_accuracy: 0.5572\n",
      "Epoch 7/80\n",
      "62/62 [==============================] - 1s 20ms/step - loss: 0.9057 - accuracy: 0.7263 - val_loss: 1.3382 - val_accuracy: 0.6276\n",
      "Epoch 8/80\n",
      "62/62 [==============================] - 1s 19ms/step - loss: 0.7582 - accuracy: 0.7638 - val_loss: 1.3581 - val_accuracy: 0.6129\n",
      "Epoch 9/80\n",
      "62/62 [==============================] - 1s 19ms/step - loss: 0.6591 - accuracy: 0.7908 - val_loss: 1.3036 - val_accuracy: 0.6510\n",
      "Epoch 10/80\n",
      "62/62 [==============================] - 1s 22ms/step - loss: 0.5628 - accuracy: 0.8254 - val_loss: 1.2775 - val_accuracy: 0.6569\n",
      "Epoch 11/80\n",
      "62/62 [==============================] - 1s 21ms/step - loss: 0.4706 - accuracy: 0.8543 - val_loss: 1.2692 - val_accuracy: 0.6540\n",
      "Epoch 12/80\n",
      "62/62 [==============================] - 1s 22ms/step - loss: 0.4159 - accuracy: 0.8615 - val_loss: 1.2941 - val_accuracy: 0.6716\n",
      "Epoch 13/80\n",
      "62/62 [==============================] - 1s 22ms/step - loss: 0.3521 - accuracy: 0.8869 - val_loss: 1.3421 - val_accuracy: 0.6422\n",
      "Epoch 14/80\n",
      "62/62 [==============================] - 1s 21ms/step - loss: 0.3041 - accuracy: 0.9045 - val_loss: 1.2835 - val_accuracy: 0.6950\n",
      "Epoch 15/80\n",
      "62/62 [==============================] - 1s 22ms/step - loss: 0.2570 - accuracy: 0.9198 - val_loss: 1.4327 - val_accuracy: 0.6686\n",
      "Epoch 16/80\n",
      "62/62 [==============================] - 1s 20ms/step - loss: 0.2081 - accuracy: 0.9358 - val_loss: 1.4217 - val_accuracy: 0.6628\n",
      "Epoch 17/80\n",
      "62/62 [==============================] - 2s 25ms/step - loss: 0.1817 - accuracy: 0.9423 - val_loss: 1.5014 - val_accuracy: 0.6598\n",
      "Epoch 18/80\n",
      "62/62 [==============================] - 1s 24ms/step - loss: 0.1632 - accuracy: 0.9479 - val_loss: 1.6336 - val_accuracy: 0.6833\n",
      "Epoch 19/80\n",
      "62/62 [==============================] - 2s 25ms/step - loss: 0.1646 - accuracy: 0.9466 - val_loss: 1.4928 - val_accuracy: 0.6716\n",
      "Epoch 20/80\n",
      "62/62 [==============================] - 1s 22ms/step - loss: 0.1249 - accuracy: 0.9664 - val_loss: 1.5707 - val_accuracy: 0.6598\n",
      "Epoch 21/80\n",
      "62/62 [==============================] - 1s 21ms/step - loss: 0.1014 - accuracy: 0.9707 - val_loss: 1.7214 - val_accuracy: 0.6921\n",
      "Epoch 22/80\n",
      "62/62 [==============================] - 1s 23ms/step - loss: 0.1041 - accuracy: 0.9645 - val_loss: 1.6541 - val_accuracy: 0.6833\n",
      "Epoch 23/80\n",
      "62/62 [==============================] - 1s 23ms/step - loss: 0.1005 - accuracy: 0.9717 - val_loss: 1.6808 - val_accuracy: 0.6628\n",
      "Epoch 24/80\n",
      "62/62 [==============================] - 1s 23ms/step - loss: 0.0876 - accuracy: 0.9743 - val_loss: 1.6991 - val_accuracy: 0.6628\n",
      "Epoch 25/80\n",
      "62/62 [==============================] - 1s 22ms/step - loss: 0.0730 - accuracy: 0.9814 - val_loss: 1.7868 - val_accuracy: 0.6862\n",
      "Epoch 26/80\n",
      "62/62 [==============================] - 1s 22ms/step - loss: 0.0861 - accuracy: 0.9730 - val_loss: 1.9939 - val_accuracy: 0.6657\n",
      "Epoch 27/80\n",
      "62/62 [==============================] - 1s 22ms/step - loss: 0.1211 - accuracy: 0.9580 - val_loss: 1.8650 - val_accuracy: 0.6481\n",
      "Epoch 28/80\n",
      "62/62 [==============================] - 1s 20ms/step - loss: 0.1213 - accuracy: 0.9619 - val_loss: 1.7981 - val_accuracy: 0.6745\n",
      "Epoch 29/80\n",
      "62/62 [==============================] - 1s 20ms/step - loss: 0.0621 - accuracy: 0.9795 - val_loss: 1.8225 - val_accuracy: 0.6950\n",
      "Epoch 30/80\n",
      "62/62 [==============================] - 1s 19ms/step - loss: 0.0417 - accuracy: 0.9886 - val_loss: 1.8746 - val_accuracy: 0.6862\n",
      "Epoch 31/80\n",
      "62/62 [==============================] - 1s 21ms/step - loss: 0.0287 - accuracy: 0.9915 - val_loss: 1.8727 - val_accuracy: 0.6950\n",
      "Epoch 32/80\n",
      "62/62 [==============================] - 1s 22ms/step - loss: 0.0322 - accuracy: 0.9902 - val_loss: 1.9873 - val_accuracy: 0.6950\n",
      "Epoch 33/80\n",
      "62/62 [==============================] - 1s 22ms/step - loss: 0.0477 - accuracy: 0.9883 - val_loss: 2.0559 - val_accuracy: 0.6774\n",
      "Epoch 34/80\n",
      "62/62 [==============================] - 1s 22ms/step - loss: 0.0594 - accuracy: 0.9795 - val_loss: 1.9762 - val_accuracy: 0.6950\n",
      "Epoch 35/80\n",
      "62/62 [==============================] - 1s 21ms/step - loss: 0.0662 - accuracy: 0.9808 - val_loss: 1.9717 - val_accuracy: 0.6804\n",
      "Epoch 36/80\n",
      "62/62 [==============================] - 1s 21ms/step - loss: 0.0337 - accuracy: 0.9909 - val_loss: 2.0102 - val_accuracy: 0.6804\n",
      "Epoch 37/80\n",
      "62/62 [==============================] - 1s 20ms/step - loss: 0.0250 - accuracy: 0.9935 - val_loss: 2.1170 - val_accuracy: 0.6774\n",
      "Epoch 38/80\n",
      "62/62 [==============================] - 1s 20ms/step - loss: 0.0213 - accuracy: 0.9941 - val_loss: 2.0537 - val_accuracy: 0.6833\n",
      "Epoch 39/80\n",
      "62/62 [==============================] - 1s 23ms/step - loss: 0.0221 - accuracy: 0.9925 - val_loss: 2.0892 - val_accuracy: 0.6921\n",
      "Epoch 40/80\n",
      "62/62 [==============================] - 1s 21ms/step - loss: 0.0373 - accuracy: 0.9879 - val_loss: 2.1625 - val_accuracy: 0.6686\n",
      "Epoch 41/80\n",
      "62/62 [==============================] - 1s 21ms/step - loss: 0.0380 - accuracy: 0.9870 - val_loss: 2.0608 - val_accuracy: 0.6833\n",
      "Epoch 42/80\n",
      "62/62 [==============================] - 2s 24ms/step - loss: 0.0629 - accuracy: 0.9814 - val_loss: 2.2050 - val_accuracy: 0.6745\n",
      "Epoch 43/80\n",
      "62/62 [==============================] - 1s 22ms/step - loss: 0.0960 - accuracy: 0.9681 - val_loss: 2.2725 - val_accuracy: 0.6598\n",
      "Epoch 44/80\n",
      "62/62 [==============================] - 1s 21ms/step - loss: 0.0980 - accuracy: 0.9694 - val_loss: 2.1557 - val_accuracy: 0.6540\n",
      "Epoch 45/80\n",
      "62/62 [==============================] - 1s 22ms/step - loss: 0.0654 - accuracy: 0.9746 - val_loss: 2.2921 - val_accuracy: 0.6481\n",
      "Epoch 46/80\n",
      "62/62 [==============================] - 1s 21ms/step - loss: 0.0513 - accuracy: 0.9840 - val_loss: 2.1890 - val_accuracy: 0.6862\n",
      "Epoch 47/80\n",
      "62/62 [==============================] - 2s 24ms/step - loss: 0.0279 - accuracy: 0.9925 - val_loss: 2.1796 - val_accuracy: 0.6921\n",
      "Epoch 48/80\n",
      "62/62 [==============================] - 1s 24ms/step - loss: 0.0114 - accuracy: 0.9971 - val_loss: 2.2145 - val_accuracy: 0.6862\n",
      "Epoch 49/80\n",
      "62/62 [==============================] - 2s 27ms/step - loss: 0.0084 - accuracy: 0.9977 - val_loss: 2.2101 - val_accuracy: 0.6921\n",
      "Epoch 50/80\n",
      "62/62 [==============================] - 1s 24ms/step - loss: 0.0129 - accuracy: 0.9961 - val_loss: 2.1808 - val_accuracy: 0.6921\n",
      "Epoch 51/80\n",
      "62/62 [==============================] - 1s 21ms/step - loss: 0.0098 - accuracy: 0.9971 - val_loss: 2.1699 - val_accuracy: 0.7038\n",
      "Epoch 52/80\n",
      "62/62 [==============================] - 1s 21ms/step - loss: 0.0074 - accuracy: 0.9980 - val_loss: 2.2066 - val_accuracy: 0.6950\n",
      "Epoch 53/80\n",
      "62/62 [==============================] - 1s 22ms/step - loss: 0.0068 - accuracy: 0.9974 - val_loss: 2.2522 - val_accuracy: 0.6950\n",
      "Epoch 54/80\n",
      "62/62 [==============================] - 2s 25ms/step - loss: 0.0092 - accuracy: 0.9967 - val_loss: 2.2748 - val_accuracy: 0.6862\n",
      "Epoch 55/80\n",
      "62/62 [==============================] - 1s 23ms/step - loss: 0.0112 - accuracy: 0.9964 - val_loss: 2.2931 - val_accuracy: 0.6950\n",
      "Epoch 56/80\n",
      "62/62 [==============================] - 2s 25ms/step - loss: 0.0083 - accuracy: 0.9971 - val_loss: 2.2434 - val_accuracy: 0.6979\n",
      "Epoch 57/80\n",
      "62/62 [==============================] - 1s 22ms/step - loss: 0.0062 - accuracy: 0.9971 - val_loss: 2.2799 - val_accuracy: 0.6891\n",
      "Epoch 58/80\n",
      "62/62 [==============================] - 1s 22ms/step - loss: 0.0106 - accuracy: 0.9961 - val_loss: 2.2980 - val_accuracy: 0.6950\n",
      "Epoch 59/80\n",
      "62/62 [==============================] - 1s 21ms/step - loss: 0.0084 - accuracy: 0.9974 - val_loss: 2.2835 - val_accuracy: 0.7097\n",
      "Epoch 60/80\n",
      "62/62 [==============================] - 1s 23ms/step - loss: 0.0084 - accuracy: 0.9964 - val_loss: 2.3406 - val_accuracy: 0.6862\n",
      "Epoch 61/80\n",
      "62/62 [==============================] - 1s 22ms/step - loss: 0.0117 - accuracy: 0.9961 - val_loss: 2.3019 - val_accuracy: 0.6921\n",
      "Epoch 62/80\n",
      "62/62 [==============================] - 1s 24ms/step - loss: 0.0071 - accuracy: 0.9980 - val_loss: 2.2639 - val_accuracy: 0.7038\n",
      "Epoch 63/80\n",
      "62/62 [==============================] - 2s 25ms/step - loss: 0.0130 - accuracy: 0.9967 - val_loss: 2.2944 - val_accuracy: 0.6950\n",
      "Epoch 64/80\n",
      "62/62 [==============================] - 1s 23ms/step - loss: 0.1294 - accuracy: 0.9658 - val_loss: 2.5276 - val_accuracy: 0.5953\n",
      "Epoch 65/80\n",
      "62/62 [==============================] - 1s 23ms/step - loss: 0.2799 - accuracy: 0.9065 - val_loss: 2.0809 - val_accuracy: 0.6129\n",
      "Epoch 66/80\n",
      "62/62 [==============================] - 1s 23ms/step - loss: 0.1344 - accuracy: 0.9573 - val_loss: 2.1698 - val_accuracy: 0.6598\n",
      "Epoch 67/80\n",
      "62/62 [==============================] - 1s 23ms/step - loss: 0.0675 - accuracy: 0.9769 - val_loss: 2.4213 - val_accuracy: 0.6745\n",
      "Epoch 68/80\n",
      "62/62 [==============================] - 1s 23ms/step - loss: 0.0564 - accuracy: 0.9834 - val_loss: 2.0034 - val_accuracy: 0.6745\n",
      "Epoch 69/80\n",
      "62/62 [==============================] - 1s 24ms/step - loss: 0.0314 - accuracy: 0.9899 - val_loss: 2.0989 - val_accuracy: 0.6862\n",
      "Epoch 70/80\n",
      "62/62 [==============================] - 2s 25ms/step - loss: 0.0160 - accuracy: 0.9951 - val_loss: 2.1941 - val_accuracy: 0.6950\n",
      "Epoch 71/80\n",
      "62/62 [==============================] - 1s 22ms/step - loss: 0.0143 - accuracy: 0.9967 - val_loss: 2.2414 - val_accuracy: 0.6774\n",
      "Epoch 72/80\n",
      "62/62 [==============================] - 1s 24ms/step - loss: 0.0110 - accuracy: 0.9977 - val_loss: 2.2457 - val_accuracy: 0.6833\n",
      "Epoch 73/80\n",
      "62/62 [==============================] - 1s 23ms/step - loss: 0.0142 - accuracy: 0.9954 - val_loss: 2.3332 - val_accuracy: 0.6979\n",
      "Epoch 74/80\n",
      "62/62 [==============================] - 1s 23ms/step - loss: 0.0168 - accuracy: 0.9951 - val_loss: 2.2853 - val_accuracy: 0.6862\n",
      "Epoch 75/80\n",
      "62/62 [==============================] - 1s 23ms/step - loss: 0.0283 - accuracy: 0.9915 - val_loss: 2.4522 - val_accuracy: 0.6862\n",
      "Epoch 76/80\n",
      "62/62 [==============================] - 1s 21ms/step - loss: 0.0166 - accuracy: 0.9964 - val_loss: 2.4504 - val_accuracy: 0.6862\n",
      "Epoch 77/80\n",
      "62/62 [==============================] - 2s 25ms/step - loss: 0.0255 - accuracy: 0.9932 - val_loss: 2.4046 - val_accuracy: 0.6774\n",
      "Epoch 78/80\n",
      "62/62 [==============================] - 1s 24ms/step - loss: 0.0214 - accuracy: 0.9948 - val_loss: 2.4315 - val_accuracy: 0.6804\n",
      "Epoch 79/80\n",
      "62/62 [==============================] - 1s 22ms/step - loss: 0.0083 - accuracy: 0.9984 - val_loss: 2.4370 - val_accuracy: 0.6921\n",
      "Epoch 80/80\n",
      "62/62 [==============================] - 1s 22ms/step - loss: 0.0029 - accuracy: 0.9993 - val_loss: 2.4862 - val_accuracy: 0.7009\n",
      "Score for fold 10: loss of 2.486210823059082; accuracy of 70.08797526359558%\n"
     ]
    }
   ],
   "source": [
    "# 10-fold cross-validation: train a fresh CNN per fold and record held-out metrics.\n",
    "acc_per_fold = []\n",
    "loss_per_fold = []\n",
    "kf = KFold(n_splits=10, random_state=42, shuffle=True)\n",
    "for fold, (train_index, test_index) in enumerate(kf.split(data, labels), start=1):\n",
    "    # Build a brand-new model each fold so no weights leak between folds.\n",
    "    model = models.Sequential([keras.Input(shape=(32, 32, 1))])\n",
    "    model.add(layers.Conv2D(32, (3, 3), activation='relu'))\n",
    "    model.add(layers.MaxPooling2D((2, 2)))\n",
    "    model.add(layers.Conv2D(64, (3, 3), activation='relu'))\n",
    "    model.add(layers.MaxPooling2D((2, 2)))\n",
    "    model.add(layers.Conv2D(64, (3, 3), activation='relu'))\n",
    "    model.add(layers.Flatten())\n",
    "    model.add(layers.Dense(128, activation='relu'))\n",
    "    # 62 output classes -- presumably 10 digits + 26 upper + 26 lower case letters; confirm against the label encoding.\n",
    "    model.add(layers.Dense(62, activation='softmax'))\n",
    "\n",
    "    model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=1e-3),\n",
    "                  loss=keras.losses.categorical_crossentropy,\n",
    "                  metrics=['accuracy'])\n",
    "\n",
    "    print('------------------------------------------------------------------------')\n",
    "    print(f'Training for fold {fold} ...')\n",
    "\n",
    "    train_images, test_images = data[train_index], data[test_index]\n",
    "    train_labels, test_labels = labels[train_index], labels[test_index]\n",
    "\n",
    "    # Add the single channel dimension expected by the Conv2D input layer.\n",
    "    train_images = train_images.reshape(-1, 32, 32, 1)\n",
    "    test_images = test_images.reshape(-1, 32, 32, 1)\n",
    "\n",
    "    history = model.fit(x=train_images,\n",
    "                        y=train_labels,\n",
    "                        validation_data=(test_images, test_labels),\n",
    "                        batch_size=BATCH_SIZE,\n",
    "                        epochs=EPOCHS,\n",
    "                        class_weight=classWeight)\n",
    "\n",
    "    # Generate generalization metrics on this fold's held-out split.\n",
    "    scores = model.evaluate(test_images, test_labels, verbose=0)\n",
    "    print(f'Score for fold {fold}: {model.metrics_names[0]} of {scores[0]}; {model.metrics_names[1]} of {scores[1]*100}%')\n",
    "    acc_per_fold.append(scores[1] * 100)\n",
    "    loss_per_fold.append(scores[0])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 169,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "------------------------------------------------------------------------\n",
      "Score per fold\n",
      "------------------------------------------------------------------------\n",
      "> Fold 1 - Loss: 2.8269495964050293 - Accuracy: 63.63636255264282%\n",
      "------------------------------------------------------------------------\n",
      "> Fold 2 - Loss: 1.9607480764389038 - Accuracy: 68.62170100212097%\n",
      "------------------------------------------------------------------------\n",
      "> Fold 3 - Loss: 2.3371033668518066 - Accuracy: 70.3812301158905%\n",
      "------------------------------------------------------------------------\n",
      "> Fold 4 - Loss: 2.010115623474121 - Accuracy: 69.79472041130066%\n",
      "------------------------------------------------------------------------\n",
      "> Fold 5 - Loss: 2.6227874755859375 - Accuracy: 72.14076519012451%\n",
      "------------------------------------------------------------------------\n",
      "> Fold 6 - Loss: 2.587796926498413 - Accuracy: 68.62170100212097%\n",
      "------------------------------------------------------------------------\n",
      "> Fold 7 - Loss: 1.7485564947128296 - Accuracy: 70.67448496818542%\n",
      "------------------------------------------------------------------------\n",
      "> Fold 8 - Loss: 2.2871928215026855 - Accuracy: 70.67448496818542%\n",
      "------------------------------------------------------------------------\n",
      "> Fold 9 - Loss: 2.161499500274658 - Accuracy: 72.14076519012451%\n",
      "------------------------------------------------------------------------\n",
      "> Fold 10 - Loss: 2.486210823059082 - Accuracy: 70.08797526359558%\n",
      "------------------------------------------------------------------------\n",
      "Average scores for all folds:\n",
      "> Accuracy: 69.67741906642914 (+- 2.3135587334320644)\n",
      "> Loss: 2.3028960704803465\n",
      "------------------------------------------------------------------------\n"
     ]
    }
   ],
   "source": [
    "# Summarize cross-validation results: per-fold metrics, then mean and spread.\n",
    "print('------------------------------------------------------------------------')\n",
    "print('Score per fold')\n",
    "for i, (fold_loss, fold_acc) in enumerate(zip(loss_per_fold, acc_per_fold), start=1):\n",
    "  print('------------------------------------------------------------------------')\n",
    "  print(f'> Fold {i} - Loss: {fold_loss} - Accuracy: {fold_acc}%')\n",
    "print('------------------------------------------------------------------------')\n",
    "print('Average scores for all folds:')\n",
    "print(f'> Accuracy: {np.mean(acc_per_fold)} (+- {np.std(acc_per_fold)})')\n",
    "print(f'> Loss: {np.mean(loss_per_fold)}')\n",
    "print('------------------------------------------------------------------------')\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Validation\n",
    "\n",
    "Now, we need to validate our model. Using the test data we split earlier, we can evaluate it. Let's plot our training accuracy."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 139,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYIAAAEWCAYAAABrDZDcAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAABEaUlEQVR4nO3dd3hUZfbA8e9Jb5BCQi8B6Sg1AgIKdlQUu2DFta+uouvuz3VX11W36u5a1nUXOxawICwqioIUFQRC79ICBJKQ3vu8vz/eCUxCEpKQyQyZ83mePMwtc+fMncs99y33vWKMQSmllO/y83QASimlPEsTgVJK+ThNBEop5eM0ESillI/TRKCUUj5OE4FSSvk4TQTqpInIlyJyW3Ov60kikiQiF7hhu0tF5M7m3q5SJ0MTgY8SkQKXP4eIFLtM39SYbRljLjHGvNPc63ojEXlMRJbXMj9WRMpE5HRPxOUSxzQRMSJygyfjUKcWTQQ+yhgTUfUHHAAud5n3ftV6IhLguSi90nvAGBHpWWP+FGCzMWaLB2JydRuQBdzakh+qx8mpTROBqkZEJohIsoj8n4ikAm+JSLSIfC4i6SKS7Xzd1eU9R6s7nFek34vI885194nIJU1ct6eILBeRfBFZJCKviMh7dcTdkBifEZEfnNv7WkRiXZbfIiL7RSRTRH5b1/4xxiQD3wK31Fh0KzDzRHHUiPkp1+8jIvHOq/kA53SkiLwhIikickhEnhUR/7piE5EewHjgbuBiEenossxfRB4XkT3O779WRLo5lw0SkW9EJEtE0kTkcef8t0XkWZdtTBCRZJfpJOdxsgkoFJEAZ4mp6jO2ichVNWK8S0S2uywfLiK/EpE5NdZ7SURerOu7qualiUDVpiMQA/TAnlT8gLec092BYuBf9bx/FLATiAX+BrwhItKEdT8AVgPtgKc4/uTrqiEx3gjcDrQHgoBHAURkIPCqc/udnZ9X68nb6R3XWESkHzDUGW9j91V93gYqgN7AMOAioL72hVuBRGPMHGA74FrF9wgwFbgUaAv8DCgSkTbAIuAr7HfvDSxuRIxTgcuAKGNMBbAHOBuIBP4AvCcinQBE5Drs73irM4YrgExsKWuiiEQ51wvAlrBmNiIOdTKMMfrn439AEnCB8/UEoAwIqWf9oUC2y/RS4E7n62nAbpdlYYABOjZmXexJtAIIc1n+HvBeA79TbTH+zmX658BXztdPArNdloU798EFdWw7DMgDxjin/wj8rwn76inX7wPEO79/ANABKAVCXZZPBZbU8513AdOdr38DbHRZthOYXMt7pgLr69je28CzLtMTgOQax83PTvA7bKj6XGAh8FAd630J3OV8PQnY5un/F770pyUCVZt0Y0xJ1YSIhInIf51VJ3nAciCqnmqK1KoXxpgi58uIRq7bGchymQdwsK6AGxhjqsvrIpeYOrtu2xhTiL1SrZUzpo+BW52ll5twXr02YV/VpQcQCKSISI6I5AD/xZZmjiMiY4GewGznrA+AM0RkqHO6G/Zqvaa65jdUtd9ERG4VkQ0uMZ+OLe2d6LPeAW52vr4ZePckYlKNpIlA1abmkLS/BPoBo4wxbYFznPPrqu5pDilAjIiEuczrVs/6JxNjiuu2nZ/Z7gTveQe4HrgQaAN81oQ4CrGliyodXV4fxJYIYo0xUc6/tsaYQXXEc5vzMzaIbdtZ5TK/anun1fK+g0CvOrZZX3xVjh4rzjaK14AHgHbGmChgC8e+e10xAMwDBovtdTUJeL+O9ZQbaCJQDdEGW9edIyIxwO/d/YHGmP1AIvCUiASJyFnA5W6K8RNgkoiME5Eg4GlO/H/jOyAHmIGtViprQhwbgHNEpLuIRGKrcwAwxqQAXwN/F5G2IuInIqeJyPiaGxGREGxSuhtbFVX19wvgRmed++vAMyLSR6zBItIO+BzoJCLTRSRYRNqIyCiX+C4VkRhnw/P0E+yTcGxiSHfGdTu2
RFDldeBRERnhjKG3M3ngLIF+grNdyBhz4ASfpZqRJgLVEC8AoUAG8CO2YbEl3AScha2meRb4EHuVXJsXaGKMxpitwP3Yk1AKkA0kn+A9Blsd1IPqjZoNjsMY8w32O20C1mJPyq5uxTZqb3PG9AnQqZZNXYlNPjONMalVf8Cb2PaGicA/gI+wySUPeAPb/pCPLdVcjq062wWc69zuu8BGbFvA185Y62SM2Qb8HVgJpAFnAD+4LP8Y257yAZCPLQXEuGziHed7tFqohYmzcUYpryciHwI7jDFuL5Golici3YEd2I4FeZ6Ox5doiUB5LRE501kd4iciE4HJ2KtI1cqIiB+2i+tsTQItz22JQETeFJEjIlLrnZbOOsKXRGS3iGwSkeHuikWdsjpiu1sWAC8B9xlj1ns0ItXsRCQcW111IS3Q/qSO57aqIRE5B/sfeKYx5rjxV0TkUmxj1qXYm4peNMaMqrmeUkop93JbicAYsxw75kldJmOThDHG/Ijta11bQ5hSSik38uRAUV2ofjNKsnNeSs0VReRubNc4wsPDR/Tv379FAlSquZWUOygpr8RhDJUOQ6UxVFQayioclFU6KK90EOjvR3iwPxHBAYQHBVDhMJSUV1JSXklphQOHMRhzrAN/gJ8Q4C8E+PnhL0KlMce27zCUVzoor7Tzqgjg7ycE+vsRGuRPSKA/IYF++CEYqLrbl5BAf/z93Hm7iHuVVdj9XencZw5jcBiO7psKhwOHw86rWoYBxO4jERCEqkFP7K4Q53y73O5jQ4Wj+j52hy5RocSEBzXpvWvXrs0wxsTVtuyUGDHQGDMD21+bhIQEk5iY6OGIlDcoLK0go6CU9PxSsovKGdotirg2wS0aw5ZDuczfeJhbRvegW0zYccuNMexIzefLzSl8sTmFjPTCo8sECPYTukQE0S06jK7RoXSOCmV/ZhEr92aSVViGa6tpTHAAfTpEEB4UcPTEb4whs7CM9PxSMgpKKa1wEBboR0RwIG1CAogKC6RzZCgdI0PoFBlCoL8f+SXl5JdWkFdcwd70Aran5JFXUkFxLd+vGOgWE8rgLlGM6hXDTaN6NGtiyC0uZ3tKHml5JZzbvz1tQwJP+J7dRwrYmZpPgL8Q6NwPRWWV5BSVkV1UTlZhKTtS89l8KJe8ovLj3u8HRAT5Ex0WRFRYIG1DAgkN8ic00J/gQD8C/IQKh03QFQ6H81+bUCsqncnVZX5kaACxEcHEtQmmXXgQ4cEBhATa7QX4C2UVDorLKykpt0knIiSAiOAAIkICCA7wQ1zuNaxwOCguq6SkwkFJmU1grhJ6RNOnQ5sm7WsR2V/XMk8mgkNUv1O0q3OeagXKKx2s2puFwdApMoSOkaGEB/mzJ72QNUlZrN6Xxd6MQsb3ieWq4V3pGRte57YKSiv4cU8mWw7nkpRRyL6MQvZmFJJfUlFtvaAAP64d0ZW7zu5V7/aaw+bkXF5c/BOLth8B4NN1ybx2awLDukcfXSersIzH5mzi621p+AmM7BnDtDHxjO7VjsjQQNqEBBIS6Edt4/E5HIadafms3Z9NbEQwAzu1pWt0KH71nISNsxQQ4N+4Gl9jDIdzS9iVln/0/QF+Qnmlg+0p+Ww5lMvG5By+2JzC8p8yeHnqMEKDGjtixjGbknP4z7I9bDyYy6GcY+knIjiAG0d15/ax8XSKDD3ufQezivjnNz8xd8Mh6rvwDgrwo3dcBBMHdeT0LpEM6tyW6LAgW/IJ8CckyI/ggKbH3xq59T4CEYkHPq+jsfgy7K3oVY3FLxljRp5om1oi8F7GGLYezmPOumTmbzhMZmFZteVBAX6UVTgAiI0IoltMGBsP5uAwMLx7FBcP6khESACBfn74+wmpeSUs/ymddQeyKa80iEDnyFB6xoYTHxtGl6gw4toEExthr8Lmrj/EJ2uTKa90cNHADkwa3JlxvWOJbmJRujb7Mgr54xfbWLT9CJGhgdwxrifn9I3jwVnrScsr4cUpQ5l4
eid+2J3BIx9tILuwnIcu6MP1Cd1avLTS3GauTOL387cypGsUb9yWQLuIxn2fg1lFPLdwJ/M3HiYmPIixvWMZ0KkNAzu1JSI4gJkr9/PF5hQEuHBgB3q0Cz/6+244mMP7Px5ABKaNiWfy0C44jHFeuTsICfQnOjyI6LBAQgP9a02uvk5E1hpjEmpd5sZeQ7OwoxXGYu8y/D12EC2MMf9xDtb1L+xdj0XA7caYE57hNRF4D2MMB7KKWL0vizVJWazal8X+zCKC/P24YGB7rhzahcjQQFLzSjicU0JGQSl92kcwsmcMPWPDERFSc0v434ZDzFmXzE9pBcd9xsBObTmnbxzn9IlleI9oQgLrv5JLzy/lnRVJvL9qP9lF5YjA4C6RTOjXnlvO6kFsI09eVUrKK/n30j38Z+keggL8uOecXkwbG08bZ1VGZkEpd85MZMPBHM7v34HFO9LoFRvOi1OGcXqXyCZ9pjdauDWVB2etp1NkCG/fPpL4BpS8CksreGnxLt76IQkRuOvsXtwzvtfRfefqYFYRb3y/j6+3ppJeUEp5pT0/+Qlcn9CNhy7oU2tpQZ2YRxKBu2gi8A6bknN4Yt4WNibnAhAVFkhCjxjO7R/HpDM6Exl24rpeV8YYcorKbcOm8yqvTUhgkxvGKh2Gjck5fPdTBst3pbP+QDahgf7ceXYv7jqnFxHBx2pF80rKqaw0tZYcyisdLNqWxp++3M7BrGImD+3Mby8dQPu2IcetW1JeycMfbuDLLancOKo7T1w28KSqULzV2v3Z3PnOGgL8/Vjw4Nn1lnS+2ZbG7/+3hcO5JVw7oiu/vKhvg0/kxhhyi8vJKCglPDhAE8BJ0kSgmk1uUTl/W7iDD1YfIDYimJ9POI2xvWPpHRdRb/21p+1JL+DvX+9kweZUYsKDuHJoFw5mF7E9JY/k7GJEYET3aC4a1IGLBnakoLSCT9cdYv7GQ2QUlHFaXDjPTD6dMb1j6/0ch8NwOLeYrtHHNxy3JttT8rjylR8Y2TOGd24fedxvn5pbwlPzt/LV1lT6dWjDn64+nRE9YurYmmoJmggUABWVDhZsSaW0vJJuMWF0iwmjY9uQWnuB5JeU886KJL7ZlkaAvx8hgX6EBPiz4WAO2UVl3DYmnocv7NugXh7eZOPBHP62cAcr92TSMzacAZ3aMqBTW8orHXyzLY2th4/10wny9+P8Ae25enhXJvSLI7CRjbCt3QerDvD43M08dkl/7h1/bHTpNUlZ3D0zkeLySh46vy93nt1T950X0ETg44wxLNp+hL98uZ09Lt0XwZ7shnaPYnzfOM7uE0uPmHDeWZnEG9/vI7e4nBE9ogkJ9KOk3HZri2sTzK8n9mNQ51O73rvSYWpNgAezili8PY3AAD8uO6MTUWHN19Dc2hhjeOCD9SzcmspH957F8O7RzN94mEc/2kjX6FBevy2BXnF1PY9ItTRNBD5s2+E8/vDZVlbty6JXbDj/d0l/+nVow8HsIg5mFbMvo4AVezKPXgmLgDFwwYD2PHh+HwZ3jfLsF1BeLa+knEtf/A6Aa4Z35cXFuxgZH8N/bxnRrL211MnTROCjMgpKueAfy/AXYfoFfZgysnudRfT0/FK+353OjpR8Jg3uzBldT+0rftVy1h/I5rr/rKTCYZg8tDN/u3aw9tP3QvUlglPizmLVNE9/to2i0kq+eHDcCe9GjGsTzFXDusKwFgpOtRrDukfzzxuGkp5fyu1j47UP/ylIE0ErtWTHEeZvPMzDF/Rt8i3pSjXU5UM6ezoEdRK0Kb8VKiit4LdzN9OnfQT3TajrWeFKKWVpIjjFpeQWM2dtMqm5JUfnPb9wJyl5JfzlmsEEBehPrJSqn1YNncJKyiu5/a017EjNB2BQ57acGR/DOyuTuHV0D0b0iD7BFpRSShPBKe3pz7exIzWfv1x9BtlF5SzZcYSZK5PoHBnKrybqMxuUUg2jieAU9dnGw3yw6gD3jj+NKSO7A3DfhNPIKSpD
kGpj6SilVH30bHEKSsoo5DefbmZEj2h+eVHfasv0TlilVGNpS+Ippriskvs/WIe/n/DS1GE6hotS6qRpieAUcSSvhJkr9x8dZ/+1WxPoEqXD8iqlTp4mAi9XWFrB7+dv5X8bDlHhMFwwoAN3n9OLM+N1SF+lVPPQRODl/vzlduasS+bW0T24fWzPBj0RSimlGkMTgRdbsTuD9348wB3jevLEpIGeDkcp1UppS6OXKiyt4NdzNhHfLoxHL+rn6XCUUq2Ylgi81F+/2sGhnGI+vPusVvncW6WU99ASgRdauSeTmSv3M21MPCN7aqOwUsq9NBF4meKySv5vziZ6tAvjVxdrlZBSyv20asjL/Hvpbg5kFfHBXaMIC9KfRynlfloi8CL7Mgr577K9XDm0M2NOi/V0OEopH6GJwEsYY3jyf1sIDvDj8csGeDocpZQP0UTgJb7aksp3uzJ45KK+tG8T4ulwlFI+RBOBFygsreDpz7cxsFNbbhndw9PhKKV8jLZGeoGXv91NSm4J/7pxGAE6mqhSqoXpWcfD9qYX8Mb3e7luRFdG9NB7BpRSLU8TgYc9+8V2QgL8+bU+WlIp5SGaCDxoyc4jfLvjCA+e34e4NsGeDkcp5aM0EXhIeaWDZz7fRs/YcG4bE+/pcJRSPkwTgYfMXLmfvemFPDFpAEEB+jMopTxHz0AekFlQyguLfmJ83zjO7dfe0+EopXycJoIWZozhTwt2UFxWyROTBiAing5JKeXj3JoIRGSiiOwUkd0i8lgty7uLyBIRWS8im0TkUnfG4w1e/nY3c9Ylc+/40+jdvo2nw1FKKfclAhHxB14BLgEGAlNFpObzFn8HfGSMGQZMAf7trni8wbsrk/jHNz9xzfCuPHJhX0+Ho5RSgHtLBCOB3caYvcaYMmA2MLnGOgZo63wdCRx2YzweNX/jYZ6cv5ULBnTgr9ecgZ+fVgkppbyDOxNBF+Cgy3Syc56rp4CbRSQZWAD8orYNicjdIpIoIonp6enuiNWtvt+VwSMfbuDM+BgdRkIp5XU8fUaaCrxtjOkKXAq8KyLHxWSMmWGMSTDGJMTFxbV4kCfDGMMzn2+je7swXr8tgZBAff6wUsq7uDMRHAK6uUx3dc5zdQfwEYAxZiUQArSqJ7Ks2pfFzrR87j3nNNqGBHo6HKWUOo47E8EaoI+I9BSRIGxj8Pwa6xwAzgcQkQHYRHDq1f3UY+bKJKLCArliaGdPh6KUUrVyWyIwxlQADwALge3Y3kFbReRpEbnCudovgbtEZCMwC5hmjDHuiqmlpeQWs3BrGjckdNMqIaWU13Lr8wiMMQuwjcCu8550eb0NGOvOGDzpg1UHcBjDzfqwGaWUF/N0Y3GrVVpRyazVBzi/f3u6xYR5OhyllKqTJgI3+XJzKhkFZdx6VrynQ1FKqXppInCTd1Ym0Ss2nHG9W1UnKKVUK6SJwA02J+ey/kAON4/uoXcQK6W8niaCZmaM4c9fbiciOIBrRnT1dDhKKXVCmgia2ew1B1mxJ5PfXNqfyFC9gUwp5f00ETSjlNxi/vTFdkb3imHqmd09HY5SSjWIJoJmYozh8U83U+5w8NdrBmvbgFLqlKGJoJnM23CIJTvT+dXF/enRLtzT4SilvEFlhacjaBBNBM0go6CUP3y2jeHdo5g2Jt7T4Sil6lKSC1vmtMwJesXL8Nd4OLTW/Z91kjQRNIOPE5PJKSrnL9cMxl+rhNSpoLwEPr4dvv4dFBzxdDQtozgHZl4Jn/wMvvwVnGhYs/IS+PIx+PAW2PQxlOQ1/LO2zrX7tqwA5v3cbsuLuXWsIV/x1dZUhnSNpG8HfQaxaqTtn0PmbojrD+37Q2R38GvC9VlpAexaCCkbq5/gup4JA684fv2Fj8PWT0H8YPXrcOYdMOZBaNOh6d/FmxVnw7tXQeoW6HcZJL4J0T1h7IO1r5+fCrNvtFfz4e1h+3zwD4be58P4X0PnYXV/1sHV8Ok90G203f7sG2Hpn+HC
P7jnuzUDTQQn6XBOMRsP5vDrif08HUrDZO+H0CgIifR0JKeO/FQIjYaA4ObbZmUFfPME/FjjMd2BYdBjDAycDP0nQVhM3duoKLMnqK1zYfciqCgB/yAQ50i3phJWvAQX/xnO+vmx923+BBLfsCf+4bfBd8/bOFbPgFCXzwsKg5s/hZiezfe9m5sxkLHLJrC6junibFsSOLINbngP+lwEn9xu9390D7uvXR1eD7NutNVIN7xnE0fyatj2P9j8sU0odyyC2N7Hf1bWXpg1Bdp2hikfQHg7GH6r/R0GXA5dE+x6hRk2Ge9dVv39MT1tPAMn2220EDnVRn1OSEgwiYmJng7jqLd/2MdTn23j21+Op1dchPs/MOcgbJpt/xPXdmLau8yeuIbccPyy3GR4OcGeLEbfB6PvtSe45pL4FkR2gz4XnNx2cg7AtvngKIcxDzXtCrm5FGbAy8Oh42C4dX7zxFKcY6sn9iyGUffCOb+2pYL07ZC2FX76yu4DvwDoeQ6MvBv6TgRxqXY8vAH+dz+kbYE2nWDAFTDoSug2CvyciaCy3J7wtn8GF/8JzrrfnjRnTIAOg2DaF+DvvNclcw+sfcul+sPA+vdhzC8afyVrDKx+zZ5Qq4jYq+gBl0Objk3abdW2n7oJts6zJ+esPdDrXLhlbvV9BFBWCG9dAke225N634vt/PJieOcKu52bPoagCEjfaffnmjcgPBamzoKOZ1TfXtY+eP0CCG4Ddy6y67kue/86KMqonihK8uDfZ9nEes9y+/t+8ahNNKdfDQEhzu/lsCWQI9vsdLdREHNa9c8fOtUeE00gImuNMQm1LtNEcHKmzFhJVmEZXz883v0f5qiENyfaq5Pzfgfn/Kr68oIj9kRfmgd3Lzm++Dr3PttQ1vt82LkAgtvaE9G4h+1BWtOmj+DrJ+wVzVk/rz9prH4NFjwKfoFw6zyIH1d9uTGwbzl0GQHBtSTM4mxY9y5sm1e9cW34rTDpxaafgLP2wvK/25NhZBdbBRPX38YX34AR0Bf8Glb/176+5DkYdXfT4qiSsQtmTYXsfXDZ32HEtOPXMQZSNjivQD+B3IPQaQiMfwxOOw+WPwff/xPC4+Cy5+0Va137p7LcJp3t8+H8J2HLp5B3GO793u6P+nwwxZ7MH94K/g2sPDDG1o2v/JdNUH7ORFNZBgWpgED3s2DQVTD8FggMbdh2q5QV2qqWvUttyafnORDRHjZ9CDfPgd41LkK+eRJ+eBGmzoZ+l1RfVpgBr58P2UnH5vkF2m1e9R+73docXAPvTLK/ya3z7YXVmtdg0VM2ed/4oS3VudqzBN690lZHZe+DTkPhylehw8Djt5/+E2z/n602LMqqvuz8J2Dw9SfcTbXRROAmmQWlnPnHRTxwbm8euagFqoZ+eMkWZ2N6QV4K3L/KFm2rzL3PFl1D2kJ0vL0qqTpBpG6G/5xtr/AuesbWlS77qz1BDLoarn2z+tVUzkF4dYy9Wik84kwa98Donx9fXfHTQlscPu18eyVbkGavlmL72OXlxfbqdcsciOoBV7wMvVwS584v4bPp9kTRaQgMvNJe3W6YBcv/BsNuhstfrj8ZlBVCRemx6YI0u782fWj/cw660l6Bpe+w1WMY+5+4Vz0JPHMPvDISht1iS1P7f7An0Han1f0esFU2AUHV5zkqYdV/YPEz9uR3w7vHJ8vaVJbb77D8eXsCCYqwDZBDb4KL/9iwEl1lOcy50yZZgJs+gT4Xnvh92z+DD2+GGz+GvhdVX2YMOCqOlSiq5i38Lfz4ii3FXPK36sfUkR02hq3zbOmn01B71d3QKpDSAvjgejiwEi58GobcaKteKsrglTMhMBzu/e5YiSh9pz2GB0+BK1+pfZs5B23CjepuLxBielb/TnXZOhc+nmaTcHE2HFgBvS+Ey1+sO8F+8Sisewcm/MaW6BuaXJuJJgI3+XDNAf5vzma+eHAcgzo3U527w2GvBrqNhradjs1P32lP5H0uhEv+
Cv8aCb0mwNQP7PL9K+GtiTDuEYjrB3PvsQdl1RXnu1fBoXXw0IbqJ4/v/g6Ln7ZXp2feaecZY9c/uBru+8GeZJf/zf6HCWpjr4rPesAmhJSN8OYl9uR4+5e2WPz6BRAUDncutleCs2+0VRln3W9P+ll7IOFntiSy+BnY/BG0HwST/wVdhh+LzRjbyLbsrzD0ZptAyvLtvkjfYf89st3+m5d8/L4MCLGfM/ah6tURJXnw2rn2BPLzFbaYX5sPb4Hdi+HB9ba+/ZXR9gpu2oLjk1L6Trt/ts6zRfseY2xCG3gFlObbRHhwFfS5GC5/ofH1v5UVdj/t+MLW69c8MTfk/d88YS8QRt3TsPdUlME/+kP82XD9O9WXffW4bWfofYH9nn0vhiV/glWv2lLmxL8cX03jascC+PQum9imfABdR9QfS2m+rXY5uAqufg3OuLb68i1zbMln8r9h2E322HnncnsB9Iu11atwmsv3L8Ci30NwJFzyFxgytf7vbIwtrXuofU4TgZvc/tZqdh0p4Ltfn4vUdwA0VGm+7W2w8wt7cE38k73yc1TCmxfZOsj7V9kia9VBeONH9kr8v+fYg+z+VbbB8e3L7AnpgbW2muG9q4/VE7tyOOxV1r5lcMfXtjop8S34fHr15AC2/nr5c/ZkFxRuT7KbP7ZF9DsXHUtcyYn28+P6QX6avYK95nVbNC8rgiV/hJWvAMZerZ/9Szj70eOvoqss+TMs+4tNYMXZx+YHhEBsX2g/wJY+gtseW+YXYBtb6+oFc3A1vHmxPale/kLty9+40F69TXjMztswC+bde6zxteYVLgLdR9vqr92Lj83zC7D765K/wuAb6j9ZeJsvH7Mn/F/uPFYSPLzBJtLOwyHvEOSn2GPAVMKo+2Dinxv2HdO2wawb7DEy+RUYfF3t65Xmw3vXQvIauOY1OP2a49cxxlbz5KXYE//OBTDnDrjsH7ZHlDsYYy9sOg+rftHmpTQRuEF+STkjnlnErWf14HeTaqnna6ycA7ZONn27PfnsWeIsbl5gT3QrXoZr3jh2JVRRBv8ZZ3uKjLjNXtXf8D4MmGSXp22zy4dOhcMbbZJ4YE3tDcyFmfDfs22R+Ib3bDtElxFwy7zaq2OObIdlf7PF46AIuGOhbXx0tW0+fHQrRHWDqR8eXxd6YBWsfds2WncafOL9s+YNeyKI6+es5+9nq5mqqgGa4usnbG+Omz+17SZVjLFJIjsJfrHuWJuGMbZ+f+8Se2WdvoNjdd5X2gbbmqW4rfNsKensX558I6knpG62x1FV+4jDYRNkzgH4RaItIVb1qAmPtSXSxiS6wgx7nOz/ofbqrgOr4H8/txdB175h2xbqkvQDvH2pLWlumGV/izsXn9wx0opoInCD+RsP8+Cs9Xxy71kkxNfTxa8h9q+0dbGV5XDdW/ak5HDAmtftVX95kT3JXD+z+n+yfctt8Rds/eRNH1dfvvC3ttEOqieR2hxYZf8Tib9NCD9faetN65OxGzDH2gJqOrzBtmE0Z8+k5lReYhNgWZGtIgqJtPO2fGKrclyr1qrkp9peKG06O0/+zdALxtv9Z5y93+Ce5TZ5f/YQXDWj9p5pTVFRZkt8379gG8AvfwF6jodvn7XdWiO72mrDXhNOvK1ZU21pAIG7FtsLGgVoInCL+99fx+qkLFb95vyTG2Auea2tRonsYns21DypZu2D9e/C6Pttw1hNc+6yDb73rTi+EbM0H14ZZXtv3PHNiXverHjZ9viY9AIk3N7073QqSV4Lb1xgSxkVpbZB1jggboBtGG7hBj2v9ON/4Kv/g9s+h49ugfYDbdfT5q7iOrwe5t0PR7ZCWDsoyoSEO2z31bracWpK/8k2EA+/BSb9s3njO8VpImhmJeWVDH/mG64a1oU/XnVG/SsbA58/bBtVr32z+s052Um2YTUwzBZhI+IaH0xlBRSm111HWZRlq4OCGjgQXvb+6j2RfMH3/4SNs217Q1W102nn1X8zly8p
zIS/97NtMuVFNkHW1u2xOVSU2Rvcdn1tewY1pc983mGI6OjZ+0+8UH2JQC93mmDVviyKyiq5cGADbsdf+md7o45/MLx23rFug8XZ8P71tjpo2idNSwJgr1jra6hq7MnM15IA2DrlcQ97OgrvFd4O+k203UnPesB9SQBsh4FzH7d/TdWCd+S2Fpoym2DFngwC/YVRPWupqnG14YNjXR9/vtIWd2dOtjdffXiLvdlpyvsQ17dlAleqqcY9bHthVfWgUq2Klgia4Mc9mQzrFk1okEtvhKIs29hY1UNh33KY/6Bt9Lr8BdsAe+ci26VtwaN2natmNOymIqU8rcsIe9GiWiVNBI2UW1zO5kO5/OI8l0bdZX+zfeP9g531zP1g9ze28fb6mcfuVAyNsv3+v/uHrbJprl4XSil1EjQRNNLqfVk4DJx1mrNaaONsmwT6T7INwek77c1IER1td87QqOob8POH8b86brtKKeUpmggaacWeDIID/BjWPQr2fQf/e8D2bLj2rbrvjFVKKS+mjcWNtHJPJmfGxxCcvQc+vMkOAHf9u5oElFKnLE0EjZBZUMqO1HzO6REM719rh5+trfpHKaVOIVo11Ag/7rVjg18YuBFy9tsHYfhiv3ulVKuiJYJGWLk3g4jgAHpkr4KQKNs1VCmlTnGaCBphxZ5MzuwRhd/eb+0AWDqqoVKqFdBE0EBpeSXsTS/kso55dvz1087zdEhKKdUs3JoIRGSiiOwUkd0iUuu96SJyvYhsE5GtIvKBO+M5GSv3ZAIw1m+TnaGJQCnVSritsVhE/IFXgAuBZGCNiMw3xmxzWacP8BtgrDEmW0TqeFq0563Yk0FkaCAd01fYu4ejunk6JKWUahbuLBGMBHYbY/YaY8qA2cDkGuvcBbxijMkGMMYccWM8J2XFnkzGxUcg+3/Q0oBSqlVxZyLoAhx0mU52znPVF+grIj+IyI8iMrG2DYnI3SKSKCKJ6enpbgq3bqm5JSRnF3N51H6oKNZEoJRqVTzdWBwA9AEmAFOB10QkquZKxpgZxpgEY0xCXFwTx+0/CZuScwAYWr4e/AJ1xFClVKtywkQgIpeLSFMSxiHAtSK9q3Oeq2RgvjGm3BizD/gJmxi8ypZDufgJtD/yPXQf3fCnfSml1CmgISf4G4BdIvI3EenfiG2vAfqISE8RCQKmAPNrrDMPWxpARGKxVUV7G/EZLWLToVxGx5Xjd2SrVgsppVqdEyYCY8zNwDBgD/C2iKx01tnX+zRpY0wF8ACwENgOfGSM2SoiT4vIFc7VFgKZIrINWAL8yhiTeRLfp9kZY9hyKJdJbX6yM3qf79mAlFKqmTWo+6gxJk9EPgFCgenAVcCvROQlY8zL9bxvAbCgxrwnXV4b4BHnn1dKyS0ho6CMUXEbICwWOpzgYfVKKXWKaUgbwRUiMhdYCgQCI40xlwBDgF+6NzzP23woFzB0z14Fp50Lfp5uX1dKqebVkBLBNcA/jTHLXWcaY4pE5A73hOU9Nifn0sMvg8CSDOgxxtPhKKVUs2vI5e1TwOqqCREJFZF4AGPMYveE5T02H8plQnSGnehwumeDUUopN2hIIvgYcLhMVzrntXrGGDYfymV0RJqdEdeYTlNKKXVqaEgiCHAOEQGA87VPPJfxUE4xWYVl9Pc7CJHdIaStp0NSSqlm15BEkO7S3RMRmQxkuC8k77HlUC4AHUv2QfsBHo5GKaXcoyGNxfcC74vIvwDBjh90q1uj8hKbknMJ8askJHcPDKx1GCSllDrlnTARGGP2AKNFJMI5XeD2qLzE5kO5jI/NR/LKof1AT4ejlFJu0aAbykTkMmAQECIiABhjnnZjXB5X1VD8qy7pkIdWDSmlWq2G3FD2H+x4Q7/AVg1dB/Rwc1wel5xdTE5ROYODDoH424fRKKVUK9SQxuIxxphbgWxjzB+As7CDw7Vqm50Nxd0r9kO70yAwxMMRKaWUezQkEZQ4/y0Skc5AOdDJfSF5h82Hcgn0
F9rk7dJqIaVUq9aQRPCZ82ExzwHrgCTAax8y31w2J+cyuEMgftn7tKFYKdWq1dtY7HwgzWJjTA4wR0Q+B0KMMbktEZwn7UjN45Ye2ZBltESglGrV6i0RGGMcwCsu06W+kARKyivJKChjgL/zgWrtB3k2IKWUcqOGVA0tFpFrpKrfqA84klcKQI+KfeAfDDE9PRyRUkq5T0MSwT3YQeZKRSRPRPJFJM/NcXlUSm4xAO2L90JcP/Dz93BESinlPg25s7jeR1K2Rql5tqNUm7xdcNoEzwajlFJudsJEICLn1Da/5oNqWpOU3BLaUkBAYao2FCulWr2GDDHxK5fXIcBIYC1wnlsi8gIpOcUMDUm1Ex20oVgp1bo1pGroctdpEekGvOCugLxBSm4JCSGH7a10WiJQSrVyTXkSezLQqs+OqXklDAw4BMFtoW0XT4ejlFJu1ZA2gpcB45z0A4Zi7zButVJyS+gVeMCWBnyn16xSykc1pI0g0eV1BTDLGPODm+LxuLIKB1kFxXQO2wPtr/N0OEop5XYNSQSfACXGmEoAEfEXkTBjTJF7Q/OMI/kl9OUgIZUF0P0sT4ejlFJu16A7i4FQl+lQYJF7wvG81NwSRvrtsBM9xng2GKWUagENSQQhro+ndL4Oc19InpWSW8JIv+2UR3SFqG6eDkcppdyuIYmgUESGV02IyAig2H0heVZqTjEj/XZgtDSglPIRDWkjmA58LCKHsY+q7Ih9dGWrVHbkJ+IkD9NrnKdDUUqpFtGQG8rWiEh/oJ9z1k5jTLl7w/KcqPTVAEiPsR6ORCmlWkZDHl5/PxBujNlijNkCRIjIz90fmmd0zttAjl+0fU6xUkr5gIa0EdzlfEIZAMaYbOAut0XkYf1LNrM/YojeSKaU8hkNSQT+rg+lERF/IMh9IXlORWYSnUgns90IT4eilFItpiGNxV8BH4rIf53T9wBfui8kzynY9R1RQHHn0Z4ORSmlWkxDEsH/AXcD9zqnN2F7DrU6FXt/INeEEdrldE+HopRSLeaEVUPOB9ivApKwzyI4D9jekI2LyEQR2Skiu0XksXrWu0ZEjIgkNCxs9wg5/CNrHP3oGBXhyTCUUqpF1ZkIRKSviPxeRHYALwMHAIwx5xpj/nWiDTvbEl4BLgEGAlNFZGAt67UBHsImG88pOEJEwT5WO/rTKTLEo6EopVRLqq9EsAN79T/JGDPOGPMyUNmIbY8Edhtj9hpjyoDZwORa1nsG+Cv2MTCes38FAOtlIFFhgR4NRSmlWlJ9ieBqIAVYIiKvicj52DuLG6oLcNBlOtk57yjn0BXdjDFf1LchEblbRBJFJDE9Pb0RITTC/hWUSghZbQcg2nVUKeVD6kwExph5xpgpQH9gCXaoifYi8qqIXHSyHywifsA/gF+eaF1jzAxjTIIxJiEuLu5kP7p2KRvZHdCbOG0fUEr5mIY0FhcaYz5wPru4K7Ae25PoRA4BrsN3dnXOq9IGOB1YKiJJwGhgvscajPNTSK6MoVNk6InXVUqpVqRRzyw2xmQ7r87Pb8Dqa4A+ItJTRIKAKcB8l23lGmNijTHxxph44EfgCmNMYu2bcyNjMAVHOFDelo7aUKyU8jFNeXh9gxhjKoAHgIXY7qYfGWO2isjTInKFuz63SUrzkIpi0hyR2mNIKeVzGnJDWZMZYxYAC2rMe7KOdSe4M5Z6FRwB4IiJYmRbTQRKKd/ithLBKSU/FYB0orSNQCnlczQRABSkAbZEoG0ESilfo4kAjiaCHL8Y2oW3yoFVlVKqTpoIAPJTKZdAwtpG4+enN5MppXyLJgKAgiNk+8XQUdsHlFI+SBMBQEEqGUTRLjzY05EopVSL00QAUHCE1MpIYiK0fUAp5Xs0EQAmP5XDlW2JCdNEoJTyPZoIKsqQ4izSHFHEaI8hpZQP0kRQaO8qTieKdlo1pJTyQZoI8o/dTBatVUNKKR+kicB5M1m60aohpZRv0kRQYMcZOmK0akgp5Zs0ERQc
wSBk0larhpRSPkkTQX4qRQFRBAcFExLo7+lolFKqxWkiKEgjxz9GbyZTSvksTQQFaWRKlN5MppTyWZoI8tM4oj2GlFI+zLcTgTFQkEZKRVtidMA5pZSPcuszi71ecTY4yjlQ2YaY8EBPR6OUUh7h2yUC581kKZWRWiJQSvks304E+S43k2kbgVLKR/l2Iqh6aD1RRGsiUEr5KE0E6DhDSinf5tuJID+NCv9QCgnVqiGllM/y7URQkEZhUCyAVg0ppXyWzyeCvIAYAv2FtiG+3ZNWKeW7fD4RZEs00WFBiIino1FKKY/w7USQn0Y60dpQrJTyab6bCMqLoTSXVEdbTQRKKZ/mu4nA2XX0ULkmAqWUb/PdROB8aH1SaRtNBEopn+a7iaCgKhFEaCJQSvk0n08ER4w2FiulfJtbE4GITBSRnSKyW0Qeq2X5IyKyTUQ2ichiEenhzniqKUjDiB9ZaNWQUsq3uS0RiIg/8ApwCTAQmCoiA2usth5IMMYMBj4B/uaueI6Tn0J5SCwO/DQRKKV8mjtLBCOB3caYvcaYMmA2MNl1BWPMEmNMkXPyR6CrG+OpLvcQhSEdATQRKKV8mjsTQRfgoMt0snNeXe4AvqxtgYjcLSKJIpKYnp7ePNHlJpMX1AHQRKCU8m1e0VgsIjcDCcBztS03xswwxiQYYxLi4uJO/gONgbxDZPrbbUWHaSJQSvkud460dgjo5jLd1TmvGhG5APgtMN4YU+rGeI4pzobyItIklrYhAQT6e0U+VEopj3DnGXAN0EdEeopIEDAFmO+6gogMA/4LXGGMOeLGWKrLtTVWyY52tIvQZxUrpXyb2xKBMaYCeABYCGwHPjLGbBWRp0XkCudqzwERwMciskFE5texueaVmwzA/soYbR9QSvk8tw7Cb4xZACyoMe9Jl9cXuPPz65Rra6h2l0QS3U4TgVLKt/nm01hyD4J/MPuKQpnQXROBOjWVl5eTnJxMSUmJp0NRXiQkJISuXbsSGBjY4Pf4aCJIxkR2ISutgpgITQTq1JScnEybNm2Ij4/XByspAIwxZGZmkpycTM+ePRv8Pt/sLpObTGVEZ8orDTHadVSdokpKSmjXrp0mAXWUiNCuXbtGlxJ9MxHkHaIkrDOgN5OpU5smAVVTU44J30sEleWQn0J+1fASWjWklPJxvpcI8lPAOMgOaA+gVUNKNVFmZiZDhw5l6NChdOzYkS5duhydLisrq/e9iYmJPPjggyf8jDFjxjRXuABMnz6dLl264HA4mnW7pzrfayx2dh1N97PDS2jVkFJN065dOzZs2ADAU089RUREBI8++ujR5RUVFQQE1H6KSUhIICEh4YSfsWLFimaJFcDhcDB37ly6devGsmXLOPfcc5tt267q+97e6tSKtjk4byZLoR1QRjutGlKtwB8+28q2w3nNus2Bndvy+8sHNeo906ZNIyQkhPXr1zN27FimTJnCQw89RElJCaGhobz11lv069ePpUuX8vzzz/P555/z1FNPceDAAfbu3cuBAweYPn360dJCREQEBQUFLF26lKeeeorY2Fi2bNnCiBEjeO+99xARFixYwCOPPEJ4eDhjx45l7969fP7558fFtnTpUgYNGsQNN9zArFmzjiaCtLQ07r33Xvbu3QvAq6++ypgxY5g5cybPP/88IsLgwYN59913mTZtGpMmTeLaa689Lr4nnniC6OhoduzYwU8//cSVV17JwYMHKSkp4aGHHuLuu+8G4KuvvuLxxx+nsrKS2NhYvvnmG/r168eKFSuIi4vD4XDQt29fVq5cSbOMrdYAPpgI7PASBytjCA44Qmigv4cDUqp1SU5OZsWKFfj7+5OXl8d3331HQEAAixYt4vHHH2fOnDnHvWfHjh0sWbKE/Px8+vXrx3333XdcP/j169ezdetWOnfuzNixY/nhhx9ISEjgnnvuYfny5fTs2ZOpU6fWGdesWbOYOnUqkydP5vHHH6e8vJzAwEAefPBBxo8fz9y5
c6msrKSgoICtW7fy7LPPsmLFCmJjY8nKyjrh9163bh1btmw52m3zzTffJCYmhuLiYs4880yuueYaHA4Hd91119F4s7Ky8PPz4+abb+b9999n+vTpLFq0iCFDhrRYEgCfTATJEBrNwQI/2rcN1l4XqlVo7JW7O1133XX4+9sLrNzcXG677TZ27dqFiFBeXl7rey677DKCg4MJDg6mffv2pKWl0bVr9ceTjBw58ui8oUOHkpSUREREBL169Tp68p06dSozZsw4bvtlZWUsWLCAf/zjH7Rp04ZRo0axcOFCJk2axLfffsvMmTMB8Pf3JzIykpkzZ3LdddcRGxsLQExMzAm/98iRI6v13X/ppZeYO3cuAAcPHmTXrl2kp6dzzjnnHF2vars/+9nPmDx5MtOnT+fNN9/k9ttvP+HnNSffSwR5h6BtV35Ky6d3XISno1Gq1QkPDz/6+oknnuDcc89l7ty5JCUlMWHChFrfExx8bPBHf39/KioqmrROXRYuXEhOTg5nnHEGAEVFRYSGhjJp0qQGbwMgICDgaEOzw+Go1iju+r2XLl3KokWLWLlyJWFhYUyYMKHevv3dunWjQ4cOfPvtt6xevZr333+/UXGdLN/rNZSbjKNtF/amF9K3QxtPR6NUq5abm0uXLvZ5VG+//Xazb79fv37s3buXpKQkAD788MNa15s1axavv/46SUlJJCUlsW/fPr755huKioo4//zzefXVVwGorKwkNzeX8847j48//pjMzEyAo1VD8fHxrF27FoD58+fXWcLJzc0lOjqasLAwduzYwY8//gjA6NGjWb58Ofv27au2XYA777yTm2++uVqJqqX4YCI4SH5IR8oqHfTRRKCUW/3617/mN7/5DcOGDWvUFXxDhYaG8u9//5uJEycyYsQI2rRpQ2RkZLV1ioqK+Oqrr7jsssuOzgsPD2fcuHF89tlnvPjiiyxZsoQzzjiDESNGsG3bNgYNGsRvf/tbxo8fz5AhQ3jkkUcAuOuuu1i2bBlDhgxh5cqV1UoBriZOnEhFRQUDBgzgscceY/To0QDExcUxY8YMrr76aoYMGcINN9xw9D1XXHEFBQUFLV4tBCDGmBb/0JORkJBgEhMTm/bm0nz4c1d2nv5LLk4cwfwHxjK4a1SzxqdUS9m+fTsDBgzwdBgeV1BQQEREBMYY7r//fvr06cPDDz/s6bAaLTExkYcffpjvvvvupLdV27EhImuNMbX22fWtEoHzHoJ95dEA9G6vbQRKnepee+01hg4dyqBBg8jNzeWee+7xdEiN9pe//IVrrrmGP//5zx75fN9qLHbeQ7CtqC3dYkIJC/Ktr69Ua/Twww+fkiUAV4899hiPPfaYxz7fx0oE9h6CtTnh9G2v7QNKKQU+lwiSMeLH2qxg+nbURKCUUuBriSDvEBXhHSmp9KNvB20fUEop8LVEkJtMfrAdfrqPVg0ppRTgc4ngIEckDj/RHkNKnaxzzz2XhQsXVpv3wgsvcN9999X5ngkTJlDV/fvSSy8lJyfnuHWeeuopnn/++Xo/e968eWzbtu3o9JNPPsmiRYsaEX39fG24at9JBA4H5B7iQGU03WPCCNHB5pQ6KVOnTmX27NnV5s2ePbvegd9cLViwgKioqCZ9ds1E8PTTT3PBBRc0aVs11Ryu2l3ccYNdU/lO/8nCdHCUs6M4kj6dtVpItTJfPgapm5t3mx3PgEv+Uufia6+9lt/97neUlZURFBREUlIShw8f5uyzz+a+++5jzZo1FBcXc+211/KHP/zhuPfHx8eTmJhIbGwsf/zjH3nnnXdo37493bp1Y8SIEYC9R2DGjBmUlZXRu3dv3n33XTZs2MD8+fNZtmwZzz77LHPmzOGZZ545Ojz04sWLefTRR6moqODMM8/k1VdfJTg4mPj4eG677TY+++wzysvL+fjjj+nfv/9xcfnicNW+UyJw3kOwJb+NNhQr1QxiYmIYOXIkX375JWBLA9dffz0iwh//+EcSExPZ
tGkTy5YtY9OmTXVuZ+3atcyePZsNGzawYMEC1qxZc3TZ1VdfzZo1a9i4cSMDBgzgjTfeYMyYMVxxxRU899xzbNiwgdNOO+3o+iUlJUybNo0PP/yQzZs3U1FRcXQcIYDY2FjWrVvHfffdV2f1U9Vw1VdddRVffPHF0fGEqoar3rhxI+vWrWPQoEFHh6v+9ttv2bhxIy+++OIJ99u6det48cUX+emnnwA7XPXatWtJTEzkpZdeIjMzk/T0dO666y7mzJnDxo0b+fjjj6sNVw0063DVvlMicN5DkOyI4TIdY0i1NvVcubtTVfXQ5MmTmT17Nm+88QYAH330ETNmzKCiooKUlBS2bdvG4MGDa93Gd999x1VXXUVYWBhgx9ypsmXLFn73u9+Rk5NDQUEBF198cb3x7Ny5k549e9K3b18AbrvtNl555RWmT58O2MQCMGLECD799NPj3u+rw1X7UCKwJYJDJlZ7DCnVTCZPnszDDz/MunXrKCoqYsSIEezbt4/nn3+eNWvWEB0dzbRp0+odgrk+06ZNY968eQwZMoS3336bpUuXnlS8VUNZ1zWMta8OV+07VUPx41jW40HyJZxecbWPGKiUapyIiAjOPfdcfvaznx1tJM7LyyM8PJzIyEjS0tKOVh3V5ZxzzmHevHkUFxeTn5/PZ599dnRZfn4+nTp1ory8vNpJr02bNuTn5x+3rX79+pGUlMTu3bsBePfddxk/fnyDv4+vDlftO4mg81A+CJhMfLsI7TGkVDOaOnUqGzduPJoIhgwZwrBhw+jfvz833ngjY8eOrff9w4cP54YbbmDIkCFccsklnHnmmUeXPfPMM4waNYqxY8dWa9idMmUKzz33HMOGDWPPnj1H54eEhPDWW29x3XXXccYZZ+Dn58e9997boO/hy8NV+9Qw1Oc9v5Q+HSL47y21jsSq1ClFh6H2TQ0ZrlqHoa5DSXklSZn6VDKl1KnLXcNV+0wi2JteiMOgTyVTSp2yHnvsMfbv38+4ceOadbs+kwh2HbENS/00EahW5FSr2lXu15RjwmcSQUZBGaGB/vSM1R5DqnUICQkhMzNTk4E6yhhDZmYmISEhjXqfTzUWV1Q6CPD3mdynWrny8nKSk5Ob3EdftU4hISF07dqVwMDAavPrayz2nRvKQJOAalUCAwOr3aGqVFO59cwoIhNFZKeI7BaR4x7IKSLBIvKhc/kqEYl3ZzxKKaWO57ZEICL+wCvAJcBAYKqIDKyx2h1AtjGmN/BP4K/uikcppVTt3FkiGAnsNsbsNcaUAbOByTXWmQy843z9CXC+iIgbY1JKKVWDO9sIugAHXaaTgVF1rWOMqRCRXKAdkOG6kojcDdztnCwQkZ1NjCm25ra9iLfG5q1xgffG5q1xgffG5q1xQeuJrUddC06JxmJjzAxgxsluR0QS62o19zRvjc1b4wLvjc1b4wLvjc1b4wLfiM2dVUOHgG4u012d82pdR0QCgEgg040xKaWUqsGdiWAN0EdEeopIEDAFmF9jnfnAbc7X1wLfmlPtxgallDrFua1qyFnn/wCwEPAH3jTGbBWRp4FEY8x84A3gXRHZDWRhk4U7nXT1kht5a2zeGhd4b2zeGhd4b2zeGhf4QGyn3J3FSimlmpfeaquUUj5OE4FSSvk4n0kEJxruooVjeVNEjojIFpd5MSLyjYjscv4b7YG4uonIEhHZJiJbReQhb4hNREJEZLWIbHTG9Qfn/J7OoUl2O4cqCWrJuGrE6C8i60Xkc2+JTUSSRGSziGwQkUTnPI8fZ844okTkExHZISLbReQsT8cmIv2c+6rqL09Epns6Lpf4HnYe/1tEZJbz/0WzHGc+kQgaONxFS3obmFhj3mPAYmNMH2Cxc7qlVQC/NMYMBEYD9zv3k6djKwXOM8YMAYYCE0VkNHZIkn86hyjJxg5Z4ikPAdtdpr0ltnONMUNd+pp7+res8iLwlTGmPzAEu+88GpsxZqdzXw0FRgBFwFxPxwUgIl2A
B4EEY8zp2A44U2iu48wY0+r/gLOAhS7TvwF+4+GY4oEtLtM7gU7O152AnV6w3/4HXOhNsQFhwDrsXeoZQEBtv3ELx9QVe4I4D/gcEG+IDUgCYmvM8/hvib1faB/OzireFJtLLBcBP3hLXBwbhSEG29vzc+Di5jrOfKJEQO3DXXTxUCx16WCMSXG+TgU6eDIY50iww4BVeEFszqqXDcAR4BtgD5BjjKlwruLJ3/QF4NeAwzndDu+IzQBfi8ha5zAt4AW/JdATSAfeclanvS4i4V4SW5UpwCzna4/HZYw5BDwPHABSgFxgLc10nPlKIjilGJvePdavV0QigDnAdGNMnusyT8VmjKk0tsjeFTugYf+WjqE2IjIJOGKMWevpWGoxzhgzHFsler+InOO60IPHWQAwHHjVGDMMKKRGdYsn/w8469mvAD6uucxTcTnbJSZjk2hnIJzjq5ebzFcSQUOGu/C0NBHpBOD894gnghCRQGwSeN8Y86k3xQZgjMkBlmCLwVHOoUnAc7/pWOAKEUnCjrB7Hrb+2+OxOa8iMcYcwdZ1j8Q7fstkINkYs8o5/Qk2MXhDbGAT5zpjTJpz2hviugDYZ4xJN8aUA59ij71mOc58JRE0ZLgLT3MdbuM2bP18ixIRwd7tvd0Y8w9viU1E4kQkyvk6FNtusR2bEK71VFwAxpjfGGO6GmPiscfVt8aYmzwdm4iEi0ibqtfYOu8teMFxZoxJBQ6KSD/nrPOBbd4Qm9NUjlULgXfEdQAYLSJhzv+nVfuseY4zTzXGeKCx5VLgJ2zd8m89HMssbD1fOfbq6A5svfJiYBewCIjxQFzjsMXeTcAG59+lno4NGAysd8a1BXjSOb8XsBrYjS3GB3v4d50AfO4NsTk/f6Pzb2vVMe/p39IlvqFAovM3nQdEe0Ns2CqXTCDSZZ7H43LG8Qdgh/P/wLtAcHMdZzrEhFJK+ThfqRpSSilVB00ESinl4zQRKKWUj9NEoJRSPk4TgVJK+ThNBErVICKVNUahbLZBxkQkXlxGnVXKG7jtUZVKncKKjR3OQimfoCUCpRrIOb7/35xj/K8Wkd7O+fEi8q2IbBKRxSLS3Tm/g4jMdT5HYaOIjHFuyl9EXnOOLf+1825ppTxGE4FSxwutUTV0g8uyXGPMGcC/sKOOArwMvGOMGQy8D7zknP8SsMzY5ygMx97hC9AHeMUYMwjIAa5x67dR6gT0zmKlahCRAmNMRC3zk7APyNnrHJwv1RjTTkQysOPVlzvnpxhjYkUkHehqjCl12UY88I2xDzlBRP4PCDTGPNsCX02pWmmJQKnGMXW8boxSl9eVaFud8jBNBEo1zg0u/650vl6BHXkU4CbgO+frxcB9cPTBOpEtFaRSjaFXIkodL9T5NLQqXxljqrqQRovIJuxV/VTnvF9gn7b1K+yTt253zn8ImCEid2Cv/O/DjjqrlFfRNgKlGsjZRpBgjMnwdCxKNSetGlJKKR+nJQKllPJxWiJQSikfp4lAKaV8nCYCpZTycZoIlFLKx2kiUEopH/f/mPJbVnKixOsAAAAASUVORK5CYII=",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "# Plot accuracies\n",
    "plt.plot(history.history['accuracy'], label='Training Accuracy')\n",
    "plt.plot(history.history['val_accuracy'], label = 'Validation Accuracy')\n",
    "plt.xlabel('Epoch')\n",
    "plt.ylabel('Accuracy')\n",
    "plt.ylim([0, 1])\n",
    "plt.legend(loc='lower right')\n",
    "plt.title(\"Training and Validation Accuracy\")\n",
    "plt.savefig(\"accuracy_plot\")\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "A big difference between training accuracy and validation accuracy means we could have potentially overfitted the model.\n",
    "\n",
    "### Evaluation\n",
    "\n",
    "We can evaluate the loss and accuracy with the `evaluate` function"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 146,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[INFO] evaluating network...\n",
       "7/7 [==============================] - 0s 12ms/step - loss: 2.5202 - accuracy: 0.7097\n"
      ]
     },
     {
      "data": {
      "text/plain": [
       "'test loss, test acc:'"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "data": {
      "text/plain": [
       "[2.520172357559204, 0.7096773982048035]"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "# evaluate the network\n",
    "\n",
    "print(\"[INFO] evaluating network...\")\n",
    "testmodel = load_model(\"./model.keras\")\n",
    "results = testmodel.evaluate(test_images, test_labels, batch_size=BATCH_SIZE)\n",
    "display(f\"test loss, test acc:\", results)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Predictions\n",
    "\n",
    "Let's predict with our model."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 147,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[INFO] predicting test samples...\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "'predictions shape:'"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "data": {
      "text/plain": [
       "(341, 62)"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "              precision    recall  f1-score   support\n",
      "\n",
      "           0       0.67      0.67      0.67         6\n",
      "           1       0.75      0.50      0.60         6\n",
      "           2       0.67      0.80      0.73         5\n",
      "           3       1.00      0.80      0.89         5\n",
      "           4       1.00      0.40      0.57         5\n",
      "           5       0.75      0.60      0.67         5\n",
      "           6       0.71      0.83      0.77         6\n",
      "           7       0.80      0.80      0.80         5\n",
      "           8       0.50      0.60      0.55         5\n",
      "           9       1.00      0.60      0.75         5\n",
      "           A       0.80      0.67      0.73         6\n",
      "           B       0.60      0.60      0.60         5\n",
      "           C       0.67      0.67      0.67         6\n",
      "           D       0.83      1.00      0.91         5\n",
      "           E       1.00      0.60      0.75         5\n",
      "           F       0.83      0.83      0.83         6\n",
      "           G       1.00      0.80      0.89         5\n",
      "           H       0.83      0.83      0.83         6\n",
      "           I       0.50      0.50      0.50         6\n",
      "           G       0.57      0.67      0.62         6\n",
      "           K       0.60      0.60      0.60         5\n",
      "           L       1.00      0.83      0.91         6\n",
      "           M       1.00      0.67      0.80         6\n",
      "           N       0.67      1.00      0.80         6\n",
      "           O       0.67      0.67      0.67         6\n",
      "           P       0.80      0.80      0.80         5\n",
      "           Q       0.80      0.67      0.73         6\n",
      "           R       0.67      0.80      0.73         5\n",
      "           S       0.57      0.80      0.67         5\n",
      "           T       0.83      0.83      0.83         6\n",
      "           U       1.00      0.80      0.89         5\n",
      "           V       0.43      0.60      0.50         5\n",
      "           W       0.75      0.50      0.60         6\n",
      "           X       0.83      0.83      0.83         6\n",
      "           Y       1.00      0.83      0.91         6\n",
      "           Z       0.75      0.50      0.60         6\n",
      "           a       0.80      0.80      0.80         5\n",
      "           b       0.80      0.67      0.73         6\n",
      "           c       1.00      0.17      0.29         6\n",
      "           d       0.71      0.83      0.77         6\n",
      "           e       0.60      1.00      0.75         6\n",
      "           f       1.00      0.83      0.91         6\n",
      "           g       0.60      0.60      0.60         5\n",
      "           h       0.80      0.67      0.73         6\n",
      "           i       0.83      1.00      0.91         5\n",
      "           g       0.75      0.60      0.67         5\n",
      "           k       1.00      0.80      0.89         5\n",
      "           l       0.33      0.33      0.33         6\n",
      "           m       1.00      1.00      1.00         5\n",
      "           n       0.50      0.40      0.44         5\n",
      "           o       0.62      0.83      0.71         6\n",
      "           p       0.86      1.00      0.92         6\n",
      "           q       0.60      0.60      0.60         5\n",
      "           r       0.56      1.00      0.71         5\n",
      "           s       0.40      0.67      0.50         6\n",
      "           t       0.71      1.00      0.83         5\n",
      "           u       0.71      1.00      0.83         5\n",
      "           v       0.33      0.20      0.25         5\n",
      "           w       0.80      0.80      0.80         5\n",
      "           x       0.50      0.33      0.40         6\n",
      "           y       0.67      0.67      0.67         6\n",
      "           z       0.44      0.80      0.57         5\n",
      "    accuracy                           0.71       341\n",
      "   macro avg       0.74      0.71      0.71       341\n",
      "weighted avg       0.74      0.71      0.71       341\n",
      "\n"
     ]
    }
   ],
   "source": [
    "print(\"[INFO] predicting test samples...\")\n",
    "predictions = testmodel.predict(test_images, batch_size=BATCH_SIZE)\n",
    "display(\"predictions shape:\", predictions.shape)\n",
    "\n",
    "# labels for readability\n",
    "labelNames = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', \n",
    "                   'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', \n",
    "                   'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z',\n",
    "                   'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', \n",
    "                   'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z']\n",
    "\n",
    "print(classification_report(test_labels.argmax(axis=1),\n",
    "    predictions.argmax(axis=1), target_names=labelNames))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Saving\n",
    "\n",
    "Lets save the model."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 98,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[INFO] saving trained model...\n"
     ]
    }
   ],
   "source": [
    " # save the model to disk\n",
    "print(\"[INFO] saving trained model...\")\n",
    "model.save(\"OCR_CNN.h5\")"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Intro_to_AI",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.13"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}