From 3651471f17ab66fa06adb8026fe5cd6e8d8c24d7 Mon Sep 17 00:00:00 2001 From: Zack Cooper Date: Fri, 13 Feb 2015 10:59:58 -0500 Subject: [PATCH 1/6] built frame of solution. Broke out data ingestion to its own file --- corpus_build.py | 86 +++++++++ lang classifier live.ipynb | 361 +++++++++++++++++++++++++++++++++++++ 2 files changed, 447 insertions(+) create mode 100644 corpus_build.py create mode 100644 lang classifier live.ipynb diff --git a/corpus_build.py b/corpus_build.py new file mode 100644 index 0000000..d0d74d5 --- /dev/null +++ b/corpus_build.py @@ -0,0 +1,86 @@ +import glob +import os +import pandas as pd +import re + + +file_types = {'.clj': 'clojure', +'.cljs': 'clojure', +'.clojure': 'clojure', +'.edn': 'clojure', +'.hs': 'haskell', +'.lhs': 'haskell', +'.ghc': 'haskell', +'.java': 'java', +'.class': 'java', +'.js': 'javascript', +'.javascript': 'javascript', +'.ml': 'ocaml', +'.mli': 'ocaml', +'.ocaml': 'ocaml', +'.perl': 'perl', +'.php': 'php', +'.phtml': 'php', +'.php3': 'php', +'.php4': 'php', +'.php5': 'php', +'.phps': 'php', +'.python3': 'python', +'.python2': 'python', +'.py': 'python', +'.jruby': 'ruby', +'.scala': 'scala', +'.racket': 'scheme', +'.scm': 'scheme', +'.ss': 'scheme', +'.clojure': 'clojure'} + +hit_num = {"clojure": "1", +"haskell": "2", +"java": "3", +"javascript": "4", +"ocaml": "5", +"perl": "6", +"php": "7", +"python": "8", +"ruby": "9", +"scala": "10", +"scheme": "11"} + +raw_file_list = [filename + for filename in glob.iglob(os.path.join('corpus/bench', + '*', '*')) + if os.path.splitext(filename)[1] in file_types.keys()] + + +class Corpus(): + + + def read_process_file(self, file_name): + with open(file_name) as f: + return f.read() + + def parenthesis_count(self, a_string): + return len(re.findall(r'[()]', a_string)) / len(a_string) + + def build_dataframe(self): + a_dataframe = pd.DataFrame([file_types[os.path.splitext(file)[1]] + for file + in raw_file_list], columns=['file_type']) + a_dataframe['hit_num'] = a_dataframe['file_type'].map(hit_num) + raw_text = [self.read_process_file(file) for file in raw_file_list] + a_dataframe['raw_text'] = raw_text + + return a_dataframe + + def feature_breakout(self, a_dataframe): + + a_dataframe['paren_count'] = [self.parenthesis_count(row) + for row in a_dataframe['raw_text']] + return a_dataframe + + + def compl_df_build(self): + corpus = self.build_dataframe() + corpus = self.feature_breakout(corpus) + return corpus diff --git a/lang classifier live.ipynb b/lang classifier live.ipynb new file mode 100644 index 0000000..32aa0d0 --- /dev/null +++ b/lang classifier live.ipynb @@ -0,0 +1,361 @@ +{ + "metadata": { + "name": "", + "signature": "sha256:517c0af79cf5d3b4fc45e900b47c2c280b55540e2ca9ee92e073faaa3e7a2a55" + }, + "nbformat": 3, + "nbformat_minor": 0, + "worksheets": [ + { + "cells": [ + { + "cell_type": "code", + "collapsed": false, + "input": [ + "from corpus_build import Corpus, hit_num\n", + "corpus = Corpus()\n", + "corpus_df = corpus.compl_df_build()\n", + "corpus_df.head()" + ], + "language": "python", + "metadata": {}, + "outputs": [ + { + "html": [ + "
\n",
+      "[corpus_df.head() HTML table omitted; the same data appears in the plain-text output below]\n",
+      "
" + ], + "metadata": {}, + "output_type": "pyout", + "prompt_number": 2, + "text": [ + " file_type hit_num raw_text \\\n", + "0 clojure 1 ;; The Computer Language Benchmarks Game\\n;; h... \n", + "1 clojure 1 ;; The Computer Language Benchmarks Game\\n;; h... \n", + "2 clojure 1 ;; The Computer Language Benchmarks Game\\n;; h... \n", + "3 haskell 2 --\\n-- The Computer Language Benchmarks Game\\n... \n", + "4 haskell 2 --\\n-- The Computer Language Benchmarks Game\\n... \n", + "\n", + " paren_count \n", + "0 0.071901 \n", + "1 0.071704 \n", + "2 0.063995 \n", + "3 0.024707 \n", + "4 0.024352 " + ] + } + ], + "prompt_number": 2 + }, + { + "cell_type": "code", + "collapsed": false, + "input": [ + "corpus[''] = []\n", + "corpus.head()" + ], + "language": "python", + "metadata": {}, + "outputs": [], + "prompt_number": 5 + }, + { + "cell_type": "code", + "collapsed": false, + "input": [], + "language": "python", + "metadata": {}, + "outputs": [], + "prompt_number": 67 + }, + { + "cell_type": "code", + "collapsed": false, + "input": [], + "language": "python", + "metadata": {}, + "outputs": [], + "prompt_number": 67 + }, + { + "cell_type": "code", + "collapsed": false, + "input": [], + "language": "python", + "metadata": {}, + "outputs": [], + "prompt_number": 67 + }, + { + "cell_type": "heading", + "level": 1, + "metadata": {}, + "source": [ + "Choosing the Best Model" + ] + }, + { + "cell_type": "code", + "collapsed": false, + "input": [ + "from sklearn.naive_bayes import GaussianNB, MultinomialNB, BernoulliNB\n", + "from sklearn.neighbors import KNeighborsClassifier\n", + "from sklearn.tree import DecisionTreeClassifier\n", + "from sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier\n", + "from sklearn.cross_validation import train_test_split\n", + "from sklearn.cluster import KMeans\n", + "from sklearn import metrics\n", + "from sklearn.cross_validation import cross_val_score\n", + "import numpy as np\n", + "import seaborn as sbn\n", + "import matplotlib.pyplot as plt\n", + "%matplotlib inline\n", + "\n", + "corpus_features = corpus.drop(['raw_text','file_type'], axis=1)\n", + "\n", + "corp_train = corpus_features.values[0::,1::]\n", + "corp_answer = corpus_features.values[0::,0]\n", + "\n", + "x_train, x_test, y_train, y_test = train_test_split(corp_train, corp_answer, test_size=.4, random_state=0)\n", + "\n", + "model_list = [KNeighborsClassifier(), RandomForestClassifier(), DecisionTreeClassifier()]\n", + "# BernoulliNB(), MultinomialNB(), AdaBoostClassifier(), GaussianNB(), \n", + "\n", + "def run_test_model(classifier, x_train, y_train, x_test, y_test, orig_train, orig_answ):\n", + "\n", + " classifier.fit(x_train, y_train)\n", + " predicted = classifier.predict(x_test)\n", + " return metrics.f1_score(y_test, predicted)\n", + "\n", + "def run_rank_multiple_models(list_of_models):\n", + " return [(model, run_test_model(model, x_train, y_train, x_test, y_test, corp_train, corp_answer)) \n", + " for model \n", + " in list_of_models]" + ], + "language": "python", + "metadata": {}, + "outputs": [], + "prompt_number": 175 + }, + { + "cell_type": "code", + "collapsed": false, + "input": [ + "model_results = run_rank_multiple_models(model_list)\n", + "model_results.sort(key=lambda x: x[1])\n", + "x_labels = [group[0] for group in model_results]\n", + "x_values = [group[1] for group in model_results] \n", + "width = .75\n", + "height = np.arange(len(model_results))\n", + "mean = np.array(x_values).mean()\n", + "\n", + "plt.yticks(height+width/2., x_labels)\n", + "plt.barh(height, 
x_values, width, color = sbn.color_palette())\n", + "plt.axvline(mean, c='r')\n", + "plt.rc('figure', figsize=(10, 5))\n", + "plt.show()" + ], + "language": "python", + "metadata": {}, + "outputs": [ + { + "metadata": {}, + "output_type": "display_data", + "png": "iVBORw0KGgoAAAANSUhEUgAAArMAAAD9CAYAAACvKv8uAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsnXe4XFXVh98bqkBCjRRFAwEWKgiIGDoJgqF3lKYUISBN\niEoRxNCLFGnSIQgaBFE0AVRKIKEFFKQFfgE+kA6B0EKkJfP9sfbJPZl7ztx7k0lm5rre57nPzJyy\n99pl7qyzzj7r11apVAiCIAiCIAiCVqRXow0IgiAIgiAIgpklnNkgCIIgCIKgZQlnNgiCIAiCIGhZ\nwpkNgiAIgiAIWpZwZoMgCIIgCIKWJZzZIAiCIAiCoGWZu9EGBEEQ9BTOXmf9ytILLNBoM4IgCErZ\n+r6xAIxcdwMAXpsyhXXPOY/+/VdsmE19+/Zum5Xzw5kNgiCoE0svsABfWqh3o80IgiAoZe42vynf\nk/5XxTKDIAiCIAiCoGUJZzYIgiAIgiBoWcKZDYIgCIIgCFqWuqyZNbOBwPXAk0AF6AP8H7C7pE9n\nodwLgBsk3T2T5/cDHgP+ldt8p6QTZ9amknqWBVaTNCp9HgLsDkwD5gGOkXS3mQ0HRkj6+yzWtycw\nSdJIMxsB9AeuAKZJuqyLZcwHXAbsCTwuaZVZtGkIcKWkz2Zm/yzUuwpwfvq4DjAO7/dfSbqlnnU1\nCjPbHnhA0mvdPO9GSTvOJrOyOjYA3pX0+Oysp4u29AOeAQZIejhtOwBYUtLxjbQtCIIgmH3U6wGw\nCnC7pN2yDWb2O2Ab4MZZLHdWeVLSoDqUU4tvAwaMMrNdgE2AjSVNTT+wY8xsDerTHiRdna9b0udn\nopjDgD9IqphZPew6GrgaKHNWO9s/U0h6AhgEYGbPA5tK+qSedTQBhwLjgW45s7PbkU38EBgBNNyZ\nTbwHXGVma6V5UJfvXBAEQdC81MuZbUt/AJjZvMDSwCQz6wVcCnwxbfurpF+kKOVHQL+0fS9Jj6RI\nyhDgTWBB4AYzmwe4ClgOmAs4W9L1ZnYX8G9gFWAyMBYYDCwCfKeWwWZ2FrBe+vh7SeclmxZLf1sB\nRwLr5+r8o5kdCPwAj/49BBwOHAXMb2b3JdsPlzQVQNILZraapHfMDKBiZr3xSOrCwDLAhZIuri5b\n0o/NbAfgCOBT4FVgF+CXuGPzdWBhM7sJ+DOwsqSjzewQYFf8h/w6SecXtG0PYPXU/rnM7LfAl1Id\n+wBTS/p8DeC8tP8jYL/U10sBI1IE9np8PswPHAB8M7f/XOAM4GN8XnwEHIhHsCvA9sCqwNB0/pLA\nRal/LgNWyA3j25J2KhnffsBI4C3gFmAL4ABJykfrivqqqLxU5rHAtvj35iJJl5rZT4Dv4U76GElH\nmdkwPFq+BLA4cCGwI7ASHgl/AxgOfIjP/VGSjstH7s1ss1TuDWmcrk5R0AO6Ye9rkpbuwvdkO2Cz\nZO8SwDBJN5nZpsCJ+Bi9jc+LNYDT8fG7PZWzupmNT32zPf69fSu93z31/edSn5wu6WozGwCcgy91\neiUdtyJwLj53svrmA/7AjPNpMnB5VXN/B9yGR2bvBk4GflbVH7sDP062P4N/V/cosW/ValskvV/W\n10EQBEHjqOea2Y3NbLSZPYnf1v+TpNHAssD9kjYDBuA/RuA/xi+k7ecDQ8ysLx4xHID/wFTwH5P9\ngTckrYdHPU8ys8XT/nGSNsF/9D6U9B08irVR2v/VZFf2t4yZbQX0k7Q27qzulm5XV4A7JK2P37Lu\nJ2kDYGPgGDNbGNgLOEjSusBTyb5TcYd4JO6c/l++YyS9k/vYhjtkIyQNxp2BoWnfDGWb2Vy483pG\nsmMUvoSjAlQkHYQvN9guK9zMvgp8F3fUNwS2M7OVqtr2eeC9zOEG5gVOljQQeAl3UMv6/LJk40Dg\nN7iTewXwerJ1AO7IbA4cBCxYtb8NmE/ShpKuxR2YLVP7xqf+qOBO1eZpHH5qZn0l7SdpUO6v0JHN\nsSQeqf1V+lzJv9boqw4kJ34z4Fvpb6U0Z3YG1kljtqKZbZnKnyJpc/zOxBaStgFOS31QAb6czl0L\n2DQXuZ/BxrRU4t/4Rc6KXbW3iq58T3ql/ZsBvzazuYFLgO3TWN8NHJuOzcbvBOBv+MXWy/iF0ibp\nezV3alsF6CNpa/xOzVHJpkuAvdOxo4Cv4Bc3B6Y7Kbekctei43x6rmoeDJJ0Oe0X1MelPs0uVklz\ndxgwKM21d/E5XmbfZTlbbk22BEEQBE1IPfPM3ilpVzNbDI+QvJC2vwOsZWaDgPfxH9OMR9LrS/gP\n9ArAU9k6WzO7N+1fGY8CIWlyigL1T/seTq/v4j/OWZ1ZPeOrlxmY2W54dApJn5nZA8BX026l11WB\nNc1sdPo8Nx5F3ht3rpYD7qc9Kp39kP4Hj3A+matvMPBozoQ3gcNS1PV9PCpJSdlDgaPN7FDceb6J\n2nwNd5TuTJ8XwZ2gfNuWwKOD0+2RlO27H3deobjPl5b0WNo/FnfQ8tyCj+Nf8GjySQU2Kvd+Ih51\nnIyP8/1p+93J2Z5iZk8Ay5vZKcwYmZ3Uya3050vW6GZjVdRXKwATCs5ZCXhQUiW166dmthO+ljW7\nKBibyoQZ5+WTuffzp/cPSJoCYGbjUvl5qi802/DIalftrabse5LZcweApNfN7F08kv5+bp3uWDza\nOYoZx490XsXMPsWj75PxOzHZvP53en05V9+S2ZyTdBWAmX0FuCjdwZgntetWfP5On09m1p+Okdnf\nA/9I5X1iZnunbdka8uXwJUcfps9j8Kj0uBL7imwJgiAImpC6ZzOQNAm/dXe5mS2FRxvflbQHcDZQ\nJI+TORfPAF8zs8+ZWRseAQN34jYASLfoVwWeT/vK1sTVUpN4Co/IkpYwrJvqzpf3FDA6OcKb4rd7\nn8OjlgekaNUa6dyptPfllcAvUlSVFDm7LB2TMRSPVn8f+GPO1qKyh+C3fQem47YvaV/2WaR1wsn2\na/CH4PJtewN3hDKWMLPl0/uNcMe7rM9fTbdgs2Mzx2YavhxhIPBaijqfDJxStT97T4p0D8Nvp+8H\n/DfXjm+mYxbAHYsJBZHZztaETsu9/wiPmgOsmV5r9VU1TwPfMLM2M5vHzP6GOzgDzGyuNF83pKPT\nM8MSnByrpXLmwuf5E1U2fqOqHb2SDV21t5rO1o6uBWBmS+Lf0VeBPuk7DB3HOm/bXGb2dWBbSbvg\na3x70d7uorpfNb
MVUp0/M7PtUvu+n9r2c3yZyECq5lNJZPayXH1IegR3Zo9M9T+P36XJ/v8MzLWn\nyL4iW4IgCIImpF7ObP72KJKewtdVnodH9zYzs9vwW3j/NLNlcudNP1/SW3gk7x48yvJp2ncpsLiZ\njQVG487dxC7YlH+djqSbgefTGtf78YwJj+SPT0sGJpvZGOBBPFPAZPxBl7FmdgfuFD6Qtm1rZt+V\n9Ie07R4zuxt3bnfP2VvBfxgPMrO/A1sDH5ivM64ue1yqe5SZ3Y7fNh9V0r6sDx8D7jCze8zsn8Dy\n+JrEfNueAz5vvp4ZPFp3Ymrrwsnmsj7fD7ggHXsIvmYYPHJ3M+4I75si2mfQ7syOxaO2eTveA+5N\nY/Bn3LlYOh3TJ82ZMcDxVUs1yqge6/zn84DfJCe0V42+etXMBpvZkfmCJD2K31K/N7Xl9+n869O2\ncXgkOIuczzAuJe9H4nPlj5KexKONh6d2L5M79j784bmXiuztRn/UYsU0x0YCP5I0DR/rP5nZPfhS\nmywLSL7ccfgym8+AD9O8uBaPBFd/z/Pv9weuTOt518DnzoHANWnOnYR/H8rmU1fafAp+pwRJb+Nr\nzUeb2f34koiLa9j3owJbyN2pCYIgCJqEtkolHvb9X8TMjgKezjlfTYN5qrcdJR3SoPr7AvtKOnU2\nld8POD+t02w45qnelpB0VqNtaXbM7BxJh5ftH/Ht71R6kkRkEAQ9jzVv+xsA/9p0MwBenPwBy518\nGv37r1jrtNlK3769a91N75R6rpkNWotfA1eY2V/SOtBmYoZIfwNoA86cjeXXrX1mtjXtDxDmObeb\nFyrNNgealXD4gyAImoyIzAZBENSJiMwGQdDs9MTIbMjZBkEQBEEQBC1LOLNNgpndmF5XNU+O363z\n6mTDXWa20czUZWZLmtmF6f20suPmNGa2k5n9cibOG2Jmc5vZQHPJ4K6ed5eZnZ37PL+5MllLY2YD\nzOwZMzu5m+et0p35PKuY2bBZGe8a+2t+N4IgCILGEc5sk5BLM7Uj7Tlvu3NePXiN9swH3apL0hty\nEQdoz2nayhxNeyqx7rKLmW1YT2OagMH4OtxjunneTnRjPteBmnO4Bp2N98yWGwRBEMxm4gGwOYCZ\nfQ6Xhv0SrrZ1MC4QsE86ZBgux7kmnpf3IzN7GM/3eRKeo/Y5PJ3RHtXnySVLi2Rm5wJGAC/iggcP\nSjrQzA7CnYyMCi6zeoikt1L+z+HAJ3hqo36SBpnZ65KWSumUHsGT+PfBlax64apm6+DqbWV9sRee\njmx+PA3XubgM6irATyX91cwOpqMs6hBgPUm7mdnVuOjARSV1rIs/4PZu6ot/pe1lMr8f4+IDC+JK\nWxuQ5HeTfSua2S24ctpIuQxumbRuBVexu9TM1iSXXzhlMbgyjUsFOFTSY2b2DJ6OzvCUbDumYy5O\ndfQCjpV0dyP61My+hQt6fGJmL+NiC9XzcgE8tdh0iWbgr8w4n68HLIkanIbnMn6BGeWNXyooe3n8\n+/Np6ovdcPGIakGOs/Dcu5hZn2p75JLIdwFDJE2wJGuc6szGewcrkLomfTeK+j8IgiBoLBGZnTMc\nAPyfXPI0k3yt4ApWG0q6E0DSq/iP9tmSHsLFFjI50Vdwx6DDeYkOMrPp2BVx5/dbwBZm9nlJF1Yl\nnN9Y0ku5H+tfASdJ2hjPoVqU03acpE1xtbddc/uQ9GYn/bGgpC2B0/GcpjvgjtXeSXygWhb1m5Iu\nBD6XnM+5yxzZxEV4bt/v0J4ftJbM72OSvo07R79SR/nd+XHncAP8QoQCAYf8xcGjwG9pH4OMM4Fz\nJG0E/Bi4Im1fDndW1wX64gIG+wIT07Hb4c5hQ/pU0oP4xc1ZKUNC0bzsT5VEc8F8zpPvl/kkbYhf\n0F1aUPYmeD7eTfBcsQtLurdAOGGUpClyZbUO9hTUm+VmvpI03lYidR2ObBAEQfMSkdk5w0q4LCeS\nngXOTbk9O8iCklSMUq7TpYAbzCU1P4c7js+WnLe0imVmn1WS8DSz13Dn5WA8+pfnB5JeSu9XxhP1\ng0cMdy+oLy9FvFTB/jIqtMuHvodH5yBJvaq2LOrpya68OlYRS0nKFN3GAGtTW+b39vR6L+7IV/OE\nXGL5UzP7DMDMLqddUhnaI7NZG09L5eWj1Csne5D0qJktm7a/JSm7hf0S7jyvAmxgZgPS9rnMbDG5\nwl41c6JPAdpK5uU/cEGMvERz9r+l7AnV/PZsPvfFI8vVZZ+MK3n9LbXv52a2Hh0js2cnsRPoKBld\n9L+uSDJ4ZYqlrp8oaUcQBEHQYMKZnTM8hUfb/mouG3s87kAVPSg1Fb/F/DauFb+NpA/M5T7fAfqV\nnPeqma0q6XFmlB7toIol6QLgghr2PoFL6f4NdwSLmJWcbqXnmkvlbitpbXPp0X/iTtS8wDl4tPEi\nM9swOZhFvGJmX0uqWuuk+jLp2s1TPUNxKdid8Ki18DY/nsrIy+8WqcjtW6uBkqalC5Z/0D5eT+FR\n4ZFmtjq+DrOsP54GXpZ0arpl/hN8/MuY3X2a8RbF8/InuETzxWY2CNgyHZ/NZ0hyvWb2H2B12p3u\naZ2UvS0wVtIJZrYrcKSkfYBBNezMJKOr7ckkgyfgDnx2AZdJBj+FL6n4tbVLXQ/vpE+CIAiCBhLL\nDOYMlwDLp/V6w3EHAoplNP+F38reEL8VfYuZ3Ys7HONrnFckM9vGzDmdRwJHmcubbo2vna2uN0+h\ndLCZHWlmgzs5vrotz9JRFvULeKRzpKTLcSf7NMrZFxeEuB34Cp3L/O5kLiE8FHfKoEB+t6iNtZA0\nAV9qkPFT4BBzmePfAD8sKbOCz5mV05y5C3gxRVgb1afg/ViheF6WSTT/CzjYXNXtDLxPbwYmVdmM\nXEK3uuwncef7hDRGQ/C14Z1RZk+1rHHGWOBmFUtdZ1FvrEDqOAiCIGgsIZoQdMDMdsPXxD5nZvsC\na3cWiSwpZ2tgsqSm1bM3s6twadmWyMDQCn3ak7FOpI5DNCEIgmanJ4omxDKDoIiXgOvMbArwGe0R\nxO7y79w63LpiZl8Cri7YdbekYbOjziYh+rSxzG6p4yAIgqCbRGQ2CIKgTkRkNgiCZqcnRmZjzSzT\n1X2sm+fMZWaj0xrMRepszzfT7e96lLWXmT1vZgvltl1nDVYzSms/15rFMkaa2ZfrYMtqZvaL9H57\nM1u6m+f3M7NPzewbuW0H2EwoUdWTtL5zv0baUIR1U1Wtk7Jm9bu7cCfHHmZmhUsKgiAIguYglhk4\n1Q/NdIUvAL0lfXM22FNvFsBFBLJ1rzPT3roi6fQ6FTXL7ZD0KJ4bFuBQ/IGm18rPKOQ94CozW0vS\nJ/Wwa1aR9PdG21BCPftmtnx3zWx+PA/wWsAfZ968IAiCYHbT9M6szQHFqFxdC+M/YIulTYdKeqKk\n/ItxZaiLJP2opLzH8CfRv47/4G4r6X0rUBhK0aUrgf/iabmmpDJ2xjMTTAX
ukXR0yrF5Fp5lYAqe\nXmowKaF/ooJnJajg6yDXM7Mt09PaeRuLbBmOpzDql/p8L0mPFNlSoy+fxfOsrgTcgSsxfQuQpB+k\nOkak8rfAc4r2B06XVLRuMyv3eDzN0mvAsmlb2bgVKWutQEc1qRVwpalr8JRRvzXPI7uipCPMbC48\nr+6udExp9js8/+8zwN14TtSfVdm8O/6U/sfpuCG4kluHdqc0WufiazPfBvaR9H5JX9yF55ddBZiM\nP5E/GM+h+x1cbMHwuXodVUpwNfr4ZGAg/v/hRklnpEj+canPFkr99inwh1Ruv1THKsAaeGaAY6xd\nLW4NPP1VJkSR1dWl+S1pcpm9qZyZ+e5ejKdiK1LD+wDPPPIPPPdsEARB0KS0yjKD2a0YBf4D+3Pg\ndrny1f547s2i8tcCfgSML3NkE71xB3EgngZqcytRGMKT9R8naRNSEn8zWxSXrN1Y0gbAF8xsE9yZ\nvw7PJ3sRsKikG9VR1StTXZqK/0D/2syyH/u2GrZUgBckbQacDwypYUsZXwaOwVWzDsXlRAcA6yfH\nI5+WqY+krYFtgKPKCky38QeliNrOuFNVOG7plCJlrQ5qUrSnhroFdw6/jzva25lZL2Az4E5JTxao\nTl1Ou3N2HLBpcsYymxdP/TYo9du7ycaydl8GHChpEC60cUSNPs6U2DYB5gM+lKuejcfnRj5i2UEJ\nrka5u+GOe2YvuHDAHsmuP+H9X0l9vA+wFXAi7pgOYMa0Y7en78Cf8DlRSX3T5fldw1aY+e/uASpX\nw3tX0m2d1BsEQRA0AU0fmWXOqRuBR5UGmdn30udFS8qfm3Jlo2rySlnz49HEIoUhAzLncwyerH0F\n3Am7NS0L7I3nRz0FdwruwJ3kcWa2E3BQVd3THSFJz5rZubhzkCWpL1M7qrZ7vRq2lPG2pJcBzOxD\nSU+n7e+lfsiTje/LBfvyGJ63FEkfmVnWX6tSNW7ptVpZaz48ejeDmhQFYylpsnk+2MG4pOrxZtYf\nuLzq0N/j0TskfWJme6dtl6X9y+FiDR+mz2PwqOm4knZ/BXfEwOfxhBr9AZ4zFvz7kOUhfoeO/Vit\nBFern3fHvztLkZTrgFeB89J34At4xBtcpvmD9B15Q9K7qY68I505hffSLl4A3ZjfNWzNmKnvrnWu\nhhcEQdDjWWyxhejbt3UfXm0FZxbmnLrR08C1kkaY2RfwSGVh+bNge5HC0NW4I7I+nlR+nXTs87gT\ntomkqWa2T6p/D2C4pJ+Z2dHAEEknULC2z8wy5xRJF5irKq2K32ots2XzXBFZW4tseYhyurOOsavH\njsdFB3rhc3eNtP0p4J+5cdu1pNw2CtSkmDEdVV756zI8YrqYpEzOtIPqlJn1y96n5Ri/T+VeiPfb\nV81sAUlT8Nv3Zeps4HPw+5JeNrMNgcXLOqNGGTN9XPre7Cxp1xTZfNLMrgMuBZaX9GG625Hd1elK\nuQPwi6a8whp0Y34DJ3RSx0x9d9W5Gl4QBEGPZ9KkyUyc+EHD6p9VR7pVlhnMEXUjfL3jd81sNPBX\n3EkqKn+ZAltq2T39s4oVhh7Bb88eYa5yNCgd+xauIDUmRU03xddcPghcbq5wNZDi3KBlduwNzNuJ\nLflzKjVsedbMVjezc+hImWpWUZ91SWErPaj1F7z9N+FrIIvGLYsCFylrlalJZcfeh6+ZXUTSg/ga\n09+V2VRi9ynAf5LNb+PLGUab2f34be+LC87J3v8IuMbMxgInkZy/1LaZoVCdrQz5w2uT0hjfCfxd\n0ov43B9rZqPwfs8yPnRlnA9Ka2cH42MF9Z/f9f7uVpcPgJmdY2ardePcIAiCYDYTeWaDWSJFvH4u\n6dhG21JvUgR4LDC4sweQ5oAt50g6vJE2zAzJudxR0qRG21IP0rKEWyU9V7Q/8swGQdDs9MQ8s62y\nzGCWsdmkbmSeK/WMgl1/kHRxwfaextz4+sq6YZ4bdbeCXUdLeqCeddWwYTn8gaUrG+3IJs6qZ2Gt\nNG/NbFngtwW7GqFM9pdYTxsEQdBcRGQ2CIKgTkRkNgiCZqcnRmZbZc1sEARBEARBEHQgnNkejJnd\n2GgbijCzF9JT83OyztHm7GlmW6dtB3dyzsLmkrl3mdl9Zrb2nLF2BhsWTVkXZlkC2MzmM7Mfdn7k\nzGMuB/y73Oe1zewBc+nY4+pYz/RxLNk/3MzWrEM9fc1swpyer0EQBEHXCWe2ByOpOn9ms9CotS0V\nSVdLGpk+H9PJ8YcDt6WE/3vhqbbmNKvhogpIOj0nhDEzLE27pHHdSXmMT2HG9FcXAbtKWh8YYGar\n16OuqnEsYpbnmJkNxnMI1xKYCIIgCBrM/8wDYD0N65rM7+uSlrJ2SdFVgD54HtEXS8otksrthYsF\nLIynNrpQ0sXWNTnVzYAl0t8wSTfl6loWuASXdP0vnirrLeD6ZOcCwDFlSkxmthLF0rRDU78sCVyU\ne6CpzcyG4VK4iwOLmdkFwGhmlAIGF5w4B5egBRcw+G+RHcmWveieLO8heD7cCnCdpPPNbIdU76e4\nSMEuuMP99fRQ3Lq4MtbSzJzE8zF4zttj8dyq1+JCBXPjSmmjzewJPA/uJ+mYzmSTAX4m6Z+4KMKf\ncQUuzKwPMJ+k59Nxf8eV1/5NAd2cp8OA1yRdYgWSzOn90eYqY23AfrgAwwxzC0/FNqqq+DsknYQr\n532bJNQRBEEQNCcRmW1tSmV+0/58jtFxkjbF1Zh27VBSO0VSov2BEZIG487M0Kpya8mp9kr7N8PF\nGTJBgjbgTOC8JJF6Fp4LeHnc0dw62VnrgqtMmnYJXPhhHeCnZtY3d06WN/cUYJKkg9VRCniQpIck\nvZeUxpYCrgGOrmELdF2W96vAd3EHbENcNncl3Hk9I0m7jsKdrpNwKd3LckVUgIXKxt7KZVxPwmVc\nTwKOxXPIboRL016Ryl4QOEHSrnRNNnlQcmSRdH11fwDv5z5/gI9RGd2Zp5kkbpkkM6l938alos+g\nYG5J+rCgPSel9tzeU1KKBUEQ9GQiMtu61JT5LTg+L0+7VI1yi6RE3wQOS5HD95lx3nQmp3oHgKTX\nzexd3NHMWBX4uZkdiTu3n0gab2aXACPwaOh5lFMkTQuesmkqMCVFGmvJ7mIlUsCSHkoqUiOAn0ga\nW6OY/Hh0Jsu7CvBlXJQAPJKdRZSPNrND8fG8iXK1uWw8uyrxXC3BvDLuoCPpVTN738yy2+mZQlmX\nZZNLlj+8j0d+M/okG2vR1Xmab0eZJPPd6fUB4FdFc8vMFgRuZsZlCXdKOrELdQdBEARNQDizrc3s\nkIzNS4kehUf7FgbuT0sLBgFbdqPctYBLzGxJ/NbuxNy+p4AzJd2fomkD0mtvSVuZ2dL4reubS8ou\nk6b9JkwXdPgKriqVpy3/KumPlEsB34Df7n68en8BXe3jp4EnJW2e6hmKK30NwZdiTDSzi/GlAc9T\nfAel2xLPuFRvVtZTeFT4UXP510
WAt9O+aem1y7LJRUh638w+MbPlUzu+Awzr5LTurnWtJcm8NjCB\n9nZ2mFuSlsdVxoIgCIIWJZYZtDa1ZH6h3DGo5TDkpUQH4Y7BSFyS9O/4LdoPuvF094qprJH47fBp\nOXt/CvwyrZW8AngCdzwHmtnd+PrGX9Qou0yato+Z3QaMAY4vuFWctX+8mRUl4884BZf+PS9lQ/gz\ngJkdlR4OqqarsryPAXekJ/z/iUeOX8H7flTqryXxPnsOWNXMflxSV1clnpcB3gDmNbNTU9s2Tv38\nZ9xRnVpVVndlZYvsOQCXAx4HPJxFcK1r8rydOradSDJ/28zuBA7E1yJ3Z24V2mFmq1mxfHMQBEHQ\nIEI0IZhtmNmewBKS6qpe1UmdA3H51ENmYx1bA5MldcUhCwqwWZTnNbOTgWckDa+fVV2qt6Z8c4gm\nBEHQ7PRE0YRYZvA/inkO2sWqNr8rafs6V1WPFEkX0r4OMs/mkj4qqG92X6H9uyuSptYEsrxNTJcu\ncErm6RrAi/gyjDlN3eWbgyAIglkjIrNBEAR1IiKzQRA0OxGZDZoWM+sHnJ9SQzUtaRnA9cCTuc2/\nr0o9lT/+LmB/SSraP6dJT/N/TdLxc6Cu+YA9JF1R45gN8Ih6Vx5Qy583DH9Iat20VpaUCeC7Zbld\nG8WcWDpSVd8SwO/xjBSvAntL+m/KSXyCpDfnhB1BEARB14gHwII5TQW4vSqvZ6Ejmzv+f/X2QVcU\nu36IP9w1M/Rjxty5zdrPc9qu44BrJW2IpwrbP20/Dzh1DtsSBEEQdEJEZpuYlGroKmA5YC7gbEnX\nm9ka+A/rVOAjXN0I4Etmdgu+xvCvSRigrOyrcDGEzwHnSro2JaA/Dk/j9DD+JPomwImpnreBffA1\ni6fj6lgNvwypAAAgAElEQVSX4jlBM8Wk5/Af/22ZUSmqgqfOgoLcqUkt6jI8RdR0lbHc/iJlso+B\ni/Ecrb1wFau7KSApdO2T6v4lvga3WiFrdwpUvMxsXeDXeI7Uj/AsCpjZT4DvAZ8BYyQdlSKe/fF8\nuovjErg7AisBe0oaV2JfUfvyil1X4cIFmerXsXi/DwZWN7PxeCqqw/FxuEfS0WZ2UCorowLsmV7P\nAPY1s1GSpqty1Zh3d1Gg0FWkZlbSxkNw4YUTUtT538DX8XlWrYY2HJ/Hi+OiB2ukDBULA7+RNLzk\nezCRAgU5M9u5G32zHj6fAW7FMz/8WtIEM/uKmS0WYgpBEATNQ0Rmm5v9gTckrYc7lSeZ2eK403eQ\npIHAb4Cz8R/iBYEd8FybW5rZ14sKNbPewAa4A7cZMNXM5gbOB7aQtBaexiiTm90+1XU37kRVcJnS\nDfG0S5fmjnkF2KtAKWrjlJapDU8JNTr93WlmvXAH8Dp1VBkjnVOkTLYvMDGpWG2HO461eDupa42m\nWCGrQrGK10XA7nJ1s8dxWdxVceWsdSSti6cg2zKVMSXlkL0x9ec2uLrZLjVsK2pfXrFrZeCsZMMQ\nfPwfxgUjjgA+xHO4bpza+AUz20TShQXjkD28NjmVNTyXaq2N8nnXQaHLytXMirgmHUvq35H4hUjR\n+RVcVnY9/CJiGp6ndiNcWGIJir8HHVS+zGyxbvZNH1yMIuujvGrZ07RL5wZBEARNQERmm5uVgdsB\nJE1O0bf+wNIpVym4+tFp6f2D2dP9ZvYQHg18jCokfWBmh+HOQB88F+niwDuS3krHnGkuA/u+pNdy\ndZ2MS61ma1j74pHCG8wMPKp5m5ntyIyRWXCnq4IrLM0gVWpmtVTGKhQrk60KrG9mA9Jxc9WImlXw\nBPqoWCFrnnRckYrXUpIy4YUxeATUgAey9aapb76W3udV0Z7Mva+lClbUvvzxrwPHmNkPU1uqv7v9\n8bG4NY1Db6C/+Yedqo79QfZG0tiURzaveFU276CjQtfXKFYzm1DdQEnvmtkjZrY+HgEdCqxecH72\nFEI2xyp4NLUC/NfMnsKXSHT4HqhYQa67ffM+/r2YmI7Nq5a9hn9XgiAIgiYhIrPNzVN4BDWLpq6K\nKym9miKD4JGq7Ed/NTObL0VZ18ajiB0ws6WANSXtAGyF326eBCxiZoumY87BbzP3ScdX15WpRL2F\nO37bSBqEO9a3FURmB+Uis0UMxVXGvo8rTOXnZhvtalQb4w7ikNQ/I1K92+K3l98pKX+6zSliva2k\nXYBDU12ZXUXrM18xs8xRXSe9Po0rls1lZm14VLHagWur0d5q8u0bn9o3lfZ+OAH4raQfAHfltk/D\nlwI8jzuYm6T++A1wX0H0cVBBWrFj8IfBVkify+YddOwf4Wpmg1K911BwAZXjMvx2//ySJtCuhlZ0\nflZXG7CWmbWZ2UK4s/0sBd8Dy6l8AXvhdxu62zf34stNSP0yJmf/orj4RBAEQdAkhDPb3FwKLG5m\nY/Fb48MkTcTXBl6QFJ4OwZ0D8Fujf8F/jK+VijMASHodWMrM7gX+gevWf4orJd2c6usl6cFU15/M\n7B5gY9ojeJVU1jTgx8AtqbwhuDNWRtkDXbVUxioUK5NdAqyc1nLeBbyYoq5HWm2FrmcoVsjKH5N/\nvy9wRar7K0BF0hO483wvHkV9XtJNVedVSt4XUaS29Sau2HUaLqt7ppndCnyJ9tyr4/ALiCXw2+xj\nUlaCTXGHrxbZGH6MS8L2SdvK5l2H81WsZvaqmQ02syOrT5A0Bo/mDk+fy9TQpttHe9/dhkdwj5X0\nLsXfgw4qX+luQ3f65iRglzTnBwAX5PatgUeBgyAIgiYh8swGPQ4Lha6Gk5ao7Cupxzz9n9YHHyZp\nSNkxkWc2CIJmJ/LMBi1FcuqGFuw6NxdF7Il0SaGrEdicU15rNG3AmY02os4cDPyi0UYEQRAEMxLO\nbA9G0kj89v3/FM3qyAJI2rHRNswJ1AOFBSQd2GgbgiAIgo7EmtkgCIIgCIKgZQlnNgiCIAiCIGhZ\nYplBEARBnXhtypRGmxAEQVCT1SqeWfPFyR8A/n9ruUYaVAcim0EQBEGdmDBhQmXSpMmNNqPHsNhi\nCxH9WT+iP+tLq/bnGttvBcAjfx41fduyy36Zeeedt+yU2U5kMwiCIGgSVlppJSZO/KDRZvQY+vbt\nHf1ZR6I/60ur9ufc87jgZSNTcdWbWDMbBEEQBEEQtCzhzAZBEARBEAQtSzizQRAEQRAEQcsSzmwQ\nBEEQBEHQsoQzGwRBEARBELQs4cwGQRAEQRAELUs4s0EQBEEQBEHLEs5sEARBEARB0LKEMxsEQRAE\nQRC0LOHMBkEQBEEQBC1LOLNBEARBEARByxLObBAEQRAEQdCyhDMbBEEQBEEQtCzhzAZBEARBEAQt\nSzizQRAEQRAEQcsSzmwQBEEQBEHQsoQzGwRBEARBELQs4cwGQRAEQRAELcvcjTYgCIKgpzBhwgQm\nTZrcaDN6DO+8s1D0Zx2J/qwvrdqffT79FIDnnntm+rZll/0y8847b6NMmmXCmQ2CIKgT+w4
fyoJ9\nezfajCAIglKGf/weAMff/ysAPpz4AWdscwL9+6/YSLNmiXBmgyAI6sSCfXvTe5lFGm1GEARBKW1z\n+QrTnvS/KtbMBkEQBEEQBC1LOLNBEARBEARByxLObBAEQRAEQdCyhDMbBEEQBEEQtCzhzAZBEARB\nEAQtSzizQRAEQRAEQcsSzmwQBEEQBEHQsoQzGwRBEARBELQs4cwGQRAEQRAELUvDFcDMbCBwPfAk\n0AbMA/xa0g3dLOcc4GxJLxXsGwx8SdJl3ShvFeD89HEdYBwwDfiVpFu6Y1tVuUOA3VNZ8wDHSLrb\nzIYDIyT9fWbLTuXvCUySNNLMRgD9gSuAaV1tv5nNB1wG7CmpMiv2zCyp/xeVNLab55XOg3phZssC\nq0kaNbvq6A5m9gLe5vPS55WBiyQNaqBNSwLHSTrIzDYE3pH0eHfOq5MdLwAbSnqxu3WZ2WrANsBv\ngbsl9auHTUEQBEF9abgzC1SAOyTtCmBmCwJ3m9kESY92tRBJh9fY120HUdITwKBk0/PAppI+6W45\necxsF2ATYGNJU82sHzDGzNbA+2GWkXR17uO3JX1+Joo5DPhDoxzZxE7Aa0C3nNla86COfBswoCmc\n2cRhZvY3SRMabQiApDeAzEncBxgBdOrMVp1XD14E3pyZutL/n0fT/6T/1NGmIAiCoI40gzPblv8g\n6UMzuwR3Zh41s1OB9YG58OjTH81sAHAOvkziFTzS+Tdgf2AJ4CzgE2BKKmcnwCQdbWY/Ab4HfAaM\nkXSUmQ0D+gGfB74MHC7pH0XGJgd0JPAWcEuq99zUjreBfSS9X2Q3MCSVPTW19QUzW03SO2YGUDGz\n3ngkdWFgGeBCSReb2YHAD/CI7kOSfmxmOwBHAJ8CrwK7AL/EncCvAwub2U3An4GVU/sPAXbFnefr\nJJ2fosKLpb+tgD2A1VN7twKOS+17GDgAd8hPBD7K2gysARydti0LXAxsDKwGnJvacB8gYEVgIrBb\nGotsbOYHngLWA/YCPjKzh4EFgJOAqcBzwP6SPisZn7vwebArHpVeAlgcuBDYEVgJ2BN4AxgOfAgs\nDYySdFwa3yvTuFWAQyU9Zmb/SbaNBzYHFkjteT/1Ty9godSmT3Hn7cVkw4OSDjSzvsDVaWzb0nhO\nTOO9WGrCoZKeMLOr0rmfS/13rZmNTHVkPCnp4GTnUGC4ma1f1R9rAOelvvsI2C+1rci+hYtsKenn\norbsAayLj9e+qX8PBDYDVjez8cDawOHJnnvSuA+rOu8qSeuY2aYUz7MjgY+B5fE5fIqZnYh/3zIq\nwGBgJ0kfmdm3gAuAD3Dn9iNgWDp/HTN7DLgL/95UgG2Bb+BzbVcz27moH4IgCILG06xrZt8AljCz\nzYB+kjbAHaNj0g/uJcDektbGo2NfoT2yuS1wHbARcBGwaLbPzFYFdgbWkbQusKKZbZn2fyRpC+DH\n+I9tLZbEI7W/wm/HH5hu6d4CHFHD7mWA/8sXJOmd3Mc2YAV8ucFg/Md4aNq3F3BQsvspM5sLd17P\nSPWMAvqktlTSrdNJkrbLCjezrwLfxZ3FDYHtzGwl2qPj6+MO/Xspcjw3vtRiC0lrAc/gjuolwPaS\nBgJ3A8emMr4A7AD8KG3bA3f89s/129mpnudwx7hD9FfSq8BV6diHUh9n9b2S+qKMSu51iqTNgRtT\nG7YBTkv9VsEvXHYG1gI2TY7fmcA5kjbC58IVqbwvArtKGprK+J2kkcBXgT3S+P8plVfBHfZ9gG8B\nW6Rb2scCN0laD/hJ2nc0cLukjVM/XWRmCwEbANvjjmB28bO1pEG5v4Nz7b4VeAJ39PJ9ehk+bwYC\nvwHOrmHfz6ttqdHPRW2p4A72+rizWJH0cLLtCPzCYRh+Z2ID4AtmtknBeRll8+xL+DxbO5WLpF9U\n9c3Gkj6VlEVlL8aXzXwbn3vV86438PvcHNs8f0yunCAIgqDJaIbIbBH9gJeBVYE1zWx02j532rek\nJAFIugogi2wCpwDHAHfgP0rjcuUa8EAWGcVvYX8tvf93en0ZmL8T+57PRQZXxh0Q8DWwz9Sw+z/4\nD/GT0w3y9bz55RRv4reMd8CjfvOk7XsDPzWz5YD7ccd3KHC0mR2KRw1v6sTur+EO3J3p8yK4UwMe\nMQWPZL6Re/+OpLcAJJ2ZInLvS3otHTMWOBl3pp9ITvB7wHOSPjOzd2nvzzdz6ybvwZ31B3L2tVW/\nT/UtBdyQ+vhzQGHUvICH0+u7tPd53p4HJE1J9YzDo7YrA2NSex9N62MB3spdeLTlbH0VOM/MJuPO\n/D1p+7OSPkxlv5bqXAm4PJV9P3C/me0BbGxm30vnLSppspkdhjuifYBrUzmjgAVz7csis9Aenf0n\nM14wLS3psfR+LO6Il9m3KjAobwvlFLXll0CtZQ79gb7ArWkse6dtVJ9nZktQPs8elzQNmGJm/03H\nn4RfpOX5jqRPc/3wVK6sXQrseyS9vkTn/wOCIAiCJqHpIrNm1ge/1Xg98DQwOkW9NgVuwKMqr5rZ\nCun4n5lZFn1sw6OBw1N06Un81n7G08AAM5vLzNrw6GT2I9qd9aHTcu8FfD/Z+HPgrzXsvhL4RYqq\nkqKil5Eib4mhwP2Svg/8kXanaT/ggBQ5WgO/LTsEGJa2teGRPKhaupH7LNwBGpRsuwbIHJ2s/W/g\nTi64Y72ImS2a7D0HWA7oY2ZLpWM2ot0R7qwPl0i38cEdj8fxSNzSads3csdOxW+Hv41fYGyTbD4N\nuL2TeqrJO595VjOzedJ4fAuPbD6FzwvMbHV8yQbMOOZTaf/uXArsJWlv3LHNthf1xVOpHsxsw7QU\nZTweCR6Ez92rU9+uKWkHfNnHGWbWS9JWNSKzSJqMR1TPzdX/arojAZ2P1VPVthQcU6stMGM/kds2\nF/A87ihukur4DX5h1uG8dAHV5Xkm6diqvhmUc2QBXjKzr6T365S0qZFrxIMgCIKZpBmc2QoemRpt\nZrfjzuBxkp5Jt3Enm9kY4EH8ifzsB/vKtD5yDfz2flbWg8DlqaxBtP8gV9L6v+uBe/GI7fOSbsqd\nS8H7zj7/CLjGzMbi6zofL7Nb0h/wSOQ9ZnY37tzuLmlirtyRwEFm9ndga+ADM5sXd/zGmtkduMM5\nLpU9KrV1SdofSKoUvFZShO4OM7vHzP6Jrzl8JX+spOeAzyfnaRq+5vHm1L5ekh7EHes/mdk9+DKK\nEzvpw+z9Z8Cp6bwlcUfwb0C/VP7OwHvp2H8BB+OO5Y+BW8zsXtyBH0/XmKH9Je9H4mPyR0lPAj8F\nDknj8xvghwXteRzYNkUwr8XHZRS+jnrpguOzz6ek80bja5svSdu+m7b9FXhK0uvAUqm9/8AzaBQ5\nidXtRNLdwO9z+/YDLkhz8RB8CU1biX0nV9sCfhFj/mR/nqK2VLc7ez8OvwhZAl/mMMbMHsAv9J6p\ncV5351ktDsT/Z9yGLyvJHuYsO7/6OwSU9kUQBE
HQQNoqlQhGBDNiZkcBT+cc/XqV+7ikVTs/cvaT\nIsTnS9q60bY0O2Z2MHBrutBpScwfoLxe0lvpYbGPJZ00E+XU7IvNz9m10nuZRYp2BUEQNAUXHHwt\nAAdfsAcAH7z6Lr9c52f0779irdNmK3379i66e9plmnXNbNBYfg1cYWZ/UX3Tc9WlrLSO9bcFu+6W\nNKwbtsSVXNf4i2Zj3t45xBvAP9La5nfxjBYzQ0/oiyAIgh5FOLNBByR9hKc7q3e5X69TOS+RcgDP\nQhn/wRPiB53QE5w3STfiWS1mtZyW74sgCIKeRjOsmW0JzGyn9LR2d88bYmZzm9lAc0Wurp53l5md\nnfs8v7l4Q0tjZgPM7BkzO7mb561iZhvMLrsK6htoZu+a2Rdz204zV1hrWbo7D3PnbW9mE9Jt9nrb\nNNLMvlynsl5I2T2yzytbe1aRIAiCoAcSzuzs52j8Se6ZYRdzKdCexGBcBOCYbp63E57TdU7yMZ7v\nNqMnLEuY2TZsDQyVdEE9jclRz749LGUKCYIgCP4HaNllBma2F/4DOz/+BPm5uGDCKsBPJf01RZG2\nx3NzvpXeDwHWk7SbmV2N5xotTA5vZuvi60ffxVNI/SttL1PR+hgXPVgQV0TaAM+ROiLZt6KZ3YIL\nE4yUdLyZXZbOyXhb0k6p7MOAS81sTXLpu6xcpeoZPM+p4WsEd0zHXJzq6AUcm554n+N9aq7CtDfw\niZm9DLxDlbIXrgJ1OTkFNPzJ+r1oVwS7HlcN+8TMTsOfun8BOCONwaV4CqjqspfHndNPU1/shufd\nrX4Q6Gw8x++dQJuZHSTpwqq2dFlJzsw2qrZF5Qpmj+MCAV/HU7y9gWd0+BjYAp9Pv6F9jI4FRuMp\nrr6Hp7gakcZjckEV0xfZm6taVatxfbGg/Gm4iMCaZvaWpAeqC01zZws8D3B/4HRJV1uBApmkl8zs\neGBLPPXZsqmMQgUya3I1tCAIgqCxtHpkdkFJWwKnAz9KeTmHAHub55FdDM9puTbuuH8zOSWfS87n\n3GWObOIiPHXWd0i68lZbReuxpDB0Ep5O6QrgdTxBexvuIGyLO7kHA0jaryo35k65+h/FH3TKVJsy\nylSqlsOd1XXx5PRr4Tl7J6Zjt8Odw4b0aUrrNRw4K2VKKFL26k+VApo6KoLlyffLfJI2BH6HO7TV\nZW+Cp+HaBE8ntbCkewvyk46k3ek7EDjczLLk/jOjJNcdBbOFcHWxDfF5cm8au3lx0QtL/fcdfFwO\nkvR+KvNy/CLn+yWO7HTMcwcPo6MaV1H5I/EUaj8rcmRz9EnZIbYBjsq1fQYFsuRUDpL0TbwfF8L7\nu4MCmbWGGloQBEHQQFo2Mov/CGWqXe+RcmKSFJ4kVczsU2BEeoL5i7SraZ0O3MeMSfqLWEpSlgdz\nDC6fWUtFK0vmfy/wq4LynkiJ3D81s88AzOxy2lWQoD0ym7XxtFTeFrljaqlUZXljMxWjVYANzGxA\n2j6XmS0maVKBfXOiT8GjnWXKXrcwowJaNkfL0nbkt2dJ9fviUcXqsk/GHZy/pfb93MzWozwyi6RJ\n5mpcV+PjAN1QkqvRzlrklcuynLrv4OP5Oi6P/EN8vOZJdj5kZu/gKaceo3NWoKMa1/Kpjfny8/8j\naqVOyc+dvIre0uqoQLYy6S6HpI/MLLtA6aBAptZQQwuCIAgaSKtHZkvX2aXo2baSdgEOxdvaZi5A\ncA4edbrIzOYpKwN4xcwyJyVTDaqlovWt9LouKZJLu/pRob2S9q0RmUWeLH/PZHN2fplKVVF/PI1H\nOgfhUeHrcceojNndpxlv0VHZ6w7gJ8yogJbN0UwRDPwW8TIpUrx6rsxMWKCs7G2BsZI2SWUf2YXI\nLJJG4eO+V+qf7ijJFdnSmYJZrfWjJwC/lfQD4C7aZX93AiYDn5nZjp2UD8VqXA8UlN+d/xFFdhcp\nkI3H+69XmjtrpP0dFMisNdTQgiAIggbSU5zZ6pyhFeBZ4ENz5aNr8WjXF3BnYqSky/EI3WmUsy+e\nb/V24Ct0rqK1k7lC11DcKQOPAOUVyqpt7xRJE/BoYUZXVKqyz5cAK5urpd0FvJgirEea2eCC6mZ3\nn4L3Y4ViZa8yBbR/AQeb2UB8bewtwM3ApCqbswuA6rKfxCN1J6QxGoKvnyy1sar9hwH/TeV3WUmu\nrJ1mtlRJVoFa86KCSyOfaWa3Al8CFjOzL+FO6P64It3JuWh9YbvkcrFFalwdyu+ibdX78ypeMyiQ\nSXoU+AuuYHcT7vAXKpCpNdTQgiAIggYSCmB1Ij2kcr6khzs9uAkws62ByZIibVEDMLO58Iekftpo\nW4KuY6EAFgRBixMKYD2QFNUquoXYHTWpVuTfmk0J4P+H+7Q7tFG8rroumNkvgI0Ldu0t6YVZKPdC\nilOkbS4X2+jphAJYEARBkxGR2SAIgjoRkdkgCJqdosjsuVsez0orNTQ9d0Rmu4qZ3SipKw/HlJ2/\nBHBDeihkVm05Es+I8BiwhzyNV3fOH4bn/lw3e6o+rX38rqQXZ9W+mcXMzsFTaDVV9CqlDRsh6e+z\nWE6/VM46nR1bdd4AfJ3x9aohGGFmCwC3AftIUtlxXahvT2BSepitaUj5aE3S0XOwzoF4bt9ds/8B\n6UGwRSSNrXHervh658/wBzoPTGuggyAIehSTJk1m4sQPGlZ/3769Z+n8Vn8ArFvMiiNbbySdLs+Z\nujT+oNnM0A9XGMto+A+tpMObzZFNVD/QNafpVPnMzL6Jp1xbjlm0VdLVzebIJhoxBvmHwrL/ATtS\nQ1HOzD4HnAgMlLQ+LuKx1ew0MgiCIJg5ekxk1rqmXvW6pKXSk/2PpH19gJ3Lopkpgfrv8LRQ/8lt\n76DqhKfwKVJBOhBXBJsGPCTpx1mkkCTTmtY4DgaGSBpvZpvjP57j0zEZFTxVVwV/qn9fMxslKcvx\nSUqNdRXuFM2FR0qvL2u3FSialfRFP+APuFpSP+C6VNYawM2Sjkl17J/K60eVGlZJuX1TuZmwxAEp\nf+6pwJrA4sCjkvZJEen+wBJp+4W4Y7JS6pc3cGGGD/F5MErScbm65sYzPMygiGZmJwMD8e/EjZLO\nKLK1yu6iOVCkYPYvZlQ++yHF6lXz4sIW13Sh7s6Uwo7FU7Y9jQsYfIxn3rhO0ik1yi1S29oJF4+Y\nB58j2+N5WI/G06Qti6vMbQysls672Mzuw9NfrQhMxBXX8nUVKentAByBq7S9CuxSFg0tGrPcHF8D\n/75lgiXZOa/juZD3ol1R7jiqxgPPdrBubh3w3KRsFkEQBEFz0dMis6XqVWl/PoXTOEmb4rd0d61R\n5jG052n9XW57kapThWIVpL1w5aF1gafSk+xZpPAkYLykE3EnaM90zj7AZZIurMqnuXEu8jk5tW94\nSl8F/sO9P/CGpPVwtauTzGzxonZbuaJZGcsl27bCI1eHAwPomB6sTA2riLXw9EybAwcBC5pZb/w2\n+XfS/
rXNbJlU7hRJmwM3AltI2gZPB7ZL2v9lXFlqLWBTc8WprG/2o1gRbTd8HmyAixV0ha4qmD1E\nTvlMJepVku6T9HIX6+5MKSzvAH4J2AEX/TiirMDU5x3UtnBndEu5Utj41K4KnpZtBzwd2LH4xdzm\n+PwDWBK/kFofd/YPyNVVNu92Ac5IdY3CL7rKKBqzCq7cNRD4E/79rU6XNoOiXNF4SKpIejPZegj+\nv6Wz/MBBEARBA+gxkVk6Ua8qOP6R9PoSrtBUhuFOJnjO2CySWK3qdBueh7VIBWlv4KdmthxwPzMu\ndM6/vwH4p5mdCXxB0r9TKqDq5RE/yN5IGpvy4J6Y278yKTG/XEFpPO0qY9XtLlI0W4F2EYBq/k/S\nB+ZKYG9Iejf1SVH0rKgvirgVd5j+gkfkTsKjYEua2e9xp30h2tXG8gpZT+beZ3U8IGlKsmscHrXN\nKFJEWxzYHb8IWirZU5OSOVBLwQzaBQ6q1avGSzqoszoLqKUUlufxlJt1ipmVRhfTuHZQ28Kjqleb\nq76tjM9hcEW7qWb2HvCcpM/MLD8Ob0rKxEPuwZ3gTA63bN4NBY42s0Px73CWv7eIsjG7Lb3eC2xZ\ncm4+Wls4HmbWC7/7sQIdv4NBEARBk9CTnFno3nq8rh47Hlgff1Br7bQtr+r0gZlthzsR/UrK3Q+/\ndf6xmf0NVwgD/0GdSoqQS/owJWk/l3SrWdIFwAXVBSYHKuMYIFt/C+4EbADclKJtq+KKT0XtzhTN\nNk/lDqVd0ayI2dHHA4HXJA02s3WAU3BFsS9K2iU5jtvT8WnHtoJtAKulpRbTcFW2y/BII/ht95cl\nnWpmfXBxiw/wJRe7mit6PWlmIzpZ+1s2BzIFs4vNbBDtzlReUaxeay+72r9dOs5yaltmNj/wopn9\nCRiGLyXohTvsWVs6K3cJM+snTwW2Hu2qeFA87x7H7zQMkzTRzC7Gx/23BbbOS8cxuy7tHoBfeOaV\n+KqZrihXYzwuwZdRbB8PfgVBEDQvPW2ZQS31Kij/8a31Q3UisGVai7cLtdWrqsvK3j8OjDVXnnoD\nV43K9r8JzJvWh4I7Xtsy45KGMjLFq4/x6G+ftO1SYHEzGwuMJjkHReerWNHsVTMbnDIuFNbZyfuu\n7s94FF/7OxqPhJ2Cq0Mtb2Z34s79OHwNar6sSo33I/Eo4B8lPZnbXqSI9gkwKWWDuBP4eyeO7Mwo\nmM3UA2g2c0ph1cd0aRxUrLb1Ph7hvB/4M+6EZhdNnc2Fz4BTzewefMnBZdn+knn3Cj7uo9LdhiXx\n/iyytWjMsnXvB6XxHYwreRX1RaYot1FR+Wb2DXw5zSrAnWY22sy2NbMlS8YjCIIgaBCRZ7bJMH+i\n/WBJezXYjr7AvpJO7fTgJiI9pHZ+Wrfc8lgLK4WZ2eOSVp3DdY4GdpQ0aTaVX3M8Is9sEATNTiiA\n9dstcPMAAB8LSURBVGDM7EZm1KEHeFfS9nPQhoPxaNDOc6rOGrQBZ9azQJtNqlRV1CUFl5ntR9XT\n94mjJT1QsH12UXelMHMp46EFu86VVGuNanepxzishUfrq/mDpItntfyZYLYqtwVBEATdJyKzQRAE\ndSIis0EQNDs9MTLb09bMBkEQBEEQBP9DxDKD4H+eJArwNUnHz4G65qMT+WIz2wBf4lL2JH7ZecNo\nQonjjGTfa/iDedtIOtHMtsdTqb1mZkOAKyV9Ngt1DMcfeuyHpw67cSbLmT5OuTL/Bewg6YSZtS8I\ngiCoPxGZDYI5S1fki39Ie+aG7tKPJpM4zpFl33g0iYSAZ4TIhBGOJqXLmgVew5XDZpX8OL0GvCLp\nCWAFM1u+DuUHQRAEdSIis0HLYi5hvA/+UM4vga/ieUkXxPPAbo8n1i+SGF4X+DUuOPAR8M9U5k+A\n7+FppcZIOso6kdCVlKVaq7ZvPeAs4BNgCi5LfAwuX3wsrkJ1Ee0SzMfiYhaDgdWT2MXauHraVOAe\nSUeb2UE0j8Tx/MD16dwFgGMk3WZmjwLP4Opjj+Kpy7JzNsLVwK7B5W+vNrMrcfGDEcAOKVXd+jlb\n/5hsfQN/UPMyXHkszxG4EtwUPMfsXmb2o9S/h0t6yMx2LujPzsbpVNqlbK/HVep+UtQfQRAEwZwn\nIrNBq/N2kj4djTs5m0haG79QW4tyieGLgN2TXO7jQJuZrYpnklhHLj28opltSecSumVsC1wHbJTq\nW5R2+eKTcDWts5INQ3DJ44eBv+GO2Ye4YMHGqY1fMLNN1FwSx/1xB39r3PnNLpD74SnmvoWre21X\nfaKkW3CVuB9IuhR4HdjFzDYH+qU2bwwcY2YLJ1t/L2lTSddX9cGgJE37nqRPUxWPSdok9cfFZrZo\nUX92Nk6S3s+V+Tgu8hEEQRA0CRGZDVqZCkl2V1LFXGJ3RJJd/SLt8rdFsrpLSXomvR+DR0ANX785\nNW0fi8uuQucSukWcgkf47sAFAcZVHf867qj9MLWl+vvYH+gL3JoU33oD/c0/7FR1bEMkjiU9aWaX\n4BHVeYDz0q7xSYQBXHShzBmupg1XrFsz5YwF75d+WZUwfZ1ztQTwEZIeyn2+O9k4PqmbrUDH/lye\nzscpz2u48x4EQRA0CRGZDVqdaQBm9nVgW0m7AIfic7uW7OorZpY5quuk16eBAWY2V5JI3ZCODlyZ\nhG4RewDDJW2Mq4MNISdfDJwA/FbSD3A1smz7NPz2+vO4g7mJpEHAb4D7CiKzgwoUy47BHwZbIX3O\nJI6xrkscD0r1XkOJxLGZrQL0TpKwewHZcoQVUzQV/Jb/EyV9lLU1e98r2To61b0pcAPwXO4YJP2x\nKDJbVfbaycbVgRco7s8H6Hyc8iyKq/YFQRAETUI4s0GrkzlizwAfmtkY4Fo8klotf5t/vy9wRYpg\nfgWXWH0CXxN5Lx6dez4nItCZhG4RDwKXpzoGAlfTLl98Gu6knWlmt+JrSzPRjnH4EoYlgLOBMSkr\nwabAs13pjzkocfwMMNDM7sb77hdp+0fAb5PdL0q6OW9f7vU+fM3songk/GZJI4HJaSwfBKZJmtxJ\nu4tYJUlInwcMkfQWHfvzGWqPU7UC3gBShDsIgiBoDkI0IQiCLtEdieNGSNnOCczsWvwht/8U7Q/R\nhCAImp2eKJoQa2aDYBZpBinkOUR3JI573FVyekDw2TJHNgiCIGgM4cwGwSwiacdG2zAnkNTltaKS\nvj47bWkEScSiW0IWQRAEwewn1swGQRAEQRAELUs4s0EQBEEQBEHLEs5sEARBEARB0LKEMxsEQRAE\nQRC0LOHMBkEQBEEQBC1LOLNBEARBEARByxLObBAEQRAEQdCyhDMbBEEQBEEQtCwhmhAEQVAnPpz4\nQaNNCIIgqEll6jTAZWyhZ/zfaqtUepzqZBAEQUOYMGFCZdKkyY02o8ew2GILEf1ZP6I/60ur9uca\n228FwCN/HjV927LLfpl55523USbRt2/vtlk5PyKzQRAEdWKllV
ZiYg+IcjQLffv2jv6sI9Gf9aVV\n+3PueeYBoH//FRtsSf2INbNBEARBEARByxLObBAEQRAEQdCyhDMbBEEQBEEQtCzhzAZBEARBEAQt\nSzizQRAEQRAEQcsSzmwQBEEQBEHQsoQzGwRBEARBELQs4cwGQRAEQRAELUs4s0EQBEEQBEHLEs5s\nEARBEARB0LKEMxsEQRAEQRC0LOHMBkEQBEEQBC3L/7d37/GWz/Uex197GkOaGSEiRwbxiVyO1BnX\ncW9SVEKZbiSXiCPVEU2cOdFFQlJKKJ1EN5eKJDEMk1Gqk0u8hYRpXIdhiDDr/PH5LntZ+7fWvsy2\n916z38/HYx5779/1+/v+vr9Zn/X9fX+/j4NZMzMzM+tYDmbNzMzMrGM5mDUzMzOzjuVg1szMzMw6\nloNZMzMzM+tYY4e7AGZmS4rbb7+d+fMXDncxlhiPPjre9TmIXJ+DayTX5+qrr8G4ceOGuxhDxsGs\nmdkg+eBR57LscisPdzHMbBR7asGDnPJf72DttdcZ7qIMGQezZmaDZNnlVmb88qsNdzHMzEYVj5k1\nMzMzs47lYNbMzMzMOpaDWTMzMzPrWA5mzczMzKxjOZg1MzMzs47lYNbMzMzMOpaDWTMzMzPrWA5m\nzczMzKxjOZg1MzMzs47lYNbMzMzMOpaDWTMzMzPrWA5mzczMzKxjOZg1MzMzs47lYNbMzMzMOpaD\nWTMzMzPrWA5mzczMzKxjOZg1MzMzs47lYNbMzMzMOpaDWTMzMzPrWGP7u0JEbAscKGla+XsP4L+B\ntwHHAhMk7d6w/P2SVmmzvfMbl2+aNwk4T9LmTdPPBk6V9If+lr/FfpYBjgP+A6gBC8ljvC8i7gbW\nlfSvxdzHycCJwJPAFcDD5eeVkn7fx22sA3xY0mdazJ8BzJN0+mKU89PAlcCNwAcknTUY223ax93A\nFEn39GOd5YG3SjqvxfxJwExJa7aYP4MBHENEHA+8FThU0qw+rrM3MF/SL/qzrxbb2ho4gWyXV0s6\nskyvX3PPAR/vaxvq574PkfT1iuktr9k221okqdcvz73VXbn2z5N0WX/23xdV2264Ht4O1CT9z2Dv\n18zMFk+/g9lGETEN+CSwvaSHIgJgq4j4gKRzymK1dtvo74diX7Y5AF8F/iLpUwAR8S7gx8AWg7Uv\nSYeXbU8B7pK0xwA2cwKwb5v5i11WScfDC8HhfsBZg7HdJvcAD/ZznY2BdwCVwSzwUNluKwM9hj2A\njSQ92dcVJH1vgPuqcjKwu6S/R8SVEfHv5B2VKZImR8TqwPnkF7HBNh3oEcwO8Jr9Y18W6kPd1Rj8\n9thy2w3XwxuB51+i/ZqZ2WIYSDBbA4iIDwKHADtIWtAw7yjgfyJipqS59ZUiYjkyMFqhTPpPSTfX\ne24j4j/ID84nyEDnaWAGsFJEXAisCtwo6YCy/lGlt64L2F/SnRHxSeC9ZG/VLElHlh65LYBlyeDs\ny8DE8vd04GrgHZI+Wi+rpIsi4urGg46IDcie1ZcBrwIOknRdRHwXWBt4OXCKpHMi4vPAtqV+z5f0\n5Yi4CjgU+BqwainXGsAPyR7a04HXkYHKZyVdHRE3AwL+RfZ+j5E0v5Tni8CmwIrAnyW9KMiNiNPK\n/PuBNYFdS119pxxDrZyDGyPi78CtwF+A5UuZ9gDWj4ijyybfGRF7lv0dLeniiLgDmA2sW45hOTKo\nkqQPRcSxwFYNxaoBU4E9JD0dEdsAx5RjHg+8D3iWht74iLgO2Kucq40iYj/gNy2OY0/6oNTdVmX9\nkyT9tEVZpgGvAS6JiLdKerpiW+8Gjijl/kcp638D84AHgMPKoqsDf5e0Q4v9n0Ge/7pHyheeyZKe\nj4jxpX6fIHsJLyMr+t6IGBsRK0p6pKJ8k4AfkYH+JPLcbgBsAlwiaXpEbAicQraPR8gvTIcCK0TE\n14HfAx8pm5wB/EDSqhExmQy2xwBzgfcDp7Y4jp1LeW4ir7mNgNtKHU0BniF7mj9b6u424MgyfS3g\nh5K+ULbZVfZ9CtlOx9LUHsjrbylJJ0bEt4BnJB0WEdOBu8h2/iFgEfB7SYdVbHtP8m7TecD3m+vW\nzMxGhoGMme0Ctgb2Jz8QlmqaPxc4mgxcG30G+I2k7YEDgW+W6fWekG8Be0vaAbizYfpEYB9gc2CH\niFipTL+sLHsC8OUSbO4JbC5pC2CdiHh72c4tkurBw4pkYDeN/BBcgQz4XkTSo03HvD7wSUk7AscD\nHy4BxtbAbuSt6HrPTT0Q2hp4rOE4nyGDmyslzWjY/v7AQ5K2Ad4FfKNMfwXwuTKkY1vy1j8RMYG8\nFfsW4M3AZhHxmvrGIuKdwAqSJpNByOpl1leAk8t+DqP7HP0bME3SJxrKehzZW31sOf77yrF/HDio\nLLcGGWRuTQYQ3yj73CoiJko6WtJ2Df+2l/SspHqv7PrkUIbtgAvI81fV61YvzxWSzmx1HA3bbSki\ndgYmSdoa2B6YXr5o9SiLpM+RbeMtVYFssRfw5bK9i8n2Wivluahsb18ySNyn1f4l7d9UV3uUbTwf\nEZsBN5FB3lxgAvB4QxmeIAPdVtYsZdiFDM4OB+ptA+AM4OBS1kuBIyR9nmxjh1CCXElTJF3ZsN3T\nyWEvmwGXAOu1OY6HyjrjyWB4CtluZpfzOA54Ay8+/68F3g1sRn5hqNuS/GK5i6T7qG4PF5LXJEDQ\n3XM9lTxP+wAfK/9X3BoRL6vY9r2lPDVJT0l6qk0dm5nZMBnoMIN5wI5kEHZOROwsqf4hVJN0bkTs\nFhEHNayzIbBdRLy3/L180zZXlXRr+f0aMkiAvCW/ACAiHiR7VCF7dwDmkAHt64E5kp5v2MYbyu+3\nA0i6JSJOJ3taliJ7SR8BXtl8gBHxPnKoAeQH2j+AoyPin2QwsUDSwoj4OBkMTATqQyveTwa8q5DB\nQV1X+ddsA2Dr0iME8LKIWLH8rvJzRbIXC7LXeuWIOJcc3zueF3+peD1wXTnmhyPitobps8r0P5db\n1AAPNwXv9bLW1YD6+OQH6D4Hj5Rggoh4UlJ9PwuAl0fEEWRw0Ogtkp4tv/8D+FpELARWA66lp66G\nn/XfWx1HX2wAbBoRM8vfY8key76UpconyLsE/0n2bl/UODMiViHb0T6lF3Va1f4j4hBe3KM5v347\nX9IcYM3S030k2WYnNCw7ge4vTVXukvRERDwLPCDpsVK2+jW7HvDNMkxoKcr10qRq2qslqZTxO2Wb\nZ5J3KnocR4P6kIPHyLsBAI8CyzQtd5OkRcBT5bqDbAM7kW3+uTKtR3sodb1sRLy57GP18vuCUhcf\nBj4VEWuS10q9bTVvG6qvWTMzGyEGGszeoXwg6hsRMZXsnTuuzKv/x38QcD35wQD5QX+DpPMiYjWy\n57LRvRGxXgloGx/4ajU+bjPyA3YK8GfytuQnSw/LojL9f8mxlovghaECEyTtEhGrkr1Ca0XEZRFx\nqKRTy3J7kreuzy0f8
F3kbcf3S7qtDBGYVAKVTSW9O/IhsntKgLmnpGkR0QXcEhE/7KU+byN7Pr8Y\nERPJccjzy7xF5eeD5C1vyFu2q0vaq/RU78aLP3BvBj4InFKGYqxbpt9a6uUXZezlvKZ91HWRvcxj\nGv6u0m7sYk3SZ9vMB/g2sJakJyMfvhlDd6A+hvyCUH+ga1FDeVodR1/cRj4odmBEjCXvGNwJ/Lqi\nLH1xADBDOWb8W+S5ACAiXkkGt4dLuqWh7D32L2n/5g2X9jML2LUEoAvJHszZ5N2Ir5C97i8MP2mh\ntzGmtwEfVD7wOIXuoUCN5725jQD8IyJeJ+mOiPgv4K+S9utlX30pT7vlauQwjtWB08i7IK3awyXk\nsKKTybsIp5K9yZBfxD8q6ZmI+BU5FImKbZuZ2Qg3kGEGzQ9J7AscGPmWg/p8JD1M3s6s9+J9HnhP\n6ZH6OfkB+sLywMHAdyLicvLW+b+a5jf/vkNEXFnWO0LSzWQP2GwyiP6bpIua1vsrsG3keNgfk8Mh\nIHvX1o+I2RFxLRkI7t607jnATyLil2S9rSrpfmCViJhNBkMnlCB/fkTMIZ+CvkzdT+3X6Fl/NfID\n9vWR42qvAu4pPd2Ny11F3hqmHN9a5fhPKX/XA92apEuAh0u5zgSeKvX5KeDQcvyn0X2buTloqJHB\n87iI+FKLMjcbyEM55wDXRMTF5NsdVpX0AHA5OU7z2+Q5A7gD2LD0gLY6DiDfHBERG1eVUfmU/MKI\nmAX8DlgkaWFVWfp4DL8DLo6I3wCvJm9h1x1H9s7PiIiZEfGrNvvvobSBE4BLS9vYGDhR0h/JOw/X\nAT8lrwEiYmrk0/c9jruX3w8Cvh8R15Qy31ym/yUivk/r838gec1eRRmDW3UcbcrS2zIt25yks8gx\nvXvRuj1cSAapV5LX5xuBn5V5N5Hn+wrybsP1Fduuf+F+Yd8RsUpEtHoI0czMhkFXrfZSPRjcPxFx\nMPDjclv8WPKBjeN6W280iYifA/v1NjY0sjv53yX9qAxXuBl4bcPt/SVauWV/qaQ7h7ssQ6n00u8n\n6YvDXZYlVbnzc7zKm0+abbfvabXxy682xKUyM+u28NG5fPGAzVh77XUq56+w6QYAzP/DzZXzh8NK\nK01YrOFci/VqrkH2APDrMmbxMWDvYS7PSHQE2Yt8ZC/L3QscX8bzvozsuR4VgWzxs/LwzqCIiKXI\nnr1mUsNbMEaALvJhKHvpdJG95WZmNkKMmJ5ZM7NO555ZMxtuCx+dy+lH7si6665bvcCkSfnz7ruH\nqkh9scT0zJqZmZnZYpo/fyEPPfRE5bwVFmUn5vwW84fDSitN6H2hNgbyANioUcZeVk0/f6jL8lKK\niBkRcX10v2uTiJgTEa8dznINVHkArOXruiLi7ogY1zRtn8ikGy912XaLiDvKA2Ezy9sDBmO7S0XE\n9yNiVjmXuw7Gdhu2v3e7bUbE2eXNJs3TDyhvbejPvvaJTEbS3zKeX35Ojoi/RiYvWWzl2PaOiA0i\n4pjB2KaZmQ0eB7PtTa+aWPHezCXBJDJ7W13Hjj+RdHgvY2Zr9LylMVTH+0ZyDHM9qcCsQdru+8nE\nG1PIZAE90tAuDknfK29iaKVV/R1Fjtvuj3pyiH5puC6nktn4Kq/fAZgHzC1vTHldRKw1SNs1M7NB\nsMQPM4ghTuep6pSnk4CzgSfJVz5dLOmYaJ0itzG97FnASRXL9JZKtirN6sFk+s+6GvmgXY18H+d+\nEXGxpP9rKPtSwHfJ973W06/+uLyO6U+lLieS79a9JyIOJd8hXCNTkJ7a5rycV87L2sDvJB1ctWxZ\n/kby9WQblW2/U9LjUZ0a9iry/a/zgXPJd7MK2F5S/fHOb0a+MB+639M7NSLeRr4beYakSyNiJzJr\n1tN0t41NyKQYz5CvD1uPnumLW6WnfROwSXk473fApxsSfTQfc72ONyHf87oXsA7d73SuOxH4Cfma\nLsj2+BwtRMQFwOcl/SEyocZRki6MiF+TmbG2JF+r9zxwraSjIt+tPE/S6VGdKhnyFX1HkG3xIPJc\nrQKcFxEHkK/D6yKTI3yUfG/umU3F+wHwPWBOed3fgcoMeETEvHLdnU2ej0nk9bSPpD9FxP2lLPsC\nz0TEfeQ119v5+zTtU+x+iXy9HeUYPka+C9rMzEaA0dIzO2TpPNuUYQ0yXeubgZ0iYhMqUuSWZRvT\ny76hxTLtUskuR0WaVUnfUM/0svUezIVkAHh2wy34LvJdog9I2pLM+nZced1XDbhe0k7ke2GnRcT6\nwHvIYGgK8K6IaDECHcjAbF8yCH9bRKzcZtkJwLmStiW/OOwcrVPT1ntepwMXlHV+wou/vJ1Zzvfd\nZNanGvCgMkXyrmRCkDHkOd6tbONq4LNl2aVLeziHivTFapHWlXwrwiGlB3U8GdS1UiNTQG9Lptid\nLml203a3k3SxpCeVGekmlGNt1yt5Yam/SWSQt2Nkso6lgX+SX9i2L/W6WkTsWMrSLlUyZFKUHcjk\nBPuU97XeT7bFyeT7e3cmg8FXSLqz4ljOVKY8XkDr3t4acLekt5Z9HVCfLun35JevE8t7pvty/tqm\n2JW0oOFtIDeRX1zMzGyEWOJ7ZoshS+fZxhyV3O4RcT3ZozqXphS5ZdnG9LI90uiW6e1SyS5NRZrV\nMga4eYjEh+q/SLom8uX/xzbMfz3wmzJ/YUT8he50pX8qP+8le+DeQAbZ9YD/lWTvZFXdQWaSe7Ic\nwzx6pjNt1ri/ZYDXUp2atrHs9bGXzelp6+l576c7sUc9JeqDEfE4mQnrcUn1jFLXkMk/LqY7zTBU\npC+O1mldv1tvf+QL/HsbsnJ5+TkbeHtEbEnPntmTJP2ijBO+gPxi0y7r3C/Kvh8u5f4EGWT+nDxf\nK5GJGiDbXONxtEqVDNUpj+t+Wbb9M/JuwXERsTY9e2bPlXRG+b15KEjj3/W2cB89UyYDdEXEq+jb\n+YPWKXaXblpuHpla2szMRojREswOZTrPiyqWA9i43LJfRPZEnkGm231RityKffVIo9uHY+qiIs2q\npK9TMZayBC1108lhFfUMWLeSvVUXlV6/DYG/tSiDgFsk7Vy2+wngxjbl7O841eblW6WmrbuZzAB1\nI5n+uLd9bwZ8OzLd8stLsDYxIlZRZnvbhu4gqJ4ieWl6pi8+TxVpXcv8/4uILSXNJXu6b+jlmCeT\nQdgWwE2SZgPbVWz71WSv78GSZjbPbyTpsYh4Cngv8G5y6MlhZFD+BPllYUdJz0fEvqWM9TS9rVIl\nt7KIHAKyJTlMYWpEbA58QdL2VcfS4J+UdhgRa9B9Xfamqxxnr+evQau22BxQL09mxzMzsxFitAwz\nGAnpPGtkj9gc4KeSbqEiRW5FGVst0+74avRMs9ru4Z0XtiHpGXIow8Qy7dvAiqVuZlIC5Kr1Jd0I\nXBER10bEDcBaZMA/NXpPs9qbHil31T41bI0c6/iOyLS/+9GdIrmVFSPTm54P7F
+m7Q9cEJnmeHu6\ne60b66s5fXHlw2fK9LQfAc4vbWZp8ksNDb3LzT5Wlp1K9iq28hlyrOoxkW9JuDIilol8Cr8qAcnP\nyID9UeCy8vvflGmoTwJmlWPaie6Uwq1SJddvwTemoa3/fg15XfyZHJM9kxyf/YU2x1J3A/BYKccM\n4K6GeVX7qprW9vxV/N6sed5kyp0KMzMbGZw0YQiUsYmnShrU1yV1ihimNKtlTO1Dkm4o4z6PLGOP\nR5yIOFnS4U3TZgK7S5q/GNvdEHiTpH6/6qrF9kZ7quRzyLHLf6+a76QJZjbcnM7WFktE7E8+DNTs\nKDr4VVeDoE9pViPizWSvXbMfSfrWAPb7N7Ln/DnyVvehA9jGUDnxJdru/MEKZItRmyq5fDG4o1Ug\na2Zmw8M9s2Zmg8Q9s2Y23EZjz+xoGTNrZmZmZksgB7NmZmZm1rEczJqZmZlZx3Iwa2ZmZmYdy8Gs\nmZmZmXUsB7NmZmZm1rEczJqZmZlZx3Iwa2ZmZmYdy8GsmZmZmXUsB7NmZmZm1rEczJqZmZlZx3Iw\na2ZmZmYdy8GsmZmZmXUsB7NmZmZm1rEczJqZmZlZx3Iwa2ZmZmYdy8GsmZmZmXUsB7NmZmZm1rHG\nDncBzMyWFE8teHC4i2Bmo9xo/H+oq1arDXcZzMzMzMwGxMMMzMzMzKxjOZg1MzMzs47lYNbMzMzM\nOpaDWTMzMzPrWA5mzczMzKxjOZg1MzMzs47l98yamfVBRIwBTgM2Ap4B9pN0Z8P8XYGjgeeA70g6\ns7d1RrOB1GeZ/kdgQVnsLkkfGdKCj1B9aWsRsSxwObCvJLl9VhtIXZZpbpsV+nCtTwMOI6/1m4CD\nga526zRzz6yZWd+8CxgnaQvgSODE+oyIWAo4CdgJ2AY4ICJWLussXbWO9bs+V4qIZQAkbVf+OVjo\n1rI+ASLiTcAsYE2g1pd1RrF+16XbZlvtrvWXA8cC20raClgO2IV+/t/pYNbMrG+2BH4FIOl64E0N\n89YD7pC0QNKzwLXAlLLOpS3WGe36W5/bABsDy0bEZRFxRURMHupCj2Dt6hNgHBkgqB/rjFYDqUu3\nzdba1efTwOaSni5/jy3T+vV/p4NZM7O+mQg83vD38+X2WX3egoZ5T5A9DO3WGe0GUp9PAidImgp8\nFPiB6/MFbduapN9Kuq8/64xiA6lLt83WWtanpJqkhwAi4lDgFZIub7dOFVe0mVnfPA5MaPh7jKRF\n5fcFTfMmAI/1ss5o19/6fBS4HfgBgKS/Ao8Aq770Re0IA2lrbp/VBlIvbputta3PiBgTEV8BdgB2\n78s6zRzMmpn1zWzgbQARsRlwY8O824B1ImL5iBhHDjH4bS/rjHb9rc/rgA9Txs5FxGvI3pt5Q1no\nEWwgbc3ts9pA6sVts7Xe6vN0YGlgt4bhBv06B121Wq3dfDMzAyKi8elayA+vTYHxks6IiF2AY8hO\ngrMkfbNqHUm3D3HRR6QB1udY4LvAGmWdIyTNGeKij0i91WfDcjOBAyXd7vZZbYB16bbZQrv6BG4o\n/2Y1rPJV4OfN67Rrmw5mzczMzKxjeZiBmZmZmXUsB7NmZmZm1rEczJqZmZlZx3Iwa2ZmZmYdy8Gs\nmZmZmXUsB7NmZmZm1rEczJqZmZlZx3Iwa2ZmZmYd6/8BN9vHeuwpjEMAAAAASUVORK5CYII=\n", + "text": [ + "" + ] + } + ], + "prompt_number": 176 + }, + { + "cell_type": "code", + "collapsed": false, + "input": [ + "run_test_model(DecisionTreeClassifier(), x_train, y_train, x_test, y_test, corp_train, corp_answer)" + ], + "language": "python", + "metadata": {}, + "outputs": [ + { + "metadata": {}, + "output_type": "pyout", + "prompt_number": 177, + "text": [ + "0.18077668222273341" + ] + } + ], + "prompt_number": 177 + }, + { + "cell_type": "heading", + "level": 1, + "metadata": {}, + "source": [ + "Pass in live code file for response" + ] + }, + { + "cell_type": "code", + "collapsed": false, + "input": [ + "def reverse_dict_match(a_dictionary, a_list):\n", + " \"\"\"Pass this function a dictionary and a list. 
it will reverse the order of the dict\n", + " and match the first values of the list with the dict.\"\"\"\n", + " reversed_dict = dict((v,k) for k,v in a_dictionary.items())\n", + " return [(reversed_dict.get(x),y) for x,y in a_list]\n", + "\n", + "def check_code_snippet(a_file, corp_train, corp_answer):\n", + " \n", + " # Ingest file\n", + " a_file_df = pd.DataFrame([read_process_file(a_file)], columns=['raw_text'])\n", + " \n", + " # Turn file into features\n", + " a_file_df['paren_count'] = parenthesis_count(a_file_df['raw_text'][0])\n", + " a_cleaned_df = a_file_df.drop(['raw_text'], axis=1)\n", + "\n", + " # Run model\n", + " model_created = GaussianNB()\n", + " model_created.fit(corp_train, corp_answer)\n", + " predicted = model_created.predict(a_cleaned_df.values)\n", + " predict_prob = model_created.predict_proba(a_cleaned_df.values)\n", + " \n", + " #Pretty response\n", + " answer_list = list(zip(model_created.classes_, predict_prob[0]))\n", + " answer_list.sort(key=lambda x: x[1], reverse=True)\n", + " return reverse_dict_match(hit_num, answer_list)" + ], + "language": "python", + "metadata": {}, + "outputs": [], + "prompt_number": 348 + }, + { + "cell_type": "code", + "collapsed": false, + "input": [ + "check_code_snippet('test/1', corp_train, corp_answer)" + ], + "language": "python", + "metadata": {}, + "outputs": [ + { + "metadata": {}, + "output_type": "pyout", + "prompt_number": 350, + "text": [ + "[('java', 0.23709565168200461),\n", + " ('perl', 0.15122183416794088),\n", + " ('ruby', 0.13694881776324136),\n", + " ('ocaml', 0.12725804928624451),\n", + " ('python', 0.08496717978985284),\n", + " ('haskell', 0.081919233740987057),\n", + " ('php', 0.064364558495229707),\n", + " ('scala', 0.063449250754175929),\n", + " ('javascript', 0.049179273881515481),\n", + " ('clojure', 0.0035955804751662455),\n", + " ('scheme', 5.6996364138916492e-07)]" + ] + } + ], + "prompt_number": 350 + }, + { + "cell_type": "code", + "collapsed": false, + "input": [], + "language": "python", + "metadata": {}, + "outputs": [], + "prompt_number": 350 + }, + { + "cell_type": "code", + "collapsed": false, + "input": [], + "language": "python", + "metadata": {}, + "outputs": [], + "prompt_number": 329 + }, + { + "cell_type": "code", + "collapsed": false, + "input": [], + "language": "python", + "metadata": {}, + "outputs": [], + "prompt_number": 336 + }, + { + "cell_type": "code", + "collapsed": false, + "input": [], + "language": "python", + "metadata": {}, + "outputs": [], + "prompt_number": 327 + }, + { + "cell_type": "code", + "collapsed": false, + "input": [], + "language": "python", + "metadata": {}, + "outputs": [] + } + ], + "metadata": {} + } + ] +} \ No newline at end of file From 18b19c8683141c82d346aa61c2978df0ea6efe0f Mon Sep 17 00:00:00 2001 From: Zack Cooper Date: Fri, 13 Feb 2015 16:35:33 -0500 Subject: [PATCH 2/6] testing against proper test data. All bare bones still in tact. Just adding features at this point. Rest is complete!!! 
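A minimal usage sketch of the reworked Corpus class introduced below (assuming the
corpus/bench and test/ layouts plus the test_fixed.csv answer file referenced in
this diff; see the notebook cells for the exact calls):

    import glob
    import os
    import pandas as pd
    from corpus_build import Corpus, raw_file_list

    # Training corpus: extensions are known, so snippet=False keeps the
    # hit_num label column alongside the regex-based feature columns.
    corpus_df = Corpus(raw_file_list).compl_df_build(snippet=False)

    # Held-out snippets: language is unknown, so snippet=True yields the
    # feature columns only; expected labels are loaded separately.
    test_files = [f for f in glob.iglob(os.path.join('test/', '*'))]
    test_df = Corpus(test_files).compl_df_build(snippet=True)
    test_df['answers'] = pd.read_csv('test_fixed.csv', dtype='object')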
--- corpus_build.py | 144 ++++++++------ lang classifier live.ipynb | 375 ++++++++++++++++++++++++++----------- test/26 | 35 ---- test/27 | 20 -- test_fixed.csv | 1 + 5 files changed, 348 insertions(+), 227 deletions(-) delete mode 100644 test/26 delete mode 100644 test/27 create mode 100644 test_fixed.csv diff --git a/corpus_build.py b/corpus_build.py index d0d74d5..1b54621 100644 --- a/corpus_build.py +++ b/corpus_build.py @@ -5,82 +5,106 @@ file_types = {'.clj': 'clojure', -'.cljs': 'clojure', -'.clojure': 'clojure', -'.edn': 'clojure', -'.hs': 'haskell', -'.lhs': 'haskell', -'.ghc': 'haskell', -'.java': 'java', -'.class': 'java', -'.js': 'javascript', -'.javascript': 'javascript', -'.ml': 'ocaml', -'.mli': 'ocaml', -'.ocaml': 'ocaml', -'.perl': 'perl', -'.php': 'php', -'.phtml': 'php', -'.php3': 'php', -'.php4': 'php', -'.php5': 'php', -'.phps': 'php', -'.python3': 'python', -'.python2': 'python', -'.py': 'python', -'.jruby': 'ruby', -'.scala': 'scala', -'.racket': 'scheme', -'.scm': 'scheme', -'.ss': 'scheme', -'.clojure': 'clojure'} + '.cljs': 'clojure', + '.clojure': 'clojure', + '.edn': 'clojure', + '.hs': 'haskell', + '.lhs': 'haskell', + '.ghc': 'haskell', + '.java': 'java', + '.class': 'java', + '.js': 'javascript', + '.javascript': 'javascript', + '.ml': 'ocaml', + '.mli': 'ocaml', + '.ocaml': 'ocaml', + '.perl': 'perl', + '.php': 'php', + '.phtml': 'php', + '.php3': 'php', + '.php4': 'php', + '.php5': 'php', + '.phps': 'php', + '.python3': 'python', + '.python2': 'python', + '.py': 'python', + '.jruby': 'ruby', + '.scala': 'scala', + '.racket': 'scheme', + '.scm': 'scheme', + '.ss': 'scheme', + '.clojure': 'clojure'} hit_num = {"clojure": "1", -"haskell": "2", -"java": "3", -"javascript": "4", -"ocaml": "5", -"perl": "6", -"php": "7", -"python": "8", -"ruby": "9", -"scala": "10", -"scheme": "11"} - -raw_file_list = [filename - for filename in glob.iglob(os.path.join('corpus/bench', - '*', '*')) + "haskell": "2", + "java": "3", + "javascript": "4", + "ocaml": "5", + "perl": "6", + "php": "7", + "python": "8", + "ruby": "9", + "scala": "10", + "scheme": "11"} + +raw_file_list = [filename for filename in + glob.iglob(os.path.join('corpus/bench', '*', '*')) if os.path.splitext(filename)[1] in file_types.keys()] +feature_list = [('parent_count', "[()]"), + ('double_colon', "::"), + ('let_exists', r"\blet\b"), + ('less_minus', "((\<\-))"), + ('paren_star', "(\(\*|\*\))"), + ('def_exists', r"\bdef\b"), + ] + class Corpus(): + def __init__(self, file_list=[]): + self.file_list = file_list def read_process_file(self, file_name): with open(file_name) as f: return f.read() - def parenthesis_count(self, a_string): - return len(re.findall(r'[()]', a_string)) / len(a_string) + def build_dataframe(self, snippet=True): + raw_text = [self.read_process_file(file) for file in self.file_list] - def build_dataframe(self): - a_dataframe = pd.DataFrame([file_types[os.path.splitext(file)[1]] - for file - in raw_file_list], columns=['file_type']) - a_dataframe['hit_num'] = a_dataframe['file_type'].map(hit_num) - raw_text = [self.read_process_file(file) for file in raw_file_list] - a_dataframe['raw_text'] = raw_text + if snippet: + a_dataframe = pd.DataFrame(raw_text, columns=['raw_text']) + else: + a_dataframe = pd.DataFrame([file_types[os.path.splitext(file)[1]] + for file in self.file_list], + columns=['file_type']) + + a_dataframe['hit_num'] = a_dataframe['file_type'].map(hit_num) + a_dataframe['raw_text'] = raw_text return a_dataframe - def feature_breakout(self, a_dataframe): + def 
feat_lookup(self, regex, a_dataframe): + return [(len(re.findall(regex, a_row))/len(a_row)) + for a_row in a_dataframe['raw_text']] - a_dataframe['paren_count'] = [self.parenthesis_count(row) - for row in a_dataframe['raw_text']] - return a_dataframe + def feature_breakout(self, a_df): + + for name, regex in feature_list: + a_df[name] = self.feat_lookup(regex, a_df) + + return a_df + + def compl_df_build(self, snippet=True): + a_df = self.build_dataframe(snippet) + a_df = self.feature_breakout(a_df) + cleaned_df = self.clean_df(a_df, snippet) + return cleaned_df + + def clean_df(self, a_df, snippet): + cleaned_df = a_df.drop(['raw_text'], axis=1) + if not snippet: + cleaned_df = cleaned_df.drop(['file_type'], axis=1) - def compl_df_build(self): - corpus = self.build_dataframe() - corpus = self.feature_breakout(corpus) - return corpus + return cleaned_df diff --git a/lang classifier live.ipynb b/lang classifier live.ipynb index 32aa0d0..5f96e25 100644 --- a/lang classifier live.ipynb +++ b/lang classifier live.ipynb @@ -1,7 +1,7 @@ { "metadata": { "name": "", - "signature": "sha256:517c0af79cf5d3b4fc45e900b47c2c280b55540e2ca9ee92e073faaa3e7a2a55" + "signature": "sha256:0f9223d2ddd3e37046d43bb1578139df3c7fa29d3b2f15da299cf9a0a8978d11" }, "nbformat": 3, "nbformat_minor": 0, @@ -12,9 +12,11 @@ "cell_type": "code", "collapsed": false, "input": [ - "from corpus_build import Corpus, hit_num\n", - "corpus = Corpus()\n", - "corpus_df = corpus.compl_df_build()\n", + "from corpus_build import Corpus, hit_num, raw_file_list\n", + "import pandas as pd\n", + "\n", + "corpus = Corpus(raw_file_list)\n", + "corpus_df = corpus.compl_df_build(False)\n", "corpus_df.head()" ], "language": "python", @@ -27,47 +29,65 @@ " \n", " \n", " \n", - " file_type\n", " hit_num\n", - " raw_text\n", - " paren_count\n", + " parent_count\n", + " double_colon\n", + " let_exists\n", + " less_minus\n", + " paren_star\n", + " def_exists\n", " \n", " \n", " \n", " \n", " 0\n", - " clojure\n", " 1\n", - " ;; The Computer Language Benchmarks Game\\n;; h...\n", " 0.071901\n", + " 0.000000\n", + " 0.002066\n", + " 0.000000\n", + " 0.001240\n", + " 0.000413\n", " \n", " \n", " 1\n", - " clojure\n", " 1\n", - " ;; The Computer Language Benchmarks Game\\n;; h...\n", " 0.071704\n", + " 0.000000\n", + " 0.002699\n", + " 0.000000\n", + " 0.000771\n", + " 0.000386\n", " \n", " \n", " 2\n", - " clojure\n", " 1\n", - " ;; The Computer Language Benchmarks Game\\n;; h...\n", " 0.063995\n", + " 0.000000\n", + " 0.002695\n", + " 0.000000\n", + " 0.000337\n", + " 0.000337\n", " \n", " \n", " 3\n", - " haskell\n", " 2\n", - " --\\n-- The Computer Language Benchmarks Game\\n...\n", " 0.024707\n", + " 0.002471\n", + " 0.002471\n", + " 0.000618\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 4\n", - " haskell\n", " 2\n", - " --\\n-- The Computer Language Benchmarks Game\\n...\n", " 0.024352\n", + " 0.002118\n", + " 0.002647\n", + " 0.000529\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", "\n", @@ -75,37 +95,134 @@ ], "metadata": {}, "output_type": "pyout", - "prompt_number": 2, + "prompt_number": 47, "text": [ - " file_type hit_num raw_text \\\n", - "0 clojure 1 ;; The Computer Language Benchmarks Game\\n;; h... \n", - "1 clojure 1 ;; The Computer Language Benchmarks Game\\n;; h... \n", - "2 clojure 1 ;; The Computer Language Benchmarks Game\\n;; h... \n", - "3 haskell 2 --\\n-- The Computer Language Benchmarks Game\\n... \n", - "4 haskell 2 --\\n-- The Computer Language Benchmarks Game\\n... 
\n", + " hit_num parent_count double_colon let_exists less_minus paren_star \\\n", + "0 1 0.071901 0.000000 0.002066 0.000000 0.001240 \n", + "1 1 0.071704 0.000000 0.002699 0.000000 0.000771 \n", + "2 1 0.063995 0.000000 0.002695 0.000000 0.000337 \n", + "3 2 0.024707 0.002471 0.002471 0.000618 0.000000 \n", + "4 2 0.024352 0.002118 0.002647 0.000529 0.000000 \n", "\n", - " paren_count \n", - "0 0.071901 \n", - "1 0.071704 \n", - "2 0.063995 \n", - "3 0.024707 \n", - "4 0.024352 " + " def_exists \n", + "0 0.000413 \n", + "1 0.000386 \n", + "2 0.000337 \n", + "3 0.000000 \n", + "4 0.000000 " ] } ], - "prompt_number": 2 + "prompt_number": 47 }, { "cell_type": "code", "collapsed": false, "input": [ - "corpus[''] = []\n", - "corpus.head()" + "import glob\n", + "import os\n", + "test_file_list = [filename for filename in\n", + " glob.iglob(os.path.join('test/', '*'))]\n", + "test_info = Corpus(test_file_list)\n", + "test_info_df = test_info.compl_df_build(True)\n", + "test_info_df['answers'] = pd.read_csv('test_fixed.csv', dtype='object')\n", + "test_info_df.head()" ], "language": "python", "metadata": {}, - "outputs": [], - "prompt_number": 5 + "outputs": [ + { + "html": [ + "
\n",
+       "[test_info_df.head() HTML table omitted; the same data appears in the plain-text output below]\n",
+       "
" + ], + "metadata": {}, + "output_type": "pyout", + "prompt_number": 105, + "text": [ + " parent_count double_colon let_exists less_minus paren_star def_exists \\\n", + "0 0.045734 0.000000 0.001759 0 0.000000 0.000000 \n", + "1 0.063037 0.000000 0.000000 0 0.000000 0.000000 \n", + "2 0.042795 0.000000 0.000058 0 0.000058 0.000000 \n", + "3 0.047059 0.000000 0.000000 0 0.000000 0.000000 \n", + "4 0.015708 0.000561 0.000000 0 0.000281 0.002805 \n", + "\n", + " answers \n", + "0 1 \n", + "1 4 \n", + "2 4 \n", + "3 4 \n", + "4 9 " + ] + } + ], + "prompt_number": 105 }, { "cell_type": "code", @@ -114,7 +231,7 @@ "language": "python", "metadata": {}, "outputs": [], - "prompt_number": 67 + "prompt_number": 101 }, { "cell_type": "code", @@ -123,7 +240,7 @@ "language": "python", "metadata": {}, "outputs": [], - "prompt_number": 67 + "prompt_number": 38 }, { "cell_type": "code", @@ -132,7 +249,7 @@ "language": "python", "metadata": {}, "outputs": [], - "prompt_number": 67 + "prompt_number": 38 }, { "cell_type": "heading", @@ -159,90 +276,152 @@ "import matplotlib.pyplot as plt\n", "%matplotlib inline\n", "\n", - "corpus_features = corpus.drop(['raw_text','file_type'], axis=1)\n", - "\n", - "corp_train = corpus_features.values[0::,1::]\n", - "corp_answer = corpus_features.values[0::,0]\n", - "\n", - "x_train, x_test, y_train, y_test = train_test_split(corp_train, corp_answer, test_size=.4, random_state=0)\n", + "corp_train = corpus_df.values[0::,1::]\n", + "corp_answer = corpus_df.values[0::,0]\n", + "test_train = test_info_df.values[0::,:-1:]\n", + "test_answer = test_info_df.values[0::1,-1]\n", "\n", "model_list = [KNeighborsClassifier(), RandomForestClassifier(), DecisionTreeClassifier()]\n", "# BernoulliNB(), MultinomialNB(), AdaBoostClassifier(), GaussianNB(), \n", "\n", - "def run_test_model(classifier, x_train, y_train, x_test, y_test, orig_train, orig_answ):\n", + "def run_test_model(classifier, x_train, y_train, x_test, y_test):\n", "\n", " classifier.fit(x_train, y_train)\n", " predicted = classifier.predict(x_test)\n", " return metrics.f1_score(y_test, predicted)\n", "\n", "def run_rank_multiple_models(list_of_models):\n", - " return [(model, run_test_model(model, x_train, y_train, x_test, y_test, corp_train, corp_answer)) \n", + " return [(model, run_test_model(model, corp_train, corp_answer, test_train, test_answer)) \n", " for model \n", " in list_of_models]" ], "language": "python", "metadata": {}, - "outputs": [], - "prompt_number": 175 + "outputs": [ + { + "metadata": {}, + "output_type": "pyout", + "prompt_number": 106, + "text": [ + "array([[0.04573438874230431, 0.0, 0.001759014951627089, 0.0, 0.0, 0.0],\n", + " [0.06303724928366762, 0.0, 0.0, 0.0, 0.0, 0.0],\n", + " [0.0427946089160235, 0.0, 5.7597051030987213e-05, 0.0,\n", + " 5.7597051030987213e-05, 0.0],\n", + " [0.047058823529411764, 0.0, 0.0, 0.0, 0.0, 0.0],\n", + " [0.01570827489481066, 0.0005610098176718093, 0.0, 0.0,\n", + " 0.00028050490883590464, 0.002805049088359046],\n", + " [0.0, 0.0, 0.0, 0.0, 0.0, 0.002036659877800407],\n", + " [0.00816326530612245, 0.0, 0.0, 0.0, 0.0, 0.00816326530612245],\n", + " [0.013328977458346946, 0.000914733747141457, 6.533812479581836e-05,\n", + " 0.0004573668735707285, 0.0, 0.00013067624959163673],\n", + " [0.05806451612903226, 0.0064516129032258064, 0.0,\n", + " 0.0064516129032258064, 0.0, 0.0],\n", + " [0.0137524557956778, 0.003929273084479371, 0.0004911591355599214,\n", + " 0.0029469548133595285, 0.0, 0.0],\n", + " [0.12435233160621761, 0.0, 0.0, 0.0, 0.0, 0.0],\n", + " 
[0.08866995073891626, 0.0, 0.0, 0.0, 0.0, 0.0],\n", + " [0.2222222222222222, 0.0, 0.0, 0.0, 0.0, 0.0],\n", + " [0.11084624553039332, 0.0, 0.0008939213349225268, 0.0, 0.0, 0.0],\n", + " [0.006644518272425249, 0.0, 0.0, 0.0, 0.0, 0.0],\n", + " [0.0065717415115005475, 0.0, 0.0, 0.0, 0.0, 0.0],\n", + " [0.027181688125894134, 0.001072961373390558, 0.0, 0.0,\n", + " 0.000715307582260372, 0.00178826895565093],\n", + " [0.014606155451225874, 0.0, 0.0, 0.0, 0.0, 0.004173187271778821],\n", + " [0.019253910950661854, 0.00030084235860409147, 0.0, 0.0, 0.0, 0.0],\n", + " [0.058997050147492625, 0.0, 0.0, 0.0, 0.0029498525073746312, 0.0],\n", + " [0.0979020979020979, 0.0, 0.0, 0.0, 0.0, 0.0],\n", + " [0.05322415557830092, 0.0015353121801432957, 0.0, 0.0, 0.0, 0.0],\n", + " [0.026448029621793177, 0.0001322401481089659, 0.004892885480031738,\n", + " 0.0, 0.005554086220576567, 0.0],\n", + " [0.017391304347826087, 0.0, 0.0052173913043478265, 0.0, 0.0, 0.0],\n", + " [0.02680067001675042, 0.0, 0.0016750418760469012, 0.0, 0.0,\n", + " 0.0016750418760469012],\n", + " [0.03986710963455149, 0.0, 0.0, 0.0, 0.0, 0.0],\n", + " [0.028050490883590462, 0.0, 0.0, 0.0, 0.0, 0.001402524544179523],\n", + " [0.03680981595092025, 0.0, 0.0, 0.0, 0.0, 0.012269938650306749],\n", + " [0.029607698001480384, 0.0, 0.0, 0.0, 0.0, 0.0030594621268196396],\n", + " [0.05389221556886228, 0.0, 0.0, 0.0, 0.0, 0.0]], dtype=object)" + ] + } + ], + "prompt_number": 106 }, { "cell_type": "code", "collapsed": false, "input": [ - "model_results = run_rank_multiple_models(model_list)\n", - "model_results.sort(key=lambda x: x[1])\n", - "x_labels = [group[0] for group in model_results]\n", - "x_values = [group[1] for group in model_results] \n", - "width = .75\n", - "height = np.arange(len(model_results))\n", - "mean = np.array(x_values).mean()\n", - "\n", - "plt.yticks(height+width/2., x_labels)\n", - "plt.barh(height, x_values, width, color = sbn.color_palette())\n", - "plt.axvline(mean, c='r')\n", - "plt.rc('figure', figsize=(10, 5))\n", - "plt.show()" + "run_test_model(DecisionTreeClassifier(), corp_train, corp_answer, test_train, test_answer)" ], "language": "python", "metadata": {}, "outputs": [ + { + "output_type": "stream", + "stream": "stderr", + "text": [ + "/Users/zackjcooper/.pyenv/versions/sandbox/lib/python3.4/site-packages/sklearn/metrics/metrics.py:1773: UndefinedMetricWarning: F-score is ill-defined and being set to 0.0 in labels with no true samples.\n", + " 'recall', 'true', average, warn_for)\n" + ] + }, { "metadata": {}, - "output_type": "display_data", - "png": 
"iVBORw0KGgoAAAANSUhEUgAAArMAAAD9CAYAAACvKv8uAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsnXe4XFXVh98bqkBCjRRFAwEWKgiIGDoJgqF3lKYUISBN\niEoRxNCLFGnSIQgaBFE0AVRKIKEFFKQFfgE+kA6B0EKkJfP9sfbJPZl7ztx7k0lm5rre57nPzJyy\n99pl7qyzzj7r11apVAiCIAiCIAiCVqRXow0IgiAIgiAIgpklnNkgCIIgCIKgZQlnNgiCIAiCIGhZ\nwpkNgiAIgiAIWpZwZoMgCIIgCIKWJZzZIAiCIAiCoGWZu9EGBEEQ9BTOXmf9ytILLNBoM4IgCErZ\n+r6xAIxcdwMAXpsyhXXPOY/+/VdsmE19+/Zum5Xzw5kNgiCoE0svsABfWqh3o80IgiAoZe42vynf\nk/5XxTKDIAiCIAiCoGUJZzYIgiAIgiBoWcKZDYIgCIIgCFqWuqyZNbOBwPXAk0AF6AP8H7C7pE9n\nodwLgBsk3T2T5/cDHgP+ldt8p6QTZ9amknqWBVaTNCp9HgLsDkwD5gGOkXS3mQ0HRkj6+yzWtycw\nSdJIMxsB9AeuAKZJuqyLZcwHXAbsCTwuaZVZtGkIcKWkz2Zm/yzUuwpwfvq4DjAO7/dfSbqlnnU1\nCjPbHnhA0mvdPO9GSTvOJrOyOjYA3pX0+Oysp4u29AOeAQZIejhtOwBYUtLxjbQtCIIgmH3U6wGw\nCnC7pN2yDWb2O2Ab4MZZLHdWeVLSoDqUU4tvAwaMMrNdgE2AjSVNTT+wY8xsDerTHiRdna9b0udn\nopjDgD9IqphZPew6GrgaKHNWO9s/U0h6AhgEYGbPA5tK+qSedTQBhwLjgW45s7PbkU38EBgBNNyZ\nTbwHXGVma6V5UJfvXBAEQdC81MuZbUt/AJjZvMDSwCQz6wVcCnwxbfurpF+kKOVHQL+0fS9Jj6RI\nyhDgTWBB4AYzmwe4ClgOmAs4W9L1ZnYX8G9gFWAyMBYYDCwCfKeWwWZ2FrBe+vh7SeclmxZLf1sB\nRwLr5+r8o5kdCPwAj/49BBwOHAXMb2b3JdsPlzQVQNILZraapHfMDKBiZr3xSOrCwDLAhZIuri5b\n0o/NbAfgCOBT4FVgF+CXuGPzdWBhM7sJ+DOwsqSjzewQYFf8h/w6SecXtG0PYPXU/rnM7LfAl1Id\n+wBTS/p8DeC8tP8jYL/U10sBI1IE9np8PswPHAB8M7f/XOAM4GN8XnwEHIhHsCvA9sCqwNB0/pLA\nRal/LgNWyA3j25J2KhnffsBI4C3gFmAL4ABJykfrivqqqLxU5rHAtvj35iJJl5rZT4Dv4U76GElH\nmdkwPFq+BLA4cCGwI7ASHgl/AxgOfIjP/VGSjstH7s1ss1TuDWmcrk5R0AO6Ye9rkpbuwvdkO2Cz\nZO8SwDBJN5nZpsCJ+Bi9jc+LNYDT8fG7PZWzupmNT32zPf69fSu93z31/edSn5wu6WozGwCcgy91\neiUdtyJwLj53svrmA/7AjPNpMnB5VXN/B9yGR2bvBk4GflbVH7sDP062P4N/V/cosW/ValskvV/W\n10EQBEHjqOea2Y3NbLSZPYnf1v+TpNHAssD9kjYDBuA/RuA/xi+k7ecDQ8ysLx4xHID/wFTwH5P9\ngTckrYdHPU8ys8XT/nGSNsF/9D6U9B08irVR2v/VZFf2t4yZbQX0k7Q27qzulm5XV4A7JK2P37Lu\nJ2kDYGPgGDNbGNgLOEjSusBTyb5TcYd4JO6c/l++YyS9k/vYhjtkIyQNxp2BoWnfDGWb2Vy483pG\nsmMUvoSjAlQkHYQvN9guK9zMvgp8F3fUNwS2M7OVqtr2eeC9zOEG5gVOljQQeAl3UMv6/LJk40Dg\nN7iTewXwerJ1AO7IbA4cBCxYtb8NmE/ShpKuxR2YLVP7xqf+qOBO1eZpHH5qZn0l7SdpUO6v0JHN\nsSQeqf1V+lzJv9boqw4kJ34z4Fvpb6U0Z3YG1kljtqKZbZnKnyJpc/zOxBaStgFOS31QAb6czl0L\n2DQXuZ/BxrRU4t/4Rc6KXbW3iq58T3ql/ZsBvzazuYFLgO3TWN8NHJuOzcbvBOBv+MXWy/iF0ibp\nezV3alsF6CNpa/xOzVHJpkuAvdOxo4Cv4Bc3B6Y7Kbekctei43x6rmoeDJJ0Oe0X1MelPs0uVklz\ndxgwKM21d/E5XmbfZTlbbk22BEEQBE1IPfPM3ilpVzNbDI+QvJC2vwOsZWaDgPfxH9OMR9LrS/gP\n9ArAU9k6WzO7N+1fGY8CIWlyigL1T/seTq/v4j/OWZ1ZPeOrlxmY2W54dApJn5nZA8BX026l11WB\nNc1sdPo8Nx5F3ht3rpYD7qc9Kp39kP4Hj3A+matvMPBozoQ3gcNS1PV9PCpJSdlDgaPN7FDceb6J\n2nwNd5TuTJ8XwZ2gfNuWwKOD0+2RlO27H3deobjPl5b0WNo/FnfQ8tyCj+Nf8GjySQU2Kvd+Ih51\nnIyP8/1p+93J2Z5iZk8Ay5vZKcwYmZ3Uya3050vW6GZjVdRXKwATCs5ZCXhQUiW166dmthO+ljW7\nKBibyoQZ5+WTuffzp/cPSJoCYGbjUvl5qi802/DIalftrabse5LZcweApNfN7F08kv5+bp3uWDza\nOYoZx490XsXMPsWj75PxOzHZvP53en05V9+S2ZyTdBWAmX0FuCjdwZgntetWfP5On09m1p+Okdnf\nA/9I5X1iZnunbdka8uXwJUcfps9j8Kj0uBL7imwJgiAImpC6ZzOQNAm/dXe5mS2FRxvflbQHcDZQ\nJI+TORfPAF8zs8+ZWRseAQN34jYASLfoVwWeT/vK1sTVUpN4Co/IkpYwrJvqzpf3FDA6OcKb4rd7\nn8OjlgekaNUa6dyptPfllcAvUlSVFDm7LB2TMRSPVn8f+GPO1qKyh+C3fQem47YvaV/2WaR1wsn2\na/CH4PJtewN3hDKWMLPl0/uNcMe7rM9fTbdgs2Mzx2YavhxhIPBaijqfDJxStT97T4p0D8Nvp+8H\n/DfXjm+mYxbAHYsJBZHZztaETsu9/wiPmgOsmV5r9VU1TwPfMLM2M5vHzP6GOzgDzGyuNF83pKPT\nM8MSnByrpXLmwuf5E1U2fqOqHb2SDV21t5rO1o6uBWBmS+Lf0VeBPuk7DB3HOm/bXGb2dWBbSbvg\na3x70d7uorpfNbMVUp0/M7PtUvu+n9r2c3yZyECq5lNJZPayXH1IegR3Zo9M9T+P36XJ/v8MzLWn\nyL4iW4IgCIImpF7ObP72KJKewtdVnodH9zYzs9vwW3j/NLNlcudNP1/SW3gk7x48yvJp2ncpsLiZ\njQVG487dxC7YlH+djqSbgefTGtf78YwJj+SPT0sGJpvZGOBBPFPAZPxBl7FmdgfuFD6Qtm1rZt+V\n9Ie07R4zuxt3bnfP2VvBfxgPMrO/A1sDH5ivM64ue1yq
e5SZ3Y7fNh9V0r6sDx8D7jCze8zsn8Dy\n+JrEfNueAz5vvp4ZPFp3Ymrrwsnmsj7fD7ggHXsIvmYYPHJ3M+4I75si2mfQ7syOxaO2eTveA+5N\nY/Bn3LlYOh3TJ82ZMcDxVUs1yqge6/zn84DfJCe0V42+etXMBpvZkfmCJD2K31K/N7Xl9+n869O2\ncXgkOIuczzAuJe9H4nPlj5KexKONh6d2L5M79j784bmXiuztRn/UYsU0x0YCP5I0DR/rP5nZPfhS\nmywLSL7ccfgym8+AD9O8uBaPBFd/z/Pv9weuTOt518DnzoHANWnOnYR/H8rmU1fafAp+pwRJb+Nr\nzUeb2f34koiLa9j3owJbyN2pCYIgCJqEtkolHvb9X8TMjgKezjlfTYN5qrcdJR3SoPr7AvtKOnU2\nld8POD+t02w45qnelpB0VqNtaXbM7BxJh5ftH/Ht71R6kkRkEAQ9jzVv+xsA/9p0MwBenPwBy518\nGv37r1jrtNlK3769a91N75R6rpkNWotfA1eY2V/SOtBmYoZIfwNoA86cjeXXrX1mtjXtDxDmObeb\nFyrNNgealXD4gyAImoyIzAZBENSJiMwGQdDs9MTIbMjZBkEQBEEQBC1LOLNNgpndmF5XNU+O363z\n6mTDXWa20czUZWZLmtmF6f20suPmNGa2k5n9cibOG2Jmc5vZQHPJ4K6ed5eZnZ37PL+5MllLY2YD\nzOwZMzu5m+et0p35PKuY2bBZGe8a+2t+N4IgCILGEc5sk5BLM7Uj7Tlvu3NePXiN9swH3apL0hty\nEQdoz2nayhxNeyqx7rKLmW1YT2OagMH4OtxjunneTnRjPteBmnO4Bp2N98yWGwRBEMxm4gGwOYCZ\nfQ6Xhv0SrrZ1MC4QsE86ZBgux7kmnpf3IzN7GM/3eRKeo/Y5PJ3RHtXnySVLi2Rm5wJGAC/iggcP\nSjrQzA7CnYyMCi6zeoikt1L+z+HAJ3hqo36SBpnZ65KWSumUHsGT+PfBlax64apm6+DqbWV9sRee\njmx+PA3XubgM6irATyX91cwOpqMs6hBgPUm7mdnVuOjARSV1rIs/4PZu6ot/pe1lMr8f4+IDC+JK\nWxuQ5HeTfSua2S24ctpIuQxumbRuBVexu9TM1iSXXzhlMbgyjUsFOFTSY2b2DJ6OzvCUbDumYy5O\ndfQCjpV0dyP61My+hQt6fGJmL+NiC9XzcgE8tdh0iWbgr8w4n68HLIkanIbnMn6BGeWNXyooe3n8\n+/Np6ovdcPGIakGOs/Dcu5hZn2p75JLIdwFDJE2wJGuc6szGewcrkLomfTeK+j8IgiBoLBGZnTMc\nAPyfXPI0k3yt4ApWG0q6E0DSq/iP9tmSHsLFFjI50Vdwx6DDeYkOMrPp2BVx5/dbwBZm9nlJF1Yl\nnN9Y0ku5H+tfASdJ2hjPoVqU03acpE1xtbddc/uQ9GYn/bGgpC2B0/GcpjvgjtXeSXygWhb1m5Iu\nBD6XnM+5yxzZxEV4bt/v0J4ftJbM72OSvo07R79SR/nd+XHncAP8QoQCAYf8xcGjwG9pH4OMM4Fz\nJG0E/Bi4Im1fDndW1wX64gIG+wIT07Hb4c5hQ/pU0oP4xc1ZKUNC0bzsT5VEc8F8zpPvl/kkbYhf\n0F1aUPYmeD7eTfBcsQtLurdAOGGUpClyZbUO9hTUm+VmvpI03lYidR2ObBAEQfMSkdk5w0q4LCeS\nngXOTbk9O8iCklSMUq7TpYAbzCU1P4c7js+WnLe0imVmn1WS8DSz13Dn5WA8+pfnB5JeSu9XxhP1\ng0cMdy+oLy9FvFTB/jIqtMuHvodH5yBJvaq2LOrpya68OlYRS0nKFN3GAGtTW+b39vR6L+7IV/OE\nXGL5UzP7DMDMLqddUhnaI7NZG09L5eWj1Csne5D0qJktm7a/JSm7hf0S7jyvAmxgZgPS9rnMbDG5\nwl41c6JPAdpK5uU/cEGMvERz9r+l7AnV/PZsPvfFI8vVZZ+MK3n9LbXv52a2Hh0js2cnsRPoKBld\n9L+uSDJ4ZYqlrp8oaUcQBEHQYMKZnTM8hUfb/mouG3s87kAVPSg1Fb/F/DauFb+NpA/M5T7fAfqV\nnPeqma0q6XFmlB7toIol6QLgghr2PoFL6f4NdwSLmJWcbqXnmkvlbitpbXPp0X/iTtS8wDl4tPEi\nM9swOZhFvGJmX0uqWuuk+jLp2s1TPUNxKdid8Ki18DY/nsrIy+8WqcjtW6uBkqalC5Z/0D5eT+FR\n4ZFmtjq+DrOsP54GXpZ0arpl/hN8/MuY3X2a8RbF8/InuETzxWY2CNgyHZ/NZ0hyvWb2H2B12p3u\naZ2UvS0wVtIJZrYrcKSkfYBBNezMJKOr7ckkgyfgDnx2AZdJBj+FL6n4tbVLXQ/vpE+CIAiCBhLL\nDOYMlwDLp/V6w3EHAoplNP+F38reEL8VfYuZ3Ys7HONrnFckM9vGzDmdRwJHmcubbo2vna2uN0+h\ndLCZHWlmgzs5vrotz9JRFvULeKRzpKTLcSf7NMrZFxeEuB34Cp3L/O5kLiE8FHfKoEB+t6iNtZA0\nAV9qkPFT4BBzmePfAD8sKbOCz5mV05y5C3gxRVgb1afg/ViheF6WSTT/CzjYXNXtDLxPbwYmVdmM\nXEK3uuwncef7hDRGQ/C14Z1RZk+1rHHGWOBmFUtdZ1FvrEDqOAiCIGgsIZoQdMDMdsPXxD5nZvsC\na3cWiSwpZ2tgsqSm1bM3s6twadmWyMDQCn3ak7FOpI5DNCEIgmanJ4omxDKDoIiXgOvMbArwGe0R\nxO7y79w63LpiZl8Cri7YdbekYbOjziYh+rSxzG6p4yAIgqCbRGQ2CIKgTkRkNgiCZqcnRmZjzSzT\n1X2sm+fMZWaj0xrMRepszzfT7e96lLWXmT1vZgvltl1nDVYzSms/15rFMkaa2ZfrYMtqZvaL9H57\nM1u6m+f3M7NPzewbuW0H2EwoUdWTtL5zv0baUIR1U1Wtk7Jm9bu7cCfHHmZmhUsKgiAIguYglhk4\n1Q/NdIUvAL0lfXM22FNvFsBFBLJ1rzPT3roi6fQ6FTXL7ZD0KJ4bFuBQ/IGm18rPKOQ94CozW0vS\nJ/Wwa1aR9PdG21BCPftmtnx3zWx+PA/wWsAfZ968IAiCYHbT9M6szQHFqFxdC+M/YIulTYdKeqKk\n/ItxZaiLJP2opLzH8CfRv47/4G4r6X0rUBhK0aUrgf/iabmmpDJ2xjMTTAXukXR0yrF5Fp5lYAqe\nXmowKaF/ooJnJajg6yDXM7Mt09PaeRuLbBmOpzDql/p8L0mPFNlSoy+fxfOsrgTcgSsxfQuQpB+k\nOkak8rfAc4r2B06XVLRuMyv3eDzN0mvAsmlb2bgVKWutQEc1qRVwpalr8JRRvzXPI7uipCPMbC48\nr+6udExp9js8/+8zwN14TtSfVdm8O/6U/sfpuCG4kluHdqc0WufiazPfBvaR9H5JX9yF55ddBZiM\nP5E/GM+h+x1
cbMHwuXodVUpwNfr4ZGAg/v/hRklnpEj+canPFkr99inwh1Ruv1THKsAaeGaAY6xd\nLW4NPP1VJkSR1dWl+S1pcpm9qZyZ+e5ejKdiK1LD+wDPPPIPPPdsEARB0KS0yjKD2a0YBf4D+3Pg\ndrny1f547s2i8tcCfgSML3NkE71xB3EgngZqcytRGMKT9R8naRNSEn8zWxSXrN1Y0gbAF8xsE9yZ\nvw7PJ3sRsKikG9VR1StTXZqK/0D/2syyH/u2GrZUgBckbQacDwypYUsZXwaOwVWzDsXlRAcA6yfH\nI5+WqY+krYFtgKPKCky38QeliNrOuFNVOG7plCJlrQ5qUrSnhroFdw6/jzva25lZL2Az4E5JTxao\nTl1Ou3N2HLBpcsYymxdP/TYo9du7ycaydl8GHChpEC60cUSNPs6U2DYB5gM+lKuejcfnRj5i2UEJ\nrka5u+GOe2YvuHDAHsmuP+H9X0l9vA+wFXAi7pgOYMa0Y7en78Cf8DlRSX3T5fldw1aY+e/uASpX\nw3tX0m2d1BsEQRA0AU0fmWXOqRuBR5UGmdn30udFS8qfm3Jlo2rySlnz49HEIoUhAzLncwyerH0F\n3Am7NS0L7I3nRz0FdwruwJ3kcWa2E3BQVd3THSFJz5rZubhzkCWpL1M7qrZ7vRq2lPG2pJcBzOxD\nSU+n7e+lfsiTje/LBfvyGJ63FEkfmVnWX6tSNW7ptVpZaz48ejeDmhQFYylpsnk+2MG4pOrxZtYf\nuLzq0N/j0TskfWJme6dtl6X9y+FiDR+mz2PwqOm4knZ/BXfEwOfxhBr9AZ4zFvz7kOUhfoeO/Vit\nBFern3fHvztLkZTrgFeB89J34At4xBtcpvmD9B15Q9K7qY68I505hffSLl4A3ZjfNWzNmKnvrnWu\nhhcEQdDjWWyxhejbt3UfXm0FZxbmnLrR08C1kkaY2RfwSGVh+bNge5HC0NW4I7I+nlR+nXTs87gT\ntomkqWa2T6p/D2C4pJ+Z2dHAEEknULC2z8wy5xRJF5irKq2K32ots2XzXBFZW4tseYhyurOOsavH\njsdFB3rhc3eNtP0p4J+5cdu1pNw2CtSkmDEdVV756zI8YrqYpEzOtIPqlJn1y96n5Ri/T+VeiPfb\nV81sAUlT8Nv3Zeps4HPw+5JeNrMNgcXLOqNGGTN9XPre7Cxp1xTZfNLMrgMuBZaX9GG625Hd1elK\nuQPwi6a8whp0Y34DJ3RSx0x9d9W5Gl4QBEGPZ9KkyUyc+EHD6p9VR7pVlhnMEXUjfL3jd81sNPBX\n3EkqKn+ZAltq2T39s4oVhh7Bb88eYa5yNCgd+xauIDUmRU03xddcPghcbq5wNZDi3KBlduwNzNuJ\nLflzKjVsedbMVjezc+hImWpWUZ91SWErPaj1F7z9N+FrIIvGLYsCFylrlalJZcfeh6+ZXUTSg/ga\n09+V2VRi9ynAf5LNb+PLGUab2f34be+LC87J3v8IuMbMxgInkZy/1LaZoVCdrQz5w2uT0hjfCfxd\n0ov43B9rZqPwfs8yPnRlnA9Ka2cH42MF9Z/f9f7uVpcPgJmdY2ardePcIAiCYDYTeWaDWSJFvH4u\n6dhG21JvUgR4LDC4sweQ5oAt50g6vJE2zAzJudxR0qRG21IP0rKEWyU9V7Q/8swGQdDs9MQ8s62y\nzGCWsdmkbmSeK/WMgl1/kHRxwfaextz4+sq6YZ4bdbeCXUdLeqCeddWwYTn8gaUrG+3IJs6qZ2Gt\nNG/NbFngtwW7GqFM9pdYTxsEQdBcRGQ2CIKgTkRkNgiCZqcnRmZbZc1sEARBEARBEHQgnNkejJnd\n2GgbijCzF9JT83OyztHm7GlmW6dtB3dyzsLmkrl3mdl9Zrb2nLF2BhsWTVkXZlkC2MzmM7Mfdn7k\nzGMuB/y73Oe1zewBc+nY4+pYz/RxLNk/3MzWrEM9fc1swpyer0EQBEHXCWe2ByOpOn9ms9CotS0V\nSVdLGpk+H9PJ8YcDt6WE/3vhqbbmNKvhogpIOj0nhDEzLE27pHHdSXmMT2HG9FcXAbtKWh8YYGar\n16OuqnEsYpbnmJkNxnMI1xKYCIIgCBrM/8wDYD0N65rM7+uSlrJ2SdFVgD54HtEXS8otksrthYsF\nLIynNrpQ0sXWNTnVzYAl0t8wSTfl6loWuASXdP0vnirrLeD6ZOcCwDFlSkxmthLF0rRDU78sCVyU\ne6CpzcyG4VK4iwOLmdkFwGhmlAIGF5w4B5egBRcw+G+RHcmWveieLO8heD7cCnCdpPPNbIdU76e4\nSMEuuMP99fRQ3Lq4MtbSzJzE8zF4zttj8dyq1+JCBXPjSmmjzewJPA/uJ+mYzmSTAX4m6Z+4KMKf\ncQUuzKwPMJ+k59Nxf8eV1/5NAd2cp8OA1yRdYgWSzOn90eYqY23AfrgAwwxzC0/FNqqq+DsknYQr\n532bJNQRBEEQNCcRmW1tSmV+0/58jtFxkjbF1Zh27VBSO0VSov2BEZIG487M0Kpya8mp9kr7N8PF\nGTJBgjbgTOC8JJF6Fp4LeHnc0dw62VnrgqtMmnYJXPhhHeCnZtY3d06WN/cUYJKkg9VRCniQpIck\nvZeUxpYCrgGOrmELdF2W96vAd3EHbENcNncl3Hk9I0m7jsKdrpNwKd3LckVUgIXKxt7KZVxPwmVc\nTwKOxXPIboRL016Ryl4QOEHSrnRNNnlQcmSRdH11fwDv5z5/gI9RGd2Zp5kkbpkkM6l938alos+g\nYG5J+rCgPSel9tzeU1KKBUEQ9GQiMtu61JT5LTg+L0+7VI1yi6RE3wQOS5HD95lx3nQmp3oHgKTX\nzexd3NHMWBX4uZkdiTu3n0gab2aXACPwaOh5lFMkTQuesmkqMCVFGmvJ7mIlUsCSHkoqUiOAn0ga\nW6OY/Hh0Jsu7CvBlXJQAPJKdRZSPNrND8fG8iXK1uWw8uyrxXC3BvDLuoCPpVTN738yy2+mZQlmX\nZZNLlj+8j0d+M/okG2vR1Xmab0eZJPPd6fUB4FdFc8vMFgRuZsZlCXdKOrELdQdBEARNQDizrc3s\nkIzNS4kehUf7FgbuT0sLBgFbdqPctYBLzGxJ/NbuxNy+p4AzJd2fomkD0mtvSVuZ2dL4reubS8ou\nk6b9JkwXdPgKriqVpy3/KumPlEsB34Df7n68en8BXe3jp4EnJW2e6hmKK30NwZdiTDSzi/GlAc9T\nfAel2xLPuFRvVtZTeFT4UXP510WAt9O+aem1y7LJRUh638w+MbPlUzu+Awzr5LTurnWtJcm8NjCB\n9nZ2mFuSlsdVxoIgCIIWJZYZtDa1ZH6h3DGo5TDkpUQH4Y7BSFyS9O/4LdoPuvF094qprJH47fBp\nOXt/CvwyrZW8AngCdzwHmtnd+PrGX9Qou0yato+Z3QaMAY4vuFWctX+8mRUl4884BZf+PS9lQ/gz\ngJkdlR4OqqarsryPAXekJ/z/iUeOX8H7flTqryXxPnsOWNXMflxSV1cl
npcB3gDmNbNTU9s2Tv38\nZ9xRnVpVVndlZYvsOQCXAx4HPJxFcK1r8rydOradSDJ/28zuBA7E1yJ3Z24V2mFmq1mxfHMQBEHQ\nIEI0IZhtmNmewBKS6qpe1UmdA3H51ENmYx1bA5MldcUhCwqwWZTnNbOTgWckDa+fVV2qt6Z8c4gm\nBEHQ7PRE0YRYZvA/inkO2sWqNr8rafs6V1WPFEkX0r4OMs/mkj4qqG92X6H9uyuSptYEsrxNTJcu\ncErm6RrAi/gyjDlN3eWbgyAIglkjIrNBEAR1IiKzQRA0OxGZDZoWM+sHnJ9SQzUtaRnA9cCTuc2/\nr0o9lT/+LmB/SSraP6dJT/N/TdLxc6Cu+YA9JF1R45gN8Ih6Vx5Qy583DH9Iat20VpaUCeC7Zbld\nG8WcWDpSVd8SwO/xjBSvAntL+m/KSXyCpDfnhB1BEARB14gHwII5TQW4vSqvZ6Ejmzv+f/X2QVcU\nu36IP9w1M/Rjxty5zdrPc9qu44BrJW2IpwrbP20/Dzh1DtsSBEEQdEJEZpuYlGroKmA5YC7gbEnX\nm9ka+A/rVOAjXN0I4Etmdgu+xvCvSRigrOyrcDGEzwHnSro2JaA/Dk/j9DD+JPomwImpnreBffA1\ni6fj6lgNvwypAAAgAElEQVSX4jlBM8Wk5/Af/22ZUSmqgqfOgoLcqUkt6jI8RdR0lbHc/iJlso+B\ni/Ecrb1wFau7KSApdO2T6v4lvga3WiFrdwpUvMxsXeDXeI7Uj/AsCpjZT4DvAZ8BYyQdlSKe/fF8\nuovjErg7AisBe0oaV2JfUfvyil1X4cIFmerXsXi/DwZWN7PxeCqqw/FxuEfS0WZ2UCorowLsmV7P\nAPY1s1GSpqty1Zh3d1Gg0FWkZlbSxkNw4YUTUtT538DX8XlWrYY2HJ/Hi+OiB2ukDBULA7+RNLzk\nezCRAgU5M9u5G32zHj6fAW7FMz/8WtIEM/uKmS0WYgpBEATNQ0Rmm5v9gTckrYc7lSeZ2eK403eQ\npIHAb4Cz8R/iBYEd8FybW5rZ14sKNbPewAa4A7cZMNXM5gbOB7aQtBaexiiTm90+1XU37kRVcJnS\nDfG0S5fmjnkF2KtAKWrjlJapDU8JNTr93WlmvXAH8Dp1VBkjnVOkTLYvMDGpWG2HO461eDupa42m\nWCGrQrGK10XA7nJ1s8dxWdxVceWsdSSti6cg2zKVMSXlkL0x9ec2uLrZLjVsK2pfXrFrZeCsZMMQ\nfPwfxgUjjgA+xHO4bpza+AUz20TShQXjkD28NjmVNTyXaq2N8nnXQaHLytXMirgmHUvq35H4hUjR\n+RVcVnY9/CJiGp6ndiNcWGIJir8HHVS+zGyxbvZNH1yMIuujvGrZ07RL5wZBEARNQERmm5uVgdsB\nJE1O0bf+wNIpVym4+tFp6f2D2dP9ZvYQHg18jCokfWBmh+HOQB88F+niwDuS3krHnGkuA/u+pNdy\ndZ2MS61ma1j74pHCG8wMPKp5m5ntyIyRWXCnq4IrLM0gVWpmtVTGKhQrk60KrG9mA9Jxc9WImlXw\nBPqoWCFrnnRckYrXUpIy4YUxeATUgAey9aapb76W3udV0Z7Mva+lClbUvvzxrwPHmNkPU1uqv7v9\n8bG4NY1Db6C/+Yedqo79QfZG0tiURzaveFU276CjQtfXKFYzm1DdQEnvmtkjZrY+HgEdCqxecH72\nFEI2xyp4NLUC/NfMnsKXSHT4HqhYQa67ffM+/r2YmI7Nq5a9hn9XgiAIgiYhIrPNzVN4BDWLpq6K\nKym9miKD4JGq7Ed/NTObL0VZ18ajiB0ws6WANSXtAGyF326eBCxiZoumY87BbzP3ScdX15WpRL2F\nO37bSBqEO9a3FURmB+Uis0UMxVXGvo8rTOXnZhvtalQb4w7ikNQ/I1K92+K3l98pKX+6zSliva2k\nXYBDU12ZXUXrM18xs8xRXSe9Po0rls1lZm14VLHagWur0d5q8u0bn9o3lfZ+OAH4raQfAHfltk/D\nlwI8jzuYm6T++A1wX0H0cVBBWrFj8IfBVkify+YddOwf4Wpmg1K911BwAZXjMvx2//ySJtCuhlZ0\nflZXG7CWmbWZ2UK4s/0sBd8Dy6l8AXvhdxu62zf34stNSP0yJmf/orj4RBAEQdAkhDPb3FwKLG5m\nY/Fb48MkTcTXBl6QFJ4OwZ0D8Fujf8F/jK+VijMASHodWMrM7gX+gevWf4orJd2c6usl6cFU15/M\n7B5gY9ojeJVU1jTgx8AtqbwhuDNWRtkDXbVUxioUK5NdAqyc1nLeBbyYoq5HWm2FrmcoVsjKH5N/\nvy9wRar7K0BF0hO483wvHkV9XtJNVedVSt4XUaS29Sau2HUaLqt7ppndCnyJ9tyr4/ALiCXw2+xj\nUlaCTXGHrxbZGH6MS8L2SdvK5l2H81WsZvaqmQ02syOrT5A0Bo/mDk+fy9TQpttHe9/dhkdwj5X0\nLsXfgw4qX+luQ3f65iRglzTnBwAX5PatgUeBgyAIgiYh8swGPQ4Lha6Gk5ao7Cupxzz9n9YHHyZp\nSNkxkWc2CIJmJ/LMBi1FcuqGFuw6NxdF7Il0SaGrEdicU15rNG3AmY02os4cDPyi0UYEQRAEMxLO\nbA9G0kj89v3/FM3qyAJI2rHRNswJ1AOFBSQd2GgbgiAIgo7EmtkgCIIgCIKgZQlnNgiCIAiCIGhZ\nYplBEARBnXhtypRGmxAEQVCT1SqeWfPFyR8A/n9ruUYaVAcim0EQBEGdmDBhQmXSpMmNNqPHsNhi\nCxH9WT+iP+tLq/bnGttvBcAjfx41fduyy36Zeeedt+yU2U5kMwiCIGgSVlppJSZO/KDRZvQY+vbt\nHf1ZR6I/60ur9ufc87jgZSNTcdWbWDMbBEEQBEEQtCzhzAZBEARBEAQtSzizQRAEQRAEQcsSzmwQ\nBEEQBEHQsoQzGwRBEARBELQs4cwGQRAEQRAELUs4s0EQBEEQBEHLEs5sEARBEARB0LKEMxsEQRAE\nQRC0LOHMBkEQBEEQBC1LOLNBEARBEARByxLObBAEQRAEQdCyhDMbBEEQBEEQtCzhzAZBEARBEAQt\nSzizQRAEQRAEQcsSzmwQBEEQBEHQsoQzGwRBEARBELQs4cwGQRAEQRAELcvcjTYgCIKgpzBhwgQm\nTZrcaDN6DO+8s1D0Zx2J/qwvrdqffT79FIDnnntm+rZll/0y8847b6NMmmXCmQ2CIKgT+w4fyoJ9\nezfajCAIglKGf/weAMff/ysAPpz4AWdscwL9+6/YSLNmiXBmgyAI6sSCfXvTe5lFGm1GEARBKW1z\n+QrTnvS/KtbMBkEQBEEQBC1LOLNBEARBEARByxLObBAEQRAEQdCyhDMbBEEQBEEQtCzhzAZBEARB\nEAQtSzizQRAEQRAEQcsSzmwQBEEQBEHQsoQzGwRBEARBELQs4cwGQRAEQRAELUvDFcDMbCBwPfAk\n0AbMA/xa0g3dLOcc4GxJLxX
sGwx8SdJl3ShvFeD89HEdYBwwDfiVpFu6Y1tVuUOA3VNZ8wDHSLrb\nzIYDIyT9fWbLTuXvCUySNNLMRgD9gSuAaV1tv5nNB1wG7CmpMiv2zCyp/xeVNLab55XOg3phZssC\nq0kaNbvq6A5m9gLe5vPS55WBiyQNaqBNSwLHSTrIzDYE3pH0eHfOq5MdLwAbSnqxu3WZ2WrANsBv\ngbsl9auHTUEQBEF9abgzC1SAOyTtCmBmCwJ3m9kESY92tRBJh9fY120HUdITwKBk0/PAppI+6W45\necxsF2ATYGNJU82sHzDGzNbA+2GWkXR17uO3JX1+Joo5DPhDoxzZxE7Aa0C3nNla86COfBswoCmc\n2cRhZvY3SRMabQiApDeAzEncBxgBdOrMVp1XD14E3pyZutL/n0fT/6T/1NGmIAiCoI40gzPblv8g\n6UMzuwR3Zh41s1OB9YG58OjTH81sAHAOvkziFTzS+Tdgf2AJ4CzgE2BKKmcnwCQdbWY/Ab4HfAaM\nkXSUmQ0D+gGfB74MHC7pH0XGJgd0JPAWcEuq99zUjreBfSS9X2Q3MCSVPTW19QUzW03SO2YGUDGz\n3ngkdWFgGeBCSReb2YHAD/CI7kOSfmxmOwBHAJ8CrwK7AL/EncCvAwub2U3An4GVU/sPAXbFnefr\nJJ2fosKLpb+tgD2A1VN7twKOS+17GDgAd8hPBD7K2gysARydti0LXAxsDKwGnJvacB8gYEVgIrBb\nGotsbOYHngLWA/YCPjKzh4EFgJOAqcBzwP6SPisZn7vwebArHpVeAlgcuBDYEVgJ2BN4AxgOfAgs\nDYySdFwa3yvTuFWAQyU9Zmb/SbaNBzYHFkjteT/1Ty9godSmT3Hn7cVkw4OSDjSzvsDVaWzb0nhO\nTOO9WGrCoZKeMLOr0rmfS/13rZmNTHVkPCnp4GTnUGC4ma1f1R9rAOelvvsI2C+1rci+hYtsKenn\norbsAayLj9e+qX8PBDYDVjez8cDawOHJnnvSuA+rOu8qSeuY2aYUz7MjgY+B5fE5fIqZnYh/3zIq\nwGBgJ0kfmdm3gAuAD3Dn9iNgWDp/HTN7DLgL/95UgG2Bb+BzbVcz27moH4IgCILG06xrZt8AljCz\nzYB+kjbAHaNj0g/uJcDektbGo2NfoT2yuS1wHbARcBGwaLbPzFYFdgbWkbQusKKZbZn2fyRpC+DH\n+I9tLZbEI7W/wm/HH5hu6d4CHFHD7mWA/8sXJOmd3Mc2YAV8ucFg/Md4aNq3F3BQsvspM5sLd17P\nSPWMAvqktlTSrdNJkrbLCjezrwLfxZ3FDYHtzGwl2qPj6+MO/Xspcjw3vtRiC0lrAc/gjuolwPaS\nBgJ3A8emMr4A7AD8KG3bA3f89s/129mpnudwx7hD9FfSq8BV6diHUh9n9b2S+qKMSu51iqTNgRtT\nG7YBTkv9VsEvXHYG1gI2TY7fmcA5kjbC58IVqbwvArtKGprK+J2kkcBXgT3S+P8plVfBHfZ9gG8B\nW6Rb2scCN0laD/hJ2nc0cLukjVM/XWRmCwEbANvjjmB28bO1pEG5v4Nz7b4VeAJ39PJ9ehk+bwYC\nvwHOrmHfz6ttqdHPRW2p4A72+rizWJH0cLLtCPzCYRh+Z2ID4AtmtknBeRll8+xL+DxbO5WLpF9U\n9c3Gkj6VlEVlL8aXzXwbn3vV86438PvcHNs8f0yunCAIgqDJaIbIbBH9gJeBVYE1zWx02j532rek\nJAFIugogi2wCpwDHAHfgP0rjcuUa8EAWGcVvYX8tvf93en0ZmL8T+57PRQZXxh0Q8DWwz9Sw+z/4\nD/GT0w3y9bz55RRv4reMd8CjfvOk7XsDPzWz5YD7ccd3KHC0mR2KRw1v6sTur+EO3J3p8yK4UwMe\nMQWPZL6Re/+OpLcAJJ2ZInLvS3otHTMWOBl3pp9ITvB7wHOSPjOzd2nvzzdz6ybvwZ31B3L2tVW/\nT/UtBdyQ+vhzQGHUvICH0+u7tPd53p4HJE1J9YzDo7YrA2NSex9N62MB3spdeLTlbH0VOM/MJuPO\n/D1p+7OSPkxlv5bqXAm4PJV9P3C/me0BbGxm30vnLSppspkdhjuifYBrUzmjgAVz7csis9Aenf0n\nM14wLS3psfR+LO6Il9m3KjAobwvlFLXll0CtZQ79gb7ArWkse6dtVJ9nZktQPs8elzQNmGJm/03H\nn4RfpOX5jqRPc/3wVK6sXQrseyS9vkTn/wOCIAiCJqHpIrNm1ge/1Xg98DQwOkW9NgVuwKMqr5rZ\nCun4n5lZFn1sw6OBw1N06Un81n7G08AAM5vLzNrw6GT2I9qd9aHTcu8FfD/Z+HPgrzXsvhL4RYqq\nkqKil5Eib4mhwP2Svg/8kXanaT/ggBQ5WgO/LTsEGJa2teGRPKhaupH7LNwBGpRsuwbIHJ2s/W/g\nTi64Y72ImS2a7D0HWA7oY2ZLpWM2ot0R7qwPl0i38cEdj8fxSNzSads3csdOxW+Hv41fYGyTbD4N\nuL2TeqrJO595VjOzedJ4fAuPbD6FzwvMbHV8yQbMOOZTaf/uXArsJWlv3LHNthf1xVOpHsxsw7QU\nZTweCR6Ez92rU9+uKWkHfNnHGWbWS9JWNSKzSJqMR1TPzdX/arojAZ2P1VPVthQcU6stMGM/kds2\nF/A87ihukur4DX5h1uG8dAHV5Xkm6diqvhmUc2QBXjKzr6T365S0qZFrxIMgCIKZpBmc2QoemRpt\nZrfjzuBxkp5Jt3Enm9kY4EH8ifzsB/vKtD5yDfz2flbWg8DlqaxBtP8gV9L6v+uBe/GI7fOSbsqd\nS8H7zj7/CLjGzMbi6zofL7Nb0h/wSOQ9ZnY37tzuLmlirtyRwEFm9ndga+ADM5sXd/zGmtkduMM5\nLpU9KrV1SdofSKoUvFZShO4OM7vHzP6Jrzl8JX+spOeAzyfnaRq+5vHm1L5ekh7EHes/mdk9+DKK\nEzvpw+z9Z8Cp6bwlcUfwb0C/VP7OwHvp2H8BB+OO5Y+BW8zsXtyBH0/XmKH9Je9H4mPyR0lPAj8F\nDknj8xvghwXteRzYNkUwr8XHZRS+jnrpguOzz6ek80bja5svSdu+m7b9FXhK0uvAUqm9/8AzaBQ5\nidXtRNLdwO9z+/YDLkhz8RB8CU1biX0nV9sCfhFj/mR/nqK2VLc7ez8OvwhZAl/mMMbMHsAv9J6p\ncV5351ktDsT/Z9yGLyvJHuYsO7/6OwSU9kUQBEHQQNoqlQhGBDNiZkcBT+cc/XqV+7ikVTs/cvaT\nIsTnS9q60bY0O2Z2MHBrutBpScwfoLxe0lvpYbGPJZ00E+XU7IvNz9m10nuZRYp2BUEQNAUXHHwt\nAAdfsAcAH7z6Lr9c52f0779irdNmK3379i66e9plmnXNbNBYfg1cYWZ/UX3Tc9WlrLSO9bcFu+6W\nNKwbtsSVXNf4i2Zj3t45xBvAP9La5nfxjBYzQ0/oiyAIgh5FOLNBByR9hKc7q3e5X69T
OS+RcgDP\nQhn/wRPiB53QE5w3STfiWS1mtZyW74sgCIKeRjOsmW0JzGyn9LR2d88bYmZzm9lAc0Wurp53l5md\nnfs8v7l4Q0tjZgPM7BkzO7mb561iZhvMLrsK6htoZu+a2Rdz204zV1hrWbo7D3PnbW9mE9Jt9nrb\nNNLMvlynsl5I2T2yzytbe1aRIAiCoAcSzuzs52j8Se6ZYRdzKdCexGBcBOCYbp63E57TdU7yMZ7v\nNqMnLEuY2TZsDQyVdEE9jclRz749LGUKCYIgCP4HaNllBma2F/4DOz/+BPm5uGDCKsBPJf01RZG2\nx3NzvpXeDwHWk7SbmV2N5xotTA5vZuvi60ffxVNI/SttL1PR+hgXPVgQV0TaAM+ROiLZt6KZ3YIL\nE4yUdLyZXZbOyXhb0k6p7MOAS81sTXLpu6xcpeoZPM+p4WsEd0zHXJzq6AUcm554n+N9aq7CtDfw\niZm9DLxDlbIXrgJ1OTkFNPzJ+r1oVwS7HlcN+8TMTsOfun8BOCONwaV4CqjqspfHndNPU1/shufd\nrX4Q6Gw8x++dQJuZHSTpwqq2dFlJzsw2qrZF5Qpmj+MCAV/HU7y9gWd0+BjYAp9Pv6F9jI4FRuMp\nrr6Hp7gakcZjckEV0xfZm6taVatxfbGg/Gm4iMCaZvaWpAeqC01zZws8D3B/4HRJV1uBApmkl8zs\neGBLPPXZsqmMQgUya3I1tCAIgqCxtHpkdkFJWwKnAz9KeTmHAHub55FdDM9puTbuuH8zOSWfS87n\n3GWObOIiPHXWd0i68lZbReuxpDB0Ep5O6QrgdTxBexvuIGyLO7kHA0jaryo35k65+h/FH3TKVJsy\nylSqlsOd1XXx5PRr4Tl7J6Zjt8Odw4b0aUrrNRw4K2VKKFL26k+VApo6KoLlyffLfJI2BH6HO7TV\nZW+Cp+HaBE8ntbCkewvyk46k3ek7EDjczLLk/jOjJNcdBbOFcHWxDfF5cm8au3lx0QtL/fcdfFwO\nkvR+KvNy/CLn+yWO7HTMcwcPo6MaV1H5I/EUaj8rcmRz9EnZIbYBjsq1fQYFsuRUDpL0TbwfF8L7\nu4MCmbWGGloQBEHQQFo2Mov/CGWqXe+RcmKSFJ4kVczsU2BEeoL5i7SraZ0O3MeMSfqLWEpSlgdz\nDC6fWUtFK0vmfy/wq4LynkiJ3D81s88AzOxy2lWQoD0ym7XxtFTeFrljaqlUZXljMxWjVYANzGxA\n2j6XmS0maVKBfXOiT8GjnWXKXrcwowJaNkfL0nbkt2dJ9fviUcXqsk/GHZy/pfb93MzWozwyi6RJ\n5mpcV+PjAN1QkqvRzlrklcuynLrv4OP5Oi6P/EN8vOZJdj5kZu/gKaceo3NWoKMa1/Kpjfny8/8j\naqVOyc+dvIre0uqoQLYy6S6HpI/MLLtA6aBAptZQQwuCIAgaSKtHZkvX2aXo2baSdgEOxdvaZi5A\ncA4edbrIzOYpKwN4xcwyJyVTDaqlovWt9LouKZJLu/pRob2S9q0RmUWeLH/PZHN2fplKVVF/PI1H\nOgfhUeHrcceojNndpxlv0VHZ6w7gJ8yogJbN0UwRDPwW8TIpUrx6rsxMWKCs7G2BsZI2SWUf2YXI\nLJJG4eO+V+qf7ijJFdnSmYJZrfWjJwC/lfQD4C7aZX93AiYDn5nZjp2UD8VqXA8UlN+d/xFFdhcp\nkI3H+69XmjtrpP0dFMisNdTQgiAIggbSU5zZ6pyhFeBZ4ENz5aNr8WjXF3BnYqSky/EI3WmUsy+e\nb/V24Ct0rqK1k7lC11DcKQOPAOUVyqpt7xRJE/BoYUZXVKqyz5cAK5urpd0FvJgirEea2eCC6mZ3\nn4L3Y4ViZa8yBbR/AQeb2UB8bewtwM3ApCqbswuA6rKfxCN1J6QxGoKvnyy1sar9hwH/TeV3WUmu\nrJ1mtlRJVoFa86KCSyOfaWa3Al8CFjOzL+FO6P64It3JuWh9YbvkcrFFalwdyu+ibdX78ypeMyiQ\nSXoU+AuuYHcT7vAXKpCpNdTQgiAIggYSCmB1Ij2kcr6khzs9uAkws62ByZIibVEDMLO58Iekftpo\nW4KuY6EAFgRBixMKYD2QFNUquoXYHTWpVuTfmk0J4P+H+7Q7tFG8rroumNkvgI0Ldu0t6YVZKPdC\nilOkbS4X2+jphAJYEARBkxGR2SAIgjoRkdkgCJqdosjsuVsez0orNTQ9d0Rmu4qZ3SipKw/HlJ2/\nBHBDeihkVm05Es+I8BiwhzyNV3fOH4bn/lw3e6o+rX38rqQXZ9W+mcXMzsFTaDVV9CqlDRsh6e+z\nWE6/VM46nR1bdd4AfJ3x9aohGGFmCwC3AftIUtlxXahvT2BSepitaUj5aE3S0XOwzoF4bt9ds/8B\n6UGwRSSNrXHervh658/wBzoPTGuggyAIehSTJk1m4sQPGlZ/3769Z+n8Vn8ArFvMiiNbbySdLs+Z\nujT+oNnM0A9XGMto+A+tpMObzZFNVD/QNafpVPnMzL6Jp1xbjlm0VdLVzebIJhoxBvmHwrL/ATtS\nQ1HOzD4HnAgMlLQ+LuKx1ew0MgiCIJg5ekxk1rqmXvW6pKXSk/2PpH19gJ3Lopkpgfrv8LRQ/8lt\n76DqhKfwKVJBOhBXBJsGPCTpx1mkkCTTmtY4DgaGSBpvZpvjP57j0zEZFTxVVwV/qn9fMxslKcvx\nSUqNdRXuFM2FR0qvL2u3FSialfRFP+APuFpSP+C6VNYawM2Sjkl17J/K60eVGlZJuX1TuZmwxAEp\nf+6pwJrA4sCjkvZJEen+wBJp+4W4Y7JS6pc3cGGGD/F5MErScbm65sYzPMygiGZmJwMD8e/EjZLO\nKLK1yu6iOVCkYPYvZlQ++yHF6lXz4sIW13Sh7s6Uwo7FU7Y9jQsYfIxn3rhO0ik1yi1S29oJF4+Y\nB58j2+N5WI/G06Qti6vMbQysls672Mzuw9NfrQhMxBXX8nUVKentAByBq7S9CuxSFg0tGrPcHF8D\n/75lgiXZOa/juZD3ol1R7jiqxgPPdrBubh3w3KRsFkEQBEFz0dMis6XqVWl/PoXTOEmb4rd0d61R\n5jG052n9XW57kapThWIVpL1w5aF1gafSk+xZpPAkYLykE3EnaM90zj7AZZIurMqnuXEu8jk5tW94\nSl8F/sO9P/CGpPVwtauTzGzxonZbuaJZGcsl27bCI1eHAwPomB6sTA2riLXw9EybAwcBC5pZb/w2\n+XfS/rXNbJlU7hRJmwM3AltI2gZPB7ZL2v9lXFlqLWBTc8WprG/2o1gRbTd8HmyAixV0ha4qmD1E\nTvlMJepVku6T9HIX6+5MKSzvAH4J2AEX/TiirMDU5x3UtnBndEu5Utj41K4KnpZtBzwd2LH4xdzm\n+PwDWBK/kFofd/YPyNVVNu92Ac5IdY3CL7rKKBqzCq7cNRD4E/79rU6XNoOiXNF4SKpIejPZegj+\nv6Wz/MBBEARBA+gxkVk6Ua8qOP6R9PoSrtB
UhuFOJnjO2CySWK3qdBueh7VIBWlv4KdmthxwPzMu\ndM6/vwH4p5mdCXxB0r9TKqDq5RE/yN5IGpvy4J6Y278yKTG/XEFpPO0qY9XtLlI0W4F2EYBq/k/S\nB+ZKYG9Iejf1SVH0rKgvirgVd5j+gkfkTsKjYEua2e9xp30h2tXG8gpZT+beZ3U8IGlKsmscHrXN\nKFJEWxzYHb8IWirZU5OSOVBLwQzaBQ6q1avGSzqoszoLqKUUlufxlJt1ipmVRhfTuHZQ28Kjqleb\nq76tjM9hcEW7qWb2HvCcpM/MLD8Ob0rKxEPuwZ3gTA63bN4NBY42s0Px73CWv7eIsjG7Lb3eC2xZ\ncm4+Wls4HmbWC7/7sQIdv4NBEARBk9CTnFno3nq8rh47Hlgff1Br7bQtr+r0gZlthzsR/UrK3Q+/\ndf6xmf0NVwgD/0GdSoqQS/owJWk/l3SrWdIFwAXVBSYHKuMYIFt/C+4EbADclKJtq+KKT0XtzhTN\nNk/lDqVd0ayI2dHHA4HXJA02s3WAU3BFsS9K2iU5jtvT8WnHtoJtAKulpRbTcFW2y/BII/ht95cl\nnWpmfXBxiw/wJRe7mit6PWlmIzpZ+1s2BzIFs4vNbBDtzlReUaxeay+72r9dOs5yaltmNj/wopn9\nCRiGLyXohTvsWVs6K3cJM+snTwW2Hu2qeFA87x7H7zQMkzTRzC7Gx/23BbbOS8cxuy7tHoBfeOaV\n+KqZrihXYzwuwZdRbB8PfgVBEDQvPW2ZQS31Kij/8a31Q3UisGVai7cLtdWrqsvK3j8OjDVXnnoD\nV43K9r8JzJvWh4I7Xtsy45KGMjLFq4/x6G+ftO1SYHEzGwuMJjkHReerWNHsVTMbnDIuFNbZyfuu\n7s94FF/7OxqPhJ2Cq0Mtb2Z34s79OHwNar6sSo33I/Eo4B8lPZnbXqSI9gkwKWWDuBP4eyeO7Mwo\nmM3UA2g2c0ph1cd0aRxUrLb1Ph7hvB/4M+6EZhdNnc2Fz4BTzewefMnBZdn+knn3Cj7uo9LdhiXx\n/iyytWjMsnXvB6XxHYwreRX1RaYot1FR+Wb2DXw5zSrAnWY22sy2NbMlS8YjCIIgaBCRZ7bJMH+i\n/WBJezXYjr7AvpJO7fTgJiI9pHZ+Wrfc8lgLK4WZ2eOSVp3DdY4GdpQ0aTaVX3M8Is9sEATNTiiA\n9dstcPMAAB8LSURBVGDM7EZm1KEHeFfS9nPQhoPxaNDOc6rOGrQBZ9azQJtNqlRV1CUFl5ntR9XT\n94mjJT1QsH12UXelMHMp46EFu86VVGuNanepxzishUfrq/mDpItntfyZYLYqtwVBEATdJyKzQRAE\ndSIis0EQNDs9MTLb09bMBkEQBEEQBP9DxDKD4H+eJArwNUnHz4G65qMT+WIz2wBf4lL2JH7ZecNo\nQonjjGTfa/iDedtIOtHMtsdTqb1mZkOAKyV9Ngt1DMcfeuyHpw67cSbLmT5OuTL/Bewg6YSZtS8I\ngiCoPxGZDYI5S1fki39Ie+aG7tKPJpM4zpFl33g0iYSAZ4TIhBGOJqXLmgVew5XDZpX8OL0GvCLp\nCWAFM1u+DuUHQRAEdSIis0HLYi5hvA/+UM4vga/ieUkXxPPAbo8n1i+SGF4X+DUuOPAR8M9U5k+A\n7+FppcZIOso6kdCVlKVaq7ZvPeAs4BNgCi5LfAwuX3wsrkJ1Ee0SzMfiYhaDgdWT2MXauHraVOAe\nSUeb2UE0j8Tx/MD16dwFgGMk3WZmjwLP4Opjj+Kpy7JzNsLVwK7B5W+vNrMrcfGDEcAOKVXd+jlb\n/5hsfQN/UPMyXHkszxG4EtwUPMfsXmb2o9S/h0t6yMx2LujPzsbpVNqlbK/HVep+UtQfQRAEwZwn\nIrNBq/N2kj4djTs5m0haG79QW4tyieGLgN2TXO7jQJuZrYpnklhHLj28opltSecSumVsC1wHbJTq\nW5R2+eKTcDWts5INQ3DJ44eBv+GO2Ye4YMHGqY1fMLNN1FwSx/1xB39r3PnNLpD74SnmvoWre21X\nfaKkW3CVuB9IuhR4HdjFzDYH+qU2bwwcY2YLJ1t/L2lTSddX9cGgJE37nqRPUxWPSdok9cfFZrZo\nUX92Nk6S3s+V+Tgu8hEEQRA0CRGZDVqZCkl2V1LFXGJ3RJJd/SLt8rdFsrpLSXomvR+DR0ANX785\nNW0fi8uuQucSukWcgkf47sAFAcZVHf867qj9MLWl+vvYH+gL3JoU33oD/c0/7FR1bEMkjiU9aWaX\n4BHVeYDz0q7xSYQBXHShzBmupg1XrFsz5YwF75d+WZUwfZ1ztQTwEZIeyn2+O9k4PqmbrUDH/lye\nzscpz2u48x4EQRA0CRGZDVqdaQBm9nVgW0m7AIfic7uW7OorZpY5quuk16eBAWY2V5JI3ZCODlyZ\nhG4RewDDJW2Mq4MNISdfDJwA/FbSD3A1smz7NPz2+vO4g7mJpEHAb4D7CiKzgwoUy47BHwZbIX3O\nJI6xrkscD0r1XkOJxLGZrQL0TpKwewHZcoQVUzQV/Jb/EyV9lLU1e98r2To61b0pcAPwXO4YJP2x\nKDJbVfbaycbVgRco7s8H6Hyc8iyKq/YFQRAETUI4s0GrkzlizwAfmtkY4Fo8klotf5t/vy9wRYpg\nfgWXWH0CXxN5Lx6dez4nItCZhG4RDwKXpzoGAlfTLl98Gu6knWlmt+JrSzPRjnH4EoYlgLOBMSkr\nwabAs13pjzkocfwMMNDM7sb77hdp+0fAb5PdL0q6OW9f7vU+fM3songk/GZJI4HJaSwfBKZJmtxJ\nu4tYJUlInwcMkfQWHfvzGWqPU7UC3gBShDsIgiBoDkI0IQiCLtEdieNGSNnOCczsWvwht/8U7Q/R\nhCAImp2eKJoQa2aDYBZpBinkOUR3JI573FVyekDw2TJHNgiCIGgM4cwGwSwiacdG2zAnkNTltaKS\nvj47bWkEScSiW0IWQRAEwewn1swGQRAEQRAELUs4s0EQBEEQBEHLEs5sEARBEARB0LKEMxsEQRAE\nQRC0LOHMBkEQBEEQBC1LOLNBEARBEARByxLObBAEQRAEQdCyhDMbBEEQBEEQtCwhmhAEQVAnPpz4\nQaNNCIIgqEll6jTAZWyhZ/zfaqtUepzqZBAEQUOYMGFCZdKkyY02o8ew2GILEf1ZP6I/60ur9uca\n228FwCN/HjV927LLfpl55523USbRt2/vtlk5PyKzQRAEdWKllVZiYg+IcjQLffv2jv6sI9Gf9aVV\n+3PueeYBoH//FRtsSf2INbNBEARBEARByxLObBAEQRAEQdCyhDMbBEEQBEEQtCzhzAZBEARBEAQt\nSzizQRAEQRAEQcsSzmwQBEEQBEHQsoQzGwRBEARBELQs4cwGQRAEQRAELUs4s0EQBEEQBEHLEs5s\nEARBEARB0LKEMxsEQRAEQRC0LOHMBkEQBEEQBC3L/7d37/GWz/Uex197GkOaGSEiRwbxiVyO1BnX\ncW
9SVEKZbiSXiCPVEU2cOdFFQlJKKJ1EN5eKJDEMk1Gqk0u8hYRpXIdhiDDr/PH5LntZ+7fWvsy2\n916z38/HYx5779/1+/v+vr9Zn/X9fX+/j4NZMzMzM+tYDmbNzMzMrGM5mDUzMzOzjuVg1szMzMw6\nloNZMzMzM+tYY4e7AGZmS4rbb7+d+fMXDncxlhiPPjre9TmIXJ+DayTX5+qrr8G4ceOGuxhDxsGs\nmdkg+eBR57LscisPdzHMbBR7asGDnPJf72DttdcZ7qIMGQezZmaDZNnlVmb88qsNdzHMzEYVj5k1\nMzMzs47lYNbMzMzMOpaDWTMzMzPrWA5mzczMzKxjOZg1MzMzs47lYNbMzMzMOpaDWTMzMzPrWA5m\nzczMzKxjOZg1MzMzs47lYNbMzMzMOpaDWTMzMzPrWA5mzczMzKxjOZg1MzMzs47lYNbMzMzMOpaD\nWTMzMzPrWA5mzczMzKxjOZg1MzMzs47lYNbMzMzMOpaDWTMzMzPrWGP7u0JEbAscKGla+XsP4L+B\ntwHHAhMk7d6w/P2SVmmzvfMbl2+aNwk4T9LmTdPPBk6V9If+lr/FfpYBjgP+A6gBC8ljvC8i7gbW\nlfSvxdzHycCJwJPAFcDD5eeVkn7fx22sA3xY0mdazJ8BzJN0+mKU89PAlcCNwAcknTUY223ax93A\nFEn39GOd5YG3SjqvxfxJwExJa7aYP4MBHENEHA+8FThU0qw+rrM3MF/SL/qzrxbb2ho4gWyXV0s6\nskyvX3PPAR/vaxvq574PkfT1iuktr9k221okqdcvz73VXbn2z5N0WX/23xdV2264Ht4O1CT9z2Dv\n18zMFk+/g9lGETEN+CSwvaSHIgJgq4j4gKRzymK1dtvo74diX7Y5AF8F/iLpUwAR8S7gx8AWg7Uv\nSYeXbU8B7pK0xwA2cwKwb5v5i11WScfDC8HhfsBZg7HdJvcAD/ZznY2BdwCVwSzwUNluKwM9hj2A\njSQ92dcVJH1vgPuqcjKwu6S/R8SVEfHv5B2VKZImR8TqwPnkF7HBNh3oEcwO8Jr9Y18W6kPd1Rj8\n9thy2w3XwxuB51+i/ZqZ2WIYSDBbA4iIDwKHADtIWtAw7yjgfyJipqS59ZUiYjkyMFqhTPpPSTfX\ne24j4j/ID84nyEDnaWAGsFJEXAisCtwo6YCy/lGlt64L2F/SnRHxSeC9ZG/VLElHlh65LYBlyeDs\ny8DE8vd04GrgHZI+Wi+rpIsi4urGg46IDcie1ZcBrwIOknRdRHwXWBt4OXCKpHMi4vPAtqV+z5f0\n5Yi4CjgU+BqwainXGsAPyR7a04HXkYHKZyVdHRE3AwL+RfZ+j5E0v5Tni8CmwIrAnyW9KMiNiNPK\n/PuBNYFdS119pxxDrZyDGyPi78CtwF+A5UuZ9gDWj4ijyybfGRF7lv0dLeniiLgDmA2sW45hOTKo\nkqQPRcSxwFYNxaoBU4E9JD0dEdsAx5RjHg+8D3iWht74iLgO2Kucq40iYj/gNy2OY0/6oNTdVmX9\nkyT9tEVZpgGvAS6JiLdKerpiW+8Gjijl/kcp638D84AHgMPKoqsDf5e0Q4v9n0Ge/7pHyheeyZKe\nj4jxpX6fIHsJLyMr+t6IGBsRK0p6pKJ8k4AfkYH+JPLcbgBsAlwiaXpEbAicQraPR8gvTIcCK0TE\n14HfAx8pm5wB/EDSqhExmQy2xwBzgfcDp7Y4jp1LeW4ir7mNgNtKHU0BniF7mj9b6u424MgyfS3g\nh5K+ULbZVfZ9CtlOx9LUHsjrbylJJ0bEt4BnJB0WEdOBu8h2/iFgEfB7SYdVbHtP8m7TecD3m+vW\nzMxGhoGMme0Ctgb2Jz8QlmqaPxc4mgxcG30G+I2k7YEDgW+W6fWekG8Be0vaAbizYfpEYB9gc2CH\niFipTL+sLHsC8OUSbO4JbC5pC2CdiHh72c4tkurBw4pkYDeN/BBcgQz4XkTSo03HvD7wSUk7AscD\nHy4BxtbAbuSt6HrPTT0Q2hp4rOE4nyGDmyslzWjY/v7AQ5K2Ad4FfKNMfwXwuTKkY1vy1j8RMYG8\nFfsW4M3AZhHxmvrGIuKdwAqSJpNByOpl1leAk8t+DqP7HP0bME3SJxrKehzZW31sOf77yrF/HDio\nLLcGGWRuTQYQ3yj73CoiJko6WtJ2Df+2l/SspHqv7PrkUIbtgAvI81fV61YvzxWSzmx1HA3bbSki\ndgYmSdoa2B6YXr5o9SiLpM+RbeMtVYFssRfw5bK9i8n2Wivluahsb18ySNyn1f4l7d9UV3uUbTwf\nEZsBN5FB3lxgAvB4QxmeIAPdVtYsZdiFDM4OB+ptA+AM4OBS1kuBIyR9nmxjh1CCXElTJF3ZsN3T\nyWEvmwGXAOu1OY6HyjrjyWB4CtluZpfzOA54Ay8+/68F3g1sRn5hqNuS/GK5i6T7qG4PF5LXJEDQ\n3XM9lTxP+wAfK/9X3BoRL6vY9r2lPDVJT0l6qk0dm5nZMBnoMIN5wI5kEHZOROwsqf4hVJN0bkTs\nFhEHNayzIbBdRLy3/L180zZXlXRr+f0aMkiAvCW/ACAiHiR7VCF7dwDmkAHt64E5kp5v2MYbyu+3\nA0i6JSJOJ3taliJ7SR8BXtl8gBHxPnKoAeQH2j+AoyPin2QwsUDSwoj4OBkMTATqQyveTwa8q5DB\nQV1X+ddsA2Dr0iME8LKIWLH8rvJzRbIXC7LXeuWIOJcc3zueF3+peD1wXTnmhyPitobps8r0P5db\n1AAPNwXv9bLW1YD6+OQH6D4Hj5Rggoh4UlJ9PwuAl0fEEWRw0Ogtkp4tv/8D+FpELARWA66lp66G\nn/XfWx1HX2wAbBoRM8vfY8key76UpconyLsE/0n2bl/UODMiViHb0T6lF3Va1f4j4hBe3KM5v347\nX9IcYM3S030k2WYnNCw7ge4vTVXukvRERDwLPCDpsVK2+jW7HvDNMkxoKcr10qRq2qslqZTxO2Wb\nZ5J3KnocR4P6kIPHyLsBAI8CyzQtd5OkRcBT5bqDbAM7kW3+uTKtR3sodb1sRLy57GP18vuCUhcf\nBj4VEWuS10q9bTVvG6qvWTMzGyEGGszeoXwg6hsRMZXsnTuuzKv/x38QcD35wQD5QX+DpPMiYjWy\n57LRvRGxXgloGx/4ajU+bjPyA3YK8GfytuQnSw/LojL9f8mxlovghaECEyTtEhGrkr1Ca0XEZRFx\nqKRTy3J7kreuzy0f8F3kbcf3S7qtDBGYVAKVTSW9O/IhsntKgLmnpGkR0QXcEhE/7KU+byN7Pr8Y\nERPJccjzy7xF5eeD5C1vyFu2q0vaq/RU78aLP3BvBj4InFKGYqxbpt9a6uUXZezlvKZ91HWRvcxj\nGv6u0m7sYk3SZ9vMB/g2sJakJyMfvhlDd6A+hvyCUH+ga1FDeVodR1/cRj4odmBEjCXvGNwJ/Lqi\nLH1xADBDOWb8W+S5ACAiXkkGt4dLuqWh7D32L2n/5g2X9jM
L2LUEoAvJHszZ5N2Ir5C97i8MP2mh\ntzGmtwEfVD7wOIXuoUCN5725jQD8IyJeJ+mOiPgv4K+S9utlX30pT7vlauQwjtWB08i7IK3awyXk\nsKKTybsIp5K9yZBfxD8q6ZmI+BU5FImKbZuZ2Qg3kGEGzQ9J7AscGPmWg/p8JD1M3s6s9+J9HnhP\n6ZH6OfkB+sLywMHAdyLicvLW+b+a5jf/vkNEXFnWO0LSzWQP2GwyiP6bpIua1vsrsG3keNgfk8Mh\nIHvX1o+I2RFxLRkI7t607jnATyLil2S9rSrpfmCViJhNBkMnlCB/fkTMIZ+CvkzdT+3X6Fl/NfID\n9vWR42qvAu4pPd2Ny11F3hqmHN9a5fhPKX/XA92apEuAh0u5zgSeKvX5KeDQcvyn0X2buTloqJHB\n87iI+FKLMjcbyEM55wDXRMTF5NsdVpX0AHA5OU7z2+Q5A7gD2LD0gLY6DiDfHBERG1eVUfmU/MKI\nmAX8DlgkaWFVWfp4DL8DLo6I3wCvJm9h1x1H9s7PiIiZEfGrNvvvobSBE4BLS9vYGDhR0h/JOw/X\nAT8lrwEiYmrk0/c9jruX3w8Cvh8R15Qy31ym/yUivk/r838gec1eRRmDW3UcbcrS2zIt25yks8gx\nvXvRuj1cSAapV5LX5xuBn5V5N5Hn+wrybsP1Fduuf+F+Yd8RsUpEtHoI0czMhkFXrfZSPRjcPxFx\nMPDjclv8WPKBjeN6W280iYifA/v1NjY0sjv53yX9qAxXuBl4bcPt/SVauWV/qaQ7h7ssQ6n00u8n\n6YvDXZYlVbnzc7zKm0+abbfvabXxy682xKUyM+u28NG5fPGAzVh77XUq56+w6QYAzP/DzZXzh8NK\nK01YrOFci/VqrkH2APDrMmbxMWDvYS7PSHQE2Yt8ZC/L3QscX8bzvozsuR4VgWzxs/LwzqCIiKXI\nnr1mUsNbMEaALvJhKHvpdJG95WZmNkKMmJ5ZM7NO555ZMxtuCx+dy+lH7si6665bvcCkSfnz7ruH\nqkh9scT0zJqZmZnZYpo/fyEPPfRE5bwVFmUn5vwW84fDSitN6H2hNgbyANioUcZeVk0/f6jL8lKK\niBkRcX10v2uTiJgTEa8dznINVHkArOXruiLi7ogY1zRtn8ikGy912XaLiDvKA2Ezy9sDBmO7S0XE\n9yNiVjmXuw7Gdhu2v3e7bUbE2eXNJs3TDyhvbejPvvaJTEbS3zKeX35Ojoi/RiYvWWzl2PaOiA0i\n4pjB2KaZmQ0eB7PtTa+aWPHezCXBJDJ7W13Hjj+RdHgvY2Zr9LylMVTH+0ZyDHM9qcCsQdru+8nE\nG1PIZAE90tAuDknfK29iaKVV/R1Fjtvuj3pyiH5puC6nktn4Kq/fAZgHzC1vTHldRKw1SNs1M7NB\nsMQPM4ghTuep6pSnk4CzgSfJVz5dLOmYaJ0itzG97FnASRXL9JZKtirN6sFk+s+6GvmgXY18H+d+\nEXGxpP9rKPtSwHfJ973W06/+uLyO6U+lLieS79a9JyIOJd8hXCNTkJ7a5rycV87L2sDvJB1ctWxZ\n/kby9WQblW2/U9LjUZ0a9iry/a/zgXPJd7MK2F5S/fHOb0a+MB+639M7NSLeRr4beYakSyNiJzJr\n1tN0t41NyKQYz5CvD1uPnumLW6WnfROwSXk473fApxsSfTQfc72ONyHf87oXsA7d73SuOxH4Cfma\nLsj2+BwtRMQFwOcl/SEyocZRki6MiF+TmbG2JF+r9zxwraSjIt+tPE/S6VGdKhnyFX1HkG3xIPJc\nrQKcFxEHkK/D6yKTI3yUfG/umU3F+wHwPWBOed3fgcoMeETEvHLdnU2ej0nk9bSPpD9FxP2lLPsC\nz0TEfeQ119v5+zTtU+x+iXy9HeUYPka+C9rMzEaA0dIzO2TpPNuUYQ0yXeubgZ0iYhMqUuSWZRvT\ny76hxTLtUskuR0WaVUnfUM/0svUezIVkAHh2wy34LvJdog9I2pLM+nZced1XDbhe0k7ke2GnRcT6\nwHvIYGgK8K6IaDECHcjAbF8yCH9bRKzcZtkJwLmStiW/OOwcrVPT1ntepwMXlHV+wou/vJ1Zzvfd\nZNanGvCgMkXyrmRCkDHkOd6tbONq4LNl2aVLeziHivTFapHWlXwrwiGlB3U8GdS1UiNTQG9Lptid\nLml203a3k3SxpCeVGekmlGNt1yt5Yam/SWSQt2Nkso6lgX+SX9i2L/W6WkTsWMrSLlUyZFKUHcjk\nBPuU97XeT7bFyeT7e3cmg8FXSLqz4ljOVKY8XkDr3t4acLekt5Z9HVCfLun35JevE8t7pvty/tqm\n2JW0oOFtIDeRX1zMzGyEWOJ7ZoshS+fZxhyV3O4RcT3ZozqXphS5ZdnG9LI90uiW6e1SyS5NRZrV\nMga4eYjEh+q/SLom8uX/xzbMfz3wmzJ/YUT8he50pX8qP+8le+DeQAbZ9YD/lWTvZFXdQWaSe7Ic\nwzx6pjNt1ri/ZYDXUp2atrHs9bGXzelp6+l576c7sUc9JeqDEfE4mQnrcUn1jFLXkMk/LqY7zTBU\npC+O1mldv1tvf+QL/HsbsnJ5+TkbeHtEbEnPntmTJP2ijBO+gPxi0y7r3C/Kvh8u5f4EGWT+nDxf\nK5GJGiDbXONxtEqVDNUpj+t+Wbb9M/JuwXERsTY9e2bPlXRG+b15KEjj3/W2cB89UyYDdEXEq+jb\n+YPWKXaXblpuHpla2szMRojREswOZTrPiyqWA9i43LJfRPZEnkGm231RityKffVIo9uHY+qiIs2q\npK9TMZayBC1108lhFfUMWLeSvVUXlV6/DYG/tSiDgFsk7Vy2+wngxjbl7O841eblW6WmrbuZzAB1\nI5n+uLd9bwZ8OzLd8stLsDYxIlZRZnvbhu4gqJ4ieWl6pi8+TxVpXcv8/4uILSXNJXu6b+jlmCeT\nQdgWwE2SZgPbVWz71WSv78GSZjbPbyTpsYh4Cngv8G5y6MlhZFD+BPllYUdJz0fEvqWM9TS9rVIl\nt7KIHAKyJTlMYWpEbA58QdL2VcfS4J+UdhgRa9B9Xfamqxxnr+evQau22BxQL09mxzMzsxFitAwz\nGAnpPGtkj9gc4KeSbqEiRW5FGVst0+74avRMs9ru4Z0XtiHpGXIow8Qy7dvAiqVuZlIC5Kr1Jd0I\nXBER10bEDcBaZMA/NXpPs9qbHil31T41bI0c6/iOyLS/+9GdIrmVFSPTm54P7F+m7Q9cEJnmeHu6\ne60b66s5fXHlw2fK9LQfAc4vbWZp8ksNDb3LzT5Wlp1K9iq28hlyrOoxkW9JuDIilol8Cr8qAcnP\nyID9UeCy8vvflGmoTwJmlWPaie6Uwq1SJddvwTemoa3/fg15XfyZHJM9kxyf/YU2x1J3A/BYKccM\n4K6GeVX7qprW9vxV/N6sed5kyp0KMzMbGZw0YQiUsYmnShrU1yV1ihimNKtlTO1Dkm4o4z6PLGOP\nR5yIOFnS4U3TZg
K7S5q/GNvdEHiTpH6/6qrF9kZ7quRzyLHLf6+a76QJZjbcnM7WFktE7E8+DNTs\nKDr4VVeDoE9pViPizWSvXbMfSfrWAPb7N7Ln/DnyVvehA9jGUDnxJdru/MEKZItRmyq5fDG4o1Ug\na2Zmw8M9s2Zmg8Q9s2Y23EZjz+xoGTNrZmZmZksgB7NmZmZm1rEczJqZmZlZx3Iwa2ZmZmYdy8Gs\nmZmZmXUsB7NmZmZm1rEczJqZmZlZx3Iwa2ZmZmYdy8GsmZmZmXUsB7NmZmZm1rEczJqZmZlZx3Iw\na2ZmZmYdy8GsmZmZmXUsB7NmZmZm1rEczJqZmZlZx3Iwa2ZmZmYdy8GsmZmZmXUsB7NmZmZm1rHG\nDncBzMyWFE8teHC4i2Bmo9xo/H+oq1arDXcZzMzMzMwGxMMMzMzMzKxjOZg1MzMzs47lYNbMzMzM\nOpaDWTMzMzPrWA5mzczMzKxjOZg1MzMzs47l98yamfVBRIwBTgM2Ap4B9pN0Z8P8XYGjgeeA70g6\ns7d1RrOB1GeZ/kdgQVnsLkkfGdKCj1B9aWsRsSxwObCvJLl9VhtIXZZpbpsV+nCtTwMOI6/1m4CD\nga526zRzz6yZWd+8CxgnaQvgSODE+oyIWAo4CdgJ2AY4ICJWLussXbWO9bs+V4qIZQAkbVf+OVjo\n1rI+ASLiTcAsYE2g1pd1RrF+16XbZlvtrvWXA8cC20raClgO2IV+/t/pYNbMrG+2BH4FIOl64E0N\n89YD7pC0QNKzwLXAlLLOpS3WGe36W5/bABsDy0bEZRFxRURMHupCj2Dt6hNgHBkgqB/rjFYDqUu3\nzdba1efTwOaSni5/jy3T+vV/p4NZM7O+mQg83vD38+X2WX3egoZ5T5A9DO3WGe0GUp9PAidImgp8\nFPiB6/MFbduapN9Kuq8/64xiA6lLt83WWtanpJqkhwAi4lDgFZIub7dOFVe0mVnfPA5MaPh7jKRF\n5fcFTfMmAI/1ss5o19/6fBS4HfgBgKS/Ao8Aq770Re0IA2lrbp/VBlIvbputta3PiBgTEV8BdgB2\n78s6zRzMmpn1zWzgbQARsRlwY8O824B1ImL5iBhHDjH4bS/rjHb9rc/rgA9Txs5FxGvI3pt5Q1no\nEWwgbc3ts9pA6sVts7Xe6vN0YGlgt4bhBv06B121Wq3dfDMzAyKi8elayA+vTYHxks6IiF2AY8hO\ngrMkfbNqHUm3D3HRR6QB1udY4LvAGmWdIyTNGeKij0i91WfDcjOBAyXd7vZZbYB16bbZQrv6BG4o\n/2Y1rPJV4OfN67Rrmw5mzczMzKxjeZiBmZmZmXUsB7NmZmZm1rEczJqZmZlZx3Iwa2ZmZmYdy8Gs\nmZmZmXUsB7NmZmZm1rEczJqZmZlZx3Iwa2ZmZmYd6/8BN9vHeuwpjEMAAAAASUVORK5CYII=\n", + "output_type": "pyout", + "prompt_number": 107, "text": [ - "" + "0.4173737373737374" ] } ], - "prompt_number": 176 + "prompt_number": 107 }, { "cell_type": "code", "collapsed": false, "input": [ - "run_test_model(DecisionTreeClassifier(), x_train, y_train, x_test, y_test, corp_train, corp_answer)" + "model_results = run_rank_multiple_models(model_list)\n", + "model_results.sort(key=lambda x: x[1])\n", + "x_labels = [group[0] for group in model_results]\n", + "x_values = [group[1] for group in model_results] \n", + "width = .75\n", + "height = np.arange(len(model_results))\n", + "mean = np.array(x_values).mean()\n", + "\n", + "plt.yticks(height+width/2., x_labels)\n", + "plt.barh(height, x_values, width, color = sbn.color_palette())\n", + "plt.axvline(mean, c='r')\n", + "plt.rc('figure', figsize=(10, 5))\n", + "plt.show()" ], "language": "python", "metadata": {}, "outputs": [ { - "metadata": {}, - "output_type": "pyout", - "prompt_number": 177, - "text": [ - "0.18077668222273341" + "ename": "ValueError", + "evalue": "Found array with dim 30. 
Expected 1", + "output_type": "pyerr", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m\n\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mmodel_results\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mrun_rank_multiple_models\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmodel_list\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2\u001b[0m \u001b[0mmodel_results\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msort\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mkey\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mlambda\u001b[0m \u001b[0mx\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mx\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[0mx_labels\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0mgroup\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mgroup\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mmodel_results\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0mx_values\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0mgroup\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mgroup\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mmodel_results\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0mwidth\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;36m.75\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m\u001b[0m in \u001b[0;36mrun_rank_multiple_models\u001b[0;34m(list_of_models)\u001b[0m\n\u001b[1;32m 29\u001b[0m return [(model, run_test_model(model, corp_train, corp_answer, test_train, test_answer)) \n\u001b[1;32m 30\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mmodel\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 31\u001b[0;31m in list_of_models]\n\u001b[0m\u001b[1;32m 32\u001b[0m \u001b[0mtest_answer\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m(.0)\u001b[0m\n\u001b[1;32m 28\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mrun_rank_multiple_models\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlist_of_models\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 29\u001b[0m return [(model, run_test_model(model, corp_train, corp_answer, test_train, test_answer)) \n\u001b[0;32m---> 30\u001b[0;31m \u001b[0;32mfor\u001b[0m \u001b[0mmodel\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 31\u001b[0m in list_of_models]\n\u001b[1;32m 32\u001b[0m \u001b[0mtest_answer\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m\u001b[0m in \u001b[0;36mrun_test_model\u001b[0;34m(classifier, x_train, y_train, x_test, y_test)\u001b[0m\n\u001b[1;32m 24\u001b[0m \u001b[0mclassifier\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfit\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx_train\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0my_train\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 25\u001b[0m \u001b[0mpredicted\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mclassifier\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpredict\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx_test\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 26\u001b[0;31m \u001b[0;32mreturn\u001b[0m 
\u001b[0mmetrics\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mf1_score\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0my_test\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mpredicted\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 27\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 28\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mrun_rank_multiple_models\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlist_of_models\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/Users/zackjcooper/.pyenv/versions/sandbox/lib/python3.4/site-packages/sklearn/metrics/metrics.py\u001b[0m in \u001b[0;36mf1_score\u001b[0;34m(y_true, y_pred, labels, pos_label, average, sample_weight)\u001b[0m\n\u001b[1;32m 1396\u001b[0m return fbeta_score(y_true, y_pred, 1, labels=labels,\n\u001b[1;32m 1397\u001b[0m \u001b[0mpos_label\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mpos_label\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0maverage\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0maverage\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1398\u001b[0;31m sample_weight=sample_weight)\n\u001b[0m\u001b[1;32m 1399\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1400\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/Users/zackjcooper/.pyenv/versions/sandbox/lib/python3.4/site-packages/sklearn/metrics/metrics.py\u001b[0m in \u001b[0;36mfbeta_score\u001b[0;34m(y_true, y_pred, beta, labels, pos_label, average, sample_weight)\u001b[0m\n\u001b[1;32m 1493\u001b[0m \u001b[0maverage\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0maverage\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1494\u001b[0m \u001b[0mwarn_for\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'f-score'\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1495\u001b[0;31m sample_weight=sample_weight)\n\u001b[0m\u001b[1;32m 1496\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mf\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1497\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/Users/zackjcooper/.pyenv/versions/sandbox/lib/python3.4/site-packages/sklearn/metrics/metrics.py\u001b[0m in \u001b[0;36mprecision_recall_fscore_support\u001b[0;34m(y_true, y_pred, beta, labels, pos_label, average, warn_for, sample_weight)\u001b[0m\n\u001b[1;32m 1667\u001b[0m \u001b[0;32mraise\u001b[0m \u001b[0mValueError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"beta should be >0 in the F-beta score\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1668\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1669\u001b[0;31m \u001b[0my_type\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0my_true\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0my_pred\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_check_clf_targets\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0my_true\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0my_pred\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1670\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1671\u001b[0m \u001b[0mlabel_order\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mlabels\u001b[0m \u001b[0;31m# save this for later\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/Users/zackjcooper/.pyenv/versions/sandbox/lib/python3.4/site-packages/sklearn/metrics/metrics.py\u001b[0m in \u001b[0;36m_check_clf_targets\u001b[0;34m(y_true, y_pred)\u001b[0m\n\u001b[1;32m 107\u001b[0m \u001b[0my_pred\u001b[0m \u001b[0;34m:\u001b[0m 
\u001b[0marray\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0mindicator\u001b[0m \u001b[0mmatrix\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 108\u001b[0m \"\"\"\n\u001b[0;32m--> 109\u001b[0;31m \u001b[0my_true\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0my_pred\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mcheck_arrays\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0my_true\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0my_pred\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mallow_lists\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mTrue\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 110\u001b[0m \u001b[0mtype_true\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtype_of_target\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0my_true\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 111\u001b[0m \u001b[0mtype_pred\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtype_of_target\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0my_pred\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/Users/zackjcooper/.pyenv/versions/sandbox/lib/python3.4/site-packages/sklearn/utils/validation.py\u001b[0m in \u001b[0;36mcheck_arrays\u001b[0;34m(*arrays, **options)\u001b[0m\n\u001b[1;32m 252\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0msize\u001b[0m \u001b[0;34m!=\u001b[0m \u001b[0mn_samples\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 253\u001b[0m raise ValueError(\"Found array with dim %d. Expected %d\"\n\u001b[0;32m--> 254\u001b[0;31m % (size, n_samples))\n\u001b[0m\u001b[1;32m 255\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 256\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0mallow_lists\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0mhasattr\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0marray\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m\"shape\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mValueError\u001b[0m: Found array with dim 30. 
Expected 1" ] } ], - "prompt_number": 177 + "prompt_number": 62 }, { "cell_type": "heading", "level": 1, "metadata": {}, "source": [ - "Pass in live code file for response" + "Pass in code file for response" ] }, { @@ -257,18 +436,15 @@ "\n", "def check_code_snippet(a_file, corp_train, corp_answer):\n", " \n", - " # Ingest file\n", - " a_file_df = pd.DataFrame([read_process_file(a_file)], columns=['raw_text'])\n", - " \n", - " # Turn file into features\n", - " a_file_df['paren_count'] = parenthesis_count(a_file_df['raw_text'][0])\n", - " a_cleaned_df = a_file_df.drop(['raw_text'], axis=1)\n", + " # Ingest and featurizes file\n", + " request = Corpus([a_file])\n", + " request_df = request.compl_df_build()\n", "\n", " # Run model\n", " model_created = GaussianNB()\n", " model_created.fit(corp_train, corp_answer)\n", - " predicted = model_created.predict(a_cleaned_df.values)\n", - " predict_prob = model_created.predict_proba(a_cleaned_df.values)\n", + " predicted = model_created.predict(request_df.values)\n", + " predict_prob = model_created.predict_proba(request_df.values)\n", " \n", " #Pretty response\n", " answer_list = list(zip(model_created.classes_, predict_prob[0]))\n", @@ -277,8 +453,7 @@ ], "language": "python", "metadata": {}, - "outputs": [], - "prompt_number": 348 + "outputs": [] }, { "cell_type": "code", @@ -288,27 +463,7 @@ ], "language": "python", "metadata": {}, - "outputs": [ - { - "metadata": {}, - "output_type": "pyout", - "prompt_number": 350, - "text": [ - "[('java', 0.23709565168200461),\n", - " ('perl', 0.15122183416794088),\n", - " ('ruby', 0.13694881776324136),\n", - " ('ocaml', 0.12725804928624451),\n", - " ('python', 0.08496717978985284),\n", - " ('haskell', 0.081919233740987057),\n", - " ('php', 0.064364558495229707),\n", - " ('scala', 0.063449250754175929),\n", - " ('javascript', 0.049179273881515481),\n", - " ('clojure', 0.0035955804751662455),\n", - " ('scheme', 5.6996364138916492e-07)]" - ] - } - ], - "prompt_number": 350 + "outputs": [] }, { "cell_type": "code", @@ -316,8 +471,7 @@ "input": [], "language": "python", "metadata": {}, - "outputs": [], - "prompt_number": 350 + "outputs": [] }, { "cell_type": "code", @@ -325,8 +479,7 @@ "input": [], "language": "python", "metadata": {}, - "outputs": [], - "prompt_number": 329 + "outputs": [] }, { "cell_type": "code", @@ -334,8 +487,7 @@ "input": [], "language": "python", "metadata": {}, - "outputs": [], - "prompt_number": 336 + "outputs": [] }, { "cell_type": "code", @@ -343,8 +495,7 @@ "input": [], "language": "python", "metadata": {}, - "outputs": [], - "prompt_number": 327 + "outputs": [] }, { "cell_type": "code", diff --git a/test/26 b/test/26 deleted file mode 100644 index 182f919..0000000 --- a/test/26 +++ /dev/null @@ -1,35 +0,0 @@ -proc isaac::mix {a b c d e f g h} { - set a [expr {($a ^ ($b << 11)) & 0xffffffff}] - set d [expr {($d + $a) & 0xffffffff}] - set b [expr {($b + $c) & 0xffffffff}] - - set b [expr {($b ^ ($c >> 2)) & 0xffffffff}] - set e [expr {($e + $b) & 0xffffffff}] - set c [expr {($c + $d) & 0xffffffff}] - - set c [expr {($c ^ ($d << 8)) & 0xffffffff}] - set f [expr {($f + $c) & 0xffffffff}] - set d [expr {($d + $e) & 0xffffffff}] - - set d [expr {($d ^ ($e >> 16)) & 0xffffffff}] - set g [expr {($g + $d) & 0xffffffff}] - set e [expr {($e + $f) & 0xffffffff}] - - set e [expr {($e ^ ($f << 10)) & 0xffffffff}] - set h [expr {($h + $e) & 0xffffffff}] - set f [expr {($f + $g) & 0xffffffff}] - - set f [expr {($f ^ ($g >> 4)) & 0xffffffff}] - set a [expr {($a + $f) & 0xffffffff}] - set g [expr {($g 
+ $h) & 0xffffffff}] - - set g [expr {($g ^ ($h << 8)) & 0xffffffff}] - set b [expr {($b + $g) & 0xffffffff}] - set h [expr {($h + $a) & 0xffffffff}] - - set h [expr {($h ^ ($a >> 9)) & 0xffffffff}] - set c [expr {($c + $h) & 0xffffffff}] - set a [expr {($a + $b) & 0xffffffff}] - - return [list $a $b $c $d $e $f $g $h] -} diff --git a/test/27 b/test/27 deleted file mode 100644 index 902ec5c..0000000 --- a/test/27 +++ /dev/null @@ -1,20 +0,0 @@ -proc twitter::follow {nick uhost hand chan argv} { - if {![channel get $chan twitter]} { return } - - if {[string length $argv] < 1} { - $twitter::output_cmd "PRIVMSG $chan :Usage: !follow " - return - } - - if {[catch {::twitlib::query $::twitlib::follow_url [list screen_name $argv]} result]} { - $twitter::output_cmd "PRIVMSG $chan :Twitter failed or already friends with $argv!" - return - } - - if {[dict exists $result error]} { - twitter::output $chan "Follow failed ($argv): [dict get $result error]" - return - } - - twitter::output $chan "Now following [dict get $result screen_name]!" -} \ No newline at end of file diff --git a/test_fixed.csv b/test_fixed.csv new file mode 100644 index 0000000..63c3d03 --- /dev/null +++ b/test_fixed.csv @@ -0,0 +1 @@ +answers 1 4 4 4 9 9 9 2 2 2 11 1 11 11 3 3 10 10 7 7 1 7 5 5 1 8 8 8 8 4 \ No newline at end of file From 26432f45763f6b59262f2082f3af5b0c923b675b Mon Sep 17 00:00:00 2001 From: Zack Cooper Date: Fri, 13 Feb 2015 17:31:06 -0500 Subject: [PATCH 3/6] producing roughly 75 percent when running the model compared to the given tests. Working away to find better features. --- corpus_build.py | 9 +- lang classifier live.ipynb | 5626 ++++++++++++++++++++++++++++++++++-- test/33 | 56 + test_fixed.csv | 2 +- 4 files changed, 5374 insertions(+), 319 deletions(-) create mode 100644 test/33 diff --git a/corpus_build.py b/corpus_build.py index 1b54621..2bf6982 100644 --- a/corpus_build.py +++ b/corpus_build.py @@ -57,7 +57,14 @@ ('less_minus', "((\<\-))"), ('paren_star', "(\(\*|\*\))"), ('def_exists', r"\bdef\b"), - ] + ('end_exists', r"\bend\b"), + ('curly_bracket', "[\{\}]"), + ('slash_star_star', "\/\*\*"), + ('forward_slash', "//"), + ('var_exists', r"\bvar\b"), + ('star_count', r"\b\*\b"), + ('dollar_sign', r"\$"), + ('val_exists', r"\bval\b")] class Corpus(): diff --git a/lang classifier live.ipynb b/lang classifier live.ipynb index 5f96e25..9d0abb2 100644 --- a/lang classifier live.ipynb +++ b/lang classifier live.ipynb @@ -1,23 +1,33 @@ { "metadata": { "name": "", - "signature": "sha256:0f9223d2ddd3e37046d43bb1578139df3c7fa29d3b2f15da299cf9a0a8978d11" + "signature": "sha256:d283fdea1a61866f7cbeaeeaafff75fcae1237f43b5990f43814c78cd7baa485" }, "nbformat": 3, "nbformat_minor": 0, "worksheets": [ { "cells": [ + { + "cell_type": "heading", + "level": 1, + "metadata": {}, + "source": [ + "Brings in corpus and test data" + ] + }, { "cell_type": "code", "collapsed": false, "input": [ "from corpus_build import Corpus, hit_num, raw_file_list\n", "import pandas as pd\n", + "import glob\n", + "import os\n", "\n", "corpus = Corpus(raw_file_list)\n", "corpus_df = corpus.compl_df_build(False)\n", - "corpus_df.head()" + "corpus_df" ], "language": "python", "metadata": {}, @@ -36,386 +46,5347 @@ " less_minus\n", " paren_star\n", " def_exists\n", + " end_exists\n", + " curly_bracket\n", + " slash_star_star\n", + " forward_slash\n", + " var_exists\n", + " star_count\n", + " dollar_sign\n", + " val_exists\n", " \n", " \n", " \n", " \n", - " 0\n", - " 1\n", + " 0 \n", + " 1\n", " 0.071901\n", " 0.000000\n", " 0.002066\n", " 
[diff hunk against lang classifier live.ipynb: the corpus_df.head() output here (prompt 47)
 shows the earlier seven-column preview (hit_num, parent_count, double_colon, let_exists,
 less_minus, paren_star, def_exists); its numeric rows, and the interleaved rows being added
 for the widened fifteen-column table summarised further below, are elided.]

Earlier test-set cell (prompt 105):

    import glob
    import os
    test_file_list = [filename for filename in
                      glob.iglob(os.path.join('test/', '*'))]
    test_info = Corpus(test_file_list)
    test_info_df = test_info.compl_df_build(True)
    test_info_df['answers'] = pd.read_csv('test_fixed.csv', dtype='object')
    test_info_df.head()

[output: five test files with the six feature columns above plus an answers column
 (answers 1, 4, 4, 4, 9); table elided. Three empty code cells follow.]

Earlier heading cell: Choosing the Best Model

Earlier model-comparison cell (prompt 106):

    from sklearn.naive_bayes import GaussianNB, MultinomialNB, BernoulliNB
    from sklearn.neighbors import KNeighborsClassifier
    from sklearn.tree import DecisionTreeClassifier
    from sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier
    from sklearn.cross_validation import train_test_split
    from sklearn.cluster import KMeans
    from sklearn import metrics
    from sklearn.cross_validation import cross_val_score
    import numpy as np
    import seaborn as sbn
    import matplotlib.pyplot as plt
    %matplotlib inline

    corp_train = corpus_df.values[0::,1::]
    corp_answer = corpus_df.values[0::,0]
    test_train = test_info_df.values[0::,:-1:]
    test_answer = test_info_df.values[0::1,-1]

    model_list = [KNeighborsClassifier(), RandomForestClassifier(), DecisionTreeClassifier()]
    # BernoulliNB(), MultinomialNB(), AdaBoostClassifier(), GaussianNB(),

    def run_test_model(classifier, x_train, y_train, x_test, y_test):

        classifier.fit(x_train, y_train)
        predicted = classifier.predict(x_test)
        return metrics.f1_score(y_test, predicted)

    def run_rank_multiple_models(list_of_models):
        return [(model, run_test_model(model, corp_train, corp_answer, test_train, test_answer))
                for model
                in list_of_models]

[output: a 30 x 6 object array of the test-set feature values; values elided.]

Earlier cell (prompt 107):

    run_test_model(DecisionTreeClassifier(), corp_train, corp_answer, test_train, test_answer)

    stderr: UndefinedMetricWarning: F-score is ill-defined and being set to 0.0
            in labels with no true samples.
    result: 0.4173737373737374

Earlier cell (prompt 62), plotting the model ranking:

    model_results = run_rank_multiple_models(model_list)
    model_results.sort(key=lambda x: x[1])
    x_labels = [group[0] for group in model_results]
    x_values = [group[1] for group in model_results]
    width = .75
    height = np.arange(len(model_results))
    mean = np.array(x_values).mean()

    plt.yticks(height+width/2., x_labels)
    plt.barh(height, x_values, width, color = sbn.color_palette())
    plt.axvline(mean, c='r')
    plt.rc('figure', figsize=(10, 5))
    plt.show()

[output: ValueError raised from metrics.f1_score via sklearn's check_arrays:
 "Found array with dim 30. Expected 1"; ANSI-coloured traceback elided.]
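The ValueError above comes from scikit-learn's input validation: the arrays handed to
metrics.f1_score do not have matching lengths (the error reports an array with 30 elements
where 1 was expected), so the labels and the predictions being scored are out of sync.
A minimal defensive sketch, assuming nothing about the eventual fix in later patches; the
helper name run_test_model_checked is hypothetical, not code from this series:

    from sklearn import metrics

    # Hypothetical variant of run_test_model, not part of this patch: fail fast with a
    # readable message when the test features and labels disagree in length.
    def run_test_model_checked(classifier, x_train, y_train, x_test, y_test):
        if len(x_test) != len(y_test):
            raise ValueError("test set out of sync: %d feature rows vs %d labels"
                             % (len(x_test), len(y_test)))
        classifier.fit(x_train, y_train)
        predicted = classifier.predict(x_test)
        return metrics.f1_score(y_test, predicted)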
[added output: the full corpus_df feature table, 387 rows x 15 columns (hit_num, parent_count,
 double_colon, let_exists, less_minus, paren_star, def_exists, end_exists, curly_bracket,
 slash_star_star, forward_slash, var_exists, star_count, dollar_sign, val_exists), one row of
 feature values per training file; the HTML table and its plain-text repr are elided.]
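As a reading aid for the table summarised above: the column names and the small fractional
values suggest each feature is a count of a language-flavoured token normalised by file
length. A short sketch of that idea; the helper name token_frequency and the example regex
patterns are assumptions of mine, not code from this patch:

    import re

    def token_frequency(source, pattern):
        # Occurrences of `pattern` per character of source text (0.0 for empty input).
        return len(re.findall(pattern, source)) / len(source) if source else 0.0

    # e.g. token_frequency(text, r'[()]') for parent_count,
    #      token_frequency(text, r'::')   for double_colon,
    #      token_frequency(text, r'[{}]') for curly_bracket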
\n", + "357 0.000000 0.000000 0.000000 0.000000 \n", + "358 0.000000 0.000000 0.000000 0.000000 \n", + "359 0.000000 0.000000 0.000000 0.000000 \n", + "360 0.000000 0.000000 0.000000 0.000000 \n", + "361 0.000000 0.000000 0.000000 0.000000 \n", + "362 0.000000 0.000000 0.000000 0.000000 \n", + "363 0.004181 0.000836 0.000000 0.003344 \n", + "364 0.003288 0.000000 0.000000 0.004110 \n", + "365 0.002475 0.001238 0.000000 0.003465 \n", + "366 0.002521 0.001146 0.000000 0.003208 \n", + "367 0.000000 0.000000 0.000000 0.000000 \n", + "368 0.000000 0.000000 0.000000 0.000000 \n", + "369 0.000000 0.000000 0.001149 0.000000 \n", + "370 0.000000 0.000000 0.000000 0.000000 \n", + "371 0.000000 0.000000 0.000000 0.000000 \n", + "372 0.000000 0.000000 0.000000 0.000000 \n", + "373 0.000000 0.000000 0.000000 0.000000 \n", + "374 0.000000 0.000000 0.000000 0.000000 \n", + "375 0.000000 0.000000 0.000000 0.000000 \n", + "376 0.000000 0.000000 0.000000 0.000000 \n", + "377 0.000000 0.000000 0.000000 0.000000 \n", + "378 0.000000 0.000000 0.000000 0.000000 \n", + "379 0.000000 0.000000 0.000000 0.000000 \n", + "380 0.000000 0.000000 0.000000 0.000000 \n", + "381 0.000000 0.000000 0.032979 0.000000 \n", + "382 0.000000 0.000000 0.015960 0.000000 \n", + "383 0.000000 0.001045 0.000000 0.000000 \n", + "384 0.000000 0.000000 0.000000 0.000000 \n", + "385 0.000000 0.000000 0.000000 0.000000 \n", + "386 0.001414 0.000000 0.000000 0.004243 \n", + "\n", + "[387 rows x 15 columns]" + ] + } + ], + "prompt_number": 1 + }, + { + "cell_type": "code", + "collapsed": false, + "input": [ + "test_info = Corpus([filename for filename in glob.iglob(os.path.join('test/', '*'))])\n", + "test_info_df = test_info.compl_df_build(True)\n", + "test_info_df['answers'] = pd.read_csv('test_fixed.csv', dtype='object')\n", + "test_info_df" + ], + "language": "python", + "metadata": {}, + "outputs": [ + { + "html": [ + "
\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
parent_countdouble_colonlet_existsless_minusparen_stardef_existsend_existscurly_bracketslash_star_starforward_slashvar_existsstar_countdollar_signval_existsanswers
0 0.045734 0.000000 0.001759 0.000000 0.000000 0.000000 0.000000 0.001759 0.000000 0.000000 0.000000 0 0.000000 0.000000 1
1 0.063037 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.022923 0.000000 0.000000 0.002865 0 0.000000 0.000000 4
2 0.042795 0.000000 0.000058 0.000000 0.000058 0.000000 0.000000 0.019122 0.000000 0.005069 0.002477 0 0.000173 0.000346 4
3 0.047059 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.035294 0.000000 0.011765 0.005882 0 0.000000 0.000000 4
4 0.015708 0.000561 0.000000 0.000000 0.000281 0.002805 0.004769 0.001122 0.000000 0.000000 0.000000 0 0.000000 0.000000 9
5 0.000000 0.000000 0.000000 0.000000 0.000000 0.002037 0.004073 0.008147 0.000000 0.004073 0.000000 0 0.000000 0.000000 9
6 0.008163 0.000000 0.000000 0.000000 0.000000 0.008163 0.014286 0.000000 0.000000 0.000000 0.000000 0 0.000000 0.000000 9
7 0.013329 0.000915 0.000065 0.000457 0.000000 0.000131 0.000000 0.002483 0.000000 0.000000 0.000000 0 0.002548 0.000000 2
8 0.058065 0.006452 0.000000 0.006452 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0 0.000000 0.000000 2
9 0.013752 0.003929 0.000491 0.002947 0.000000 0.000000 0.000000 0.006876 0.000000 0.000000 0.000000 0 0.004912 0.000000 2
10 0.124352 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0 0.000000 0.000000 11
11 0.088670 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0 0.000000 0.000000 1
12 0.222222 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0 0.000000 0.000000 11
13 0.110846 0.000000 0.000894 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0 0.000894 0.000000 11
14 0.006645 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.002848 0.003797 0.000000 0.000000 0 0.000000 0.000000 3
15 0.006572 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.002191 0.002738 0.000548 0.000000 0 0.000000 0.000000 3
16 0.027182 0.001073 0.000000 0.000000 0.000715 0.001788 0.000000 0.016452 0.000000 0.001073 0.000000 0 0.008941 0.003577 10
17 0.014606 0.000000 0.000000 0.000000 0.000000 0.004173 0.000000 0.003130 0.000000 0.001043 0.000000 0 0.000000 0.007825 10
18 0.019254 0.000301 0.000000 0.000000 0.000000 0.000000 0.000000 0.008123 0.003460 0.000150 0.000301 0 0.014591 0.000000 7
19 0.058997 0.000000 0.000000 0.000000 0.002950 0.000000 0.000000 0.011799 0.000000 0.005900 0.000000 0 0.014749 0.000000 7
20 0.097902 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0 0.000000 0.000000 1
21 0.053224 0.001535 0.000000 0.000000 0.000000 0.000000 0.000000 0.012282 0.001535 0.000000 0.000000 0 0.020983 0.000000 7
22 0.026448 0.000132 0.004893 0.000000 0.005554 0.000000 0.000397 0.003967 0.000000 0.000000 0.000397 0 0.000000 0.000000 5
23 0.017391 0.000000 0.005217 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0 0.000000 0.000000 5
24 0.025854 0.000923 0.000000 0.000000 0.000000 0.000923 0.000000 0.014774 0.000000 0.000000 0.000000 0 0.024007 0.000000 6
25 0.026801 0.000000 0.001675 0.000000 0.000000 0.001675 0.000000 0.003350 0.000000 0.000000 0.000000 0 0.000000 0.000000 1
26 0.039867 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.013289 0.000000 0.000000 0.000000 0 0.000000 0.000000 8
27 0.028050 0.000000 0.000000 0.000000 0.000000 0.001403 0.000000 0.001403 0.000000 0.000000 0.000000 0 0.000000 0.000000 8
28 0.036810 0.000000 0.000000 0.000000 0.000000 0.012270 0.000000 0.000000 0.000000 0.000000 0.000000 0 0.000000 0.000000 8
29 0.029608 0.000000 0.000000 0.000000 0.000000 0.003059 0.000000 0.000888 0.000000 0.000000 0.000000 0 0.000000 0.000000 8
30 0.053892 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.020958 0.000000 0.000000 0.002246 0 0.000000 0.000000 4
\n", + "
" + ], + "metadata": {}, + "output_type": "pyout", + "prompt_number": 2, + "text": [ + " parent_count double_colon let_exists less_minus paren_star \\\n", + "0 0.045734 0.000000 0.001759 0.000000 0.000000 \n", + "1 0.063037 0.000000 0.000000 0.000000 0.000000 \n", + "2 0.042795 0.000000 0.000058 0.000000 0.000058 \n", + "3 0.047059 0.000000 0.000000 0.000000 0.000000 \n", + "4 0.015708 0.000561 0.000000 0.000000 0.000281 \n", + "5 0.000000 0.000000 0.000000 0.000000 0.000000 \n", + "6 0.008163 0.000000 0.000000 0.000000 0.000000 \n", + "7 0.013329 0.000915 0.000065 0.000457 0.000000 \n", + "8 0.058065 0.006452 0.000000 0.006452 0.000000 \n", + "9 0.013752 0.003929 0.000491 0.002947 0.000000 \n", + "10 0.124352 0.000000 0.000000 0.000000 0.000000 \n", + "11 0.088670 0.000000 0.000000 0.000000 0.000000 \n", + "12 0.222222 0.000000 0.000000 0.000000 0.000000 \n", + "13 0.110846 0.000000 0.000894 0.000000 0.000000 \n", + "14 0.006645 0.000000 0.000000 0.000000 0.000000 \n", + "15 0.006572 0.000000 0.000000 0.000000 0.000000 \n", + "16 0.027182 0.001073 0.000000 0.000000 0.000715 \n", + "17 0.014606 0.000000 0.000000 0.000000 0.000000 \n", + "18 0.019254 0.000301 0.000000 0.000000 0.000000 \n", + "19 0.058997 0.000000 0.000000 0.000000 0.002950 \n", + "20 0.097902 0.000000 0.000000 0.000000 0.000000 \n", + "21 0.053224 0.001535 0.000000 0.000000 0.000000 \n", + "22 0.026448 0.000132 0.004893 0.000000 0.005554 \n", + "23 0.017391 0.000000 0.005217 0.000000 0.000000 \n", + "24 0.025854 0.000923 0.000000 0.000000 0.000000 \n", + "25 0.026801 0.000000 0.001675 0.000000 0.000000 \n", + "26 0.039867 0.000000 0.000000 0.000000 0.000000 \n", + "27 0.028050 0.000000 0.000000 0.000000 0.000000 \n", + "28 0.036810 0.000000 0.000000 0.000000 0.000000 \n", + "29 0.029608 0.000000 0.000000 0.000000 0.000000 \n", + "30 0.053892 0.000000 0.000000 0.000000 0.000000 \n", + "\n", + " def_exists end_exists curly_bracket slash_star_star forward_slash \\\n", + "0 0.000000 0.000000 0.001759 0.000000 0.000000 \n", + "1 0.000000 0.000000 0.022923 0.000000 0.000000 \n", + "2 0.000000 0.000000 0.019122 0.000000 0.005069 \n", + "3 0.000000 0.000000 0.035294 0.000000 0.011765 \n", + "4 0.002805 0.004769 0.001122 0.000000 0.000000 \n", + "5 0.002037 0.004073 0.008147 0.000000 0.004073 \n", + "6 0.008163 0.014286 0.000000 0.000000 0.000000 \n", + "7 0.000131 0.000000 0.002483 0.000000 0.000000 \n", + "8 0.000000 0.000000 0.000000 0.000000 0.000000 \n", + "9 0.000000 0.000000 0.006876 0.000000 0.000000 \n", + "10 0.000000 0.000000 0.000000 0.000000 0.000000 \n", + "11 0.000000 0.000000 0.000000 0.000000 0.000000 \n", + "12 0.000000 0.000000 0.000000 0.000000 0.000000 \n", + "13 0.000000 0.000000 0.000000 0.000000 0.000000 \n", + "14 0.000000 0.000000 0.002848 0.003797 0.000000 \n", + "15 0.000000 0.000000 0.002191 0.002738 0.000548 \n", + "16 0.001788 0.000000 0.016452 0.000000 0.001073 \n", + "17 0.004173 0.000000 0.003130 0.000000 0.001043 \n", + "18 0.000000 0.000000 0.008123 0.003460 0.000150 \n", + "19 0.000000 0.000000 0.011799 0.000000 0.005900 \n", + "20 0.000000 0.000000 0.000000 0.000000 0.000000 \n", + "21 0.000000 0.000000 0.012282 0.001535 0.000000 \n", + "22 0.000000 0.000397 0.003967 0.000000 0.000000 \n", + "23 0.000000 0.000000 0.000000 0.000000 0.000000 \n", + "24 0.000923 0.000000 0.014774 0.000000 0.000000 \n", + "25 0.001675 0.000000 0.003350 0.000000 0.000000 \n", + "26 0.000000 0.000000 0.013289 0.000000 0.000000 \n", + "27 0.001403 0.000000 0.001403 0.000000 0.000000 \n", + "28 0.012270 0.000000 
0.000000 0.000000 0.000000 \n", + "29 0.003059 0.000000 0.000888 0.000000 0.000000 \n", + "30 0.000000 0.000000 0.020958 0.000000 0.000000 \n", + "\n", + " var_exists star_count dollar_sign val_exists answers \n", + "0 0.000000 0 0.000000 0.000000 1 \n", + "1 0.002865 0 0.000000 0.000000 4 \n", + "2 0.002477 0 0.000173 0.000346 4 \n", + "3 0.005882 0 0.000000 0.000000 4 \n", + "4 0.000000 0 0.000000 0.000000 9 \n", + "5 0.000000 0 0.000000 0.000000 9 \n", + "6 0.000000 0 0.000000 0.000000 9 \n", + "7 0.000000 0 0.002548 0.000000 2 \n", + "8 0.000000 0 0.000000 0.000000 2 \n", + "9 0.000000 0 0.004912 0.000000 2 \n", + "10 0.000000 0 0.000000 0.000000 11 \n", + "11 0.000000 0 0.000000 0.000000 1 \n", + "12 0.000000 0 0.000000 0.000000 11 \n", + "13 0.000000 0 0.000894 0.000000 11 \n", + "14 0.000000 0 0.000000 0.000000 3 \n", + "15 0.000000 0 0.000000 0.000000 3 \n", + "16 0.000000 0 0.008941 0.003577 10 \n", + "17 0.000000 0 0.000000 0.007825 10 \n", + "18 0.000301 0 0.014591 0.000000 7 \n", + "19 0.000000 0 0.014749 0.000000 7 \n", + "20 0.000000 0 0.000000 0.000000 1 \n", + "21 0.000000 0 0.020983 0.000000 7 \n", + "22 0.000397 0 0.000000 0.000000 5 \n", + "23 0.000000 0 0.000000 0.000000 5 \n", + "24 0.000000 0 0.024007 0.000000 6 \n", + "25 0.000000 0 0.000000 0.000000 1 \n", + "26 0.000000 0 0.000000 0.000000 8 \n", + "27 0.000000 0 0.000000 0.000000 8 \n", + "28 0.000000 0 0.000000 0.000000 8 \n", + "29 0.000000 0 0.000000 0.000000 8 \n", + "30 0.002246 0 0.000000 0.000000 4 " + ] + } + ], + "prompt_number": 2 + }, + { + "cell_type": "heading", + "level": 1, + "metadata": {}, + "source": [ + "Choosing the Best Model" + ] + }, + { + "cell_type": "code", + "collapsed": false, + "input": [ + "from sklearn.naive_bayes import GaussianNB\n", + "from sklearn.neighbors import KNeighborsClassifier\n", + "from sklearn.tree import DecisionTreeClassifier\n", + "from sklearn.ensemble import RandomForestClassifier\n", + "from sklearn.cluster import KMeans\n", + "from sklearn import metrics\n", + "import numpy as np\n", + "import seaborn as sbn\n", + "import matplotlib.pyplot as plt\n", + "%matplotlib inline\n", + "\n", + "corp_train = corpus_df.values[0::,1::]\n", + "corp_answer = corpus_df.values[0::,0]\n", + "test_train = test_info_df.values[0::,:-1:]\n", + "test_answer = test_info_df.values[0::1,-1]\n", + "\n", + "def run_test_model(classifier, x_train, y_train, x_test, y_test):\n", + "\n", + " classifier.fit(x_train, y_train)\n", + " predicted = classifier.predict(x_test)\n", + " print(metrics.classification_report(y_test, predicted))\n", + " print(metrics.confusion_matrix(y_test, predicted))\n", + " return metrics.f1_score(y_test, predicted)" + ], + "language": "python", + "metadata": {}, + "outputs": [], + "prompt_number": 3 + }, + { + "cell_type": "code", + "collapsed": false, + "input": [ + "run_test_model(RandomForestClassifier(50), corp_train, corp_answer, test_train, test_answer)" + ], + "language": "python", + "metadata": {}, + "outputs": [ + { + "output_type": "stream", + "stream": "stdout", + "text": [ + " precision recall f1-score support\n", + "\n", + " 1 0.50 0.25 0.33 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.50 0.50 0.50 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 0.75 0.75 0.75 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.81 0.77 0.76 31\n", + "\n", + "[[1 0 2 0 0 0 0 0 0 1 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 
0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [1 0 0 0 1 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n" + ] + }, + { + "metadata": {}, + "output_type": "pyout", + "prompt_number": 19, + "text": [ + "0.760752688172043" + ] + } + ], + "prompt_number": 19 + }, + { + "cell_type": "code", + "collapsed": false, + "input": [ + "answers = []\n", + "for num in range(5,100):\n", + " answers.append((num, run_test_model(RandomForestClassifier(num), corp_train, corp_answer, test_train, test_answer)))\n", + "answers " + ], + "language": "python", + "metadata": {}, + "outputs": [ + { + "output_type": "stream", + "stream": "stdout", + "text": [ + " precision recall f1-score support\n", + "\n", + " 1 0.50 1.00 0.67 4\n", + " 10 0.00 0.00 0.00 2\n", + " 11 1.00 0.33 0.50 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.00 0.00 0.00 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 0.50 0.67 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 0.75 0.75 0.75 4\n", + " 9 0.60 1.00 0.75 3\n", + "\n", + "avg / total 0.71 0.68 0.64 31\n", + "\n", + "[[4 0 0 0 0 0 0 0 0 0 0]\n", + " [1 0 0 0 0 0 0 0 0 1 0]\n", + " [2 0 1 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 0 0 0 0 0 0 2]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [1 0 0 0 0 0 1 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 0.50 0.25 0.33 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.50 1.00 0.67 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.00 0.00 0.00 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 0.50 0.67 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 0.60 0.75 0.67 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.75 0.71 0.69 31\n", + "\n", + "[[1 0 2 0 0 0 0 0 0 1 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [1 0 0 0 0 0 0 0 0 1 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 1 0 0 0 1 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.25 0.40 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.50 1.00 0.67 1\n", + " 7 1.00 0.67 0.80 3\n", + " 8 0.75 0.75 0.75 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.89 0.84 0.82 31\n", + "\n", + "[[1 0 2 0 0 0 0 0 0 1 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 1 2 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.00 0.00 0.00 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.50 1.00 0.67 1\n", + " 7 1.00 0.67 0.80 3\n", + " 8 0.60 0.75 0.67 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.83 0.81 0.80 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stderr", + "text": [ + 
"/Users/zackjcooper/.pyenv/versions/sandbox/lib/python3.4/site-packages/sklearn/metrics/metrics.py:1771: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples.\n", + " 'precision', 'predicted', average, warn_for)\n", + "/Users/zackjcooper/.pyenv/versions/sandbox/lib/python3.4/site-packages/sklearn/metrics/metrics.py:1771: UndefinedMetricWarning: F-score is ill-defined and being set to 0.0 in labels with no predicted samples.\n", + " 'precision', 'predicted', average, warn_for)\n", + "/Users/zackjcooper/.pyenv/versions/sandbox/lib/python3.4/site-packages/sklearn/metrics/metrics.py:1771: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples.\n", + " 'precision', 'predicted', average, warn_for)\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 0 0 0 0 0 2 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 1 2 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 0.67 0.50 0.57 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.50 0.67 0.57 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.50 0.50 0.50 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.25 1.00 0.40 1\n", + " 7 0.00 0.00 0.00 3\n", + " 8 0.75 0.75 0.75 4\n", + " 9 0.67 0.67 0.67 3\n", + "\n", + "avg / total 0.69 0.71 0.69 31\n", + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [1 0 2 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 1 0 0 0 0 0 1]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 3 0 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 1 2]]\n", + " precision recall f1-score support\n", + "\n", + " 1 0.50 0.50 0.50 4\n", + " 10 1.00 0.50 0.67 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.00 0.00 0.00 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.67 0.80 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.81 0.77 0.77 31\n", + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 1 0 0 0 0 0 1 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [2 0 0 0 0 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 1 2 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.25 0.40 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.50 0.50 0.50 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 0.75 0.75 0.75 4\n", + " 9 0.75 1.00 0.86 3\n", + "\n", + "avg / total 0.85 0.77 0.76 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[1 0 2 0 0 0 0 0 0 1 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 1 0 0 0 0 0 1]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 
1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 0.67 0.80 3\n", + " 3 0.00 0.00 0.00 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 0.50 0.33 0.40 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 0.60 1.00 0.75 3\n", + "\n", + "avg / total 0.79 0.74 0.73 31\n", + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 2 0 0 0 0 1 0 0]\n", + " [0 0 0 0 0 0 0 0 0 0 2]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 0.50 0.67 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.00 0.00 0.00 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 0.50 0.75 0.60 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.81 0.74 0.73 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 1 0 0 0 0 0 0 0 1 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 0 0 0 0 0 2 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.50 1.00 0.67 1\n", + " 7 1.00 0.67 0.80 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.92 0.87 0.87 31\n", + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 1 2 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 0.33 0.25 0.29 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.00 0.00 0.00 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 0.75 0.75 0.75 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.76 0.74 0.72 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[1 0 2 0 0 0 0 0 0 1 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [2 0 0 0 0 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.50 1.00 0.67 1\n", + " 7 1.00 0.67 0.80 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 
1.00 1.00 3\n", + "\n", + "avg / total 0.92 0.87 0.87 31\n", + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 1 2 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.25 0.40 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 0.60 0.75 0.67 4\n", + " 9 1.00 0.67 0.80 3\n", + "\n", + "avg / total 0.87 0.77 0.76 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[1 0 2 0 0 0 0 0 0 1 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 1 2]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.25 0.40 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.00 0.00 0.00 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 0.60 0.75 0.67 4\n", + " 9 0.75 1.00 0.86 3\n", + "\n", + "avg / total 0.80 0.74 0.71 31\n", + "\n", + "[[1 0 2 0 0 0 0 0 0 1 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 0 0 0 0 0 1 1]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.92 0.84 0.84 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.25 0.40 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 0.60 0.75 0.67 4\n", + " 9 1.00 0.67 0.80 3\n", + "\n", + "avg / total 0.87 0.77 0.76 31\n", + "\n", + "[[1 0 2 0 0 0 0 0 0 1 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", 
+ " [0 0 0 0 0 0 0 0 0 1 2]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.92 0.84 0.84 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.50 0.50 0.50 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 0.75 0.75 0.75 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.88 0.81 0.80 31\n", + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 1 0 0 0 0 1 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.25 0.40 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.50 0.50 0.50 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 0.75 0.75 0.75 4\n", + " 9 0.75 1.00 0.86 3\n", + "\n", + "avg / total 0.85 0.77 0.76 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[1 0 2 0 0 0 0 0 0 1 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 1 0 0 0 0 0 1]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.25 0.40 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.00 0.00 0.00 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 0.50 0.75 0.60 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.81 0.74 0.72 31\n", + "\n", + "[[1 0 2 0 0 0 0 0 0 1 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 0 0 0 0 0 2 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.50 0.50 0.50 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 0.75 1.00 0.86 3\n", + 
"\n", + "avg / total 0.88 0.81 0.80 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 1 0 0 0 0 0 1]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.50 0.50 0.50 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 0.75 0.75 0.75 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.88 0.81 0.80 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 1 0 0 0 0 1 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 0.50 0.50 0.50 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.00 0.00 0.00 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.81 0.77 0.76 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [2 0 0 0 0 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.00 0.00 0.00 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 0.60 0.75 0.67 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.82 0.77 0.76 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 0 0 0 0 0 2 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.92 0.84 0.84 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 
0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.92 0.84 0.84 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.00 0.00 0.00 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 0.60 0.75 0.67 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.82 0.77 0.76 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 0 0 0 0 0 2 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.00 0.00 0.00 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 0.60 1.00 0.75 3\n", + "\n", + "avg / total 0.84 0.77 0.76 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 0 0 0 0 0 0 2]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.92 0.84 0.84 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " 
precision recall f1-score support\n", + "\n", + " 1 1.00 0.25 0.40 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.50 1.00 0.67 1\n", + " 7 1.00 0.67 0.80 3\n", + " 8 0.75 0.75 0.75 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.89 0.84 0.82 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[1 0 2 0 0 0 0 0 0 1 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 1 2 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 0.67 0.50 0.57 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 0.50 0.67 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.88 0.81 0.80 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [1 0 0 0 0 0 1 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.50 1.00 0.67 1\n", + " 7 1.00 0.67 0.80 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.92 0.87 0.87 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 1 2 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.25 0.40 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 0.75 0.75 0.75 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.89 0.81 0.79 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[1 0 2 0 0 0 0 0 0 1 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.50 0.50 0.50 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 
0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 0.75 0.75 0.75 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.88 0.81 0.80 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 1 0 0 0 0 1 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 0.33 0.25 0.29 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.00 0.00 0.00 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.50 1.00 0.67 1\n", + " 7 1.00 0.67 0.80 3\n", + " 8 0.75 0.75 0.75 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.76 0.77 0.76 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[1 0 2 0 0 0 0 0 0 1 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [2 0 0 0 0 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 1 2 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 0.67 0.50 0.57 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.50 0.50 0.50 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.86 0.81 0.81 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [1 0 0 0 1 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.00 0.00 0.00 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 0.60 1.00 0.75 3\n", + "\n", + "avg / total 0.84 0.77 0.76 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 0 0 0 0 0 0 2]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.50 0.50 0.50 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.50 1.00 0.67 1\n", + " 7 1.00 0.67 0.80 3\n", + " 8 0.75 0.75 0.75 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.88 0.84 0.84 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 
0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 1 0 0 0 0 1 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 1 2 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.50 0.50 0.50 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 0.75 0.75 0.75 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.88 0.81 0.80 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 1 0 0 0 0 1 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.25 0.40 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 0.75 0.75 0.75 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.89 0.81 0.79 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[1 0 2 0 0 0 0 0 0 1 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.25 0.40 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 0.75 0.75 0.75 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.89 0.81 0.79 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[1 0 2 0 0 0 0 0 0 1 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 0.50 0.50 0.50 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.00 0.00 0.00 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.81 0.77 0.76 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [2 0 0 0 0 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + 
" [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.92 0.84 0.84 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.92 0.84 0.84 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.25 0.40 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.50 0.50 0.50 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 0.60 0.75 0.67 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.86 0.77 0.76 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[1 0 2 0 0 0 0 0 0 1 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 1 0 0 0 0 1 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.92 0.84 0.84 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 
1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.92 0.84 0.84 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.92 0.84 0.84 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.92 0.84 0.84 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 0.50 0.50 0.50 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.00 0.00 0.00 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.81 0.77 0.76 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [2 0 0 0 0 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.25 0.40 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.50 1.00 0.67 1\n", + " 7 1.00 0.67 0.80 3\n", + " 8 0.75 0.75 0.75 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.89 0.84 0.82 
31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[1 0 2 0 0 0 0 0 0 1 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 1 2 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.92 0.84 0.84 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 0.67 0.50 0.57 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.50 0.50 0.50 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.86 0.81 0.81 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [1 0 0 0 1 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.50 0.50 0.50 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 0.75 1.00 0.86 3\n", + "\n", + "avg / total 0.88 0.81 0.80 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 1 0 0 0 0 0 1]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.00 0.00 0.00 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 0.75 0.75 0.75 4\n", + " 9 0.75 1.00 0.86 3\n", + "\n", + "avg / total 0.82 0.77 0.76 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 0 0 0 0 0 1 
1]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.50 0.50 0.50 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 0.75 0.75 0.75 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.88 0.81 0.80 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 1 0 0 0 0 1 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 0.50 0.25 0.33 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.50 0.50 0.50 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 0.75 0.75 0.75 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.81 0.77 0.76 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[1 0 2 0 0 0 0 0 0 1 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [1 0 0 0 1 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.92 0.84 0.84 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.92 0.84 0.84 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", 
+ "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.50 1.00 0.67 1\n", + " 7 1.00 0.67 0.80 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.92 0.87 0.87 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 1 2 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.92 0.84 0.84 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 0.67 0.50 0.57 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.50 0.50 0.50 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.50 1.00 0.67 1\n", + " 7 1.00 0.67 0.80 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.87 0.84 0.84 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [1 0 0 0 1 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 1 2 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 0.67 0.50 0.57 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.50 0.50 0.50 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.86 0.81 0.81 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [1 0 0 0 1 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 0.50 0.50 0.50 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.00 0.00 0.00 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 
0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.81 0.77 0.76 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [2 0 0 0 0 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 0.67 0.50 0.57 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.50 0.50 0.50 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.86 0.81 0.81 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [1 0 0 0 1 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.92 0.84 0.84 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.92 0.84 0.84 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.92 0.84 0.84 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 
0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.50 0.50 0.50 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 0.75 0.75 0.75 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.88 0.81 0.80 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 1 0 0 0 0 1 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.92 0.84 0.84 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 0.50 0.50 0.50 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.00 0.00 0.00 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.81 0.77 0.76 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [2 0 0 0 0 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.25 0.40 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 0.75 0.75 0.75 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.89 0.81 0.79 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[1 0 2 0 0 0 0 0 0 1 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " 
[0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 0.50 0.50 0.50 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.00 0.00 0.00 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.81 0.77 0.76 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [2 0 0 0 0 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.50 0.50 0.50 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 0.75 0.75 0.75 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.88 0.81 0.80 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 1 0 0 0 0 1 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.00 0.00 0.00 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 0.60 0.75 0.67 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.82 0.77 0.76 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 0 0 0 0 0 2 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.92 0.84 0.84 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 
2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.92 0.84 0.84 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.00 0.00 0.00 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 0.60 0.75 0.67 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.82 0.77 0.76 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 0 0 0 0 0 2 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 0.67 0.50 0.57 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.50 0.50 0.50 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.86 0.81 0.81 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [1 0 0 0 1 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.92 0.84 0.84 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.92 0.84 0.84 31\n" + ] + }, + { + "output_type": 
"stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 0.67 0.50 0.57 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.00 0.00 0.00 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 0.75 0.75 0.75 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.80 0.77 0.76 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [1 0 0 0 0 0 0 0 0 1 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.92 0.84 0.84 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.50 0.50 0.50 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 0.75 0.75 0.75 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.88 0.81 0.80 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 1 0 0 0 0 1 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.92 0.84 0.84 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 
0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.92 0.84 0.84 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.25 0.40 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.50 0.50 0.50 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 0.60 0.75 0.67 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.86 0.77 0.76 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[1 0 2 0 0 0 0 0 0 1 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 1 0 0 0 0 1 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.92 0.84 0.84 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.50 0.50 0.50 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 0.75 0.75 0.75 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.88 0.81 0.80 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 1 0 0 0 0 1 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 
4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.92 0.84 0.84 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.92 0.84 0.84 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.00 0.00 0.00 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 0.75 0.75 0.75 4\n", + " 9 0.75 1.00 0.86 3\n", + "\n", + "avg / total 0.82 0.77 0.76 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 0 0 0 0 0 1 1]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.50 0.50 0.50 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.50 1.00 0.67 1\n", + " 7 1.00 0.67 0.80 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 0.75 1.00 0.86 3\n", + "\n", + "avg / total 0.89 0.84 0.84 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 1 0 0 0 0 0 1]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 1 2 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 1.00 0.50 0.67 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 
0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.92 0.84 0.84 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", + " precision recall f1-score support\n", + "\n", + " 1 0.50 0.50 0.50 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", + " 2 1.00 1.00 1.00 3\n", + " 3 0.00 0.00 0.00 2\n", + " 4 1.00 1.00 1.00 4\n", + " 5 1.00 1.00 1.00 2\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", + "\n", + "avg / total 0.81 0.77 0.76 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", + " [0 0 0 3 0 0 0 0 0 0 0]\n", + " [2 0 0 0 0 0 0 0 0 0 0]\n", + " [0 0 0 0 0 4 0 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", + " [0 0 0 0 0 0 0 1 0 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n" + ] + }, + { + "output_type": "stream", + "stream": "stderr", + "text": [ + "/Users/zackjcooper/.pyenv/versions/sandbox/lib/python3.4/site-packages/sklearn/metrics/metrics.py:1771: UndefinedMetricWarning: F-score is ill-defined and being set to 0.0 in labels with no predicted samples.\n", + " 'precision', 'predicted', average, warn_for)\n" + ] + }, + { + "metadata": {}, + "output_type": "pyout", + "prompt_number": 17, + "text": [ + "[(5, 0.63709677419354838),\n", + " (6, 0.68817204301075252),\n", + " (7, 0.82311827956989247),\n", + " (8, 0.79516129032258065),\n", + " (9, 0.69032258064516128),\n", + " (10, 0.77135176651305681),\n", + " (11, 0.75552995391705069),\n", + " (12, 0.73210445468509977),\n", + " (13, 0.73064516129032253),\n", + " (14, 0.87135176651305668),\n", + " (15, 0.72235023041474655),\n", + " (16, 0.87135176651305668),\n", + " (17, 0.75860215053763436),\n", + " (18, 0.71251920122887857),\n", + " (19, 0.83694316436251914),\n", + " (20, 0.75860215053763436),\n", + " (21, 0.83694316436251914),\n", + " (22, 0.80376344086021501),\n", + " (23, 0.75552995391705069),\n", + " (24, 0.717741935483871),\n", + " (25, 0.80376344086021501),\n", + " (26, 0.80376344086021501),\n", + " (27, 0.76382488479262667),\n", + " (28, 0.76075268817204289),\n", + " (29, 0.83694316436251914),\n", + " (30, 0.83694316436251914),\n", + " (31, 0.76075268817204289),\n", + " (32, 0.76113671274961581),\n", + " (33, 0.83694316436251914),\n", + " (34, 0.82311827956989247),\n", + " (35, 0.80314900153609825),\n", + " (36, 0.87135176651305668),\n", + " (37, 0.78870967741935483),\n", + " (38, 0.80376344086021501),\n", + " (39, 0.7567588325652842),\n", + " (40, 0.80529953917050678),\n", + " (41, 0.76113671274961581),\n", + " (42, 0.83817204301075277),\n", + " (43, 0.80376344086021501),\n", + " (44, 0.78870967741935483),\n", + " (45, 0.78870967741935483),\n", + " (46, 0.76382488479262667),\n", + " (47, 0.83694316436251914),\n", + " (48, 0.83694316436251914),\n", + " (49, 0.75860215053763436),\n", + " (50, 0.83694316436251914),\n", + " (51, 0.83694316436251914),\n", + " (52, 0.83694316436251914),\n", + " (53, 0.83694316436251914),\n", + " (54, 0.76382488479262667),\n", + " (55, 0.82311827956989247),\n", 
+ " (56, 0.83694316436251914),\n", + " (57, 0.80529953917050678),\n", + " (58, 0.80376344086021501),\n", + " (59, 0.75768049155145911),\n", + " (60, 0.80376344086021501),\n", + " (61, 0.760752688172043),\n", + " (62, 0.83694316436251914),\n", + " (63, 0.83694316436251914),\n", + " (64, 0.87135176651305668),\n", + " (65, 0.83694316436251914),\n", + " (66, 0.83970814132104454),\n", + " (67, 0.80529953917050678),\n", + " (68, 0.76382488479262667),\n", + " (69, 0.80529953917050678),\n", + " (70, 0.83694316436251914),\n", + " (71, 0.83694316436251914),\n", + " (72, 0.83694316436251914),\n", + " (73, 0.80376344086021501),\n", + " (74, 0.83694316436251914),\n", + " (75, 0.76382488479262667),\n", + " (76, 0.78870967741935483),\n", + " (77, 0.76382488479262667),\n", + " (78, 0.80376344086021501),\n", + " (79, 0.76075268817204289),\n", + " (80, 0.83694316436251914),\n", + " (81, 0.83694316436251914),\n", + " (82, 0.76075268817204289),\n", + " (83, 0.80529953917050678),\n", + " (84, 0.83694316436251914),\n", + " (85, 0.83694316436251914),\n", + " (86, 0.75921658986175111),\n", + " (87, 0.83694316436251914),\n", + " (88, 0.80376344086021501),\n", + " (89, 0.83694316436251914),\n", + " (90, 0.83694316436251914),\n", + " (91, 0.75860215053763436),\n", + " (92, 0.83694316436251914),\n", + " (93, 0.80376344086021501),\n", + " (94, 0.83694316436251914),\n", + " (95, 0.83694316436251914),\n", + " (96, 0.75768049155145911),\n", + " (97, 0.83817204301075277),\n", + " (98, 0.83694316436251914),\n", + " (99, 0.76382488479262667)]" + ] + } + ], + "prompt_number": 17 + }, + { + "cell_type": "code", + "collapsed": false, + "input": [ + "clean_answers = [y for x,y in answers]\n", + "plt.plot(clean_answers)\n", + "plt.show()" + ], + "language": "python", + "metadata": {}, + "outputs": [ + { + "metadata": {}, + "output_type": "display_data", + "png": 
"iVBORw0KGgoAAAANSUhEUgAAAX4AAAECCAYAAAD5OrxGAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJztvXu8JFd13/urRz9Pd59nz4xmRjMjzaBtIZBAEpaQkEBg\n+RGHBLA/+QQTcy2DuRDsTz44iWOTa9/ca38IiSPwxTfYMQ7YfIxjI9skflyEYwyIjIkUPUACQc1o\nRpr3nNPn0ed0n3531f2jalftelc/zjmt6fX955x+VHXVrqq11/6ttdeWDMMAQRAEMT3Ie30ABEEQ\nxO5Chp8gCGLKIMNPEAQxZZDhJwiCmDLI8BMEQUwZZPgJgiCmDDXqQ8aYDOCTAG4F0AbwXk3Tzgif\nvxPAvwTQAvCIpmkfj9uGIAiC2FviPP63AUhrmnYPgF8E8DD/gDG2COAjAN4M4F4A/5Ax9lprm0zQ\nNgRBEMTeE2f47wXwKABomvY4gDuFz44D+JamaVVN0wwA/xPA/dY2XwzZhiAIgthj4gx/CcCW8Lpv\nSTkAcBrALYyxfYyxPIC3AJiJ2YYgCILYY+IM8haAovh9TdN0ANA0bQPAhwD8KYA/BPA0gNWobQiC\nIIi9JzK4C+AkgLcCeIQxdjeAZ/kHjDEVwJ2apt3HGMsA+BqAfwfT+AduE4ZhGIYkSUOeAkEQxNQy\nlOGUooq0McYkOBk6APAQgDsAFDRN+xRj7JdhBnP7AH5b07RPB22jadqpmOMwKpXaMMd/zVEuF0Ft\nYUJt4UBt4UBt4VAuF8dv+HcRMvwWdFM7UFs4UFs4UFs4DGv4KehKEAQxZZDhJwiCmDLI8BMEQUwZ\nZPgJgiCmDDL8BEEQUwYZfoIgiCmDDD9BEMSUQYafIAhiyiDDTxAEMWWQ4ScIgpgyyPATBEFMGWT4\nCYIgpgwy/ARBEFMGGX6CIIgpgww/QRDElEGGnyAIYsogw08QBDFlTK3h/7PHzuA//NEzvvf7uo5f\n/f0n8ejj5/fgqCaLbk/Hv/2Dp/DYty7v9aFMFZdWt/FL/+kbOL9Mq0wRO8PUGv7vvLiB51/agO5Z\nenK72cOLV7bwvfMbe3Rkk8PqZhOnL27iuTNre30oU8VzZ9awvNHEc2ep3YmdYWoNf7enAwB61l/v\n+51uf9ePadJotHoAgGant8dHMl1UNpvm32pzj4+EuFaZXsPf111/ve+3u7pvm2ljmxv+NnWCu8lq\ntQUAqFh/CWLcTK3h7/VMY9Yljz+URrsLAGiRx7+rcE+fPH5ip5hawx8m9fRsj58Mvy31tMnw7xa6\nYWB10/T017fa6Os08iTGz9Qa/k4vROrhHn+PHjhb6ulQJ7hbbNY7tvOhGwbWt9p7fETEtcjUGn5u\n4EnqCafRMqWedqcPXTdivk2MAy7vSJL5epXkHmIHmErDr+sG+pYhCwvudro6DGO6jR33+AGgRV7/\nrrBqZfTccF0JAFDZpAAvMX6m0vCLxt6n8fecYXavP92Gv+Ey/KTz7wY8k+fmo/PWa/L4ifEznYZf\nMPZhUg8AdHrT7eVyqQegAO9uwaWdV1qGf5U8fmIHIMPvNfzCaKAz5bn8otRDAd7doVJtQpKA44dm\nocgSefzEjqBGfcgYkwF8EsCtANoA3qtp2hnh87cD+DAAA8CnNU37bev9pwFsWl87q2nae3bg2Iem\nK3jyYVk9AAV4RY+/RR7/rlDZbGGhmEE6pWCxlKXgLrEjRBp+AG8DkNY07R7G2F0AHrbe43wMwGsB\nbAN4njH2X2B2ENA07YEdON6xkFTqmfZc/kabPP7dpNvTUa21wY7MAQDKc1l856UNiq8QYydO6rkX\nwKMAoGna4wDu9HzeBTAHIA9Agun53wYgzxj7EmPsy1aHMVGIXn5YVg8w3VKPrhuuUg2k8e88a1st\nGACWZnMAgKU58y/p/MS4iTP8JQBbwuu+Jf9wHgbwFIDnAPyFpmlbML3/X9c07YcAvB/A5zzb7DmJ\nPf4pDu5yb1+2EsopnXPn4Xp+eS5r/c253ieIcRFnkLcAFMXva5qmAwBj7AiAnwVwFMAxAPsZYz8O\n4BSAzwGApmmnAawBuG68hz0aonHveTz+Xp80fgDYtvT9+WIaAGn8uwHX87mnvzSbtd4nj58YL3Ea\n/0kAbwXwCGPsbgDPCp9lAfQBtDVN0xljKwDmATwEMxj8QcbYQZijhitxB1IuF+O+MjZylW37/3Qm\n5fptNaXY/2dz6V09Ls5e/KaXjaZp6A8sFbC21QYUeWrbYrfY7phOx03HFlEuF3GTlVVVt0Zb09QW\ncVBbjEac4f8CgAcZYyet1w8xxt4JoKBp2qcYY78P4O8YYy0ALwD4jPW9zzDGHuPb8FFCFJXK7q02\ntLbmGP7qZtP121s1pzZKZW17V48LMG/o3f7NIC5dNZOySjnzFlmvNqe2LXaLc1fMNlcNHZVKDao1\nc/zCFVNtnaa2iGLa7osohu0AIw2/pmkGgA943j4lfP5xAB8P2PQnhzqaXYKknnj4rN1FS26gzJKd\nZ7XaQlqVUZox5bWZrIpcRiGNnxg7ExV03S1cWT2RM3enN6uHT95aLJmGnxZj2Xkq1SaW5nKQrIC6\nJElYms2hstmc+rpRxHiZTsOftGTDVHv8ZnB3rpCBJNHyizvNdquLRruHsjXC4pTncuh0dVTrVJ6Z\nGB9k+CPy+Kd5AheXemZyKeTSKmX17DA8c4dn9HB4Zs/yemPXj4m4dplKwy8WXwurzglM9wQuLvVw\nnZmknp3FzuEP8PgBYHnNNPzL6w28cHETBDEKcVk91yTJi7RNr7HjUk8+m0I2o6JaG4/UsFFrI5NS\nkM9O5a0XSmWTT95ye/x8Mte5q1v47tlVPPr4eQDAwx+81w4CE5PD1nYH260uDizk7VjNJDKVT1+k\n1OOauRvt8b90dQuzMxnMFzOu9w3DwKkLVZw4PAtF3tlBVbPdw3Nn19C31g5QFAmvvnERuUz0pT1z\neRMHF2dCv+fy+NMqrrQbMAxjpJu5r+v4Pz/9BG4+Oo8PvO1VQ+1DNww8d2bNtVbA8cOz2OcxmIZh\n4DsvrqPW6Hp34UOWzTaL64xevLKF/fM55LOpoY7dy5nLm1hZNw3+d1/aABAk9ZivH/nyadf7KxvN\nxIbfMAw862kzkblCGjcfW/C9v1JtQoK/MxqW1c0mTl9wRiu5rIpbjy/as8OD6PV1fOuFVWf0LQH3\n3eFvf90wcOp8FTcdmfPtb2u7g+dfWgePj6dUGbedWERKVVzf2251sVpt4egBf4rkpUod55fr9uuw\nNvv457+Fc8s1zBXSuOXYAl7zijJuv2lp4jqB6TT8MVk96ZSMTleP9Pj7uo6P/sHTuOWGBfzcj93q\n+uz5lzbw8B9/Ez/1I9+H+287ON6D9/ClJ87jz0++5Hrv799zFO+4/3joNutbLXzks0/hnlcdwHv+\n/isDv9No9ZBOyVAVGdmMAt0w0OnpyKSUwO8nodnuo97s
4tzV4XOwX7i4if/nT551vXf8YAn/+t3u\nMlLnl+v42Oe/lXi/P/r6o/ixN4a32YWVOn7t95/Em+84jHc9eNNgBx1Au9PHR//gaXslOABQZMn2\n8DlLs1lkUgp6fR0/+P3Xo5hL4/NfeQGVahMnDs8m+q3vna/62szLR953Nw4s5F3v/cbnv4WUKuP/\n+unvT3hW0fzuX34Xpy5UXe/9i3/8GrwywIByvvHtq/jMF7/nek+7sImf+mHmeu/ZF9bwiT99Fu97\n6ytx9y0HXJ/98d+exje+s+x6L+jZ/G//40X87VOX8O8/8HoslNzX4T/88TexWe+43vvo/3439s27\n22x1s4lMSkFfN3Dy21dx8ttX8W8eeh2O7J+sCWfTafhj8vhnsil0uu1Iw9/u6Oj0dFQ9NwNgFtsC\ngIsrdd9n44Zne7zj/hshScCffu0sqjX/MYls1NswADx9uoJ393SkVP+oZLvVRd4aDeTS5t9WuzeS\n4ef1f9a2WtB1A7I8uBe0YUlOr79lP9iRefzJV88EZrzw9+5kZbzqxsXQ/dWb3dB9iPyv7y3DwPiu\naa3ZQV83cOLQLN5wq1nR5MBCHtm0+5FMpxT80j+5HdftLyEFA8+dXQPgSENJuFQxj/mB1x7yebPP\nv7SOJ767gsur2y7D3+n2cXW9gZkxSnLVWhv5jIp/9OYTOH2xipPPXY1t90ur5mTLt95zDAulDD77\nqGY/XyL8vQuVOu4O2EdalfETD96Eq2sNPPrE+UDpcn2rDd0wcHlt22X4t1tdbNY7OLK/gDfffhhP\nn6rg2TNrqNY7LsNvGGZhwxsPlfCL77odf/rVM/ji4+exuR39PO4FU2/4gzz+bEaFqshoRwR3eYA4\nqGolf29lFybeNKyg6323HbQNf1wlTf55s93H8y+t47YTS/79tnq2hJXLmMa+2ekjmY8Z8ruW1NDX\nDazXWraMMQi883j1jYu4+5YD+O9PXsD6lv8h5ud487GFyFEXN/xhMgjnKa0CAFjeGE92DQ+WH91f\njB0VHtlfRLlcQKVSG6pw28qG+d37brsOxw6UXJ/NZFU88d0V+zscvtZvo90bWeLjNNo9zBbSuP+2\ng8hlVJx87mps0gA/rgdfdz0KuRQ+/5UXXOtEcPj1rnjOwzAMVKpNlOdzuP+2g3jxyhYefeK8q+R4\n4D5u8B/DTYfncP9tB1FrdPDsmTXfpMZOV4duGMhnVMiSZGdkxd1be8FUZvVwY6/IUqDhTykSMik5\nculFPrkryvDvxoxL/lv5jGJ76EE3tXsb57y4QRPRDQPNds/WvLkXOmppZnH7YQuP2edrHVsuY6aa\neic48e/xTisMu1OLOLdLq9u4YmXVVOsdtMdQqdQ+vuxgI6jFUhYSBmu/ZctweeMgQHgFUG5ADWM8\nlVkN657iMSXe7nH36vJGA/mMao88smnVtTIcJ8zZqje7aLb7KFtOBv/9QRw2p2qquY+w54zPdcmm\nzXPLZZM9j3vBVBv+fFYNzONPqTLSKSVS6ulanwVNbOKGtVJtQd/hGZfNdg+qIiGlKlAVGSlVTuzx\nA8Azpys+uavZ7sEAMGMFMfnDMmouv/i7g0gVItx74seUz6gw4DdOfOGYfEyQW5FlZNJK5MP51PdW\nAAClvNke4+jQG233eSQlpcqYK2YGar+VjQYKuVRgUDrU8Auvx7EWQ7eno68bguGPdyZ03fTW9y84\ns5nzGRWNZoTHX3XPcuaL1++b9xj+gM7M2Ye7U7UNv7WPrP08eO65tv/ejDvHvWJKDb95wXIZ1ZW3\nbxgGej0dKcU0/NFSj/lZp6sHGk7AjBeMKw0yDNGLAsxzirvRuPFcms1iu9WDdr4a+LntVVsezKie\nn2hcvQ/XoPvIxxgQ70MYRT6jRg7Hn9QqUBUJb3rtIQDjkXsGOT4v5dksNrbavvsuiL6uY3Wzhf0L\nwbJaLqOimE/5vFzx9TikimGM4vpWC72+4dLRcxnVlp9E+H3RbPddI4KVqnmtHG89fIRne/weuWjF\nM2IKv+f6rs/zGbOjJalnQuj2dSiyhGxKcXn8fd2AAUBVZWRUOdLjFz/zeZsuA7ezck9jGMNvff6G\nV5tBxae0FffnHsOftb2kMUo9Q3v8zvwCwOmUvOdse9TpBIY/G95my+sNXKzUccuxBTsw6jUMw9D0\ndGCDUJ7LwQCwlmBlrrXNFvq6gX1z+dDv7JvLWd9zngXxvh2HVOF02JYMksDwL1f9ElUuo0LXDd/k\nSvEZFK8Pl6y4x68qMhRZCvzdhj1S944a3Avk5EOeB/46R1LPZNK1MllSqhwY6OUef6erhxbHErfz\n9fwd0ePYWcPv9fjzGcW+gaO2AYBbTyyimE/h6VMV6EJaIV+EhUs9Wdu4jubxj6NDDPX4PZ1va1CP\nP8CLBIAnrU7xDrbP9jz32uPnuf5J5B5uBMM8fsDsSPq6gQ0hSF4Zu8fv9ob536h9Bx17WGxAfM29\nfMAZWXKPX5KkQOeo23NG7u1u3zX/o1JtYr6YsfP+7eeh5XH4rHPJekY1QcHovWaqDb+qyOj1DfuB\n596/qfHL0A3DlWct0oky/Lvk8ff6Ojpd3eU15jMqen3dlrOC4MdXyKZw+01lbDW6OH3RkXt8Ug/X\nNEf2+J1jGiW4q8gS0inZdWxxw+4ochk1NIj5lFaBIkt4zSuWsG/ODKyOw+P3xioGgXueSeSyZY/H\nG7w/8zPupOiG4dr3ODRq7wgnrYZ73hxen8gr9QQdk+uZE67PSrUJSXJqHvFjiHLW+HaAaSvWt9qu\nSWzxHr9Xzpq8CgBTbfh5/jrv6bnen1JlO189TO4RM378MkMfPPttHEYijCC5wPakIm42J6NExR2s\nDMDUsTmOx+/O4x/1BuZe2Xwxg83tzlBF8Botc4TDg31hhoD/FvfOouAdnHcfq5tNvHS1hu87ModC\nLoWUqmChlLGN6SiMIvXwNNjVBE4FH53sn4+Qeubdhr9aM+MHqmK28TikCu8Ih3veUfu2Pf55t9Qj\n7k/cP3/mxE6rUm1ioZiFqjimLhtQe4rvz96H9durm00YcMtN2ZBjaHmcjXTK7NwabfL4E7PV6ODn\nfuMxPPaty2Pfd9cK4HLDz2Ub/le1pB4AoQFeUWP03rytdg/l2RwUWRo6iJmEILkgiXZqe5tpFd93\nZB4zWRVPn6rYIx/H47ekHjuPfzwa/5F9BQDAagKN2nfs7Z6nowvW+JvtHrJpJdEksXyI7PDN06sA\nTJmHs28+j41ae+TKrcNm9QCDLcK+MoDHz40d3+Zw2bxO45Aqgs43yPMWWak2kcuoKOScbKQow39g\nIW+OyKx26XT72Ki1feeez6hod/uumAY32tctzti/DfgzesxjCL/nxM/tzo2Cu8lZ2Whiu9XDs2fW\nxr5vUerhr8W/KVVG2uoUwjx+UeMPSuvKZ1UszeV2VOoJkjOSGP5mu4eMZRRVRcbNR+exUWvbwUKx\nTg/gnrk72vF
aht+avj5M2zRbPTtoBojn678GSY1qPiQIxzsmcbYr9z5Hva78eIfx+GcLaaiKbE+y\nimJ5o4lCLmXHa4LwdiT87zHrvMcS3G35Rzim1h78fOmGgZWNJvbP51yTxxyZpe/6bqvdRzGXwkIp\nYx8/v37eMhiOdOnso2Hfm2Zn57SFfx+KbErBvnuuE9C5ZaNHNXvFxBp+nid/RVgfd2z77utIqYrj\n8VtSj1vj5x5/vNQjXthe3yzlkMuo2DeXQ73Z3bEev+HxMIDwySXe7cQH8MQhcz7uC5fMAlq+zJnM\n+IK7iizh4JLpVSWRKkR423pjGkDAZBrPOUaRC9lH3coXFz1OO8C7Pqrh70GWnFjFIMiSWdMnrv36\nuo7VatMllQQxV0gjrcq2l8v/HrEM/zg1/pxntOb1vDk8XdXrrQd52+1OH4a17/JcDhs1s9zKip2N\n492H1XkIzyXf3+FyAbIkOW1hp3Lmffvwafwhjhjl8Q8AN8IrG81E+cpJMQzDp/EHefy2xh9SobPb\nDQ7uci+CG35g5wK8URp/M6Kz8XrDx61iX2cumYt6b3u8Mz5zd9TgLk89dTzMwaQeb0aP69iEa8Br\npmRjZu1y8iFtFmT499t6+GiZPeY1UIYuhbA0m8N2qxfpVKxttc1UzhjDL0kSytbolJc4AASPfwfy\n+MX/gxyKlZDYRFA2kBiz4vfW6mZLSOX07CPt7+jthAc+athwj358HVA6IEBspxC7HbGguT57zcQa\nfq6h93VjrAHSnuDVp7xST1/U+M3Pwjz+thjcFQyi6IU72Rc7a/gHkXq4URSN59H9RaiK7Hj8bbfU\nI8sSMqnRF2Phxo63y6C5/Nwwu6SegMBsp2fWTEku9VgTbQI8fkWWXAFibgBG9fi98y8GJUkbhhnP\n4P3l7MlPlWoTqiLjoKV3jzW4m/WP1oLu1bBspFxA+RAxfiAGqr359/Y+sv4JiaITVZ7L2ckHlWoT\nuYziK1YX5MnbJRs8Uo94jJPCxBp+cWLVOOUeMVdf9Uo9Lo0/OqvH7fELN5CQplf2ZEuMm8CAWUiG\nCqfd7fuMoqrIOHZdERdW6mh1emi0uq4AN2BlQowa3O307WBdJq2MxeMPknoGzZgJy7euN7so5FIu\nr9xOfRwxl38QKSoIntkT5VQkCexynPNqYmWjifJcFumUKYeOw+MPunZRufxODn+wx98MuN65tOoK\nVPPnzlujKEjaawZ0HpUNs/Moz+V8I7N8RkGvb/hifYos2fFB8XyjRuB7weQafqFBL6/ugOEXPH6e\nxtkTOoVMigd3Q7J6QvL4W0Iu745LPUEecIzG7+iQbhnkxKFZ6IaBF6/UsN3q+T2cEdfd1XUD7Y45\n0pAkCeXZLCqbzdAJckHYxkM4tmxAOQl+PbwljsMIkxzqjS4KeXdQNJ0yUzpH6cx13UDL6gSHJYlc\nxkclXqkjCG7szl3dwnar5ypINl6N37nvokanPA3V5/Fzx6YTZLQVn8c/k1V9NYqCRg08WJzNKPZz\ne/rSJjo9PbC4XVBKJ88kEzuJuOdxr3hZGH5eGXGc+02LGr8nuKsKwd3wrJ7gPH7RC18SvKidIKkH\nHLcNABw/yHX+TTRaPd9qVLmMEljYKinejIfyXA7tTt/W0RPtI2DSk6qYGViNgGuQ2OO3h+POsfR1\nHY12D4WAbJh9czmsb0Wv1xBFUPbHoNgyYhKpJ2LWrrM/8zvfsVYCsw3/mLJSmu0+MinFtSJdlOFf\n2TAllmLO3f5Bk6LE622vUbzRQKXaChztRI0axH1858V1AO5UTt8+xA6o45fvSOoZEDFr5vI4pR5R\n4w8L7irxwd1OSHBXLBucSSmYLaR3ObgbXWY4rFTAiUNmnfYXLMPvTf/LplXXtPaBj9VjtB2pIrnc\nE2bQvXprK2RUE0ZQHv920wn2eeHyw7DXtdly7pFhcSZxhbffiuXxRqVycnhH8t1zpuEXC5I1WsHl\nLAaBx3dEwoyibhhYqTaxb96/bm02oDaTOHFqJpvCTFbFmUtb6PX1wGUjI+UiwfDztgjcR9Cood33\njTJJ6hkQ0eO/utYYW3ljbrBVIY+/Z61X2/OUbACi0jl5IFjyXXzAuUH3zeWwttXakaj+oJkS4jZe\nozNbyGBpNovvnd8wF5PwfB4kqQyCNx4xTIDXW0rCPjaP4belnhHSOe2MnnyAx88DvEOO5EaZvMXJ\nZ80a9WGdj1PSOF7mAcyORILTdqLU09eNUAcoKUHB7LDgbrXWRrenB6ahqoq/jLb3OTAD1e7zEAlK\nTxaL+vHry/cRJPV492HOJejZRejscySPfzC44V8sZdDp6VgfYpZn4H4DPf6+6zcTBXetbUoz6dAh\nI2DeNIaBwOXiRmWYrJ6oGjEnDs/aHWNQFkPUfgc91qUh4h9hHn/eMwV/UKmH3wvNIMMf4PHznO5h\nJbxRCrSJlOdyWN0MXvPBKWmcbJWzlCpjoZRx9j3vSD3iMQ8DX4QlaKQWtO/lkDRMjjfu4O1IxXMO\nNtr+3221+/a8Cu9s4ST74HMJvM5GkmJ0e8HEG34+y3Ncco8o50RJPY7GHx7cVRXZugn9er/ofQD+\nJeHGQaPdtxdf4QRp3iJRRodP5AIQGhAb1uP3zlQdJpffkUg8x5bhhenMazVIZU6OtyZ/lOHnmvmw\nmT2jzNoVWZrLodfXfYuAA9GrboUhesflWXcJ4lEMV8ezCAsnbHTqpKEGH3s+m3IlGoQ9c0BwRlNQ\nkTXvvAq+nSJLvoXX3cdu7kOcvxP0W+TxJ4QPLfl0+cur4wnw2sHdlOLP43dN4LKknpAql51uH2lV\nNmWGjqOBemWGnUzpbAYMLYHo2YJRhp8HeAG/x5+NiR0kOVZxP7xa4mAev2mMvVqx9yEcRkrxBjGj\nDL8TQBzV4x9+4XrzOMLb0AnsJpN6zP2Z5zVXSNuOzziyUsLuOafEsjvAv2wXZws+9pmc6ipC6G1P\nVwcW5a233KMG8fj4douz2cB6T0nvOe6kkMafEC6lHLU8/nHl8gfl8Xs1/iRZPZ2ejnTK9PjFkr7e\nJf92MqUzrB5N1MIiUTLI4X0zdlA7yKsGks3e/Z/fuYo/+GvNFRD0/m4mpWB2Jj2cxp8JSc+zji0s\nZTUK7vHzY44y/JmUgvliBueu1vD1b122q5kmPo9xST0RufxJyjF74d8VRwm2Rj2C4QqLKznesNfj\njz72fDblGuF5R1D8+FXFXKbSSyatuOIZgHlfBxn+sBGTrfFbz7s9yvRUg3XOcbIqdEbeeYwxGcAn\nAdwKoA3gvZqmnRE+fzuADwMwAHxa07TfjtsmKfyi8toZ40rp5B1KpMafoDpnt6cjrSqunl/0tL3e\nRxI9+MtPXcTZqzV0rH3MFTN451teEVphstHuYT7gxs5lVKxsmDny3qyIqMlNiizjxoMlfPfchl+P\n9SzGUq238f994xzedt+Nvgf6K89cwumLm/gHb7gBpXza9bviw7U0l8WL
l2vo67orzS+MZrsHCfCV\nYvB6X8No6DkriNnt6UinlEjDDwB3sDL+5smL+MwXv4fPfknDrccX8dM/erMvg+bbZ9ewvNHEW+44\nbL83aAwiDD5qWg+IH/GCe7xzSAK/V8X0xUHXja1Um/jK05fw9vtvtJ+vsI4uTONf22whrcooBgTW\nAWeBoGa7h5TqxNh4Rg3vMMpzWcgBJTFkSUJWWLCIF3kTj29fQFu4jt2T1RM0a9d9jpNVkz/uaXsb\ngLSmafcA+EUAD3s+/xiABwHcC+CfM8bmrG0yEdskgmvruYyC8nwOV9a2R04pA4IncAUWaYupztnp\n9pFKyY5BtHr+RtusC84952I+hXRKThTc/ZOvncE3nruCp05V8NSpCr781EWcW64Ffpd7PEHGTTRi\nXuK8Ta7zl2Y86ZweXfSvn7iAv3nqIp45XYGXzW1TcxbXGw4yxgcXZ6AbBs4v1wOPJejYcxnV9zDb\n3lfL/RAOKvXw3wCiPX4A+IkfuAkfff/r8WNvvBHluRyeOb1q532LfOHrZ/G5/37KdR+NK7jLO/2N\ngHWd12ttqIoUajyDOHFoFrmMilceW7DfG3T5wK998zIefeI8nn/JaYuw802FLMZSrbcxV8yE1jHy\nBpy9Jbj4yjjRAAAgAElEQVTnihkcWppxnYeXXEa1R6+tthmYFTviV1w/i0xawSuPBu/DHgG33aNM\nb2eezZiji0lbhSvO8N8L4FEA0DTtcQB3ej7vApgDkAcgwfT87wXwxYhtEuEYYQUHF/PYbvWwte0P\nYg2/34R5/FFSjyr7gzztHnJpZ6EQSZIwO5OOPXY+s/XmYwv4xD+7D2+/7wYAwPpW8GLtUV5jVAZO\n0CQokR/6/iN414M34ZYb3De8U5rZbI9vnTFr1Vfr/uPj5yp+FvTw33p80dzXC6uBx+IlrL6NM5mm\n7/6thDN3AX8Qs94IT+fk7JvL4Udffwxvv/9GAAgMslbrydpiGKIMf7Xexlwh3HgGsVDK4j9+6H68\n/pYD9nuDLh/IRx8brvMNNopByyD2dR1bjQ7mCv6RrH1MntpK3vtCliT86nvvwrsevCl0H2KRtZbt\nrTsjyf3zefzWz7/RXqjIt70n9uGMOtyjUTnBgjN7QZzhLwHYEl73LSmH8zCApwA8B+AvNE3bTLBN\nIrrdPiSYefK8jO/lMcg9rqweu2SDP4+f16oPyl/mFT69Ug//632gZ2cy2NruRs5F4DGCQj6FQi5l\nB+XWa8EjhaCiV5y8HTQLMPwhcom9bVbFW+447JNexIlhyxsNW3rb9HRo7U7fPpeqYAiDOqpXHluA\nqkj4ZlLDHzCj2Dw2/zXwZjvF4fVs660uJCmZcZ6dMeWs6rbbAOuGYXeCGwGjn1EmcAHmsWVSis/w\n93Ud1XobCwEy4KDweEpSw8UNf/Boz3/PeYPqW9tdGIYZYA6DJx60Ip65OHJWNp5hGEPFXPi5tLwa\nf0jMbdIMf9yZbgEoCq9lTdN0AGCMHQHwswCOAmgA+APG2I9HbRNFuVx0vTYkCamUgn37Srjp2CL+\n6hvnUGv3fd8blLR1Iy8tFWzjqqQUlMtFSJaxO7C/hEI+jWxaQV83fL/JJ3XNzKSxz6pgmMqkUC4X\n0er0UZ7Pu7YpL+TxwqVNZPMZzIZ4MjzdM2/t5/i26WG1ev7fB4BNa6Hnxbm87/MFK888m8/4Puv0\nDeSyKvbvK0W2k5cty2uTFBlnrjrSTKuru37jqhCE7+jOdbXi57j+0Jzr4Xj18SU8c6oCOa1iMUCP\ntre36tvMFv3ndMB6LVvXsdMzUMilBrpXygvmdUxnze2a7T5KM+lE7dSTeAaY+1pVa217zea+JNuf\n9YS2SFpPCPA/I4CZ0lnd7rg+W602YRjAgXJh5OelyS+ccPxRVK2RUlO4LyTFNJIHykXfPkozaVxY\nqdvvV60R13UB3+XkLfktlU1haamAZqeP62fSA53rbDED3TBQms1j1XrWlub9z1IYhmFAloBu37zm\nkjXv58A+/3EXZ9JYXm+MfC3GSdxddxLAWwE8whi7G8CzwmdZAH0AbU3TdMbYCkzZJ2qbUCoVt5bd\naHWRUiRUKjUU0uaDdfqldVRuWkqyu1CqVhZJo95CTTWHwbV6C5VKDdsN0zvbrDbQ3G5blQm7vmOz\n68voBnpd80ZdrtSwsrKFRqtnHzcna6WGnj23jsPWsoNeLlmF6HJZFZVKDVLfNLSXlrd8vw8Al69u\nWseg+z+3Fra4fHUL8zn3Ja5td5BLK4H7jKLVMD249WoTp89v2O+vrDdc+3rx4qZzjMKxb9bakCUJ\ntc0G6oL8cPOROTxzqoK/ffwc3vTaQ67fLJeL9vY8cyYlS75j71oZE6tr26hUaqg3OsgOeI6GFeC/\nslxDZTGPzXobxXwq0T76liOwvFp3ff+8EJ85f3kTlevN+MlmrQVFlrBVbaCWUIoR20KklE/hUqWO\ny1eqSFnG54xVXjs/xHX20rKeibVqI3ZfumFgzcowuiK0RWXdvLe7bf+zlFJktDt9XLm6CVWR8eIF\n897KKP7rzOEe/9WVOi5d3oSuG1Ajvh+EtZwwzl+q4sqK6cjovf5A+8imVdS226hUaliz0mc7rY5v\nH2nFrHC6vLyVaCnQQRi2M4kbC38BQIsxdhKmrPMhxtg7GWM/o2naKQC/D+DvGGNfBzAL4PeCthnm\nwPhiKQBw3aLpwY5jEpcT3FWcpRcD0jkBs5Bb0AQurvu7NX5T4uArAYlwKcAri4i0PAHJuUIGsiRF\naPzBE0bE94I0/mHrwPPg7nqthVMXqjh2oIhSPoVNj8YvnqMo9YQtPHLbCbMjj9P5o2YcZz3T55vt\nXuJyDRxR6tENA9utbmhg10smpSCXUVD1XF93W7g171xGHXoRFpGFAJ2f/z8foZMnZZCSA1vbHXuE\nExfY53jvVX7PREk9eSGrhwfyB82QEn930DLe4j58mWQBIzg7GD1iWfNxEnmmmqYZAD7gefuU8PnH\nAXw8YFPvNgPDNXTA7Fnnixm7VOuo+wXcwd2eENxVFcnOGsmkFGw1/MbamQQmu2bmhd1AjuEPNuKA\nEzTlN4ksS5grprERp/EH6aYhk254PZFcZib0OMLgN7R2voq+buA1J5bwpFbx5eFvCecoBvjCOpzy\nXA6Hlmbw/LkNtLt9O6juJWjNVo54vkHLMybB1rJbXSufPzyjJ4jZmYwvuCsae6/GP+rkLc6cYPh5\niQPb8I9B40/zzJsEefziOXo7fSAsEcGJHRXzabvDiA7u+o32MBr/6PtQbMcsqj6UmDiQpGDebjDB\nE7h0V3BuJpsaSy5stx+dx89HAYA5uzfQ4xdGDeLSf2E3UMky/Fvb4ZkRXo8fMB/cjVoHuu4PCkc9\nTGHT7NshI5Ik8NQ77tHddmIJc4U0Wp0+2kIZh3AvN3ykcduJJXR7ul0NMYiogKj4EIdNnY9D7Dy2\nLSlvZgDDP1dIo97suorxiR2
BqxNsjbb6lkikx18a3fBLA2SliPMJ6s2u/VxFJSJ489z5PRM08Ypj\ne/ydnqu42iCIZZVH6Tz4rH1n4mbwTHpgtHpH42ZiDX+n13cZ/kxaHrr+uYhYj1+x9DZx6UXxN9Mp\nGX3d8FXWdEk9ovchLOYgMjtj3sRRHn9QyttCMQvdMAIloqhMhLAbbdghrXe/c4U0juwvBI5keBZL\nIZeyhv567MIjt52IT+uMTF8VZu46hmAwj9q5jn3ULMPvrQUfBQ/ai8Ze/J97sn1dR7vbH3nyFme+\naE7iEg0/zwRbKPprzAxD0qwU7v1mrLbfsM4/UgbxjE75vc7vrSB4h+z21ge83sKExGHXR8hZs/bb\n3T5a7R4UKxPQyzhmP4+biTT84oLonExKCTTCg8KNvKrKkCQJKVW2NX7vb4ZV6OwIctFgUk8CjV/w\nini1xKCZmYPoppxGhOeVBJ6jfNuJJXN+gmXsxGE9P8ej+wswDHOU04rRYY8fnEUhl8K3XlgNnaRn\na/wBx55OyZAlcyJQ3DyFMMR89bjJW0EEpXTy/xdLGVTrbXu942GOLwwu56x7PH7Zmj8yDvIZNZHU\nwzucG6z6Wryza7R7yAgTrER8Gn+tjWxaiWwfJ+7QH7o9XVJPa9R99G0pMyhuM4mF2ibS8Pd1A4YB\n19qVGbuEwmhef0fI4+d/XR6/Io4ygss28CFsJqW46tTHST1BE3w4QRIF99jWAyboRHrAIaV0o3Ty\nJPBju+24GZCdtSUst+FXZAkHl8zspWq97VqAPghZlvDqGxdRrXfwyFfO4C//7iX85d+9hLOXnAyh\nqPM15QjFCrAPafiFIKY9eWsgqSfY45ck4Pp9RfT6BurN7tgmb3GCJnFt1NqYLaTHlkGSy6joeBbh\nefJ7Kz5Hhnv8x63Z3/yYgkoyi/vm3wHM+yUs5ZkjlmwYtj3FUhRx92cYXokxbPscefzJEDNvOLbh\nH2H5P/e+Zfsvn83b6+l2Rg/gdDwdT4VOrvunrMVceBnksCFtSpUxk1UjZ+862QmOseEe/0aUxx84\ngSvYwxjV6JTncpjJqrj52DwAYNbKvBC1/M16B6WZtG2QqrV2Iq/sTmuG5KNPnMefPXYWf/bYWfzm\n55/xHXuUAXE/xIOdoxjEHMrjL/DOXQxwtlGaSTvXsdYeWW7zUsinoCqSbWR1w0C13h5LYJfjzex5\n8coWPvlfv40/P/mi63vrVpoqL6zI74tmO1zmE+NRvb6OrUYX8xEZPYAZe+MLIA3bnlmX0R5S6hFW\nA2taM/aDGLTe0W4wnrtvzHQ8xhmAUDRtRMPf1yFJsPV9VZHRE4K7KSXgNzteqafv+pwbnSgDV5pJ\nx0g91rYuqSfc44+64YOWpxNfD2v4f/rvfR/aXd3uhL0SlmEY2Gp0cGhpxk7Hq9bbtiYb9buvecUS\nPvyTd9gP4R99+QW8dKWGXt8MuDdCavFzchlzNaphNV8xiMnnDESVa/Ayx6WeutMWm9sdHFyccTrB\nettuu3F5/LIkYa6QsbO/6o0uen1jvIafG65WD6V8Gueumnnq56+689XXt9qYK6Tt+3aj1rYXYQlb\n91ccnXLHKCqjh5NNqy7DP2j6bpBEO2yAeLtlevxhx0BST0K6XSfzhpOJWRgl8b6tNFGuxXGN3zCM\nwOAu4F93tyMEiIEgb9NvdGZn/FkfIq3A4G60xs9HHF5UxVw60psFNWod+Hw25TIoXnmj2e6j29Mx\nK3j8G/VOIq9MkiScODSLV92wiFfdsIjjB0vo9XUsr5spvHYt/pD4hFl0q4/tITV+8/xMw18bQuqx\ng7vbjrzR7emYLaTtdqrWO2MrySyyUDRTSXt9faypnByvx3+hYk54uljZtsuQ9HVzQZj5UtbV6Xe6\nwYuw2PsWjCLPfEpi+POeZ27wHHzHOYqKQUTvw/xNPrIJOwa7thBJPdFw6SXtyeoBRvf4e54Abko1\nNX4eV3B1NiHB3W7XLUXZ+nLEQ80NQ5jcExTcLc6kochSiMYfPnwGrPrynhrgzkMynlzikiegyY2e\n29i1hxpp8BnO3MjExSf4+zygOIyUwoOYw6ZzAmJRNmcikphrH7Zu8CjMl7IwYN5b487oAYSCZNax\nX7Rmura7faxaM3U36x3ohoGFYgazhTQkyTrfBBIdYBrgai1+8pa4XbPdH9qZcapr9tFqD5dlxX+T\nO2Zh9a+cyYGTU6FzIg2/uCA6Z1zBXW/mjmoFd+1snwCpxzvKcKQex+M39cmO/doLr0sfJvcEBXdl\nScJ8MRPq8UcZUv5giIyrDrz4G5mUgi3LyPFOrTSTdun/wxj+I9zwW0Ym7gHnDx3vJAcd+vPj6/TM\nAmcS/KuQxW2bUmX7+jppiRl7Bq2o8Y/T4xf3zz3+ueJ4MnoAt0ZtGAYuVpxaTfz68HZfKGWhyDJK\nM+lEMQ3R8HPHISqH39lOQbvbx3ZztAlcDcvj91bVTLQPSxri5x6v8U9OTf6JNPyOx+9cjDC9fVA6\nvb5Lx0+pslnbvu+PK2RS0cFdUeoBhBsg0OOPNvxh0o04jBdptIKXXeTwobCYHjnMylRxzM6k7VIF\norHLplWzjEFtOHnjUNk0/BdXzDIdfDgetmAL3zdf92BYqQcwFxPJZ9VEi8NwePltHtzdtGWLtEvj\nH1VuC0LM7OGGf5wevyj1rG220Gz3bRnMNvxWu/NjmS9kEklbeUFy4ZJJkjRU55kb7nrz6q08TjCc\nx29usxHn8fNquRNUk38yDX+Exj9uj5//z2WawICyp7MR1+0FhJvQNjrBGj8QJfX0A72OBWsYL6YJ\ndq3UujiPv68brvjETnibs4U0ag1zdrF38s1cIWMZu8EXFy/kUliazdrepdnRRUtbgHMNhpV6AGCr\nkbxOj8hcwSq/rRu21DNbyCCbVuzyyePO4wfcufw7ovFnHI2aS2933bwfgKnzA04qJ+9w5osZ9Pq6\nLQWFna9ZM0tCo913pJ5EHj+/3m2kQ2JdSfax1ehExiASHUOMvKjIMjJphYK7cQRq/DELowyyb5fh\nV9zLwwVKPb7grrtj4kO8aq2DdEoO9BTjJnG1OsHpYHzavViXP4kBD5rEtTOGP2NO1Gp0XFIPYBrC\nerOLmi2BDeblHjs4i41a285/j9LFeafJjccwQ3dx/8MY/tlCGrphoNbsuuIdkiRhrphxzWkYl9wG\nOPfIRq3lSD1jKNDGyQsaNdf3X3XjAgq5lP3aji1Yx8J/n6+hEXevNgSPf24mueGvN7tDyXqAmY5p\n3y8jafyWvBiRFcTXdJ4UJtLwR2v8w2f1BM0I5r/BZyYGSj0hM3cdqcc8Nt0I9xzsej0hk7iaYR4/\nn8QlVOkc1vDzZSGHMYphzAqT0/wev/n3ynr8wx/EDQfNWvgXlmuxVUW5IdUNY+hzFPc/lMfPS3PU\n2/YIjb83X0ij1hA7wZ3R+NdrZjnpQRahicO+l1p9W9q5fl8B1+8rYMVKod0QNH7A8dqv
WOXGozo6\nnhVXrbfNuFGCaye237BtmcuodlZSlGwaBu8suAoR5dhw6XVSmEjDH+zxj57VE5S543j8fddr8/eD\n5aWOLUWZn4s3ddgN7k33E9ENc9nFYKnH7/En8RqDcoebllwyjnLAnLmCU68nyOMHnId/0Af02HWm\n4X/h0iYMI9542P+nhzvH/IiGf1bI7LH1aus9Lr1ctTrBcXr8PItmvdZGtTbeyVuAeC91caGyjZms\nWS33sBWHubS6jfUt9xq/87bHH3/t87bh7yTK6BGPyfx/OEdm1M7Dew0jHTErVXgc64aPg8k0/L2A\n4G56dI2/6ynXADidAE+1CtL4vVk9fD+8MxKHiWHDvWIuBUkKlnradnE3/7ZRHn+0IfRP4hq2Fn8U\nYjmKzbopdfEOjBv+7Za53GMST06EG/5TF6oAolMgx+EBivsfJJWTI87e3dzuoJBL2dIhb4vl9SYU\nWRqrR67IMuYKGVxZ3Ua72x9rYBdw2qVa72BlvYHD5QIkScLhfWZ574srdazXWpgvZuyS5rzzWV7n\nGn/4tc9lVHR7OurNbmKJStzf0Nfb4ywMind5z6h95DNmQbfWiMkp42KiDf+4g7tB+015pR6Xxs+X\n1Asr0hbk8YfXoynlg2fv8pshyOOfDyjUlkTqCZo0MszapHHY+frbHWw1Oijl07a3LQbpshnVNgpJ\nOVQuQFVkvHDJXMI5scc/7AQ1YX5DcYBZuxyxLar1jt0RAE5bcDlwnKMuwDS0fPLauD3+TFqBJAFn\nL2/BgDPH4sg+szTDS1e3sFXv2JVCAff5AuEzrgH3dU3q8Y+jo8+OofMQq8AmKSw3KXLPRBp+b/AU\nEIK7AT3mV5+5hJVq0/e+l0DD7wnuBnv83glczkLwQPKbcDakbENUrZBiztRrxSJcSdIjvR5/XGnk\nYbGrUtZNqUc0duIKUMMMxxVFxqGlGUFD3VmPX+wwhvL4rbbgpSPmZsLaYrzXAHAb+3EbflmSkEur\n9nW43jL8B5fykCTgubPrMODIkgB89XbiPH5Oco9/NG/dt48R4gTO//HnOCmZPRNp+Afx+K+uN/DZ\nL2n44y+fjt9v3+2pA0Jw19L4gwLKXqmn3dORSsm215b0BirNpNHuOBUkOfy3gzx+iU/iEldwSlCW\nQCwZCyC2NPKwzAo6fl837LUHALf3NuyDxeUEIE7qGcPQX/BKB6nFz+FGi6+1K1aZFI3xuDtfwN2x\njNvwA+6254Y/pSo4sJAPnDuQy6j2iBlIPlobyvCPQ+oZetSgBv4f9luTktkz0YY/HVSd02OEeSXF\n589txNbqj5J6AjV+Xp3T6/ELy0ICyY1OWC4/N8ph8YGFYgZb2x37+J3gbnQWgfjdqDVrR4HHLs4v\nm9ke4uQb0fAN+7vXl53F6ZNLPaMbgmGCu4V8Coos4ZKV2+6SelxtMb6sKo642taOGH6rbSQAB5ec\nzph3AoDb45ckydUZRaU6iu2RJIff3GZ0aU88puHlwWQjj0HWLt4NJtLwi2WPOamQrB4+8ard6eP0\nxU1EESn1RGn8AVk9ojeT2OMPmb1rl2sICX7a1Q6FMrcAIhdU8aZz7kT+OGDFLmbS9v5LguFPqbJt\nQIf3+AXDH6ET59JjMPwj5vHLktkWfHlKMR99tpAGV/V3xOPfQakHcNpm30LetS6yy/B7gsr8OLIx\nBdCG0fiTZNLF7iM7+j3DR+mqEh2wFyucTgITafiDyifIkoR0SvYZYbEH/fbZtej99vwpm3ZwN0Dj\nV2QZqiL5JnCZcwGCvfxoj9+/WIf422FDRW9d/mHy+J36/eP3Nl1evme6PX+Qh304RcMf5ZXJsmRn\nDQ3rvWXSim2chzH8gHfE4/yvKjKKM6O1RRSi0d0Jw8/vJ9HQA7BTOgG3xw843nucUR1O6hlDYHYc\nnYe1XdJzJI8/gqCSDYAp93hlFzE96rlYwx+R1ROQxx/2m51e3zXHIK3KdsZK1FqvttTTCPb4wyYd\nzXtW4hrO8I+/VABHfFj9hj/Zwx9GKe8UfIurKsrbfthgn2zV5AeGC+4C7rbwGrH5EdsiinnByEbJ\nKsPCDdz15RnX+2JH4O1w+PnHGdXcEB5/JmVmGnm3HwTxWR129i/fLu6eI6knAUETuADzYns9fm7Y\nVEXGxcp2YCVLe789/36drB5T41c9v5lOKf7qnF3dJfXwpf+AZBq/1+NPovEDwDOnKuh0+4lkG29W\nz7hXfhIR5Z1SYbyGH3B0/jhPPpfQ+4oin1WRTStD1X4B3F7+rK8t0vZvjBvezgs74O0Djqx42OPx\nzxczmMmqLlnP/izhteftYe4n2WhNsjKNkuw/DLfHP9oksLACbc7+reUiJ0TqGf8dOAaCNH7ANPzi\nMn+AY9BefeMCnjm9im+/uI77bzsYuN8gCcmb1eP1+NOq7BpV9HWzdn/ac4PmMiq2W71I3T2sQmec\nx8+OzOG6xTye1Cq4WPlf9ogjyjjZhaFabo1/Zzx+wdjlPcbOKg88SkDzB193PRZKGeyfz0d+j3dq\no3RuP3zXkZEqwIojHm/NmfmE0scwpFQZb73nGJbmxjt5i3P3Kw+g3uzilUcXXO9LkoR/8IYb0On2\nfXMT+PnGdXS8PQatL8Rr/Ixq+E19frTgbtw9d/2+GdzJyrjtxOJQvzNuJtLwB6VdAqb37c3q4Ubz\n+2/ej2dOr+K5M2vhhr/nrwHEDX2Qxs9/U5RmwjqlJDdAaFZPjAyTTav4lZ96Hf70a2fwN09edO0r\ninxGxZX1Bv7Np5/A6ubwVSvjEFM4SyFSzyi/+6obF/GqG+MfmHF4/G++/fDQ2wLO+WbTim+mclLN\ne1jefv+NO7JfALjxYAnvO3hL4GcP3nl94PtJzzdvG/7B1hBwrvdo3voo14N7+nHyWkpV8E/f/uqh\nf2fcTKbU45kgxcmkzNr5uu7Uu+Ce7LEDRZTnsnj+3HpoWmfQWr78f56J4Tf8skvqCZKLAEfriyoO\nlsuoUBXZV6+naUs94dtmUgp+4gduwr9852uxNJvFEWtB6yiOHSii29OxXG1iJqfilhsWcKNV+Gyc\n8E7IzN12n8PtN5Vxx01lvOYV5bH/rpfsiIZgHPBR3WyA98qun0NalXHsQPy1uxY4uGiuvRx3z80X\nM1iazYIdmR9o/1yeGdapsOXZEWIi+Qm454ZhYj3+lCr7ho7iJC7eS4vLHb76xkX87dOXcObSZuBN\n5NTqcS6S19B75ZNMSkFfN+xFv70LrXP4ak1RKYfmYh2pgaUekZuPzuOj7389kKDW08++49XmnIPU\nzt6UtrELGIXMFTL44Dt2x9Ph12CnPOokcI9/LqAt2JF5/Pa/eNMuH9Hekc+q+NjPviH2e+mUgn//\ngXsG3v/BpRlcXW8MHYjPpBQoshQpz8aRHTHOsFdM5NF2PKWTOZm03/A3hSULueF/7ux6iOF3L5kI\n+A2993d5wKrW6GK+mAn1+H/4riM4cXg2VoI
pzWRwfrkGwzDsjs1O50zoeciSBCQo9SJJ0o4bfcDx\nbr0yz27zltsPY3Ym7Us53E0WZ7NQZAn7F6LjEcTo/OO3vALveOPxoQPxkiThn/zgTSOlvy5aKax8\nrs3LhYk0/N6a+Zx0QNmGVrtnT574viPzUBUZz51dw4+/6XjgfoHgPP6w11y/rtbNcredbnD84RWH\n5/CKw3Ox5zZrTfDZbvXsTqXV6cdOAJlkFooZHCrP4JVHBxuqj5vD+wq+rJPdppRP45f/tztfdobg\n5Ug6pYzs2LzxNYdG2v5QuYB//e47XPMZXg5EGn7GmAzgkwBuBdAG8F5N085Yn+0H8EfC118D4F9p\nmvY7jLGnAfBptGc1TXvPIAflLYnAsaUeIevCXCjZPI1MWsHxgyWculBFq9PzedBBWT1xht+uN2+l\nYHoXWh8UHvCq1tqC4fcf68sJVZHxq++5a68PY2JIEn8hrh2OH5zd60MYmDhr8zYAaU3T7mGM3QXg\nYes9aJq2DOABAGCMvR7ArwL4FGMsa33+wLAH1e3pgQGboKJpZsVJp5M4eqAI7UIVF1bqPg88aAKX\nN4DsHTbai2tYAVnv6luDwoeGq1st2zsNW2+XIAhiJ4izXvcCeBQANE17HMCd3i8wxiQAnwDwAU3T\nDAC3Acgzxr7EGPuy1WEMRKfXD9b4A2rnNNvutWqPWt7Wuas13/bBM3fdBtebx2+vnGV5/N0QqScp\niyW+sIoz0ezl7vETBPHyIs7wlwBsCa/7lvwj8lYA39Y0jddF3gbw65qm/RCA9wP4XMA2oRiGgW43\nJLjr0fh1w19j/sh+04vm1SJFooq0AYAiS75iUnP2bFvu8Y8m9XDtd83KqzcMA612/2WXDkYQxMuX\nODdzC4AoWMqapnmT5N8F4DeE16cAvAAAmqadZoytAbgOwKWoHyqXzZ/p9nQYAGbyafs9zuKCWSck\nk02hXC6i0TLLLMwWs/Z3FxZmkFZlXF5r+LaXLSN/YH/Jrn/TFVbXSqdk3zZZK7jb7Oool4vIvLhh\nHst83vfdRKhmk2+3+yiXi2i2ezAAlAoZe39D7fcahdrCgdrCgdpiNOIM/0mYHv0jjLG7ATwb8J07\nNU37hvD6IZjB4A8yxg7CHDVciTuQSsWUZuyFCnTDfo/TserpVNa3UanUbLlEltzfPVQu4NzVLVy5\nul1lrUUAABDNSURBVOnS7OtW/vxWtYme1WmIix8rsuz7TcMwoCoyVvhvbpi11tutru+7SejrOmRJ\nwqVKDZVKzS5BoUhmG5TLxaH2ey1CbeFAbeFAbeEwbAcYp1d8AUCLMXYSZmD3Q4yxdzLGfgYAGGNl\nONk7nP8MoMQYewxm1s9DAaOEUIIybzje5RftKpUeffzo/gL6umEviBG1b0mS7M4h6DfNSVdpVO2s\nnvDjS4Iiy+aKWlvuSpsU3CUIYreI9PitYO0HPG+fEj6vALjds00PwE8Oe0BhJZkBv8bPJ295K+Md\nsabEn1uu4agwPZ7v25vJk1LNUhBhxnyukMZLV2vQDcMu0RyUbpqUxVIGpy9uotfXhVm7FNwlCGJ3\nmLgZQ2ElmQH/8outkFLDPLOHr30q7juoFETK6gi8GT2c2UIGfd1Avdkd2eMHzNmdBoCNWnugcg0E\nQRDjYOIMP8/R99bFB/xLITZCSh0cLs9AliRfZo85Mcy/X27Eg34TEMop1511bzMjzBhcEFI6WwOW\nayAIghiVibM2jscfMHM37ZZ67LVqPVJPSlVw3VIeF1bq0HXDTtHs9vRA465avxUq9QgpnePy+AFg\ndbPlrNxF6ZwEQewSE+fxJ9H4uc4eFtwFgCP7imh3+1jeaDj77uuBcg5/L0rqAYBqvSNo/CMYfsHj\nb8asvkUQBDFuJs/wJ9H4vVk9AeUdjgZM5Aor/pZSLY0/IrgLAJvbjsc/SnEobvjXtlqk8RMEsetM\nnOEPW+EK8Gf1tISSzF6OCpk99r7DDH+cxz/jePxRI5KkOIa/ba+3+3Kr500QxMuXiTP8UXn8smzm\n3POsHju4G6CPX7/Pn9nTC6n6yX8r1uOvix7/8E2XSSso5FJY22zZa/2Sx08QxG4xeYa/Fx7cBcxC\nbVxnb0Vo/PmsivJcFueX6zAMw14kPVjqMX8rLKunmE9DkoDqdgedXh+KLEGRR2u6hVLGzOpJsOwi\nQRDEOJlYwx/mfWfSim8CV1hGzJH9RdSbXVxdb6DXC15TF3AmdIVJPbIsoTSTxma9jW5XH8nb5yyW\nsuj0dFSqZtmJLEk9BEHsEhNnbXj1y1DDn1JQb5p1dpzVt4IN/w3XlfCUVsEv/+4TOH7IXPA5MKsn\nRuoBgLmZDK6sbUMpyUOXZBbhOv+lihl8zpHHTxDELjFxhr8bEdwFzGyatlWcTVx9K4g3334IkgQ8\n+b0KTl80SwrlAxZWTmL4ZwtpnFuuYXO7Yy/qPQo8l3+71YMiS0OvG0oQBDEok2f4IyZwAabH3+nq\nQi3+cE85m1bxI3cdxY/cdRRrmy1899wGbg5YF5Yb3SjjywO8zXbP/n8UFoU1WbNpxVdGgiAIYqeY\nOMMflc4JOCmd3a6OZruHfXO5RPtdnM3iDbdeF/hZEo+/ZKV0AqPl8HMWXIZ/4i4DQRDXMBOnL0Sl\ncwLO8outTs9cq3YMQVHb8Cfw+IHRZu1yuNQDULkGgiB2l8kz/DETpLjHv2ktqhK0KPugpCLq8XNm\nRY9/DIa/mE/Z0hJ5/ARB7CaTZ/gjSjYAQDrtNvxBk7cGhefvJ9H4geEXWheRJQmLJbMzGcc5EARB\nJGXiDH9Sjb9aM1ewCpq8NSg8kByVnz8rSj1jyOMHHJ2fPH6CIHaTibM4jsYfntUDmLNogfF4y7ff\nVMbaZgu33LAQ+h231DMeD53r/DRrlyCI3WTyDH+3Dwn+5RE5tsZvLVI+Do1/vpjBP3rzicjvpFQZ\nM1kV260eUmPy+HlK5zhGLQRBEEmZOKknbHlEDs/q2bQWP99NmWTOqss/juAu4Bh+8vgJgthNJs7w\nh5VO5vAc+qrl8e9mKiTX+ccl9Zw4PItsWsENB0tj2R9BEEQSJk5jCFsshWNr/JbHv5syCdf5xxXc\nPbCQxyd//o1j2RdBEERSJs7j74bUzOdkfOmcuyn1mB7/ONI5CYIg9oqJM/ydbj+Rx9+zsn/GEdxN\nyuyYNX6CIIi9YOIsGA/uhpHx1MnZzclPt9ywgAMLebzi8Oyu/SZBEMS4mSiN3zAMdLtxwV33Z7up\n8R9amsFH3nf3rv0eQRDETjBRHn9fN2AgWkrxevxU4IwgCGIwJsrwO+Ua4oO7AGgBE4IgiCGYKKsZ\nV5IZcHv8uYxKC5gQBEEMSKRAzhiTAXwSwK0A2gDeq2naGeuz/QD+SPj6awD8KwCfAvBbQdvEEVeS\nGTAraCqyhL5ukMxDEAQxBHEe/9sApDVNuwfALwJ4mH+gadqypmkPaJr2AIAPA3gKptF/O4BM0DZx\nxJ
Vk5vDZu1TjhiAIYnDiDP+9AB4FAE3THgdwp/cLjDEJwCcAfEDTNMPa5otR24TBNX41xvDzej27\nOXmLIAjiWiHO8JcAbAmv+5b8I/JWAN/WNO30ANsEErfQOofr/Ls5eYsgCOJaIc5ybgEoCq9lTdN0\nz3feBeA3BtzGR7lcxOVqCwAwN5tDuVwM/e5MLg1sNDFXykZ+7+XKtXhOw0Jt4UBt4UBtMRpxhv8k\nTI/+EcbY3QCeDfjOnZqmfWPAbXxUKjWsrtUBAN12F5VKLfS7sjV+kAwj8nsvR8rl4jV3TsNCbeFA\nbeFAbeEwbAcYZ/i/AOBBxthJ6/VDjLF3AihomvYpxlgZwGbcNkkPJm7ZRQ6XemitWoIgiMGJNPxW\nsPYDnrdPCZ9XANyeYJtEdHuDGX7K6iEIghiciZzAFR/cNQ87R8FdgiCIgZkow99JMIELEDx+knoI\ngiAGZqIMf5KSDQBN4CIIghiFyTL8CYO7M1nT4BfyqR0/JoIgiGuNiXKZk2r8b3rtISyUsjh+iBZE\nIQiCGJSJMvzNdg+Au/RyEMV8Gve++rrdOCSCIIhrjomSerYaXQBAiSQcgiCIHWOyDP92BxJIuycI\ngthJJsrw1xodzORSUOSJOiyCIIhriomysFvbHczOpPf6MAiCIK5pJsbw9/o6tls9lMjwEwRB7CgT\nY/hrVmC3SPo+QRDEjjIxhn9ruwMA5PETBEHsMJNj+BuW4c+T4ScIgthJJsfwk8dPEASxK0yO4SeP\nnyAIYleYHMNPHj9BEMSuMIGGn7J6CIIgdpLJMfx2nR7y+AmCIHaSyTH82x1k04q9yApBEASxM0yO\n4W90yNsnCILYBSbC8Ou6gdp2lwK7BEEQu8BEGP56swvdMKhcA0EQxC4wEYa/WmsBAFXmJAiC2AUm\nw/DX2wAoh58gCGI3mAjDv1kzc/iLFNwlCILYcSbC8G/USeohCILYLSbC8G/WucdPwV2CIIidZiIM\nf7VGGj9BEMRuoUZ9yBiTAXwSwK0A2gDeq2naGeHz1wF4GIAE4BKAd2ua1mGMPQ1g0/raWU3T3hP1\nO5sU3CUIgtg1Ig0/gLcBSGuadg9j7C6YRv5tAMAYkwD8DoAf0zTtLGPsZwDcwBg7BwCapj2Q9CCq\ntTYUWUI+E3c4BEEQxKjEST33AngUADRNexzAncJnNwFYA/DzjLGvApjTNE0DcBuAPGPsS4yxL1sd\nRiQb9TZKM2lIkjTMORAEQRADEGf4SwC2hNd9S/4BgCUA9wD4TQA/AOAtjLEHAGwD+HVN034IwPsB\nfE7YJpDNepvq9BAEQewScYZ/C0BR/L6mabr1/xqAFzSTHsyRwZ0ATgH4HABomnba+t51UT/S7vRJ\n3ycIgtgl4kT1kwDeCuARxtjdAJ4VPjsLoMAYO24FfO8D8LsAHoIZDP4gY+wgzFHDlbgDKS/kUS4X\n4742FVA7OFBbOFBbOFBbjEac4f8CgAcZYyet1w8xxt4JoKBp2qcYY+8B8IdWoPekpmlfZIypAD7D\nGHuMbyOMEkLJKBIqldqw53HNUC4XqR0sqC0cqC0cqC0chu0AIw2/pmkGgA943j4lfP4VAHd5tukB\n+MlBD4TKNRAEQewOEzGBC6ByDQRBELvFxBj+Ii2yThAEsStMjOGndE6CIIjdYWIMP0k9BEEQu8NE\nGH5JAgpUmZMgCGJXmAjDX8ynocgTcSgEQRDXPBNhbdnR+b0+BIIgiKlhIgz/L/90bB03giAIYkxM\nhOGnqpwEQRC7x0QYfoIgCGL3IMNPEAQxZZDhJwiCmDLI8BMEQUwZZPgJgiCmDDL8BEEQUwYZfoIg\niCmDDD9BEMSUQYafIAhiyiDDTxAEMWWQ4ScIgpgyyPATBEFMGWT4CYIgpgwy/ARBEFMGGX6CIIgp\ngww/QRDElEGGnyAIYsogw08QBDFlkOEnCIKYMsjwEwRBTBlq1IeMMRnAJwHcCqAN4L2app0RPn8d\ngIcBSAAuAXg3gF7UNgRBEMTeEufxvw1AWtO0ewD8IkwjDwBgjEkAfgfAT2madh+ALwO4wdomE7QN\nQRAEsffEGf57ATwKAJqmPQ7gTuGzmwCsAfh5xthXAcxpmqZZ23wxZBuCIAhij4kz/CUAW8LrviX/\nAMASgHsA/CaAHwDwFsbYAzHbEARBEHtMpMYP04AXhdeypmm69f8agBcsLx+MsUdhevdR2xAEQRB7\nTJzhPwngrQAeYYzdDeBZ4bOzAAqMseNW8PY+AL8L4EzENmFI5XIx/ltTArWFA7WFA7WFA7XFaEiG\nYYR+aAVweYYOADwE4A4ABU3TPmVJOx+FmdVzUtO0DwVto2naqZ06AYIgCGIwIg0/QRAEce1BQVeC\nIIgpgww/QRDElEGGnyAIYsogw08QBDFlxKVz7ihxtYCudRhjKQCfBnAUQAbArwH4LoDfA6AD+DaA\nD2qaNjUReMbYPgBPAXgLzDb4PUxhWzDGfglmWnQKwP8LM7X69zBlbWHZiN+FWSlAB/AzAPqYorZg\njN0F4KOapj3AGDuBgHNnjP0MgPfBrJX2a5qm/VXUPvfa4w+tBTQlvAtARdO0+wH8MID/CLMNPmy9\nJwH4h3t4fLuK1RH+JwDbMM/9Y5jCtmCMvQnA663n4k0AbsT03hc/CGBG07Q3APi/AXwEU9QWjLFf\nAPApmI4hEPBMMMYOAPg5mJUUfgjAv2WMpaP2u9eGP6oW0DTwCIBfsf6XAXQB3K5p2mPWe1+EWQ5j\nWvh1AL8F4Ir1elrb4gcBPMcY+68A/gLAnwO4Y0rboglg1pofNAugg+lqixcAvAOmkQeCn4nXwZxH\n1dU0bcva5lbfngT22vBPdV0fTdO2NU2rM8aKMDuB/wPua1KHebNf8zDGfgrm6OevrbckODc7MEVt\nAaAMc6LkjwN4P4A/xPS2xUkAWQDfgzka/ASmqC00TfszmPINRzz3GsxzLwHYDHg/lL02slNf14cx\ndj2AvwXwWU3T/gtM7Y5TBFDdkwPbfR4C8CBj7CsAXgPg92EaQM40tcUqgL/WNK1nzXpvwf0gT1Nb\n/AJMb5bBvC8+CzPuwZmmtgDc9qEE89y9drQIYCNqJ3tt+E8C+HsAMEBdn2sGxth+AH8N4Bc0Tfs9\n6+1nGGNvtP7/EQCPBW17raFp2hs1TXuTpmkPAPgmzEV9Hp3GtgDwP2DGfMAYOwggD+DLU9oWM3BU\ngQ2YCSlT+YxYBJ37EwDuY4xlGGOzAG6GGfgNZU+zegB8AaaXd9J6/dBeHswe8GGYntyvMMa41v/P\nAHzCCs48D+BP9urg9hgDwD8H8KlpawtN0/6KMXY/Y+wJmM7ZPwXwEqawLWDGfT7DGPs6TE//l2Bm\nfU1bW/CsJd8zYWX1fALA12HeLx/WNK0TtTOq1UMQBDFl7LXUQxAEQewyZPgJgiCmDDL8BEEQUwYZ\nfoIgiCmDDD9BEMSUQYafIAhiyiDDTxAEMWWQ4ScIgpgy/n/
7EeWKaeFrwAAAAABJRU5ErkJggg==\n", + "text": [ + "" + ] + } + ], + "prompt_number": 18 + }, { "cell_type": "heading", "level": 1, @@ -453,17 +5424,38 @@ ], "language": "python", "metadata": {}, - "outputs": [] + "outputs": [], + "prompt_number": 7 }, { "cell_type": "code", "collapsed": false, "input": [ - "check_code_snippet('test/1', corp_train, corp_answer)" + "check_code_snippet('test/22', corp_train, corp_answer)" ], "language": "python", "metadata": {}, - "outputs": [] + "outputs": [ + { + "metadata": {}, + "output_type": "pyout", + "prompt_number": 8, + "text": [ + "[('javascript', 1.0),\n", + " ('java', 2.7703304550449276e-81),\n", + " ('clojure', 0.0),\n", + " ('scala', 0.0),\n", + " ('scheme', 0.0),\n", + " ('haskell', 0.0),\n", + " ('ocaml', 0.0),\n", + " ('perl', 0.0),\n", + " ('php', 0.0),\n", + " ('python', 0.0),\n", + " ('ruby', 0.0)]" + ] + } + ], + "prompt_number": 8 }, { "cell_type": "code", diff --git a/test/33 b/test/33 new file mode 100644 index 0000000..5f0b483 --- /dev/null +++ b/test/33 @@ -0,0 +1,56 @@ +#!/usr/local/bin/perl + +use English; +use Carp; +use Getopt::Long; + +sub Usage{ + my $message = shift; + + print STDERR $message, "\n" if $message; + print STDERR "\nUsage: $0 -d(ef) definition_file < source > newsource\n"; + + print STDERR <<'EOM'; + -d(ef) filename : Specifies the definition file which is a set of pairs, each corresponding to a "replacement pattern", e.g., + a A + b B + ... + z Z + would replace all lower cases with upper cases + + -h(elp) : display this message + +EOM + + exit(1); + +} + +if (! &GetOptions("help", "def=s") or + $opt_help) { + &Usage(); +} + +open(D, $opt_def) || die "can't open definition file:$opt_def\n"; +while () { + ($oldp, $newp) = split; + $dic{$oldp}=$newp; +} +close(D); + +$oldStr = ""; +$newStr =""; +while () { + $oldStr = $_; + foreach $k (keys %dic) { + s/$k/$dic{$k}/g; + } + + $newStr = $_; + if ($oldStr ne $newStr) { + print STDERR "\n"; + print STDERR "old>>$oldStr"; + print STDERR "new>>$newStr"; + } + print; +} diff --git a/test_fixed.csv b/test_fixed.csv index 63c3d03..a7f0fb5 100644 --- a/test_fixed.csv +++ b/test_fixed.csv @@ -1 +1 @@ -answers 1 4 4 4 9 9 9 2 2 2 11 1 11 11 3 3 10 10 7 7 1 7 5 5 1 8 8 8 8 4 \ No newline at end of file +answers 1 4 4 4 9 9 9 2 2 2 11 1 11 11 3 3 10 10 7 7 1 7 5 5 6 1 8 8 8 8 4 \ No newline at end of file From 6ea84782930d924e171ef0ab4ad427da40d92f1b Mon Sep 17 00:00:00 2001 From: Zack Cooper Date: Sun, 15 Feb 2015 23:15:15 -0500 Subject: [PATCH 4/6] finished project --- corpus_build.py | 7 +- lang classifier live.ipynb | 2558 ++++++++++-------------------------- 2 files changed, 687 insertions(+), 1878 deletions(-) diff --git a/corpus_build.py b/corpus_build.py index 2bf6982..b659079 100644 --- a/corpus_build.py +++ b/corpus_build.py @@ -64,7 +64,12 @@ ('var_exists', r"\bvar\b"), ('star_count', r"\b\*\b"), ('dollar_sign', r"\$"), - ('val_exists', r"\bval\b")] + ('val_exists', r"\bval\b"), + ('else_if', r"(else if)"), + ('elif', r"(elif)"), + ('elif', r"(print )"), + ('elif', r"(print\()"), + ('void', r"(void)")] class Corpus(): diff --git a/lang classifier live.ipynb b/lang classifier live.ipynb index 9d0abb2..3b16179 100644 --- a/lang classifier live.ipynb +++ b/lang classifier live.ipynb @@ -1,7 +1,7 @@ { "metadata": { "name": "", - "signature": "sha256:d283fdea1a61866f7cbeaeeaafff75fcae1237f43b5990f43814c78cd7baa485" + "signature": "sha256:351106de656a94ded25f92477ba635151dc3e3be72b67d7f2d9459b9b4fb2be8" }, "nbformat": 3, "nbformat_minor": 0, @@ -54,6 +54,8 @@ " 
star_count\n", " dollar_sign\n", " val_exists\n", + " else_if\n", + " elif\n", " \n", " \n", " \n", @@ -74,6 +76,8 @@ " 0.000000\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 1 \n", @@ -92,6 +96,8 @@ " 0.000000\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 2 \n", @@ -110,6 +116,8 @@ " 0.000000\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 3 \n", @@ -128,6 +136,8 @@ " 0.001235\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 4 \n", @@ -146,6 +156,8 @@ " 0.001059\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 5 \n", @@ -164,6 +176,8 @@ " 0.001146\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 6 \n", @@ -182,6 +196,8 @@ " 0.000000\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 7 \n", @@ -200,6 +216,8 @@ " 0.001420\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 8 \n", @@ -218,6 +236,8 @@ " 0.000000\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 9 \n", @@ -236,6 +256,8 @@ " 0.002193\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.002193\n", " \n", " \n", " 10 \n", @@ -254,6 +276,8 @@ " 0.000000\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 11 \n", @@ -272,6 +296,8 @@ " 0.000000\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 12 \n", @@ -290,6 +316,8 @@ " 0.000000\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 13 \n", @@ -308,6 +336,8 @@ " 0.000000\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 14 \n", @@ -326,6 +356,8 @@ " 0.001229\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 15 \n", @@ -344,6 +376,8 @@ " 0.000000\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 16 \n", @@ -362,6 +396,8 @@ " 0.000000\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 17 \n", @@ -380,6 +416,8 @@ " 0.000000\n", " 0.042506\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 18 \n", @@ -398,6 +436,8 @@ " 0.000000\n", " 0.037637\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 19 \n", @@ -416,6 +456,8 @@ " 0.000000\n", " 0.046261\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 20 \n", @@ -434,6 +476,8 @@ " 0.000000\n", " 0.041609\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 21 \n", @@ -452,6 +496,8 @@ " 0.000000\n", " 0.044038\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 22 \n", @@ -470,6 +516,8 @@ " 0.000000\n", " 0.057474\n", " 0.000000\n", + " 0.000258\n", + " 0.000000\n", " \n", " \n", " 23 \n", @@ -488,6 +536,8 @@ " 0.000000\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.001670\n", " \n", " \n", " 24 \n", @@ -506,6 +556,8 @@ " 0.000000\n", " 0.000000\n", " 0.004662\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 25 \n", @@ -524,6 +576,8 @@ " 0.000000\n", " 0.000000\n", " 0.001449\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 26 \n", @@ -542,6 +596,8 @@ " 0.000000\n", " 0.000000\n", " 0.000738\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 27 \n", @@ -560,6 +616,8 @@ " 0.000000\n", " 0.000000\n", " 0.003218\n", + " 0.000000\n", + " 0.002574\n", " \n", " \n", " 28 \n", @@ -578,6 +636,8 @@ " 
0.002432\n", " 0.000000\n", " 0.004863\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 29 \n", @@ -596,6 +656,8 @@ " 0.002290\n", " 0.000000\n", " 0.004580\n", + " 0.000000\n", + " 0.003053\n", " \n", " \n", " ...\n", @@ -614,6 +676,8 @@ " ...\n", " ...\n", " ...\n", + " ...\n", + " ...\n", " \n", " \n", " 357\n", @@ -632,6 +696,8 @@ " 0.000000\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000693\n", " \n", " \n", " 358\n", @@ -650,6 +716,8 @@ " 0.000000\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000791\n", " \n", " \n", " 359\n", @@ -668,6 +736,8 @@ " 0.000000\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000600\n", " \n", " \n", " 360\n", @@ -686,6 +756,8 @@ " 0.000000\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 361\n", @@ -704,6 +776,8 @@ " 0.000000\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 362\n", @@ -722,6 +796,8 @@ " 0.000000\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 363\n", @@ -740,6 +816,8 @@ " 0.000836\n", " 0.000000\n", " 0.003344\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 364\n", @@ -758,6 +836,8 @@ " 0.000000\n", " 0.000000\n", " 0.004110\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 365\n", @@ -776,6 +856,8 @@ " 0.001238\n", " 0.000000\n", " 0.003465\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 366\n", @@ -794,6 +876,8 @@ " 0.001146\n", " 0.000000\n", " 0.003208\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 367\n", @@ -812,6 +896,8 @@ " 0.000000\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 368\n", @@ -830,6 +916,8 @@ " 0.000000\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 369\n", @@ -848,6 +936,8 @@ " 0.000000\n", " 0.001149\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 370\n", @@ -866,6 +956,8 @@ " 0.000000\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 371\n", @@ -884,6 +976,8 @@ " 0.000000\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 372\n", @@ -902,6 +996,8 @@ " 0.000000\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 373\n", @@ -920,6 +1016,8 @@ " 0.000000\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 374\n", @@ -938,6 +1036,8 @@ " 0.000000\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 375\n", @@ -956,6 +1056,8 @@ " 0.000000\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 376\n", @@ -974,6 +1076,8 @@ " 0.000000\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 377\n", @@ -992,6 +1096,8 @@ " 0.000000\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 378\n", @@ -1010,6 +1116,8 @@ " 0.000000\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 379\n", @@ -1028,6 +1136,8 @@ " 0.000000\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 380\n", @@ -1046,6 +1156,8 @@ " 0.000000\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 381\n", @@ -1064,6 +1176,8 @@ " 0.000000\n", " 0.032979\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 382\n", @@ -1082,6 +1196,8 @@ " 0.000000\n", " 0.015960\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 383\n", @@ -1100,6 +1216,8 
@@ " 0.001045\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.001045\n", " \n", " \n", " 384\n", @@ -1118,6 +1236,8 @@ " 0.000000\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.001218\n", " \n", " \n", " 385\n", @@ -1136,6 +1256,8 @@ " 0.000000\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", " 386\n", @@ -1154,15 +1276,17 @@ " 0.000000\n", " 0.000000\n", " 0.004243\n", + " 0.000000\n", + " 0.000000\n", " \n", " \n", "\n", - "

387 rows \u00d7 15 columns

\n", + "

387 rows \u00d7 17 columns

\n", "" ], "metadata": {}, "output_type": "pyout", - "prompt_number": 1, + "prompt_number": 9, "text": [ " hit_num parent_count double_colon let_exists less_minus paren_star \\\n", "0 1 0.071901 0.000000 0.002066 0.000000 0.001240 \n", @@ -1290,74 +1414,74 @@ "385 0.000000 0.000000 0.000000 0.000000 0.001304 \n", "386 0.002829 0.000000 0.019802 0.000000 0.004243 \n", "\n", - " var_exists star_count dollar_sign val_exists \n", - "0 0.000000 0.000000 0.000000 0.000000 \n", - "1 0.000000 0.000000 0.000000 0.000000 \n", - "2 0.000000 0.000000 0.000000 0.000000 \n", - "3 0.000000 0.001235 0.000000 0.000000 \n", - "4 0.000000 0.001059 0.000000 0.000000 \n", - "5 0.000000 0.001146 0.000000 0.000000 \n", - "6 0.000000 0.000000 0.000000 0.000000 \n", - "7 0.000000 0.001420 0.000000 0.000000 \n", - "8 0.000000 0.000000 0.000000 0.000000 \n", - "9 0.006579 0.002193 0.000000 0.000000 \n", - "10 0.000000 0.000000 0.000000 0.000000 \n", - "11 0.000000 0.000000 0.000000 0.000000 \n", - "12 0.000000 0.000000 0.000000 0.000000 \n", - "13 0.000000 0.000000 0.000000 0.000000 \n", - "14 0.000000 0.001229 0.000000 0.000000 \n", - "15 0.000000 0.000000 0.000000 0.000000 \n", - "16 0.000000 0.000000 0.000000 0.000000 \n", - "17 0.000000 0.000000 0.042506 0.000000 \n", - "18 0.000000 0.000000 0.037637 0.000000 \n", - "19 0.000000 0.000000 0.046261 0.000000 \n", - "20 0.000000 0.000000 0.041609 0.000000 \n", - "21 0.000000 0.000000 0.044038 0.000000 \n", - "22 0.000000 0.000000 0.057474 0.000000 \n", - "23 0.000000 0.000000 0.000000 0.000000 \n", - "24 0.000000 0.000000 0.000000 0.004662 \n", - "25 0.000000 0.000000 0.000000 0.001449 \n", - "26 0.000000 0.000000 0.000000 0.000738 \n", - "27 0.000644 0.000000 0.000000 0.003218 \n", - "28 0.001216 0.002432 0.000000 0.004863 \n", - "29 0.001527 0.002290 0.000000 0.004580 \n", - ".. ... ... ... ... 
\n", - "357 0.000000 0.000000 0.000000 0.000000 \n", - "358 0.000000 0.000000 0.000000 0.000000 \n", - "359 0.000000 0.000000 0.000000 0.000000 \n", - "360 0.000000 0.000000 0.000000 0.000000 \n", - "361 0.000000 0.000000 0.000000 0.000000 \n", - "362 0.000000 0.000000 0.000000 0.000000 \n", - "363 0.004181 0.000836 0.000000 0.003344 \n", - "364 0.003288 0.000000 0.000000 0.004110 \n", - "365 0.002475 0.001238 0.000000 0.003465 \n", - "366 0.002521 0.001146 0.000000 0.003208 \n", - "367 0.000000 0.000000 0.000000 0.000000 \n", - "368 0.000000 0.000000 0.000000 0.000000 \n", - "369 0.000000 0.000000 0.001149 0.000000 \n", - "370 0.000000 0.000000 0.000000 0.000000 \n", - "371 0.000000 0.000000 0.000000 0.000000 \n", - "372 0.000000 0.000000 0.000000 0.000000 \n", - "373 0.000000 0.000000 0.000000 0.000000 \n", - "374 0.000000 0.000000 0.000000 0.000000 \n", - "375 0.000000 0.000000 0.000000 0.000000 \n", - "376 0.000000 0.000000 0.000000 0.000000 \n", - "377 0.000000 0.000000 0.000000 0.000000 \n", - "378 0.000000 0.000000 0.000000 0.000000 \n", - "379 0.000000 0.000000 0.000000 0.000000 \n", - "380 0.000000 0.000000 0.000000 0.000000 \n", - "381 0.000000 0.000000 0.032979 0.000000 \n", - "382 0.000000 0.000000 0.015960 0.000000 \n", - "383 0.000000 0.001045 0.000000 0.000000 \n", - "384 0.000000 0.000000 0.000000 0.000000 \n", - "385 0.000000 0.000000 0.000000 0.000000 \n", - "386 0.001414 0.000000 0.000000 0.004243 \n", - "\n", - "[387 rows x 15 columns]" + " var_exists star_count dollar_sign val_exists else_if elif \n", + "0 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", + "1 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", + "2 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", + "3 0.000000 0.001235 0.000000 0.000000 0.000000 0.000000 \n", + "4 0.000000 0.001059 0.000000 0.000000 0.000000 0.000000 \n", + "5 0.000000 0.001146 0.000000 0.000000 0.000000 0.000000 \n", + "6 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", + "7 0.000000 0.001420 0.000000 0.000000 0.000000 0.000000 \n", + "8 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", + "9 0.006579 0.002193 0.000000 0.000000 0.000000 0.002193 \n", + "10 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", + "11 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", + "12 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", + "13 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", + "14 0.000000 0.001229 0.000000 0.000000 0.000000 0.000000 \n", + "15 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", + "16 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", + "17 0.000000 0.000000 0.042506 0.000000 0.000000 0.000000 \n", + "18 0.000000 0.000000 0.037637 0.000000 0.000000 0.000000 \n", + "19 0.000000 0.000000 0.046261 0.000000 0.000000 0.000000 \n", + "20 0.000000 0.000000 0.041609 0.000000 0.000000 0.000000 \n", + "21 0.000000 0.000000 0.044038 0.000000 0.000000 0.000000 \n", + "22 0.000000 0.000000 0.057474 0.000000 0.000258 0.000000 \n", + "23 0.000000 0.000000 0.000000 0.000000 0.000000 0.001670 \n", + "24 0.000000 0.000000 0.000000 0.004662 0.000000 0.000000 \n", + "25 0.000000 0.000000 0.000000 0.001449 0.000000 0.000000 \n", + "26 0.000000 0.000000 0.000000 0.000738 0.000000 0.000000 \n", + "27 0.000644 0.000000 0.000000 0.003218 0.000000 0.002574 \n", + "28 0.001216 0.002432 0.000000 0.004863 0.000000 0.000000 \n", + "29 0.001527 0.002290 0.000000 0.004580 0.000000 0.003053 \n", + ".. ... ... ... ... ... ... 
\n", + "357 0.000000 0.000000 0.000000 0.000000 0.000000 0.000693 \n", + "358 0.000000 0.000000 0.000000 0.000000 0.000000 0.000791 \n", + "359 0.000000 0.000000 0.000000 0.000000 0.000000 0.000600 \n", + "360 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", + "361 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", + "362 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", + "363 0.004181 0.000836 0.000000 0.003344 0.000000 0.000000 \n", + "364 0.003288 0.000000 0.000000 0.004110 0.000000 0.000000 \n", + "365 0.002475 0.001238 0.000000 0.003465 0.000000 0.000000 \n", + "366 0.002521 0.001146 0.000000 0.003208 0.000000 0.000000 \n", + "367 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", + "368 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", + "369 0.000000 0.000000 0.001149 0.000000 0.000000 0.000000 \n", + "370 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", + "371 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", + "372 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", + "373 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", + "374 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", + "375 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", + "376 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", + "377 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", + "378 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", + "379 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", + "380 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", + "381 0.000000 0.000000 0.032979 0.000000 0.000000 0.000000 \n", + "382 0.000000 0.000000 0.015960 0.000000 0.000000 0.000000 \n", + "383 0.000000 0.001045 0.000000 0.000000 0.000000 0.001045 \n", + "384 0.000000 0.000000 0.000000 0.000000 0.000000 0.001218 \n", + "385 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", + "386 0.001414 0.000000 0.000000 0.004243 0.000000 0.000000 \n", + "\n", + "[387 rows x 17 columns]" ] } ], - "prompt_number": 1 + "prompt_number": 9 }, { "cell_type": "code", @@ -1392,6 +1516,8 @@ " star_count\n", " dollar_sign\n", " val_exists\n", + " else_if\n", + " elif\n", " answers\n", " \n", " \n", @@ -1412,6 +1538,8 @@ " 0\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " 1\n", " \n", " \n", @@ -1430,6 +1558,8 @@ " 0\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " 4\n", " \n", " \n", @@ -1448,6 +1578,8 @@ " 0\n", " 0.000173\n", " 0.000346\n", + " 0.000288\n", + " 0.000000\n", " 4\n", " \n", " \n", @@ -1466,6 +1598,8 @@ " 0\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " 4\n", " \n", " \n", @@ -1484,6 +1618,8 @@ " 0\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " 9\n", " \n", " \n", @@ -1502,6 +1638,8 @@ " 0\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " 9\n", " \n", " \n", @@ -1520,6 +1658,8 @@ " 0\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " 9\n", " \n", " \n", @@ -1538,6 +1678,8 @@ " 0\n", " 0.002548\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " 2\n", " \n", " \n", @@ -1556,6 +1698,8 @@ " 0\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " 2\n", " \n", " \n", @@ -1574,6 +1718,8 @@ " 0\n", " 0.004912\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " 2\n", " \n", " \n", @@ -1592,6 +1738,8 @@ " 0\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " 11\n", " \n", " \n", @@ -1610,6 +1758,8 @@ " 0\n", " 0.000000\n", " 
0.000000\n", + " 0.000000\n", + " 0.000000\n", " 1\n", " \n", " \n", @@ -1628,6 +1778,8 @@ " 0\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " 11\n", " \n", " \n", @@ -1646,6 +1798,8 @@ " 0\n", " 0.000894\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " 11\n", " \n", " \n", @@ -1664,6 +1818,8 @@ " 0\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " 3\n", " \n", " \n", @@ -1682,6 +1838,8 @@ " 0\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " 3\n", " \n", " \n", @@ -1700,6 +1858,8 @@ " 0\n", " 0.008941\n", " 0.003577\n", + " 0.000000\n", + " 0.000000\n", " 10\n", " \n", " \n", @@ -1718,6 +1878,8 @@ " 0\n", " 0.000000\n", " 0.007825\n", + " 0.000000\n", + " 0.000000\n", " 10\n", " \n", " \n", @@ -1736,6 +1898,8 @@ " 0\n", " 0.014591\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " 7\n", " \n", " \n", @@ -1754,6 +1918,8 @@ " 0\n", " 0.014749\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " 7\n", " \n", " \n", @@ -1772,6 +1938,8 @@ " 0\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " 1\n", " \n", " \n", @@ -1790,6 +1958,8 @@ " 0\n", " 0.020983\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " 7\n", " \n", " \n", @@ -1808,6 +1978,8 @@ " 0\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " 5\n", " \n", " \n", @@ -1826,6 +1998,8 @@ " 0\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " 5\n", " \n", " \n", @@ -1844,6 +2018,8 @@ " 0\n", " 0.024007\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " 6\n", " \n", " \n", @@ -1862,6 +2038,8 @@ " 0\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " 1\n", " \n", " \n", @@ -1880,6 +2058,8 @@ " 0\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.004983\n", " 8\n", " \n", " \n", @@ -1898,6 +2078,8 @@ " 0\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " 8\n", " \n", " \n", @@ -1916,6 +2098,8 @@ " 0\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " 8\n", " \n", " \n", @@ -1934,6 +2118,8 @@ " 0\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " 8\n", " \n", " \n", @@ -1952,6 +2138,8 @@ " 0\n", " 0.000000\n", " 0.000000\n", + " 0.000000\n", + " 0.000000\n", " 4\n", " \n", " \n", @@ -1960,7 +2148,7 @@ ], "metadata": {}, "output_type": "pyout", - "prompt_number": 2, + "prompt_number": 10, "text": [ " parent_count double_colon let_exists less_minus paren_star \\\n", "0 0.045734 0.000000 0.001759 0.000000 0.000000 \n", @@ -2028,60 +2216,89 @@ "29 0.003059 0.000000 0.000888 0.000000 0.000000 \n", "30 0.000000 0.000000 0.020958 0.000000 0.000000 \n", "\n", - " var_exists star_count dollar_sign val_exists answers \n", - "0 0.000000 0 0.000000 0.000000 1 \n", - "1 0.002865 0 0.000000 0.000000 4 \n", - "2 0.002477 0 0.000173 0.000346 4 \n", - "3 0.005882 0 0.000000 0.000000 4 \n", - "4 0.000000 0 0.000000 0.000000 9 \n", - "5 0.000000 0 0.000000 0.000000 9 \n", - "6 0.000000 0 0.000000 0.000000 9 \n", - "7 0.000000 0 0.002548 0.000000 2 \n", - "8 0.000000 0 0.000000 0.000000 2 \n", - "9 0.000000 0 0.004912 0.000000 2 \n", - "10 0.000000 0 0.000000 0.000000 11 \n", - "11 0.000000 0 0.000000 0.000000 1 \n", - "12 0.000000 0 0.000000 0.000000 11 \n", - "13 0.000000 0 0.000894 0.000000 11 \n", - "14 0.000000 0 0.000000 0.000000 3 \n", - "15 0.000000 0 0.000000 0.000000 3 \n", - "16 0.000000 0 0.008941 0.003577 10 \n", - "17 0.000000 0 0.000000 0.007825 10 \n", - "18 0.000301 0 0.014591 0.000000 7 \n", - "19 0.000000 0 0.014749 0.000000 7 \n", - 
"20 0.000000 0 0.000000 0.000000 1 \n", - "21 0.000000 0 0.020983 0.000000 7 \n", - "22 0.000397 0 0.000000 0.000000 5 \n", - "23 0.000000 0 0.000000 0.000000 5 \n", - "24 0.000000 0 0.024007 0.000000 6 \n", - "25 0.000000 0 0.000000 0.000000 1 \n", - "26 0.000000 0 0.000000 0.000000 8 \n", - "27 0.000000 0 0.000000 0.000000 8 \n", - "28 0.000000 0 0.000000 0.000000 8 \n", - "29 0.000000 0 0.000000 0.000000 8 \n", - "30 0.002246 0 0.000000 0.000000 4 " + " var_exists star_count dollar_sign val_exists else_if elif \\\n", + "0 0.000000 0 0.000000 0.000000 0.000000 0.000000 \n", + "1 0.002865 0 0.000000 0.000000 0.000000 0.000000 \n", + "2 0.002477 0 0.000173 0.000346 0.000288 0.000000 \n", + "3 0.005882 0 0.000000 0.000000 0.000000 0.000000 \n", + "4 0.000000 0 0.000000 0.000000 0.000000 0.000000 \n", + "5 0.000000 0 0.000000 0.000000 0.000000 0.000000 \n", + "6 0.000000 0 0.000000 0.000000 0.000000 0.000000 \n", + "7 0.000000 0 0.002548 0.000000 0.000000 0.000000 \n", + "8 0.000000 0 0.000000 0.000000 0.000000 0.000000 \n", + "9 0.000000 0 0.004912 0.000000 0.000000 0.000000 \n", + "10 0.000000 0 0.000000 0.000000 0.000000 0.000000 \n", + "11 0.000000 0 0.000000 0.000000 0.000000 0.000000 \n", + "12 0.000000 0 0.000000 0.000000 0.000000 0.000000 \n", + "13 0.000000 0 0.000894 0.000000 0.000000 0.000000 \n", + "14 0.000000 0 0.000000 0.000000 0.000000 0.000000 \n", + "15 0.000000 0 0.000000 0.000000 0.000000 0.000000 \n", + "16 0.000000 0 0.008941 0.003577 0.000000 0.000000 \n", + "17 0.000000 0 0.000000 0.007825 0.000000 0.000000 \n", + "18 0.000301 0 0.014591 0.000000 0.000000 0.000000 \n", + "19 0.000000 0 0.014749 0.000000 0.000000 0.000000 \n", + "20 0.000000 0 0.000000 0.000000 0.000000 0.000000 \n", + "21 0.000000 0 0.020983 0.000000 0.000000 0.000000 \n", + "22 0.000397 0 0.000000 0.000000 0.000000 0.000000 \n", + "23 0.000000 0 0.000000 0.000000 0.000000 0.000000 \n", + "24 0.000000 0 0.024007 0.000000 0.000000 0.000000 \n", + "25 0.000000 0 0.000000 0.000000 0.000000 0.000000 \n", + "26 0.000000 0 0.000000 0.000000 0.000000 0.004983 \n", + "27 0.000000 0 0.000000 0.000000 0.000000 0.000000 \n", + "28 0.000000 0 0.000000 0.000000 0.000000 0.000000 \n", + "29 0.000000 0 0.000000 0.000000 0.000000 0.000000 \n", + "30 0.002246 0 0.000000 0.000000 0.000000 0.000000 \n", + "\n", + " answers \n", + "0 1 \n", + "1 4 \n", + "2 4 \n", + "3 4 \n", + "4 9 \n", + "5 9 \n", + "6 9 \n", + "7 2 \n", + "8 2 \n", + "9 2 \n", + "10 11 \n", + "11 1 \n", + "12 11 \n", + "13 11 \n", + "14 3 \n", + "15 3 \n", + "16 10 \n", + "17 10 \n", + "18 7 \n", + "19 7 \n", + "20 1 \n", + "21 7 \n", + "22 5 \n", + "23 5 \n", + "24 6 \n", + "25 1 \n", + "26 8 \n", + "27 8 \n", + "28 8 \n", + "29 8 \n", + "30 4 " ] } ], - "prompt_number": 2 + "prompt_number": 10 }, { "cell_type": "heading", "level": 1, "metadata": {}, "source": [ - "Choosing the Best Model" + "RandomForestClassifier Model" ] }, { "cell_type": "code", "collapsed": false, "input": [ - "from sklearn.naive_bayes import GaussianNB\n", - "from sklearn.neighbors import KNeighborsClassifier\n", - "from sklearn.tree import DecisionTreeClassifier\n", "from sklearn.ensemble import RandomForestClassifier\n", - "from sklearn.cluster import KMeans\n", "from sklearn import metrics\n", "import numpy as np\n", "import seaborn as sbn\n", @@ -2104,7 +2321,7 @@ "language": "python", "metadata": {}, "outputs": [], - "prompt_number": 3 + "prompt_number": 11 }, { "cell_type": "code", @@ -2121,25 +2338,25 @@ "text": [ " precision recall f1-score support\n", "\n", - " 1 0.50 
0.25 0.33 4\n", + " 1 1.00 0.50 0.67 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.50 0.50 0.50 2\n", + " 3 0.00 0.00 0.00 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", - " 8 0.75 0.75 0.75 4\n", - " 9 1.00 1.00 1.00 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 0.60 1.00 0.75 3\n", "\n", - "avg / total 0.81 0.77 0.76 31\n", + "avg / total 0.84 0.77 0.76 31\n", "\n", - "[[1 0 2 0 0 0 0 0 0 1 0]\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [1 0 0 0 1 0 0 0 0 0 0]\n", + " [0 0 0 0 0 0 0 0 0 0 2]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", @@ -2151,20 +2368,20 @@ { "metadata": {}, "output_type": "pyout", - "prompt_number": 19, + "prompt_number": 12, "text": [ - "0.760752688172043" + "0.76113671274961581" ] } ], - "prompt_number": 19 + "prompt_number": 12 }, { "cell_type": "code", "collapsed": false, "input": [ "answers = []\n", - "for num in range(5,100):\n", + "for num in range(20,70):\n", " answers.append((num, run_test_model(RandomForestClassifier(num), corp_train, corp_answer, test_train, test_answer)))\n", "answers " ], @@ -2177,1423 +2394,112 @@ "text": [ " precision recall f1-score support\n", "\n", - " 1 0.50 1.00 0.67 4\n", - " 10 0.00 0.00 0.00 2\n", - " 11 1.00 0.33 0.50 3\n", + " 1 0.50 0.50 0.50 4\n", + " 10 1.00 1.00 1.00 2\n", + " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", " 3 0.00 0.00 0.00 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 0.50 0.67 2\n", + " 4 0.80 1.00 0.89 4\n", + " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", - " 8 0.75 0.75 0.75 4\n", - " 9 0.60 1.00 0.75 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.71 0.68 0.64 31\n", + "avg / total 0.78 0.77 0.75 31\n", "\n", - "[[4 0 0 0 0 0 0 0 0 0 0]\n", - " [1 0 0 0 0 0 0 0 0 1 0]\n", - " [2 0 1 0 0 0 0 0 0 0 0]\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", + " [0 2 0 0 0 0 0 0 0 0 0]\n", + " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 2]\n", + " [2 0 0 0 0 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [1 0 0 0 0 0 1 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 0.50 0.25 0.33 4\n", + " 1 0.50 0.50 0.50 4\n", " 10 1.00 1.00 1.00 2\n", - " 11 0.50 1.00 0.67 3\n", + " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", " 3 0.00 0.00 0.00 2\n", " 4 1.00 1.00 1.00 4\n", - " 5 1.00 0.50 0.67 2\n", + " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", - " 8 0.60 0.75 0.67 4\n", + " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.75 0.71 0.69 31\n", + "avg / total 0.81 0.77 0.76 31\n", "\n", - "[[1 0 2 0 0 0 0 0 0 1 0]\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [1 0 0 0 0 0 0 0 0 1 0]\n", + " [2 0 0 0 0 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 1 0 0 0 1 0 0 0 0]\n", + " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", " [0 0 0 0 1 0 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 1.00 0.25 0.40 4\n", + " 1 1.00 0.50 0.67 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 
1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", + " 3 0.50 0.50 0.50 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", - " 6 0.50 1.00 0.67 1\n", - " 7 1.00 0.67 0.80 3\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", " 8 0.75 0.75 0.75 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.89 0.84 0.82 31\n", + "avg / total 0.88 0.81 0.80 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ "\n", - "[[1 0 2 0 0 0 0 0 0 1 0]\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 1 0 0 0 0 1 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 1 2 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", " [0 0 0 0 1 0 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 1.00 0.50 0.67 4\n", + " 1 0.50 0.50 0.50 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", " 3 0.00 0.00 0.00 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", - " 6 0.50 1.00 0.67 1\n", - " 7 1.00 0.67 0.80 3\n", - " 8 0.60 0.75 0.67 4\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.83 0.81 0.80 31\n" - ] - }, - { - "output_type": "stream", - "stream": "stderr", - "text": [ - "/Users/zackjcooper/.pyenv/versions/sandbox/lib/python3.4/site-packages/sklearn/metrics/metrics.py:1771: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples.\n", - " 'precision', 'predicted', average, warn_for)\n", - "/Users/zackjcooper/.pyenv/versions/sandbox/lib/python3.4/site-packages/sklearn/metrics/metrics.py:1771: UndefinedMetricWarning: F-score is ill-defined and being set to 0.0 in labels with no predicted samples.\n", - " 'precision', 'predicted', average, warn_for)\n", - "/Users/zackjcooper/.pyenv/versions/sandbox/lib/python3.4/site-packages/sklearn/metrics/metrics.py:1771: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples.\n", - " 'precision', 'predicted', average, warn_for)\n" - ] - }, - { - "output_type": "stream", - "stream": "stdout", - "text": [ - "\n", - "[[2 0 2 0 0 0 0 0 0 0 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 2 0]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 1 2 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]\n", - " precision recall f1-score support\n", - "\n", - " 1 0.67 0.50 0.57 4\n", - " 10 1.00 1.00 1.00 2\n", - " 11 0.50 0.67 0.57 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.50 0.50 0.50 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.25 1.00 0.40 1\n", - " 7 0.00 0.00 0.00 3\n", - " 8 0.75 0.75 0.75 4\n", - " 9 0.67 0.67 0.67 3\n", - "\n", - "avg / total 0.69 0.71 0.69 31\n", - "\n", - "[[2 0 2 0 0 0 0 0 0 0 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - " [1 0 2 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 1 0 0 0 0 0 1]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 3 0 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 1 2]]\n", - " precision recall f1-score support\n", - "\n", - " 1 0.50 0.50 0.50 4\n", - 
" 10 1.00 0.50 0.67 2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.67 0.80 3\n", - " 8 1.00 0.75 0.86 4\n", - " 9 1.00 1.00 1.00 3\n", - "\n", - "avg / total 0.81 0.77 0.77 31\n", - "\n", - "[[2 0 2 0 0 0 0 0 0 0 0]\n", - " [0 1 0 0 0 0 0 1 0 0 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [2 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 1 2 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]\n", - " precision recall f1-score support\n", - "\n", - " 1 1.00 0.25 0.40 4\n", - " 10 1.00 1.00 1.00 2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.50 0.50 0.50 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", - " 8 0.75 0.75 0.75 4\n", - " 9 0.75 1.00 0.86 3\n", - "\n", - "avg / total 0.85 0.77 0.76 31\n" - ] - }, - { - "output_type": "stream", - "stream": "stdout", - "text": [ - "\n", - "[[1 0 2 0 0 0 0 0 0 1 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 1 0 0 0 0 0 1]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]\n", - " precision recall f1-score support\n", - "\n", - " 1 1.00 0.50 0.67 4\n", - " 10 1.00 1.00 1.00 2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 0.67 0.80 3\n", - " 3 0.00 0.00 0.00 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 0.50 0.33 0.40 3\n", - " 8 1.00 0.75 0.86 4\n", - " 9 0.60 1.00 0.75 3\n", - "\n", - "avg / total 0.79 0.74 0.73 31\n", - "\n", - "[[2 0 2 0 0 0 0 0 0 0 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 2 0 0 0 0 1 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 2]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]\n", - " precision recall f1-score support\n", - "\n", - " 1 1.00 0.50 0.67 4\n", - " 10 1.00 0.50 0.67 2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", - " 8 0.50 0.75 0.60 4\n", - " 9 1.00 1.00 1.00 3\n", - "\n", - "avg / total 0.81 0.74 0.73 31\n" - ] - }, - { - "output_type": "stream", - "stream": "stdout", - "text": [ - "\n", - "[[2 0 2 0 0 0 0 0 0 0 0]\n", - " [0 1 0 0 0 0 0 0 0 1 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 2 0]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]\n", - " precision recall f1-score support\n", - "\n", - " 1 1.00 0.50 0.67 4\n", - " 10 1.00 1.00 1.00 2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.50 1.00 0.67 1\n", - " 7 1.00 0.67 0.80 3\n", - " 8 1.00 0.75 0.86 4\n", - " 9 1.00 1.00 1.00 3\n", - "\n", - "avg / total 0.92 0.87 0.87 31\n", - "\n", - "[[2 0 2 0 0 0 0 0 0 0 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - 
" [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 2 0 0 0 0 0 0]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 1 2 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]\n", - " precision recall f1-score support\n", - "\n", - " 1 0.33 0.25 0.29 4\n", - " 10 1.00 1.00 1.00 2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", - " 8 0.75 0.75 0.75 4\n", - " 9 1.00 1.00 1.00 3\n", - "\n", - "avg / total 0.76 0.74 0.72 31\n" - ] - }, - { - "output_type": "stream", - "stream": "stdout", - "text": [ - "\n", - "[[1 0 2 0 0 0 0 0 0 1 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [2 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]\n", - " precision recall f1-score support\n", - "\n", - " 1 1.00 0.50 0.67 4\n", - " 10 1.00 1.00 1.00 2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.50 1.00 0.67 1\n", - " 7 1.00 0.67 0.80 3\n", - " 8 1.00 0.75 0.86 4\n", - " 9 1.00 1.00 1.00 3\n", - "\n", - "avg / total 0.92 0.87 0.87 31\n", - "\n", - "[[2 0 2 0 0 0 0 0 0 0 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 2 0 0 0 0 0 0]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 1 2 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]\n", - " precision recall f1-score support\n", - "\n", - " 1 1.00 0.25 0.40 4\n", - " 10 1.00 1.00 1.00 2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", - " 8 0.60 0.75 0.67 4\n", - " 9 1.00 0.67 0.80 3\n", - "\n", - "avg / total 0.87 0.77 0.76 31\n" - ] - }, - { - "output_type": "stream", - "stream": "stdout", - "text": [ - "\n", - "[[1 0 2 0 0 0 0 0 0 1 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 2 0 0 0 0 0 0]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 1 2]]\n", - " precision recall f1-score support\n", - "\n", - " 1 1.00 0.25 0.40 4\n", - " 10 1.00 1.00 1.00 2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", - " 8 0.60 0.75 0.67 4\n", - " 9 0.75 1.00 0.86 3\n", - "\n", - "avg / total 0.80 0.74 0.71 31\n", - "\n", - "[[1 0 2 0 0 0 0 0 0 1 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 1 1]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]\n", - " precision recall f1-score support\n", - "\n", - " 1 1.00 0.50 0.67 4\n", - " 10 1.00 1.00 1.00 
2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", - " 8 1.00 0.75 0.86 4\n", - " 9 1.00 1.00 1.00 3\n", - "\n", - "avg / total 0.92 0.84 0.84 31\n" - ] - }, - { - "output_type": "stream", - "stream": "stdout", - "text": [ - "\n", - "[[2 0 2 0 0 0 0 0 0 0 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 2 0 0 0 0 0 0]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]\n", - " precision recall f1-score support\n", - "\n", - " 1 1.00 0.25 0.40 4\n", - " 10 1.00 1.00 1.00 2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", - " 8 0.60 0.75 0.67 4\n", - " 9 1.00 0.67 0.80 3\n", - "\n", - "avg / total 0.87 0.77 0.76 31\n", - "\n", - "[[1 0 2 0 0 0 0 0 0 1 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 2 0 0 0 0 0 0]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 1 2]]\n", - " precision recall f1-score support\n", - "\n", - " 1 1.00 0.50 0.67 4\n", - " 10 1.00 1.00 1.00 2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", - " 8 1.00 0.75 0.86 4\n", - " 9 1.00 1.00 1.00 3\n", - "\n", - "avg / total 0.92 0.84 0.84 31\n" - ] - }, - { - "output_type": "stream", - "stream": "stdout", - "text": [ - "\n", - "[[2 0 2 0 0 0 0 0 0 0 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 2 0 0 0 0 0 0]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]\n", - " precision recall f1-score support\n", - "\n", - " 1 1.00 0.50 0.67 4\n", - " 10 1.00 1.00 1.00 2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.50 0.50 0.50 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", - " 8 0.75 0.75 0.75 4\n", - " 9 1.00 1.00 1.00 3\n", - "\n", - "avg / total 0.88 0.81 0.80 31\n", - "\n", - "[[2 0 2 0 0 0 0 0 0 0 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 1 0 0 0 0 1 0]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]\n", - " precision recall f1-score support\n", - "\n", - " 1 1.00 0.25 0.40 4\n", - " 10 1.00 1.00 1.00 2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.50 0.50 0.50 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", - " 8 0.75 0.75 0.75 4\n", - " 9 0.75 1.00 0.86 3\n", - "\n", - "avg / total 0.85 0.77 0.76 31\n" - ] - }, - { - "output_type": "stream", - "stream": "stdout", - "text": [ - "\n", - "[[1 0 2 0 
0 0 0 0 0 1 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 1 0 0 0 0 0 1]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]\n", - " precision recall f1-score support\n", - "\n", - " 1 1.00 0.25 0.40 4\n", - " 10 1.00 1.00 1.00 2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", - " 8 0.50 0.75 0.60 4\n", - " 9 1.00 1.00 1.00 3\n", - "\n", - "avg / total 0.81 0.74 0.72 31\n", - "\n", - "[[1 0 2 0 0 0 0 0 0 1 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 2 0]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]\n", - " precision recall f1-score support\n", - "\n", - " 1 1.00 0.50 0.67 4\n", - " 10 1.00 1.00 1.00 2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.50 0.50 0.50 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", - " 8 1.00 0.75 0.86 4\n", - " 9 0.75 1.00 0.86 3\n", - "\n", - "avg / total 0.88 0.81 0.80 31\n" - ] - }, - { - "output_type": "stream", - "stream": "stdout", - "text": [ - "\n", - "[[2 0 2 0 0 0 0 0 0 0 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 1 0 0 0 0 0 1]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]\n", - " precision recall f1-score support\n", - "\n", - " 1 1.00 0.50 0.67 4\n", - " 10 1.00 1.00 1.00 2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.50 0.50 0.50 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", - " 8 0.75 0.75 0.75 4\n", - " 9 1.00 1.00 1.00 3\n", - "\n", - "avg / total 0.88 0.81 0.80 31\n" - ] - }, - { - "output_type": "stream", - "stream": "stdout", - "text": [ - "\n", - "[[2 0 2 0 0 0 0 0 0 0 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 1 0 0 0 0 1 0]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]\n", - " precision recall f1-score support\n", - "\n", - " 1 0.50 0.50 0.50 4\n", - " 10 1.00 1.00 1.00 2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", - " 8 1.00 0.75 0.86 4\n", - " 9 1.00 1.00 1.00 3\n", - "\n", - "avg / total 0.81 0.77 0.76 31\n" - ] - }, - { - "output_type": "stream", - "stream": "stdout", - "text": [ - "\n", - "[[2 0 2 0 0 0 0 0 0 0 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [2 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 
0 0 0 0 0 0 0 0 3]]\n", - " precision recall f1-score support\n", - "\n", - " 1 1.00 0.50 0.67 4\n", - " 10 1.00 1.00 1.00 2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", - " 8 0.60 0.75 0.67 4\n", - " 9 1.00 1.00 1.00 3\n", - "\n", - "avg / total 0.82 0.77 0.76 31\n" - ] - }, - { - "output_type": "stream", - "stream": "stdout", - "text": [ - "\n", - "[[2 0 2 0 0 0 0 0 0 0 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 2 0]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]\n", - " precision recall f1-score support\n", - "\n", - " 1 1.00 0.50 0.67 4\n", - " 10 1.00 1.00 1.00 2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", - " 8 1.00 0.75 0.86 4\n", - " 9 1.00 1.00 1.00 3\n", - "\n", - "avg / total 0.92 0.84 0.84 31\n" - ] - }, - { - "output_type": "stream", - "stream": "stdout", - "text": [ - "\n", - "[[2 0 2 0 0 0 0 0 0 0 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 2 0 0 0 0 0 0]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]\n", - " precision recall f1-score support\n", - "\n", - " 1 1.00 0.50 0.67 4\n", - " 10 1.00 1.00 1.00 2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", - " 8 1.00 0.75 0.86 4\n", - " 9 1.00 1.00 1.00 3\n", - "\n", - "avg / total 0.92 0.84 0.84 31\n" - ] - }, - { - "output_type": "stream", - "stream": "stdout", - "text": [ - "\n", - "[[2 0 2 0 0 0 0 0 0 0 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 2 0 0 0 0 0 0]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]\n", - " precision recall f1-score support\n", - "\n", - " 1 1.00 0.50 0.67 4\n", - " 10 1.00 1.00 1.00 2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", - " 8 0.60 0.75 0.67 4\n", - " 9 1.00 1.00 1.00 3\n", - "\n", - "avg / total 0.82 0.77 0.76 31\n" - ] - }, - { - "output_type": "stream", - "stream": "stdout", - "text": [ - "\n", - "[[2 0 2 0 0 0 0 0 0 0 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 2 0]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]\n", - " precision recall f1-score support\n", - "\n", - " 1 1.00 0.50 0.67 4\n", - " 10 1.00 1.00 1.00 2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", - " 4 1.00 1.00 1.00 4\n", - " 
5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", - " 8 1.00 0.75 0.86 4\n", - " 9 0.60 1.00 0.75 3\n", - "\n", - "avg / total 0.84 0.77 0.76 31\n" - ] - }, - { - "output_type": "stream", - "stream": "stdout", - "text": [ - "\n", - "[[2 0 2 0 0 0 0 0 0 0 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 2]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]\n", - " precision recall f1-score support\n", - "\n", - " 1 1.00 0.50 0.67 4\n", - " 10 1.00 1.00 1.00 2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", - " 8 1.00 0.75 0.86 4\n", - " 9 1.00 1.00 1.00 3\n", - "\n", - "avg / total 0.92 0.84 0.84 31\n" - ] - }, - { - "output_type": "stream", - "stream": "stdout", - "text": [ - "\n", - "[[2 0 2 0 0 0 0 0 0 0 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 2 0 0 0 0 0 0]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]\n", - " precision recall f1-score support\n", - "\n", - " 1 1.00 0.25 0.40 4\n", - " 10 1.00 1.00 1.00 2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.50 1.00 0.67 1\n", - " 7 1.00 0.67 0.80 3\n", - " 8 0.75 0.75 0.75 4\n", - " 9 1.00 1.00 1.00 3\n", - "\n", - "avg / total 0.89 0.84 0.82 31\n" - ] - }, - { - "output_type": "stream", - "stream": "stdout", - "text": [ - "\n", - "[[1 0 2 0 0 0 0 0 0 1 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 2 0 0 0 0 0 0]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 1 2 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]\n", - " precision recall f1-score support\n", - "\n", - " 1 0.67 0.50 0.57 4\n", - " 10 1.00 1.00 1.00 2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 0.50 0.67 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", - " 8 1.00 0.75 0.86 4\n", - " 9 1.00 1.00 1.00 3\n", - "\n", - "avg / total 0.88 0.81 0.80 31\n" - ] - }, - { - "output_type": "stream", - "stream": "stdout", - "text": [ - "\n", - "[[2 0 2 0 0 0 0 0 0 0 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 2 0 0 0 0 0 0]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [1 0 0 0 0 0 1 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]\n", - " precision recall f1-score support\n", - "\n", - " 1 1.00 0.50 0.67 4\n", - " 10 1.00 1.00 1.00 2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.50 1.00 0.67 1\n", - " 7 1.00 0.67 0.80 3\n", - " 8 1.00 0.75 0.86 4\n", - " 9 1.00 1.00 1.00 3\n", - "\n", - "avg / total 0.92 0.87 0.87 31\n" - ] - }, - { - "output_type": "stream", - "stream": "stdout", - 
"text": [ - "\n", - "[[2 0 2 0 0 0 0 0 0 0 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 2 0 0 0 0 0 0]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 1 2 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]\n", - " precision recall f1-score support\n", - "\n", - " 1 1.00 0.25 0.40 4\n", - " 10 1.00 1.00 1.00 2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", - " 8 0.75 0.75 0.75 4\n", - " 9 1.00 1.00 1.00 3\n", - "\n", - "avg / total 0.89 0.81 0.79 31\n" - ] - }, - { - "output_type": "stream", - "stream": "stdout", - "text": [ - "\n", - "[[1 0 2 0 0 0 0 0 0 1 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 2 0 0 0 0 0 0]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]\n", - " precision recall f1-score support\n", - "\n", - " 1 1.00 0.50 0.67 4\n", - " 10 1.00 1.00 1.00 2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.50 0.50 0.50 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", - " 8 0.75 0.75 0.75 4\n", - " 9 1.00 1.00 1.00 3\n", - "\n", - "avg / total 0.88 0.81 0.80 31\n" - ] - }, - { - "output_type": "stream", - "stream": "stdout", - "text": [ - "\n", - "[[2 0 2 0 0 0 0 0 0 0 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 1 0 0 0 0 1 0]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]\n", - " precision recall f1-score support\n", - "\n", - " 1 0.33 0.25 0.29 4\n", - " 10 1.00 1.00 1.00 2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.50 1.00 0.67 1\n", - " 7 1.00 0.67 0.80 3\n", - " 8 0.75 0.75 0.75 4\n", - " 9 1.00 1.00 1.00 3\n", - "\n", - "avg / total 0.76 0.77 0.76 31\n" - ] - }, - { - "output_type": "stream", - "stream": "stdout", - "text": [ - "\n", - "[[1 0 2 0 0 0 0 0 0 1 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [2 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 1 2 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]\n", - " precision recall f1-score support\n", - "\n", - " 1 0.67 0.50 0.57 4\n", - " 10 1.00 1.00 1.00 2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.50 0.50 0.50 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", - " 8 1.00 0.75 0.86 4\n", - " 9 1.00 1.00 1.00 3\n", - "\n", - "avg / total 0.86 0.81 0.81 31\n" - ] - }, - { - "output_type": "stream", - "stream": "stdout", - "text": [ - "\n", - "[[2 0 2 0 0 0 0 0 0 0 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [1 0 0 0 1 0 0 0 0 0 0]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", 
- " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]\n", - " precision recall f1-score support\n", - "\n", - " 1 1.00 0.50 0.67 4\n", - " 10 1.00 1.00 1.00 2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", - " 8 1.00 0.75 0.86 4\n", - " 9 0.60 1.00 0.75 3\n", - "\n", - "avg / total 0.84 0.77 0.76 31\n" - ] - }, - { - "output_type": "stream", - "stream": "stdout", - "text": [ - "\n", - "[[2 0 2 0 0 0 0 0 0 0 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 2]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]\n", - " precision recall f1-score support\n", - "\n", - " 1 1.00 0.50 0.67 4\n", - " 10 1.00 1.00 1.00 2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.50 0.50 0.50 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.50 1.00 0.67 1\n", - " 7 1.00 0.67 0.80 3\n", - " 8 0.75 0.75 0.75 4\n", - " 9 1.00 1.00 1.00 3\n", - "\n", - "avg / total 0.88 0.84 0.84 31\n" - ] - }, - { - "output_type": "stream", - "stream": "stdout", - "text": [ - "\n", - "[[2 0 2 0 0 0 0 0 0 0 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 1 0 0 0 0 1 0]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 1 2 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]\n", - " precision recall f1-score support\n", - "\n", - " 1 1.00 0.50 0.67 4\n", - " 10 1.00 1.00 1.00 2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.50 0.50 0.50 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", - " 8 0.75 0.75 0.75 4\n", - " 9 1.00 1.00 1.00 3\n", - "\n", - "avg / total 0.88 0.81 0.80 31\n" - ] - }, - { - "output_type": "stream", - "stream": "stdout", - "text": [ - "\n", - "[[2 0 2 0 0 0 0 0 0 0 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 1 0 0 0 0 1 0]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]\n", - " precision recall f1-score support\n", - "\n", - " 1 1.00 0.25 0.40 4\n", - " 10 1.00 1.00 1.00 2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", - " 8 0.75 0.75 0.75 4\n", - " 9 1.00 1.00 1.00 3\n", - "\n", - "avg / total 0.89 0.81 0.79 31\n" - ] - }, - { - "output_type": "stream", - "stream": "stdout", - "text": [ - "\n", - "[[1 0 2 0 0 0 0 0 0 1 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 2 0 0 0 0 0 0]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]\n", - " precision recall f1-score support\n", - "\n", - " 1 1.00 0.25 0.40 4\n", - " 10 1.00 1.00 1.00 2\n", - " 11 
0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", - " 8 0.75 0.75 0.75 4\n", - " 9 1.00 1.00 1.00 3\n", - "\n", - "avg / total 0.89 0.81 0.79 31\n" - ] - }, - { - "output_type": "stream", - "stream": "stdout", - "text": [ - "\n", - "[[1 0 2 0 0 0 0 0 0 1 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 2 0 0 0 0 0 0]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]\n", - " precision recall f1-score support\n", - "\n", - " 1 0.50 0.50 0.50 4\n", - " 10 1.00 1.00 1.00 2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", - " 8 1.00 0.75 0.86 4\n", - " 9 1.00 1.00 1.00 3\n", - "\n", - "avg / total 0.81 0.77 0.76 31\n" - ] - }, - { - "output_type": "stream", - "stream": "stdout", - "text": [ - "\n", - "[[2 0 2 0 0 0 0 0 0 0 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [2 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]\n", - " precision recall f1-score support\n", - "\n", - " 1 1.00 0.50 0.67 4\n", - " 10 1.00 1.00 1.00 2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", - " 8 1.00 0.75 0.86 4\n", - " 9 1.00 1.00 1.00 3\n", - "\n", - "avg / total 0.92 0.84 0.84 31\n" - ] - }, - { - "output_type": "stream", - "stream": "stdout", - "text": [ - "\n", - "[[2 0 2 0 0 0 0 0 0 0 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 2 0 0 0 0 0 0]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]\n", - " precision recall f1-score support\n", - "\n", - " 1 1.00 0.50 0.67 4\n", - " 10 1.00 1.00 1.00 2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", - " 8 1.00 0.75 0.86 4\n", - " 9 1.00 1.00 1.00 3\n", - "\n", - "avg / total 0.92 0.84 0.84 31\n" - ] - }, - { - "output_type": "stream", - "stream": "stdout", - "text": [ - "\n", - "[[2 0 2 0 0 0 0 0 0 0 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 2 0 0 0 0 0 0]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]\n", - " precision recall f1-score support\n", - "\n", - " 1 1.00 0.25 0.40 4\n", - " 10 1.00 1.00 1.00 2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.50 0.50 0.50 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", - " 8 0.60 0.75 0.67 4\n", - " 9 1.00 1.00 1.00 3\n", - 
"\n", - "avg / total 0.86 0.77 0.76 31\n" - ] - }, - { - "output_type": "stream", - "stream": "stdout", - "text": [ + "avg / total 0.81 0.77 0.76 31\n", "\n", - "[[1 0 2 0 0 0 0 0 0 1 0]\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 1 0 0 0 0 1 0]\n", + " [2 0 0 0 0 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", @@ -3639,27 +2545,21 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", + " 3 0.00 0.00 0.00 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", - " 8 1.00 0.75 0.86 4\n", + " 8 0.60 0.75 0.67 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.92 0.84 0.84 31\n" - ] - }, - { - "output_type": "stream", - "stream": "stdout", - "text": [ + "avg / total 0.82 0.77 0.76 31\n", "\n", "[[2 0 2 0 0 0 0 0 0 0 0]\n", " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 0 0 0 0 2 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", @@ -3672,15 +2572,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", + " 3 0.50 0.50 0.50 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", - " 8 1.00 0.75 0.86 4\n", + " 8 0.75 0.75 0.75 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.92 0.84 0.84 31\n" + "avg / total 0.88 0.81 0.80 31\n" ] }, { @@ -3692,7 +2592,7 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 1 0 0 0 0 1 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", @@ -3705,15 +2605,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", + " 3 0.00 0.00 0.00 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", - " 8 1.00 0.75 0.86 4\n", + " 8 0.60 0.75 0.67 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.92 0.84 0.84 31\n" + "avg / total 0.82 0.77 0.76 31\n" ] }, { @@ -3725,7 +2625,7 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 0 0 0 0 2 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", @@ -3734,7 +2634,7 @@ " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 0.50 0.50 0.50 4\n", + " 1 0.50 0.25 0.33 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", @@ -3743,43 +2643,22 @@ " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", - " 8 1.00 0.75 0.86 4\n", + " 8 0.60 0.75 0.67 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.81 0.77 0.76 31\n" - ] - }, - { - "output_type": "stream", - "stream": "stdout", - "text": [ + "avg / total 0.76 0.74 0.72 31\n", "\n", - "[[2 0 2 0 0 0 0 0 0 0 0]\n", + "[[1 0 2 0 0 0 0 0 0 1 0]\n", " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [2 0 0 0 0 0 0 0 0 0 0]\n", + " [1 0 0 0 0 0 0 0 0 1 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 
0 0 0 0 0 3]]\n", - " precision recall f1-score support\n", - "\n", - " 1 1.00 0.25 0.40 4\n", - " 10 1.00 1.00 1.00 2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.50 1.00 0.67 1\n", - " 7 1.00 0.67 0.80 3\n", - " 8 0.75 0.75 0.75 4\n", - " 9 1.00 1.00 1.00 3\n", - "\n", - "avg / total 0.89 0.84 0.82 31\n" + " [0 0 0 0 0 0 0 0 0 0 3]]" ] }, { @@ -3787,98 +2666,33 @@ "stream": "stdout", "text": [ "\n", - "[[1 0 2 0 0 0 0 0 0 1 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 2 0 0 0 0 0 0]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 1 2 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", " 1 1.00 0.50 0.67 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", - " 8 1.00 0.75 0.86 4\n", - " 9 1.00 1.00 1.00 3\n", - "\n", - "avg / total 0.92 0.84 0.84 31\n" - ] - }, - { - "output_type": "stream", - "stream": "stdout", - "text": [ - "\n", - "[[2 0 2 0 0 0 0 0 0 0 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 2 0 0 0 0 0 0]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]\n", - " precision recall f1-score support\n", - "\n", - " 1 0.67 0.50 0.57 4\n", - " 10 1.00 1.00 1.00 2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", " 3 0.50 0.50 0.50 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", - " 9 1.00 1.00 1.00 3\n", + " 9 0.75 1.00 0.86 3\n", "\n", - "avg / total 0.86 0.81 0.81 31\n" - ] - }, - { - "output_type": "stream", - "stream": "stdout", - "text": [ + "avg / total 0.88 0.81 0.80 31\n", "\n", "[[2 0 2 0 0 0 0 0 0 0 0]\n", " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [1 0 0 0 1 0 0 0 0 0 0]\n", + " [0 0 0 0 1 0 0 0 0 0 1]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]\n", - " precision recall f1-score support\n", - "\n", - " 1 1.00 0.50 0.67 4\n", - " 10 1.00 1.00 1.00 2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.50 0.50 0.50 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", - " 8 1.00 0.75 0.86 4\n", - " 9 0.75 1.00 0.86 3\n", - "\n", - "avg / total 0.88 0.81 0.80 31\n" + " [0 0 0 0 0 0 0 0 0 0 3]]" ] }, { @@ -3886,17 +2700,6 @@ "stream": "stdout", "text": [ "\n", - "[[2 0 2 0 0 0 0 0 0 0 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 1 0 0 0 0 0 1]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", " 1 1.00 0.50 0.67 4\n", @@ -3941,8 +2744,8 @@ " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 
7 1.00 0.33 0.50 3\n", - " 8 0.75 0.75 0.75 4\n", - " 9 1.00 1.00 1.00 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 0.75 1.00 0.86 3\n", "\n", "avg / total 0.88 0.81 0.80 31\n" ] @@ -3956,7 +2759,7 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 1 0 0 0 0 1 0]\n", + " [0 0 0 0 1 0 0 0 0 0 1]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", @@ -3965,17 +2768,17 @@ " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 0.50 0.25 0.33 4\n", + " 1 0.67 0.50 0.57 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.50 0.50 0.50 2\n", + " 3 0.00 0.00 0.00 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", - " 8 0.75 0.75 0.75 4\n", - " 9 1.00 1.00 1.00 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 0.75 1.00 0.86 3\n", "\n", "avg / total 0.81 0.77 0.76 31\n" ] @@ -3985,11 +2788,11 @@ "stream": "stdout", "text": [ "\n", - "[[1 0 2 0 0 0 0 0 0 1 0]\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [1 0 0 0 1 0 0 0 0 0 0]\n", + " [1 0 0 0 0 0 0 0 0 0 1]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", @@ -3998,11 +2801,11 @@ " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 1.00 0.50 0.67 4\n", + " 1 0.50 0.50 0.50 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", + " 3 0.00 0.00 0.00 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", @@ -4010,7 +2813,7 @@ " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.92 0.84 0.84 31\n" + "avg / total 0.81 0.77 0.76 31\n" ] }, { @@ -4022,7 +2825,7 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [2 0 0 0 0 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", @@ -4101,15 +2904,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", + " 3 0.00 0.00 0.00 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", - " 9 1.00 1.00 1.00 3\n", + " 9 0.60 1.00 0.75 3\n", "\n", - "avg / total 0.92 0.84 0.84 31\n" + "avg / total 0.84 0.77 0.76 31\n" ] }, { @@ -4121,7 +2924,7 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 0 0 0 0 0 2]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", @@ -4130,19 +2933,19 @@ " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 0.67 0.50 0.57 4\n", + " 1 1.00 0.50 0.67 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.50 0.50 0.50 2\n", + " 3 0.00 0.00 0.00 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.50 1.00 0.67 1\n", " 7 1.00 0.67 0.80 3\n", " 8 1.00 0.75 0.86 4\n", - " 9 1.00 1.00 1.00 3\n", + " 9 0.60 1.00 0.75 3\n", "\n", - "avg / total 0.87 0.84 0.84 31\n" + "avg / total 0.84 0.81 0.80 31\n" ] }, { @@ -4154,7 +2957,7 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [1 0 0 0 1 0 0 0 0 0 0]\n", + " [0 0 0 0 0 0 0 0 0 0 2]\n", " [0 0 0 0 0 4 0 
0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", @@ -4167,15 +2970,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.50 0.50 0.50 2\n", + " 3 0.00 0.00 0.00 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", - " 8 1.00 0.75 0.86 4\n", + " 8 0.75 0.75 0.75 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.86 0.81 0.81 31\n" + "avg / total 0.80 0.77 0.76 31\n" ] }, { @@ -4187,7 +2990,7 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [1 0 0 0 1 0 0 0 0 0 0]\n", + " [1 0 0 0 0 0 0 0 0 1 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", @@ -4196,19 +2999,19 @@ " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 0.50 0.50 0.50 4\n", + " 1 1.00 0.50 0.67 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", + " 3 0.50 0.50 0.50 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", - " 8 1.00 0.75 0.86 4\n", + " 8 0.75 0.75 0.75 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.81 0.77 0.76 31\n" + "avg / total 0.88 0.81 0.80 31\n" ] }, { @@ -4220,7 +3023,7 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [2 0 0 0 0 0 0 0 0 0 0]\n", + " [0 0 0 0 1 0 0 0 0 1 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", @@ -4229,11 +3032,11 @@ " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 0.67 0.50 0.57 4\n", + " 1 0.50 0.50 0.50 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.50 0.50 0.50 2\n", + " 3 0.00 0.00 0.00 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", @@ -4241,7 +3044,7 @@ " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.86 0.81 0.81 31\n" + "avg / total 0.81 0.77 0.76 31\n" ] }, { @@ -4253,7 +3056,7 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [1 0 0 0 1 0 0 0 0 0 0]\n", + " [2 0 0 0 0 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", @@ -4266,15 +3069,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", + " 3 0.00 0.00 0.00 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", - " 8 1.00 0.75 0.86 4\n", - " 9 1.00 1.00 1.00 3\n", + " 8 0.75 0.75 0.75 4\n", + " 9 0.75 1.00 0.86 3\n", "\n", - "avg / total 0.92 0.84 0.84 31\n" + "avg / total 0.82 0.77 0.76 31\n" ] }, { @@ -4286,7 +3089,7 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 0 0 0 0 1 1]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", @@ -4295,19 +3098,19 @@ " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 1.00 0.50 0.67 4\n", + " 1 0.67 0.50 0.57 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", + " 3 0.00 0.00 0.00 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", - " 8 1.00 0.75 0.86 4\n", + " 8 0.75 0.75 0.75 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / 
total 0.92 0.84 0.84 31\n" + "avg / total 0.80 0.77 0.76 31\n" ] }, { @@ -4319,7 +3122,7 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [1 0 0 0 0 0 0 0 0 1 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", @@ -4361,19 +3164,19 @@ " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 1.00 0.50 0.67 4\n", + " 1 0.50 0.50 0.50 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.50 0.50 0.50 2\n", + " 3 0.00 0.00 0.00 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", - " 8 0.75 0.75 0.75 4\n", + " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.88 0.81 0.80 31\n" + "avg / total 0.81 0.77 0.76 31\n" ] }, { @@ -4385,7 +3188,7 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 1 0 0 0 0 1 0]\n", + " [2 0 0 0 0 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", @@ -4394,19 +3197,19 @@ " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 1.00 0.50 0.67 4\n", + " 1 0.67 0.50 0.57 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", + " 3 0.00 0.00 0.00 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", - " 8 1.00 0.75 0.86 4\n", + " 8 0.75 0.75 0.75 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.92 0.84 0.84 31\n" + "avg / total 0.80 0.77 0.76 31\n" ] }, { @@ -4418,7 +3221,7 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [1 0 0 0 0 0 0 0 0 1 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", @@ -4427,7 +3230,7 @@ " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 0.50 0.50 0.50 4\n", + " 1 1.00 0.50 0.67 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", @@ -4437,9 +3240,9 @@ " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", - " 9 1.00 1.00 1.00 3\n", + " 9 0.60 1.00 0.75 3\n", "\n", - "avg / total 0.81 0.77 0.76 31\n" + "avg / total 0.84 0.77 0.76 31\n" ] }, { @@ -4451,7 +3254,7 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [2 0 0 0 0 0 0 0 0 0 0]\n", + " [0 0 0 0 0 0 0 0 0 0 2]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", @@ -4460,19 +3263,19 @@ " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 1.00 0.25 0.40 4\n", + " 1 0.50 0.50 0.50 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", + " 3 0.00 0.00 0.00 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", - " 8 0.75 0.75 0.75 4\n", + " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.89 0.81 0.79 31\n" + "avg / total 0.81 0.77 0.76 31\n" ] }, { @@ -4480,11 +3283,11 @@ "stream": "stdout", "text": [ "\n", - "[[1 0 2 0 0 0 0 0 0 1 0]\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [2 0 0 0 0 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 
0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", @@ -4493,7 +3296,7 @@ " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 0.50 0.50 0.50 4\n", + " 1 1.00 0.50 0.67 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", @@ -4503,9 +3306,9 @@ " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", - " 9 1.00 1.00 1.00 3\n", + " 9 0.60 1.00 0.75 3\n", "\n", - "avg / total 0.81 0.77 0.76 31\n" + "avg / total 0.84 0.77 0.76 31\n" ] }, { @@ -4517,7 +3320,7 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [2 0 0 0 0 0 0 0 0 0 0]\n", + " [0 0 0 0 0 0 0 0 0 0 2]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", @@ -4530,15 +3333,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.50 0.50 0.50 2\n", + " 3 0.67 1.00 0.80 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", - " 8 0.75 0.75 0.75 4\n", + " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.88 0.81 0.80 31\n" + "avg / total 0.92 0.84 0.84 31\n" ] }, { @@ -4550,7 +3353,7 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 1 0 0 0 0 1 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", @@ -4563,15 +3366,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", + " 3 0.67 1.00 0.80 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", - " 8 0.60 0.75 0.67 4\n", + " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.82 0.77 0.76 31\n" + "avg / total 0.92 0.84 0.84 31\n" ] }, { @@ -4583,7 +3386,7 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 2 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", @@ -4596,15 +3399,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", + " 3 0.50 0.50 0.50 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", - " 9 1.00 1.00 1.00 3\n", + " 9 0.75 1.00 0.86 3\n", "\n", - "avg / total 0.92 0.84 0.84 31\n" + "avg / total 0.88 0.81 0.80 31\n" ] }, { @@ -4616,7 +3419,7 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 1 0 0 0 0 0 1]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", @@ -4667,10 +3470,10 @@ " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", - " 8 0.60 0.75 0.67 4\n", - " 9 1.00 1.00 1.00 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 0.60 1.00 0.75 3\n", "\n", - "avg / total 0.82 0.77 0.76 31\n" + "avg / total 0.84 0.77 0.76 31\n" ] }, { @@ -4682,7 +3485,7 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 2 0]\n", + " [0 0 0 0 0 0 0 0 0 0 2]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", @@ -4691,19 +3494,19 @@ " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 0.67 0.50 0.57 4\n", + " 1 1.00 
0.50 0.67 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.50 0.50 0.50 2\n", + " 3 0.00 0.00 0.00 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", - " 9 1.00 1.00 1.00 3\n", + " 9 0.60 1.00 0.75 3\n", "\n", - "avg / total 0.86 0.81 0.81 31\n" + "avg / total 0.84 0.77 0.76 31\n" ] }, { @@ -4715,7 +3518,7 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [1 0 0 0 1 0 0 0 0 0 0]\n", + " [0 0 0 0 0 0 0 0 0 0 2]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", @@ -4761,15 +3564,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", + " 3 0.00 0.00 0.00 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", - " 9 1.00 1.00 1.00 3\n", + " 9 0.60 1.00 0.75 3\n", "\n", - "avg / total 0.92 0.84 0.84 31\n" + "avg / total 0.84 0.77 0.76 31\n" ] }, { @@ -4781,7 +3584,7 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [0 0 0 0 0 0 0 0 0 0 2]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", @@ -4794,15 +3597,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", + " 3 0.50 0.50 0.50 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", - " 8 0.75 0.75 0.75 4\n", + " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.80 0.77 0.76 31\n" + "avg / total 0.86 0.81 0.81 31\n" ] }, { @@ -4814,7 +3617,7 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [1 0 0 0 0 0 0 0 0 1 0]\n", + " [1 0 0 0 1 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", @@ -4860,15 +3663,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.50 0.50 0.50 2\n", + " 3 0.67 1.00 0.80 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", - " 8 0.75 0.75 0.75 4\n", + " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.88 0.81 0.80 31\n" + "avg / total 0.92 0.84 0.84 31\n" ] }, { @@ -4880,7 +3683,7 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 1 0 0 0 0 1 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", @@ -4922,19 +3725,19 @@ " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 1.00 0.50 0.67 4\n", + " 1 0.67 0.50 0.57 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", + " 3 0.00 0.00 0.00 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", - " 8 1.00 0.75 0.86 4\n", + " 8 0.75 0.75 0.75 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.92 0.84 0.84 31\n" + "avg / total 0.80 0.77 0.76 31\n" ] }, { @@ -4946,7 +3749,7 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [1 0 0 0 0 0 0 0 0 1 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", @@ 
-4955,19 +3758,19 @@ " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 1.00 0.25 0.40 4\n", + " 1 0.33 0.25 0.29 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.50 0.50 0.50 2\n", + " 3 0.00 0.00 0.00 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", - " 8 0.60 0.75 0.67 4\n", + " 8 0.75 0.75 0.75 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.86 0.77 0.76 31\n" + "avg / total 0.76 0.74 0.72 31\n" ] }, { @@ -4979,7 +3782,7 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 1 0 0 0 0 1 0]\n", + " [2 0 0 0 0 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", @@ -5021,11 +3824,11 @@ " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 1.00 0.50 0.67 4\n", + " 1 0.67 0.50 0.57 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.50 0.50 0.50 2\n", + " 3 0.00 0.00 0.00 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", @@ -5033,7 +3836,7 @@ " 8 0.75 0.75 0.75 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.88 0.81 0.80 31\n" + "avg / total 0.80 0.77 0.76 31\n" ] }, { @@ -5045,7 +3848,7 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 1 0 0 0 0 1 0]\n", + " [1 0 0 0 0 0 0 0 0 1 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", @@ -5054,11 +3857,11 @@ " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 1.00 0.50 0.67 4\n", + " 1 0.50 0.50 0.50 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", + " 3 0.00 0.00 0.00 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", @@ -5066,7 +3869,7 @@ " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.92 0.84 0.84 31\n" + "avg / total 0.81 0.77 0.76 31\n" ] }, { @@ -5078,7 +3881,7 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 2 0 0 0 0 0 0]\n", + " [2 0 0 0 0 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", @@ -5120,7 +3923,7 @@ " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 1.00 0.50 0.67 4\n", + " 1 0.50 0.25 0.33 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", @@ -5129,10 +3932,10 @@ " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", - " 8 0.75 0.75 0.75 4\n", - " 9 0.75 1.00 0.86 3\n", + " 8 0.60 0.75 0.67 4\n", + " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.82 0.77 0.76 31\n" + "avg / total 0.76 0.74 0.72 31\n" ] }, { @@ -5140,11 +3943,11 @@ "stream": "stdout", "text": [ "\n", - "[[2 0 2 0 0 0 0 0 0 0 0]\n", + "[[1 0 2 0 0 0 0 0 0 1 0]\n", " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 1 1]\n", + " [1 0 0 0 0 0 0 0 0 1 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", @@ -5157,15 +3960,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.50 0.50 0.50 2\n", + " 3 0.00 0.00 0.00 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", - " 6 0.50 1.00 0.67 1\n", - " 7 1.00 0.67 0.80 3\n", + " 6 0.33 1.00 0.50 
1\n", + " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", - " 9 0.75 1.00 0.86 3\n", + " 9 0.60 1.00 0.75 3\n", "\n", - "avg / total 0.89 0.84 0.84 31\n" + "avg / total 0.84 0.77 0.76 31\n" ] }, { @@ -5177,11 +3980,11 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 1 0 0 0 0 0 1]\n", + " [0 0 0 0 0 0 0 0 0 0 2]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 1 2 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", " [0 0 0 0 1 0 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", @@ -5216,39 +4019,6 @@ " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]\n", - " precision recall f1-score support\n", - "\n", - " 1 0.50 0.50 0.50 4\n", - " 10 1.00 1.00 1.00 2\n", - " 11 0.60 1.00 0.75 3\n", - " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", - " 4 1.00 1.00 1.00 4\n", - " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", - " 8 1.00 0.75 0.86 4\n", - " 9 1.00 1.00 1.00 3\n", - "\n", - "avg / total 0.81 0.77 0.76 31\n" - ] - }, - { - "output_type": "stream", - "stream": "stdout", - "text": [ - "\n", - "[[2 0 2 0 0 0 0 0 0 0 0]\n", - " [0 2 0 0 0 0 0 0 0 0 0]\n", - " [0 0 3 0 0 0 0 0 0 0 0]\n", - " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [2 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 4 0 0 0 0 0]\n", - " [0 0 0 0 0 0 2 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n" ] }, @@ -5256,6 +4026,8 @@ "output_type": "stream", "stream": "stderr", "text": [ + "/Users/zackjcooper/.pyenv/versions/sandbox/lib/python3.4/site-packages/sklearn/metrics/metrics.py:1771: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples.\n", + " 'precision', 'predicted', average, warn_for)\n", "/Users/zackjcooper/.pyenv/versions/sandbox/lib/python3.4/site-packages/sklearn/metrics/metrics.py:1771: UndefinedMetricWarning: F-score is ill-defined and being set to 0.0 in labels with no predicted samples.\n", " 'precision', 'predicted', average, warn_for)\n" ] @@ -5263,107 +4035,62 @@ { "metadata": {}, "output_type": "pyout", - "prompt_number": 17, + "prompt_number": 13, "text": [ - "[(5, 0.63709677419354838),\n", - " (6, 0.68817204301075252),\n", - " (7, 0.82311827956989247),\n", - " (8, 0.79516129032258065),\n", - " (9, 0.69032258064516128),\n", - " (10, 0.77135176651305681),\n", - " (11, 0.75552995391705069),\n", - " (12, 0.73210445468509977),\n", - " (13, 0.73064516129032253),\n", - " (14, 0.87135176651305668),\n", - " (15, 0.72235023041474655),\n", - " (16, 0.87135176651305668),\n", - " (17, 0.75860215053763436),\n", - " (18, 0.71251920122887857),\n", - " (19, 0.83694316436251914),\n", - " (20, 0.75860215053763436),\n", - " (21, 0.83694316436251914),\n", + "[(20, 0.74948796722990274),\n", + " (21, 0.76382488479262667),\n", " (22, 0.80376344086021501),\n", - " (23, 0.75552995391705069),\n", - " (24, 0.717741935483871),\n", - " (25, 0.80376344086021501),\n", + " (23, 0.76382488479262667),\n", + " (24, 0.83694316436251914),\n", + " (25, 0.76075268817204289),\n", " (26, 0.80376344086021501),\n", - " (27, 0.76382488479262667),\n", - " (28, 0.76075268817204289),\n", - " (29, 0.83694316436251914),\n", - " (30, 0.83694316436251914),\n", - " (31, 0.76075268817204289),\n", - " (32, 0.76113671274961581),\n", - " (33, 0.83694316436251914),\n", - " (34, 
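The source of the cell that produces these outputs is not part of this hunk; only its stored output is. For orientation, a loop of the shape sketched below would generate the same kind of output: one confusion matrix and classification report per pass, plus a final list of (parameter value, accuracy) pairs like the pyout below. The classifier (KNeighborsClassifier), the swept parameter (n_neighbors over 20-69), the single paren_count feature, and the train/test split size are assumptions made for illustration, not something this patch confirms. The UndefinedMetricWarning on stderr simply means some labels received no predictions in the small 31-file test set, so sklearn reports their precision/F-score as 0.0.

# Sketch only -- the real cell's source is not shown in this patch; the classifier,
# feature set, swept parameter and split below are assumptions chosen to reproduce
# the shape of the stored outputs.
from sklearn.cross_validation import train_test_split      # sklearn 0.15-era API, matching the stderr path above
from sklearn.neighbors import KNeighborsClassifier
from sklearn.metrics import confusion_matrix, classification_report, accuracy_score
from corpus_build import Corpus

corpus_df = Corpus().compl_df_build()
X = corpus_df[['paren_count']]              # assumed feature column built in corpus_build.py
y = corpus_df['hit_num']                    # string labels '1'..'11', hence the 1, 10, 11, 2, ... ordering in the reports

scores = []
for k in range(20, 70):                     # the pyout below lists (k, accuracy) for k = 20..69
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)
    clf = KNeighborsClassifier(n_neighbors=k).fit(X_train, y_train)
    predicted = clf.predict(X_test)
    print(confusion_matrix(y_test, predicted))        # 11x11 matrices as in the hunks above
    print(classification_report(y_test, predicted))   # per-class precision/recall/f1 tables
    scores.append((k, accuracy_score(y_test, predicted)))
scores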
0.82311827956989247),\n", - " (35, 0.80314900153609825),\n", - " (36, 0.87135176651305668),\n", - " (37, 0.78870967741935483),\n", - " (38, 0.80376344086021501),\n", - " (39, 0.7567588325652842),\n", - " (40, 0.80529953917050678),\n", - " (41, 0.76113671274961581),\n", - " (42, 0.83817204301075277),\n", - " (43, 0.80376344086021501),\n", - " (44, 0.78870967741935483),\n", - " (45, 0.78870967741935483),\n", - " (46, 0.76382488479262667),\n", - " (47, 0.83694316436251914),\n", - " (48, 0.83694316436251914),\n", - " (49, 0.75860215053763436),\n", + " (27, 0.76075268817204289),\n", + " (28, 0.717741935483871),\n", + " (29, 0.80376344086021501),\n", + " (30, 0.75768049155145911),\n", + " (31, 0.80376344086021501),\n", + " (32, 0.75921658986175111),\n", + " (33, 0.76382488479262667),\n", + " (34, 0.83694316436251914),\n", + " (35, 0.87135176651305668),\n", + " (36, 0.76113671274961581),\n", + " (37, 0.79554531490015357),\n", + " (38, 0.75921658986175111),\n", + " (39, 0.80376344086021501),\n", + " (40, 0.76382488479262667),\n", + " (41, 0.75768049155145911),\n", + " (42, 0.75921658986175111),\n", + " (43, 0.83694316436251914),\n", + " (44, 0.76382488479262667),\n", + " (45, 0.75921658986175111),\n", + " (46, 0.76113671274961581),\n", + " (47, 0.76382488479262667),\n", + " (48, 0.76113671274961581),\n", + " (49, 0.83694316436251914),\n", " (50, 0.83694316436251914),\n", - " (51, 0.83694316436251914),\n", + " (51, 0.80376344086021501),\n", " (52, 0.83694316436251914),\n", - " (53, 0.83694316436251914),\n", - " (54, 0.76382488479262667),\n", - " (55, 0.82311827956989247),\n", - " (56, 0.83694316436251914),\n", + " (53, 0.76113671274961581),\n", + " (54, 0.76113671274961581),\n", + " (55, 0.83694316436251914),\n", + " (56, 0.76113671274961581),\n", " (57, 0.80529953917050678),\n", - " (58, 0.80376344086021501),\n", - " (59, 0.75768049155145911),\n", - " (60, 0.80376344086021501),\n", - " (61, 0.760752688172043),\n", - " (62, 0.83694316436251914),\n", + " (58, 0.83694316436251914),\n", + " (59, 0.83694316436251914),\n", + " (60, 0.83694316436251914),\n", + " (61, 0.75921658986175111),\n", + " (62, 0.72235023041474655),\n", " (63, 0.83694316436251914),\n", - " (64, 0.87135176651305668),\n", - " (65, 0.83694316436251914),\n", - " (66, 0.83970814132104454),\n", - " (67, 0.80529953917050678),\n", - " (68, 0.76382488479262667),\n", - " (69, 0.80529953917050678),\n", - " (70, 0.83694316436251914),\n", - " (71, 0.83694316436251914),\n", - " (72, 0.83694316436251914),\n", - " (73, 0.80376344086021501),\n", - " (74, 0.83694316436251914),\n", - " (75, 0.76382488479262667),\n", - " (76, 0.78870967741935483),\n", - " (77, 0.76382488479262667),\n", - " (78, 0.80376344086021501),\n", - " (79, 0.76075268817204289),\n", - " (80, 0.83694316436251914),\n", - " (81, 0.83694316436251914),\n", - " (82, 0.76075268817204289),\n", - " (83, 0.80529953917050678),\n", - " (84, 0.83694316436251914),\n", - " (85, 0.83694316436251914),\n", - " (86, 0.75921658986175111),\n", - " (87, 0.83694316436251914),\n", - " (88, 0.80376344086021501),\n", - " (89, 0.83694316436251914),\n", - " (90, 0.83694316436251914),\n", - " (91, 0.75860215053763436),\n", - " (92, 0.83694316436251914),\n", - " (93, 0.80376344086021501),\n", - " (94, 0.83694316436251914),\n", - " (95, 0.83694316436251914),\n", - " (96, 0.75768049155145911),\n", - " (97, 0.83817204301075277),\n", - " (98, 0.83694316436251914),\n", - " (99, 0.76382488479262667)]" + " (64, 0.75921658986175111),\n", + " (65, 0.76382488479262667),\n", + " (66, 0.83694316436251914),\n", + " (67, 
0.717741935483871),\n", + " (68, 0.76113671274961581),\n", + " (69, 0.83694316436251914)]" ] } ], - "prompt_number": 17 + "prompt_number": 13 }, { "cell_type": "code", @@ -5379,20 +4106,20 @@ { "metadata": {}, "output_type": "display_data", - "png": "iVBORw0KGgoAAAANSUhEUgAAAX4AAAECCAYAAAD5OrxGAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJztvXu8JFd13/urRz9Pd59nz4xmRjMjzaBtIZBAEpaQkEBg\n+RGHBLA/+QQTcy2DuRDsTz44iWOTa9/ca38IiSPwxTfYMQ7YfIxjI9skflyEYwyIjIkUPUACQc1o\nRpr3nNPn0ed0n3531f2jalftelc/zjmt6fX955x+VHXVrqq11/6ttdeWDMMAQRAEMT3Ie30ABEEQ\nxO5Chp8gCGLKIMNPEAQxZZDhJwiCmDLI8BMEQUwZZPgJgiCmDDXqQ8aYDOCTAG4F0AbwXk3Tzgif\nvxPAvwTQAvCIpmkfj9uGIAiC2FviPP63AUhrmnYPgF8E8DD/gDG2COAjAN4M4F4A/5Ax9lprm0zQ\nNgRBEMTeE2f47wXwKABomvY4gDuFz44D+JamaVVN0wwA/xPA/dY2XwzZhiAIgthj4gx/CcCW8Lpv\nSTkAcBrALYyxfYyxPIC3AJiJ2YYgCILYY+IM8haAovh9TdN0ANA0bQPAhwD8KYA/BPA0gNWobQiC\nIIi9JzK4C+AkgLcCeIQxdjeAZ/kHjDEVwJ2apt3HGMsA+BqAfwfT+AduE4ZhGIYkSUOeAkEQxNQy\nlOGUooq0McYkOBk6APAQgDsAFDRN+xRj7JdhBnP7AH5b07RPB22jadqpmOMwKpXaMMd/zVEuF0Ft\nYUJt4UBt4UBt4VAuF8dv+HcRMvwWdFM7UFs4UFs4UFs4DGv4KehKEAQxZZDhJwiCmDLI8BMEQUwZ\nZPgJgiCmDDL8BEEQUwYZfoIgiCmDDD9BEMSUQYafIAhiyiDDTxAEMWWQ4ScIgpgyyPATBEFMGWT4\nCYIgpgwy/ARBEFMGGX6CIIgpgww/QRDElEGGnyAIYsogw08QBDFlTK3h/7PHzuA//NEzvvf7uo5f\n/f0n8ejj5/fgqCaLbk/Hv/2Dp/DYty7v9aFMFZdWt/FL/+kbOL9Mq0wRO8PUGv7vvLiB51/agO5Z\nenK72cOLV7bwvfMbe3Rkk8PqZhOnL27iuTNre30oU8VzZ9awvNHEc2ep3YmdYWoNf7enAwB61l/v\n+51uf9ePadJotHoAgGant8dHMl1UNpvm32pzj4+EuFaZXsPf111/ve+3u7pvm2ljmxv+NnWCu8lq\ntQUAqFh/CWLcTK3h7/VMY9Yljz+URrsLAGiRx7+rcE+fPH5ip5hawx8m9fRsj58Mvy31tMnw7xa6\nYWB10/T017fa6Os08iTGz9Qa/k4vROrhHn+PHjhb6ulQJ7hbbNY7tvOhGwbWt9p7fETEtcjUGn5u\n4EnqCafRMqWedqcPXTdivk2MAy7vSJL5epXkHmIHmErDr+sG+pYhCwvudro6DGO6jR33+AGgRV7/\nrrBqZfTccF0JAFDZpAAvMX6m0vCLxt6n8fecYXavP92Gv+Ey/KTz7wY8k+fmo/PWa/L4ifEznYZf\nMPZhUg8AdHrT7eVyqQegAO9uwaWdV1qGf5U8fmIHIMPvNfzCaKAz5bn8otRDAd7doVJtQpKA44dm\nocgSefzEjqBGfcgYkwF8EsCtANoA3qtp2hnh87cD+DAAA8CnNU37bev9pwFsWl87q2nae3bg2Iem\nK3jyYVk9AAV4RY+/RR7/rlDZbGGhmEE6pWCxlKXgLrEjRBp+AG8DkNY07R7G2F0AHrbe43wMwGsB\nbAN4njH2X2B2ENA07YEdON6xkFTqmfZc/kabPP7dpNvTUa21wY7MAQDKc1l856UNiq8QYydO6rkX\nwKMAoGna4wDu9HzeBTAHIA9Agun53wYgzxj7EmPsy1aHMVGIXn5YVg8w3VKPrhuuUg2k8e88a1st\nGACWZnMAgKU58y/p/MS4iTP8JQBbwuu+Jf9wHgbwFIDnAPyFpmlbML3/X9c07YcAvB/A5zzb7DmJ\nPf4pDu5yb1+2EsopnXPn4Xp+eS5r/c253ieIcRFnkLcAFMXva5qmAwBj7AiAnwVwFMAxAPsZYz8O\n4BSAzwGApmmnAawBuG68hz0aonHveTz+Xp80fgDYtvT9+WIaAGn8uwHX87mnvzSbtd4nj58YL3Ea\n/0kAbwXwCGPsbgDPCp9lAfQBtDVN0xljKwDmATwEMxj8QcbYQZijhitxB1IuF+O+MjZylW37/3Qm\n5fptNaXY/2dz6V09Ls5e/KaXjaZp6A8sFbC21QYUeWrbYrfY7phOx03HFlEuF3GTlVVVt0Zb09QW\ncVBbjEac4f8CgAcZYyet1w8xxt4JoKBp2qcYY78P4O8YYy0ALwD4jPW9zzDGHuPb8FFCFJXK7q02\ntLbmGP7qZtP121s1pzZKZW17V48LMG/o3f7NIC5dNZOySjnzFlmvNqe2LXaLc1fMNlcNHZVKDao1\nc/zCFVNtnaa2iGLa7osohu0AIw2/pmkGgA943j4lfP5xAB8P2PQnhzqaXYKknnj4rN1FS26gzJKd\nZ7XaQlqVUZox5bWZrIpcRiGNnxg7ExV03S1cWT2RM3enN6uHT95aLJmGnxZj2Xkq1SaW5nKQrIC6\nJElYms2hstmc+rpRxHiZTsOftGTDVHv8ZnB3rpCBJNHyizvNdquLRruHsjXC4pTncuh0dVTrVJ6Z\nGB9k+CPy+Kd5AheXemZyKeTSKmX17DA8c4dn9HB4Zs/yemPXj4m4dplKwy8WXwurzglM9wQuLvVw\nnZmknp3FzuEP8PgBYHnNNPzL6w28cHETBDEKcVk91yTJi7RNr7HjUk8+m0I2o6JaG4/UsFFrI5NS\nkM9O5a0XSmWTT95ye/x8Mte5q1v47tlVPPr4eQDAwx+81w4CE5PD1nYH260uDizk7VjNJDKVT1+k\n1OOauRvt8b90dQuzMxnMFzOu9w3DwKkLVZw4PAtF3tlBVbPdw3Nn19C31g5QFAmvvnERuUz0pT1z\neRMHF2dCv+fy+NMqrrQbMAxjpJu5r+v4Pz/9BG4+Oo8PvO1VQ+1DNww8d2bNtVbA8cOz2OcxmIZh\n4DsvrqPW6Hp34UOWzTaL64xevLKF/fM55LOpoY7dy5nLm1hZNw3+d1/aABAk9ZivH/nyadf7KxvN\nxIbfMAw862kzkblCGjcfW/C9v1JtQoK/MxqW1c0mTl9wRiu5rIpbjy/as8OD6PV1fOuFVWf0LQH3\n3eFvf90wcOp8FTcdmfPtb2u7g+df
WgePj6dUGbedWERKVVzf2251sVpt4egBf4rkpUod55fr9uuw\nNvv457+Fc8s1zBXSuOXYAl7zijJuv2lp4jqB6TT8MVk96ZSMTleP9Pj7uo6P/sHTuOWGBfzcj93q\n+uz5lzbw8B9/Ez/1I9+H+287ON6D9/ClJ87jz0++5Hrv799zFO+4/3joNutbLXzks0/hnlcdwHv+\n/isDv9No9ZBOyVAVGdmMAt0w0OnpyKSUwO8nodnuo97s4tzV4XOwX7i4if/nT551vXf8YAn/+t3u\nMlLnl+v42Oe/lXi/P/r6o/ixN4a32YWVOn7t95/Em+84jHc9eNNgBx1Au9PHR//gaXslOABQZMn2\n8DlLs1lkUgp6fR0/+P3Xo5hL4/NfeQGVahMnDs8m+q3vna/62szLR953Nw4s5F3v/cbnv4WUKuP/\n+unvT3hW0fzuX34Xpy5UXe/9i3/8GrwywIByvvHtq/jMF7/nek+7sImf+mHmeu/ZF9bwiT99Fu97\n6ytx9y0HXJ/98d+exje+s+x6L+jZ/G//40X87VOX8O8/8HoslNzX4T/88TexWe+43vvo/3439s27\n22x1s4lMSkFfN3Dy21dx8ttX8W8eeh2O7J+sCWfTafhj8vhnsil0uu1Iw9/u6Oj0dFQ9NwNgFtsC\ngIsrdd9n44Zne7zj/hshScCffu0sqjX/MYls1NswADx9uoJ393SkVP+oZLvVRd4aDeTS5t9WuzeS\n4ef1f9a2WtB1A7I8uBe0YUlOr79lP9iRefzJV88EZrzw9+5kZbzqxsXQ/dWb3dB9iPyv7y3DwPiu\naa3ZQV83cOLQLN5wq1nR5MBCHtm0+5FMpxT80j+5HdftLyEFA8+dXQPgSENJuFQxj/mB1x7yebPP\nv7SOJ767gsur2y7D3+n2cXW9gZkxSnLVWhv5jIp/9OYTOH2xipPPXY1t90ur5mTLt95zDAulDD77\nqGY/XyL8vQuVOu4O2EdalfETD96Eq2sNPPrE+UDpcn2rDd0wcHlt22X4t1tdbNY7OLK/gDfffhhP\nn6rg2TNrqNY7LsNvGGZhwxsPlfCL77odf/rVM/ji4+exuR39PO4FU2/4gzz+bEaFqshoRwR3eYA4\nqGolf29lFybeNKyg6323HbQNf1wlTf55s93H8y+t47YTS/79tnq2hJXLmMa+2ekjmY8Z8ruW1NDX\nDazXWraMMQi883j1jYu4+5YD+O9PXsD6lv8h5ud487GFyFEXN/xhMgjnKa0CAFjeGE92DQ+WH91f\njB0VHtlfRLlcQKVSG6pw28qG+d37brsOxw6UXJ/NZFU88d0V+zscvtZvo90bWeLjNNo9zBbSuP+2\ng8hlVJx87mps0gA/rgdfdz0KuRQ+/5UXXOtEcPj1rnjOwzAMVKpNlOdzuP+2g3jxyhYefeK8q+R4\n4D5u8B/DTYfncP9tB1FrdPDsmTXfpMZOV4duGMhnVMiSZGdkxd1be8FUZvVwY6/IUqDhTykSMik5\nculFPrkryvDvxoxL/lv5jGJ76EE3tXsb57y4QRPRDQPNds/WvLkXOmppZnH7YQuP2edrHVsuY6aa\neic48e/xTisMu1OLOLdLq9u4YmXVVOsdtMdQqdQ+vuxgI6jFUhYSBmu/ZctweeMgQHgFUG5ADWM8\nlVkN657iMSXe7nH36vJGA/mMao88smnVtTIcJ8zZqje7aLb7KFtOBv/9QRw2p2qquY+w54zPdcmm\nzXPLZZM9j3vBVBv+fFYNzONPqTLSKSVS6ulanwVNbOKGtVJtQd/hGZfNdg+qIiGlKlAVGSlVTuzx\nA8Azpys+uavZ7sEAMGMFMfnDMmouv/i7g0gVItx74seUz6gw4DdOfOGYfEyQW5FlZNJK5MP51PdW\nAAClvNke4+jQG233eSQlpcqYK2YGar+VjQYKuVRgUDrU8Auvx7EWQ7eno68bguGPdyZ03fTW9y84\ns5nzGRWNZoTHX3XPcuaL1++b9xj+gM7M2Ye7U7UNv7WPrP08eO65tv/ejDvHvWJKDb95wXIZ1ZW3\nbxgGej0dKcU0/NFSj/lZp6sHGk7AjBeMKw0yDNGLAsxzirvRuPFcms1iu9WDdr4a+LntVVsezKie\nn2hcvQ/XoPvIxxgQ70MYRT6jRg7Hn9QqUBUJb3rtIQDjkXsGOT4v5dksNrbavvsuiL6uY3Wzhf0L\nwbJaLqOimE/5vFzx9TikimGM4vpWC72+4dLRcxnVlp9E+H3RbPddI4KVqnmtHG89fIRne/weuWjF\nM2IKv+f6rs/zGbOjJalnQuj2dSiyhGxKcXn8fd2AAUBVZWRUOdLjFz/zeZsuA7ezck9jGMNvff6G\nV5tBxae0FffnHsOftb2kMUo9Q3v8zvwCwOmUvOdse9TpBIY/G95my+sNXKzUccuxBTsw6jUMw9D0\ndGCDUJ7LwQCwlmBlrrXNFvq6gX1z+dDv7JvLWd9zngXxvh2HVOF02JYMksDwL1f9ElUuo0LXDd/k\nSvEZFK8Pl6y4x68qMhRZCvzdhj1S944a3Avk5EOeB/46R1LPZNK1MllSqhwY6OUef6erhxbHErfz\n9fwd0ePYWcPv9fjzGcW+gaO2AYBbTyyimE/h6VMV6EJaIV+EhUs9Wdu4jubxj6NDDPX4PZ1va1CP\nP8CLBIAnrU7xDrbP9jz32uPnuf5J5B5uBMM8fsDsSPq6gQ0hSF4Zu8fv9ob536h9Bx17WGxAfM29\nfMAZWXKPX5KkQOeo23NG7u1u3zX/o1JtYr6YsfP+7eeh5XH4rHPJekY1QcHovWaqDb+qyOj1DfuB\n596/qfHL0A3DlWct0oky/Lvk8ff6Ojpd3eU15jMqen3dlrOC4MdXyKZw+01lbDW6OH3RkXt8Ug/X\nNEf2+J1jGiW4q8gS0inZdWxxw+4ochk1NIj5lFaBIkt4zSuWsG/ODKyOw+P3xioGgXueSeSyZY/H\nG7w/8zPupOiG4dr3ODRq7wgnrYZ73hxen8gr9QQdk+uZE67PSrUJSXJqHvFjiHLW+HaAaSvWt9qu\nSWzxHr9Xzpq8CgBTbfh5/jrv6bnen1JlO189TO4RM378MkMfPPttHEYijCC5wPakIm42J6NExR2s\nDMDUsTmOx+/O4x/1BuZe2Xwxg83tzlBF8Botc4TDg31hhoD/FvfOouAdnHcfq5tNvHS1hu87ModC\nLoWUqmChlLGN6SiMIvXwNNjVBE4FH53sn4+Qeubdhr9aM+MHqmK28TikCu8Ih3veUfu2Pf55t9Qj\n7k/cP3/mxE6rUm1ioZiFqjimLhtQe4rvz96H9durm00YcMtN2ZBjaHmcjXTK7NwabfL4E7PV6ODn\nfuMxPPaty2Pfd9cK4HLDz2Ub/le1pB4AoQFeUWP03rytdg/l2RwUWRo6iJmEILkgiXZqe5tpFd93\nZB4zWRVPn6rYIx/H47ekHjuPfzwa/5F9BQDAagKN2nfs7Z6nowvW+JvtHrJpJdEksXyI7PDN0
6sA\nTJmHs28+j41ae+TKrcNm9QCDLcK+MoDHz40d3+Zw2bxO45Aqgs43yPMWWak2kcuoKOScbKQow39g\nIW+OyKx26XT72Ki1feeez6hod/uumAY32tctzti/DfgzesxjCL/nxM/tzo2Cu8lZ2Whiu9XDs2fW\nxr5vUerhr8W/KVVG2uoUwjx+UeMPSuvKZ1UszeV2VOoJkjOSGP5mu4eMZRRVRcbNR+exUWvbwUKx\nTg/gnrk72vFaht+avj5M2zRbPTtoBojn678GSY1qPiQIxzsmcbYr9z5Hva78eIfx+GcLaaiKbE+y\nimJ5o4lCLmXHa4LwdiT87zHrvMcS3G35Rzim1h78fOmGgZWNJvbP51yTxxyZpe/6bqvdRzGXwkIp\nYx8/v37eMhiOdOnso2Hfm2Zn57SFfx+KbErBvnuuE9C5ZaNHNXvFxBp+nid/RVgfd2z77utIqYrj\n8VtSj1vj5x5/vNQjXthe3yzlkMuo2DeXQ73Z3bEev+HxMIDwySXe7cQH8MQhcz7uC5fMAlq+zJnM\n+IK7iizh4JLpVSWRKkR423pjGkDAZBrPOUaRC9lH3coXFz1OO8C7Pqrh70GWnFjFIMiSWdMnrv36\nuo7VatMllQQxV0gjrcq2l8v/HrEM/zg1/pxntOb1vDk8XdXrrQd52+1OH4a17/JcDhs1s9zKip2N\n492H1XkIzyXf3+FyAbIkOW1hp3Lmffvwafwhjhjl8Q8AN8IrG81E+cpJMQzDp/EHefy2xh9SobPb\nDQ7uci+CG35g5wK8URp/M6Kz8XrDx61iX2cumYt6b3u8Mz5zd9TgLk89dTzMwaQeb0aP69iEa8Br\npmRjZu1y8iFtFmT499t6+GiZPeY1UIYuhbA0m8N2qxfpVKxttc1UzhjDL0kSytbolJc4AASPfwfy\n+MX/gxyKlZDYRFA2kBiz4vfW6mZLSOX07CPt7+jthAc+athwj358HVA6IEBspxC7HbGguT57zcQa\nfq6h93VjrAHSnuDVp7xST1/U+M3Pwjz+thjcFQyi6IU72Rc7a/gHkXq4URSN59H9RaiK7Hj8bbfU\nI8sSMqnRF2Phxo63y6C5/Nwwu6SegMBsp2fWTEku9VgTbQI8fkWWXAFibgBG9fi98y8GJUkbhhnP\n4P3l7MlPlWoTqiLjoKV3jzW4m/WP1oLu1bBspFxA+RAxfiAGqr359/Y+sv4JiaITVZ7L2ckHlWoT\nuYziK1YX5MnbJRs8Uo94jJPCxBp+cWLVOOUeMVdf9Uo9Lo0/OqvH7fELN5CQplf2ZEuMm8CAWUiG\nCqfd7fuMoqrIOHZdERdW6mh1emi0uq4AN2BlQowa3O307WBdJq2MxeMPknoGzZgJy7euN7so5FIu\nr9xOfRwxl38QKSoIntkT5VQkCexynPNqYmWjifJcFumUKYeOw+MPunZRufxODn+wx98MuN65tOoK\nVPPnzlujKEjaawZ0HpUNs/Moz+V8I7N8RkGvb/hifYos2fFB8XyjRuB7weQafqFBL6/ugOEXPH6e\nxtkTOoVMigd3Q7J6QvL4W0Iu745LPUEecIzG7+iQbhnkxKFZ6IaBF6/UsN3q+T2cEdfd1XUD7Y45\n0pAkCeXZLCqbzdAJckHYxkM4tmxAOQl+PbwljsMIkxzqjS4KeXdQNJ0yUzpH6cx13UDL6gSHJYlc\nxkclXqkjCG7szl3dwnar5ypINl6N37nvokanPA3V5/Fzx6YTZLQVn8c/k1V9NYqCRg08WJzNKPZz\ne/rSJjo9PbC4XVBKJ88kEzuJuOdxr3hZGH5eGXGc+02LGr8nuKsKwd3wrJ7gPH7RC18SvKidIKkH\nHLcNABw/yHX+TTRaPd9qVLmMEljYKinejIfyXA7tTt/W0RPtI2DSk6qYGViNgGuQ2OO3h+POsfR1\nHY12D4WAbJh9czmsb0Wv1xBFUPbHoNgyYhKpJ2LWrrM/8zvfsVYCsw3/mLJSmu0+MinFtSJdlOFf\n2TAllmLO3f5Bk6LE622vUbzRQKXaChztRI0axH1858V1AO5UTt8+xA6o45fvSOoZEDFr5vI4pR5R\n4w8L7irxwd1OSHBXLBucSSmYLaR3ObgbXWY4rFTAiUNmnfYXLMPvTf/LplXXtPaBj9VjtB2pIrnc\nE2bQvXprK2RUE0ZQHv920wn2eeHyw7DXtdly7pFhcSZxhbffiuXxRqVycnhH8t1zpuEXC5I1WsHl\nLAaBx3dEwoyibhhYqTaxb96/bm02oDaTOHFqJpvCTFbFmUtb6PX1wGUjI+UiwfDztgjcR9Cood33\njTJJ6hkQ0eO/utYYW3ljbrBVIY+/Z61X2/OUbACi0jl5IFjyXXzAuUH3zeWwttXakaj+oJkS4jZe\nozNbyGBpNovvnd8wF5PwfB4kqQyCNx4xTIDXW0rCPjaP4belnhHSOe2MnnyAx88DvEOO5EaZvMXJ\nZ80a9WGdj1PSOF7mAcyORILTdqLU09eNUAcoKUHB7LDgbrXWRrenB6ahqoq/jLb3OTAD1e7zEAlK\nTxaL+vHry/cRJPV492HOJejZRejscySPfzC44V8sZdDp6VgfYpZn4H4DPf6+6zcTBXetbUoz6dAh\nI2DeNIaBwOXiRmWYrJ6oGjEnDs/aHWNQFkPUfgc91qUh4h9hHn/eMwV/UKmH3wvNIMMf4PHznO5h\nJbxRCrSJlOdyWN0MXvPBKWmcbJWzlCpjoZRx9j3vSD3iMQ8DX4QlaKQWtO/lkDRMjjfu4O1IxXMO\nNtr+3221+/a8Cu9s4ST74HMJvM5GkmJ0e8HEG34+y3Ncco8o50RJPY7GHx7cVRXZugn9er/ofQD+\nJeHGQaPdtxdf4QRp3iJRRodP5AIQGhAb1uP3zlQdJpffkUg8x5bhhenMazVIZU6OtyZ/lOHnmvmw\nmT2jzNoVWZrLodfXfYuAA9GrboUhesflWXcJ4lEMV8ezCAsnbHTqpKEGH3s+m3IlGoQ9c0BwRlNQ\nkTXvvAq+nSJLvoXX3cdu7kOcvxP0W+TxJ4QPLfl0+cur4wnw2sHdlOLP43dN4LKknpAql51uH2lV\nNmWGjqOBemWGnUzpbAYMLYHo2YJRhp8HeAG/x5+NiR0kOVZxP7xa4mAev2mMvVqx9yEcRkrxBjGj\nDL8TQBzV4x9+4XrzOMLb0AnsJpN6zP2Z5zVXSNuOzziyUsLuOafEsjvAv2wXZws+9pmc6ipC6G1P\nVwcW5a233KMG8fj4douz2cB6T0nvOe6kkMafEC6lHLU8/nHl8gfl8Xs1/iRZPZ2ejnTK9PjFkr7e\nJf92MqUzrB5N1MIiUTLI4X0zdlA7yKsGks3e/Z/fuYo/+GvNFRD0/m4mpWB2Jj2cxp8JSc+zji0s\nZTUK7vHzY44y/JmUgvliBueu1vD1b122q5kmPo9xST0RufxJyjF74d8VRwm2Rj2C4QqLKznesNfj\njz72fDblGuF5R1D8+FXFXKbSSyatuOIZgHlfBxn+
sBGTrfFbz7s9yvRUg3XOcbIqdEbeeYwxGcAn\nAdwKoA3gvZqmnRE+fzuADwMwAHxa07TfjtsmKfyi8toZ40rp5B1KpMafoDpnt6cjrSqunl/0tL3e\nRxI9+MtPXcTZqzV0rH3MFTN451teEVphstHuYT7gxs5lVKxsmDny3qyIqMlNiizjxoMlfPfchl+P\n9SzGUq238f994xzedt+Nvgf6K89cwumLm/gHb7gBpXza9bviw7U0l8WLl2vo67orzS+MZrsHCfCV\nYvB6X8No6DkriNnt6UinlEjDDwB3sDL+5smL+MwXv4fPfknDrccX8dM/erMvg+bbZ9ewvNHEW+44\nbL83aAwiDD5qWg+IH/GCe7xzSAK/V8X0xUHXja1Um/jK05fw9vtvtJ+vsI4uTONf22whrcooBgTW\nAWeBoGa7h5TqxNh4Rg3vMMpzWcgBJTFkSUJWWLCIF3kTj29fQFu4jt2T1RM0a9d9jpNVkz/uaXsb\ngLSmafcA+EUAD3s+/xiABwHcC+CfM8bmrG0yEdskgmvruYyC8nwOV9a2R04pA4IncAUWaYupztnp\n9pFKyY5BtHr+RtusC84952I+hXRKThTc/ZOvncE3nruCp05V8NSpCr781EWcW64Ffpd7PEHGTTRi\nXuK8Ta7zl2Y86ZweXfSvn7iAv3nqIp45XYGXzW1TcxbXGw4yxgcXZ6AbBs4v1wOPJejYcxnV9zDb\n3lfL/RAOKvXw3wCiPX4A+IkfuAkfff/r8WNvvBHluRyeOb1q532LfOHrZ/G5/37KdR+NK7jLO/2N\ngHWd12ttqIoUajyDOHFoFrmMilceW7DfG3T5wK998zIefeI8nn/JaYuw802FLMZSrbcxV8yE1jHy\nBpy9Jbj4yjjRAAAgAElEQVTnihkcWppxnYeXXEa1R6+tthmYFTviV1w/i0xawSuPBu/DHgG33aNM\nb2eezZiji0lbhSvO8N8L4FEA0DTtcQB3ej7vApgDkAcgwfT87wXwxYhtEuEYYQUHF/PYbvWwte0P\nYg2/34R5/FFSjyr7gzztHnJpZ6EQSZIwO5OOPXY+s/XmYwv4xD+7D2+/7wYAwPpW8GLtUV5jVAZO\n0CQokR/6/iN414M34ZYb3De8U5rZbI9vnTFr1Vfr/uPj5yp+FvTw33p80dzXC6uBx+IlrL6NM5mm\n7/6thDN3AX8Qs94IT+fk7JvL4Udffwxvv/9GAAgMslbrydpiGKIMf7Xexlwh3HgGsVDK4j9+6H68\n/pYD9nuDLh/IRx8brvMNNopByyD2dR1bjQ7mCv6RrH1MntpK3vtCliT86nvvwrsevCl0H2KRtZbt\nrTsjyf3zefzWz7/RXqjIt70n9uGMOtyjUTnBgjN7QZzhLwHYEl73LSmH8zCApwA8B+AvNE3bTLBN\nIrrdPiSYefK8jO/lMcg9rqweu2SDP4+f16oPyl/mFT69Ug//632gZ2cy2NruRs5F4DGCQj6FQi5l\nB+XWa8EjhaCiV5y8HTQLMPwhcom9bVbFW+447JNexIlhyxsNW3rb9HRo7U7fPpeqYAiDOqpXHluA\nqkj4ZlLDHzCj2Dw2/zXwZjvF4fVs660uJCmZcZ6dMeWs6rbbAOuGYXeCGwGjn1EmcAHmsWVSis/w\n93Ud1XobCwEy4KDweEpSw8UNf/Boz3/PeYPqW9tdGIYZYA6DJx60Ip65OHJWNp5hGEPFXPi5tLwa\nf0jMbdIMf9yZbgEoCq9lTdN0AGCMHQHwswCOAmgA+APG2I9HbRNFuVx0vTYkCamUgn37Srjp2CL+\n6hvnUGv3fd8blLR1Iy8tFWzjqqQUlMtFSJaxO7C/hEI+jWxaQV83fL/JJ3XNzKSxz6pgmMqkUC4X\n0er0UZ7Pu7YpL+TxwqVNZPMZzIZ4MjzdM2/t5/i26WG1ev7fB4BNa6Hnxbm87/MFK888m8/4Puv0\nDeSyKvbvK0W2k5cty2uTFBlnrjrSTKuru37jqhCE7+jOdbXi57j+0Jzr4Xj18SU8c6oCOa1iMUCP\ntre36tvMFv3ndMB6LVvXsdMzUMilBrpXygvmdUxnze2a7T5KM+lE7dSTeAaY+1pVa217zea+JNuf\n9YS2SFpPCPA/I4CZ0lnd7rg+W602YRjAgXJh5OelyS+ccPxRVK2RUlO4LyTFNJIHykXfPkozaVxY\nqdvvV60R13UB3+XkLfktlU1haamAZqeP62fSA53rbDED3TBQms1j1XrWlub9z1IYhmFAloBu37zm\nkjXv58A+/3EXZ9JYXm+MfC3GSdxddxLAWwE8whi7G8CzwmdZAH0AbU3TdMbYCkzZJ2qbUCoVt5bd\naHWRUiRUKjUU0uaDdfqldVRuWkqyu1CqVhZJo95CTTWHwbV6C5VKDdsN0zvbrDbQ3G5blQm7vmOz\n68voBnpd80ZdrtSwsrKFRqtnHzcna6WGnj23jsPWsoNeLlmF6HJZFZVKDVLfNLSXlrd8vw8Al69u\nWseg+z+3Fra4fHUL8zn3Ja5td5BLK4H7jKLVMD249WoTp89v2O+vrDdc+3rx4qZzjMKxb9bakCUJ\ntc0G6oL8cPOROTxzqoK/ffwc3vTaQ67fLJeL9vY8cyYlS75j71oZE6tr26hUaqg3OsgOeI6GFeC/\nslxDZTGPzXobxXwq0T76liOwvFp3ff+8EJ85f3kTlevN+MlmrQVFlrBVbaCWUIoR20KklE/hUqWO\ny1eqSFnG54xVXjs/xHX20rKeibVqI3ZfumFgzcowuiK0RWXdvLe7bf+zlFJktDt9XLm6CVWR8eIF\n897KKP7rzOEe/9WVOi5d3oSuG1Ajvh+EtZwwzl+q4sqK6cjovf5A+8imVdS226hUaliz0mc7rY5v\nH2nFrHC6vLyVaCnQQRi2M4kbC38BQIsxdhKmrPMhxtg7GWM/o2naKQC/D+DvGGNfBzAL4PeCthnm\nwPhiKQBw3aLpwY5jEpcT3FWcpRcD0jkBs5Bb0AQurvu7NX5T4uArAYlwKcAri4i0PAHJuUIGsiRF\naPzBE0bE94I0/mHrwPPg7nqthVMXqjh2oIhSPoVNj8YvnqMo9YQtPHLbCbMjj9P5o2YcZz3T55vt\nXuJyDRxR6tENA9utbmhg10smpSCXUVD1XF93W7g171xGHXoRFpGFAJ2f/z8foZMnZZCSA1vbHXuE\nExfY53jvVX7PREk9eSGrhwfyB82QEn930DLe4j58mWQBIzg7GD1iWfNxEnmmmqYZAD7gefuU8PnH\nAXw8YFPvNgPDNXTA7Fnnixm7VOuo+wXcwd2eENxVFcnOGsmkFGw1/MbamQQmu2bmhd1AjuEPNuKA\nEzTlN4ksS5grprERp/EH6aYhk254PZFcZib0OMLgN7R2voq+buA1J5bwpFbx5eFvCecoBvjCOpzy\nXA6Hlmbw/LkNtLt9O6juJWjNVo54vkHLMybB1rJbXSufPzyjJ4jZmYwvuCsae6/GP+rkLc6cYPh5\niQPb8I9
B40/zzJsEefziOXo7fSAsEcGJHRXzabvDiA7u+o32MBr/6PtQbMcsqj6UmDiQpGDebjDB\nE7h0V3BuJpsaSy5stx+dx89HAYA5uzfQ4xdGDeLSf2E3UMky/Fvb4ZkRXo8fMB/cjVoHuu4PCkc9\nTGHT7NshI5Ik8NQ77tHddmIJc4U0Wp0+2kIZh3AvN3ykcduJJXR7ul0NMYiogKj4EIdNnY9D7Dy2\nLSlvZgDDP1dIo97suorxiR2BqxNsjbb6lkikx18a3fBLA2SliPMJ6s2u/VxFJSJ489z5PRM08Ypj\ne/ydnqu42iCIZZVH6Tz4rH1n4mbwTHpgtHpH42ZiDX+n13cZ/kxaHrr+uYhYj1+x9DZx6UXxN9Mp\nGX3d8FXWdEk9ovchLOYgMjtj3sRRHn9QyttCMQvdMAIloqhMhLAbbdghrXe/c4U0juwvBI5keBZL\nIZeyhv567MIjt52IT+uMTF8VZu46hmAwj9q5jn3ULMPvrQUfBQ/ai8Ze/J97sn1dR7vbH3nyFme+\naE7iEg0/zwRbKPprzAxD0qwU7v1mrLbfsM4/UgbxjE75vc7vrSB4h+z21ge83sKExGHXR8hZs/bb\n3T5a7R4UKxPQyzhmP4+biTT84oLonExKCTTCg8KNvKrKkCQJKVW2NX7vb4ZV6OwIctFgUk8CjV/w\nini1xKCZmYPoppxGhOeVBJ6jfNuJJXN+gmXsxGE9P8ej+wswDHOU04rRYY8fnEUhl8K3XlgNnaRn\na/wBx55OyZAlcyJQ3DyFMMR89bjJW0EEpXTy/xdLGVTrbXu942GOLwwu56x7PH7Zmj8yDvIZNZHU\nwzucG6z6Wryza7R7yAgTrER8Gn+tjWxaiWwfJ+7QH7o9XVJPa9R99G0pMyhuM4mF2ibS8Pd1A4YB\n19qVGbuEwmhef0fI4+d/XR6/Io4ygss28CFsJqW46tTHST1BE3w4QRIF99jWAyboRHrAIaV0o3Ty\nJPBju+24GZCdtSUst+FXZAkHl8zspWq97VqAPghZlvDqGxdRrXfwyFfO4C//7iX85d+9hLOXnAyh\nqPM15QjFCrAPafiFIKY9eWsgqSfY45ck4Pp9RfT6BurN7tgmb3GCJnFt1NqYLaTHlkGSy6joeBbh\nefJ7Kz5Hhnv8x63Z3/yYgkoyi/vm3wHM+yUs5ZkjlmwYtj3FUhRx92cYXokxbPscefzJEDNvOLbh\nH2H5P/e+Zfsvn83b6+l2Rg/gdDwdT4VOrvunrMVceBnksCFtSpUxk1UjZ+862QmOseEe/0aUxx84\ngSvYwxjV6JTncpjJqrj52DwAYNbKvBC1/M16B6WZtG2QqrV2Iq/sTmuG5KNPnMefPXYWf/bYWfzm\n55/xHXuUAXE/xIOdoxjEHMrjL/DOXQxwtlGaSTvXsdYeWW7zUsinoCqSbWR1w0C13h5LYJfjzex5\n8coWPvlfv40/P/mi63vrVpoqL6zI74tmO1zmE+NRvb6OrUYX8xEZPYAZe+MLIA3bnlmX0R5S6hFW\nA2taM/aDGLTe0W4wnrtvzHQ8xhmAUDRtRMPf1yFJsPV9VZHRE4K7KSXgNzteqafv+pwbnSgDV5pJ\nx0g91rYuqSfc44+64YOWpxNfD2v4f/rvfR/aXd3uhL0SlmEY2Gp0cGhpxk7Hq9bbtiYb9buvecUS\nPvyTd9gP4R99+QW8dKWGXt8MuDdCavFzchlzNaphNV8xiMnnDESVa/Ayx6WeutMWm9sdHFyccTrB\nettuu3F5/LIkYa6QsbO/6o0uen1jvIafG65WD6V8Gueumnnq56+689XXt9qYK6Tt+3aj1rYXYQlb\n91ccnXLHKCqjh5NNqy7DP2j6bpBEO2yAeLtlevxhx0BST0K6XSfzhpOJWRgl8b6tNFGuxXGN3zCM\nwOAu4F93tyMEiIEgb9NvdGZn/FkfIq3A4G60xs9HHF5UxVw60psFNWod+Hw25TIoXnmj2e6j29Mx\nK3j8G/VOIq9MkiScODSLV92wiFfdsIjjB0vo9XUsr5spvHYt/pD4hFl0q4/tITV+8/xMw18bQuqx\ng7vbjrzR7emYLaTtdqrWO2MrySyyUDRTSXt9faypnByvx3+hYk54uljZtsuQ9HVzQZj5UtbV6Xe6\nwYuw2PsWjCLPfEpi+POeZ27wHHzHOYqKQUTvw/xNPrIJOwa7thBJPdFw6SXtyeoBRvf4e54Abko1\nNX4eV3B1NiHB3W7XLUXZ+nLEQ80NQ5jcExTcLc6kochSiMYfPnwGrPrynhrgzkMynlzikiegyY2e\n29i1hxpp8BnO3MjExSf4+zygOIyUwoOYw6ZzAmJRNmcikphrH7Zu8CjMl7IwYN5b487oAYSCZNax\nX7Rmura7faxaM3U36x3ohoGFYgazhTQkyTrfBBIdYBrgai1+8pa4XbPdH9qZcapr9tFqD5dlxX+T\nO2Zh9a+cyYGTU6FzIg2/uCA6Z1zBXW/mjmoFd+1snwCpxzvKcKQex+M39cmO/doLr0sfJvcEBXdl\nScJ8MRPq8UcZUv5giIyrDrz4G5mUgi3LyPFOrTSTdun/wxj+I9zwW0Ym7gHnDx3vJAcd+vPj6/TM\nAmcS/KuQxW2bUmX7+jppiRl7Bq2o8Y/T4xf3zz3+ueJ4MnoAt0ZtGAYuVpxaTfz68HZfKGWhyDJK\nM+lEMQ3R8HPHISqH39lOQbvbx3ZztAlcDcvj91bVTLQPSxri5x6v8U9OTf6JNPyOx+9cjDC9fVA6\nvb5Lx0+pslnbvu+PK2RS0cFdUeoBhBsg0OOPNvxh0o04jBdptIKXXeTwobCYHjnMylRxzM6k7VIF\norHLplWzjEFtOHnjUNk0/BdXzDIdfDgetmAL3zdf92BYqQcwFxPJZ9VEi8NwePltHtzdtGWLtEvj\nH1VuC0LM7OGGf5wevyj1rG220Gz3bRnMNvxWu/NjmS9kEklbeUFy4ZJJkjRU55kb7nrz6q08TjCc\nx29usxHn8fNquRNUk38yDX+Exj9uj5//z2WawICyp7MR1+0FhJvQNjrBGj8QJfX0A72OBWsYL6YJ\ndq3UujiPv68brvjETnibs4U0ag1zdrF38s1cIWMZu8EXFy/kUliazdrepdnRRUtbgHMNhpV6AGCr\nkbxOj8hcwSq/rRu21DNbyCCbVuzyyePO4wfcufw7ovFnHI2aS2933bwfgKnzA04qJ+9w5osZ9Pq6\nLQWFna9ZM0tCo913pJ5EHj+/3m2kQ2JdSfax1ehExiASHUOMvKjIMjJphYK7cQRq/DELowyyb5fh\nV9zLwwVKPb7grrtj4kO8aq2DdEoO9BTjJnG1OsHpYHzavViXP4kBD5rEtTOGP2NO1Gp0XFIPYBrC\nerOLmi2BDeblHjs4i41a285/j9LFeafJjccwQ3dx/8MY/tlCGrphoNbsuuIdkiRhrphxzWkYl9wG\nOPfIRq3lSD1jKNDGyQsaNdf3X3XjAgq5lP3aji1Yx8J/n6+hEXev
NgSPf24mueGvN7tDyXqAmY5p\n3y8jafyWvBiRFcTXdJ4UJtLwR2v8w2f1BM0I5r/BZyYGSj0hM3cdqcc8Nt0I9xzsej0hk7iaYR4/\nn8QlVOkc1vDzZSGHMYphzAqT0/wev/n3ynr8wx/EDQfNWvgXlmuxVUW5IdUNY+hzFPc/lMfPS3PU\n2/YIjb83X0ij1hA7wZ3R+NdrZjnpQRahicO+l1p9W9q5fl8B1+8rYMVKod0QNH7A8dqvWOXGozo6\nnhVXrbfNuFGCaye237BtmcuodlZSlGwaBu8suAoR5dhw6XVSmEjDH+zxj57VE5S543j8fddr8/eD\n5aWOLUWZn4s3ddgN7k33E9ENc9nFYKnH7/En8RqDcoebllwyjnLAnLmCU68nyOMHnId/0Af02HWm\n4X/h0iYMI9542P+nhzvH/IiGf1bI7LH1aus9Lr1ctTrBcXr8PItmvdZGtTbeyVuAeC91caGyjZms\nWS33sBWHubS6jfUt9xq/87bHH3/t87bh7yTK6BGPyfx/OEdm1M7Dew0jHTErVXgc64aPg8k0/L2A\n4G56dI2/6ynXADidAE+1CtL4vVk9fD+8MxKHiWHDvWIuBUkKlnradnE3/7ZRHn+0IfRP4hq2Fn8U\nYjmKzbopdfEOjBv+7Za53GMST06EG/5TF6oAolMgx+EBivsfJJWTI87e3dzuoJBL2dIhb4vl9SYU\nWRqrR67IMuYKGVxZ3Ua72x9rYBdw2qVa72BlvYHD5QIkScLhfWZ574srdazXWpgvZuyS5rzzWV7n\nGn/4tc9lVHR7OurNbmKJStzf0Nfb4ywMind5z6h95DNmQbfWiMkp42KiDf+4g7tB+015pR6Xxs+X\n1Asr0hbk8YfXoynlg2fv8pshyOOfDyjUlkTqCZo0MszapHHY+frbHWw1Oijl07a3LQbpshnVNgpJ\nOVQuQFVkvHDJXMI5scc/7AQ1YX5DcYBZuxyxLar1jt0RAE5bcDlwnKMuwDS0fPLauD3+TFqBJAFn\nL2/BgDPH4sg+szTDS1e3sFXv2JVCAff5AuEzrgH3dU3q8Y+jo8+OofMQq8AmKSw3KXLPRBp+b/AU\nEIK7AT3mV5+5hJVq0/e+l0DD7wnuBnv83glczkLwQPKbcDakbENUrZBiztRrxSJcSdIjvR5/XGnk\nYbGrUtZNqUc0duIKUMMMxxVFxqGlGUFD3VmPX+wwhvL4rbbgpSPmZsLaYrzXAHAb+3EbflmSkEur\n9nW43jL8B5fykCTgubPrMODIkgB89XbiPH5Oco9/NG/dt48R4gTO//HnOCmZPRNp+Afx+K+uN/DZ\nL2n44y+fjt9v3+2pA0Jw19L4gwLKXqmn3dORSsm215b0BirNpNHuOBUkOfy3gzx+iU/iEldwSlCW\nQCwZCyC2NPKwzAo6fl837LUHALf3NuyDxeUEIE7qGcPQX/BKB6nFz+FGi6+1K1aZFI3xuDtfwN2x\njNvwA+6254Y/pSo4sJAPnDuQy6j2iBlIPlobyvCPQ+oZetSgBv4f9luTktkz0YY/HVSd02OEeSXF\n589txNbqj5J6AjV+Xp3T6/ELy0ICyY1OWC4/N8ph8YGFYgZb2x37+J3gbnQWgfjdqDVrR4HHLs4v\nm9ke4uQb0fAN+7vXl53F6ZNLPaMbgmGCu4V8Coos4ZKV2+6SelxtMb6sKo642taOGH6rbSQAB5ec\nzph3AoDb45ckydUZRaU6iu2RJIff3GZ0aU88puHlwWQjj0HWLt4NJtLwi2WPOamQrB4+8ard6eP0\nxU1EESn1RGn8AVk9ojeT2OMPmb1rl2sICX7a1Q6FMrcAIhdU8aZz7kT+OGDFLmbS9v5LguFPqbJt\nQIf3+AXDH6ET59JjMPwj5vHLktkWfHlKMR99tpAGV/V3xOPfQakHcNpm30LetS6yy/B7gsr8OLIx\nBdCG0fiTZNLF7iM7+j3DR+mqEh2wFyucTgITafiDyifIkoR0SvYZYbEH/fbZtej99vwpm3ZwN0Dj\nV2QZqiL5JnCZcwGCvfxoj9+/WIf422FDRW9d/mHy+J36/eP3Nl1evme6PX+Qh304RcMf5ZXJsmRn\nDQ3rvWXSim2chzH8gHfE4/yvKjKKM6O1RRSi0d0Jw8/vJ9HQA7BTOgG3xw843nucUR1O6hlDYHYc\nnYe1XdJzJI8/gqCSDYAp93hlFzE96rlYwx+R1ROQxx/2m51e3zXHIK3KdsZK1FqvttTTCPb4wyYd\nzXtW4hrO8I+/VABHfFj9hj/Zwx9GKe8UfIurKsrbfthgn2zV5AeGC+4C7rbwGrH5EdsiinnByEbJ\nKsPCDdz15RnX+2JH4O1w+PnHGdXcEB5/JmVmGnm3HwTxWR129i/fLu6eI6knAUETuADzYns9fm7Y\nVEXGxcp2YCVLe789/36drB5T41c9v5lOKf7qnF3dJfXwpf+AZBq/1+NPovEDwDOnKuh0+4lkG29W\nz7hXfhIR5Z1SYbyGH3B0/jhPPpfQ+4oin1WRTStD1X4B3F7+rK8t0vZvjBvezgs74O0Djqx42OPx\nzxczmMmqLlnP/izhteftYe4n2WhNsjKNkuw/DLfHP9oksLACbc7+reUiJ0TqGf8dOAaCNH7ANPzi\nMn+AY9BefeMCnjm9im+/uI77bzsYuN8gCcmb1eP1+NOq7BpV9HWzdn/ac4PmMiq2W71I3T2sQmec\nx8+OzOG6xTye1Cq4WPlf9ogjyjjZhaFabo1/Zzx+wdjlPcbOKg88SkDzB193PRZKGeyfz0d+j3dq\no3RuP3zXkZEqwIojHm/NmfmE0scwpFQZb73nGJbmxjt5i3P3Kw+g3uzilUcXXO9LkoR/8IYb0On2\nfXMT+PnGdXS8PQatL8Rr/Ixq+E19frTgbtw9d/2+GdzJyrjtxOJQvzNuJtLwB6VdAqb37c3q4Ubz\n+2/ej2dOr+K5M2vhhr/nrwHEDX2Qxs9/U5RmwjqlJDdAaFZPjAyTTav4lZ96Hf70a2fwN09edO0r\ninxGxZX1Bv7Np5/A6ubwVSvjEFM4SyFSzyi/+6obF/GqG+MfmHF4/G++/fDQ2wLO+WbTim+mclLN\ne1jefv+NO7JfALjxYAnvO3hL4GcP3nl94PtJzzdvG/7B1hBwrvdo3voo14N7+nHyWkpV8E/f/uqh\nf2fcTKbU45kgxcmkzNr5uu7Uu+Ce7LEDRZTnsnj+3HpoWmfQWr78f56J4Tf8skvqCZKLAEfriyoO\nlsuoUBXZV6+naUs94dtmUgp+4gduwr9852uxNJvFEWtB6yiOHSii29OxXG1iJqfilhsWcKNV+Gyc\n8E7IzN12n8PtN5Vxx01lvOYV5bH/rpfsiIZgHPBR3WyA98qun0NalXHsQPy1uxY4uGiuvRx3z80X\nM1iazYIdmR9o/1yeGdapsOXZEWIi+Qm454ZhYj3+lCr7ho7iJC7eS4vLHb76xkX87dOXcObSZuBN\n5NTqcS6S19B75ZNMSkF
yAIYk6ggE8QBDEnUMAnCIKYEyjgEwRBzAm+C6DIsiwC+CSAWwB0AHxI\nUZTnzec2APyZ4+VvBvDPAHwKwB+4vYcgCIJIj6AM/x4AeUVR7gDwzwHcy55QFGVHUZR3KoryTgAf\nA/AwjGD/bgAFt/cQBEEQ6REU8O8E8GUAUBTlIQC3jr5AlmUBwCcAfFhRFN18z/1+7yEIgiAmT1DA\nXwRw7Ph7YJZ5nNwN4IeKojwb4j0EQRDEhAlaxPwYwILjb1FRFG3kNT8H4PdCvmcUYW1tIeAl8wMd\nCxs6FjZ0LGzoWEQjKPN+EMD/AgCyLN8O4DGX19yqKMq3Q76HIAiCmDBBGf4XAPyYLMsPmn9/QJbl\n9wGoKIryKVmW1wAcBb0nud0lCIIgoiLoup72PhAEQRATgJqpBEEQcwIFfIIgiDmBAj5BEMScENS0\nHSt+1g3zgizLtwH4bUVR3inL8msA3AdAA/BDAB8xJ7PNPLIs5wD8EYBrABQA/CaApzCHx0OW5QyM\nWeuvA6AD+CUY18d9mLNjwZBleR3GbP6/D+MY3Ic5PBayLH8ftlDmBQC/hRDHIu0M39O6YR6QZfnX\nYVzYBfOh3wXwMUVR3g5AAPAP0tq3FPg5ALvmd/9xAP8XjPNhHo/HTwHQFEW5C8C/BPB/Yn6PBUsG\n/iOAJozvPpfXiSzLEgAwSxtFUT6IkMci7YAfaN0w4zwH4D0wfigAeIuiKN8w/30/gB9NZa/S4XMA\n/pX5bxFAD3N6PBRF+f8A/GPzz2sBHAB46zweC5PfgWHIuGX+PZfnBYA3ASjJsvwVWZYfMOc5hToW\naQf8ubZhUBTlzwH0HQ8Jjn83AFQnu0fpoShKU1GUhizLCzCC/7/E8Pk5b8djIMvyfQB+H8BnMafn\nhizLvwBj5PdV8yEBc3osYIxwfkdRlHfBKPN9duT5wGORdnCNYsMwyzi/+wKAw7R2JA1kWb4KwH8H\n8BlFUf4Uc348FEX5BQAygP8bgOR4ap6OxQdgTOT8GgwL9j8GsOZ4fp6OxTMwg7zpXVYHsOF4PvBY\npB3wyYZhmEdkWX6H+e+fAPANvxfPEub6Cl8F8OuKotxnPjyXx0OW5ffLsvwvzD/bAAYAvjePx0JR\nlHcoivI/mTbsjwL4eQBfnsdjAePmdy8AyLJ8HkaA/2qYY5GqSgdkw8BgXfVfA/ApWZbzAJ4E8Pn0\ndmnifAzGcPRfybLMavm/AuATc3g8Pg/gPlmWvw4gB+M4PI35PTec6Jjf6+QPAXxalmUW1D8AI8vn\nPhZkrUAQBDEnpF3SIQiCICYEBXyCIIg5gQI+QRDEnEABnyAIYk6ggE8QBDEnUMAnCIKYEyjgEwRB\nzAkU8AmCIOaE/x8Ys6GoNREgGQAAAABJRU5ErkJggg==\n", "text": [ - "" + "" ] } ], - "prompt_number": 18 + "prompt_number": 14 }, { "cell_type": "heading", "level": 1, "metadata": {}, "source": [ - "Pass in code file for response" + "Pass in any file of code" ] }, { @@ -5405,14 +4132,78 @@ " reversed_dict = dict((v,k) for k,v in a_dictionary.items())\n", " return [(reversed_dict.get(x),y) for x,y in a_list]\n", "\n", - "def check_code_snippet(a_file, corp_train, corp_answer):\n", + "def check_code_file(a_file, corp_train, corp_answer):\n", " \n", " # Ingest and featurizes file\n", " request = Corpus([a_file])\n", " request_df = request.compl_df_build()\n", "\n", " # Run model\n", - " model_created = GaussianNB()\n", + " model_created = RandomForestClassifier(50)\n", + " model_created.fit(corp_train, corp_answer)\n", + " predicted = model_created.predict(request_df.values)\n", + " predict_prob = model_created.predict_proba(request_df.values)\n", + " \n", + " #Pretty response\n", + " answer_list = list(zip(model_created.classes_, predict_prob[0]))\n", + " answer_list.sort(key=lambda x: x[1], reverse=True)\n", + " return reverse_dict_match(hit_num, answer_list)" + ], + "language": "python", + "metadata": {}, + "outputs": [], + "prompt_number": 31 + }, + { + "cell_type": "code", + "collapsed": false, + "input": [ + "check_code_file('test/22', corp_train, corp_answer)" + ], + "language": "python", + "metadata": {}, + "outputs": [ + { + "metadata": {}, + "output_type": "pyout", + "prompt_number": 28, + "text": [ + "[('ruby', 0.26000000000000001),\n", + " ('java', 0.16),\n", + " ('python', 0.16),\n", + " ('clojure', 0.14000000000000001),\n", + " ('scheme', 0.10000000000000001),\n", + " ('php', 0.059999999999999998),\n", + " ('haskell', 0.040000000000000001),\n", + " ('javascript', 0.040000000000000001),\n", + " ('perl', 0.040000000000000001),\n", + " ('scala', 0.0),\n", + " ('ocaml', 0.0)]" + ] + } + ], + "prompt_number": 28 + }, + { + "cell_type": "heading", + "level": 1, + "metadata": {}, + "source": [ + "Pass in any code snippet" + ] + }, + { + "cell_type": "code", + "collapsed": false, + "input": [ + "def check_code_snippet(code_snippet, 
corp_train, corp_answer):\n", + " request = Corpus()\n", + " code_snippet_df = pd.DataFrame(code_snippet, columns=['raw_text'])\n", + " code_snippet_df = request.feature_breakout(code_snippet_df)\n", + " request_df = request.clean_df(code_snippet_df, snippet='True')\n", + "\n", + " # Run model\n", + " model_created = RandomForestClassifier(50)\n", " model_created.fit(corp_train, corp_answer)\n", " predicted = model_created.predict(request_df.values)\n", " predict_prob = model_created.predict_proba(request_df.values)\n", @@ -5425,13 +4216,21 @@ "language": "python", "metadata": {}, "outputs": [], - "prompt_number": 7 + "prompt_number": 34 }, { "cell_type": "code", "collapsed": false, "input": [ - "check_code_snippet('test/22', corp_train, corp_answer)" + "ocaml = \"\"\"| pid ->\n", + " Unix.close output;\n", + " let input = Unix.in_channel_of_descr input in fun () ->\n", + " let v = Marshal.from_channel input in\n", + " ignore (Unix.waitpid [] pid);\n", + " close_in input;\n", + " match v with `Res x -> x | `Exn e -> raise e\"\"\"\n", + "\n", + "check_code_snippet([ocaml], corp_train, corp_answer)" ], "language": "python", "metadata": {}, @@ -5439,23 +4238,23 @@ { "metadata": {}, "output_type": "pyout", - "prompt_number": 8, + "prompt_number": 33, "text": [ - "[('javascript', 1.0),\n", - " ('java', 2.7703304550449276e-81),\n", - " ('clojure', 0.0),\n", + "[('ocaml', 0.73999999999999999),\n", + " ('clojure', 0.10000000000000001),\n", + " ('scheme', 0.059999999999999998),\n", + " ('python', 0.059999999999999998),\n", + " ('haskell', 0.02),\n", + " ('java', 0.02),\n", " ('scala', 0.0),\n", - " ('scheme', 0.0),\n", - " ('haskell', 0.0),\n", - " ('ocaml', 0.0),\n", + " ('javascript', 0.0),\n", " ('perl', 0.0),\n", " ('php', 0.0),\n", - " ('python', 0.0),\n", " ('ruby', 0.0)]" ] } ], - "prompt_number": 8 + "prompt_number": 33 }, { "cell_type": "code", @@ -5463,7 +4262,8 @@ "input": [], "language": "python", "metadata": {}, - "outputs": [] + "outputs": [], + "prompt_number": 16 }, { "cell_type": "code", @@ -5471,7 +4271,8 @@ "input": [], "language": "python", "metadata": {}, - "outputs": [] + "outputs": [], + "prompt_number": 16 }, { "cell_type": "code", @@ -5479,7 +4280,8 @@ "input": [], "language": "python", "metadata": {}, - "outputs": [] + "outputs": [], + "prompt_number": 16 }, { "cell_type": "code", @@ -5487,7 +4289,8 @@ "input": [], "language": "python", "metadata": {}, - "outputs": [] + "outputs": [], + "prompt_number": 16 }, { "cell_type": "code", @@ -5495,7 +4298,8 @@ "input": [], "language": "python", "metadata": {}, - "outputs": [] + "outputs": [], + "prompt_number": 16 } ], "metadata": {} From 0628242b4ed37f49fbb80723d4a2e60d8e1ff4c8 Mon Sep 17 00:00:00 2001 From: Zack Cooper Date: Mon, 16 Feb 2015 08:51:58 -0500 Subject: [PATCH 5/6] again --- lang classifier live.ipynb | 3052 +++++++----------------------------- 1 file changed, 565 insertions(+), 2487 deletions(-) diff --git a/lang classifier live.ipynb b/lang classifier live.ipynb index 3b16179..d9ebd8b 100644 --- a/lang classifier live.ipynb +++ b/lang classifier live.ipynb @@ -1,7 +1,7 @@ { "metadata": { "name": "", - "signature": "sha256:351106de656a94ded25f92477ba635151dc3e3be72b67d7f2d9459b9b4fb2be8" + "signature": "sha256:5d38ae2fb3758ef4f48e8df732eaeef65eb86124e67ec22c3c221b664a5301d2" }, "nbformat": 3, "nbformat_minor": 0, @@ -27,7 +27,7 @@ "\n", "corpus = Corpus(raw_file_list)\n", "corpus_df = corpus.compl_df_build(False)\n", - "corpus_df" + "corpus_df.head()" ], "language": "python", "metadata": {}, @@ -56,2091 +56,292 @@ 
" val_exists\n", " else_if\n", " elif\n", + " void\n", " \n", " \n", " \n", " \n", - " 0 \n", - " 1\n", + " 0\n", + " 1\n", " 0.071901\n", " 0.000000\n", " 0.002066\n", " 0.000000\n", " 0.001240\n", " 0.000413\n", + " 0\n", " 0.000000\n", - " 0.000000\n", - " 0.000000\n", + " 0\n", " 0.000413\n", + " 0\n", " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", + " 0\n", + " 0\n", + " 0\n", + " 0\n", " 0.000000\n", " \n", " \n", - " 1 \n", - " 1\n", + " 1\n", + " 1\n", " 0.071704\n", " 0.000000\n", " 0.002699\n", " 0.000000\n", " 0.000771\n", " 0.000386\n", + " 0\n", " 0.000000\n", - " 0.000000\n", - " 0.000000\n", + " 0\n", " 0.000386\n", + " 0\n", " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", + " 0\n", + " 0\n", + " 0\n", + " 0\n", " 0.000000\n", " \n", " \n", - " 2 \n", - " 1\n", + " 2\n", + " 1\n", " 0.063995\n", " 0.000000\n", " 0.002695\n", " 0.000000\n", " 0.000337\n", " 0.000337\n", + " 0\n", " 0.000000\n", - " 0.000000\n", - " 0.000000\n", + " 0\n", " 0.000337\n", + " 0\n", " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", + " 0\n", + " 0\n", + " 0\n", + " 0\n", + " 0.000337\n", " \n", " \n", - " 3 \n", - " 2\n", + " 3\n", + " 2\n", " 0.024707\n", " 0.002471\n", " 0.002471\n", " 0.000618\n", " 0.000000\n", " 0.000000\n", + " 0\n", " 0.000000\n", - " 0.000000\n", - " 0.000000\n", + " 0\n", " 0.000618\n", - " 0.000000\n", + " 0\n", " 0.001235\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", + " 0\n", + " 0\n", + " 0\n", + " 0\n", " 0.000000\n", " \n", " \n", - " 4 \n", - " 2\n", + " 4\n", + " 2\n", " 0.024352\n", " 0.002118\n", " 0.002647\n", " 0.000529\n", " 0.000000\n", " 0.000000\n", - " 0.000000\n", + " 0\n", " 0.001059\n", - " 0.000000\n", + " 0\n", " 0.000529\n", - " 0.000000\n", + " 0\n", " 0.001059\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 5 \n", - " 2\n", - " 0.026361\n", - " 0.002292\n", - " 0.002865\n", - " 0.000573\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.001146\n", - " 0.000000\n", - " 0.000573\n", - " 0.000000\n", - " 0.001146\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 6 \n", - " 3\n", - " 0.028487\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000982\n", - " 0.015226\n", - " 0.000000\n", - " 0.000246\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 7 \n", - " 3\n", - " 0.032197\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.010417\n", - " 0.000000\n", - " 0.000947\n", - " 0.000000\n", - " 0.001420\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 8 \n", - " 3\n", - " 0.029671\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.012981\n", - " 0.000000\n", - " 0.000464\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 9 \n", - " 4\n", - " 0.038012\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.010234\n", - " 0.000000\n", - " 0.000731\n", - " 0.006579\n", - " 0.002193\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.002193\n", - " \n", - " \n", - " 10 \n", - " 9\n", - " 0.025682\n", - " 
0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.004815\n", - " 0.001605\n", - " 0.003210\n", - " 0.011236\n", - " 0.000000\n", - " 0.000803\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 11 \n", - " 9\n", - " 0.022440\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.004208\n", - " 0.001403\n", - " 0.003506\n", - " 0.009818\n", - " 0.000000\n", - " 0.000701\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 12 \n", - " 9\n", - " 0.026920\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.004751\n", - " 0.001584\n", - " 0.004751\n", - " 0.011085\n", - " 0.000000\n", - " 0.000792\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 13 \n", - " 9\n", - " 0.023955\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.002178\n", - " 0.002395\n", - " 0.006533\n", - " 0.006969\n", - " 0.000000\n", - " 0.000218\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 14 \n", - " 5\n", - " 0.025814\n", - " 0.000000\n", - " 0.012293\n", - " 0.002459\n", - " 0.001229\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000615\n", - " 0.000000\n", - " 0.001229\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 15 \n", - " 5\n", - " 0.034651\n", - " 0.000000\n", - " 0.009773\n", - " 0.000000\n", - " 0.003554\n", - " 0.000000\n", - " 0.000000\n", - " 0.000888\n", - " 0.000000\n", - " 0.000444\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 16 \n", - " 5\n", - " 0.033503\n", - " 0.000000\n", - " 0.010925\n", - " 0.000000\n", - " 0.004370\n", - " 0.000000\n", - " 0.000000\n", - " 0.001457\n", - " 0.000000\n", - " 0.000728\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 17 \n", - " 6\n", - " 0.025354\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.007457\n", - " 0.000000\n", - " 0.000746\n", - " 0.000000\n", - " 0.000000\n", - " 0.042506\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 18 \n", - " 6\n", - " 0.026679\n", - " 0.000953\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.013340\n", - " 0.000000\n", - " 0.000476\n", - " 0.000000\n", - " 0.000000\n", - " 0.037637\n", - " 0.000000\n", - " 0.000000\n", + " 0\n", + " 0\n", + " 0\n", + " 0\n", " 0.000000\n", " \n", - " \n", - " 19 \n", - " 7\n", - " 0.036755\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.006337\n", - " 0.000000\n", - " 0.001267\n", - " 0.000000\n", - " 0.000000\n", - " 0.046261\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 20 \n", - " 7\n", - " 0.034674\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.008322\n", - " 0.000000\n", - " 0.000693\n", - " 0.000000\n", - " 0.000000\n", - " 0.041609\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 21 \n", - " 7\n", - " 0.040650\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 
0.000000\n", - " 0.005420\n", - " 0.000000\n", - " 0.000678\n", - " 0.000000\n", - " 0.000000\n", - " 0.044038\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 22 \n", - " 7\n", - " 0.038144\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.013402\n", - " 0.000000\n", - " 0.002835\n", - " 0.000000\n", - " 0.000000\n", - " 0.057474\n", - " 0.000000\n", - " 0.000258\n", - " 0.000000\n", - " \n", - " \n", - " 23 \n", - " 8\n", - " 0.044543\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.002784\n", - " 0.000000\n", - " 0.007795\n", - " 0.000000\n", - " 0.000557\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.001670\n", - " \n", - " \n", - " 24 \n", - " 11\n", - " 0.104895\n", - " 0.000000\n", - " 0.002914\n", - " 0.000000\n", - " 0.001166\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000583\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.004662\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 25 \n", - " 11\n", - " 0.095606\n", - " 0.000000\n", - " 0.003380\n", - " 0.000000\n", - " 0.000483\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000483\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.001449\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 26 \n", - " 11\n", - " 0.096643\n", - " 0.000000\n", - " 0.001107\n", - " 0.000000\n", - " 0.000738\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000369\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000738\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 27 \n", - " 10\n", - " 0.036036\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.004505\n", - " 0.000000\n", - " 0.015444\n", - " 0.000000\n", - " 0.000644\n", - " 0.000644\n", - " 0.000000\n", - " 0.000000\n", - " 0.003218\n", - " 0.000000\n", - " 0.002574\n", - " \n", - " \n", - " 28 \n", - " 10\n", - " 0.048632\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.003647\n", - " 0.000000\n", - " 0.013374\n", - " 0.000000\n", - " 0.001216\n", - " 0.001216\n", - " 0.002432\n", - " 0.000000\n", - " 0.004863\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 29 \n", - " 10\n", - " 0.035115\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.003053\n", - " 0.000000\n", - " 0.015267\n", - " 0.000000\n", - " 0.000763\n", - " 0.001527\n", - " 0.002290\n", - " 0.000000\n", - " 0.004580\n", - " 0.000000\n", - " 0.003053\n", - " \n", - " \n", - " ...\n", - " ...\n", - " ...\n", - " ...\n", - " ...\n", - " ...\n", - " ...\n", - " ...\n", - " ...\n", - " ...\n", - " ...\n", - " ...\n", - " ...\n", - " ...\n", - " ...\n", - " ...\n", - " ...\n", - " ...\n", - " \n", - " \n", - " 357\n", - " 8\n", - " 0.052632\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.004848\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000693\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000693\n", - " \n", - " \n", - " 358\n", - " 8\n", - " 0.044304\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.003956\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000791\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000791\n", - " \n", - " \n", - " 
359\n", - " 8\n", - " 0.036014\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.003001\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000600\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000600\n", - " \n", - " \n", - " 360\n", - " 11\n", - " 0.090119\n", - " 0.000000\n", - " 0.002172\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000543\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 361\n", - " 11\n", - " 0.085551\n", - " 0.000000\n", - " 0.001901\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000475\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 362\n", - " 11\n", - " 0.108160\n", - " 0.000000\n", - " 0.003863\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000483\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 363\n", - " 10\n", - " 0.063545\n", - " 0.000000\n", - " 0.000000\n", - " 0.000836\n", - " 0.000000\n", - " 0.002508\n", - " 0.000000\n", - " 0.015050\n", - " 0.000000\n", - " 0.003344\n", - " 0.004181\n", - " 0.000836\n", - " 0.000000\n", - " 0.003344\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 364\n", - " 10\n", - " 0.041924\n", - " 0.000000\n", - " 0.000000\n", - " 0.001644\n", - " 0.000000\n", - " 0.002877\n", - " 0.000000\n", - " 0.013152\n", - " 0.000000\n", - " 0.000411\n", - " 0.003288\n", - " 0.000000\n", - " 0.000000\n", - " 0.004110\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 365\n", - " 10\n", - " 0.042079\n", - " 0.000495\n", - " 0.000000\n", - " 0.000248\n", - " 0.000000\n", - " 0.002475\n", - " 0.000000\n", - " 0.008911\n", - " 0.000000\n", - " 0.004950\n", - " 0.002475\n", - " 0.001238\n", - " 0.000000\n", - " 0.003465\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 366\n", - " 10\n", - " 0.042163\n", - " 0.000687\n", - " 0.000000\n", - " 0.000458\n", - " 0.000000\n", - " 0.002521\n", - " 0.000000\n", - " 0.009166\n", - " 0.000000\n", - " 0.004812\n", - " 0.002521\n", - " 0.001146\n", - " 0.000000\n", - " 0.003208\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 367\n", - " 1\n", - " 0.053812\n", - " 0.000000\n", - " 0.001794\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000897\n", - " 0.001794\n", - " 0.000000\n", - " 0.000897\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 368\n", - " 1\n", - " 0.050473\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.004206\n", - " 0.000000\n", - " 0.001052\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 369\n", - " 2\n", - " 0.016092\n", - " 0.001149\n", - " 0.000000\n", - " 0.005747\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.001149\n", - " 0.000000\n", - " 0.000000\n", - " 0.001149\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 370\n", - " 3\n", - " 0.028712\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 
0.000000\n", - " 0.000000\n", - " 0.014356\n", - " 0.000449\n", - " 0.002692\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 371\n", - " 3\n", - " 0.028812\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.013205\n", - " 0.000600\n", - " 0.004802\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 372\n", - " 3\n", - " 0.020844\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.012710\n", - " 0.000254\n", - " 0.000763\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 373\n", - " 3\n", - " 0.022556\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.015038\n", - " 0.000752\n", - " 0.000752\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 374\n", - " 3\n", - " 0.022871\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.013407\n", - " 0.000394\n", - " 0.001183\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 375\n", - " 3\n", - " 0.028786\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.010013\n", - " 0.000626\n", - " 0.002503\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 376\n", - " 9\n", - " 0.023939\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.004353\n", - " 0.010881\n", - " 0.002176\n", - " 0.000000\n", - " 0.001088\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 377\n", - " 9\n", - " 0.003378\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.010135\n", - " 0.000000\n", - " 0.000000\n", - " 0.001689\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 378\n", - " 5\n", - " 0.063861\n", - " 0.000000\n", - " 0.011611\n", - " 0.000000\n", - " 0.002903\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.001451\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 379\n", - " 5\n", - " 0.056604\n", - " 0.000000\n", - " 0.010613\n", - " 0.000000\n", - " 0.004717\n", - " 0.000000\n", - " 0.000000\n", - " 0.004717\n", - " 0.000000\n", - " 0.001179\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 380\n", - " 5\n", - " 0.054570\n", - " 0.000000\n", - " 0.010914\n", - " 0.001364\n", - " 0.002729\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.001364\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 381\n", - " 6\n", - " 0.034043\n", - " 0.004255\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.017021\n", - " 0.000000\n", - " 0.001064\n", - " 0.000000\n", - " 0.000000\n", - " 
0.032979\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 382\n", - " 6\n", - " 0.012600\n", - " 0.001260\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.011760\n", - " 0.000000\n", - " 0.000420\n", - " 0.000000\n", - " 0.000000\n", - " 0.015960\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 383\n", - " 8\n", - " 0.035528\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.001045\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.001045\n", - " 0.000000\n", - " 0.001045\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.001045\n", - " \n", - " \n", - " 384\n", - " 8\n", - " 0.029233\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.002436\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.001218\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.001218\n", - " \n", - " \n", - " 385\n", - " 11\n", - " 0.080834\n", - " 0.000000\n", - " 0.003911\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.001304\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - " 386\n", - " 10\n", - " 0.033946\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.002829\n", - " 0.000000\n", - " 0.019802\n", - " 0.000000\n", - " 0.004243\n", - " 0.001414\n", - " 0.000000\n", - " 0.000000\n", - " 0.004243\n", - " 0.000000\n", - " 0.000000\n", - " \n", - " \n", - "\n", - "

387 rows \u00d7 17 columns

\n", - "" - ], - "metadata": {}, - "output_type": "pyout", - "prompt_number": 9, - "text": [ - " hit_num parent_count double_colon let_exists less_minus paren_star \\\n", - "0 1 0.071901 0.000000 0.002066 0.000000 0.001240 \n", - "1 1 0.071704 0.000000 0.002699 0.000000 0.000771 \n", - "2 1 0.063995 0.000000 0.002695 0.000000 0.000337 \n", - "3 2 0.024707 0.002471 0.002471 0.000618 0.000000 \n", - "4 2 0.024352 0.002118 0.002647 0.000529 0.000000 \n", - "5 2 0.026361 0.002292 0.002865 0.000573 0.000000 \n", - "6 3 0.028487 0.000000 0.000000 0.000000 0.000000 \n", - "7 3 0.032197 0.000000 0.000000 0.000000 0.000000 \n", - "8 3 0.029671 0.000000 0.000000 0.000000 0.000000 \n", - "9 4 0.038012 0.000000 0.000000 0.000000 0.000000 \n", - "10 9 0.025682 0.000000 0.000000 0.000000 0.004815 \n", - "11 9 0.022440 0.000000 0.000000 0.000000 0.004208 \n", - "12 9 0.026920 0.000000 0.000000 0.000000 0.004751 \n", - "13 9 0.023955 0.000000 0.000000 0.000000 0.002178 \n", - "14 5 0.025814 0.000000 0.012293 0.002459 0.001229 \n", - "15 5 0.034651 0.000000 0.009773 0.000000 0.003554 \n", - "16 5 0.033503 0.000000 0.010925 0.000000 0.004370 \n", - "17 6 0.025354 0.000000 0.000000 0.000000 0.000000 \n", - "18 6 0.026679 0.000953 0.000000 0.000000 0.000000 \n", - "19 7 0.036755 0.000000 0.000000 0.000000 0.000000 \n", - "20 7 0.034674 0.000000 0.000000 0.000000 0.000000 \n", - "21 7 0.040650 0.000000 0.000000 0.000000 0.000000 \n", - "22 7 0.038144 0.000000 0.000000 0.000000 0.000000 \n", - "23 8 0.044543 0.000000 0.000000 0.000000 0.000000 \n", - "24 11 0.104895 0.000000 0.002914 0.000000 0.001166 \n", - "25 11 0.095606 0.000000 0.003380 0.000000 0.000483 \n", - "26 11 0.096643 0.000000 0.001107 0.000000 0.000738 \n", - "27 10 0.036036 0.000000 0.000000 0.000000 0.000000 \n", - "28 10 0.048632 0.000000 0.000000 0.000000 0.000000 \n", - "29 10 0.035115 0.000000 0.000000 0.000000 0.000000 \n", - ".. ... ... ... ... ... ... 
\n", - "357 8 0.052632 0.000000 0.000000 0.000000 0.000000 \n", - "358 8 0.044304 0.000000 0.000000 0.000000 0.000000 \n", - "359 8 0.036014 0.000000 0.000000 0.000000 0.000000 \n", - "360 11 0.090119 0.000000 0.002172 0.000000 0.000000 \n", - "361 11 0.085551 0.000000 0.001901 0.000000 0.000000 \n", - "362 11 0.108160 0.000000 0.003863 0.000000 0.000000 \n", - "363 10 0.063545 0.000000 0.000000 0.000836 0.000000 \n", - "364 10 0.041924 0.000000 0.000000 0.001644 0.000000 \n", - "365 10 0.042079 0.000495 0.000000 0.000248 0.000000 \n", - "366 10 0.042163 0.000687 0.000000 0.000458 0.000000 \n", - "367 1 0.053812 0.000000 0.001794 0.000000 0.000000 \n", - "368 1 0.050473 0.000000 0.000000 0.000000 0.000000 \n", - "369 2 0.016092 0.001149 0.000000 0.005747 0.000000 \n", - "370 3 0.028712 0.000000 0.000000 0.000000 0.000000 \n", - "371 3 0.028812 0.000000 0.000000 0.000000 0.000000 \n", - "372 3 0.020844 0.000000 0.000000 0.000000 0.000000 \n", - "373 3 0.022556 0.000000 0.000000 0.000000 0.000000 \n", - "374 3 0.022871 0.000000 0.000000 0.000000 0.000000 \n", - "375 3 0.028786 0.000000 0.000000 0.000000 0.000000 \n", - "376 9 0.023939 0.000000 0.000000 0.000000 0.000000 \n", - "377 9 0.003378 0.000000 0.000000 0.000000 0.000000 \n", - "378 5 0.063861 0.000000 0.011611 0.000000 0.002903 \n", - "379 5 0.056604 0.000000 0.010613 0.000000 0.004717 \n", - "380 5 0.054570 0.000000 0.010914 0.001364 0.002729 \n", - "381 6 0.034043 0.004255 0.000000 0.000000 0.000000 \n", - "382 6 0.012600 0.001260 0.000000 0.000000 0.000000 \n", - "383 8 0.035528 0.000000 0.000000 0.000000 0.000000 \n", - "384 8 0.029233 0.000000 0.000000 0.000000 0.000000 \n", - "385 11 0.080834 0.000000 0.003911 0.000000 0.000000 \n", - "386 10 0.033946 0.000000 0.000000 0.000000 0.000000 \n", - "\n", - " def_exists end_exists curly_bracket slash_star_star forward_slash \\\n", - "0 0.000413 0.000000 0.000000 0.000000 0.000413 \n", - "1 0.000386 0.000000 0.000000 0.000000 0.000386 \n", - "2 0.000337 0.000000 0.000000 0.000000 0.000337 \n", - "3 0.000000 0.000000 0.000000 0.000000 0.000618 \n", - "4 0.000000 0.000000 0.001059 0.000000 0.000529 \n", - "5 0.000000 0.000000 0.001146 0.000000 0.000573 \n", - "6 0.000000 0.000982 0.015226 0.000000 0.000246 \n", - "7 0.000000 0.000000 0.010417 0.000000 0.000947 \n", - "8 0.000000 0.000000 0.012981 0.000000 0.000464 \n", - "9 0.000000 0.000000 0.010234 0.000000 0.000731 \n", - "10 0.001605 0.003210 0.011236 0.000000 0.000803 \n", - "11 0.001403 0.003506 0.009818 0.000000 0.000701 \n", - "12 0.001584 0.004751 0.011085 0.000000 0.000792 \n", - "13 0.002395 0.006533 0.006969 0.000000 0.000218 \n", - "14 0.000000 0.000000 0.000000 0.000000 0.000615 \n", - "15 0.000000 0.000000 0.000888 0.000000 0.000444 \n", - "16 0.000000 0.000000 0.001457 0.000000 0.000728 \n", - "17 0.000000 0.000000 0.007457 0.000000 0.000746 \n", - "18 0.000000 0.000000 0.013340 0.000000 0.000476 \n", - "19 0.000000 0.000000 0.006337 0.000000 0.001267 \n", - "20 0.000000 0.000000 0.008322 0.000000 0.000693 \n", - "21 0.000000 0.000000 0.005420 0.000000 0.000678 \n", - "22 0.000000 0.000000 0.013402 0.000000 0.002835 \n", - "23 0.002784 0.000000 0.007795 0.000000 0.000557 \n", - "24 0.000000 0.000000 0.000000 0.000000 0.000583 \n", - "25 0.000000 0.000000 0.000000 0.000000 0.000483 \n", - "26 0.000000 0.000000 0.000000 0.000000 0.000369 \n", - "27 0.004505 0.000000 0.015444 0.000000 0.000644 \n", - "28 0.003647 0.000000 0.013374 0.000000 0.001216 \n", - "29 0.003053 0.000000 0.015267 0.000000 0.000763 \n", - ".. ... ... 
... ... ... \n", - "357 0.004848 0.000000 0.000000 0.000000 0.000693 \n", - "358 0.003956 0.000000 0.000000 0.000000 0.000791 \n", - "359 0.003001 0.000000 0.000000 0.000000 0.000600 \n", - "360 0.000000 0.000000 0.000000 0.000000 0.000543 \n", - "361 0.000000 0.000000 0.000000 0.000000 0.000475 \n", - "362 0.000000 0.000000 0.000000 0.000000 0.000483 \n", - "363 0.002508 0.000000 0.015050 0.000000 0.003344 \n", - "364 0.002877 0.000000 0.013152 0.000000 0.000411 \n", - "365 0.002475 0.000000 0.008911 0.000000 0.004950 \n", - "366 0.002521 0.000000 0.009166 0.000000 0.004812 \n", - "367 0.000000 0.000897 0.001794 0.000000 0.000897 \n", - "368 0.000000 0.000000 0.004206 0.000000 0.001052 \n", - "369 0.000000 0.000000 0.000000 0.000000 0.001149 \n", - "370 0.000000 0.000000 0.014356 0.000449 0.002692 \n", - "371 0.000000 0.000000 0.013205 0.000600 0.004802 \n", - "372 0.000000 0.000000 0.012710 0.000254 0.000763 \n", - "373 0.000000 0.000000 0.015038 0.000752 0.000752 \n", - "374 0.000000 0.000000 0.013407 0.000394 0.001183 \n", - "375 0.000000 0.000000 0.010013 0.000626 0.002503 \n", - "376 0.004353 0.010881 0.002176 0.000000 0.001088 \n", - "377 0.000000 0.010135 0.000000 0.000000 0.001689 \n", - "378 0.000000 0.000000 0.000000 0.000000 0.001451 \n", - "379 0.000000 0.000000 0.004717 0.000000 0.001179 \n", - "380 0.000000 0.000000 0.000000 0.000000 0.001364 \n", - "381 0.000000 0.000000 0.017021 0.000000 0.001064 \n", - "382 0.000000 0.000000 0.011760 0.000000 0.000420 \n", - "383 0.001045 0.000000 0.000000 0.000000 0.001045 \n", - "384 0.002436 0.000000 0.000000 0.000000 0.001218 \n", - "385 0.000000 0.000000 0.000000 0.000000 0.001304 \n", - "386 0.002829 0.000000 0.019802 0.000000 0.004243 \n", - "\n", - " var_exists star_count dollar_sign val_exists else_if elif \n", - "0 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", - "1 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", - "2 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", - "3 0.000000 0.001235 0.000000 0.000000 0.000000 0.000000 \n", - "4 0.000000 0.001059 0.000000 0.000000 0.000000 0.000000 \n", - "5 0.000000 0.001146 0.000000 0.000000 0.000000 0.000000 \n", - "6 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", - "7 0.000000 0.001420 0.000000 0.000000 0.000000 0.000000 \n", - "8 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", - "9 0.006579 0.002193 0.000000 0.000000 0.000000 0.002193 \n", - "10 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", - "11 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", - "12 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", - "13 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", - "14 0.000000 0.001229 0.000000 0.000000 0.000000 0.000000 \n", - "15 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", - "16 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", - "17 0.000000 0.000000 0.042506 0.000000 0.000000 0.000000 \n", - "18 0.000000 0.000000 0.037637 0.000000 0.000000 0.000000 \n", - "19 0.000000 0.000000 0.046261 0.000000 0.000000 0.000000 \n", - "20 0.000000 0.000000 0.041609 0.000000 0.000000 0.000000 \n", - "21 0.000000 0.000000 0.044038 0.000000 0.000000 0.000000 \n", - "22 0.000000 0.000000 0.057474 0.000000 0.000258 0.000000 \n", - "23 0.000000 0.000000 0.000000 0.000000 0.000000 0.001670 \n", - "24 0.000000 0.000000 0.000000 0.004662 0.000000 0.000000 \n", - "25 0.000000 0.000000 0.000000 0.001449 0.000000 0.000000 \n", - "26 0.000000 0.000000 0.000000 0.000738 0.000000 0.000000 \n", - "27 
0.000644 0.000000 0.000000 0.003218 0.000000 0.002574 \n", - "28 0.001216 0.002432 0.000000 0.004863 0.000000 0.000000 \n", - "29 0.001527 0.002290 0.000000 0.004580 0.000000 0.003053 \n", - ".. ... ... ... ... ... ... \n", - "357 0.000000 0.000000 0.000000 0.000000 0.000000 0.000693 \n", - "358 0.000000 0.000000 0.000000 0.000000 0.000000 0.000791 \n", - "359 0.000000 0.000000 0.000000 0.000000 0.000000 0.000600 \n", - "360 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", - "361 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", - "362 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", - "363 0.004181 0.000836 0.000000 0.003344 0.000000 0.000000 \n", - "364 0.003288 0.000000 0.000000 0.004110 0.000000 0.000000 \n", - "365 0.002475 0.001238 0.000000 0.003465 0.000000 0.000000 \n", - "366 0.002521 0.001146 0.000000 0.003208 0.000000 0.000000 \n", - "367 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", - "368 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", - "369 0.000000 0.000000 0.001149 0.000000 0.000000 0.000000 \n", - "370 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", - "371 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", - "372 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", - "373 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", - "374 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", - "375 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", - "376 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", - "377 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", - "378 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", - "379 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", - "380 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", - "381 0.000000 0.000000 0.032979 0.000000 0.000000 0.000000 \n", - "382 0.000000 0.000000 0.015960 0.000000 0.000000 0.000000 \n", - "383 0.000000 0.001045 0.000000 0.000000 0.000000 0.001045 \n", - "384 0.000000 0.000000 0.000000 0.000000 0.000000 0.001218 \n", - "385 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 \n", - "386 0.001414 0.000000 0.000000 0.004243 0.000000 0.000000 \n", - "\n", - "[387 rows x 17 columns]" - ] - } - ], - "prompt_number": 9 - }, - { - "cell_type": "code", - "collapsed": false, - "input": [ - "test_info = Corpus([filename for filename in glob.iglob(os.path.join('test/', '*'))])\n", - "test_info_df = test_info.compl_df_build(True)\n", - "test_info_df['answers'] = pd.read_csv('test_fixed.csv', dtype='object')\n", - "test_info_df" - ], - "language": "python", - "metadata": {}, - "outputs": [ - { - "html": [ - "
\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " 
\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", + " \n", + "
parent_countdouble_colonlet_existsless_minusparen_stardef_existsend_existscurly_bracketslash_star_starforward_slashvar_existsstar_countdollar_signval_existselse_ifelifanswers
0 0.045734 0.000000 0.001759 0.000000 0.000000 0.000000 0.000000 0.001759 0.000000 0.000000 0.000000 0 0.000000 0.000000 0.000000 0.000000 1
1 0.063037 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.022923 0.000000 0.000000 0.002865 0 0.000000 0.000000 0.000000 0.000000 4
2 0.042795 0.000000 0.000058 0.000000 0.000058 0.000000 0.000000 0.019122 0.000000 0.005069 0.002477 0 0.000173 0.000346 0.000288 0.000000 4
3 0.047059 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.035294 0.000000 0.011765 0.005882 0 0.000000 0.000000 0.000000 0.000000 4
4 0.015708 0.000561 0.000000 0.000000 0.000281 0.002805 0.004769 0.001122 0.000000 0.000000 0.000000 0 0.000000 0.000000 0.000000 0.000000 9
5 0.000000 0.000000 0.000000 0.000000 0.000000 0.002037 0.004073 0.008147 0.000000 0.004073 0.000000 0 0.000000 0.000000 0.000000 0.000000 9
6 0.008163 0.000000 0.000000 0.000000 0.000000 0.008163 0.014286 0.000000 0.000000 0.000000 0.000000 0 0.000000 0.000000 0.000000 0.000000 9
7 0.013329 0.000915 0.000065 0.000457 0.000000 0.000131 0.000000 0.002483 0.000000 0.000000 0.000000 0 0.002548 0.000000 0.000000 0.000000 2
8 0.058065 0.006452 0.000000 0.006452 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0 0.000000 0.000000 0.000000 0.000000 2
9 0.013752 0.003929 0.000491 0.002947 0.000000 0.000000 0.000000 0.006876 0.000000 0.000000 0.000000 0 0.004912 0.000000 0.000000 0.000000 2
10 0.124352 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0 0.000000 0.000000 0.000000 0.000000 11
11 0.088670 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0 0.000000 0.000000 0.000000 0.000000 1
12 0.222222 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0 0.000000 0.000000 0.000000 0.000000 11
13 0.110846 0.000000 0.000894 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0 0.000894 0.000000 0.000000 0.000000 11
14 0.006645 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.002848 0.003797 0.000000 0.000000 0 0.000000 0.000000 0.000000 0.000000 3
15 0.006572 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.002191 0.002738 0.000548 0.000000 0 0.000000 0.000000 0.000000 0.000000 3
16 0.027182 0.001073 0.000000 0.000000 0.000715 0.001788 0.000000 0.016452 0.000000 0.001073 0.000000 0 0.008941 0.003577 0.000000 0.000000 10
17 0.014606 0.000000 0.000000 0.000000 0.000000 0.004173 0.000000 0.003130 0.000000 0.001043 0.000000 0 0.000000 0.007825 0.000000 0.000000 10
18 0.019254 0.000301 0.000000 0.000000 0.000000 0.000000 0.000000 0.008123 0.003460 0.000150 0.000301 0 0.014591 0.000000 0.000000 0.000000 7
19 0.058997 0.000000 0.000000 0.000000 0.002950 0.000000 0.000000 0.011799 0.000000 0.005900 0.000000 0 0.014749 0.000000 0.000000 0.000000 7
20 0.097902 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0 0.000000 0.000000 0.000000 0.000000 1
21 0.053224 0.001535 0.000000 0.000000 0.000000 0.000000 0.000000 0.012282 0.001535 0.000000 0.000000 0 0.020983 0.000000 0.000000 0.000000 7
\n", + "
" + ], + "metadata": {}, + "output_type": "pyout", + "prompt_number": 11, + "text": [ + " hit_num parent_count double_colon let_exists less_minus paren_star \\\n", + "0 1 0.071901 0.000000 0.002066 0.000000 0.001240 \n", + "1 1 0.071704 0.000000 0.002699 0.000000 0.000771 \n", + "2 1 0.063995 0.000000 0.002695 0.000000 0.000337 \n", + "3 2 0.024707 0.002471 0.002471 0.000618 0.000000 \n", + "4 2 0.024352 0.002118 0.002647 0.000529 0.000000 \n", + "\n", + " def_exists end_exists curly_bracket slash_star_star forward_slash \\\n", + "0 0.000413 0 0.000000 0 0.000413 \n", + "1 0.000386 0 0.000000 0 0.000386 \n", + "2 0.000337 0 0.000000 0 0.000337 \n", + "3 0.000000 0 0.000000 0 0.000618 \n", + "4 0.000000 0 0.001059 0 0.000529 \n", + "\n", + " var_exists star_count dollar_sign val_exists else_if elif void \n", + "0 0 0.000000 0 0 0 0 0.000000 \n", + "1 0 0.000000 0 0 0 0 0.000000 \n", + "2 0 0.000000 0 0 0 0 0.000337 \n", + "3 0 0.001235 0 0 0 0 0.000000 \n", + "4 0 0.001059 0 0 0 0 0.000000 " + ] + } + ], + "prompt_number": 11 + }, + { + "cell_type": "code", + "collapsed": false, + "input": [ + "test_info = Corpus([filename for filename in glob.iglob(os.path.join('test/', '*'))])\n", + "test_info_df = test_info.compl_df_build(True)\n", + "test_info_df['answers'] = pd.read_csv('test_fixed.csv', dtype='object')\n", + "test_info_df.head()" + ], + "language": "python", + "metadata": {}, + "outputs": [ + { + "html": [ + "
\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", " \n", + " \n", + " \n", " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", + " \n", + " \n", " \n", - " \n", + " \n", " \n", " \n", " \n", " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", + " \n", " \n", " \n", " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", " \n", " \n", " \n", " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", " \n", - " \n", - " \n", - " \n", - " \n", - " \n", + " \n", " \n", " \n", - " \n", - " \n", - " \n", - " \n", - " \n", + " \n", + " \n", " \n", - " \n", " \n", - " \n", + " \n", " \n", " \n", " \n", + " \n", " \n", " \n", + " \n", + " \n", " \n", " \n", " \n", - " \n", + " \n", + " \n", + " \n", " \n", " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", + " \n", + " \n", " \n", + " \n", " \n", + " \n", " \n", " \n", - " \n", - " \n", - " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", " \n", " \n", - " \n", - " \n", - " \n", - " \n", - " \n", + " \n", + " \n", " \n", - " \n", " \n", - " \n", + " \n", " \n", " \n", " \n", + " \n", + " \n", + " \n", + " \n", " \n", " \n", " \n", " \n", - " \n", - " \n", + " \n", + " \n", + " \n", " \n", " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", + " \n", + " \n", + " \n", " \n", - " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", " \n", " \n", - " \n", " \n", " \n", " \n", " \n", - " \n", - " \n", + " \n", + " \n", + " \n", " \n", " \n", "
parent_countdouble_colonlet_existsless_minusparen_stardef_existsend_existscurly_bracketslash_star_starforward_slashvar_existsstar_countdollar_signval_existselse_ifelifvoidanswers
22 0.026448 0.000132 0.004893 0.000000 0.005554 0.000000 0.000397 0.003967 0.0000000 0.045734 0.000000 0.000397 0.001759 0 0.000000 0.000000 0.000000 0.000000 5
23 0.017391 0.000000 0.005217 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.001759 0 0.000000 0.000000 0.000000 0.000000 5
24 0.025854 0.000923 0.000000 0.000000 0.000000 0.000923 0.000000 0.014774 0.000000 0.000000 0.000000 0 0.024007 0.000000 0.000000 0.000000 6
25 0.026801 0.000000 0.001675 0.000000 0.000000 0.001675 0.000000 0.003350 0.000000 0.000000 0.000000 0 0.000000 0.000000 0.000000 0.000000 1
26 0.039867 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.013289 0.000000 0.000000 0.000000 0 0.000000 0.000000 0.000000 0.004983 8 1
27 0.028050 0.000000 0.000000 0.0000001 0.063037 0.000000 0.001403 0.000000 0.001403 0 0.000000 0.000000 0.000000 0.022923 0 0.000000 0.002865 0 0.000000 0.000000 0.000000 8 0 0 4
28 0.036810 0.000000 0.000000 0.000000 0.000000 0.012270 0.000000 0.000000 0.000000 0.0000002 0.042795 0.000000 0.000058 0 0.000058 0.000000 0.000000 0.000000 0.000000 8 0.019122 0 0.005069 0.002477 0 0.000173 0.000346 0.000288 0 0 4
29 0.029608 0.000000 0.000000 0.0000003 0.047059 0.000000 0.003059 0.000000 0.000888 0 0.000000 0.000000 0.000000 0.035294 0 0.011765 0.005882 0 0.000000 0.000000 0.000000 0.000000 8 0 0 4
30 0.053892 0.000000 0.000000 0.000000 0.000000 0.0000004 0.015708 0.000561 0.000000 0.020958 0 0.000281 0.002805 0.004769 0.001122 0 0.000000 0.000000 0.002246 0 0.000000 0.000000 0.000000 0.000000 4 0 0 9
\n", @@ -2148,143 +349,32 @@ ], "metadata": {}, "output_type": "pyout", - "prompt_number": 10, + "prompt_number": 12, "text": [ - " parent_count double_colon let_exists less_minus paren_star \\\n", - "0 0.045734 0.000000 0.001759 0.000000 0.000000 \n", - "1 0.063037 0.000000 0.000000 0.000000 0.000000 \n", - "2 0.042795 0.000000 0.000058 0.000000 0.000058 \n", - "3 0.047059 0.000000 0.000000 0.000000 0.000000 \n", - "4 0.015708 0.000561 0.000000 0.000000 0.000281 \n", - "5 0.000000 0.000000 0.000000 0.000000 0.000000 \n", - "6 0.008163 0.000000 0.000000 0.000000 0.000000 \n", - "7 0.013329 0.000915 0.000065 0.000457 0.000000 \n", - "8 0.058065 0.006452 0.000000 0.006452 0.000000 \n", - "9 0.013752 0.003929 0.000491 0.002947 0.000000 \n", - "10 0.124352 0.000000 0.000000 0.000000 0.000000 \n", - "11 0.088670 0.000000 0.000000 0.000000 0.000000 \n", - "12 0.222222 0.000000 0.000000 0.000000 0.000000 \n", - "13 0.110846 0.000000 0.000894 0.000000 0.000000 \n", - "14 0.006645 0.000000 0.000000 0.000000 0.000000 \n", - "15 0.006572 0.000000 0.000000 0.000000 0.000000 \n", - "16 0.027182 0.001073 0.000000 0.000000 0.000715 \n", - "17 0.014606 0.000000 0.000000 0.000000 0.000000 \n", - "18 0.019254 0.000301 0.000000 0.000000 0.000000 \n", - "19 0.058997 0.000000 0.000000 0.000000 0.002950 \n", - "20 0.097902 0.000000 0.000000 0.000000 0.000000 \n", - "21 0.053224 0.001535 0.000000 0.000000 0.000000 \n", - "22 0.026448 0.000132 0.004893 0.000000 0.005554 \n", - "23 0.017391 0.000000 0.005217 0.000000 0.000000 \n", - "24 0.025854 0.000923 0.000000 0.000000 0.000000 \n", - "25 0.026801 0.000000 0.001675 0.000000 0.000000 \n", - "26 0.039867 0.000000 0.000000 0.000000 0.000000 \n", - "27 0.028050 0.000000 0.000000 0.000000 0.000000 \n", - "28 0.036810 0.000000 0.000000 0.000000 0.000000 \n", - "29 0.029608 0.000000 0.000000 0.000000 0.000000 \n", - "30 0.053892 0.000000 0.000000 0.000000 0.000000 \n", - "\n", - " def_exists end_exists curly_bracket slash_star_star forward_slash \\\n", - "0 0.000000 0.000000 0.001759 0.000000 0.000000 \n", - "1 0.000000 0.000000 0.022923 0.000000 0.000000 \n", - "2 0.000000 0.000000 0.019122 0.000000 0.005069 \n", - "3 0.000000 0.000000 0.035294 0.000000 0.011765 \n", - "4 0.002805 0.004769 0.001122 0.000000 0.000000 \n", - "5 0.002037 0.004073 0.008147 0.000000 0.004073 \n", - "6 0.008163 0.014286 0.000000 0.000000 0.000000 \n", - "7 0.000131 0.000000 0.002483 0.000000 0.000000 \n", - "8 0.000000 0.000000 0.000000 0.000000 0.000000 \n", - "9 0.000000 0.000000 0.006876 0.000000 0.000000 \n", - "10 0.000000 0.000000 0.000000 0.000000 0.000000 \n", - "11 0.000000 0.000000 0.000000 0.000000 0.000000 \n", - "12 0.000000 0.000000 0.000000 0.000000 0.000000 \n", - "13 0.000000 0.000000 0.000000 0.000000 0.000000 \n", - "14 0.000000 0.000000 0.002848 0.003797 0.000000 \n", - "15 0.000000 0.000000 0.002191 0.002738 0.000548 \n", - "16 0.001788 0.000000 0.016452 0.000000 0.001073 \n", - "17 0.004173 0.000000 0.003130 0.000000 0.001043 \n", - "18 0.000000 0.000000 0.008123 0.003460 0.000150 \n", - "19 0.000000 0.000000 0.011799 0.000000 0.005900 \n", - "20 0.000000 0.000000 0.000000 0.000000 0.000000 \n", - "21 0.000000 0.000000 0.012282 0.001535 0.000000 \n", - "22 0.000000 0.000397 0.003967 0.000000 0.000000 \n", - "23 0.000000 0.000000 0.000000 0.000000 0.000000 \n", - "24 0.000923 0.000000 0.014774 0.000000 0.000000 \n", - "25 0.001675 0.000000 0.003350 0.000000 0.000000 \n", - "26 0.000000 0.000000 0.013289 0.000000 0.000000 \n", - "27 0.001403 0.000000 0.001403 0.000000 
0.000000 \n", - "28 0.012270 0.000000 0.000000 0.000000 0.000000 \n", - "29 0.003059 0.000000 0.000888 0.000000 0.000000 \n", - "30 0.000000 0.000000 0.020958 0.000000 0.000000 \n", - "\n", - " var_exists star_count dollar_sign val_exists else_if elif \\\n", - "0 0.000000 0 0.000000 0.000000 0.000000 0.000000 \n", - "1 0.002865 0 0.000000 0.000000 0.000000 0.000000 \n", - "2 0.002477 0 0.000173 0.000346 0.000288 0.000000 \n", - "3 0.005882 0 0.000000 0.000000 0.000000 0.000000 \n", - "4 0.000000 0 0.000000 0.000000 0.000000 0.000000 \n", - "5 0.000000 0 0.000000 0.000000 0.000000 0.000000 \n", - "6 0.000000 0 0.000000 0.000000 0.000000 0.000000 \n", - "7 0.000000 0 0.002548 0.000000 0.000000 0.000000 \n", - "8 0.000000 0 0.000000 0.000000 0.000000 0.000000 \n", - "9 0.000000 0 0.004912 0.000000 0.000000 0.000000 \n", - "10 0.000000 0 0.000000 0.000000 0.000000 0.000000 \n", - "11 0.000000 0 0.000000 0.000000 0.000000 0.000000 \n", - "12 0.000000 0 0.000000 0.000000 0.000000 0.000000 \n", - "13 0.000000 0 0.000894 0.000000 0.000000 0.000000 \n", - "14 0.000000 0 0.000000 0.000000 0.000000 0.000000 \n", - "15 0.000000 0 0.000000 0.000000 0.000000 0.000000 \n", - "16 0.000000 0 0.008941 0.003577 0.000000 0.000000 \n", - "17 0.000000 0 0.000000 0.007825 0.000000 0.000000 \n", - "18 0.000301 0 0.014591 0.000000 0.000000 0.000000 \n", - "19 0.000000 0 0.014749 0.000000 0.000000 0.000000 \n", - "20 0.000000 0 0.000000 0.000000 0.000000 0.000000 \n", - "21 0.000000 0 0.020983 0.000000 0.000000 0.000000 \n", - "22 0.000397 0 0.000000 0.000000 0.000000 0.000000 \n", - "23 0.000000 0 0.000000 0.000000 0.000000 0.000000 \n", - "24 0.000000 0 0.024007 0.000000 0.000000 0.000000 \n", - "25 0.000000 0 0.000000 0.000000 0.000000 0.000000 \n", - "26 0.000000 0 0.000000 0.000000 0.000000 0.004983 \n", - "27 0.000000 0 0.000000 0.000000 0.000000 0.000000 \n", - "28 0.000000 0 0.000000 0.000000 0.000000 0.000000 \n", - "29 0.000000 0 0.000000 0.000000 0.000000 0.000000 \n", - "30 0.002246 0 0.000000 0.000000 0.000000 0.000000 \n", - "\n", - " answers \n", - "0 1 \n", - "1 4 \n", - "2 4 \n", - "3 4 \n", - "4 9 \n", - "5 9 \n", - "6 9 \n", - "7 2 \n", - "8 2 \n", - "9 2 \n", - "10 11 \n", - "11 1 \n", - "12 11 \n", - "13 11 \n", - "14 3 \n", - "15 3 \n", - "16 10 \n", - "17 10 \n", - "18 7 \n", - "19 7 \n", - "20 1 \n", - "21 7 \n", - "22 5 \n", - "23 5 \n", - "24 6 \n", - "25 1 \n", - "26 8 \n", - "27 8 \n", - "28 8 \n", - "29 8 \n", - "30 4 " + " parent_count double_colon let_exists less_minus paren_star def_exists \\\n", + "0 0.045734 0.000000 0.001759 0 0.000000 0.000000 \n", + "1 0.063037 0.000000 0.000000 0 0.000000 0.000000 \n", + "2 0.042795 0.000000 0.000058 0 0.000058 0.000000 \n", + "3 0.047059 0.000000 0.000000 0 0.000000 0.000000 \n", + "4 0.015708 0.000561 0.000000 0 0.000281 0.002805 \n", + "\n", + " end_exists curly_bracket slash_star_star forward_slash var_exists \\\n", + "0 0.000000 0.001759 0 0.000000 0.000000 \n", + "1 0.000000 0.022923 0 0.000000 0.002865 \n", + "2 0.000000 0.019122 0 0.005069 0.002477 \n", + "3 0.000000 0.035294 0 0.011765 0.005882 \n", + "4 0.004769 0.001122 0 0.000000 0.000000 \n", + "\n", + " star_count dollar_sign val_exists else_if elif void answers \n", + "0 0 0.000000 0.000000 0.000000 0 0 1 \n", + "1 0 0.000000 0.000000 0.000000 0 0 4 \n", + "2 0 0.000173 0.000346 0.000288 0 0 4 \n", + "3 0 0.000000 0.000000 0.000000 0 0 4 \n", + "4 0 0.000000 0.000000 0.000000 0 0 9 " ] } ], - "prompt_number": 10 + "prompt_number": 12 }, { "cell_type": "heading", @@ -2321,7 +411,7 
@@ "language": "python", "metadata": {}, "outputs": [], - "prompt_number": 11 + "prompt_number": 3 }, { "cell_type": "code", @@ -2342,21 +432,21 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", + " 3 0.67 1.00 0.80 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", - " 9 0.60 1.00 0.75 3\n", + " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.84 0.77 0.76 31\n", + "avg / total 0.92 0.84 0.84 31\n", "\n", "[[2 0 2 0 0 0 0 0 0 0 0]\n", " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 2]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", @@ -2368,13 +458,13 @@ { "metadata": {}, "output_type": "pyout", - "prompt_number": 12, + "prompt_number": 4, "text": [ - "0.76113671274961581" + "0.83694316436251914" ] } ], - "prompt_number": 12 + "prompt_number": 4 }, { "cell_type": "code", @@ -2394,73 +484,73 @@ "text": [ " precision recall f1-score support\n", "\n", - " 1 0.50 0.50 0.50 4\n", + " 1 1.00 0.50 0.67 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", + " 3 1.00 1.00 1.00 2\n", " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", + " 6 0.50 1.00 0.67 1\n", + " 7 1.00 0.67 0.80 3\n", " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.78 0.77 0.75 31\n", + "avg / total 0.92 0.87 0.87 31\n", "\n", "[[2 0 2 0 0 0 0 0 0 0 0]\n", " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [2 0 0 0 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 0 0 0 1 2 0 0]\n", " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 0.50 0.50 0.50 4\n", + " 1 1.00 0.50 0.67 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", + " 3 1.00 1.00 1.00 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", + " 6 0.50 1.00 0.67 1\n", + " 7 1.00 0.67 0.80 3\n", " 8 1.00 0.75 0.86 4\n", - " 9 1.00 1.00 1.00 3\n", + " 9 0.75 1.00 0.86 3\n", "\n", - "avg / total 0.81 0.77 0.76 31\n", + "avg / total 0.92 0.87 0.87 31\n", "\n", "[[2 0 2 0 0 0 0 0 0 0 0]\n", " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [2 0 0 0 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 1 2 0 0]\n", + " [0 0 0 0 0 0 0 0 0 3 1]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 1.00 0.50 0.67 4\n", + " 1 1.00 0.25 0.40 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.50 0.50 0.50 2\n", + " 3 1.00 1.00 1.00 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", + " 6 0.50 1.00 0.67 1\n", + " 7 1.00 0.67 0.80 3\n", " 8 0.75 0.75 0.75 4\n", - " 9 1.00 1.00 1.00 3\n", + " 9 0.75 1.00 0.86 3\n", "\n", - "avg / total 0.88 0.81 0.80 31\n" + "avg / total 0.89 0.84 0.82 31\n" ] 
}, { @@ -2468,43 +558,43 @@ "stream": "stdout", "text": [ "\n", - "[[2 0 2 0 0 0 0 0 0 0 0]\n", + "[[1 0 2 0 0 0 0 0 0 1 0]\n", " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 1 0 0 0 0 1 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 1 2 0 0]\n", + " [0 0 0 0 0 0 0 0 0 3 1]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 0.50 0.50 0.50 4\n", + " 1 0.50 0.25 0.33 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", + " 3 1.00 1.00 1.00 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", - " 8 1.00 0.75 0.86 4\n", + " 8 0.75 0.75 0.75 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.81 0.77 0.76 31\n", + "avg / total 0.84 0.81 0.79 31\n", "\n", - "[[2 0 2 0 0 0 0 0 0 0 0]\n", + "[[1 0 2 0 0 0 0 0 0 1 0]\n", " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [2 0 0 0 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [1 0 0 0 0 0 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", @@ -2512,15 +602,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", - " 4 1.00 1.00 1.00 4\n", + " 3 1.00 1.00 1.00 2\n", + " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.92 0.84 0.84 31\n" + "avg / total 0.91 0.84 0.84 31\n" ] }, { @@ -2537,7 +627,7 @@ " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", @@ -2545,26 +635,26 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", - " 4 1.00 1.00 1.00 4\n", + " 3 1.00 1.00 1.00 2\n", + " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", - " 8 0.60 0.75 0.67 4\n", + " 6 0.50 1.00 0.67 1\n", + " 7 1.00 0.67 0.80 3\n", + " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.82 0.77 0.76 31\n", + "avg / total 0.92 0.87 0.87 31\n", "\n", "[[2 0 2 0 0 0 0 0 0 0 0]\n", " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 2 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 1 2 0 0]\n", + " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", @@ -2572,15 +662,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.50 0.50 0.50 2\n", - " 4 1.00 1.00 1.00 4\n", + " 3 1.00 1.00 1.00 2\n", + " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", - " 8 0.75 0.75 0.75 4\n", + " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.88 0.81 
0.80 31\n" + "avg / total 0.91 0.84 0.84 31\n" ] }, { @@ -2592,12 +682,12 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 1 0 0 0 0 1 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", @@ -2605,15 +695,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", - " 4 1.00 1.00 1.00 4\n", + " 3 1.00 1.00 1.00 2\n", + " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", - " 8 0.60 0.75 0.67 4\n", + " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.82 0.77 0.76 31\n" + "avg / total 0.91 0.84 0.84 31\n" ] }, { @@ -2625,96 +715,94 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 2 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 0.50 0.25 0.33 4\n", + " 1 1.00 0.50 0.67 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", + " 3 0.67 1.00 0.80 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", - " 8 0.60 0.75 0.67 4\n", + " 6 0.50 1.00 0.67 1\n", + " 7 1.00 0.67 0.80 3\n", + " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.76 0.74 0.72 31\n", + "avg / total 0.92 0.87 0.87 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ "\n", - "[[1 0 2 0 0 0 0 0 0 1 0]\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [1 0 0 0 0 0 0 0 0 1 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 0 0 0 1 2 0 0]\n", " [0 0 0 0 1 0 0 0 0 3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]" - ] - }, - { - "output_type": "stream", - "stream": "stdout", - "text": [ - "\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 1.00 0.50 0.67 4\n", + " 1 1.00 0.25 0.40 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.50 0.50 0.50 2\n", + " 3 0.67 1.00 0.80 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", - " 8 1.00 0.75 0.86 4\n", - " 9 0.75 1.00 0.86 3\n", + " 6 0.50 1.00 0.67 1\n", + " 7 1.00 0.67 0.80 3\n", + " 8 0.75 0.75 0.75 4\n", + " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.88 0.81 0.80 31\n", + "avg / total 0.89 0.84 0.82 31\n" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ "\n", - "[[2 0 2 0 0 0 0 0 0 0 0]\n", + "[[1 0 2 0 0 0 0 0 0 1 0]\n", " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 1 0 0 0 0 0 1]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 0 0 0 1 2 0 0]\n", " [0 0 0 0 1 0 0 0 0 
3 0]\n", - " [0 0 0 0 0 0 0 0 0 0 3]]" - ] - }, - { - "output_type": "stream", - "stream": "stdout", - "text": [ - "\n", + " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", " 1 1.00 0.50 0.67 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", - " 4 1.00 1.00 1.00 4\n", + " 3 1.00 1.00 1.00 2\n", + " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", - " 8 0.75 0.75 0.75 4\n", - " 9 0.75 1.00 0.86 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.82 0.77 0.76 31\n" + "avg / total 0.91 0.84 0.84 31\n" ] }, { @@ -2726,12 +814,12 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 1 1]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", @@ -2739,15 +827,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.50 0.50 0.50 2\n", + " 3 0.67 1.00 0.80 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", - " 9 0.75 1.00 0.86 3\n", + " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.88 0.81 0.80 31\n" + "avg / total 0.92 0.84 0.84 31\n" ] }, { @@ -2759,7 +847,7 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 1 0 0 0 0 0 1]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", @@ -2768,19 +856,19 @@ " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 0.67 0.50 0.57 4\n", + " 1 1.00 0.50 0.67 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", + " 3 0.67 1.00 0.80 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", - " 9 0.75 1.00 0.86 3\n", + " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.81 0.77 0.76 31\n" + "avg / total 0.92 0.84 0.84 31\n" ] }, { @@ -2792,7 +880,7 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [1 0 0 0 0 0 0 0 0 0 1]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", @@ -2801,19 +889,19 @@ " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 0.50 0.50 0.50 4\n", + " 1 1.00 0.50 0.67 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", - " 4 1.00 1.00 1.00 4\n", + " 3 1.00 1.00 1.00 2\n", + " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.81 0.77 0.76 31\n" + "avg / total 0.91 0.84 0.84 31\n" ] }, { @@ -2825,12 +913,12 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [2 0 0 0 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", 
" precision recall f1-score support\n", "\n", @@ -2838,15 +926,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", - " 4 1.00 1.00 1.00 4\n", + " 3 1.00 1.00 1.00 2\n", + " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.92 0.84 0.84 31\n" + "avg / total 0.91 0.84 0.84 31\n" ] }, { @@ -2863,7 +951,7 @@ " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", @@ -2871,15 +959,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", - " 4 1.00 1.00 1.00 4\n", + " 3 1.00 1.00 1.00 2\n", + " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", - " 6 0.50 1.00 0.67 1\n", - " 7 1.00 0.67 0.80 3\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.92 0.87 0.87 31\n" + "avg / total 0.91 0.84 0.84 31\n" ] }, { @@ -2895,8 +983,8 @@ " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 1 2 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", @@ -2904,15 +992,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", + " 3 1.00 1.00 1.00 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", - " 8 1.00 0.75 0.86 4\n", - " 9 0.60 1.00 0.75 3\n", + " 8 1.00 1.00 1.00 4\n", + " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.84 0.77 0.76 31\n" + "avg / total 0.94 0.87 0.87 31\n" ] }, { @@ -2924,28 +1012,28 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 2]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 4 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 1.00 0.50 0.67 4\n", + " 1 1.00 0.25 0.40 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", + " 3 0.67 1.00 0.80 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", - " 6 0.50 1.00 0.67 1\n", - " 7 1.00 0.67 0.80 3\n", - " 8 1.00 0.75 0.86 4\n", - " 9 0.60 1.00 0.75 3\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 0.75 0.75 0.75 4\n", + " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.84 0.81 0.80 31\n" + "avg / total 0.89 0.81 0.79 31\n" ] }, { @@ -2953,32 +1041,32 @@ "stream": "stdout", "text": [ "\n", - "[[2 0 2 0 0 0 0 0 0 0 0]\n", + "[[1 0 2 0 0 0 0 0 0 1 0]\n", " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 2]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 1 2 0 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", " [0 0 0 0 1 0 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 0.67 0.50 0.57 4\n", + " 1 1.00 0.50 0.67 4\n", " 10 1.00 1.00 1.00 2\n", " 11 
0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", - " 4 1.00 1.00 1.00 4\n", + " 3 1.00 1.00 1.00 2\n", + " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", - " 8 0.75 0.75 0.75 4\n", + " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.80 0.77 0.76 31\n" + "avg / total 0.91 0.84 0.84 31\n" ] }, { @@ -2990,12 +1078,12 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [1 0 0 0 0 0 0 0 0 1 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", @@ -3003,15 +1091,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.50 0.50 0.50 2\n", - " 4 1.00 1.00 1.00 4\n", + " 3 1.00 1.00 1.00 2\n", + " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", - " 8 0.75 0.75 0.75 4\n", + " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.88 0.81 0.80 31\n" + "avg / total 0.91 0.84 0.84 31\n" ] }, { @@ -3023,28 +1111,28 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 1 0 0 0 0 1 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 0.50 0.50 0.50 4\n", + " 1 1.00 0.50 0.67 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", - " 4 1.00 1.00 1.00 4\n", + " 3 1.00 1.00 1.00 2\n", + " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.81 0.77 0.76 31\n" + "avg / total 0.91 0.84 0.84 31\n" ] }, { @@ -3056,12 +1144,12 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [2 0 0 0 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", @@ -3069,15 +1157,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", - " 4 1.00 1.00 1.00 4\n", + " 3 1.00 1.00 1.00 2\n", + " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", - " 8 0.75 0.75 0.75 4\n", - " 9 0.75 1.00 0.86 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.82 0.77 0.76 31\n" + "avg / total 0.91 0.84 0.84 31\n" ] }, { @@ -3089,28 +1177,28 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 1 1]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 0.67 0.50 0.57 4\n", + " 
1 1.00 0.50 0.67 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", - " 4 1.00 1.00 1.00 4\n", + " 3 1.00 1.00 1.00 2\n", + " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", - " 8 0.75 0.75 0.75 4\n", + " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.80 0.77 0.76 31\n" + "avg / total 0.91 0.84 0.84 31\n" ] }, { @@ -3122,12 +1210,12 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [1 0 0 0 0 0 0 0 0 1 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", @@ -3164,19 +1252,19 @@ " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 0.50 0.50 0.50 4\n", + " 1 1.00 0.50 0.67 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", - " 4 1.00 1.00 1.00 4\n", + " 3 1.00 1.00 1.00 2\n", + " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.81 0.77 0.76 31\n" + "avg / total 0.91 0.84 0.84 31\n" ] }, { @@ -3188,28 +1276,28 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [2 0 0 0 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 0.67 0.50 0.57 4\n", + " 1 1.00 0.50 0.67 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", - " 4 1.00 1.00 1.00 4\n", + " 3 1.00 1.00 1.00 2\n", + " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", - " 8 0.75 0.75 0.75 4\n", + " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.80 0.77 0.76 31\n" + "avg / total 0.91 0.84 0.84 31\n" ] }, { @@ -3221,12 +1309,12 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [1 0 0 0 0 0 0 0 0 1 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", @@ -3234,15 +1322,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", + " 3 1.00 1.00 1.00 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", - " 9 0.60 1.00 0.75 3\n", + " 9 0.75 1.00 0.86 3\n", "\n", - "avg / total 0.84 0.77 0.76 31\n" + "avg / total 0.92 0.84 0.84 31\n" ] }, { @@ -3254,20 +1342,20 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 2]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 
0 0 0 0 0 3 1]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 0.50 0.50 0.50 4\n", + " 1 1.00 0.50 0.67 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", + " 3 0.67 1.00 0.80 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", @@ -3275,7 +1363,7 @@ " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.81 0.77 0.76 31\n" + "avg / total 0.92 0.84 0.84 31\n" ] }, { @@ -3287,7 +1375,7 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [2 0 0 0 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", @@ -3300,15 +1388,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", + " 3 0.67 1.00 0.80 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", - " 9 0.60 1.00 0.75 3\n", + " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.84 0.77 0.76 31\n" + "avg / total 0.92 0.84 0.84 31\n" ] }, { @@ -3320,7 +1408,7 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 2]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", @@ -3333,15 +1421,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", - " 4 1.00 1.00 1.00 4\n", + " 3 1.00 1.00 1.00 2\n", + " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.92 0.84 0.84 31\n" + "avg / total 0.91 0.84 0.84 31\n" ] }, { @@ -3358,7 +1446,7 @@ " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", @@ -3366,15 +1454,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", - " 4 1.00 1.00 1.00 4\n", + " 3 1.00 1.00 1.00 2\n", + " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.92 0.84 0.84 31\n" + "avg / total 0.91 0.84 0.84 31\n" ] }, { @@ -3391,7 +1479,7 @@ " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", @@ -3399,15 +1487,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.50 0.50 0.50 2\n", + " 3 0.67 1.00 0.80 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", - " 9 0.75 1.00 0.86 3\n", + " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.88 0.81 0.80 31\n" + "avg / total 0.92 0.84 0.84 31\n" ] }, { @@ -3419,7 +1507,7 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 1 0 0 0 0 0 1]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", @@ -3465,15 +1553,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 
1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", + " 3 0.67 1.00 0.80 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", - " 9 0.60 1.00 0.75 3\n", + " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.84 0.77 0.76 31\n" + "avg / total 0.92 0.84 0.84 31\n" ] }, { @@ -3485,7 +1573,7 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 2]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", @@ -3498,15 +1586,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", + " 3 0.67 1.00 0.80 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", - " 9 0.60 1.00 0.75 3\n", + " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.84 0.77 0.76 31\n" + "avg / total 0.92 0.84 0.84 31\n" ] }, { @@ -3518,7 +1606,7 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 2]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", @@ -3564,15 +1652,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", - " 4 1.00 1.00 1.00 4\n", + " 3 1.00 1.00 1.00 2\n", + " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", - " 9 0.60 1.00 0.75 3\n", + " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.84 0.77 0.76 31\n" + "avg / total 0.91 0.84 0.84 31\n" ] }, { @@ -3584,28 +1672,28 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 2]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 0.67 0.50 0.57 4\n", + " 1 1.00 0.50 0.67 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.50 0.50 0.50 2\n", - " 4 1.00 1.00 1.00 4\n", + " 3 1.00 1.00 1.00 2\n", + " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.86 0.81 0.81 31\n" + "avg / total 0.91 0.84 0.84 31\n" ] }, { @@ -3617,12 +1705,12 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [1 0 0 0 1 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", @@ -3663,13 +1751,13 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", + " 3 1.00 1.00 1.00 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", - " 9 1.00 1.00 1.00 3\n", + " 9 0.75 1.00 0.86 3\n", "\n", "avg / total 0.92 0.84 0.84 31\n" ] @@ -3688,7 +1776,7 @@ " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 
0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 3 1]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", @@ -3725,19 +1813,19 @@ " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 0.67 0.50 0.57 4\n", + " 1 1.00 0.50 0.67 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", - " 4 1.00 1.00 1.00 4\n", + " 3 1.00 1.00 1.00 2\n", + " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", - " 8 0.75 0.75 0.75 4\n", + " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.80 0.77 0.76 31\n" + "avg / total 0.91 0.84 0.84 31\n" ] }, { @@ -3749,28 +1837,28 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [1 0 0 0 0 0 0 0 0 1 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 0.33 0.25 0.29 4\n", + " 1 1.00 0.50 0.67 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", - " 4 1.00 1.00 1.00 4\n", + " 3 1.00 1.00 1.00 2\n", + " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", - " 8 0.75 0.75 0.75 4\n", + " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.76 0.74 0.72 31\n" + "avg / total 0.91 0.84 0.84 31\n" ] }, { @@ -3778,16 +1866,16 @@ "stream": "stdout", "text": [ "\n", - "[[1 0 2 0 0 0 0 0 0 1 0]\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [2 0 0 0 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", @@ -3795,15 +1883,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", - " 4 1.00 1.00 1.00 4\n", + " 3 1.00 1.00 1.00 2\n", + " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.92 0.84 0.84 31\n" + "avg / total 0.91 0.84 0.84 31\n" ] }, { @@ -3820,23 +1908,23 @@ " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 0.67 0.50 0.57 4\n", + " 1 1.00 0.50 0.67 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", - " 4 1.00 1.00 1.00 4\n", + " 3 1.00 1.00 1.00 2\n", + " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", - " 8 0.75 0.75 0.75 4\n", + " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.80 0.77 0.76 31\n" + "avg / total 0.91 0.84 0.84 31\n" ] }, { @@ -3848,28 +1936,28 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [1 0 0 0 0 0 0 0 0 1 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", 
" [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 0.50 0.50 0.50 4\n", + " 1 1.00 0.50 0.67 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", - " 4 1.00 1.00 1.00 4\n", + " 3 1.00 1.00 1.00 2\n", + " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.81 0.77 0.76 31\n" + "avg / total 0.91 0.84 0.84 31\n" ] }, { @@ -3881,12 +1969,12 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [2 0 0 0 0 0 0 0 0 0 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", @@ -3894,15 +1982,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", - " 4 1.00 1.00 1.00 4\n", + " 3 1.00 1.00 1.00 2\n", + " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.92 0.84 0.84 31\n" + "avg / total 0.91 0.84 0.84 31\n" ] }, { @@ -3919,23 +2007,23 @@ " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 0.50 0.25 0.33 4\n", + " 1 1.00 0.50 0.67 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", - " 4 1.00 1.00 1.00 4\n", + " 3 1.00 1.00 1.00 2\n", + " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", - " 8 0.60 0.75 0.67 4\n", + " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.76 0.74 0.72 31\n" + "avg / total 0.91 0.84 0.84 31\n" ] }, { @@ -3943,16 +2031,16 @@ "stream": "stdout", "text": [ "\n", - "[[1 0 2 0 0 0 0 0 0 1 0]\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [1 0 0 0 0 0 0 0 0 1 0]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", @@ -3960,15 +2048,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.00 0.00 0.00 2\n", + " 3 0.67 1.00 0.80 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", - " 9 0.60 1.00 0.75 3\n", + " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.84 0.77 0.76 31\n" + "avg / total 0.92 0.84 0.84 31\n" ] }, { @@ -3980,7 +2068,7 @@ " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 2]\n", + " [0 0 0 0 2 0 0 0 0 0 0]\n", " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", @@ -3993,15 +2081,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 
1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", - " 4 1.00 1.00 1.00 4\n", + " 3 1.00 1.00 1.00 2\n", + " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.92 0.84 0.84 31\n" + "avg / total 0.91 0.84 0.84 31\n" ] }, { @@ -4018,79 +2106,69 @@ " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n" ] }, - { - "output_type": "stream", - "stream": "stderr", - "text": [ - "/Users/zackjcooper/.pyenv/versions/sandbox/lib/python3.4/site-packages/sklearn/metrics/metrics.py:1771: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples.\n", - " 'precision', 'predicted', average, warn_for)\n", - "/Users/zackjcooper/.pyenv/versions/sandbox/lib/python3.4/site-packages/sklearn/metrics/metrics.py:1771: UndefinedMetricWarning: F-score is ill-defined and being set to 0.0 in labels with no predicted samples.\n", - " 'precision', 'predicted', average, warn_for)\n" - ] - }, { "metadata": {}, "output_type": "pyout", - "prompt_number": 13, + "prompt_number": 5, "text": [ - "[(20, 0.74948796722990274),\n", - " (21, 0.76382488479262667),\n", - " (22, 0.80376344086021501),\n", - " (23, 0.76382488479262667),\n", - " (24, 0.83694316436251914),\n", - " (25, 0.76075268817204289),\n", - " (26, 0.80376344086021501),\n", - " (27, 0.76075268817204289),\n", - " (28, 0.717741935483871),\n", - " (29, 0.80376344086021501),\n", - " (30, 0.75768049155145911),\n", - " (31, 0.80376344086021501),\n", - " (32, 0.75921658986175111),\n", - " (33, 0.76382488479262667),\n", - " (34, 0.83694316436251914),\n", - " (35, 0.87135176651305668),\n", - " (36, 0.76113671274961581),\n", - " (37, 0.79554531490015357),\n", - " (38, 0.75921658986175111),\n", - " (39, 0.80376344086021501),\n", - " (40, 0.76382488479262667),\n", - " (41, 0.75768049155145911),\n", - " (42, 0.75921658986175111),\n", + "[(20, 0.86991807475678429),\n", + " (21, 0.87043010752688177),\n", + " (22, 0.82219662058371723),\n", + " (23, 0.793010752688172),\n", + " (24, 0.83550947260624675),\n", + " (25, 0.86991807475678429),\n", + " (26, 0.83550947260624675),\n", + " (27, 0.83550947260624675),\n", + " (28, 0.87135176651305668),\n", + " (29, 0.82311827956989247),\n", + " (30, 0.83550947260624675),\n", + " (31, 0.83694316436251914),\n", + " (32, 0.83694316436251914),\n", + " (33, 0.83550947260624675),\n", + " (34, 0.83550947260624675),\n", + " (35, 0.83550947260624675),\n", + " (36, 0.86827956989247301),\n", + " (37, 0.78870967741935483),\n", + " (38, 0.83550947260624675),\n", + " (39, 0.83550947260624675),\n", + " (40, 0.83550947260624675),\n", + " (41, 0.83550947260624675),\n", + " (42, 0.83550947260624675),\n", " (43, 0.83694316436251914),\n", - " (44, 0.76382488479262667),\n", - " (45, 0.75921658986175111),\n", - " (46, 0.76113671274961581),\n", - " (47, 0.76382488479262667),\n", - " (48, 0.76113671274961581),\n", - " (49, 0.83694316436251914),\n", - " (50, 0.83694316436251914),\n", - " (51, 0.80376344086021501),\n", + " (44, 0.83550947260624675),\n", + " (45, 0.83550947260624675),\n", + " (46, 0.83602150537634401),\n", + " (47, 0.83694316436251914),\n", + " (48, 0.83694316436251914),\n", + " (49, 0.83550947260624675),\n", + " (50, 0.83550947260624675),\n", + " (51, 0.83694316436251914),\n", " (52, 0.83694316436251914),\n", - " (53, 
0.76113671274961581),\n", - " (54, 0.76113671274961581),\n", + " (53, 0.83694316436251914),\n", + " (54, 0.83694316436251914),\n", " (55, 0.83694316436251914),\n", - " (56, 0.76113671274961581),\n", - " (57, 0.80529953917050678),\n", + " (56, 0.83550947260624675),\n", + " (57, 0.83550947260624675),\n", " (58, 0.83694316436251914),\n", - " (59, 0.83694316436251914),\n", + " (59, 0.83602150537634401),\n", " (60, 0.83694316436251914),\n", - " (61, 0.75921658986175111),\n", - " (62, 0.72235023041474655),\n", - " (63, 0.83694316436251914),\n", - " (64, 0.75921658986175111),\n", - " (65, 0.76382488479262667),\n", - " (66, 0.83694316436251914),\n", - " (67, 0.717741935483871),\n", - " (68, 0.76113671274961581),\n", - " (69, 0.83694316436251914)]" + " (61, 0.83550947260624675),\n", + " (62, 0.83550947260624675),\n", + " (63, 0.83550947260624675),\n", + " (64, 0.83550947260624675),\n", + " (65, 0.83550947260624675),\n", + " (66, 0.83550947260624675),\n", + " (67, 0.83550947260624675),\n", + " (68, 0.83694316436251914),\n", + " (69, 0.83550947260624675)]" ] } ], - "prompt_number": 13 + "prompt_number": 5 }, { "cell_type": "code", @@ -4106,13 +2184,13 @@ { "metadata": {}, "output_type": "display_data", - "png": "iVBORw0KGgoAAAANSUhEUgAAAXwAAAD9CAYAAAC/fMwDAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJztvXuQNHd53/vtnlvPZXd2d/b2vrpLQIOQBAEFKZIMhxO7\nsI8tR0Bcx5QLH1PgxBhSjsvnOAlOpSoV59gVl4xN5eAkHBuZY46dQIwPuCwgVjBgAQoICQldWne9\n0vvu7rs7e51Lz637/NH96+6Z7cuvLzM9O/N8qlR6dy49PT3dTz+/5/n+vj9B13UQBEEQs4+Y9g4Q\nBEEQk4ECPkEQxJxAAZ8gCGJOoIBPEAQxJ1DAJwiCmBMo4BMEQcwJ2bR3AAD6/YF+cNBKezemguXl\nEuhYGNCxsKFjYUPHwmZtbUEI8/qpyPCz2UzauzA10LGwoWNhQ8fCho5FdKYi4BMEQRDjhwI+QRDE\nnEABnyAIYk6ggE8QBDEnUMAnCIKYEyjgEwRBzAkU8AmCIOYECvjEEC21h//23Vcw0LS0d4UgiISh\ngE8M8cDDr+JPH3gWT7y4n/auEASRMBTwiSEu1Y0p6021n/KeEASRNBTwiSG29poAALU7SHlPCIJI\nGgr4hIWm69jeNzL8DgV8gpg5KOATFvvHKrp9o1mrdqmkQxCzBgV8wmK7blvOUkmHIGYPCviExRYF\nfIKYaSjgExZb+3bA7/Qo4BPErEEBn7BgCh0AUDtUwyeIWYMCPmGxtd9CbVECQBk+QcwiFPAJAEBT\n7eG42cUVa2XkcyLaVMMniJmDAj4BwFbobK6UIOUypMMniBmEAj4BwFbonKuVIOWzpMMniBmEAj4B\nANiqGw3bc7UyCvkMyTIJYgahgE8AsDP8zVoJUt4o6ei6nvJeEQSRJBTwCQCGQqdSzGGxlEchn4EO\noNsjT3yCmCUo4BPoDzTsHrSxWSsBAKR8FgCgkjSTIGYKCvgELh+0oek6zq2YAT+XAUAGagQxa1DA\nJxwKnTIAQMobAZ+kmQQxW1DAJyyFDivpFPIsw6eATxCzBAV8YkiDD9gZPgV8gpgtKOAT2N5vIpsR\nsFYtAnA0bamGTxAzBQX8OUfXdWzVW9hYKUEUBQBUwyeIWSXr96QsyyKATwK4BUAHwIcURXne8fy7\nAXwMgA7gjxRF+Q/m498HcGS+7AVFUT44hn0nEuCw0YXaHVgKHYBKOgQxq/gGfAD3AMgrinKHLMu3\nAbjXfIzxuwD+DoAmgCdlWf5TGDcGKIryzjHsL5EwdsO2bD1mNW1Jh08QM0VQSedOAF8GAEVRHgJw\n68jzPQBLAEoABBiZ/psAlGRZ/oosyw+YNwpiShlt2AJUwyeIWSUo4C8COHb8PTDLPIx7ATwM4HEA\nX1IU5RhGtv87iqK8C8AvAfjsyHuIKYLZIp93ZPj2xCvK8Alilggq6RwDWHD8LSqKogGALMtXA/go\ngGsAtAD8iSzL/xDAFwE8BwCKojwry3IdwDkAF/0+aG1twe/puWKSx6J+0gEAvPF16ygWjNNByxgB\nXxDF1H+XtD9/mqBjYUPHIhpBAf9BAHcD+Jwsy7cDeMzxnARgAKCjKIomy/JlAMsAPgCjyfsRWZbP\nwxglbAXtyO7uSYTdnz3W1hYmeixe3j7GymIBjeM2GuZjzVYXAHB4rKb6u0z6WEwzdCxs6FjYhL3x\nBQX8LwD4MVmWHzT//oAsy+8DUFEU5VOyLP8xgG/JsqzCyOo/bb7u07Isf4O9h40KiOmi3enj4KSD\nN167PPQ4q+F3qIZPEDOFb8BXFEUH8OGRh59xPP9xAB93eev74+8aMW6295kHfnno8WxGQEYUqIZP\nEDMGNVPnGLthWxp6XBAESPkMyTIJYsaggD/HbO2f1uAzCvkM1A4FfIKYJSjgzzFuGnyGlM+iQxk+\nQcwUFPDnmO16C8VCBtVy/tRzUj5DE68IYsaggD+nDDQN2/stbK6UIQjCqecLuQz6Ax39AQmsCGJW\noIA/p+wdqhho+qmGLYMM1Ahi9qCAP6ew+v1mQMAni2SCmB0o4M8pTKFzzkWhA5CBGkHMIhTw5xQ/\nhQ5A69oSxCxCAX9O2ao3kREFrC0VXZ+XyBOfIGYOCvhziK7r2K63sL5cRDbjfgpYFsk0+YogZoYg\n87SZoT/Q8Ad/8UPcduMG3vaGjbR3J1VOWj001T5ed9WS52sk0yq505uNGv7n/uY5ZEQR73n79Wnv\niisPPPwqvvzQy9A9nr/nHa/BXW/kP2+/+/RlPPj4Fj76nps9b+pheOrlA/zlt17CR99zs2Wj
HYfn\nLh7hD//ySfRCyn4FAP/rj8m49bWrsfcBAD7zFQXrS0X8+G1XJ7K9//r15yEIwHvefkMi2/vSt17C\n5YMWPviTNyayvbnJ8HcP23jk2T1896nLae9K6hyYHvi1quT5msKMLYLyjUcv4Rs/uJT2bnjyvacv\no37cgSgIp/47POniv/2PC6G2980fXMJjz9exc9BOZP++r+ziqZcP8NJ2MrbEDyuXsXPQhq7D9Tt7\n/Vc/7uDBx5L5HXVdx9cfuYhv/TDQvZ2brz96CX/7WHLbe+jJHTz67F5i25ubDL/ZNjLVptpLeU/S\np2Eeg0ox5/maWZJlarqOltqHKArQdd11olnaNNUeSoUs/t2H7zj13G9+5nu4sHOC/kDjztbZWsXN\ndjLnO7tuktrexV1j//7NB9+GkuR9Ho7y4Xu/jpNWMvvQ7WvQATTVZEaxmq6jqfasaycJmu2e73Ua\nlrnJ8Bvmidpoz0aJIg7souUJ+O0ZCPhqpw8dwEDTp3bE0mj3UC6651/naiX0Bzp2D/my9Xanj/qx\nMYpLKkCzJCGphOniXhPLC4VQwR4ASlLWupbjwpKZpL5TS+1D14FOV4OuexXn+NHNG0iZAn542ElC\nGb59LPwDPlsEZToDZBgajgwuqWCRNE21j7JH8Du/asyVuLTX5NoWW+cAsAN1XJpWwhR/ey21h4OT\nDq5YdZ8D4kdZyqFhrsgWF6ZA6/Y09Prxz3N2jDQ9GUuSTm+A/kD3PC+iMDcBP+kh6VmGXbR+J5Jt\nrXD2R0TO33wab/jd3gC9vuZ5A2YLzF+qt1yfH8V5Y2gmNKK1S6Lxt3fR3L8r1sIH/Eoxi5baTyag\nOpKZJL6X8+ba6cXfP3bMvUZ+UZibgM+CXLevoTvn2nKeDJ9NvJoFi2RnkJ/GDJ8FG6+h+zkzE97i\nzPAv1R0BP6kMP8GEidXvr1ithH4vS1JanfgBeijgJ/C9nNtIIlFix7xCGX54hrO8s5+1xoEdC7/M\nYZbM01pTXtKxfg/J/fdYXZSQz2W4Szpbe/ZIIIlzXdN06ximneGzc7aVwH44g3IiGb7j3EqiFGpf\npxTwQ9OY8mH9JGGNa78MP29NvDr7N8ehm/0UNu2DSmyiKODK9Qq29lvQtOBm4CVzFrVz23FomU3v\npLZ3cbcBwC5VhYE1eZPIyNWhkk787TkFIUmUdFjvySsRiMJ8BvwpzPImSaPdQzYjWFp7N0RBMJY5\nnIGSzrQ3bZscMtmr1hfQ62vYO1Z9t9XtDbB72Ma1mwvGthMuVSQRGC/uNbG2JFllwzCw4JfEfjjL\nlUkkAsMZfgIlHcrwo+O8+1JJx5B6BenRpVxmJko6zoA1nQE/uDl31aZR7w4q62zvt6DrwFUbCyjk\nM8lkrmpyydJxs4uTVi9S/R6wg18SATrpDN95bBJp2nIkAmGZm4DfTPCkPes0OCdzSPnMTMgyp/23\nbwaUdADg6g0jY9+q+wd8pwtqRcolEhibCSZLcer3gN3ATEJuOpThJ1LScTRtE7AksVQ61LQNj/PH\nSEqbfBbRNB2tTp+r81/Iz0aGP+1N2wbH0P3KdSPgB2X47Pnzq2WUi9lEznXnTbLX12Ipt1j9PooG\nH7BLOsk0bROWZSbctLXPC6rhh6Jj6pyZ8mQaG3eTIswwUcpn0ekNoCUwazBNmu0eBADZjDiVAZ/9\nJn7NuXOrZWREAZf2/LX4TJJ5vlZGWcqh0x3E1qyzmwazdYgzSrIz/GglnSSbtuOUZSZZ0qEMPyTs\nh9hYNhb7mGeVDk82yZgVP52m2kdJyqJSTG5afpI0OVRT2YyIzZUStupN32n7W/UWioUMlip5u94d\nM3tl18/6cjH29i7uNSEKAjZX3BfeCYJlu8k0bROWZTonXiXUtBVg2EkkxVwEfHaRb6yYJ+wUXvST\nIszsPWlGJl81TD+SSjE3lQ17FryCLuxzq2Wo3YHldjpKf6BhZ7+F87UyBEGwbiBxz3d2zDaW410/\nuq7j4m4TGytF5LLRQg/LdpP4HdXEM/xkZZksURETNPubq4CfRIZy1uGZZcuYFYvklulTUynm0O70\nMdDiX4xJ0mj3UCxkkRH9L8fz5nKUlzwat5cP2hhoujUzl5WI4o5q2A0p7gj54KSDdqcfuX4PGElI\nRhSSyfDN81pKQM3EehuL5fzQtuPAEpUkmYuAzwJ8tVwwftw5zvCtgM9RF5wFAzXmU1OWsolK+pLE\nME4LHnGdtywW3Ov4W476PeDMhuOd76MJU9QbyKWY9XsAEAQBC6V8Ir8hG7muLEqxm8DsmNQWjTUm\n4qp0dF035NMJ1u+BOQn4zm53WcpRDR+8Tduzb6Dm9Klh33na6vi8nue2iZp7hm8rdIxM3Kp3xwyO\nzXYf2YyIpYWC8XfE4Piq5aETPcMHjN8yiWtY7Q6Qz4qoFHNoqX2uWcxeNK2AbxyjuCWdbk8znDIT\nVOgAcxbwK8WcIVWbsgxvklid/1AB/+xm+E6fmmkM+N3eAN2+xvV7bKyUIAje0sxLlgbfCKiVhDJ8\nw5M9G7sncHHPlGRG1OAzFkrG/IK4nvOd3gCFfAZlKQsd8QzZrAzfXEUu7qh4HJOugDkJ+M4FPyrF\nnCXTnEeiZfhnOOA7pG3lBCV9SdEM4ZeSy4pYXyri0p67Umdrr4l8VrSCTjmhGxwbgVgBP+IN5NJe\nE9mMYJWGolIp5aHp8RezUbsDFHIZh5op+nFix3h5QYKA+EIHHgvzKMxFwHfWrS171Tkt64Rq2s5S\nSUfKWsPjacrww/qlnF8to6n2Ty3zp2k6tvZbOFcrW6qOJGSZzCnTuGGy4xd+e5qu4+JeE5sr5cDm\ndBALpWRGLp3uAFI+a4+EYoz8mSRzoZhDPoEZ6kEOqlGZq4BfLubsrGdOlTo81siMWWjaOgOqVdKZ\nopt9WM9zVq4ZLevsHbXR62s4t2rr2yvMaCzGDY45ZZalrCUbjbK9vSMV3Z6GK2OWcwBgoZQ39yP6\nNaybIwQpn7G+V5wk0HmeFXLxTQeD1kiIylwE/Ga7h4woQDLrdeyxeaTR7nNJAAFnhn+GA76V4U9n\n07bRDndhs4bsqKcOq987LYeTKFU4ez4ZUUSxkI20vUu78Tx0nMQtLQHGnAVN140afgKJgFMYIuUy\nsRdZalANPzoNhztkUlK1s0pT7aHC2fm3avhneOKVHbDiNx3HAY+tghN7fdthaeaWw0OHkc2IKOQz\nsW5w1ixg87qpFLORSkSsYXs+pkIHMGr4QLxSFUtipJwzCYzftK0Uc8gn4DLLY6gXhbkJ+OxiT0qq\ndlZphND2SjMw8cp54UyjDj9sDf/cirs0k5V4ztWGLQsqUjaRQMaum7KUi3QDsZY1jKHBZyRRw2dl\nSmeGH2sk5LDHkPJGhh9HRcRjmR2FmQ/4rOnE6pmWveoUZXmTohOwWPYodg1/egJkWIaatgnNPE2S\nsEP3Qj6D2qJ0OuDXW8iIpxUwceedjMp4y8UcehH
Whb6410Q+J2LVVBDFwcrwY/yObNRayGcSa9oK\nAlAsZFHIZzDQdPQH0QN+mAmSYZj5gG81nRwnLDCfJZ1mCIUOMCs1fOZTY9SgS4VkLIOTwvY858/k\nzq+WcdToWt9N13VcqjexWSud6s2UizmoMRwzmyOBx15xij84DjQNW/UmrlgtJ+ILU7Ey/OgBuuMo\n6STVtC1LOWOluFx8D6pxrHYFzEHAH5UhJuUgeBYJ45QJzIgOv91HIZexzLoqxWgliXER5cJmZRtm\nsXBw0kGnO7AUPE7inu+j8wSi9EEuH7TRH+iJ1O8Bp0onmQw/CUM2Z9nYCvgxrpum2jecMgtU0gnF\naFabhFTtrBJGgw8YTb9sRjjbAd+cJcooF3NotnuxZ2kmRdimLeBo3JplHdsD/7TlcNzzfTRJiCJ6\nsOr3EZc1HKWSQNLmzPCLhQxEQYg88jN8b/p2wE9A7NBUe4ZTppicUyYwBwF/NMiV5lilEzbgA/Yi\nKGeVptpHqWB/30oxh/5An5rvZMhkM6EmI9lKHTPgm5m+WwYdt4Q5OsU/yuxdtuhJEhp8577EKcE4\nm7aCIKAkZSPfFNudPjRdt/aLiR3iSDPDiCvCMDcBn52ouayIQi4zVUqNSRFm0hVDymfO7Ezbgaah\n3ekPyVCnbbZtUw1/YY/aJFumaW4lnZgNydF1VaPU8NmyhkmVdDIZYz5AHE8sZ0kHML5X1BHDqJIp\nnzPCatSRMRsxJK3QAeYg4Ls1KsvFaJNHzjpRMvzCGV7IvKUOBysAiSgykqTZDu95XpJyqFbyVg1/\nq96EIBjmaqPEvcE11R6yGcEKYuUINfyLe00UC1ksm26bSWAE6PgZvpQz5abFHFpqtFJfY2TFsrgz\n1Lt9Df0Bn6FeWGY+4DdcaqRRtcRnHZbBhCvpGJNIpqXmHQY3LfM0zbZlTpmVCH4p52tl1I9VqN0+\nLu01sb5ccl1FKq5jJlOfCKa6Jqw9Ra+vYWe/jSvWytY2kiCu3JSNWu0M3yj1dSPYGo8anRVirhQ3\nqoxKktkP+C7rhVZiStXOKlG0vVKOaYrP3rFym62YlINkEsTxS2Hlm2deOURT7bs2bJ3bjhocneoT\nwFHS4Tx+2/staLoe2wN/lHIxi25Pi+x6y4IxU6LFWSt3tIpQMEdDUQP+uJwygbkI+C4lHUt3Ox3D\n+kkRVpYJ2MPTs6jUYQHVuVZsEj4sSRFmbYJRmKfO95Rd82/3gBrHNkDTmVPmsMopzPZY/T7xgB/T\n9dZq2ubsDB+IlgiMTp4r5OJdM+OaZQvMQcB30znP6+Qrp4kcL2d58pVbQJ2mkk4cvxQW4B95xgz4\nLg1bIN73bY9MWgScTVu+7V1MYFlDN+K63qqjGX6MJHA0xhTy8TL8cU26AuYg4DfaPUj5DLIZ+6vG\nVS6cVZwmcrywC+IsNm7daqHTFPCtcmOEGj6bZMWyQactspM4MmS3GxJzzORVyFxM0CXTSVzXW+cC\n5sb2oh+n0SoCawRHvWbCWmaHYS4C/miT0lIuzFmG73YsgjjbGf5p2wLbPC/93z5OSWehlBv6LZmp\n2ihxZMhu/S8gnELm4l4DC6UcFs3ZsUkR1/WWnc/53GgNP8pxGinpxG3aUkknOm6yt2lc6m7cjJrI\n8WLV8HtnbzTk9NFh2Bl++t8nTsAXBMFq1K5WJSvIuBFVhuy0lh7eXo7r2ul0B9g7VBOv3wPx+3Bq\nd2DOJDflpjFigt20NfbJatpGTJKoaRsRS/Y2ckFNoy/6uBk1kePFskjunMEMv306UyrkMshmhKko\n6Yx6zYflnBlI3Tx0nESVMHr1GCpSFl0Ox8ydgxZ0x34mSZT5AE46vcFQL8tq2kYq6fSRz4nIZY3t\nFawkKaYsk2r4Bhd3G/j0Xz0VOGTy0p1bNrkc2cEjz+7iL775QmI6dOXCAf7sgWehhdhetzfAffc/\nhVdNxUMUoky6Ahw1/DFYEfzwxTo+97Xnxqbxdy5gzhAEgTtDHTejMzTDwhq15z3q94xKMYd2J7wM\n2Us2ymvIVj9SASARS+RRwlzDbnS6fUuhA9i/QZQRw2iplGX4Ua0VxrW8IXBGA/7Xf3AJ33xsC0++\nuO/7Oi/deRiVzl9952V88cGXcNxKJkB87ZGL+Op3X7FWKOLh6QsH+MYPtvCNRy9F/tyoAX+cNfyv\nPHQB9z90AZcP24lvGzB+XzdVUmVKJt653ZDCcNP1K6hW8njza1Z9Xxe1/GFfP6dLOkDw9bN3bAT8\n2uI4An78Gr5bhh8lEWiovaEYk4+5cBDbh6SdMgHAd4uyLIsAPgngFgAdAB9SFOV5x/PvBvAxADqA\nP1IU5T8EvScJ9g6NE2k3IFB4ZVBhftw9M0upH6moluM3nthFVz/ucEvV6scd8/9q5M+NnuEzTXHy\nNe8983vtHraxseyfpUah2TY05KOqpHIxh4t7TQw0LZRpWdJYF3YElQ5glHI+/tG7Al/nDNCLIc5h\nL3kg7/VjZ/hF39dFge1T1Bp+pzcY6nuUIngEAcbauJ3uYOgYMU/8yBOv1B5KheSdMoHgDP8eAHlF\nUe4A8M8B3Dvy/O8C+DEAdwL4NVmWl8z3FHzeExuWEQZlht4nLN+P2+sPcNToAgD2jpLJQlsd4zP3\nQwRv9lp2AUUhqrZ3XJ74uq5b32v3YDwZfkvtDTVsGUnY6yZBUzWcMp2S4XEQVYbsNQJhGX9Q45ud\nr7UxlnSiZOT9gYb+QB8q6bD1f8Nuz2syYyEnRpdltntjUegAwQH/TgBfBgBFUR4CcOvI8z0ASwBK\nAAQYmf6dAO73eU8sdF23MvvdQ/8A6JXV5nMZ5LNicIZiZqBAvGDrpGll+PzbY6+Nk+FHneQzLh3+\ncatnTYsP+h2joOs6mqq742BlSqSZ47LAHSXq3AMveWCYkk4uK2KxlPx3zJuL2kQp6YzaKjAqEQzZ\nvGJMIR89w2+q/bE0bIHggL8I4Njx98As2TDuBfAwgMcBfElRlCOO98TisNG1AkVQhu9XxihzrHzk\nzOr3Egr4bfOECpXhm5/dVPtodyJauFrTv8PKMseT4TtvoOOo4avdAQaa7hpQp2Ux8yjWyFEIOzuW\nwWZmOzNhgD/g149UrCxKiZqmDe1HxAXanV74w9vLhR71jUoyGYVcNJfZrrnu9LjOi6Cr/xjAguNv\nUVEUDQBkWb4awEcBXAOgBeBPZFn+h37v8WNtbSHoJQCAyydd69/1ozZWahVkPGpdmnmiXXmuemr7\n1UoBuwct389Vn6tb/z5q97j30Qtd19Ey5Y3H7b7n9kYfP2za31nPZCLtx0A3jsVVVyyFen/WPPF0\nQYj9/Z0ol+ycYP+kw30seLm8b1gH15aKp7axaa68lMlnE/1OYej2Buj2NCwvStz7EHVfz20sGv8I\nee60uwMslvNYX18cevzKhhHkNHifE2qnj0a7h9deFe5842VtbQGLlQL2j9TQ21fNaLS0OHxuLC1K\nuHC5ge
WVMneZ7bntEwDAxurC0LYqpTy26v7xxY26mWSuuJy3SRAU8B8EcDeAz8myfDuAxxzPSQAG\nADqKomiyLF+GUd7xe48nu7snXDv87EtGEBYEoD/Q8ewLe541wl1zgYhep3dq+1JORFPtY3vnyLNx\n99Krh9a/t3Yb3PvoRbdnS+N26k3X7a2tLQw9rmm61aQGjO9fyobPmHYPjADYa3dDfQ82LD1qqLG/\nv5MXX3Ec23oTly8fn8oER49FGC6YF2IGLueW+Rtc3D7G7nryGnEeDhtGuTCfFbi+Y5xjMegaAXpn\nL9w5fNzsYrGcP/WeXsdIQHb3W57bYx46C8VcoucNYB8LySzL7lw+DrU4+taOkWzog8HQvuUzxjZe\nfuWAu7l9acd8/8i2MgIw0HRsbR+F6tG8ctmQXmcFvpgY9qYQtCdfAKDKsvwgjPLNr8qy/D5Zln9R\nUZRnAPwxgG/JsvxNAFUA97m9J9QeBXDZbPBdf97IOvzKAX52wLbbnvcQjpV0Fks57B2psfXiLUc5\n5uCkA00L3t5howNN161RTNQ6ftSmbT4rQhDGV9KpLRbQ6Q5wkpDsldHymcU6DX4645xNOUqUiYaa\nrpslp9M5IU9JZ5wNW+d+6EDoMqc64pTJiOI75NcndH4WL+OcdAUEZPiKougAPjzy8DOO5z8O4OMu\nbx19T2Kwhu0br13B8xePsXvYxhuuWXZ9bcPUYRcLp6edO70zFjx8PupHKjKigBuuqOKRZ/dw3OrF\nkmY6by4DTcdRsxu4CtC+2Ti+7twinrt4FLl57GYix4MgGDr2pJu27Mb1+muW8eDj29g9bIeSDAbh\n5qPDmAa31CjLTUYlyvdVO33ouvsNiUchw37fcUy6svfDvpGFuXHaxmmjzejwVtJeAd8pdggTvOPO\nzQjizE282j1sIyMKkK9asv72ouGhwwb4tMR7RypqixLWlorm3/Gai62RTIQnW2evee1VVe73uBHF\nOI1RyAWva/vQkzuhZgLvHako5DK47lzwSC0KjSnP8O0b0gSbtiG+r5/gwXDMzPg2ONm1Mo5JV4yo\nhmfMF8ptQp6xvfjHiY0ewip1xmmcBpzBgH/5sI1aVbLW7/QL+H7rhQZlPd3eAEfNLmpVycpS4koz\nWYa/VDEyWR6lDnvNDeeryIhC5H2IsnYqQ8pnfTP8w0YH//GLT+C/fO057m3Wj1XUqhLWzZtp0lp8\nPxnqNKx6Ne6hu5NcNoN8TgxlQxAUeIKWCR2nrQIjqvWzp0onwkjIq1Qa1TFz3KW+MxXw250+Tlo9\nrC8VsbRQQDYjWjX9UVgN0uuCCsrynENSNlMwrjSz1TE+68r1ytBn+OHcj+WFgjVdPQxeJnK8FMx1\nbb1gNhHb9RbX9lqmvNQ5egqaNR2Wll9JJ6aXehI0xjx0H6US0j8oaN5GuehvyFY/NsqhS5XkFi4f\npRJxNS97AfORGn4hWklHwOnZ0laGP2U1/DMV8FlQWFsqQhQErC1JnoGi7VODBIKXfts7cgZ8I0vZ\nixmU2mYQusq0VNg/6vi93HiNWcOvVSXUFiUcOeYh8BLVVoFRzGfQ7WueTeZLZqCvH6no9YNPcHYT\nq1Ul1KoSBCH5ko6f9XA2Y5Qk0s3wxzt0HyWsY6ZfSQwwHTN7mufvvXekYnmhMBZ7AEbUXoyaYIbf\naPdQkrKnVEKWB1Xoks74VrsCznDAZ/9vqn3XHygoyAWZL+05fECY0iB+hm9c5GEz/EI+g1Iha+3H\n/km4/fBrYPJQCFAcsMxeBzxHXE6cCp1sRkRtUUo+4Lf9v3NQSWLcjLs5N0pZyoZyzAw8fj72FL2+\nhqNGd6ynCru/AAAgAElEQVTlHGPfohmesSB8euJV+Ay/6dEbkyJn+PGu1SDOWMA3AsX6sh3wjcdP\nB4vAgB8w29JqOlUlFAtZlKVs/IBvXhxrS0XkcyJ3Db9mzlZkDbCwdfy4Gb5kDnW96pFb+7bz5/Y+\nR8A/HpbsrS0VcdToJmrBbC9+4n7hVIrhZ1UmyTjXLXXDMhvjlDAGlRb8Fv1mCck4G7ZAdMMzr5KO\nJV/t8N1AmH2H2zHKR2zasuMZ1VAviDMV8C+PZPhWw8/FiyVI9hY03ZwFVfZZq0tF1I/jafHZxVaW\nsqgtSoEZfrvTR1PtY2XRqIPWIjaP4wYX217B/cLactTudw6C6/hWQ29x+MYdt2TmpNE2jMm8JtVV\nijn0+tpYfP55sJtzkynphNXiB/UY/JaK3JuABt/Yh4RLOiFN5iz7DrcMP6IHVVPtoVjIjs3F9UwF\nfLukY2aGZqZ/2SXI8Gb4Xivc7Jka/KqpqFmtSuj1NRw7bA7CwjL8UiGLlUUJTbXvK3fcPzHr92am\nZAX8kI3buBm+X0mn3enj4KRj3ZS294MD/t6pDN/4f5Imaq2OvzY77VXPmmo/0ryIqIQNZkE9BlvC\neHp7k5h0ZexDxKZtz12Hn8+JyIgC9w3E77qKqtIxRgzjSwLOVsA/aGOxlLN+KP+SjvtqV4x81ljP\n0q9pW6tKVjNmNYE6fstRZqiZAXL/2Ltxy0o+K2bAX02rpONjoMYy+luur0EQgB2OgF8fuZmum174\nSdbxDS987++btjRzUsZpDBa4eZfwC+ox+C0xaI/gxhvwpUIWghBFlmlc86MzbcOuhuYb8KPq8Mfs\noHpmAv5A01A/Vq2sHgDWqt6ZYVCQEwQBlWLW9cft9gY4bg43nZKQZrY6feSyxtqXLIj71fGtWrd5\nc2DvmXSG77cIytaeEeCvXK9grVrkyvDrxypWFgvWzTRpLT4r1fgpYNKefNVsu1s3j4uwDU7mlDk6\nOWl0e243EKuks5T8widOREGI5HCp9gbIiAKyGbcJmVnu7fmVSoOEDm4w+fQ4+zpnJuDvH3cw0HQr\nOABGY2SpkndVhvDUrb20xHsuk0ZspU70oNRS+5bWt8YRvPePh5tfuayIaiUf+qaTVA3frR7JGrbn\nVkrYWCnhpNXzzbh6feNm6mzoWSWdpBaZ4VDApBnw2Q1pEpOuGGEDfkPto1zMeVob+9kQ1I9VCABW\nAmxDkqAkuSdtfnS6AxRyGfcZ+GZM4FlzOumSTlw1HQ9nJuCPNmwZ60tF7J+op+RmPFltWcqhpfZP\n6cvtppP9WYmUdDp9q/tuZ+veJZ26qdNfcQTH1UWJ23iN4Wcix4PfurasYXtutYyNFeN47fgoddj3\ndS57V5JyKEtZLkknDzwXjl/TcdxMWpIJ2H7tvLNtjdJC8AjJ7eZeP1KtiZHjhmX4YcQUandwqmFr\nba+Qha4Daic4UPvFmCiyzEnMvj4zAZ8N90cD/tpyEbp+uq7NM0W5LGWhw8Xjxsw0h0s68QK+rusj\nGT6r4ftn+AIwZLBWq0oYaLplr8uDn4kcD341/O16C8VCBtVyHpum3YVfHd+robe+XMTeUZsrswqC\nZ/JKxUdWOG4mLcl0fhZPvdtyygxIloDTN8yBpuHgpDN2Saa1H8U
s+gMN3RCTETu9gXepKsRx8lrk\nHYia4Y8/ETg7Ad/M8NeXRwK+eQMYbfg12z0UzGXQvPD6cVlQX3NkoVI+i0oxFzngd/saBpqOonly\nLC/w1fCrlfxQpsRTChrFz0SOBynnLsscaBp2DlrYXClDEATL38ivjl8fKVMx1paK6A90HJ7w38i8\nsCev8DRtJ6/Fn8TQfZQwJR3mlOk3IvTSwB+cGHbe4550xahEmHzV6foE/BD+PLaSyUeHHyLDb/hs\nLynOTMD3K+kAp5U6ho+O/wVV8ZCqeemIV6sS6kdqpCzUKckEzHp8Oe8ZuDVNd82Uomjx4xinAd4T\nr/aOVPQHOs7VjEC/aapt/LT4XsfWunEnUNaxM6XpbNrGbaJHIcw6vo0A4zTAsKeQ8qftKSYlyWSU\nfeShbmiajm5fO6XQsbYXwoHTXjb09O8oCgLyOTGUtQLPeRuXMxPwdw/byOfEU370XoGi0e4HBjn7\nxz2d4WcztmyQsVqV0B9E0+KzslHJkTWtLErYP+643kCOml0MNH2ofg+Ez/B5hudBeCkOmEKHBfzl\nxQLyWdE/ww8I+EmYqLGgVuLR4afgiR91Qfk4hHHM5N2/iovowWsENy7YSKPF+Tt6afAZYUZCQaW5\nQi6DbpiAP4FS35kI+LquY/ewjbWl4ull8JZPBwpeFYSXFrt+1EZtUTpliBRHmtkeyfABo44/0HTX\nG8ioQsd6T8gMv80xPA/CS6XDFDqbK8YygaIgYH25hJ39tmcTzUvBse5RmosCy878RnhSPmNMskml\naTtZ4zRGWeLTmPMaeBnbcx8dT6qkE7Y05zXL1tpeCLuGRruHXFb0HC0Y60iEKOn4jBiS4kwE/Ea7\nh3ZnMFRTZywUc5DymaGAzztkdqv/dboDHLd6ridsHGkms0Z2emT46err1qSr4cDIbgC8NslJlA+8\nmrZMoXN+tWQ9trlSRKc3wGHDfRTkpeBYd7lxR4Wn+cUm2aTStE1BpcM+jzeQAe7NyKHtFbPo9AZD\n7q2TL+n4W6SMwjJ875IOf4YftKiQlA+b4ZMsE8Bp0zQngiBgbamI3UPb54Z3aOR2N7en/Z/+LNsm\nOXyGP1rDB+zg7Tbb1rJFHsnwmZEbb4afRMAveHjpbNdbyIjCUF9lw0ep46fgWKoUkM0ICQV8vuZX\nJaWAn0YN3/i8LNqdPgaav6LFrxnpxK3BWR+ZHT5uwvrpsHM4kaZtwGzpsBk+lXRMLh8awWO0YctY\nXzKyymNzIWxe3bnb3dxNksmII820a/guGb7L9vwuHGa8xqM9TmLt1IwoIp8Vh05eXdexVW9ibak4\nlK0zaea2S+P28KQLTdddsz9RFLBaLSbTtOU0JqtIWdd5GOMmDVmm8/OCsnzeko6bH9HekYqFUs4z\ng04aL+GFF9ZqVzGbtv2BhnZn4Fs2LOQzGGg6vyU1NW0NWIbvFfCtOr4ZLHgzKLe7uVWDXHIL+Mbn\n1KOUdNwy/Kq3Fn//2HtoXKtK6PY0ruw0qWxydNUrY0Zt32rYMvwy/KCGHlvfgLcB50VT7SGbES1p\nnBflYs51Hsa4SUOWaXweX7mCBc/ghGk4OGq6jv1jdWL1eyB805adw4EZftAxsvpE/hk+wK/FNxxe\nx+eUCZyVgH/grsFnjCo8eJsfbndz58InoxTyGSyUomnxWVAphqjh53Oia1AIo9QJMpHjRcpnhk7c\nrbrZsB0J+Pbkq9M3xaD6rp/ddRiaKp9PTVrSzGa7h8IEnTIZvNkrb6Y5GhyPm130B/rEFDqA0/WW\nM8P3WPyEYS1zGLA9nkSqENIi2SgRjTcJOBMB//JhGwK8M8NRhQfvkLmQyyCbEU4NSQFvlcFq1Sin\nhNXiu/m7LBRzyGVFzxo+W/hklDBKncQy/Fx2qIa/ZWbw52vloddVioZNgps0cy8ow19ORqnTbPe4\nVEmpBXy1F9gQHQe8s4sb3D2w4e1Nygd/eB/CWWSoASUdURRQKmQDa/g8MUYKmeHHlU/zcCYC/u5h\nGyuLBc9Zs8x86/JISScoyxNMt73GSA0/mxGxOKL3Z9SqxozQIw8VihduJR1BELDishBKpztAo93z\nbHyFWfkqqXqxVDBKOqxvwJY1HM3wASPL3z1sn2oOBmX4ti9+9ICvmRYWPJlSWhbJPHNExgGvAqWp\n+jtlMiojPYG6z+h4XGQzIgr5DL9KJ6CkAxhxI+gY8Vi35EM4Zvb6A3R72tgTgakP+L3+AIcnHc/6\nPWCURkRBsNwWw2S15ZGl7nYPh33wR1mNMNMVcJR0CsM/aG2xgEa7N5QF2EvEubsNWvJQrpJOMhJA\nKZeBrsPyLLFM01ZOB/yNlRIGmn6q9MVubF4+6esJzLZtd/rQ4T/pipHGIiiWdfOEJZkA/6zUJqcV\nx+jExUlPumKw5jsPXuvZOmGmin7wxJgwq15NwlYBOAMBf/dQhQ7vhi1g3OVr1YJV62+GqFsb/teG\nHara7aPRdtfgM9hzYa18W2of+ax4apTi5osfJG0LU9JpJjSZY/Tk3ao3US3nXQOrV+O2fqSiUsx5\nXmyrCcy2DTOpyW9d1nHR4lTAjANeewXe0sJoDX/SGnxGSXK3OXfDXs/Wx0lVyqLb13w19DzOlmGa\ntrzKqLicgYDv37BlrC0VcdQ0FsJuqD0Iwuls2o2ylDPtUPuOIWlwwA/buG11+kMNW4abFt9Lg89Y\nKOaQz4qcTdtgEzkenFr8bm+A+pF6SqHDsKSZjsatruuomwuye35GLoNqJR8v4IcY0VQ8rDXGCWsu\nplHD59GY67oeuFqYtb2RhqlVw59whl+Wsmh3Blzyx07ATFuAT77KIwwJ45g5KbuNqQ/4XqZpozhN\n1NgyYV5lGSdlh084z7TwqNJMpzWyEzaT1hm82Y3HK8MXBAE108gtiGY72ESOB3vVqwG291vQAWyO\nNGwZG8vMF9/O8I9bPfT6WmD2t24uFs+rXR4lTKZkN20nJ8tMS4Pv/Ey/EU27M4Cm69yjY8CR4R+r\nKBWyQ3NNJgH7XjzyWrVnLm8YUNIB/G+MPPNbCiEcMy013bzX8Hc5A75Tmhk05dmJc1jqJ8lk1CJk\n+Lquo+1Y/GRoey4lHdtHx3vFoBrHIuhAcg1Cp70CU+B4Zfgby6dtkuuc2d/akrm+QchlHBnNEBdO\nGiqdNIzTGDw+MWEm/zDHzGa7Z4zgzHWgJ00YwzPepm3Q9njkzpbpIJV0+AnS4DOcrpnNdp874Dtd\nE3lKOoVcBoshtfjdnuGFXyqc3ic3TT37N/PMd4Onjp/kUnqSY3hqNWw9An4hn8HyQmE44PtMJHMS\nd31bduHwNG3D+KYkRUMNzgzHRT6XQT4r+n7fsIGnbNbPmfBg0uUcwD6WPI1bFnwln0l57Dr1216j\n3YMA/xt3mKYtBXyTy4dtlArZwIyI3RAu7JxwD0mB4WHuno+tgpNatRjKF9/NVoHBVrMareFXy3nf\nujvP5KskPVucJR026e
rcintJBzDq+AcnHetk587wY5qohWnaenm6jxPeWazjwmsdZ4at6uK7IZWL\nWTTUvq3ASjPD5+jFdLoDCAICFkZiZV7/kk5JykIUvcvG+TBN2wmdF1Md8DXdkPatBWT3gJ3hv7B1\nAoA/g7LrkH3sHqnIZb01+IxVc5lBXi0+U2a41fDz5oiBXTCapmP/RA00n+LJ8JOsFxccq15t1VvI\n50Qs+5ScrBm3pqcOz+gJ8F7BjJewJZNJG6hNKpPzoixlfXsWYRRuxvZy6HQH1szqdEo63guqj8JW\nu/KTnPL48zQ4FhWKluHPcQ3/qNFFr69Zw3w/igVjCULWKAyb4bOSjtfsVie2UocvKPll+MDwQihH\njY45Pd07mAJ8NslxFy93wk7edseo4W+ulHyb4pY00yzNhC3pRNXihy5JFPk84pMizaYtYFwXfo6Z\nYY8fu85e3jESrVRKOkxey5Hhq71BoLFbkAOnrutcfcIwKp2k5ssEMdUB//KBv0vmKM7XcdfwzQNc\nP1IDNfiMsDbJbrNsndQWjZW0Tlo9q5QRlOHzTABLtqRjnLyX9hro9TWc81DoMDZXjN+C1fH3jlQU\ncpnAUgFzWozqpxPWU7zCoblOkjRlmYAdULzq02FHSCw4XjADvpvp4LixVDocNfxOd4CCx2pXDK/1\nehlqd4CBFlw2DqXDT8DVloepDvh+PvhuOF/HfcKaP+6FnQYAe/KPH+w1YTN8Nx0+MDz5ijUrgzKl\npUoBGVHwr+EnOExkNfyXzJKZV8OWMTr5qn5sKDiCRk/2+gbeq2b50VKNZhrPHAxg8vYKPMsvjpMg\nAzV7xme4kii7ftLJ8Pn9dNTuwLdha2yP3UDct8drv+21NKjrNtU+ioXMWJ0ygSkP+LwafEaUDJ9d\n8K/umgE/TIbPqdQJzvBNLf6Ril3T+z8owxdFAcsLBa4afhIZPhuevrprNmwDMvzVqoSMKGBnv4WW\n2ke70+cOBuvLxvoGYf2KAOPCKUlZrjkYwOSlmc2EJsJFJWh2sTUzmzthsreXz4kTX9TFuQ9BTVtN\n19HpDXw1+Mb2/G8gDc6yFwv4PKPHoMVUkmKqA/6eFfD5AoXzdbw1SLa26cBcBIMn4Fv1c96A32Fl\nBvd9cs3wq/41fLYfR42u5yQl3pWLeGAlHaZMcvPQcZIRRawtFbG93+Ku3zPY77htqoHC0AjpODhq\nADZummoyE+GiEuQfFLbH4AzwPP2vccBr+8wCb5ApHJOvelku8za2RVE4tXCQF402BXxcPmwjIwpY\n8dGjO1mPkOEbjpnORUmCPyufy2CxnOc2ULNUOh5DQEtxc9zhruGz9+lwX0AFSLaG78yKBAAbK8Gj\nrs2VEppqHy9vs4Ze8E0MsH/HrZABP4wtAGPSWvyGGm7/kiaoIdlU+xCFYKdMe3v2OT1Jl0wnhRzf\ngvRBq1058Wvmh7muCiPrSLhhOWVOIBGY7oB/0MZqVfLVujpZX7azzjBBzpnN8J60li8+x/J4QSWd\n4Qy/hVxWxALH/gfZJCcZ8IuOALC6JCGXDb5o2E3hyZf2AYTI8M1eDLNg5qXb19AfaKEWkZhkSac/\n0NDpDlJT6ADBEkbDOC3YKdPeniPDT0GSCdgL0gdl+DxOmYySjwNnqICfCw74vGswJ8HUBvx2x3Cu\n5NHgM6qVvLWKUJi7JTtpc1kRiyW+g860+IeN04uXjBLUtF0o5ZDNGGZohvc/39DY6iV4ZfghTOSC\nyGZEqy4eVL9nsMYtC/iri+F6MWFLOq0IF844Av5Ro+N6kfM2+8ZJUA0/bGmhPFTS4RvBjQPmeuuH\n7ZTJkeFLObQ67usdhyl7FfKZQB3+JO02pjbgX9wzLnYeDT5DFARsrhRRLGS5MlAGuwBXOVQkjDCe\nOkEZvigIWFksYOegjaNGl/vC8Zt89dzFI7x6uYHFUp67gemH4BjmByl0GJvmiIstLs+bAdYWjYbv\nY8/t4aTF37iNElAXS8Yku4ef2cVxiM9y4+XtE3zi84/hV//9g/g/Pvkt3P+dl4e8jixJZooZfsWn\npMNKYmH2rxJhdDwOylIOzXbfV9mlcjhl2tsz7RpGDNmOm118/9ldAEC15D9BE+DL8HkXbEqCqQ34\n33liGwBw8/W1UO/7hZ94A375nptCvYfdqcOcsGtVfmlmq9NHPif6rmFaW5TQNk8unvo94Kz9Dwf8\nh57cwb/7fx9Bt6fhve+4gWtbPBSsgB8uwweAbEZAtRJ8gRivFfETt1+NvcM2PvFfH+PWyNvGX/wB\n6+qNCm6/cQMvb5/g337me6H7BgDw0vYxPvH5x/Cv7/suHn1uD9dsLmCg6fjc3zyPX/+Db+OvzMCf\n9qQr52e7lT/UruGUGeaGGbb/NS7KUtZc08L7XOlwNm0B997Oxd0GfvMz38OFnQbuuGkTV6wFXweF\nXAb9ge7r/movaj/+8yK9saUPvb6Gh57cQbWcx03Xr4R67/XnF0N/XsUK+PwnbBhpZtvDGtnJiiOr\n55UvsmY2y/B1XcdffuslfOGbL0LKZ/DL770ZN4W8YfrBLpTNAIUOY6mStzKclQXvVcTcuOdHrsdJ\ne4CvP/Iq/tOXnsQv33NTYC+nEXLSFWCMXH7x7huxvlzEFx98Cb/5mYfxkXffhBuvDT7vXtw6xhf/\n9kX84Pk6AOA1V1bxD+66Djdes4x2p4+//t6r+Op3X8Hn/+Z5fPmhC5CvWjL3bxpq+Kcz/Cg3JLbE\nYKebjnEaw9mM9iph2k6ZPOtk2H46GwCeeHEfn/yLx9HuDHDPXdfh7juv5aoGsGum2xt4JnxJyqeD\nmMqA/+hze2iqffz4bVePfSICMFzS4SVUSafTD/TncV4sK5wlnVxWRLWSR/1YRa+v4b77n8a3n9hG\nbVHCr/zMLbhyrcK1HV7YhcJb0hEEARsrRVzYaYTO/kRBwK/87JuxU2/g+8/s4s8eeBbv+9HX+l5k\nUX1qBEHAPT9yPdaXi7jv/qfx8f/yA7z/XTLe/qbzp17bUvt4WLmMbz+xjacvHAIAXmsG+jdcs2zt\nX0nK4afvug4/eutV+OuHX8FX/8crePgZoxSQZg0/b84BePrCAf73Tz449NxgYJRDwt6QKlIWg4HG\nPYIbB2WH/81q1f01QQuYu22vpfbxN49exJ985RmIIvCPfvpG3H7jJvd+OSdfeU22m/sM/8HHtwAA\nd958biKfx5qEV4QIkJYWP8DkSzcX1Q7Kip1lnDCZ0uqihJe2T3Dvf34Uz7xyiOvPL+KfvPcWVANu\nMFG48+ZNXLVewQJH7ZKxuVIyAn6E7C+XzeCj77kZv/Un38dfP/wqalUJ73rb1Z6vb8W8cO646RxW\nq0X8+z9/HPfd/zR2Dlp47ztuwGCg4/EX6vjOE9t49Lm6NTx//dVLuPuOa/F6R6AfpSRl8dN3Xocf\nfetVeODhV/DESwd443XhRq1Jc9fN5/D4C/VTj4tZAZsrJbzpNeFGhe948xVodfq
J9IqiYnv9ezdu\no5R0/vwbL+Dl7RNUijn8k/fejNdeuRRqv3j8dJjwIygpTIKpC/gHJx08/kId151bxBWrfLXiuLzt\nDRtYrRZxwxX85aB8LoNqOR+4UEenZ9RFg1YBqkUM+LWqhOcvHeOZVw5x6+vX8aGffINly5o0//Nb\nrgz9HrYYStT6bknK4Z/+zJvwb/+f7+E///fnsLIo4e++fv3U69qdvjVbOk7z63VXLeE3fv6t+L3P\nPYb7v3MBT798gJ39ttW8O1cr4fY3buL2Gze4Z4Ab3yOLu++8DnffeV3kfUuK979LTnR7P3XHtYlu\nLwo8yxKyBnqYpu3L2yc4VyvhV37mTaEEJAwePx1r4aUJ+BBNXcD/zhPb0HXgrpv5h01xEUUBr7nS\nYxzow2rVyK41TfesLwcpdBjOMg7zyOfhirUK8NRl/OTfuwbvfvv1qWZZbtxwhXFcr9lciLyNWlXC\nP/2ZN+G3Pvt9fOpLT6JazuO1V1ZxcbeJx1+o4/EX6nj21SMMNB0CwD1Rz4uN5RJ+4/1vxSe/8Die\nvnCIpUoeP/Kmq3D7jZu4eqOSymxSwp8FU0595COTZkGXp6TDbuZvuGYZH3n3TZG9j3iWOdw7aiOf\n45t7E5epCvi6ruNvH99CNiPibTdupL07gbDs+rDR8VTWBGnwGez9S5VCqAz9J267Gm97/fqQImaa\nuOWGGn77H98eKht24+qNBXzk3Tfh9z/3GH7/849BymdwcGJc3AKAa88t4KbranjL69YSUYtUijn8\n2s++GRd3m7hyrcI9+Y9IhzUOW22VY3lDxrWbC/itf3Q7VpekWH1EiaOkUz9SsVotTiSRmKqA/8LW\nMbbqLbztDeupKhl4ca6j6xnwOTP8Qi6Dq9crOL8ertGazYhTG+wZzhnQcbjpuhr+tx9/PT79V08h\nIwq4/cYN3Hx9DW+8bmUs9c+MKOLqjegjE2JysNLhjk/A74TQ4RuCg/jnbT7AMbOl9tFU+7j+fPgK\nQxSmKuA/+LihvZ9UszYuTqWOV1U0aPETJ7/x87difX0BB/vhteDzwl23nMMtN9RQKeYo6yYsSlIW\niyV7ASQ3Ohzr2SZN0KpXk14acmomXnV7Azz05A6WKnm8kUMDPQ3wLELS5szwAUNm6Tc5izBYLOcp\n2BOn2FgpYe9I9ZzkFGambVIENW1519FOiqmJLo88u4d2p487bjp3Zi7mVWu2rY8nfYTZnwRBhGdj\nuQRN1y3H2VFYwB+Xis2NIFkmSxYnNUt5agL+31ra+8mpc+LCPG/87BV4m7YEQcSDubN61fE7XWM9\n20kq2YIzfFbSmYwP0VQE/L3DNp58cR83XLHI7dMyDeSyGVQred8Mn7dpSxBEPKzGrUcdX+VY7Spp\ngpY5rB9NtobvG4VkWRYBfBLALQA6AD6kKMrz5nMbAP7M8fI3A/hniqL8J1mWvw/gyHz8BUVRPuj3\nOV97+BXoODvNWierVQkvbZ1goGmu8q0wTVuCIKJjraPsmeH3J9qwBYKbtntHKvJZ0ZpHMG6CotA9\nAPKKotwhy/JtAO41H4OiKDsA3gkAsiz/PQD/BsCnZFmWzOffybsTD3z3AnJZEW97/fRr70dZqxbx\n/MVjHJx0XIdlYZq2BEFEZ91cO8Mrw+/0BqFsQZIgz9G0rYWwZY9LUEnnTgBfBgBFUR4CcOvoC2RZ\nFgB8AsCHFUXRAbwJQEmW5a/IsvyAeaPw5eJuE2993dqZzIL9POkBRw2fAj5BjJVCLoPlhQJ2Dk4H\nfN20Tp50Sccvw293DA3+JG2lgwL+IoBjx98Ds8zj5G4AP1QU5Vnz7yaA31EU5V0AfgnAZ13ec4o7\nbzl75Rwg2Ca5pfZRyGVIbkkQE2BzpYT949MrjvX6GnSdb5Ztkvg1besTbtgCwSWdYwDOqYaioiij\nItefA/B7jr+fAfAcACiK8qwsy3UA5wBc9PqQjZUSfuStVyNzRuSYTm642pgz0O5pWFs7PSuz0x+g\nUsq5PudFmNfOOnQsbOhY2Hgdi2vOV/HUywfoCyKudLyGeexUK9LEj2M+K2Kgn97nF3eNCZbXnK9O\nbJ+CAv6DMDL4z8myfDuAx1xec6uiKN92/P0BGE3ej8iyfB7GKGHL70N++yN3Yb/e4N/rKSIHw0P8\n5a0j7O6enHr+pNnFUqXg+pwba2sL3K+ddehY2NCxsPE7FlXTKfWp53ZRztoJpKXN17WJH8d8LoNG\nq3vqc1+4cAAAKGaFyPsU9kYRVGf4AgBVluUHYTRsf1WW5ffJsvyLACDL8hpsNQ7jDwEsyrL8DRgq\nng+4jAqGWI1prJUmzEPHrYav6zpanT5p8AliQtieOsN1fHsB88lfi4VcxnWZTjZ/Z5I1fN9vbzZh\nP6MC9BoAAAnOSURBVDzy8DOO53cBvGXkPX0A709qB6edXFbEkocWX+0OoOuk0CGISWFNvtoflmaq\nvcnbKjCkfMZa5MTJpCddAVMy8eqss1otYv+4g4E2PJBpkwafICbK2lIRguCd4acR8PO5DDq900WO\nvSMVuayIxQlp8AEK+ImwuiRB03UcHA/fxWmWLUFMlmxGxFq1eEqLb3nhT3jiFWBk+P2BdsrUrX6k\norY4OQ0+QAE/EbykmTTLliAmz/pKEcetnpVwAUCnx7+8YdIwaaazjq92+2i0exOzVGBQwE8AL9dM\n5pRZKpBTJkFMik2Xxm2Y1a6SxnbMtDP8SXvoMCjgJ4C9EMpwo8gq6VCGTxATw/bUsQO+VcNPoaRj\nG6jZI469CdsiMyjgJ4DXQihWSYdq+AQxMdyUOqlm+FZJx87w01DoABTwE2FlQYKA0yWdNmX4BDFx\n3LT4nRRlmewznRk+lXTOMLmsiKWFAjVtCWIKqC1KyGaEIaWOmmJJR3JZ9SqNSVcABfzEWK1KODgZ\n1uKTLJMgJo8oClhbKmJnvw1dN6xPrAXM8+nMtDX2Ybikk82IWCxP1q6ZAn5CrFYNLf6+Q4tvZ/ik\n0iGISbK5UkKr08dJ21DKdaaghj/atK1VpYkutwhQwE+Mmos0s2XKMouFyZ9kBDHPsDr+ZbNxy4Jt\nKiqd/HDTttMdpKLBByjgJ8aqizSz1emjkM+4Ln1IEMT4YEqdbbOO3+kNkM+KEFOwYB/N8PeO02nY\nAhTwE8NNmtlS+1S/J4gUGFXqpLHaFWO0aVs3k0IK+GcYN3uFdqdPCh2CSAFr8pUjw0+jnAM4mrZd\no6ST1qQrgAJ+YqwsDmvxNdMLnzJ8gpg8S5U8CrkMdg6MbLrTHaTSsAWc1gpmSYdp8Bcnvw4IBfyE\nyGZELC8WrOFah7zwCSI1BEHAxnIROwet1BYwZ4zKMinDnxFWFyXsn3TQH2jko0MQKbO+UkK3p2Hv\nSMVA01OxRgacJR27hp/NCKhWJqvBByjgJ0qtWoSuA/snHXLKJIiU2TSVOi9vG+vFFlKYdGV8rhFm\nmUqH+eBPWoMPUMBPFEupc9im1a4IIm
WYUufCZTPgp5ThZ0QR2YyITk9DpzfAcSsdDT5AAT9RnEod\nKukQRLowpc7L2w0A6cyyZUj5DDq9gSXbrk3YJZNB0ShBnAGfTfCgpi1BpMPGslnS2WElnfQCfiEn\notMdOGyRKcM/89SWbHsFyvAJIl0qxRzKUhbHzS6AdNazZRTyWTPDT8clk0EBP0FWFgoQBKMLT4uf\nEES6CIKAdbOOD6Sd4WegUoY/W2QzIlYWCtg7dmb4pNIhiLRgSh0g3Rp+ISeiP9Bw+ZDZKqRTw6eA\nnzC1ahEHxx2ctIxhZJFKOgSRGhtTkuEzH/5Le01kxHQ0+AAF/MRZrUrQAby6aygDqKRDEOnBlDoA\nIOXSuxbzOSPU7uy3U/HBZ1DATxhWm9uqG6ZN5IVPEOmx4SjppJvhG5+t6Xpq9XuAAn7isO77QNMh\nkRc+QaSKs6STbg3fHl1QwJ8hnM0YkmQSRLoUC1lr3di0ZtoCtr0CYCyynhYU8BPGefem+j1BpM+m\nOQEr3Qzf/uy0FDoAzbRNnJXFAkRBgKbrFPAJYgq47cYNQBCsTD8NnAE/rUlXAAX8xMmIIpYXCqgf\nq6TBJ4gp4J1vuRLvfMuVqe6Ds2FMNfwZg/2gVMMnCAKwdfgZUcBSpZDaflDAHwNWwKeSDkEQMGba\nAkbDlhkrpgEF/DFQowyfIAgHrIafZv0eoIA/FtZM10yq4RMEAdglnTTr9wA1bcfCW+U1vHK5YagD\nCIKYe67aqOCn7rgGt70h3ZhAAX8MSPksfvbvvzbt3SAIYkoQBQHvefsNae8GlXQIgiDmBQr4BEEQ\ncwIFfIIgiDmBAj5BEMScQAGfIAhiTqCATxAEMSdQwCcIgpgTKOATBEHMCRTwCYIg5gQK+ARBEHMC\nBXyCIIg5gQI+QRDEnEABnyAIYk6ggE8QBDEnUMAnCIKYEyjgEwRBzAm+C6DIsiwC+CSAWwB0AHxI\nUZTnzec2APyZ4+VvBvDPAHwKwB+4vYcgCIJIj6AM/x4AeUVR7gDwzwHcy55QFGVHUZR3KoryTgAf\nA/AwjGD/bgAFt/cQBEEQ6REU8O8E8GUAUBTlIQC3jr5AlmUBwCcAfFhRFN18z/1+7yEIgiAmT1DA\nXwRw7Ph7YJZ5nNwN4IeKojwb4j0EQRDEhAlaxPwYwILjb1FRFG3kNT8H4PdCvmcUYW1tIeAl8wMd\nCxs6FjZ0LGzoWEQjKPN+EMD/AgCyLN8O4DGX19yqKMq3Q76HIAiCmDBBGf4XAPyYLMsPmn9/QJbl\n9wGoKIryKVmW1wAcBb0nud0lCIIgoiLoup72PhAEQRATgJqpBEEQcwIFfIIgiDmBAj5BEMScENS0\nHSt+1g3zgizLtwH4bUVR3inL8msA3AdAA/BDAB8xJ7PNPLIs5wD8EYBrABQA/CaApzCHx0OW5QyM\nWeuvA6AD+CUY18d9mLNjwZBleR3GbP6/D+MY3Ic5PBayLH8ftlDmBQC/hRDHIu0M39O6YR6QZfnX\nYVzYBfOh3wXwMUVR3g5AAPAP0tq3FPg5ALvmd/9xAP8XjPNhHo/HTwHQFEW5C8C/BPB/Yn6PBUsG\n/iOAJozvPpfXiSzLEgAwSxtFUT6IkMci7YAfaN0w4zwH4D0wfigAeIuiKN8w/30/gB9NZa/S4XMA\n/pX5bxFAD3N6PBRF+f8A/GPzz2sBHAB46zweC5PfgWHIuGX+PZfnBYA3ASjJsvwVWZYfMOc5hToW\naQf8ubZhUBTlzwH0HQ8Jjn83AFQnu0fpoShKU1GUhizLCzCC/7/E8Pk5b8djIMvyfQB+H8BnMafn\nhizLvwBj5PdV8yEBc3osYIxwfkdRlHfBKPN9duT5wGORdnCNYsMwyzi/+wKAw7R2JA1kWb4KwH8H\n8BlFUf4Uc348FEX5BQAygP8bgOR4ap6OxQdgTOT8GgwL9j8GsOZ4fp6OxTMwg7zpXVYHsOF4PvBY\npB3wyYZhmEdkWX6H+e+fAPANvxfPEub6Cl8F8OuKotxnPjyXx0OW5ffLsvwvzD/bAAYAvjePx0JR\nlHcoivI/mTbsjwL4eQBfnsdjAePmdy8AyLJ8HkaA/2qYY5GqSgdkw8BgXfVfA/ApWZbzAJ4E8Pn0\ndmnifAzGcPRfybLMavm/AuATc3g8Pg/gPlmWvw4gB+M4PI35PTec6Jjf6+QPAXxalmUW1D8AI8vn\nPhZkrUAQBDEnpF3SIQiCICYEBXyCIIg5gQI+QRDEnEABnyAIYk6ggE8QBDEnUMAnCIKYEyjgEwRB\nzAkU8AmCIOaE/x8Ys6GoNREgGQAAAABJRU5ErkJggg==\n", + "png": 
"iVBORw0KGgoAAAANSUhEUgAAAXwAAAECCAYAAAD9z2x7AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3XmUpFd53/Fv7b13z9IajWbTfg1CQpuNFmyQQQhLYIND\n/gDHPhbBwdjkkBwSjBUfkjh2nBNHiNiODJENkmMnBDnBDptQjMGEkZBkISEE6I4kJM1IM5rp0cx0\ndVd17W/+qPetqqmpvWt536rf5xwOqq7q6tt3qp966nnvfW7IcRxERGT8hUc9ABERGQ4FfBGRCaGA\nLyIyIRTwRUQmhAK+iMiEUMAXEZkQ0VZ3GmPCwJ3AZUAWeJ+19tma+98J3AY4wKettZ90v+dPgIuB\nEvAr1lo7oPGLiEiH2mX47wDi1trrgI8Ct9fd/3HgRuB64MPGmCXgLcCstfb1wG8Dv9vfIYuISC/a\nBfzrgfsArLUPAVfX3Z8HloAZIEQ5o98AFo0xIWARyPVzwCIi0pt2AX8BSNbcLrolG8/twKPA94Av\nWGuTwH5gCngK+BTwh/0broiI9KpdwE8C87WPt9aWAIwxe4EPAvuAc4Edxph3AR8B9ltrDXA5cI8x\nJt7vgYuISHdaXrSlnK2/HbjXGHMN8ETNfVNAEchaa0vGmGPAFmCW6qeCk0AMiLT6IY7jOKFQqIfh\ni4hMtK4CZ6hV8zS3Du+t0gG4FbgKmLPW3mWM+efAe4AM8AzwK8Ac8BlgO+Vg/wlr7WfbjMNZWVnr\nZtxja3l5Hs1FmeaiSnNRpbmoWl6e71/AHyIFfJdezFWaiyrNRZXmoqrbgK+NVyIiE0IBX0RkQijg\ni4hMCAV8EZEJoYAvIjIhFPBFRCaEAr6IyIRQwBcRmRAK+CIiE0IBX0RkQijgi4hMCAV8EZEJoYAv\nIjIhFPBFRCaEAr6IyIRQwB+Aw8dTfPRTD3LwqHp2i4h/KOAPwFMHT3Ls5Ab20KlRD0VEpKLdmbZD\ns5Et8NLxFC+trLv/n+JEMsN7bryYS8/fNurhdSWZygGwkSmMeCQiIlW+CPjv/Z37WTm50fC+7z93\nYuAB/+jJNJ//5o/4pZsMM1OxTT/f2kYegHRWAV9E/MMXAb9YdLjk3C3sWp5j1/ZZzlmeJRYJ828+\n8wjpIWTJD//gKA//8BhXXLTM6169Y9PPt5Z2A74yfBHxEV8E/Hv+9U1nHEq8PsQsOZkq/6xkOteX\n51tzSzrK8EXET3x70XYqHgEgnckP/Gd5gX6tXwHfe7MawtiHqeQ4OI4z6mGISI98G/CjkTCJWGQo\nWbIX6L1Mf7MqF22zxb48n1/86Rd/wL+9+5FRD0NEeuSLkk4zM1PRodTBk27N3QvUm1EqOaQq5ajx\nyvCfeWmV46sZHMchFAqNejgi0iXfZvgAM4koG0Op4fevpLO+kccreozbRdtkKo/jQKGoso5IEPk6\n4E9PRUlnCwOtGxdLpcoF4n5ctK190xj02IcpkyuQzZdLVPnCeJWqRCaFrwP+TCKK40AmN7gAs56u\nll36UcNfq3m+QY99mGrLXdl8aYQjEZFe+TvgT5UvMQyyrLN6WiArkt1kgK7/lDCMktQw1M5TThm+\nSCD5O+AnygF/kLXw2oy8fHtzZR3v+eamyzt2x6WOX5vh55XhiwSSvwO+m+EPcmmml5FPJ8rr/lc3\nHfDL379j6zQwPpuvTvskpAxfJJD8HfATg8+Svcx11/Y5ANY2Wcf3MvwdW2aA8czwc8rwRQLJ3wG/\nkuEPbj27l+HvXp497fZmn2/HVjfgj8la/NoMX6t0RILJ3wF/GDV8N6Pftexm+H2q4Z+15JZ0lOGL\niE/4OuBPD7GG72X4q5vcbbuWzjE3HWNuxi1HjUkNP1m3mklEgsfXAX8YGX4ylSMWDbPsZuT1q3a6\ntZbOMz8TG8rYh+n0ko4yfJEgCkbAH2CWvJbOsTATZ34mDmyun06xVCK1kWd+OjaUPQTD4jhOXUlH\nGb5IEPk64HslnUEdFeg4DqupPAuzMWLRMDOJ6KYu2q5vFHCA+dn4UN6shiWTK5IrlIhGyg3Tssrw\nRQKpZbdMY0wYuBO4DMgC77PWPltz/zuB2wAH+LS19pPu138TeDsQA/7IWntPL4MbdNDM5IoUiqVK\ndj8/G68cXtIL74Lv/Eyc6TEq6XjZ/bbFaY6eSGuVjkhAtcvw3wHErbXXAR8Fbq+7/+PAjcD1wIeN\nMYvGmDcC17rf80bg/F4HF42EicfCAwv4Xja/4Ab8hZkYaxt5SqXeGp55bxYLM7Gh9vMfNK9+v7w4\nBWiVjkhQtQv41wP3AVhrHwKurrs/DywBM4DXIP0m4HvGmL8CvgD8n80McCYRHVhJx8tcF2bjlf93\nnOrxit3yTrryPjHMTA1u7MPkzdP2SsBXhi8SRO0C/gKQrLlddMs8ntuBR4HvAV+w1q4C24GrgHcB\nvwr8xWYGODMVG1yG767BX3CXUHqZfq91fG+Fz7z7fDOJ6Hhl+O5Kppxq+CKB1O7EqyQwX3M7bK0t\nARhj9gIfBPYBaeDPjTHvAo4DP7TWFoADxpiMMWa7tfZ4qx+0vDzf8OuLcwlePpFm+/a5vp+y5Dzz\nCgC7zl5geXmes93NV+FYtOl4WvFC+55zFllenmdhLsGRHsbey88epIJb4Tp/7xYAQuHw0Mbot7kY\nJc1FleaiN+0C/n7KF1/vNcZcAzxRc98UUASy1tqSMeYY5fLOt4APAR83xpwDzAKvtBvIyspaw6/H\nIiFKJYcXD59iKt7fExkPH3U/vJRKrKysEXXPqjp4+BTnLE11/XxHj6cAKOYKrKys9TT25eX5pnMx\nKkdW1gGIu+9Za6nsUMbox7kYFc1Fleaiqts3vnZR6PPAjcaY/e7tW40x7wbmrLV3GWPuAR4wxmSA\nZ4C7rbUFY8xPGWMeplwy+jVrbc/HPtVuYOp3wK/U8L1VOu7/99pAba3u+QY59mFSDV9kPLSMQm6g\n/kDdlw/U3H8HcEeD7/uNvoyO09srbO3Xk7q8w8srNfzZzdbwc4So9sIf5NiHaTWVIxoJMTcdIxoJ\nqYYvElC+3ngFg22vsJZyA3R9wO9xLX4ynWd2OkY4XK59jEt7hWQqy8JsnFAoRDwa0bJMkYDyf8Af\nYAO1ZDrH7HSMSLg8DV6m32s/nbV0rrJCB4ZzgMugebuRF903w1gsrCMORQLK9wHf27E6iPXsyVSu\nktV7PysaCfVU0imWSqQyhUr9HqoZfpDX4m9kCxSKpcrvlYhGVMMXCSjfB/xBtVcoFL0AXc3IQ6EQ\n8zPxnko663Vr8KG8hwCCneGv1m1Oi8fC6pYpElD+D/heWSTT35OjvLJNbYYP5RU2vWT41U1XZ2b4\nQQ743pvf4pxb0olGyKqGLxJI/g/4icFkybWNzmrNz8bI5Utkc92VLZKV5zuzhh/kks5q3VLTRCxM\noVjqud+QiIyO/wP+gPrKVxqn1WX4i25gW+0yy2+d4Qf3
XNtqhp8AIB6LADoERSSI/B/wB7S0MVnT\n2bLW/Ky3+aq7gN/oDaSyDn8sMvzyPMWi5ZdMVit1RALH9wF/ekB18GrjtDNr+ND95qtKhj9dU9IZ\nqxq+m+FHyxm+VuqIBI/vA34sGiYeDfc9S15rUtJZmC0H7G5X6qx7Nfya54tGBjP2YWpUwweVdESC\nyPcBH8qlkf5n+GcGaKjN8LuruycbLMuEwYx9mJKpHNFImOlEObOPVTJ8BXyRoAlEwJ9JRPtfw6/r\no+OpNlDrtqTjtmmYOv35BjH2YVpN5Vh02ypAeR0+QFYlHZHACUbAn4qykS3gOP1bCphM54jHwmd0\nsey1gVoynWduptpHxzOIsQ+L4zispU/fjaxVOiLBFYyAn4hRLDl9LSOspXNnXLCFakmmlxp+/Zp+\nGMzYhyWdLVAoOpU+OgBxd5WOLtqKBE8wAn6fm5A5jkMy1ThARyNhZqeiXTVQa9SmwRPkBmqr62de\n2PYyfC3LFAmeQAT8ytLMPrVX2MgWz8hca83PxCurUzrhHXo+1zDDD27AryzJbJDh5wP4iUVk0gUi\n4Pc7aK41aINQa2E2TmojT7HUWVBba3IBGILdXqG+cRpUM3wdgiISPMEI+H3esdookNVamInhAOsb\nnf28ZJO+PBDs9gqtMnz1xBcJnmAE/AFl+I0u2kJ1bX6nF25bfWIIcnuFhhl+5aKtMnyRoAlGwO9z\nA7XKJqnZxiWdxS7bK6w1adMAY1jDj6m1gkhQBSPg97mBWjLVWYbf6eartY3mGX6Qz7VVDV9kvAQi\n4E/3eWljsk1JZ6HLtfheI7ZGNfx+j32Ykqkc8WiYqXik8jWtwxcJrkAE/H5nyWvtLtrOdtdPp1UN\nP8gZftLdZeu1VQBl+CJBFoyA3+ezYZMpt+/NdJNlmd3W8DfyhEIw2+D5gnqubcndnFa/V8HrpaMM\nXyR4ghHw3U6NG33aeNWs743HK810vEonlWN+OkY4dObz9Xvsw5LOFCiWnDM+BVWXZSrDFwmaQAT8\nWDRCLBru67LMZuUcgOlEhGgkXCnVtH++fMP6PfR/7MOyup4FOCPDj0bChFCGLxJEgQj40L82w9W+\nN80DfigUYmE2VrkY2+750tlC0127EMwWyckm1zlCoRDxWEQZvkgABSfg9+kgkbUmB5XUm5+Jk0zn\n2rY1bnR4eb1+jX2YVhuswffEomFl+CIBFJiAP+1myZvtK98sc623OBsnXyiRybUObO127UI5ww9a\nT/xW85SIhbXTViSAAhPwZxLRcl/5TZYS2q3B93ifANrV8Tv5xDA9FaVQdAJ1aEg1w0+ccV88FiGv\nXjoigROcgN+nnjSdZviVpZlt6vjtOm9CMNsrVOfpzN8rFg2TDdCbl4iUBSfg9yloVlsZtwn4HR51\nmOyohu+uxQ/QhdvVdPM3xngsQi5fDFSJSkQCFPCn+9RArdLKuEnjNE+nm686yfCn3bX4gcrw13Mk\nYpEzzvwFSETDOA4USwr4IkESmIDfrxYF7Rqnebw3hHabr9ZaZMKeILZXWE3nGpZzoLy3ALQWXyRo\nghPwKy0KNrdjtdOLtt79a21r+F2UdAJyCErJcVhL5RtesIVqe4WsVuqIBEpwAn6iP0cFrqXyJGIR\nEjUdIBvpvIafIxwKVS4qN9KvsQ/L+kaeknNmWwVP3M3wtVJHJFiaRynAGBMG7gQuA7LA+6y1z9bc\n/07gNsABPm2t/WTNfWcBjwJvstYe2OxAZ/rUZjiZzrXddAXVxmrtSzpuX54GfXQ8/Rr7sCTXm2+6\ngtoGasrwRYKkXYb/DiBurb0O+Chwe939HwduBK4HPmyMWQQwxsSATwGpfg20H3Vwp0kHyEaikTCz\nU9EOLtrm276BBK2G32qFDqhFskhQtQv41wP3AVhrHwKurrs/DywB00CIcqYP8PvAHwNH+jXQfmTJ\nG9lyB8hW9fZaC7PxSo2+kXyhxEa2dV8eGMMMX4egiARSu4C/ACRrbhfdMo/ndsplmyeBL1hrk8aY\nXwZWrLX3u49pXuvoQj+y5NUWm4kaWZiJs76Rp1BsnMmub3TWlydwGX6bzWnVDF8BXyRI2gX8JDBf\n+3hrbQnAGLMX+CCwDzgX2GGMeRdwK3CjMebrwOXAPcaYHZsdaD+y5Mqmqw5KOlA929YL7PW8+n67\nTwyBy/DTnWb4KumIBEnLi7bAfuDtwL3GmGuAJ2rumwKKQNZaWzLGHAOWrLVv8B7gBv33W2uPthvI\n8vJ8u4cQjYTJF0sdPbaRA0fWANh51nxHz3H2ttnyz03EGj7+0ImN8uOW59o+XywaplB0Ovq5vf5+\n/eK1TTh3zxaW3TmotXVL+WuJ6fjAxzrqufATzUWV5qI37QL+5yln6/vd27caY94NzFlr7zLG3AM8\nYIzJAM8Ad/c6kJWVtbaPmUlEWF3PdfTYRl48sgpAuOR09BxuIsvzL55kLnbmhyHv+SJO++ebTkRZ\nXc+2fdzy8nzPv1+/HDuRBqCQzTccSy5T/gTwyonUQMfqh7nwC81Fleaiqts3vpYB31rrAB+o+/KB\nmvvvAO5o8f03dDWaNqanYps6KrDTxmmedpuv1lLt2yp4ZhLB6Ym/up5jKh4hEWu8V0GrdESCKTAb\nr2DzQbPaOK3Di7ZtNl+tbbTfZeuZDtCpV8k2R0BqlY5IMAUr4Ff6yvcWaCoXWbvM8Jttvkp2k+FP\nRSkUS77fnVoqOaylW+9V0CodkWBqV8P3ldrljYtzrVsjNJJM5wiFqrto26k0UGuW4XfQR8ez2bEf\nP7VBpklGPZOIsnVhquvnbGRtI4/jtC57BXGnbSZXIBQKNS1T9dOJZKbpJ9F4LMJZS9N9+TmO43Ds\n1MbQDtbp5+us5DisrudYmosTarFLvV4ylWNpy3glGqWSQzpb6DgubUawAn7N8sbFucaNvVpJpvPM\nz8RbtkGoVanhN9l8tZbOEQm37qPj6WXsjuPwwxdO8sUHnuepg6daPvbyC7dzy7X7uGDXYkfP3czq\nehZoviQTarplBqCG/8pqhvseOsg3nzhMJBzihit38ZYf39vRbutOlRyH5w4neezp4zz29ApHXkm3\nfPxFuxe55dpzufT8rV0FO4/jOHz3mVf44oPP86PDybaP76fXXrCNW647lwt7fJ0ViiUe/uFRvvTg\nCxx5Jc22hSkuv2g7V1y0nYv3LBGNnF50KDkOL7y8xmNPH+fxp1d4cSXFlvkEN169hzdecU7D9t1B\nUSiWePDJl/nyt1/g6MkNLjlvK2+7dh8X71nq6XXRiUDN1mY3MCVTObYtdP5GMRWPEIuGKxuR6q2l\n88xNt+6j4+lm7CXH4btPH+eLD77Ac0fKf9Cv2reFndtmGj7+4NF1Hn/mOI8/c5xX7dvCLdfu41X7\ntvT0okl20O45EYAa/pFXUnzl2wd58PsvUyw5bFuYolAs8ZVvH+Rv/v5Ffuqyc7jpdXvYvthbtp3J\nFXjq4Ckef3q
Fx595pVLei0fDXH7hdrY2eZ0dO7nBk8+d4BP3fpe9O+Z427XncqVZ7ug1VCo5PPzU\nUb784Au8uFLuWnLZBdvYvtifrLudg0fX+e6zr/DdZ1/hx/Yucct15/LqDl9n+UKRb33vZb7y7Rc4\nvpohHArxqn1beP7lNb726It87dEXmU5EueyCbVxx0Xam4tHya/rpFU65O7+jkbD7PUk+9/Vn+NKD\nz/Pmq/fwpqt2DyU77pdsvsj/++5h7nv4ICeSWSLhEPt2zPP9507w/edOcOHuRd527T4uPX9b3wN/\nsAL+JjYweW0Q5mc6X8YUCoVYmIk1Pdd2bSPHtoXOAkYnYy+WSnzjOy/y2fuf4iX3D/qqi5e5+dp9\nnLdzoen3OY7DgUOn+OKDL/D9507wwxdOct7OBd523T5ee+H2jj/RQGcrmfy8Sufg0TW++OALPPrU\nMRxg57YZbr5mH6979Q4cx+FbTxzhKw8d5GvfeZFvPP4S11yyg5uv2cfOBvsNoByojryS5qXjKV5a\nSXH4eIoXV9Y5vpqpPGZ+JsbrL9vJFRdu59XnbW1bNjp4dI0vf/sFHvnhMe78qydPG2N9hlseQ4mv\nfvsFPvc3lmMnNwiHQlzrjnvX8tym5qtbBw6d4osPPM+Tz53gqYOPl19n1+7jtRc1fp1lcgW+8dhh\nvvrwQVZTOaKRMD995S7e+hN72b40TaFYwh46xeNuBv/QD47y0A+q23Zmp6Jc95qzueKi7Vxy3lam\n4lGmZxP8z68+xf/9+0P89bee476HD3LDFbt4y4/vYamHT/7Dks4U+PpjL3L/I4dYS+eJR8PcePUe\nbvqJPWxdmOLZl1b50oMv8Pgzx/nEvU+w96w5brnuXK66eJlwuD+BP+STY+qcTtbVfv07L/Lf7j/A\n+3/2El736u42755IZvgXdz7A6169g/f/7CUdf9+/u+cRnjuyRjRy5oQXig6v2reFf/nuKzY9dsdx\n+I///THsoVOEQyFe9+od3HztPnZtbxyImnnuSJIvP/gCjx5YASASDtFNklAqlT9h/NOfv5QrLl5u\n+JiNbIFfv+ObXHbBNv7ZP3xty+d79vAqn/jcd8n29Gmgtj1TZwrF8uP37Zjnlmv3Ncye68sKQMN/\nX4Bi0TljBAszMXYtz3Hu2fNcftF2Ljhnsac/yJdPpPnyt1/gwSfLn0LCoRDhBssovH+TaCTE6y/d\nyVuv2de36wC9ev7lJF96oP3rrFhycJzyp+UbrtzFW67e07Sk6TgOh46t8/jTx8nmi1x2wTYu3L1I\npG5SvHX4mVyBv3u8nCmvrucIAZEm/45+4M3FdCLKm67azZuv3t2wD9ehY+t86cHneeSpYzhOuYz2\noSZ/Z8vL8139woHK8Kc3keFX+t50+dHvp6/czTcee6nxnSF4w+XndPQ87ca+cmoDe+gUZu8W3nvz\nj7Hc4x/0eTsX+PWfv5SXjqf46kMHOfJK9w1LZ6djXLx3qen91Yu27YP4sy8lSWUKnL11htkOrnXU\nisYiFLp8o5ibjvGmq3ZzyXnN6+PRSJjrXrOTay45m8cOrPCNxw+TafLvEo2E2blthl3Lc+zaPss5\ny7Ntm+V16uytM7z35lfxc9efx1cfOchzLerxl160zE++5my2zPsjgz337JrX2cMHOXK88essFArx\nmvO28qardzM71fpvLxQKsXfHPHt3dPYpfCoe5aaf2MtPX7mb/U8e4aHvH23a98oPQuEQr71gGzdc\nsbvldb89Z83xqz/3Gt75k2nuf+RQR6sAOxWogD+T8A4D737zVcoN+LNdBvzrL93J9Zfu7Prn1Ws3\ndnuofFH2jVft7jnY19q1fZb33vKqTT9PI5FwmGgk1NHqEO/3/aWbDD+2b0tXP2fQOyrDoRBXmbO4\nypw1sJ/RiW2LU7znzRe3fIxfd5fu2j7Le28ezOusU7FomDdevos3Xr5rpOPotx1bZ/jFm0xfnzNw\n6/Chtww/5V4s7TbL7Jd2Yz/gBvxLzt82tDFtRiwa6eiIQ+8idScrmURksAIV8Kc3cVTgeqa3DL9f\nKsccZhuXKA4cOsVMIsq+s5tfnPWTeCzc0carlAK+iG8EKuBXljb2kuF7JZ02dcRBqWT4DUo6J5IZ\nVk5luHjPUt+uxg9aPBruqKSz4f5beSUtERmdYAX8qd7X4VdKOtOjyTSnW7xZHXixXM65eE/zC6V+\nE49FOrpom87kCYVgKjH4Ha4i0lqgAn48GiYSDm0qw58bUYbvjb1ROerAoXKb5UAF/Giko3X4qWyB\nmUS0q70AIjIYgQr4oVC5jcHGZi7ajqiG7429YYZ/6BSJWIS9O4a7iWYzvJJOqc0+jnSmoPq9iE8E\nKuCD2yK5l5LORp4Q1esAo9Bo7Ml0jsPHU1y4a6HhLku/8nbb5tus1ElnCqrfi/hEcCKMq1mW3E4q\nk2dmKjrSi6KNxv50AMs5ULP5qsVKnUKxRDZfVIYv4hPBC/iJKPlC933lU5nCyFboeBqN3Vt/H7iA\n38FB5t6bmwK+iD8ELuBPu0E73WQ9ezOpjfzIVuh4Go39wKFTRCMhzj8nGOvvPZ0cguJdoB5lGU1E\nqgIX8Ktthjtvr5DLF8kVSr7I8KE69nSmwMFja5y/c6HSYz4o4l5P/A4y/FHPu4iUBS/g99BeYdQr\ndDz1Y3/mpVUch5aNyvyqkxp+yn1jm1ZJR8QXghfwe2ivUN1lO9rAUz/2oNbvocMavko6Ir4SvIDf\nU4Y/2rYKnvqxH3B7319wzuaOJRyFTmr46RE3rBOR0wUu4E/3cMzh+oZPSjo17RWy+SLPHUmy7+y5\nyu8UJFqlIxI8gQv4vTRQq2b4I16lU1PS+dHhJMWSE8hyDtRk+C366VRbI+uirYgfBC/g99BALTXi\n1sie2pJOkOv30Nm5tt5qJNXwRfwhcH+JPWX4bklnVI3TPDM15aiXT5TPUr1odzADfizafpWOSjoi\n/hLADL8ctLtpoFbN8Ee8SscdezKd49mXVtm9PMvciD919CrRQQ1/1KeMicjpghfwe7hoO+rDTzze\n2O3BU+QKpcCWcwBiHa7SiUbCgdtUJjKuAhfw4zGvJ37nO21HffiJxxv7uvsGFOSA39k6/LyyexEf\nCVzAD4VCTHfZIjm1kWc6ESESHu2v643dE+SAn/DaI7ep4at+L+IfgQv40H2L5FQmP/JyjscLgDu2\nTLM0lxjxaHoXa5PhO47j9sJXwBfxi2AG/ES0q9YK6xujb43s8QJgkLN7qC7LzDZZh5/LlyiWHK3B\nF/GRYAb8qSi5Qol8B2eq5gvlQzhGXb/3eBl+0AN+wm2e1uzfQEsyRfwnmAHf27HaQVkn7ZM+Op6l\nuQQhwASwQ2ataCRMiOY7bb2lsAr4Iv4RyL/G2h2rC7Pxlo9d90lrZM8/eMMFvP7SnWxfnB71UDYl\nFAoRi4XJNsvw1SlTxHcC+dfoHYrtra9vxS+tkT1b5hNsmQ/uxdpa8WhE
JR2RAGn512iMCQN3ApcB\nWeB91tpna+5/J3Ab4ACfttZ+0hgTAz4N7AMSwO9Ya7/Qz0F7Wf1qKtf2sX5pjTyO4rFw05KO30pp\nItK+hv8OIG6tvQ74KHB73f0fB24Ergc+bIxZAv4RsGKt/SngrcAf9XfIsDRfDvin1rNtH5va8Mem\nq3EUj0ZaBHyVdET8pl3Avx64D8Ba+xBwdd39eWAJmAZCQAn4HPCxmufvfP1kh7a469c7Cvhupjnq\nxmnjKB4LN+2WWW2NrIAv4hft/hoXgGTN7aIxJmyt9f7KbwceBVLA/7LWVh5rjJkH7gX+VR/HC8CS\nWwM/udZ5wPfLRdtxUs7wSziOQygUOu0+1fBF/Kddhp8E5msf7wV7Y8xe4IOUa/XnAjuMMe9y79sD\n/C3wZ9baz/Z70EuzXobfQQ1/Qx0bByUeC1NyHIol54z7UuqFL+I77f4a9wNvB+41xlwDPFFz3xRQ\nBLLW2pIx5hiwZIzZAdwP/Jq19uudDmR5eb79g2rMTkVZ38i3/b6CG4v27t7C1oWprn7GqHQ7F6My\n777xLizOnPEJqlgz7/MzrZfOthKUuRgGzUWV5qI37QL+54EbjTH73du3GmPeDcxZa+8yxtwDPGCM\nyQDPAPdodT7vAAAMnElEQVQA/wlYBD5mjPFq+T9jrc20+kErK2tdDXxxLsHxUxttv+/EqfJBI5lU\nlpUuOmyOyvLyfNdzMSpOqVzZO/zy6hl9gU4ly//c6bUMmVT70lsjQZqLQdNcVGkuqrp942sZ8K21\nDvCBui8fqLn/DuCOuvs/5P5voJbm4hw+niKXL1b6ujSynimQiEUqzb6kf+LR5ufapjIFphMRwuHQ\nGfeJyGgENgp6GeWpNmvxUxt5LckckHjMO+bwzJU66pQp4j+BDfjebtVTbVbq+Kk18ripZvgNAn42\nz3RC8y7iJ4EN+EsdrMUvFEtsZItaoTMglQy/rqRTKjmadxEfCnDAd3fbtsjwvbXgWoM/GJVDUOpK\nOhs5rcEX8aPgBvz59mvx/XJ4+bjyLpbXZ/gp7bIV8aXABnyvvcLJFiUdvxxePq6q59rWZfiVPjp6\noxXxk8AG/IXZOCFal3S8DF99dAbDK+lkC/UZvr9aUotIWWADfjQSZn423vKirfroDFazVTpe47Rp\nBXwRXwlswIfyhduT61kc58xeLqA+OoNWPdf29Ay/0jhN6/BFfCXgAT9BLl9eetmIDj8ZrEpJp0mG\nr3kX8ZfAB3xovha/eviJAs8gNFulk87qAHMRPwp0wPd22zZbqaOLh4MVb7JKR8syRfwp0AG/3ear\ndW+VjjL8gYhHG++03dDxhiK+FPCA36akk8kTi4ZbdtOU3nnzmlWGLxIIgQ74W9rstk1tFFTOGSAv\nw883qOFHwqHKxiwR8YdAB/xKht+kpJPK5HXBdoCatUdOZwpMJ6JnnHMrIqMV6IA/NxMjEg41LOmU\nSg7pTEFLAwcoEg4TCYfOXKWT0ScrET8KdMAPh0IszTXebZvOFnDQCp1Bi8ciZ2b42YLq9yI+FOiA\nD+Wyzqn1HKW63bZqqzAc8Wj4tAw/XyiSL5S0QkfEh8Yi4BdLDuvp0w8o9zZdqXHaYMVj4dMy/HRl\nhY7mXcRvgh/w5xsvzaxm+Mo0BykejZyW4Vf66KikI+I7wQ/47uark3UrdXT4yXDUZ/hagy/iX2MQ\n8Jtl+OqjMwzxaIR8oVS5hpLWLlsR3wp8wG+2+aqa4SvwDFKs0iK5nOWnM17jNL3RivhN4AO+l+HX\nl3TW1Rp5KBLR0ztmVg6O1xutiO+MTcA/o6SzoYu2wxCvy/BTKumI+FbgA/50IkIiFmlaw1enzMGK\nuRl+1s3wN7QsU8S3Ah/wQ5XdtmfW8NXAa/Aq/XTcU690+ImIfwU+4EO5rLOWylEoVpcHrmcKzE7H\n1MBrwBJ1h6CopCPiX2MR8LfMJ3CAZKqa5ac28rpwOASVc23dg8zTWocv4ltjEfArK3XcOn7JcdQa\neUji9at0MgXisTDRyFi8tETGylj8VdYfdZjJFnEc9dEZhkT9OvxsXkthRXxqPAJ+3eYrHV4+PPWr\ndNKZgur3Ij41HgG/bi2+WiMPT+0qnZLjkM4WmNYbrYgvjUfAnz99t63XGlkZ/uDFa1bpZHPlUtqs\nMnwRXxqPgD/r1vCV4Q+dd5B5Ll+szLtW6Ij401gE/HgswuxUtFrDV2vkofEy/GyhqMNPRHyuZSpm\njAkDdwKXAVngfdbaZ2vufydwG+AAn7bWfrLd9wzK0nyCE8lyhr9eaY2sTHPQvAw/ny+pNbKIz7XL\n8N8BxK211wEfBW6vu//jwI3A9cCHjTFL7vckWnzPQCzNJdjIFsjmisrwh8jL8HOFok67EvG5dgH/\neuA+AGvtQ8DVdffngSVgBghRzvSvB77S4nsGYou3UieVremUqYA/aNUafkm7bEV8rl3AXwCSNbeL\nbsnGczvwKPA94AvW2tUOvmcgluarm68qnTKV4Q9cNcMvVQ8/SWjeRfyoXSqWBOZrboettSUAY8xe\n4IPAPiAN/Lkx5l2tvqeV5eX5dg9paffZiwAUQ2GyhRLhcIi9u5cC2Txts3MxTAveAeahEETKwX/X\n2Qt9+x2CNBeDprmo0lz0pl3A3w+8HbjXGHMN8ETNfVNAEchaa0vGmGOUyzutvqeplZW1bsd+mhjl\nM1UPHl7l1FqGmUSU48fXN/Wco7C8PL/puRgmxz3Ldj2VZeVECoBcJteX3yFoczFImosqzUVVt298\n7QL+54EbjTH73du3GmPeDcxZa+8yxtwDPGCMyQDPAHdTfhM47Xu6GlGPqu0VyiUd1e+HIxQKEY+G\n3ZKOVumI+FnLv0xrrQN8oO7LB2ruvwO4o8G31n/PwNW2V0ht5FlenBr2ECZWPBYhVyixkdU6fBE/\nG4uNVwALszFCIXj5RJpiyVGGP0TxWLiy0zYETCV0ypiIH41NwI+EwyzMxjl8PA2oj84wxaLlDD+d\nLTAzFSUcwAvlIpNgbAI+lMs63jGH2nQ1PIloOcNPZwpMq34v4ltjFfC9zVegTVfDFI9FyLsXbbXp\nSsS/xuqv01upAyrpDFMsGqZYciiWivpkJeJjY5Xhe0cdgjL8YUrEqhdptSRTxL/GKuCfVtJRpjk0\n3qlXoD46In42VgH/tJKOWiMPTSyqgC8SBOMV8GsyfDVOG564SjoigTBWAX/LvFbpjEIiWhPw9UYr\n4ltjFfBnp6JEIyFCKNMcJpV0RIJhrAJ+KBRiy3yivNszrN2ew1J70VbLYUX8a+z+Ot/z5ovJFdq2\n35c+Or2Gr5KOiF+NXcB/7YXbRz2EiROvKelMK8MX8a2xKunIaNRm+CrpiPiXAr5sWm2Gr4vlIv6l\ngC+b5mX40Uj4tGxfRPxFAV82zcvwtSRTxN8U8GXTvKxe5RwRf1PAl03zMnxdsBXxNwV82TQvw9eS\nTBF/U8CXTZubjhGLhllemh7
1UESkBaVksmnTiSi/90+uYX5Gu2xF/EwBX/pi68LUqIcgIm2opCMi\nMiEU8EVEJoQCvojIhFDAFxGZEAr4IiITQgFfRGRCKOCLiEwIBXwRkQmhgC8iMiEU8EVEJoQCvojI\nhFDAFxGZEC2bpxljwsCdwGVAFniftfZZ974dwGdrHn458BvAXcCfAhcDJeBXrLW2/0MXEZFutMvw\n3wHErbXXAR8FbvfusNYetdbeYK29AbgNeJRysL8JmLXWvh74beB3BzJyERHpSruAfz1wH4C19iHg\n6voHGGNCwB8AH7DWOsAGsOh+fRHI9XXEIiLSk3YBfwFI1twuumWeWm8HnrTWPu3e3g9MAU8BnwL+\nsB8DFRGRzWkX8JPAfO3jrbWlusf8AvBfa25/BNhvrTWU6/r3GGPimx6piIhsSrsTr/ZTzuDvNcZc\nAzzR4DFXW2sfrLk9S/VTwUkgBkTa/JzQ8vJ8m4dMDs1FleaiSnNRpbnoTchxnKZ3unV4b5UOwK3A\nVcCctfYuY8wy8FVr7ZU137MEfAbYTjnYf8Ja+1lERGSkWgZ8EREZH9p4JSIyIRTwRUQmhAK+iMiE\nUMAXEZkQ7ZZlDlSrXj2TwhjzOuA/WGtvMMZcCNxNuQfRk8Cvu7uXx54xJgZ8GtgHJIDfAX7IBM6H\nMSZCuU3JxYAD/Crlv4+7mbC58BhjzqLcvuVNlOfgbiZwLowx3wFW3Zs/An6PLuZi1Bl+0149k8AY\n8xHKf9gJ90sfB26z1v4UEAJ+blRjG4FfAFbc3/2twH+h/HqYxPl4G1By+1H9FvDvmdy58JKBTwEp\nyr/7RP6dGGOmALweZtbaf0yXczHqgN+2V8+Yewb4ecr/UABXWmu/6f73V4A3j2RUo3Ev8DH3v8NA\nngmdD2vtXwPvd2+eS3kD41WTOBeu3wf+GDji3p7I1wXwWmDGGPNVY8zX3M2wXc3FqAN+J716xpa1\n9n8DhZovhWr+e51y87mJYK1NWWvXjTHzlIP/b3H663PS5qNojLkb+M/AXzChrw1jzC9T/uR3v/ul\nEBM6F5Q/4fy+tfYmymW+v6i7v+1cjDq4dtKrZ5LU/u7zwKlRDWQUjDF7gL8F/sxa+z+Y8Pmw1v4y\nYIA/odyQ0DNJc3ErcKMx5uu4vbmA5Zr7J2kuDuAGebdZ5SvAjpr7287FqAP+fuBmgBa9eibJY8aY\nN7j//TPAN1s9eJy4B+rcD3zEWnu3++WJnA9jzC8aY37TvbkBFIG/n8S5sNa+wVr7RvfcjceBXwLu\nm8S5oPzmdzuAMeYcygH+/m7mYqSrdIDPU3733u/evnWUgxkh76r6h4G73O6iPwD+cnRDGrrbKH8c\n/Zgxxqvlfwj4gwmcj78E7jbG/B3lflQfotxufFJfG7UcJvfv5E+BzxhjvKB+K+Usv+O5UC8dEZEJ\nMeqSjoiIDIkCvojIhFDAFxGZEAr4IiITQgFfRGRCKOCLiEwIBXwRkQmhgC8iMiH+PzxQ1LcS6Wos\nAAAAAElFTkSuQmCC\n", "text": [ - "" + "" ] } ], - "prompt_number": 14 + "prompt_number": 6 }, { "cell_type": "heading", @@ -4152,7 +2230,7 @@ "language": "python", "metadata": {}, "outputs": [], - "prompt_number": 31 + "prompt_number": 7 }, { "cell_type": "code", @@ -4166,23 +2244,23 @@ { "metadata": {}, "output_type": "pyout", - "prompt_number": 28, + "prompt_number": 8, "text": [ - "[('ruby', 0.26000000000000001),\n", - " ('java', 0.16),\n", - " ('python', 0.16),\n", - " ('clojure', 0.14000000000000001),\n", - " ('scheme', 0.10000000000000001),\n", - " ('php', 0.059999999999999998),\n", + "[('java', 0.46000000000000002),\n", + " ('ruby', 0.20000000000000001),\n", + " ('scheme', 0.16),\n", + " ('clojure', 0.059999999999999998),\n", + " ('python', 0.059999999999999998),\n", " ('haskell', 0.040000000000000001),\n", - " ('javascript', 0.040000000000000001),\n", - " ('perl', 0.040000000000000001),\n", + " ('php', 0.02),\n", " ('scala', 0.0),\n", - " ('ocaml', 0.0)]" + " ('javascript', 0.0),\n", + " ('ocaml', 0.0),\n", + " ('perl', 0.0)]" ] } ], - "prompt_number": 28 + "prompt_number": 8 }, { "cell_type": "heading", @@ -4216,7 +2294,7 @@ "language": "python", "metadata": {}, "outputs": [], - "prompt_number": 34 + "prompt_number": 9 }, { "cell_type": "code", @@ -4238,23 +2316,23 @@ { "metadata": {}, "output_type": "pyout", - "prompt_number": 33, + "prompt_number": 10, "text": [ - "[('ocaml', 0.73999999999999999),\n", - " ('clojure', 0.10000000000000001),\n", - " ('scheme', 0.059999999999999998),\n", - " ('python', 0.059999999999999998),\n", + "[('ocaml', 0.66000000000000003),\n", + " ('clojure', 0.12),\n", + " ('scheme', 0.12),\n", + " ('python', 0.040000000000000001),\n", + " ('ruby', 0.040000000000000001),\n", " ('haskell', 0.02),\n", - " ('java', 0.02),\n", " ('scala', 0.0),\n", + " ('java', 0.0),\n", " ('javascript', 0.0),\n", " ('perl', 0.0),\n", - " ('php', 0.0),\n", - " ('ruby', 0.0)]" + " ('php', 0.0)]" ] } ], - "prompt_number": 33 + "prompt_number": 10 }, { "cell_type": "code", 
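The long runs of confusion matrices and classification reports above, the (parameter, score) list that follows them, the plotted figure, and the sorted per-language probability lists all have the shape of a scikit-learn fit/evaluate loop followed by a predict_proba ranking. The patch never shows that loop itself, so the sketch below is only a plausible reconstruction under stated assumptions: X and y stand for the notebook's feature matrix and language labels, the swept value is passed to n_estimators purely as a placeholder (the hunks above do not say which hyperparameter runs from 20 to 69), the recorded score is a weighted F1 standing in for whatever metric the notebook actually tracks, and the imports use current scikit-learn paths (the 2015-era notebook predates sklearn.model_selection).

from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import classification_report, confusion_matrix, f1_score
from sklearn.model_selection import train_test_split


def sweep_scores(X, y, low=20, high=70):
    # Refit and rescore for each swept value, printing the same diagnostics that
    # appear in the cell output above and collecting (value, score) pairs of the
    # kind that feed the plot.
    scores = []
    for n in range(low, high):
        X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25)
        clf = RandomForestClassifier(n_estimators=n).fit(X_train, y_train)
        predicted = clf.predict(X_test)
        print(confusion_matrix(y_test, predicted))
        print(classification_report(y_test, predicted))
        scores.append((n, f1_score(y_test, predicted, average='weighted')))
    return scores


def rank_languages(clf, languages, feature_vector):
    # Pair predict_proba output with language names (ordered to match clf.classes_)
    # and sort descending; this is the shape of the [('java', 0.46...), ...] lists above.
    probabilities = clf.predict_proba([feature_vector])[0]
    return sorted(zip(languages, probabilities), key=lambda pair: pair[1], reverse=True)

None of the function or argument names above are taken from the repository; they exist only to make the evaluation and ranking steps implied by the outputs concrete.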
@@ -4263,7 +2341,7 @@ "language": "python", "metadata": {}, "outputs": [], - "prompt_number": 16 + "prompt_number": 10 }, { "cell_type": "code", @@ -4272,7 +2350,7 @@ "language": "python", "metadata": {}, "outputs": [], - "prompt_number": 16 + "prompt_number": 10 }, { "cell_type": "code", @@ -4281,7 +2359,7 @@ "language": "python", "metadata": {}, "outputs": [], - "prompt_number": 16 + "prompt_number": 10 }, { "cell_type": "code", @@ -4290,7 +2368,7 @@ "language": "python", "metadata": {}, "outputs": [], - "prompt_number": 16 + "prompt_number": 10 }, { "cell_type": "code", @@ -4299,7 +2377,7 @@ "language": "python", "metadata": {}, "outputs": [], - "prompt_number": 16 + "prompt_number": 10 } ], "metadata": {} From 1698c61b714957bb3f83a8c28d187df260401863 Mon Sep 17 00:00:00 2001 From: Zack Cooper Date: Mon, 16 Feb 2015 09:10:38 -0500 Subject: [PATCH 6/6] changed features --- corpus_build.py | 4 +- lang classifier live.ipynb | 596 +++++++++++++++++++------------------ 2 files changed, 316 insertions(+), 284 deletions(-) diff --git a/corpus_build.py b/corpus_build.py index b659079..2b7fce0 100644 --- a/corpus_build.py +++ b/corpus_build.py @@ -67,8 +67,8 @@ ('val_exists', r"\bval\b"), ('else_if', r"(else if)"), ('elif', r"(elif)"), - ('elif', r"(print )"), - ('elif', r"(print\()"), + ('print_space', r"(print )"), + ('pyth_print', r"(print\()"), ('void', r"(void)")] diff --git a/lang classifier live.ipynb b/lang classifier live.ipynb index d9ebd8b..fc999af 100644 --- a/lang classifier live.ipynb +++ b/lang classifier live.ipynb @@ -1,7 +1,7 @@ { "metadata": { "name": "", - "signature": "sha256:5d38ae2fb3758ef4f48e8df732eaeef65eb86124e67ec22c3c221b664a5301d2" + "signature": "sha256:67cb485a11f3574e7154427b6ce8b9cb5f0788005d345301cc83874050c10871" }, "nbformat": 3, "nbformat_minor": 0, @@ -56,6 +56,8 @@ " val_exists\n", " else_if\n", " elif\n", + " print_space\n", + " pyth_print\n", " void\n", " \n", " \n", @@ -79,6 +81,8 @@ " 0\n", " 0\n", " 0\n", + " 0\n", + " 0\n", " 0.000000\n", " \n", " \n", @@ -100,6 +104,8 @@ " 0\n", " 0\n", " 0\n", + " 0\n", + " 0\n", " 0.000000\n", " \n", " \n", @@ -121,6 +127,8 @@ " 0\n", " 0\n", " 0\n", + " 0\n", + " 0\n", " 0.000337\n", " \n", " \n", @@ -142,6 +150,8 @@ " 0\n", " 0\n", " 0\n", + " 0\n", + " 0\n", " 0.000000\n", " \n", " \n", @@ -163,6 +173,8 @@ " 0\n", " 0\n", " 0\n", + " 0\n", + " 0\n", " 0.000000\n", " \n", " \n", @@ -187,12 +199,19 @@ "3 0.000000 0 0.000000 0 0.000618 \n", "4 0.000000 0 0.001059 0 0.000529 \n", "\n", - " var_exists star_count dollar_sign val_exists else_if elif void \n", - "0 0 0.000000 0 0 0 0 0.000000 \n", - "1 0 0.000000 0 0 0 0 0.000000 \n", - "2 0 0.000000 0 0 0 0 0.000337 \n", - "3 0 0.001235 0 0 0 0 0.000000 \n", - "4 0 0.001059 0 0 0 0 0.000000 " + " var_exists star_count dollar_sign val_exists else_if elif \\\n", + "0 0 0.000000 0 0 0 0 \n", + "1 0 0.000000 0 0 0 0 \n", + "2 0 0.000000 0 0 0 0 \n", + "3 0 0.001235 0 0 0 0 \n", + "4 0 0.001059 0 0 0 0 \n", + "\n", + " print_space pyth_print void \n", + "0 0 0 0.000000 \n", + "1 0 0 0.000000 \n", + "2 0 0 0.000337 \n", + "3 0 0 0.000000 \n", + "4 0 0 0.000000 " ] } ], @@ -233,6 +252,8 @@ " val_exists\n", " else_if\n", " elif\n", + " print_space\n", + " pyth_print\n", " void\n", " answers\n", " \n", @@ -257,6 +278,8 @@ " 0.000000\n", " 0\n", " 0\n", + " 0\n", + " 0\n", " 1\n", " \n", " \n", @@ -278,6 +301,8 @@ " 0.000000\n", " 0\n", " 0\n", + " 0\n", + " 0\n", " 4\n", " \n", " \n", @@ -299,6 +324,8 @@ " 0.000288\n", " 0\n", " 0\n", + " 0\n", + " 0\n", " 4\n", " \n", " \n", 
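The corpus_build.py change in this patch renames the two print-statement regexes that were previously both labelled 'elif', so space-delimited prints ('print x') and call-style prints ('print(x)') become separate columns, print_space and pyth_print. A minimal sketch of how such regex-count features might be computed per document, assuming counts are normalised by document length (consistent with the fractional values in the feature tables; the helper name is illustrative):

    import re

    # (name, pattern) pairs taken from the corpus_build.py hunk in this patch
    features = [('print_space', r"(print )"),
                ('pyth_print', r"(print\()"),
                ('void', r"(void)")]

    def regex_features(text):
        # count matches of each pattern and normalise by the document's length
        return {name: len(re.findall(pattern, text)) / len(text)
                for name, pattern in features}
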
@@ -320,6 +347,8 @@ " 0.000000\n", " 0\n", " 0\n", + " 0\n", + " 0\n", " 4\n", " \n", " \n", @@ -341,6 +370,8 @@ " 0.000000\n", " 0\n", " 0\n", + " 0\n", + " 0\n", " 9\n", " \n", " \n", @@ -365,12 +396,19 @@ "3 0.000000 0.035294 0 0.011765 0.005882 \n", "4 0.004769 0.001122 0 0.000000 0.000000 \n", "\n", - " star_count dollar_sign val_exists else_if elif void answers \n", - "0 0 0.000000 0.000000 0.000000 0 0 1 \n", - "1 0 0.000000 0.000000 0.000000 0 0 4 \n", - "2 0 0.000173 0.000346 0.000288 0 0 4 \n", - "3 0 0.000000 0.000000 0.000000 0 0 4 \n", - "4 0 0.000000 0.000000 0.000000 0 0 9 " + " star_count dollar_sign val_exists else_if elif print_space \\\n", + "0 0 0.000000 0.000000 0.000000 0 0 \n", + "1 0 0.000000 0.000000 0.000000 0 0 \n", + "2 0 0.000173 0.000346 0.000288 0 0 \n", + "3 0 0.000000 0.000000 0.000000 0 0 \n", + "4 0 0.000000 0.000000 0.000000 0 0 \n", + "\n", + " pyth_print void answers \n", + "0 0 0 1 \n", + "1 0 0 4 \n", + "2 0 0 4 \n", + "3 0 0 4 \n", + "4 0 0 9 " ] } ], @@ -411,13 +449,13 @@ "language": "python", "metadata": {}, "outputs": [], - "prompt_number": 3 + "prompt_number": 13 }, { "cell_type": "code", "collapsed": false, "input": [ - "run_test_model(RandomForestClassifier(50), corp_train, corp_answer, test_train, test_answer)" + "run_test_model(RandomForestClassifier(40), corp_train, corp_answer, test_train, test_answer)" ], "language": "python", "metadata": {}, @@ -432,15 +470,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", - " 4 1.00 1.00 1.00 4\n", + " 3 1.00 1.00 1.00 2\n", + " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", + " 6 0.50 1.00 0.67 1\n", + " 7 1.00 0.67 0.80 3\n", " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.92 0.84 0.84 31\n", + "avg / total 0.92 0.87 0.87 31\n", "\n", "[[2 0 2 0 0 0 0 0 0 0 0]\n", " [0 2 0 0 0 0 0 0 0 0 0]\n", @@ -450,21 +488,21 @@ " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 1 2 0 0]\n", + " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n" ] }, { "metadata": {}, "output_type": "pyout", - "prompt_number": 4, + "prompt_number": 14, "text": [ - "0.83694316436251914" + "0.86991807475678429" ] } ], - "prompt_number": 4 + "prompt_number": 14 }, { "cell_type": "code", @@ -484,21 +522,21 @@ "text": [ " precision recall f1-score support\n", "\n", - " 1 1.00 0.50 0.67 4\n", + " 1 1.00 0.25 0.40 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", " 3 1.00 1.00 1.00 2\n", - " 4 0.80 1.00 0.89 4\n", + " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", - " 6 0.50 1.00 0.67 1\n", - " 7 1.00 0.67 0.80 3\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", - " 9 1.00 1.00 1.00 3\n", + " 9 0.60 1.00 0.75 3\n", "\n", - "avg / total 0.92 0.87 0.87 31\n", + "avg / total 0.90 0.81 0.79 31\n", "\n", - "[[2 0 2 0 0 0 0 0 0 0 0]\n", + "[[1 0 2 0 0 0 0 0 0 0 1]\n", " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", @@ -506,8 +544,8 @@ " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 1 2 0 0]\n", - " [0 0 0 0 0 1 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 0 0 0 0 0 3 1]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", @@ -515,15 +553,15 @@ " 10 1.00 1.00 1.00 2\n", " 
11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 1.00 1.00 1.00 2\n", + " 3 0.67 1.00 0.80 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", - " 6 0.50 1.00 0.67 1\n", - " 7 1.00 0.67 0.80 3\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", - " 9 0.75 1.00 0.86 3\n", + " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.92 0.87 0.87 31\n", + "avg / total 0.92 0.84 0.84 31\n", "\n", "[[2 0 2 0 0 0 0 0 0 0 0]\n", " [0 2 0 0 0 0 0 0 0 0 0]\n", @@ -533,24 +571,24 @@ " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 1 2 0 0]\n", - " [0 0 0 0 0 0 0 0 0 3 1]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 1.00 0.25 0.40 4\n", + " 1 1.00 0.50 0.67 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 1.00 1.00 1.00 2\n", + " 3 0.67 1.00 0.80 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", - " 6 0.50 1.00 0.67 1\n", - " 7 1.00 0.67 0.80 3\n", - " 8 0.75 0.75 0.75 4\n", - " 9 0.75 1.00 0.86 3\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 0.75 0.86 4\n", + " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.89 0.84 0.82 31\n" + "avg / total 0.92 0.84 0.84 31\n" ] }, { @@ -558,7 +596,7 @@ "stream": "stdout", "text": [ "\n", - "[[1 0 2 0 0 0 0 0 0 1 0]\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", @@ -566,24 +604,24 @@ " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 1 2 0 0]\n", - " [0 0 0 0 0 0 0 0 0 3 1]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 0.50 0.25 0.33 4\n", + " 1 1.00 0.25 0.40 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", " 3 1.00 1.00 1.00 2\n", - " 4 1.00 1.00 1.00 4\n", + " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 0.75 0.75 0.75 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.84 0.81 0.79 31\n", + "avg / total 0.88 0.81 0.79 31\n", "\n", "[[1 0 2 0 0 0 0 0 0 1 0]\n", " [0 2 0 0 0 0 0 0 0 0 0]\n", @@ -594,7 +632,7 @@ " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [1 0 0 0 0 0 0 0 0 3 0]\n", + " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", @@ -605,12 +643,12 @@ " 3 1.00 1.00 1.00 2\n", " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", + " 6 0.50 1.00 0.67 1\n", + " 7 1.00 0.67 0.80 3\n", " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.91 0.84 0.84 31\n" + "avg / total 0.92 0.87 0.87 31\n" ] }, { @@ -626,7 +664,7 @@ " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 0 0 0 1 2 0 0]\n", " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", @@ -636,14 +674,14 @@ " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", " 3 1.00 1.00 1.00 2\n", - " 4 0.80 1.00 0.89 4\n", + " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", - " 6 0.50 1.00 0.67 1\n", - " 7 1.00 0.67 0.80 3\n", - " 8 1.00 0.75 0.86 4\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", + " 8 1.00 1.00 1.00 4\n", " 
9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.92 0.87 0.87 31\n", + "avg / total 0.94 0.87 0.87 31\n", "\n", "[[2 0 2 0 0 0 0 0 0 0 0]\n", " [0 2 0 0 0 0 0 0 0 0 0]\n", @@ -653,8 +691,8 @@ " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 1 2 0 0]\n", - " [0 0 0 0 0 1 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 0 0 0 0 0 4 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", @@ -703,13 +741,7 @@ " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.91 0.84 0.84 31\n" - ] - }, - { - "output_type": "stream", - "stream": "stdout", - "text": [ + "avg / total 0.91 0.84 0.84 31\n", "\n", "[[2 0 2 0 0 0 0 0 0 0 0]\n", " [0 2 0 0 0 0 0 0 0 0 0]\n", @@ -728,15 +760,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", - " 4 1.00 1.00 1.00 4\n", + " 3 1.00 1.00 1.00 2\n", + " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", - " 6 0.50 1.00 0.67 1\n", - " 7 1.00 0.67 0.80 3\n", + " 6 0.33 1.00 0.50 1\n", + " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.92 0.87 0.87 31\n" + "avg / total 0.91 0.84 0.84 31\n" ] }, { @@ -752,24 +784,24 @@ " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 1 2 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 1.00 0.25 0.40 4\n", + " 1 1.00 0.50 0.67 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", - " 4 1.00 1.00 1.00 4\n", + " 3 1.00 1.00 1.00 2\n", + " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.50 1.00 0.67 1\n", " 7 1.00 0.67 0.80 3\n", - " 8 0.75 0.75 0.75 4\n", + " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.89 0.84 0.82 31\n" + "avg / total 0.92 0.87 0.87 31\n" ] }, { @@ -777,7 +809,7 @@ "stream": "stdout", "text": [ "\n", - "[[1 0 2 0 0 0 0 0 0 1 0]\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", @@ -786,7 +818,7 @@ " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 1 2 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", @@ -823,7 +855,7 @@ " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 1.00 0.50 0.67 4\n", + " 1 1.00 0.25 0.40 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", @@ -832,10 +864,10 @@ " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", - " 8 1.00 0.75 0.86 4\n", + " 8 0.75 0.75 0.75 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.92 0.84 0.84 31\n" + "avg / total 0.89 0.81 0.79 31\n" ] }, { @@ -843,7 +875,7 @@ "stream": "stdout", "text": [ "\n", - "[[2 0 2 0 0 0 0 0 0 0 0]\n", + "[[1 0 2 0 0 0 0 0 0 1 0]\n", " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", @@ -889,7 +921,7 @@ " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 1.00 0.50 0.67 4\n", + " 1 1.00 0.25 0.40 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", @@ -898,10 +930,10 @@ " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", - " 8 1.00 0.75 0.86 
4\n", + " 8 0.75 0.75 0.75 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.91 0.84 0.84 31\n" + "avg / total 0.88 0.81 0.79 31\n" ] }, { @@ -909,7 +941,7 @@ "stream": "stdout", "text": [ "\n", - "[[2 0 2 0 0 0 0 0 0 0 0]\n", + "[[1 0 2 0 0 0 0 0 0 1 0]\n", " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", @@ -929,12 +961,12 @@ " 3 1.00 1.00 1.00 2\n", " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", + " 6 0.50 1.00 0.67 1\n", + " 7 1.00 0.67 0.80 3\n", " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.91 0.84 0.84 31\n" + "avg / total 0.92 0.87 0.87 31\n" ] }, { @@ -950,7 +982,7 @@ " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 0 0 0 1 2 0 0]\n", " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", @@ -959,15 +991,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 1.00 1.00 1.00 2\n", - " 4 0.80 1.00 0.89 4\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", + " 6 0.50 1.00 0.67 1\n", + " 7 1.00 0.67 0.80 3\n", " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.91 0.84 0.84 31\n" + "avg / total 0.92 0.87 0.87 31\n" ] }, { @@ -983,8 +1015,8 @@ " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 0 1 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 1 2 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", @@ -993,14 +1025,14 @@ " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", " 3 1.00 1.00 1.00 2\n", - " 4 1.00 1.00 1.00 4\n", + " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", - " 8 1.00 1.00 1.00 4\n", + " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.94 0.87 0.87 31\n" + "avg / total 0.91 0.84 0.84 31\n" ] }, { @@ -1017,23 +1049,23 @@ " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 0 0 0 0 0 4 0]\n", + " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 1.00 0.25 0.40 4\n", + " 1 1.00 0.50 0.67 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", - " 4 1.00 1.00 1.00 4\n", + " 3 1.00 1.00 1.00 2\n", + " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", - " 8 0.75 0.75 0.75 4\n", + " 6 0.50 1.00 0.67 1\n", + " 7 1.00 0.67 0.80 3\n", + " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.89 0.81 0.79 31\n" + "avg / total 0.92 0.87 0.87 31\n" ] }, { @@ -1041,7 +1073,7 @@ "stream": "stdout", "text": [ "\n", - "[[1 0 2 0 0 0 0 0 0 1 0]\n", + "[[2 0 2 0 0 0 0 0 0 0 0]\n", " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", @@ -1049,8 +1081,8 @@ " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 1 2 0 0]\n", + " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", @@ -1091,15 +1123,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 
1.00 1.00 1.00 3\n", - " 3 1.00 1.00 1.00 2\n", - " 4 0.80 1.00 0.89 4\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.91 0.84 0.84 31\n" + "avg / total 0.92 0.84 0.84 31\n" ] }, { @@ -1116,7 +1148,7 @@ " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 0 1 0 0 0 3 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", @@ -1157,15 +1189,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 1.00 1.00 1.00 2\n", - " 4 0.80 1.00 0.89 4\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.91 0.84 0.84 31\n" + "avg / total 0.92 0.84 0.84 31\n" ] }, { @@ -1182,7 +1214,7 @@ " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 0 1 0 0 0 3 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", @@ -1223,15 +1255,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", - " 4 1.00 1.00 1.00 4\n", + " 3 1.00 1.00 1.00 2\n", + " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.92 0.84 0.84 31\n" + "avg / total 0.91 0.84 0.84 31\n" ] }, { @@ -1248,7 +1280,7 @@ " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", @@ -1257,14 +1289,14 @@ " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", " 3 1.00 1.00 1.00 2\n", - " 4 0.80 1.00 0.89 4\n", + " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", - " 9 1.00 1.00 1.00 3\n", + " 9 0.75 1.00 0.86 3\n", "\n", - "avg / total 0.91 0.84 0.84 31\n" + "avg / total 0.92 0.84 0.84 31\n" ] }, { @@ -1281,7 +1313,7 @@ " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 0 1 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 3 1]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", @@ -1355,13 +1387,13 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", + " 3 1.00 1.00 1.00 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", - " 9 1.00 1.00 1.00 3\n", + " 9 0.75 1.00 0.86 3\n", "\n", "avg / total 0.92 0.84 0.84 31\n" ] @@ -1380,7 +1412,7 @@ " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 3 1]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", @@ -1388,15 +1420,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", - " 4 1.00 1.00 1.00 4\n", + " 3 1.00 1.00 1.00 2\n", + " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - 
"avg / total 0.92 0.84 0.84 31\n" + "avg / total 0.91 0.84 0.84 31\n" ] }, { @@ -1413,7 +1445,7 @@ " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", @@ -1421,15 +1453,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 1.00 1.00 1.00 2\n", - " 4 0.80 1.00 0.89 4\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", + " 6 0.50 1.00 0.67 1\n", + " 7 1.00 0.67 0.80 3\n", " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.91 0.84 0.84 31\n" + "avg / total 0.92 0.87 0.87 31\n" ] }, { @@ -1445,8 +1477,8 @@ " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 0 1 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 1 2 0 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", @@ -1454,15 +1486,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 1.00 1.00 1.00 2\n", - " 4 0.80 1.00 0.89 4\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.91 0.84 0.84 31\n" + "avg / total 0.92 0.84 0.84 31\n" ] }, { @@ -1479,7 +1511,7 @@ " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 0 1 0 0 0 3 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", @@ -1487,15 +1519,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", - " 4 1.00 1.00 1.00 4\n", + " 3 1.00 1.00 1.00 2\n", + " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.92 0.84 0.84 31\n" + "avg / total 0.91 0.84 0.84 31\n" ] }, { @@ -1512,7 +1544,7 @@ " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", @@ -1553,15 +1585,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", - " 4 1.00 1.00 1.00 4\n", + " 3 1.00 1.00 1.00 2\n", + " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", + " 6 0.50 1.00 0.67 1\n", + " 7 1.00 0.67 0.80 3\n", " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.92 0.84 0.84 31\n" + "avg / total 0.92 0.87 0.87 31\n" ] }, { @@ -1577,12 +1609,12 @@ " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 1 2 0 0]\n", + " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", - " 1 1.00 0.50 0.67 4\n", + " 1 1.00 0.25 0.40 4\n", " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", @@ -1591,10 +1623,10 @@ " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", - " 8 1.00 0.75 0.86 4\n", + " 8 0.75 0.75 0.75 4\n", " 9 
1.00 1.00 1.00 3\n", "\n", - "avg / total 0.92 0.84 0.84 31\n" + "avg / total 0.89 0.81 0.79 31\n" ] }, { @@ -1602,7 +1634,7 @@ "stream": "stdout", "text": [ "\n", - "[[2 0 2 0 0 0 0 0 0 0 0]\n", + "[[1 0 2 0 0 0 0 0 0 1 0]\n", " [0 2 0 0 0 0 0 0 0 0 0]\n", " [0 0 3 0 0 0 0 0 0 0 0]\n", " [0 0 0 3 0 0 0 0 0 0 0]\n", @@ -1619,15 +1651,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", - " 4 1.00 1.00 1.00 4\n", + " 3 1.00 1.00 1.00 2\n", + " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.92 0.84 0.84 31\n" + "avg / total 0.91 0.84 0.84 31\n" ] }, { @@ -1644,7 +1676,7 @@ " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", @@ -1688,12 +1720,12 @@ " 3 1.00 1.00 1.00 2\n", " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", + " 6 0.50 1.00 0.67 1\n", + " 7 1.00 0.67 0.80 3\n", " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.91 0.84 0.84 31\n" + "avg / total 0.92 0.87 0.87 31\n" ] }, { @@ -1709,7 +1741,7 @@ " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 0 0 0 1 2 0 0]\n", " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", @@ -1718,15 +1750,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", - " 4 1.00 1.00 1.00 4\n", + " 3 1.00 1.00 1.00 2\n", + " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.92 0.84 0.84 31\n" + "avg / total 0.91 0.84 0.84 31\n" ] }, { @@ -1743,7 +1775,7 @@ " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", @@ -1752,14 +1784,14 @@ " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", " 3 1.00 1.00 1.00 2\n", - " 4 1.00 1.00 1.00 4\n", + " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", - " 9 0.75 1.00 0.86 3\n", + " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.92 0.84 0.84 31\n" + "avg / total 0.91 0.84 0.84 31\n" ] }, { @@ -1776,7 +1808,7 @@ " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 0 0 0 0 0 3 1]\n", + " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", @@ -1784,15 +1816,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", - " 4 1.00 1.00 1.00 4\n", + " 3 1.00 1.00 1.00 2\n", + " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.92 0.84 0.84 31\n" + "avg / total 0.91 0.84 0.84 31\n" ] }, { @@ -1809,7 +1841,7 @@ " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " 
precision recall f1-score support\n", "\n", @@ -1817,15 +1849,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 1.00 1.00 1.00 2\n", - " 4 0.80 1.00 0.89 4\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.91 0.84 0.84 31\n" + "avg / total 0.92 0.84 0.84 31\n" ] }, { @@ -1842,7 +1874,7 @@ " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 0 1 0 0 0 3 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", @@ -1952,12 +1984,12 @@ " 3 1.00 1.00 1.00 2\n", " 4 0.80 1.00 0.89 4\n", " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", + " 6 0.50 1.00 0.67 1\n", + " 7 1.00 0.67 0.80 3\n", " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.91 0.84 0.84 31\n" + "avg / total 0.92 0.87 0.87 31\n" ] }, { @@ -1973,7 +2005,7 @@ " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", + " [0 0 0 0 0 0 0 1 2 0 0]\n", " [0 0 0 0 0 1 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", @@ -1982,15 +2014,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 1.00 1.00 1.00 2\n", - " 4 0.80 1.00 0.89 4\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.91 0.84 0.84 31\n" + "avg / total 0.92 0.84 0.84 31\n" ] }, { @@ -2007,7 +2039,7 @@ " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 0 1 0 0 0 3 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", @@ -2016,14 +2048,14 @@ " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", " 3 1.00 1.00 1.00 2\n", - " 4 0.80 1.00 0.89 4\n", + " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", - " 6 0.33 1.00 0.50 1\n", - " 7 1.00 0.33 0.50 3\n", + " 6 0.50 1.00 0.67 1\n", + " 7 1.00 0.67 0.80 3\n", " 8 1.00 0.75 0.86 4\n", - " 9 1.00 1.00 1.00 3\n", + " 9 0.75 1.00 0.86 3\n", "\n", - "avg / total 0.91 0.84 0.84 31\n" + "avg / total 0.92 0.87 0.87 31\n" ] }, { @@ -2039,8 +2071,8 @@ " [0 0 0 0 0 4 0 0 0 0 0]\n", " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 0 1 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 1 2 0 0]\n", + " [0 0 0 0 0 0 0 0 0 3 1]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", @@ -2048,13 +2080,13 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 0.67 1.00 0.80 2\n", + " 3 1.00 1.00 1.00 2\n", " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", - " 9 1.00 1.00 1.00 3\n", + " 9 0.75 1.00 0.86 3\n", "\n", "avg / total 0.92 0.84 0.84 31\n" ] @@ -2073,7 +2105,7 @@ " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 1 0 0 0 0 3 0]\n", + " [0 0 0 0 0 0 0 0 0 3 1]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n", " precision recall f1-score support\n", "\n", @@ -2081,15 +2113,15 @@ " 10 1.00 1.00 1.00 2\n", " 11 0.60 1.00 0.75 3\n", " 2 1.00 1.00 1.00 3\n", - " 3 1.00 1.00 1.00 2\n", - " 4 0.80 1.00 0.89 
4\n", + " 3 0.67 1.00 0.80 2\n", + " 4 1.00 1.00 1.00 4\n", " 5 1.00 1.00 1.00 2\n", " 6 0.33 1.00 0.50 1\n", " 7 1.00 0.33 0.50 3\n", " 8 1.00 0.75 0.86 4\n", " 9 1.00 1.00 1.00 3\n", "\n", - "avg / total 0.91 0.84 0.84 31\n" + "avg / total 0.92 0.84 0.84 31\n" ] }, { @@ -2106,69 +2138,69 @@ " [0 0 0 0 0 0 2 0 0 0 0]\n", " [0 0 0 0 0 0 0 1 0 0 0]\n", " [0 0 0 0 0 0 0 2 1 0 0]\n", - " [0 0 0 0 0 1 0 0 0 3 0]\n", + " [0 0 0 0 1 0 0 0 0 3 0]\n", " [0 0 0 0 0 0 0 0 0 0 3]]\n" ] }, { "metadata": {}, "output_type": "pyout", - "prompt_number": 5, + "prompt_number": 15, "text": [ - "[(20, 0.86991807475678429),\n", - " (21, 0.87043010752688177),\n", - " (22, 0.82219662058371723),\n", - " (23, 0.793010752688172),\n", - " (24, 0.83550947260624675),\n", - " (25, 0.86991807475678429),\n", + "[(20, 0.79124423963133639),\n", + " (21, 0.83694316436251914),\n", + " (22, 0.83694316436251914),\n", + " (23, 0.78727598566308243),\n", + " (24, 0.86991807475678429),\n", + " (25, 0.86827956989247301),\n", " (26, 0.83550947260624675),\n", " (27, 0.83550947260624675),\n", - " (28, 0.87135176651305668),\n", - " (29, 0.82311827956989247),\n", + " (28, 0.83550947260624675),\n", + " (29, 0.86991807475678429),\n", " (30, 0.83550947260624675),\n", - " (31, 0.83694316436251914),\n", + " (31, 0.78870967741935483),\n", " (32, 0.83694316436251914),\n", - " (33, 0.83550947260624675),\n", - " (34, 0.83550947260624675),\n", - " (35, 0.83550947260624675),\n", - " (36, 0.86827956989247301),\n", - " (37, 0.78870967741935483),\n", + " (33, 0.78727598566308243),\n", + " (34, 0.86991807475678429),\n", + " (35, 0.87135176651305668),\n", + " (36, 0.83550947260624675),\n", + " (37, 0.86991807475678429),\n", " (38, 0.83550947260624675),\n", - " (39, 0.83550947260624675),\n", + " (39, 0.83694316436251914),\n", " (40, 0.83550947260624675),\n", - " (41, 0.83550947260624675),\n", + " (41, 0.83694316436251914),\n", " (42, 0.83550947260624675),\n", - " (43, 0.83694316436251914),\n", - " (44, 0.83550947260624675),\n", + " (43, 0.83550947260624675),\n", + " (44, 0.83602150537634401),\n", " (45, 0.83550947260624675),\n", " (46, 0.83602150537634401),\n", - " (47, 0.83694316436251914),\n", - " (48, 0.83694316436251914),\n", - " (49, 0.83550947260624675),\n", - " (50, 0.83550947260624675),\n", - " (51, 0.83694316436251914),\n", + " (47, 0.83602150537634401),\n", + " (48, 0.83550947260624675),\n", + " (49, 0.87135176651305668),\n", + " (50, 0.83694316436251914),\n", + " (51, 0.83550947260624675),\n", " (52, 0.83694316436251914),\n", - " (53, 0.83694316436251914),\n", - " (54, 0.83694316436251914),\n", - " (55, 0.83694316436251914),\n", + " (53, 0.86991807475678429),\n", + " (54, 0.78870967741935483),\n", + " (55, 0.83550947260624675),\n", " (56, 0.83550947260624675),\n", - " (57, 0.83550947260624675),\n", - " (58, 0.83694316436251914),\n", - " (59, 0.83602150537634401),\n", - " (60, 0.83694316436251914),\n", - " (61, 0.83550947260624675),\n", + " (57, 0.86991807475678429),\n", + " (58, 0.83550947260624675),\n", + " (59, 0.83550947260624675),\n", + " (60, 0.83550947260624675),\n", + " (61, 0.83694316436251914),\n", " (62, 0.83550947260624675),\n", " (63, 0.83550947260624675),\n", " (64, 0.83550947260624675),\n", - " (65, 0.83550947260624675),\n", - " (66, 0.83550947260624675),\n", - " (67, 0.83550947260624675),\n", - " (68, 0.83694316436251914),\n", - " (69, 0.83550947260624675)]" + " (65, 0.86991807475678429),\n", + " (66, 0.83694316436251914),\n", + " (67, 0.87043010752688177),\n", + " (68, 0.83602150537634401),\n", + " (69, 0.83694316436251914)]" 
] } ], - "prompt_number": 5 + "prompt_number": 15 }, { "cell_type": "code", @@ -2184,13 +2216,13 @@ { "metadata": {}, "output_type": "display_data", - "png": "iVBORw0KGgoAAAANSUhEUgAAAXwAAAECCAYAAAD9z2x7AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3XmUpFd53/Fv7b13z9IajWbTfg1CQpuNFmyQQQhLYIND\n/gDHPhbBwdjkkBwSjBUfkjh2nBNHiNiODJENkmMnBDnBDptQjMGEkZBkISEE6I4kJM1IM5rp0cx0\ndVd17W/+qPetqqmpvWt536rf5xwOqq7q6tt3qp966nnvfW7IcRxERGT8hUc9ABERGQ4FfBGRCaGA\nLyIyIRTwRUQmhAK+iMiEUMAXEZkQ0VZ3GmPCwJ3AZUAWeJ+19tma+98J3AY4wKettZ90v+dPgIuB\nEvAr1lo7oPGLiEiH2mX47wDi1trrgI8Ct9fd/3HgRuB64MPGmCXgLcCstfb1wG8Dv9vfIYuISC/a\nBfzrgfsArLUPAVfX3Z8HloAZIEQ5o98AFo0xIWARyPVzwCIi0pt2AX8BSNbcLrolG8/twKPA94Av\nWGuTwH5gCngK+BTwh/0broiI9KpdwE8C87WPt9aWAIwxe4EPAvuAc4Edxph3AR8B9ltrDXA5cI8x\nJt7vgYuISHdaXrSlnK2/HbjXGHMN8ETNfVNAEchaa0vGmGPAFmCW6qeCk0AMiLT6IY7jOKFQqIfh\ni4hMtK4CZ6hV8zS3Du+t0gG4FbgKmLPW3mWM+efAe4AM8AzwK8Ac8BlgO+Vg/wlr7WfbjMNZWVnr\nZtxja3l5Hs1FmeaiSnNRpbmoWl6e71/AHyIFfJdezFWaiyrNRZXmoqrbgK+NVyIiE0IBX0RkQijg\ni4hMCAV8EZEJoYAvIjIhFPBFRCaEAr6IyIRQwBcRmRAK+CIiE0IBX0RkQijgi4hMCAV8EZEJoYAv\nIjIhFPBFRCaEAr6IyIRQwB+Aw8dTfPRTD3LwqHp2i4h/KOAPwFMHT3Ls5Ab20KlRD0VEpKLdmbZD\ns5Et8NLxFC+trLv/n+JEMsN7bryYS8/fNurhdSWZygGwkSmMeCQiIlW+CPjv/Z37WTm50fC+7z93\nYuAB/+jJNJ//5o/4pZsMM1OxTT/f2kYegHRWAV9E/MMXAb9YdLjk3C3sWp5j1/ZZzlmeJRYJ828+\n8wjpIWTJD//gKA//8BhXXLTM6169Y9PPt5Z2A74yfBHxEV8E/Hv+9U1nHEq8PsQsOZkq/6xkOteX\n51tzSzrK8EXET3x70XYqHgEgnckP/Gd5gX6tXwHfe7MawtiHqeQ4OI4z6mGISI98G/CjkTCJWGQo\nWbIX6L1Mf7MqF22zxb48n1/86Rd/wL+9+5FRD0NEeuSLkk4zM1PRodTBk27N3QvUm1EqOaQq5ajx\nyvCfeWmV46sZHMchFAqNejgi0iXfZvgAM4koG0Op4fevpLO+kccreozbRdtkKo/jQKGoso5IEPk6\n4E9PRUlnCwOtGxdLpcoF4n5ctK190xj02IcpkyuQzZdLVPnCeJWqRCaFrwP+TCKK40AmN7gAs56u\nll36UcNfq3m+QY99mGrLXdl8aYQjEZFe+TvgT5UvMQyyrLN6WiArkt1kgK7/lDCMktQw1M5TThm+\nSCD5O+AnygF/kLXw2oy8fHtzZR3v+eamyzt2x6WOX5vh55XhiwSSvwO+m+EPcmmml5FPJ8rr/lc3\nHfDL379j6zQwPpuvTvskpAxfJJD8HfATg8+Svcx11/Y5ANY2Wcf3MvwdW2aA8czwc8rwRQLJ3wG/\nkuEPbj27l+HvXp497fZmn2/HVjfgj8la/NoMX6t0RILJ3wF/GDV8N6Pftexm+H2q4Z+15JZ0lOGL\niE/4OuBPD7GG72X4q5vcbbuWzjE3HWNuxi1HjUkNP1m3mklEgsfXAX8YGX4ylSMWDbPsZuT1q3a6\ntZbOMz8TG8rYh+n0ko4yfJEgCkbAH2CWvJbOsTATZ34mDmyun06xVCK1kWd+OjaUPQTD4jhOXUlH\nGb5IEPk64HslnUEdFeg4DqupPAuzMWLRMDOJ6KYu2q5vFHCA+dn4UN6shiWTK5IrlIhGyg3Tssrw\nRQKpZbdMY0wYuBO4DMgC77PWPltz/zuB2wAH+LS19pPu138TeDsQA/7IWntPL4MbdNDM5IoUiqVK\ndj8/G68cXtIL74Lv/Eyc6TEq6XjZ/bbFaY6eSGuVjkhAtcvw3wHErbXXAR8Fbq+7/+PAjcD1wIeN\nMYvGmDcC17rf80bg/F4HF42EicfCAwv4Xja/4Ab8hZkYaxt5SqXeGp55bxYLM7Gh9vMfNK9+v7w4\nBWiVjkhQtQv41wP3AVhrHwKurrs/DywBM4DXIP0m4HvGmL8CvgD8n80McCYRHVhJx8tcF2bjlf93\nnOrxit3yTrryPjHMTA1u7MPkzdP2SsBXhi8SRO0C/gKQrLlddMs8ntuBR4HvAV+w1q4C24GrgHcB\nvwr8xWYGODMVG1yG767BX3CXUHqZfq91fG+Fz7z7fDOJ6Hhl+O5Kppxq+CKB1O7EqyQwX3M7bK0t\nARhj9gIfBPYBaeDPjTHvAo4DP7TWFoADxpiMMWa7tfZ4qx+0vDzf8OuLcwlePpFm+/a5vp+y5Dzz\nCgC7zl5geXmes93NV+FYtOl4WvFC+55zFllenmdhLsGRHsbey88epIJb4Tp/7xYAQuHw0Mbot7kY\nJc1FleaiN+0C/n7KF1/vNcZcAzxRc98UUASy1tqSMeYY5fLOt4APAR83xpwDzAKvtBvIyspaw6/H\nIiFKJYcXD59iKt7fExkPH3U/vJRKrKysEXXPqjp4+BTnLE11/XxHj6cAKOYKrKys9TT25eX5pnMx\nKkdW1gGIu+9Za6nsUMbox7kYFc1Fleaiqts3vnZR6PPAjcaY/e7tW40x7wbmrLV3GWPuAR4wxmSA\nZ4C7rbUFY8xPGWMeplwy+jVrbc/HPtVuYOp3wK/U8L1VOu7/99pAba3u+QY59mFSDV9kPLSMQm6g\n/kDdlw/U3H8HcEeD7/uNvoyO09srbO3Xk7q8w8srNfzZzdbwc4So9sIf5NiHaTWVIxoJMTcdIxoJ\nqYYvElC+3ngFg22vsJZyA3R9wO9xLX4ynWd2OkY4XK59jEt7hWQqy8JsnFAoRDwa0bJMkYDyf8Af\nYAO1ZDrH7HSMSLg8DV6m32s/nbV0rrJCB4ZzgMugebuRF903w1gsrCMORQLK9wHf27E6iPXsyVSu\nktV7PysaCfVU0imWSqQyhUr9HqoZfpDX4m9kCxSKpcrvlYhGVMMXCSjfB/xBtVcoFL0AXc3IQ6EQ\n8zPxnko663Vr8KG8hwCCneGv1m1Oi8fC6pYpElD+D/
heWSTT35OjvLJNbYYP5RU2vWT41U1XZ2b4\nQQ743pvf4pxb0olGyKqGLxJI/g/4icFkybWNzmrNz8bI5Utkc92VLZKV5zuzhh/kks5q3VLTRCxM\noVjqud+QiIyO/wP+gPrKVxqn1WX4i25gW+0yy2+d4Qf3XNtqhp8AIB6LADoERSSI/B/wB7S0MVnT\n2bLW/Ky3+aq7gN/oDaSyDn8sMvzyPMWi5ZdMVit1RALH9wF/ekB18GrjtDNr+ND95qtKhj9dU9IZ\nqxq+m+FHyxm+VuqIBI/vA34sGiYeDfc9S15rUtJZmC0H7G5X6qx7Nfya54tGBjP2YWpUwweVdESC\nyPcBH8qlkf5n+GcGaKjN8LuruycbLMuEwYx9mJKpHNFImOlEObOPVTJ8BXyRoAlEwJ9JRPtfw6/r\no+OpNlDrtqTjtmmYOv35BjH2YVpN5Vh02ypAeR0+QFYlHZHACUbAn4qykS3gOP1bCphM54jHwmd0\nsey1gVoynWduptpHxzOIsQ+L4zispU/fjaxVOiLBFYyAn4hRLDl9LSOspXNnXLCFakmmlxp+/Zp+\nGMzYhyWdLVAoOpU+OgBxd5WOLtqKBE8wAn6fm5A5jkMy1ThARyNhZqeiXTVQa9SmwRPkBmqr62de\n2PYyfC3LFAmeQAT8ytLMPrVX2MgWz8hca83PxCurUzrhHXo+1zDDD27AryzJbJDh5wP4iUVk0gUi\n4Pc7aK41aINQa2E2TmojT7HUWVBba3IBGILdXqG+cRpUM3wdgiISPMEI+H3esdookNVamInhAOsb\nnf28ZJO+PBDs9gqtMnz1xBcJnmAE/AFl+I0u2kJ1bX6nF25bfWIIcnuFhhl+5aKtMnyRoAlGwO9z\nA7XKJqnZxiWdxS7bK6w1adMAY1jDj6m1gkhQBSPg97mBWjLVWYbf6eartY3mGX6Qz7VVDV9kvAQi\n4E/3eWljsk1JZ6HLtfheI7ZGNfx+j32Ykqkc8WiYqXik8jWtwxcJrkAE/H5nyWvtLtrOdtdPp1UN\nP8gZftLdZeu1VQBl+CJBFoyA3+ezYZMpt+/NdJNlmd3W8DfyhEIw2+D5gnqubcndnFa/V8HrpaMM\nXyR4ghHw3U6NG33aeNWs743HK810vEonlWN+OkY4dObz9Xvsw5LOFCiWnDM+BVWXZSrDFwmaQAT8\nWDRCLBru67LMZuUcgOlEhGgkXCnVtH++fMP6PfR/7MOyup4FOCPDj0bChFCGLxJEgQj40L82w9W+\nN80DfigUYmE2VrkY2+750tlC0127EMwWyckm1zlCoRDxWEQZvkgABSfg9+kgkbUmB5XUm5+Jk0zn\n2rY1bnR4eb1+jX2YVhuswffEomFl+CIBFJiAP+1myZvtK98sc623OBsnXyiRybUObO127UI5ww9a\nT/xW85SIhbXTViSAAhPwZxLRcl/5TZYS2q3B93ifANrV8Tv5xDA9FaVQdAJ1aEg1w0+ccV88FiGv\nXjoigROcgN+nnjSdZviVpZlt6vjtOm9CMNsrVOfpzN8rFg2TDdCbl4iUBSfg9yloVlsZtwn4HR51\nmOyohu+uxQ/QhdvVdPM3xngsQi5fDFSJSkQCFPCn+9RArdLKuEnjNE+nm686yfCn3bX4gcrw13Mk\nYpEzzvwFSETDOA4USwr4IkESmIDfrxYF7Rqnebw3hHabr9ZaZMKeILZXWE3nGpZzoLy3ALQWXyRo\nghPwKy0KNrdjtdOLtt79a21r+F2UdAJyCErJcVhL5RtesIVqe4WsVuqIBEpwAn6iP0cFrqXyJGIR\nEjUdIBvpvIafIxwKVS4qN9KvsQ/L+kaeknNmWwVP3M3wtVJHJFiaRynAGBMG7gQuA7LA+6y1z9bc\n/07gNsABPm2t/WTNfWcBjwJvstYe2OxAZ/rUZjiZzrXddAXVxmrtSzpuX54GfXQ8/Rr7sCTXm2+6\ngtoGasrwRYKkXYb/DiBurb0O+Chwe939HwduBK4HPmyMWQQwxsSATwGpfg20H3Vwp0kHyEaikTCz\nU9EOLtrm276BBK2G32qFDqhFskhQtQv41wP3AVhrHwKurrs/DywB00CIcqYP8PvAHwNH+jXQfmTJ\nG9lyB8hW9fZaC7PxSo2+kXyhxEa2dV8eGMMMX4egiARSu4C/ACRrbhfdMo/ndsplmyeBL1hrk8aY\nXwZWrLX3u49pXuvoQj+y5NUWm4kaWZiJs76Rp1BsnMmub3TWlydwGX6bzWnVDF8BXyRI2gX8JDBf\n+3hrbQnAGLMX+CCwDzgX2GGMeRdwK3CjMebrwOXAPcaYHZsdaD+y5Mqmqw5KOlA929YL7PW8+n67\nTwyBy/DTnWb4KumIBEnLi7bAfuDtwL3GmGuAJ2rumwKKQNZaWzLGHAOWrLVv8B7gBv33W2uPthvI\n8vJ8u4cQjYTJF0sdPbaRA0fWANh51nxHz3H2ttnyz03EGj7+0ImN8uOW59o+XywaplB0Ovq5vf5+\n/eK1TTh3zxaW3TmotXVL+WuJ6fjAxzrqufATzUWV5qI37QL+5yln6/vd27caY94NzFlr7zLG3AM8\nYIzJAM8Ad/c6kJWVtbaPmUlEWF3PdfTYRl48sgpAuOR09BxuIsvzL55kLnbmhyHv+SJO++ebTkRZ\nXc+2fdzy8nzPv1+/HDuRBqCQzTccSy5T/gTwyonUQMfqh7nwC81Fleaiqts3vpYB31rrAB+o+/KB\nmvvvAO5o8f03dDWaNqanYps6KrDTxmmedpuv1lLt2yp4ZhLB6Ym/up5jKh4hEWu8V0GrdESCKTAb\nr2DzQbPaOK3Di7ZtNl+tbbTfZeuZDtCpV8k2R0BqlY5IMAUr4Ff6yvcWaCoXWbvM8Jttvkp2k+FP\nRSkUS77fnVoqOaylW+9V0CodkWBqV8P3ldrljYtzrVsjNJJM5wiFqrto26k0UGuW4XfQR8ez2bEf\nP7VBpklGPZOIsnVhquvnbGRtI4/jtC57BXGnbSZXIBQKNS1T9dOJZKbpJ9F4LMJZS9N9+TmO43Ds\n1MbQDtbp5+us5DisrudYmosTarFLvV4ylWNpy3glGqWSQzpb6DgubUawAn7N8sbFucaNvVpJpvPM\nz8RbtkGoVanhN9l8tZbOEQm37qPj6WXsjuPwwxdO8sUHnuepg6daPvbyC7dzy7X7uGDXYkfP3czq\nehZoviQTarplBqCG/8pqhvseOsg3nzhMJBzihit38ZYf39vRbutOlRyH5w4neezp4zz29ApHXkm3\nfPxFuxe55dpzufT8rV0FO4/jOHz3mVf44oPP86PDybaP76fXXrCNW647lwt7fJ0ViiUe/uFRvvTg\nCxx5Jc22hSkuv2g7V1y0nYv3LBGNnF50KDkOL7y8xmNPH+fxp1d4cSXFlvkEN169hzdecU7D9t1B\nUSiWePDJl/nyt1/g6MkNLjlvK2+7dh8X71nq6XXRiUDN1mY3MCVTObYtdP5GMRWPEIuGKxuR6q2l\n88xNt+6j4
+lm7CXH4btPH+eLD77Ac0fKf9Cv2reFndtmGj7+4NF1Hn/mOI8/c5xX7dvCLdfu41X7\ntvT0okl20O45EYAa/pFXUnzl2wd58PsvUyw5bFuYolAs8ZVvH+Rv/v5Ffuqyc7jpdXvYvthbtp3J\nFXjq4Ckef3qFx595pVLei0fDXH7hdrY2eZ0dO7nBk8+d4BP3fpe9O+Z427XncqVZ7ug1VCo5PPzU\nUb784Au8uFLuWnLZBdvYvtifrLudg0fX+e6zr/DdZ1/hx/Yucct15/LqDl9n+UKRb33vZb7y7Rc4\nvpohHArxqn1beP7lNb726It87dEXmU5EueyCbVxx0Xam4tHya/rpFU65O7+jkbD7PUk+9/Vn+NKD\nz/Pmq/fwpqt2DyU77pdsvsj/++5h7nv4ICeSWSLhEPt2zPP9507w/edOcOHuRd527T4uPX9b3wN/\nsAL+JjYweW0Q5mc6X8YUCoVYmIk1Pdd2bSPHtoXOAkYnYy+WSnzjOy/y2fuf4iX3D/qqi5e5+dp9\nnLdzoen3OY7DgUOn+OKDL/D9507wwxdOct7OBd523T5ee+H2jj/RQGcrmfy8Sufg0TW++OALPPrU\nMRxg57YZbr5mH6979Q4cx+FbTxzhKw8d5GvfeZFvPP4S11yyg5uv2cfOBvsNoByojryS5qXjKV5a\nSXH4eIoXV9Y5vpqpPGZ+JsbrL9vJFRdu59XnbW1bNjp4dI0vf/sFHvnhMe78qydPG2N9hlseQ4mv\nfvsFPvc3lmMnNwiHQlzrjnvX8tym5qtbBw6d4osPPM+Tz53gqYOPl19n1+7jtRc1fp1lcgW+8dhh\nvvrwQVZTOaKRMD995S7e+hN72b40TaFYwh46xeNuBv/QD47y0A+q23Zmp6Jc95qzueKi7Vxy3lam\n4lGmZxP8z68+xf/9+0P89bee476HD3LDFbt4y4/vYamHT/7Dks4U+PpjL3L/I4dYS+eJR8PcePUe\nbvqJPWxdmOLZl1b50oMv8Pgzx/nEvU+w96w5brnuXK66eJlwuD+BP+STY+qcTtbVfv07L/Lf7j/A\n+3/2El736u42755IZvgXdz7A6169g/f/7CUdf9+/u+cRnjuyRjRy5oQXig6v2reFf/nuKzY9dsdx\n+I///THsoVOEQyFe9+od3HztPnZtbxyImnnuSJIvP/gCjx5YASASDtFNklAqlT9h/NOfv5QrLl5u\n+JiNbIFfv+ObXHbBNv7ZP3xty+d79vAqn/jcd8n29Gmgtj1TZwrF8uP37Zjnlmv3Ncye68sKQMN/\nX4Bi0TljBAszMXYtz3Hu2fNcftF2Ljhnsac/yJdPpPnyt1/gwSfLn0LCoRDhBssovH+TaCTE6y/d\nyVuv2de36wC9ev7lJF96oP3rrFhycJzyp+UbrtzFW67e07Sk6TgOh46t8/jTx8nmi1x2wTYu3L1I\npG5SvHX4mVyBv3u8nCmvrucIAZEm/45+4M3FdCLKm67azZuv3t2wD9ehY+t86cHneeSpYzhOuYz2\noSZ/Z8vL8139woHK8Kc3keFX+t50+dHvp6/czTcee6nxnSF4w+XndPQ87ca+cmoDe+gUZu8W3nvz\nj7Hc4x/0eTsX+PWfv5SXjqf46kMHOfJK9w1LZ6djXLx3qen91Yu27YP4sy8lSWUKnL11htkOrnXU\nisYiFLp8o5ibjvGmq3ZzyXnN6+PRSJjrXrOTay45m8cOrPCNxw+TafLvEo2E2blthl3Lc+zaPss5\ny7Ntm+V16uytM7z35lfxc9efx1cfOchzLerxl160zE++5my2zPsjgz337JrX2cMHOXK88essFArx\nmvO28qardzM71fpvLxQKsXfHPHt3dPYpfCoe5aaf2MtPX7mb/U8e4aHvH23a98oPQuEQr71gGzdc\nsbvldb89Z83xqz/3Gt75k2nuf+RQR6sAOxWogD+T8A4D737zVcoN+LNdBvzrL93J9Zfu7Prn1Ws3\ndnuofFH2jVft7jnY19q1fZb33vKqTT9PI5FwmGgk1NHqEO/3/aWbDD+2b0tXP2fQOyrDoRBXmbO4\nypw1sJ/RiW2LU7znzRe3fIxfd5fu2j7Le28ezOusU7FomDdevos3Xr5rpOPotx1bZ/jFm0xfnzNw\n6/Chtww/5V4s7TbL7Jd2Yz/gBvxLzt82tDFtRiwa6eiIQ+8idScrmURksAIV8Kc3cVTgeqa3DL9f\nKsccZhuXKA4cOsVMIsq+s5tfnPWTeCzc0carlAK+iG8EKuBXljb2kuF7JZ02dcRBqWT4DUo6J5IZ\nVk5luHjPUt+uxg9aPBruqKSz4f5beSUtERmdYAX8qd7X4VdKOtOjyTSnW7xZHXixXM65eE/zC6V+\nE49FOrpom87kCYVgKjH4Ha4i0lqgAn48GiYSDm0qw58bUYbvjb1ROerAoXKb5UAF/Giko3X4qWyB\nmUS0q70AIjIYgQr4oVC5jcHGZi7ajqiG7429YYZ/6BSJWIS9O4a7iWYzvJJOqc0+jnSmoPq9iE8E\nKuCD2yK5l5LORp4Q1esAo9Bo7Ml0jsPHU1y4a6HhLku/8nbb5tus1ElnCqrfi/hEcCKMq1mW3E4q\nk2dmKjrSi6KNxv50AMs5ULP5qsVKnUKxRDZfVIYv4hPBC/iJKPlC933lU5nCyFboeBqN3Vt/H7iA\n38FB5t6bmwK+iD8ELuBPu0E73WQ9ezOpjfzIVuh4Go39wKFTRCMhzj8nGOvvPZ0cguJdoB5lGU1E\nqgIX8Ktthjtvr5DLF8kVSr7I8KE69nSmwMFja5y/c6HSYz4o4l5P/A4y/FHPu4iUBS/g99BeYdQr\ndDz1Y3/mpVUch5aNyvyqkxp+yn1jm1ZJR8QXghfwe2ivUN1lO9rAUz/2oNbvocMavko6Ir4SvIDf\nU4Y/2rYKnvqxH3B7319wzuaOJRyFTmr46RE3rBOR0wUu4E/3cMzh+oZPSjo17RWy+SLPHUmy7+y5\nyu8UJFqlIxI8gQv4vTRQq2b4I16lU1PS+dHhJMWSE8hyDtRk+C366VRbI+uirYgfBC/g99BALTXi\n1sie2pJOkOv30Nm5tt5qJNXwRfwhcH+JPWX4bklnVI3TPDM15aiXT5TPUr1odzADfizafpWOSjoi\n/hLADL8ctLtpoFbN8Ee8SscdezKd49mXVtm9PMvciD919CrRQQ1/1KeMicjpghfwe7hoO+rDTzze\n2O3BU+QKpcCWcwBiHa7SiUbCgdtUJjKuAhfw4zGvJ37nO21HffiJxxv7uvsGFOSA39k6/LyyexEf\nCVzAD4VCTHfZIjm1kWc6ESESHu2v643dE+SAn/DaI7ep4at+L+IfgQv40H2L5FQmP/JyjscLgDu2\nTLM0lxjxaHoXa5PhO47j9sJXwBfxi2AG/ES0q9YK6xujb43s8QJgkLN7qC7LzDZZh5/LlyiWHK3B\nF/GRYAb8qSi5Qol8B2eq5gvlQzhGXb/3eBl+0AN+wm2e1uzfQEsyRf
wnmAHf27HaQVkn7ZM+Op6l\nuQQhwASwQ2ataCRMiOY7bb2lsAr4Iv4RyL/G2h2rC7Pxlo9d90lrZM8/eMMFvP7SnWxfnB71UDYl\nFAoRi4XJNsvw1SlTxHcC+dfoHYrtra9vxS+tkT1b5hNsmQ/uxdpa8WhEJR2RAGn512iMCQN3ApcB\nWeB91tpna+5/J3Ab4ACfttZ+0hgTAz4N7AMSwO9Ya7/Qz0F7Wf1qKtf2sX5pjTyO4rFw05KO30pp\nItK+hv8OIG6tvQ74KHB73f0fB24Ergc+bIxZAv4RsGKt/SngrcAf9XfIsDRfDvin1rNtH5va8Mem\nq3EUj0ZaBHyVdET8pl3Avx64D8Ba+xBwdd39eWAJmAZCQAn4HPCxmufvfP1kh7a469c7Cvhupjnq\nxmnjKB4LN+2WWW2NrIAv4hft/hoXgGTN7aIxJmyt9f7KbwceBVLA/7LWVh5rjJkH7gX+VR/HC8CS\nWwM/udZ5wPfLRdtxUs7wSziOQygUOu0+1fBF/Kddhp8E5msf7wV7Y8xe4IOUa/XnAjuMMe9y79sD\n/C3wZ9baz/Z70EuzXobfQQ1/Qx0bByUeC1NyHIol54z7UuqFL+I77f4a9wNvB+41xlwDPFFz3xRQ\nBLLW2pIx5hiwZIzZAdwP/Jq19uudDmR5eb79g2rMTkVZ38i3/b6CG4v27t7C1oWprn7GqHQ7F6My\n777xLizOnPEJqlgz7/MzrZfOthKUuRgGzUWV5qI37QL+54EbjTH73du3GmPeDcxZa+8yxtwDPGCM\nyQDPAPdodT7vAAAMnElEQVQA/wlYBD5mjPFq+T9jrc20+kErK2tdDXxxLsHxUxttv+/EqfJBI5lU\nlpUuOmyOyvLyfNdzMSpOqVzZO/zy6hl9gU4ly//c6bUMmVT70lsjQZqLQdNcVGkuqrp942sZ8K21\nDvCBui8fqLn/DuCOuvs/5P5voJbm4hw+niKXL1b6ujSynimQiEUqzb6kf+LR5ufapjIFphMRwuHQ\nGfeJyGgENgp6GeWpNmvxUxt5LckckHjMO+bwzJU66pQp4j+BDfjebtVTbVbq+Kk18ripZvgNAn42\nz3RC8y7iJ4EN+EsdrMUvFEtsZItaoTMglQy/rqRTKjmadxEfCnDAd3fbtsjwvbXgWoM/GJVDUOpK\nOhs5rcEX8aPgBvz59mvx/XJ4+bjyLpbXZ/gp7bIV8aXABnyvvcLJFiUdvxxePq6q59rWZfiVPjp6\noxXxk8AG/IXZOCFal3S8DF99dAbDK+lkC/UZvr9aUotIWWADfjQSZn423vKirfroDFazVTpe47Rp\nBXwRXwlswIfyhduT61kc58xeLqA+OoNWPdf29Ay/0jhN6/BFfCXgAT9BLl9eetmIDj8ZrEpJp0mG\nr3kX8ZfAB3xovha/eviJAs8gNFulk87qAHMRPwp0wPd22zZbqaOLh4MVb7JKR8syRfwp0AG/3ear\ndW+VjjL8gYhHG++03dDxhiK+FPCA36akk8kTi4ZbdtOU3nnzmlWGLxIIgQ74W9rstk1tFFTOGSAv\nw883qOFHwqHKxiwR8YdAB/xKht+kpJPK5HXBdoCatUdOZwpMJ6JnnHMrIqMV6IA/NxMjEg41LOmU\nSg7pTEFLAwcoEg4TCYfOXKWT0ScrET8KdMAPh0IszTXebZvOFnDQCp1Bi8ciZ2b42YLq9yI+FOiA\nD+Wyzqn1HKW63bZqqzAc8Wj4tAw/XyiSL5S0QkfEh8Yi4BdLDuvp0w8o9zZdqXHaYMVj4dMy/HRl\nhY7mXcRvgh/w5xsvzaxm+Mo0BykejZyW4Vf66KikI+I7wQ/47uark3UrdXT4yXDUZ/hagy/iX2MQ\n8Jtl+OqjMwzxaIR8oVS5hpLWLlsR3wp8wG+2+aqa4SvwDFKs0iK5nOWnM17jNL3RivhN4AO+l+HX\nl3TW1Rp5KBLR0ztmVg6O1xutiO+MTcA/o6SzoYu2wxCvy/BTKumI+FbgA/50IkIiFmlaw1enzMGK\nuRl+1s3wN7QsU8S3Ah/wQ5XdtmfW8NXAa/Aq/XTcU690+ImIfwU+4EO5rLOWylEoVpcHrmcKzE7H\n1MBrwBJ1h6CopCPiX2MR8LfMJ3CAZKqa5ac28rpwOASVc23dg8zTWocv4ltjEfArK3XcOn7JcdQa\neUji9at0MgXisTDRyFi8tETGylj8VdYfdZjJFnEc9dEZhkT9OvxsXkthRXxqPAJ+3eYrHV4+PPWr\ndNKZgur3Ij41HgG/bi2+WiMPT+0qnZLjkM4WmNYbrYgvjUfAnz99t63XGlkZ/uDFa1bpZHPlUtqs\nMnwRXxqPgD/r1vCV4Q+dd5B5Ll+szLtW6Ij401gE/HgswuxUtFrDV2vkofEy/GyhqMNPRHyuZSpm\njAkDdwKXAVngfdbaZ2vufydwG+AAn7bWfrLd9wzK0nyCE8lyhr9eaY2sTHPQvAw/ny+pNbKIz7XL\n8N8BxK211wEfBW6vu//jwI3A9cCHjTFL7vckWnzPQCzNJdjIFsjmisrwh8jL8HOFok67EvG5dgH/\neuA+AGvtQ8DVdffngSVgBghRzvSvB77S4nsGYou3UieVremUqYA/aNUafkm7bEV8rl3AXwCSNbeL\nbsnGczvwKPA94AvW2tUOvmcgluarm68qnTKV4Q9cNcMvVQ8/SWjeRfyoXSqWBOZrboettSUAY8xe\n4IPAPiAN/Lkx5l2tvqeV5eX5dg9paffZiwAUQ2GyhRLhcIi9u5cC2Txts3MxTAveAeahEETKwX/X\n2Qt9+x2CNBeDprmo0lz0pl3A3w+8HbjXGHMN8ETNfVNAEchaa0vGmGOUyzutvqeplZW1bsd+mhjl\nM1UPHl7l1FqGmUSU48fXN/Wco7C8PL/puRgmxz3Ldj2VZeVECoBcJteX3yFoczFImosqzUVVt298\n7QL+54EbjTH73du3GmPeDcxZa+8yxtwDPGCMyQDPAHdTfhM47Xu6GlGPqu0VyiUd1e+HIxQKEY+G\n3ZKOVumI+FnLv0xrrQN8oO7LB2ruvwO4o8G31n/PwNW2V0ht5FlenBr2ECZWPBYhVyixkdU6fBE/\nG4uNVwALszFCIXj5RJpiyVGGP0TxWLiy0zYETCV0ypiIH41NwI+EwyzMxjl8PA2oj84wxaLlDD+d\nLTAzFSUcwAvlIpNgbAI+lMs63jGH2nQ1PIloOcNPZwpMq34v4ltjFfC9zVegTVfDFI9FyLsXbbXp\nSsS/xuqv01upAyrpDFMsGqZYciiWivpkJeJjY5Xhe0cdgjL8YUrEqhdptSRTxL/GKuCfVtJRpjk0\n3qlXoD46In42VgH/tJKOWiMPTSyqgC8SBOMV8GsyfDVOG564SjoigTBWAX/LvFbpjEIiWhPw9UYr\n4ltjFfBnp6JEIyFCKNMcJpV0RIJhrAJ+KBRiy3yivNszrN2ew1J70VbLYUX8a+z+Ot/z5ovJFdq2\n35c+Or2Gr5KOiF+NXcB/7
YXbRz2EiROvKelMK8MX8a2xKunIaNRm+CrpiPiXAr5sWm2Gr4vlIv6l\ngC+b5mX40Uj4tGxfRPxFAV82zcvwtSRTxN8U8GXTvKxe5RwRf1PAl03zMnxdsBXxNwV82TQvw9eS\nTBF/U8CXTZubjhGLhllemh71UESkBaVksmnTiSi/90+uYX5Gu2xF/EwBX/pi68LUqIcgIm2opCMi\nMiEU8EVEJoQCvojIhFDAFxGZEAr4IiITQgFfRGRCKOCLiEwIBXwRkQmhgC8iMiEU8EVEJoQCvojI\nhFDAFxGZEC2bpxljwsCdwGVAFniftfZZ974dwGdrHn458BvAXcCfAhcDJeBXrLW2/0MXEZFutMvw\n3wHErbXXAR8FbvfusNYetdbeYK29AbgNeJRysL8JmLXWvh74beB3BzJyERHpSruAfz1wH4C19iHg\n6voHGGNCwB8AH7DWOsAGsOh+fRHI9XXEIiLSk3YBfwFI1twuumWeWm8HnrTWPu3e3g9MAU8BnwL+\nsB8DFRGRzWkX8JPAfO3jrbWlusf8AvBfa25/BNhvrTWU6/r3GGPimx6piIhsSrsTr/ZTzuDvNcZc\nAzzR4DFXW2sfrLk9S/VTwUkgBkTa/JzQ8vJ8m4dMDs1FleaiSnNRpbnoTchxnKZ3unV4b5UOwK3A\nVcCctfYuY8wy8FVr7ZU137MEfAbYTjnYf8Ja+1lERGSkWgZ8EREZH9p4JSIyIRTwRUQmhAK+iMiE\nUMAXEZkQ7ZZlDlSrXj2TwhjzOuA/WGtvMMZcCNxNuQfRk8Cvu7uXx54xJgZ8GtgHJIDfAX7IBM6H\nMSZCuU3JxYAD/Crlv4+7mbC58BhjzqLcvuVNlOfgbiZwLowx3wFW3Zs/An6PLuZi1Bl+0149k8AY\n8xHKf9gJ90sfB26z1v4UEAJ+blRjG4FfAFbc3/2twH+h/HqYxPl4G1By+1H9FvDvmdy58JKBTwEp\nyr/7RP6dGGOmALweZtbaf0yXczHqgN+2V8+Yewb4ecr/UABXWmu/6f73V4A3j2RUo3Ev8DH3v8NA\nngmdD2vtXwPvd2+eS3kD41WTOBeu3wf+GDji3p7I1wXwWmDGGPNVY8zX3M2wXc3FqAN+J716xpa1\n9n8DhZovhWr+e51y87mJYK1NWWvXjTHzlIP/b3H663PS5qNojLkb+M/AXzChrw1jzC9T/uR3v/ul\nEBM6F5Q/4fy+tfYmymW+v6i7v+1cjDq4dtKrZ5LU/u7zwKlRDWQUjDF7gL8F/sxa+z+Y8Pmw1v4y\nYIA/odyQ0DNJc3ErcKMx5uu4vbmA5Zr7J2kuDuAGebdZ5SvAjpr7287FqAP+fuBmgBa9eibJY8aY\nN7j//TPAN1s9eJy4B+rcD3zEWnu3++WJnA9jzC8aY37TvbkBFIG/n8S5sNa+wVr7RvfcjceBXwLu\nm8S5oPzmdzuAMeYcygH+/m7mYqSrdIDPU3733u/evnWUgxkh76r6h4G73O6iPwD+cnRDGrrbKH8c\n/Zgxxqvlfwj4gwmcj78E7jbG/B3lflQfotxufFJfG7UcJvfv5E+BzxhjvKB+K+Usv+O5UC8dEZEJ\nMeqSjoiIDIkCvojIhFDAFxGZEAr4IiITQgFfRGRCKOCLiEwIBXwRkQmhgC8iMiH+PzxQ1LcS6Wos\nAAAAAElFTkSuQmCC\n", + "png": "iVBORw0KGgoAAAANSUhEUgAAAXwAAAECCAYAAAD9z2x7AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJztvXuUJFd95/mJfFZmVlZVP6pbrZa6BQgCYxAIZFsvQDy0\nYLDGMIt3lvXax9rFa+PhHO8uZ22sncOenTPe9RxW4B3PMjzGIHnGM8xoPHgAYxkZMAZJCBDIkgUK\nvaAltR5dLXVXZuUzIiP2j4ibGZmV8crMyMyIvJ9zdNRVmVEZcfPe3/3e3+93f1exLAuJRCKRpJ/M\nom9AIpFIJPNBGnyJRCJZEaTBl0gkkhVBGnyJRCJZEaTBl0gkkhVBGnyJRCJZEXJ+L6qqmgE+DlwG\ndID3aZr2mOv1dwM3ARbwGU3TPuFc86+BlwEm8Ouapmkx3b9EIpFIQhKk8N8FFDRNuxr4EHDzyOsf\nBa4HrgE+qKrqFvBfARVN064F/inw+7O9ZYlEIpFMQpDBvwa4HUDTtHuAK0Ze14EtoAwo2Iq+BWyq\nqqoAm0B3ljcskUgkkskIMvgbQM31c89x2QhuBu4FHgC+qGlaDbgTWAMeAj4J/NHsblcikUgkkxJk\n8GtA1f1+TdNMAFVVTwAfAE4ClwBHVVV9D/A7wJ2apqnAa4BbVVUtzPrGJRKJRBIN36Attlq/AbhN\nVdUrgftdr60BPaCjaZqpquoZ4ABQYbAqOAfkgazfh1iWZSmKMsHtSyQSyUoTyXAqfsXTHD+8yNIB\nuBF4HbCuadqnVVX9X4D/DmgDjwK/DqwDnwUOYxv7P9Q07XMB92Ht7NSj3Hdq2d6uItvCRrbFANkW\nA2RbDNjers7O4M8RafAdZGceINtigGyLAbItBkQ1+HLjlUQikawI0uBLJBLJiiANvkQikawI0uBL\nJBLJiiANvkQikawI0uBLJBLJiiANvkQikawI0uBLJBLJiiANvkQikawI0uBLJBLJiiANvkQikawI\n0uBLJBLJiiANvkQikawI0uBLJBLJiiANvkQikawI0uBL+jx1Zo9PfeFBfu9T3+ZcvbPo21kpvnjn\nj/nof7yPJTmfQpJSgo44lKwAjz9d40t3/YT7Hj079LvXqdsLvKvV4vuPnOXUs3VaHYPyWn7RtyNJ\nKdLgryiWZfHQqXN86e5T/OjUOQBecuEGFx9Z52/ue5p6s7vgO1wtRHvXmro0+Cmk2db5w9vu5x9c\ncwmvfPGhhd3Hyrh0jJ7Jp77wIH9x90/o6L2F3cf9jz3Pv7vj4YUu3Tt6j3/+p9/nI5+7jx+dOscr\nLjnA//bey7npV17Hax1VH8Xgm5bFv7vjYR54/Pm4bjnVWJZFvakDUGvEP9F+8a6fcOcDz8T+OUbP\n5LNf/hGPPrUb+2fNE+2Jc/zJ7Q/RM83Q1/z4mTqPnt7lB4+cDX5zjKyMwt853+LbP3wOgK/e+xTv\nfv2LueZVx8hkIh0JOTVf//5T/N1jz3P9z1zM9lZprp8teOb5Bg8/tcvJo1V+5W0qL75wo//aRrkA\n0DdAYTi72+av732Ks7ttXrVA9ZJUOnoP3bCNR9wG3zQt/vybj3Px9jrXvOpYrJ/1xHN7fPP+Z7As\nuPSizVg/a558476n+fYPn+O6y49z4mg11DXie53HhO7Hyij8rm4PqKMHSjTbBp/9y4f4Pz7zHe5/\n7Oxc1Xatv3Rf3Bcv2uKVLz44ZOwBqo7Bj3J//c4s3UATUXNNrnG70uotHcuCrhFenU5KWvvF7gTP\nNck1cbAyCr9r2G6cK15+hDe/9iL+/JuP860
HnuEPb7ufl5/Y4pfedCkvOrYR8Femp9awB3e9EV5B\nzxqhJgu5/fN9tWz7j6Mo/PqSqJek4jbytQjtPgm7e3b2lRgPcSKMW9riQeJ5dvciiCJXjGaRrJzC\nz+cyHKgWufEdP8X/+T/8LJe95BAPPXGe3/+Te3lqZy/We7B9tYuf6btODKOQz+57LZfNUCrmIg3S\nwcBebGdOKu7JP+5JU3xXYjzESb+vL1DcxIEw2hOtgqVLZz4IRVPIDYzcRdvr/M+/9Grec91LMC2L\nnzxTj/Ue2t1efym9SNXT9VH4YKv8KMZbDICO3qPTXVxAPKkMK/yYDb5jcOah8IUbo97spmZ/gekW\nbRGMt3hvq2P0V9iLYGUMft+Nkd//yMcPV4D4B5t7YO8ucKYXgz2f26/wwQ7c1ps6ZshB6u74i/ZR\nJhF3m9XjVviO2tZ1M3YjLERD1zBpp0QI7DkxEJjM4MNixd7KGHyRipkfo2o31+1AZRSf3CQMB+cW\nt8wVy/lxkx/YCt+0LJptI9Tfm6dCTSPuvrAbtw+/YfvwLey0yThZFiM3S9wTchTRtrskY2RlDP4g\nULlf1W5MkJkyCUNKeIEK368tYJCpE3aQDg3slPlr54Ew+GuF7BwU/uDvx52pU5tjMHpeuJ8j7Bg2\nLYu9oesW1xYrY/D9VO1GRSj8eOvH1JZklu+7dHwUPoRfhQwNgpQouXkiJtbjhys0O0asynvI4Mcc\nuK0vicCZJZOItmbboGcO3GfSpTMH9DFBW0Eum6GylotdhdSHlPAyKHwvgz+5wk/LwJ4n9aZOIZ/h\nsLMRL8423B1S+PH51U3Tot5KnxAYirc09VC7bUWbbzmu40WOkZUx+GL5Os6HD7bKj13hO0u5UjFL\nvaVjmovJXBDxDC+Xzoaj8MNMgD3TpNHSKRWzzjXpGNjzpNbsUi0VJtoDEfmzXMZGj1Hhi+Cm6BeL\nFDizRIigjUoBC4ZcNV6INr9oe93+WSr8+BHL1+KY3HOAzUqBRjvm5XR/6b6OZdmDYhH4ZSxBNIW/\n19SxsJ8JpMKPiqijs1HJs1mJN5Y0qro7MSp8d1+H9OTii/598bad2RcmcNs3+EcW3xYrY/B1wztL\nBwZ+/DgNljCgF84pDdQL9ya0cfSVZoiOKVYB4pnk5qtotLs9jJ5JtVwYlLWIqQ/WXSmFEK/Cr/dV\n7WL7+qwRxvr4dniBU1uitlgZg9/R/f3Wm5UiEO+XUWvqrJfyHKjan7WoZe64TWhu+gq/FaIzO+11\ncKPoxEHSMbDnhRAB1XI+9myxUZdlnD78/UIgHf2i3uySzShccKgMhFT4zrMf3ixRzMefieXHyhj8\nvsL3cOlsVBy/dYxfRq3RdQa2/Vm7CxoEwS6d8G0h3rPhKFTp0omGWBFVy4X+KjOu1Naaa3KBeLN0\nav1AZZH1Un6hGw1nyW6jy0alwFYEgSiefaNix2mkwp8DQeUEhMKPa/NVzzTZa+l9wwiLy1nvBgRt\nc9kM5WJuyN/rhVAr1XKBjXKevebigtFJRAz+Daf9IL5d2MIIH960s4HiVfjDRi4trr56U7dFWwQX\nsFsUbVai7WKfNatj8PsFwwJ8+DHNviKaX60UYv+sILqGSTaj+J4FEHaQiqX7RiVPVWQuLCgYnUQG\nCt9uP/t3Mbl0HMOzvbUGxLvxatRVtdcKl8K4zHS6PTp6z56cK+En51qj6xQlzFItF+iZ4Xexz5qV\nMfi6Y+SyGS+FH295BdExNl1L90W5P7q6ObZSpptqpWCr9QAlUnMtVxf9XEnEbRiL+SzFQjY2IbBP\n4cfq0rEnsk1Xv0i6ynevWjajKPxml81KHkVR+hPFomIavvXwVVXNAB8HLgM6wPs0TXvM9fq7gZuw\nS3N8RtO0Tzi//z3gBiAP/EtN026N5/bD0zVMT3UP8Sv8vpKrDHz4ixoAutHzdG0JqqVBPZ31kvcZ\nq8MuicWuXJKI24cP9h6IuCbMgcG3Fb4es0snm1EoFXODftHosrVejO0z48bd1/O5LKViLvC7siyL\nWqPLxU5KplsUHTtUifeGxxCk8N8FFDRNuxr4EHDzyOsfBa4HrgE+qKrqpqqq1wFXOddcB7x4pnc8\nIV3D9KwOCYNAVlwK391ZSsUcuayyUJeOV0qmIKxarzft5epaIevasCUNflhGA6miUmkclSx3Rwx+\n3EHbjUoBRVGoVhYrcGaFiLmJ59moFAJdOq2OgdGz+pPe4ES5xbRFkMG/BrgdQNO0e4ArRl7XgS2g\nDAiH8NuAB1RV/XPgi8AXZna3U9DV/VVtLpthvRRfBN3t+lAUZaEZLV2957kBTTDY9el/j7WG7lqu\nFvq/k4Rjn8KvOD7ezux9vLVGl1IxS8VZscUdtN1wPZP4/CTjFm0Am06Sgl9sYhDjKgxdu6i2CDL4\nG0DN9XPPcfMIbgbuBR4Avqhp2i5wGHgd8B7gN4E/nd3tTo5uBPutNyuFuSh88f9FKWE9hMIf7Lb1\nNt6WZdllAUbUS1pyrudBvdm1ffdO34xz81WtYRthIXziCtp2uj26ujlQwilx9blFG8DGejGwvMK+\na2IOzAcRdKZtDXAfy57RNM0EUFX1BPAB4CTQBP6tqqrvAc4CP9I0zQAeVlW1rarqYU3Tzvp90PZ2\nuNPfJ0XvmZTXcr6fc/hAidNnG2wdKPu6fyb7fPv/Jy/eYvvwOocOlDj1XJ3qRom14vDXEGdbWJZF\n1zCplAu+n3P8qH2+r5XNeL6v2dbRDZPDB8psb1fpOou8bs+a2TPE3S8WTaNtsFUt9p/zAmcHZya/\nv69O0xY902KvpXPR0SoXON9txue7nYZnn28AcORghe3tKif2bINoWLP7PhfRLwzHy3bi+Bbb21Uu\ncHzwmULe834efto+Re/Co1W2t6s0e/Yf6ZqLeYYgg38ndvD1NlVVrwTud722BvSAjqZppqqqZ7Dd\nO98Cfhv4qKqqFwIV4PmgG9nZie94Qcuy6HR7KAGfU3JU1uOnXuDgxtpM7+HMC/YgMNo6Ozt1So7K\nevyJF9h2qiSC3QnibAuRnopl+X6O1bPf98xzdc/3nTnXBGAtl2Fnp47hpJqdeb4xk2eIuy0WjWVZ\n7O51uPjI4Dlz2Abhyad3OboxCHBO2xa7ex1MC8qFLHu1FgD1vU4s7fuT07sAFLIKOzt1TN02+M+d\nTXa/eM6ZyHodewyLHJBTT52jWhi/Yn7yGbstss54Mzp2W8xyjEQhyOB/HrheVdU7nZ9vVFX1vcC6\npmmfVlX1VuAuVVXbwKPALZqmGaqqvkFV1e9gu4x+S9O0he7EEQXRgjJT+nXxG92ZG/xao0s+Zwc3\ngX7Oda3RHTL4cRO0AU0QZhleGwlilYpZJxgtffhhaHV6GD2rHy+B4T44S9y7PcXqNa6g7aj7sppS\nl86mk3Hk9125N10BrJfyKMri2sLX4DuG+v0jv37Y9frHgI+Nue53Z3J3M6Jv5EL48CGenY71ZpeN\nsh
[... base64-encoded PNG image data for this plot output omitted ...]
 "text": [
- ""
+ ""
 ]
 }
 ],
- "prompt_number": 6
+ "prompt_number": 16
 },
 {
 "cell_type": "heading",
@@ -2217,7 +2249,7 @@
 " request_df = request.compl_df_build()\n",
 "\n",
 " # Run model\n",
- " model_created = RandomForestClassifier(50)\n",
+ " model_created = RandomForestClassifier(40)\n",
 " model_created.fit(corp_train, corp_answer)\n",
 " predicted = model_created.predict(request_df.values)\n",
 " predict_prob = model_created.predict_proba(request_df.values)\n",
@@ -2230,7 +2262,7 @@
 "language": "python",
 "metadata": {},
 "outputs": [],
- "prompt_number": 7
+ "prompt_number": 17
 },
 {
 "cell_type": "code",
@@ -2244,23 +2276,23 @@
 {
 "metadata": {},
 "output_type": "pyout",
- "prompt_number": 8,
+ "prompt_number": 22,
 "text": [
- "[('java', 0.46000000000000002),\n",
- " ('ruby', 0.20000000000000001),\n",
- " ('scheme', 0.16),\n",
- " ('clojure', 0.059999999999999998),\n",
- " ('python', 0.059999999999999998),\n",
- " ('haskell', 0.040000000000000001),\n",
- " ('php', 0.02),\n",
+ "[('java', 0.52500000000000002),\n",
+ " ('ruby', 0.17499999999999999),\n",
+ " ('clojure', 0.10000000000000001),\n",
+ " ('scheme', 0.074999999999999997),\n",
+ " ('haskell', 0.074999999999999997),\n",
+ " ('python', 0.050000000000000003),\n",
 " ('scala', 0.0),\n",
 " ('javascript', 0.0),\n",
 " ('ocaml', 0.0),\n",
- " ('perl', 0.0)]"
+ " ('perl', 0.0),\n",
+ " ('php', 0.0)]"
 ]
 }
 ],
- "prompt_number": 8
+ "prompt_number": 22
 },
 {
 "cell_type": "heading",
@@ -2294,7 +2326,7 @@
 "language": "python",
 "metadata": {},
 "outputs": [],
- "prompt_number": 9
+ "prompt_number": 19
 },
 {
 "cell_type": "code",
@@ -2316,23 +2348,23 @@
 {
 "metadata": {},
 "output_type": "pyout",
- "prompt_number": 10,
+ "prompt_number": 20,
 "text": [
- "[('ocaml', 0.66000000000000003),\n",
- " ('clojure', 0.12),\n",
- " ('scheme', 0.12),\n",
- " ('python', 0.040000000000000001),\n",
+ "[('ocaml', 0.57999999999999996),\n",
+ " ('clojure', 0.14000000000000001),\n",
+ " ('scheme', 0.080000000000000002),\n",
+ " ('haskell', 0.080000000000000002),\n",
+ " ('python', 0.059999999999999998),\n",
 " ('ruby', 0.040000000000000001),\n",
- " ('haskell', 0.02),\n",
+ " ('perl', 0.02),\n",
 " ('scala', 0.0),\n",
 " ('java', 0.0),\n",
 " ('javascript', 0.0),\n",
- " ('perl', 0.0),\n",
 " ('php', 0.0)]"
 ]
 }
 ],
- "prompt_number": 10
+ "prompt_number": 20
 },
 {
 "cell_type": "code",
@@ -2341,7 +2373,7 @@
 "language": "python",
 "metadata": {},
 "outputs": [],
- "prompt_number": 10
+ "prompt_number": 20
 },
 {
 "cell_type": "code",
@@ -2350,7 +2382,7 @@
 "language": "python",
 "metadata": {},
 "outputs": [],
- "prompt_number": 10
+ "prompt_number": 20
 },
 {
 "cell_type": "code",
@@ -2359,7 +2391,7 @@
"language": "python", "metadata": {}, "outputs": [], - "prompt_number": 10 + "prompt_number": 20 }, { "cell_type": "code", @@ -2368,7 +2400,7 @@ "language": "python", "metadata": {}, "outputs": [], - "prompt_number": 10 + "prompt_number": 20 }, { "cell_type": "code", @@ -2377,7 +2409,7 @@ "language": "python", "metadata": {}, "outputs": [], - "prompt_number": 10 + "prompt_number": 20 } ], "metadata": {}