From ca18a5ada7e450a6d77a600350d797d17b49cbf1 Mon Sep 17 00:00:00 2001
From: JOLIMAITRE Matthieu
Date: Thu, 9 May 2024 05:11:09 +0200
Subject: [PATCH] ia tp5

---
 ia/tp5/.gitignore                 |  1 +
 ia/tp5/questions.md               | 88 +++++++++++++++++++++++++++++
 ia/tp5/requirements.txt           |  2 +
 ia/tp5/setup.sh                   |  7 +++
 ia/tp5/src/ex1_regression1.py     | 56 +++++++++++++++++++
 ia/tp5/src/ex1_regression2.py     | 41 ++++++++++++++
 ia/tp5/src/ex2_classification1.py | 91 ++++++++++++++++++++++++++++++
 ia/tp5/src/ex2_classification2.py | 93 +++++++++++++++++++++++++++++++
 8 files changed, 379 insertions(+)
 create mode 100644 ia/tp5/.gitignore
 create mode 100644 ia/tp5/questions.md
 create mode 100644 ia/tp5/requirements.txt
 create mode 100755 ia/tp5/setup.sh
 create mode 100755 ia/tp5/src/ex1_regression1.py
 create mode 100755 ia/tp5/src/ex1_regression2.py
 create mode 100755 ia/tp5/src/ex2_classification1.py
 create mode 100755 ia/tp5/src/ex2_classification2.py

diff --git a/ia/tp5/.gitignore b/ia/tp5/.gitignore
new file mode 100644
index 0000000..a979ee7
--- /dev/null
+++ b/ia/tp5/.gitignore
@@ -0,0 +1 @@
+/venv
\ No newline at end of file
diff --git a/ia/tp5/questions.md b/ia/tp5/questions.md
new file mode 100644
index 0000000..e9ed86d
--- /dev/null
+++ b/ia/tp5/questions.md
@@ -0,0 +1,88 @@
+# AI TP5 Questions
+
+> Matthieu Jolimaitre
+
+## Part 1
+
+### 1. According to the fitted model, what temperature is expected for a power of 120 W?
+
+```
+f(120) = 64.61248085407703
+```
+
+So the expected temperature for a power of 120 W is about 64.61°.
+
+### 2. What is the RMSE and what is its unit?
+
+```
+RMSE = 5.662048552655999
+```
+
+The RMSE is expressed in the same unit as the target variable, i.e. in degrees: it measures a typical temperature error.
+
+### 3. Which temperature value has the largest approximation error?
+
+```
+Maximal error = 14.337368012120734
+for (x, y) = (153.89061334817407, 91.90827168300679)
+```
+
+The temperature with the largest approximation error is 91.91°, observed for a power of 153.89 W.
+
+### 4. Is the model better or worse than on the previous dataset? Why is this model suited, or not suited, to these data?
+
+```
+f(120) = 44.42433903389104
+RMSE = 13.428963137234206
+Maximal error = 47.681726938444285
+for (x, y) = (179.85011085382266, 113.05473219321803)
+```
+
+The model is worse than on the previous dataset: the RMSE is higher. A linear model is not suited to these data because it does not follow their trend, which looks parabolic or exponential rather than linear.
+
+## Part 2
+
+### 5. In how many epochs does the perceptron converge?
+
+```
+M=1, score=0.966
+M=2, score=0.974
+M=3, score=1.0
+```
+
+The perceptron converges in 3 epochs.
+
+### 6. In how many epochs does the perceptron converge?
+
+```
+...
+M=1, score=0.908
+M=2, score=0.922
+M=3, score=0.924
+M=4, score=0.916
+M=5, score=0.912
+M=6, score=0.93
+...
+```
+
+The perceptron seems to converge after about 5 epochs, after which the score regresses; on this dataset it never reaches a perfect score, so it does not truly converge.
+
+### 7. What are the values of these metrics on this new dataset with M = 25 epochs and a learning rate ρ = 0.01? What is the value of the loss defined by Equation 3, which the perceptron minimizes?
+
+```
+precision = 0.9766666666666667
+recall = 0.8987730061349694
+F1 = 0.9361022364217252
+```
+
+```
+loss = 40
+```
+
+The perceptron could not converge any further and reached a loss of 40, which means that 40 samples are misclassified.
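For reference, a minimal self-contained sketch of how the Part 1 figures are obtained: an ordinary least-squares fit of f(x) = w0 + w1·x via the normal equations, followed by the prediction at 120 W, the RMSE and the maximal error. The four (x, y) pairs below are made-up placeholders rather than the TP data; the actual computation is in `ia/tp5/src/ex1_regression1.py` further down in this patch.

```python
import numpy as np

# Placeholder (power, temperature) samples; the real arrays live in ex1_regression1.py.
x = np.array([60.0, 100.0, 140.0, 180.0])
y = np.array([42.0, 58.0, 73.0, 90.0])

# Design matrix [1, x] and the closed-form least-squares solution w = (X^T X)^-1 X^T y.
X = np.column_stack([np.ones_like(x), x])
w = np.linalg.solve(X.T @ X, X.T @ y)

# Prediction at 120 W, RMSE and maximal absolute error, the three figures quoted above.
y_pred = X @ w
print('f(120) =', w[0] + w[1] * 120)
print('RMSE =', np.sqrt(np.mean((y - y_pred) ** 2)))
print('Maximal error =', np.max(np.abs(y - y_pred)))
```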
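Similarly, a quick sketch of the Question 7 metrics, assuming the usual definitions precision = TP / (TP + FP), recall = TP / (TP + FN) and F1 = 2 · precision · recall / (precision + recall); plugging in the precision and recall printed above gives the F1 value quoted in the answer.

```python
# Recompute F1 as the harmonic mean of the precision and recall reported above.
precision = 0.9766666666666667
recall = 0.8987730061349694
F1 = 2 * precision * recall / (precision + recall)
print('F1 =', F1)  # ~0.936
```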
diff --git a/ia/tp5/requirements.txt b/ia/tp5/requirements.txt new file mode 100644 index 0000000..aa094d9 --- /dev/null +++ b/ia/tp5/requirements.txt @@ -0,0 +1,2 @@ +numpy +matplotlib diff --git a/ia/tp5/setup.sh b/ia/tp5/setup.sh new file mode 100755 index 0000000..9a85bc2 --- /dev/null +++ b/ia/tp5/setup.sh @@ -0,0 +1,7 @@ +#!/bin/sh +cd "$(dirname "$(realpath "$0")")" + +python3 -m venv venv +source venv/bin/activate + +pip install -r requirements.txt diff --git a/ia/tp5/src/ex1_regression1.py b/ia/tp5/src/ex1_regression1.py new file mode 100755 index 0000000..e69e7e1 --- /dev/null +++ b/ia/tp5/src/ex1_regression1.py @@ -0,0 +1,56 @@ +#!/usr/bin/env python3 + +import numpy as np +import matplotlib.pyplot as plt + +x = np.array([107.01819675010165,142.97461762841453,128.3592388893137,178.48859894769942,105.07512391405761,133.9662346986653,106.88633746415002,165.9304901016704,175.27615886513382,99.8473974473511,152.9242549507464,63.265825397243646,77.1539782923285,170.32756297804593,59.2346875657253,61.32680896120029,52.62839166724234,158.24057992123193,151.16037762348054,163.1015792720865,177.22038449025933,153.89061334817407,109.99231709288114,151.46879291723923,65.37567536296132,133.1897327725781,68.63592736317604,172.8069592164459,70.97023732504951,103.90605219876807,84.3922295736015,150.65037962644817,109.29954318815132,134.9040823105018,52.44267405672617,130.29261461986403,129.57244395391479,130.2014195937184,172.68725020690113,138.63663888345286,96.73602707459219,106.81415399391439,140.69205547054443,57.82931131180508,136.6796730079368,137.18292305036073,77.34973293959932,66.76041869513094,91.00568562014391,97.28240022254093,]) +y = np.array([58.604012802664876,61.568938988109096,63.9339339649278,82.30116210624541,60.16795400802,64.93076361223594,58.199364484080114,78.94874665221919,96.59629220870175,55.08553593594234,80.59085994712159,42.765533811305836,51.2101273466327,89.89235901983224,40.98731129137799,41.45808065513656,38.63780451473805,73.0967722436298,71.29285705472836,79.97576728127771,92.61811453529036,91.90827168300679,61.526181495163584,63.194648094263655,43.7942773055833,64.14802974784072,44.56028490073529,89.47188535978337,44.40026349403853,59.98046502154318,48.28512591278614,84.23051603949706,62.02917662565915,62.1335255880633,38.81183960757261,62.94408352854921,79.54097547379997,77.90871790408256,71.16968980369258,74.30802693800463,55.39297558330232,62.1570061057422,75.8196079986226,41.200254600443635,77.03603317805609,67.24949240498717,50.174757752678055,43.80774399266216,56.51718964090277,56.610351455109445,]) +N = len(x) + +# +# step 01 +# +# - build data matrix X +# - compute optimal w +# + +X = np.ones((N, 2)) +X[:, 1] = x + +w = np.linalg.inv(X.T @ X) @ X.T @ y + +# +# step 02 +# +# - plot the model as a line y = w0 + w1 * x s +# +plt.figure() +plt.scatter(x, y) + +x_min = np.min(x) +x_max = np.max(x) +x_plot = np.linspace(x_min, x_max, 100) +y_plot = w[0] + w[1] * x_plot +plt.plot(x_plot, y_plot, color='orange') + +# +# step 03 +# +# - predict new value f(x) +# - compute RMSE +# - compute maximal error +# + +prediction = w[0] + w[1] * 120 +print(f'f(120) = {prediction}') + +y_pred = X @ w +rmse = np.sqrt(np.sum((y - y_pred) ** 2) / N) +print(f'RMSE = {rmse}') + +idx = np.argmax(np.abs(y - y_pred)) +max_error = np.abs(y[idx] - y_pred[idx]) +print(f'Maximal error = {max_error}') +print(f'for (x, y) = ({x[idx]}, {y[idx]})') + +plt.show() diff --git a/ia/tp5/src/ex1_regression2.py b/ia/tp5/src/ex1_regression2.py new file mode 100755 index 0000000..b13ea01 --- 
/dev/null +++ b/ia/tp5/src/ex1_regression2.py @@ -0,0 +1,41 @@ +#!/usr/bin/env python3 + +import numpy as np +import matplotlib.pyplot as plt + +x = np.array([140.02910671010955,144.28305637655325,115.17216965047129,174.2908725140191,133.71872589985287,105.10115631256336,128.83111783663017,52.49511578021336,89.2047261676914,135.82255987404903,87.71008893735774,130.3420057698494,105.7399311229496,67.61162834891853,88.776702374284,124.09543839116444,126.81345896226252,124.66228235044524,134.91610658142736,134.77342510021955,106.08439660641662,166.5510574606382,97.78304310622656,106.66244028453148,165.95003615203737,154.80521857599115,141.5055158602476,63.02949535059915,169.53273978680755,142.85136894138446,179.85011085382266,69.42827960553919,162.85638745786787,71.12408150792874,130.02274335689975,66.0965977704274,160.24106981189047,154.9514646342514,123.98309601989713,102.93382863937995,58.991709409167946,140.66574050879328,108.96054874814895,143.86722793114524,162.6297023707218,176.81779565037516,161.25443451103945,51.522830944050256,96.7971483821873,144.89877311512754,]) +y = np.array([42.81605146528971,36.28330545155218,31.68572329595792,76.93511586659231,39.67091689151953,36.75433594698061,43.668607504626735,35.36147584081617,37.15391917695597,54.600834054029775,39.53711133759362,40.70788477452493,31.238140682578088,32.803846883928486,33.26497225482958,32.23737408256554,26.644077012420333,35.07966603555466,27.18589689633954,34.824784598255725,30.995750167590458,87.49142265447308,35.718581012669205,32.94444212403231,89.28741953891095,38.377360047677826,47.934251797831806,33.86693859748784,72.15159937429533,52.39883472306155,113.05473219321803,32.358600372044044,71.84653139192467,36.770050235085684,34.448886069864834,35.7827916825874,59.57702285934876,61.263901673061625,35.90307044444734,36.06547033199114,34.36729122240489,48.69663447942126,35.42784626944333,34.556197150504495,51.345881947334895,82.19073242621832,50.649674506375675,34.96619043312894,38.77250612852849,48.018188311243144,]) +N = len(x) + +# +# step 04 +# + + +X = np.ones((N, 2)) +X[:, 1] = x + +w = np.linalg.inv(X.T @ X) @ X.T @ y + +plt.figure() +plt.scatter(x, y) + +x_min = np.min(x) +x_max = np.max(x) +x_plot = np.linspace(x_min, x_max, 100) +y_plot = w[0] + w[1] * x_plot +plt.plot(x_plot, y_plot, color='orange') + +prediction = w[0] + w[1] * 120 +print(f'f(120) = {prediction}') + +y_pred = X @ w +rmse = np.sqrt(np.sum((y - y_pred) ** 2) / N) +print(f'RMSE = {rmse}') + +idx = np.argmax(np.abs(y - y_pred)) +max_error = np.abs(y[idx] - y_pred[idx]) +print(f'Maximal error = {max_error}') +print(f'for (x, y) = ({x[idx]}, {y[idx]})') + +plt.show() diff --git a/ia/tp5/src/ex2_classification1.py b/ia/tp5/src/ex2_classification1.py new file mode 100755 index 0000000..00b368f --- /dev/null +++ b/ia/tp5/src/ex2_classification1.py @@ -0,0 +1,91 @@ +#!/usr/bin/env python3 + +import numpy as np +import matplotlib.pyplot as plt + +x = 
np.array([1.0976270078546495,1.430378732744839,1.2055267521432877,1.0897663659937937,0.8473095986778094,1.2917882261333122,0.875174422525385,1.7835460015641595,1.9273255210020586,0.7668830376515554,1.5834500761653292,1.057789839505809,1.1360891221878646,1.851193276585322,0.14207211639577388,0.17425859940308142,0.04043679488065144,1.665239691095876,1.556313501899701,1.7400242964936383,1.957236684465528,1.5983171284334472,0.9229587245058637,1.561058352572911,0.23654885173786644,1.2798420426550476,0.2867065748180928,1.8893378340991678,1.0436966435001434,0.8293238799810472,0.5291112242092539,1.5484673788684333,0.9123006644330971,1.136867897737297,0.037579600872710284,1.2352709941517541,1.2241914454448428,1.2338679937495138,1.8874961570292483,1.3636405982069668,0.719015801147572,0.8740639075986829,1.3952623918545297,0.12045094325853967,1.3335334308913354,1.3412757392363188,0.4207651221476818,0.2578525953097066,0.6308567018483677,0.7274215418852452,1.1403935408357593,0.8772030269246407,1.9767476761184524,0.20408962149605614,0.4177535121896694,0.3226190357699925,1.3062166509307969,0.5065832050795642,0.9326215457126126,0.4888511840032055,0.31793916729103944,0.22075028232861027,1.3126591789305468,0.2763659026972276,0.393164723360107,0.7374503413219282,1.6419864596958702,0.19420255158612254,1.6758898149976078,0.19219681578792613,1.9529189300267915,0.9373024032954032,1.9535221763806743,1.209691039490092,1.4785271587966033,0.07837558450864135,0.5656139251528192,0.2403931224263378,0.5922803950442899,0.2374554379084881,0.635966358787952,0.8285259890293399,0.12829499269756872,1.3849442387400397,1.1332029084131503,0.5307789818788908,1.0464961069333993,0.18788102151688335,1.1518929911123585,1.8585923951524281,0.6371379049026473,1.3348207599273634,0.26359572480878435,1.432654408237131,0.5788121858944022,0.36638272401423366,1.1730258696201663,0.040215092374987105,1.6578800584347262,0.009390952385094131,1.3556330735924602,0.5400159463843297,1.4703880442451898,1.9243770902348765,0.49750628703991606,1.1523146688356738,1.184083862543678,1.1445038115817467,0.4461632652812366,1.90549802303397,0.8942507572352547,1.6928173449422557,1.3989585506350086,0.5948739017102673,1.6275956394049544,0.7930114816939693,1.7622063942223232,1.1625457452717174,1.7634707237097056,1.3850631801555318,1.450508559639281,1.0026487638534045,1.9121672694464478,1.2879803984592748,0.8477100971163594,1.2127864282558487,0.03838639661866705,0.6031496333490987,1.32034707498537,0.5801552144208881,1.236030857997683,0.8575374018915323,0.27094812844490046,0.5965646519120615,1.1399298214025297,1.1817455224963465,1.1486504976991576,1.3064016397142673,1.3042065400033778,0.8628368708679479,1.793093191702126,0.7351237400957931,0.8717298505312536,1.7838467100313442,1.6123879780921715,1.4077771670807326,0.20045377462460223,1.838965227489347,1.4284825990982228,1.997694013135733,0.2988966093159875,1.7362521147364285,0.32498586935274965,1.2311191285676883,0.24763996569888302,1.6960164586444688,1.6146379174500214,1.1382014772291866,0.8143665944519993,0.1383339909102761,1.3948575462891273,0.9070853653561377,1.4441111989406958,1.7327646518572584,1.9510430100057716,1.711606684785222,0.023428168370003943,0.7199561289567278,1.459981124848116,0.34325935452288103,1.0420732124082586,0.10867597667850726,0.39999304979280015,0.03704358892122794,1.5873954067148413,0.44784937612076026,0.6907033613938054,1.8561625869311817,1.4088288038470655,0.0636778590626157,0.3293883129958255,1.242956802999527,1.1544571772083352,0.47578564274901725,1.8684279958495875,1.227931911931792,1.07
12656060499166,1.179819952709142,1.4602440590335393,0.6238899909592037,0.7964421244321838,0.4196874979502443,0.3723860117606723,1.8887447799678672,1.4791015900985751,0.9809176172351342,0.45482925594664647,0.5087129635407859,0.11605832064775123,0.8688332511162415,0.6235917639882051,1.392686977630919,0.7555036785849618,0.3592073551192696,0.04935745678266246,0.13449926292649717,1.3587855469971346,0.9073936891120906,1.0731584222174444,1.7933425860806842,1.9806778947934087,0.43379396879694787,1.3261564062002016,0.5266447534743013,0.04130199893145736,1.516757307672283,0.6400343016449357,0.7669277883437959,1.1766342271072114,1.6620969104723808,1.2579636871822975,1.7453013108947906,0.5470840696312715,1.5960936678251274,0.3712718886119044,1.9055833139438891,1.3749765527756306,0.4310153542271169,1.8947411809778485,1.4617116135403156,0.5078832851900517,0.42662395473496395,1.0364014278613265,0.05132543610906315,0.4149401508822188,0.8493709375030125,0.7483399606684511,0.9271508487296214,0.5552574125894638,1.1735686929163376,1.7277112118464628,0.23506371192406617,1.0347582143082283,0.2641362126903066,1.4337193623851874,0.7921194056145875,1.130842623717018,0.36655967242815723,0.28969551868675447,0.9761125612979091,0.7112254756999112,1.880863890505626,1.5306505076139305,1.4973272397010946,1.807439479491867,0.1668448708840371,1.1043849398448131,1.1689521379115377,1.923872757094458,0.5842950535850977,0.48165755983089364,0.20058788453099563,0.03285925918294841,1.859058633584381,1.33983309318182,1.5703058240462755,0.5634602115078982,1.1728203323726534,0.12791053224196225,0.9712551918692458,1.9549902794888936,1.7530104906331816,0.6763179036736913,1.923140309082997,0.463403252942409,1.8986376448313629,1.8827554094129972,1.5984051747047834,1.2608958737335823,1.748575933249894,0.5860405690155934,1.6978871106258364,1.2357533838350476,0.02647371551779898,0.6944670358644391,0.29628172189633006,1.9636587796365064,0.9567406140799761,0.9947827309973254,1.2789450327974472,0.737169212259235,0.27380054337119786,1.644235466388491,0.3796958238055159,1.022637965092912,0.4486340579494785,0.1956889689880681,1.7243830348433666,1.9458389780462606,1.9216693161260003,1.813110998442358,1.5480946653972776,0.6662903040572838,0.16220277997599353,0.8144823428276147,0.4644682843418855,0.26497526951596595,0.10685436357365052,1.4511887284211575,0.022854917250062057,1.5411614970055525,0.2938932908007501,0.1590441651735115,0.17920606847721077,1.344095614707829,0.49073441970568954,0.8410789333601969,1.1147375826478338,1.7211023476575875,1.4540885254226565,0.5406558104774293,0.2629655985822552,0.11074864084239588,0.603197268961885,0.5242362984793565,0.9122811336009593,1.366562671095361,1.3912508912777144,0.5670376931643333,0.759853911800241,0.3623019234738061,1.5770910246130374,0.1136961528664806,1.3939944834499747,1.5573907918822067,1.5548151236975063,0.5188451286907099,0.7476262758651229,1.175199270392778,0.545643804848934,0.7417055984357774,0.3941085603712793,0.9197117675120148,0.08922460250822817,1.599591769141236,0.15391289397326546,1.037670297663052,0.6136201990903922,1.155085897662751,1.9188666816668503,1.2911404889120077,0.07072487151098183,0.8608048790161225,1.0200337046365004,1.072354989406904,1.3627850212076758,0.5551921954635322,0.2577211309326404,0.7853513530941887,1.9128114455918976,0.3742617835016895,1.807967909856474,1.0876119001546527,0.9138228432915316,1.7640828204597792,0.9172079235371717,1.4483352732230865,0.798050643406204,1.8080887858019155,1.3800500403824547,1.3992441085010334,0.6554408031142378,1.5135572854737784,1.2721
221108942826,0.4800405467594191,0.32107764497051283,1.5927829490346634,1.918333206070445,0.9162776545200857,1.1819683306473698,1.7154452883871092,0.9144469067077141,1.9037489536654724,1.1515023240897448,1.64153424140263,1.8176874368254767,1.6310476375371377,0.31882892689791187,1.2577968781234008,0.7968685172393541,0.1254259040466914,0.8480645037796839,0.5173681337788154,1.6980766168570216,0.06660925309339238,1.917965443726947,0.7107376969438592,0.7134137808050858,0.03265700536741578,0.3704646504723679,0.8025190016072175,1.8585828346054278,0.19922986044254265,1.890603066958159,1.7389770610932644,0.9083247938151036,0.6534017635365201,0.4654882585581137,1.2289294129537487,0.06614918295101124,0.031212128893656432,0.8575914449964757,0.1361481479494404,0.5038819764921858,0.4423218306921677,0.5063823874457039,0.2621104624305155,0.02407244579530854,0.23096859427749616,1.2369605190254958,1.9485124256361006,1.9806900031217878,0.8181081907461232,0.32590885209321074,1.2775235147330586,0.9806106930974743,1.978819554568863,0.1306084143035604,1.5664688766276262,0.5767969946629878,0.482837240153148,1.325009143065352,0.49212636998192894,1.3317182351183754,1.0346170344045775,0.8481779768716986,1.109375617322838,0.5741030398392595,1.4131494125459578,0.829713738667128,0.7210911209717845,1.6573138291114755,1.8499338239063843,0.09201462177459385,0.465253985659531,0.6970387389851265,1.6299329587404947,1.9709828552865951,1.9379434093407035,1.8098966910998537,0.5931125301280598,1.9840224868289482,0.4988400821129024,0.2118123097644644,1.9019052221107882,0.4668405109361926,1.379536530155501,0.11671271796117733,1.4614181982549523,1.7634404246676794,0.544873790931925,0.7581137921548571,0.7485923666418322,1.4975765150802662,0.4756144850780777,0.343706198095286,0.8985832973754762,0.608936814754639,1.6783782445173048,0.4754836520312775,1.004778914978523,1.8851671993958607,1.2679953954893215,1.7345788109249296,1.8804193787095347,1.5015297237727039,1.3991501204495027,1.9359311332084541,1.9888015792953588,0.9036433653395193,0.14173955636841673,0.5855880628810377,0.3047094113754609,0.8349727495920236,0.26257865694651206,1.208235608041764,0.7656161183157082,1.79077176857642,1.9355893435970037,1.0937698033388443,0.5496471397351932,1.1844608375236736,1.7935223164488197,0.8134666916714965,1.1041565533839417,0.5433055352122917,0.910888298900054,0.8034270707591973,0.49682693016594204,1.0117327676506167,]) +y = 
np.array([37.93427433818303,43.573137749267275,57.247339802883786,77.55355206360888,40.015671921214775,93.17428899586872,87.60866921523122,14.382126637797569,32.82782718311405,50.152196139328176,19.416509986822668,41.362839013014735,76.60877730559143,71.24630330285433,66.01459857094044,73.9475562450111,28.443131826373158,40.75283033782589,70.86182340497166,89.13112867281944,58.931024844528565,35.4429685850983,12.721173220538432,73.93031460767921,10.709569315759634,43.5411162838896,57.74834931065036,92.99003155904738,18.05450905296101,46.53480897714554,12.1881879739142,40.83498859074313,66.00079529558154,35.116115340573856,28.877495469007158,20.41329099943843,61.94262196183076,72.57430053142174,70.47614265362401,95.39749186484566,10.243289250415243,68.24769885046325,64.03530133878756,62.986564897325934,96.64932878562182,11.518450603303572,72.6834187631305,83.23107847316771,55.88264769594257,40.05683826371282,81.17561469046645,18.751863306918217,49.783207395693275,56.79571371137544,72.45607698410927,18.179715882916852,30.498355138407486,46.927140642111304,66.09652057181177,89.82647030956757,65.69435514172389,22.011532384144097,98.25221195085541,88.46071612799436,55.24486850307916,93.01131836169696,58.72427143814222,93.09754611002468,84.69076317430088,97.14577692648676,92.78045297003425,13.243043568571252,25.729480374484595,45.02212093910672,95.69284275658786,37.00260275283367,24.442087949884094,89.76741994779039,50.175497393488264,91.70880349188934,24.420741968812894,69.50057603572895,49.623737754654265,16.883809212725687,72.68168301872505,32.26588799852384,13.565397032156596,15.394986842461593,15.49706833601086,91.69596617365355,76.58955260461909,90.82561214923615,70.53240801668693,57.60459361277948,37.40017279126404,99.8166026195806,42.59701530454504,52.35840542925186,44.04206574311156,98.15742364019127,25.71925468555052,39.518920081727174,71.23137994135014,15.688685650476758,64.65244366103869,52.98818525887745,35.5599979085891,31.45719528316522,56.30614689688811,43.11348224833372,51.086790213638984,40.37296435878159,97.34443242363798,22.009548857104363,18.712355786053678,40.905255591182446,63.19242107834421,69.32588246650255,45.753107245123786,99.9350194529954,41.67036965737383,74.92660011639572,67.38244250777136,83.17484769227146,97.8603097108438,90.08142908009862,78.81057769219377,72.84236300364614,40.194835270830964,23.29170203860366,15.63724027538288,31.771153378133633,48.90533330631688,56.97966462669842,79.57751986493845,96.28668307509336,20.55884323463299,19.63037261744124,63.072525071219566,77.08582665525637,86.33353423122864,94.22488721951098,98.50836180345779,45.98215230020733,44.23016651748158,23.302780902754513,71.64409948152036,69.10857625967535,87.58563362660865,18.753219530887655,54.79992170428076,62.297373670485676,31.740133603592657,25.212286551624516,87.36227527776593,15.26814300120029,52.35588135262656,20.425060117079674,51.135288519823064,98.19660937080783,48.13357181099255,87.14124257541106,20.55840077648745,34.41268690856777,46.341346660060104,45.98309260083977,70.42451308031377,41.024631463795686,74.23901815690147,67.52682093028533,45.924503072929575,48.858411488888734,65.30749298292886,16.303797113018017,84.01660645201213,68.80790450022732,75.37082179760517,58.32307009741514,19.942939989257027,46.45320519672549,46.48362245637004,38.89386910388952,12.695529241427442,76.35288183368296,19.880601225625064,64.56773197405766,73.28957468204942,67.13076906403253,96.32280267801775,19.296833957662475,88.04504431946792,12.627121136402193,58.142516943437556,46.38192561453329,57.
17654743543824,42.85898893540088,27.151022344606126,11.72106077038208,56.63348324120569,85.84991764163581,43.589436017031474,30.05774362134822,17.247880312465966,17.677983080683305,29.9256801663495,19.001265482939967,33.85357285280338,15.953451590525924,15.904438048893308,87.06485616605038,24.59082346379499,60.371416524110316,79.61099900041275,51.076860880515994,23.803199007341114,27.96365279081029,48.96857856530627,57.54106802606822,41.44962628436814,80.33316402111952,77.59194839707585,93.44906266358062,12.605729412426449,90.61221620891831,45.33119096159359,89.05352458419947,72.17062985408765,98.86138813665714,78.33542065450013,42.809016339710794,55.09568555512769,43.875023967491614,42.84206524191143,33.48140494429538,54.637326583612264,71.3565950562425,34.96062441747192,57.1941829969498,20.564226475350146,24.386075816877216,14.212571924096988,97.36582984935696,10.347431635923499,26.072197125189067,65.15800778052932,17.323263896797478,89.3706852787149,74.76581420580594,96.97509742941041,55.68719925166884,37.03633148426385,59.45505155157443,93.77368455681759,56.86852935176745,34.048632867608674,88.96589102567077,43.47268736612152,10.124501499909922,32.29165202430843,38.64101582593561,87.2899721408712,51.26528503598005,50.01285590301707,40.249203975989865,89.26103107423717,95.05240992463523,99.27012962391665,43.90671402648881,96.95327010644542,81.26916126678113,70.812023287984,32.04005314780895,29.481153484978883,24.944304206912108,93.04809492028288,36.466899614484944,50.77848209039906,54.45620505885012,80.03544359052287,85.98114653977217,22.516543103375152,48.42139241899663,85.85693990519114,83.62299751802546,19.217238260717476,24.074501398116674,37.3778822439467,16.78231621750063,48.21967025565336,19.68559346346237,61.13958343028605,32.19012458300405,63.678975881466044,20.57730786132739,97.829548157668,93.93050834716064,45.26172447081992,31.79607347134769,32.53583915682155,53.505418168215286,13.599352171064528,67.57345954676143,46.74726175057703,43.96659153299986,82.84284743402786,73.8131914164961,95.89004338534228,41.674261644612166,90.77884881844649,79.297046762508,42.16821864352417,65.9498892807932,35.9712961886526,88.69599253673582,20.118458549108013,29.11909251646369,26.472996287192903,46.27234021638598,77.07096640289163,57.42167041469623,53.89086911843868,10.04913684072961,48.286155281954926,15.719839735254258,28.742792690933545,93.91545450644449,29.385838390891436,87.25038747708362,82.26040344052008,24.323161324801855,64.5140761543251,20.4095684714512,75.50993425325603,67.3716049634986,83.07447055219174,53.14460944502692,92.33767790500445,14.441405210959573,36.35997085243132,74.35473377186503,47.62982905732008,25.565621884404074,19.648967088569144,83.56052003154593,52.58286806190798,89.40553047271966,75.99602208850534,46.87535850676692,43.61599127401153,56.407451198612655,90.01539578707558,76.35507217427511,10.46376678421209,72.4742066232213,92.75566662152386,73.94101835540424,25.930520341074633,53.51663146847128,22.628441613107746,42.309575050566885,94.34053377464659,93.09747768028375,35.45531669584746,40.566793974957925,64.01915813181645,96.6877565734342,23.302120065885138,33.12249793180022,88.62011445616943,54.270300853751,90.90649830043286,26.696610777085866,57.940172872422465,39.364266938443514,38.488830390322846,50.21892675515792,48.97697041911416,42.1612191710159,92.34736932840568,75.85697668896034,75.47922921983768,36.09221046327599,61.993848188515635,80.12614899716506,81.60313316888917,41.0077414678881,79.3785480911783,76.23045071269597,22.735583705971024,87.93509217098295,
49.71893231623697,53.77694039997989,50.35322610081976,61.106140132975675,65.90523226303492,54.83616091866491,88.0109688933186,66.49612805757559,46.128515437496795,47.50225812178399,82.97547536160563,41.337274847186805,29.03093162041722,15.344486920521032,88.84241631285167,92.6691806071315,20.810816394712838,40.10263673465034,25.783486256371948,20.430862194329176,90.98800687000272,15.118953323081993,98.24370971221062,18.680577462764575,87.71235842742271,60.985549629024646,43.112573903608606,40.81081389626421,78.16277289139379,38.31159655038585,69.15870249554277,56.55934751644721,53.646908064226345,91.10459535842455,59.91805527582336,84.41754427438255,75.30161806913405,13.470152145309852,79.57990472548772,29.51832250819366,91.28346821664144,13.863177154794881,39.97648310268869,18.97596525112786,52.80302053763594,83.80201922827766,36.83686236967577,23.584140757993744,39.724033212720926,83.24921277285723,22.63455620194122,30.462620416975163,16.196676804403655,73.51390395906469,45.570991918270316,37.975597942898446,74.67637513070368,40.23797881062297,75.49941458929763,83.36794557828821,29.58965591119646,97.6436827161385,24.61221531214001,36.17568159910683,26.181576175018748,41.0955090720701,53.205479903868,56.995828210190716,86.82454380666448,90.050311793428,29.809347470281573,66.06046289671772,20.0346451568844,51.307287416386146,39.010018424195636,38.48506709082457,53.43258175408909,75.68448719763408,16.226439291315053,89.1256003918664,76.1332397167505,25.884945000015414,94.52448181786333,55.568100162104976,99.98277203052687,27.75335268706639,58.14173785448812,36.122323830393555,37.37562016503219,63.195884275059115,92.954716018375,82.47374700212575,75.15472586511933,60.3256403891532,93.0068653300703,54.312526602179574,88.64489605012463,85.05834794889685,29.24518121214279,79.41029166693193,11.095404124893289,39.054658379327876,30.66107002227548,55.61766626385821,76.3167845512707,18.79087307029517,56.34299817430916,94.4570819524947,30.578189588465097,70.94270297002817,]) +u = np.array([1,1,1,1,-1,1,1,1,1,1,1,1,1,1,-1,1,-1,1,1,1,1,1,-1,1,-1,1,-1,1,-1,1,-1,1,1,1,-1,-1,1,1,1,1,-1,1,1,-1,1,-1,1,1,1,-1,1,-1,1,-1,1,-1,1,-1,1,1,-1,-1,1,1,-1,1,1,1,1,1,1,-1,1,1,1,-1,-1,1,-1,1,-1,1,-1,-1,1,-1,-1,-1,-1,1,1,1,1,1,-1,1,1,-1,1,1,1,-1,1,1,1,1,1,-1,-1,1,1,1,1,-1,1,-1,1,1,1,1,1,1,1,1,1,1,1,1,1,-1,-1,-1,-1,1,1,1,-1,-1,1,1,1,1,1,1,1,1,1,1,1,1,-1,1,-1,-1,1,1,1,-1,1,1,1,1,1,1,1,1,-1,-1,1,1,1,-1,1,-1,1,1,1,1,-1,-1,-1,1,-1,1,1,1,1,1,1,-1,1,-1,-1,1,1,1,-1,-1,-1,1,-1,1,-1,-1,-1,-1,1,-1,-1,1,1,-1,1,-1,-1,1,-1,1,1,1,1,1,-1,1,-1,1,1,1,1,1,-1,-1,1,-1,-1,1,-1,1,-1,-1,1,1,-1,-1,1,-1,1,1,1,1,-1,1,1,1,1,1,1,-1,1,-1,1,-1,-1,1,1,1,1,1,1,1,1,1,-1,1,1,1,1,1,1,1,-1,1,1,1,-1,-1,1,-1,1,-1,1,-1,1,-1,1,1,-1,1,1,1,1,1,-1,-1,1,1,1,-1,1,1,1,-1,-1,1,-1,-1,-1,1,1,1,-1,-1,-1,-1,-1,1,1,1,1,-1,-1,1,1,1,1,1,1,-1,1,1,-1,-1,-1,1,1,1,1,-1,1,1,1,-1,-1,1,1,1,-1,-1,-1,1,1,1,1,-1,1,1,1,-1,1,1,1,-1,1,1,-1,-1,1,1,1,1,1,-1,1,1,1,1,1,1,-1,1,-1,1,-1,1,-1,1,1,1,-1,-1,1,1,-1,1,1,1,-1,-1,-1,-1,1,-1,1,-1,1,1,-1,-1,1,1,1,1,1,1,1,1,1,-1,1,-1,1,-1,-1,-1,1,1,1,-1,1,1,-1,1,1,-1,-1,-1,1,1,1,1,-1,1,-1,-1,1,-1,1,-1,1,1,-1,1,-1,1,-1,-1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,-1,1,1,1,1,-1,1,1,-1,1,]) +N = len(x) + +# +# step 05 +# + +def u_(w, x): return np.sign(np.dot(w, x)) + +def perceptron(x, y, u, rho=0.01, M=5): + w = np.zeros(3) + for _epoch in range(M): + for (xi, yi, ui) in zip(x, y, u): + pos = np.array([1, xi, yi]) + if u_(w, pos) != ui: + w = w + rho * ui * pos + return w + +# +# step 06 +# +# center/normalize x and y +# + +xn = (x - np.mean(x)) / 
np.std(x) +yn = (y - np.mean(y)) / np.std(y) + +w = perceptron(xn, yn, u) +print(f'w = {w}') + + +# +# step 07 +# +# plot linear decision boundary +# +plt.figure() +plt.scatter(xn[u==+1], yn[u==+1], label='+') +plt.scatter(xn[u==-1], yn[u==-1], label='-') + +x1 = np.linspace(-2, 2, 100) +x2 = (-w[0] - w[1] * x1) / w[2] +plt.plot(x1, x2, color='green') +plt.legend() + + +# +# step 08 +# +# make predictions +# + +(xp, yp) = (1, 30) +xp = (xp - np.mean(x)) / np.std(x) +yp = (yp - np.mean(y)) / np.std(y) +pred = np.dot(w, [1, xp, yp]) +print(f'pred(1, 30) = {pred}') + +(xp, yp) = (1, 60) +xp = (xp - np.mean(x)) / np.std(x) +yp = (yp - np.mean(y)) / np.std(y) +pred = np.dot(w, [1, xp, yp]) +print(f'pred(1, 60) = {pred}') + + +# +# step 09 +# + + +def score_weights(weights): + predictions = np.dot(np.array([np.ones(N), xn, yn]).T, weights) + successes = np.sign(predictions) == u + return np.sum(successes) / N + + +for rho in [0.0001, 0.001, 0.01, 0.1]: + for M in [1, 2, 3, 4, 5]: + weights = perceptron(xn, yn, u, rho, M) + print(f'rho={rho}, M={M}, score={score_weights(weights)}') + print("---") + + +plt.show() diff --git a/ia/tp5/src/ex2_classification2.py b/ia/tp5/src/ex2_classification2.py new file mode 100755 index 0000000..ad64173 --- /dev/null +++ b/ia/tp5/src/ex2_classification2.py @@ -0,0 +1,93 @@ +#!/usr/bin/env python3 + +import numpy as np +import matplotlib.pyplot as plt + +x = np.array([1.0976270078546495,1.430378732744839,1.2055267521432877,1.0897663659937937,0.8473095986778094,1.2917882261333122,0.875174422525385,1.7835460015641595,1.9273255210020586,0.7668830376515554,1.5834500761653292,1.057789839505809,1.1360891221878646,1.851193276585322,0.14207211639577388,0.17425859940308142,0.04043679488065144,1.665239691095876,1.556313501899701,1.7400242964936383,1.957236684465528,1.5983171284334472,0.9229587245058637,1.561058352572911,0.23654885173786644,1.2798420426550476,0.2867065748180928,1.8893378340991678,1.0436966435001434,0.8293238799810472,0.5291112242092539,1.5484673788684333,0.9123006644330971,1.136867897737297,0.037579600872710284,1.2352709941517541,1.2241914454448428,1.2338679937495138,1.8874961570292483,1.3636405982069668,0.719015801147572,0.8740639075986829,1.3952623918545297,0.12045094325853967,1.3335334308913354,1.3412757392363188,0.4207651221476818,0.2578525953097066,0.6308567018483677,0.7274215418852452,1.1403935408357593,0.8772030269246407,1.9767476761184524,0.20408962149605614,0.4177535121896694,0.3226190357699925,1.3062166509307969,0.5065832050795642,0.9326215457126126,0.4888511840032055,0.31793916729103944,0.22075028232861027,1.3126591789305468,0.2763659026972276,0.393164723360107,0.7374503413219282,1.6419864596958702,0.19420255158612254,1.6758898149976078,0.19219681578792613,1.9529189300267915,0.9373024032954032,1.9535221763806743,1.209691039490092,1.4785271587966033,0.07837558450864135,0.5656139251528192,0.2403931224263378,0.5922803950442899,0.2374554379084881,0.635966358787952,0.8285259890293399,0.12829499269756872,1.3849442387400397,1.1332029084131503,0.5307789818788908,1.0464961069333993,0.18788102151688335,1.1518929911123585,1.8585923951524281,0.6371379049026473,1.3348207599273634,0.26359572480878435,1.432654408237131,0.5788121858944022,0.36638272401423366,1.1730258696201663,0.040215092374987105,1.6578800584347262,0.009390952385094131,1.3556330735924602,0.5400159463843297,1.4703880442451898,1.9243770902348765,0.49750628703991606,1.1523146688356738,1.184083862543678,1.1445038115817467,0.4461632652812366,1.90549802303397,0.8942507572352547,1.6928173449422557,1.398
9585506350086,0.5948739017102673,1.6275956394049544,0.7930114816939693,1.7622063942223232,1.1625457452717174,1.7634707237097056,1.3850631801555318,1.450508559639281,1.0026487638534045,1.9121672694464478,1.2879803984592748,0.8477100971163594,1.2127864282558487,0.03838639661866705,0.6031496333490987,1.32034707498537,0.5801552144208881,1.236030857997683,0.8575374018915323,0.27094812844490046,0.5965646519120615,1.1399298214025297,1.1817455224963465,1.1486504976991576,1.3064016397142673,1.3042065400033778,0.8628368708679479,1.793093191702126,0.7351237400957931,0.8717298505312536,1.7838467100313442,1.6123879780921715,1.4077771670807326,0.20045377462460223,1.838965227489347,1.4284825990982228,1.997694013135733,0.2988966093159875,1.7362521147364285,0.32498586935274965,1.2311191285676883,0.24763996569888302,1.6960164586444688,1.6146379174500214,1.1382014772291866,0.8143665944519993,0.1383339909102761,1.3948575462891273,0.9070853653561377,1.4441111989406958,1.7327646518572584,1.9510430100057716,1.711606684785222,0.023428168370003943,0.7199561289567278,1.459981124848116,0.34325935452288103,1.0420732124082586,0.10867597667850726,0.39999304979280015,0.03704358892122794,1.5873954067148413,0.44784937612076026,0.6907033613938054,1.8561625869311817,1.4088288038470655,0.0636778590626157,0.3293883129958255,1.242956802999527,1.1544571772083352,0.47578564274901725,1.8684279958495875,1.227931911931792,1.0712656060499166,1.179819952709142,1.4602440590335393,0.6238899909592037,0.7964421244321838,0.4196874979502443,0.3723860117606723,1.8887447799678672,1.4791015900985751,0.9809176172351342,0.45482925594664647,0.5087129635407859,0.11605832064775123,0.8688332511162415,0.6235917639882051,1.392686977630919,0.7555036785849618,0.3592073551192696,0.04935745678266246,0.13449926292649717,1.3587855469971346,0.9073936891120906,1.0731584222174444,1.7933425860806842,1.9806778947934087,0.43379396879694787,1.3261564062002016,0.5266447534743013,0.04130199893145736,1.516757307672283,0.6400343016449357,0.7669277883437959,1.1766342271072114,1.6620969104723808,1.2579636871822975,1.7453013108947906,0.5470840696312715,1.5960936678251274,0.3712718886119044,1.9055833139438891,1.3749765527756306,0.4310153542271169,1.8947411809778485,1.4617116135403156,0.5078832851900517,0.42662395473496395,1.0364014278613265,0.05132543610906315,0.4149401508822188,0.8493709375030125,0.7483399606684511,0.9271508487296214,0.5552574125894638,1.1735686929163376,1.7277112118464628,0.23506371192406617,1.0347582143082283,0.2641362126903066,1.4337193623851874,0.7921194056145875,1.130842623717018,0.36655967242815723,0.28969551868675447,0.9761125612979091,0.7112254756999112,1.880863890505626,1.5306505076139305,1.4973272397010946,1.807439479491867,0.1668448708840371,1.1043849398448131,1.1689521379115377,1.923872757094458,0.5842950535850977,0.48165755983089364,0.20058788453099563,0.03285925918294841,1.859058633584381,1.33983309318182,1.5703058240462755,0.5634602115078982,1.1728203323726534,0.12791053224196225,0.9712551918692458,1.9549902794888936,1.7530104906331816,0.6763179036736913,1.923140309082997,0.463403252942409,1.8986376448313629,1.8827554094129972,1.5984051747047834,1.2608958737335823,1.748575933249894,0.5860405690155934,1.6978871106258364,1.2357533838350476,0.02647371551779898,0.6944670358644391,0.29628172189633006,1.9636587796365064,0.9567406140799761,0.9947827309973254,1.2789450327974472,0.737169212259235,0.27380054337119786,1.644235466388491,0.3796958238055159,1.022637965092912,0.4486340579494785,0.1956889689880681,1.7243830348433666,1.9458389780462606,1.9
216693161260003,1.813110998442358,1.5480946653972776,0.6662903040572838,0.16220277997599353,0.8144823428276147,0.4644682843418855,0.26497526951596595,0.10685436357365052,1.4511887284211575,0.022854917250062057,1.5411614970055525,0.2938932908007501,0.1590441651735115,0.17920606847721077,1.344095614707829,0.49073441970568954,0.8410789333601969,1.1147375826478338,1.7211023476575875,1.4540885254226565,0.5406558104774293,0.2629655985822552,0.11074864084239588,0.603197268961885,0.5242362984793565,0.9122811336009593,1.366562671095361,1.3912508912777144,0.5670376931643333,0.759853911800241,0.3623019234738061,1.5770910246130374,0.1136961528664806,1.3939944834499747,1.5573907918822067,1.5548151236975063,0.5188451286907099,0.7476262758651229,1.175199270392778,0.545643804848934,0.7417055984357774,0.3941085603712793,0.9197117675120148,0.08922460250822817,1.599591769141236,0.15391289397326546,1.037670297663052,0.6136201990903922,1.155085897662751,1.9188666816668503,1.2911404889120077,0.07072487151098183,0.8608048790161225,1.0200337046365004,1.072354989406904,1.3627850212076758,0.5551921954635322,0.2577211309326404,0.7853513530941887,1.9128114455918976,0.3742617835016895,1.807967909856474,1.0876119001546527,0.9138228432915316,1.7640828204597792,0.9172079235371717,1.4483352732230865,0.798050643406204,1.8080887858019155,1.3800500403824547,1.3992441085010334,0.6554408031142378,1.5135572854737784,1.2721221108942826,0.4800405467594191,0.32107764497051283,1.5927829490346634,1.918333206070445,0.9162776545200857,1.1819683306473698,1.7154452883871092,0.9144469067077141,1.9037489536654724,1.1515023240897448,1.64153424140263,1.8176874368254767,1.6310476375371377,0.31882892689791187,1.2577968781234008,0.7968685172393541,0.1254259040466914,0.8480645037796839,0.5173681337788154,1.6980766168570216,0.06660925309339238,1.917965443726947,0.7107376969438592,0.7134137808050858,0.03265700536741578,0.3704646504723679,0.8025190016072175,1.8585828346054278,0.19922986044254265,1.890603066958159,1.7389770610932644,0.9083247938151036,0.6534017635365201,0.4654882585581137,1.2289294129537487,0.06614918295101124,0.031212128893656432,0.8575914449964757,0.1361481479494404,0.5038819764921858,0.4423218306921677,0.5063823874457039,0.2621104624305155,0.02407244579530854,0.23096859427749616,1.2369605190254958,1.9485124256361006,1.9806900031217878,0.8181081907461232,0.32590885209321074,1.2775235147330586,0.9806106930974743,1.978819554568863,0.1306084143035604,1.5664688766276262,0.5767969946629878,0.482837240153148,1.325009143065352,0.49212636998192894,1.3317182351183754,1.0346170344045775,0.8481779768716986,1.109375617322838,0.5741030398392595,1.4131494125459578,0.829713738667128,0.7210911209717845,1.6573138291114755,1.8499338239063843,0.09201462177459385,0.465253985659531,0.6970387389851265,1.6299329587404947,1.9709828552865951,1.9379434093407035,1.8098966910998537,0.5931125301280598,1.9840224868289482,0.4988400821129024,0.2118123097644644,1.9019052221107882,0.4668405109361926,1.379536530155501,0.11671271796117733,1.4614181982549523,1.7634404246676794,0.544873790931925,0.7581137921548571,0.7485923666418322,1.4975765150802662,0.4756144850780777,0.343706198095286,0.8985832973754762,0.608936814754639,1.6783782445173048,0.4754836520312775,1.004778914978523,1.8851671993958607,1.2679953954893215,1.7345788109249296,1.8804193787095347,1.5015297237727039,1.3991501204495027,1.9359311332084541,1.9888015792953588,0.9036433653395193,0.14173955636841673,0.5855880628810377,0.3047094113754609,0.8349727495920236,0.26257865694651206,1.208235608041764,0.765616
1183157082,1.79077176857642,1.9355893435970037,1.0937698033388443,0.5496471397351932,1.1844608375236736,1.7935223164488197,0.8134666916714965,1.1041565533839417,0.5433055352122917,0.910888298900054,0.8034270707591973,0.49682693016594204,1.0117327676506167,]) +y = np.array([37.93427433818303,43.573137749267275,57.247339802883786,77.55355206360888,40.015671921214775,93.17428899586872,87.60866921523122,14.382126637797569,32.82782718311405,50.152196139328176,19.416509986822668,41.362839013014735,76.60877730559143,71.24630330285433,66.01459857094044,73.9475562450111,28.443131826373158,40.75283033782589,70.86182340497166,89.13112867281944,58.931024844528565,35.4429685850983,12.721173220538432,73.93031460767921,10.709569315759634,43.5411162838896,57.74834931065036,92.99003155904738,18.05450905296101,46.53480897714554,12.1881879739142,40.83498859074313,66.00079529558154,35.116115340573856,28.877495469007158,20.41329099943843,61.94262196183076,72.57430053142174,70.47614265362401,95.39749186484566,10.243289250415243,68.24769885046325,64.03530133878756,62.986564897325934,96.64932878562182,11.518450603303572,72.6834187631305,83.23107847316771,55.88264769594257,40.05683826371282,81.17561469046645,18.751863306918217,49.783207395693275,56.79571371137544,72.45607698410927,18.179715882916852,30.498355138407486,46.927140642111304,66.09652057181177,89.82647030956757,65.69435514172389,22.011532384144097,98.25221195085541,88.46071612799436,55.24486850307916,93.01131836169696,58.72427143814222,93.09754611002468,84.69076317430088,97.14577692648676,92.78045297003425,13.243043568571252,25.729480374484595,45.02212093910672,95.69284275658786,37.00260275283367,24.442087949884094,89.76741994779039,50.175497393488264,91.70880349188934,24.420741968812894,69.50057603572895,49.623737754654265,16.883809212725687,72.68168301872505,32.26588799852384,13.565397032156596,15.394986842461593,15.49706833601086,91.69596617365355,76.58955260461909,90.82561214923615,70.53240801668693,57.60459361277948,37.40017279126404,99.8166026195806,42.59701530454504,52.35840542925186,44.04206574311156,98.15742364019127,25.71925468555052,39.518920081727174,71.23137994135014,15.688685650476758,64.65244366103869,52.98818525887745,35.5599979085891,31.45719528316522,56.30614689688811,43.11348224833372,51.086790213638984,40.37296435878159,97.34443242363798,22.009548857104363,18.712355786053678,40.905255591182446,63.19242107834421,69.32588246650255,45.753107245123786,99.9350194529954,41.67036965737383,74.92660011639572,67.38244250777136,83.17484769227146,97.8603097108438,90.08142908009862,78.81057769219377,72.84236300364614,40.194835270830964,23.29170203860366,15.63724027538288,31.771153378133633,48.90533330631688,56.97966462669842,79.57751986493845,96.28668307509336,20.55884323463299,19.63037261744124,63.072525071219566,77.08582665525637,86.33353423122864,94.22488721951098,98.50836180345779,45.98215230020733,44.23016651748158,23.302780902754513,71.64409948152036,69.10857625967535,87.58563362660865,18.753219530887655,54.79992170428076,62.297373670485676,31.740133603592657,25.212286551624516,87.36227527776593,15.26814300120029,52.35588135262656,20.425060117079674,51.135288519823064,98.19660937080783,48.13357181099255,87.14124257541106,20.55840077648745,34.41268690856777,46.341346660060104,45.98309260083977,70.42451308031377,41.024631463795686,74.23901815690147,67.52682093028533,45.924503072929575,48.858411488888734,65.30749298292886,16.303797113018017,84.01660645201213,68.80790450022732,75.37082179760517,58.32307009741514,19.942939989257027,46.45320519672
549,46.48362245637004,38.89386910388952,12.695529241427442,76.35288183368296,19.880601225625064,64.56773197405766,73.28957468204942,67.13076906403253,96.32280267801775,19.296833957662475,88.04504431946792,12.627121136402193,58.142516943437556,46.38192561453329,57.17654743543824,42.85898893540088,27.151022344606126,11.72106077038208,56.63348324120569,85.84991764163581,43.589436017031474,30.05774362134822,17.247880312465966,17.677983080683305,29.9256801663495,19.001265482939967,33.85357285280338,15.953451590525924,15.904438048893308,87.06485616605038,24.59082346379499,60.371416524110316,79.61099900041275,51.076860880515994,23.803199007341114,27.96365279081029,48.96857856530627,57.54106802606822,41.44962628436814,80.33316402111952,77.59194839707585,93.44906266358062,12.605729412426449,90.61221620891831,45.33119096159359,89.05352458419947,72.17062985408765,98.86138813665714,78.33542065450013,42.809016339710794,55.09568555512769,43.875023967491614,42.84206524191143,33.48140494429538,54.637326583612264,71.3565950562425,34.96062441747192,57.1941829969498,20.564226475350146,24.386075816877216,14.212571924096988,97.36582984935696,10.347431635923499,26.072197125189067,65.15800778052932,17.323263896797478,89.3706852787149,74.76581420580594,96.97509742941041,55.68719925166884,37.03633148426385,59.45505155157443,93.77368455681759,56.86852935176745,34.048632867608674,88.96589102567077,43.47268736612152,10.124501499909922,32.29165202430843,38.64101582593561,87.2899721408712,51.26528503598005,50.01285590301707,40.249203975989865,89.26103107423717,95.05240992463523,99.27012962391665,43.90671402648881,96.95327010644542,81.26916126678113,70.812023287984,32.04005314780895,29.481153484978883,24.944304206912108,93.04809492028288,36.466899614484944,50.77848209039906,54.45620505885012,80.03544359052287,85.98114653977217,22.516543103375152,48.42139241899663,85.85693990519114,83.62299751802546,19.217238260717476,24.074501398116674,37.3778822439467,16.78231621750063,48.21967025565336,19.68559346346237,61.13958343028605,32.19012458300405,63.678975881466044,20.57730786132739,97.829548157668,93.93050834716064,45.26172447081992,31.79607347134769,32.53583915682155,53.505418168215286,13.599352171064528,67.57345954676143,46.74726175057703,43.96659153299986,82.84284743402786,73.8131914164961,95.89004338534228,41.674261644612166,90.77884881844649,79.297046762508,42.16821864352417,65.9498892807932,35.9712961886526,88.69599253673582,20.118458549108013,29.11909251646369,26.472996287192903,46.27234021638598,77.07096640289163,57.42167041469623,53.89086911843868,10.04913684072961,48.286155281954926,15.719839735254258,28.742792690933545,93.91545450644449,29.385838390891436,87.25038747708362,82.26040344052008,24.323161324801855,64.5140761543251,20.4095684714512,75.50993425325603,67.3716049634986,83.07447055219174,53.14460944502692,92.33767790500445,14.441405210959573,36.35997085243132,74.35473377186503,47.62982905732008,25.565621884404074,19.648967088569144,83.56052003154593,52.58286806190798,89.40553047271966,75.99602208850534,46.87535850676692,43.61599127401153,56.407451198612655,90.01539578707558,76.35507217427511,10.46376678421209,72.4742066232213,92.75566662152386,73.94101835540424,25.930520341074633,53.51663146847128,22.628441613107746,42.309575050566885,94.34053377464659,93.09747768028375,35.45531669584746,40.566793974957925,64.01915813181645,96.6877565734342,23.302120065885138,33.12249793180022,88.62011445616943,54.270300853751,90.90649830043286,26.696610777085866,57.940172872422465,39.364266938443514,38.488830390322846,50.21
892675515792,48.97697041911416,42.1612191710159,92.34736932840568,75.85697668896034,75.47922921983768,36.09221046327599,61.993848188515635,80.12614899716506,81.60313316888917,41.0077414678881,79.3785480911783,76.23045071269597,22.735583705971024,87.93509217098295,49.71893231623697,53.77694039997989,50.35322610081976,61.106140132975675,65.90523226303492,54.83616091866491,88.0109688933186,66.49612805757559,46.128515437496795,47.50225812178399,82.97547536160563,41.337274847186805,29.03093162041722,15.344486920521032,88.84241631285167,92.6691806071315,20.810816394712838,40.10263673465034,25.783486256371948,20.430862194329176,90.98800687000272,15.118953323081993,98.24370971221062,18.680577462764575,87.71235842742271,60.985549629024646,43.112573903608606,40.81081389626421,78.16277289139379,38.31159655038585,69.15870249554277,56.55934751644721,53.646908064226345,91.10459535842455,59.91805527582336,84.41754427438255,75.30161806913405,13.470152145309852,79.57990472548772,29.51832250819366,91.28346821664144,13.863177154794881,39.97648310268869,18.97596525112786,52.80302053763594,83.80201922827766,36.83686236967577,23.584140757993744,39.724033212720926,83.24921277285723,22.63455620194122,30.462620416975163,16.196676804403655,73.51390395906469,45.570991918270316,37.975597942898446,74.67637513070368,40.23797881062297,75.49941458929763,83.36794557828821,29.58965591119646,97.6436827161385,24.61221531214001,36.17568159910683,26.181576175018748,41.0955090720701,53.205479903868,56.995828210190716,86.82454380666448,90.050311793428,29.809347470281573,66.06046289671772,20.0346451568844,51.307287416386146,39.010018424195636,38.48506709082457,53.43258175408909,75.68448719763408,16.226439291315053,89.1256003918664,76.1332397167505,25.884945000015414,94.52448181786333,55.568100162104976,99.98277203052687,27.75335268706639,58.14173785448812,36.122323830393555,37.37562016503219,63.195884275059115,92.954716018375,82.47374700212575,75.15472586511933,60.3256403891532,93.0068653300703,54.312526602179574,88.64489605012463,85.05834794889685,29.24518121214279,79.41029166693193,11.095404124893289,39.054658379327876,30.66107002227548,55.61766626385821,76.3167845512707,18.79087307029517,56.34299817430916,94.4570819524947,30.578189588465097,70.94270297002817,]) +u = np.array([1,1,1,1,-1,1,1,1,1,1,1,1,1,1,-1,1,-1,1,1,1,1,1,-1,1,-1,1,-1,1,-1,1,-1,1,1,1,-1,-1,1,1,1,1,-1,1,1,-1,1,-1,1,1,-1,-1,1,-1,1,-1,1,-1,1,-1,1,1,-1,-1,1,1,-1,1,1,1,1,1,1,-1,1,1,1,-1,-1,1,-1,1,-1,1,-1,-1,1,-1,-1,-1,-1,1,1,1,1,1,-1,1,1,-1,1,1,1,-1,1,-1,1,1,1,1,-1,1,1,1,1,-1,-1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,-1,-1,1,-1,1,1,1,-1,-1,1,1,1,1,1,1,1,1,-1,1,1,1,-1,1,-1,-1,1,-1,1,-1,1,1,1,1,-1,1,1,1,-1,1,1,1,1,-1,1,-1,1,1,1,1,1,-1,-1,-1,-1,1,1,1,1,1,1,-1,1,-1,-1,1,1,-1,-1,-1,-1,1,-1,-1,-1,-1,-1,-1,1,-1,-1,1,1,1,1,-1,-1,1,-1,1,1,1,1,1,-1,1,-1,1,1,1,1,1,-1,-1,1,-1,-1,1,-1,1,-1,-1,-1,1,-1,-1,1,-1,1,1,1,1,-1,1,1,1,1,1,1,-1,1,-1,1,-1,-1,1,1,1,1,1,1,1,1,1,-1,1,1,1,1,1,1,1,-1,1,1,1,-1,-1,1,-1,-1,-1,1,-1,1,-1,1,1,-1,1,1,1,1,1,-1,-1,1,1,1,-1,1,1,1,1,-1,1,1,-1,-1,1,1,1,1,-1,-1,-1,-1,1,1,1,1,-1,-1,1,-1,1,1,1,1,-1,1,1,-1,-1,-1,1,1,1,1,1,-1,1,1,-1,-1,1,1,1,-1,-1,-1,1,1,1,1,1,1,1,-1,-1,1,1,1,-1,1,1,-1,-1,1,1,1,1,1,-1,1,1,1,1,1,1,-1,1,-1,1,-1,1,-1,1,1,1,-1,-1,1,1,-1,1,1,1,-1,-1,-1,-1,1,-1,1,-1,1,1,-1,-1,1,1,1,1,1,1,1,1,1,-1,1,-1,1,-1,-1,-1,1,1,-1,-1,1,1,-1,1,-1,-1,-1,-1,1,1,1,1,-1,1,-1,-1,1,-1,1,-1,1,1,-1,1,-1,1,-1,-1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,-1,-1,1,1,1,-1,1,1,-1,1,]) +N = len(x) + +# +# step 10 +# + +def u_(w, x): return np.sign(np.dot(w, x)) + +def perceptron(x, 
y, u, rho=0.01, M=5): + w = np.zeros(3) + for _epoch in range(M): + for (xi, yi, ui) in zip(x, y, u): + pos = np.array([1, xi, yi]) + if u_(w, pos) != ui: + w = w + rho * ui * pos + return w + + +xn = (x - np.mean(x)) / np.std(x) +yn = (y - np.mean(y)) / np.std(y) + +w = perceptron(xn, yn, u, rho=0.01, M=25) +print(f'w = {w}') + + +plt.figure() +plt.scatter(xn[u==+1], yn[u==+1], label='+') +plt.scatter(xn[u==-1], yn[u==-1], label='-') + +x1 = np.linspace(-2, 2, 100) +x2 = (-w[0] - w[1] * x1) / w[2] +plt.plot(x1, x2, color='green') +plt.legend() + + +def score_weights(weights): + predictions = np.dot(np.array([np.ones(N), xn, yn]).T, weights) + successes = np.sign(predictions) == u + return np.sum(successes) / N + + +for M in range(10): + weights = perceptron(xn, yn, u, rho=0.01, M=M) + print(f'M={M}, score={score_weights(weights)}') + + +# +# step 11 +# +def eval_metrics(x, y, u, w): + TN = 0 + TP = 0 + FN = 0 + FP = 0 + for xi, yi, ui in zip(x, y, u): + pos = np.array([1, xi, yi]) + if np.dot(w, pos) * ui > 0: + if ui == 1: + TP += 1 + else: + TN += 1 + else: + if ui == 1: + FN += 1 + else: + FP += 1 + precision = TP / (TP + FP) + recall = TP / (TP + FN) + F1 = 2 * (precision * recall) / (precision + recall) + print('precision =', precision) + print('recall =', recall) + print('F1 =', F1) + +eval_metrics(xn, yn, u, w) + +def calc_loss(w): + loss = 0 + for xi, yi, ui in zip(xn, yn, u): + pos = np.array([1, xi, yi]) + if u_(w, pos) != ui: loss += 1 + return loss + +print(f'loss = {calc_loss(w)}') + +# plt.show()
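As a closing sanity check (not part of the original scripts), the loop-based `score_weights` and `calc_loss` above can be cross-checked with a vectorized equivalent. This sketch assumes it is appended to `ex2_classification2.py`, reusing the `xn`, `yn`, `u`, `N` and final `w` defined there.

```python
# Vectorized equivalent of score_weights() / calc_loss(): build the [1, xn, yn]
# feature matrix, take the sign of the linear response, and compare with the labels u.
X_feat = np.column_stack([np.ones(N), xn, yn])
preds = np.sign(X_feat @ w)
print('score =', np.mean(preds == u))      # fraction of correctly classified samples
print('loss =', int(np.sum(preds != u)))   # number of misclassified samples
```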