Initial Commit

This commit is contained in:
2026-01-18 17:29:00 +01:00
commit 7da1135791
15 changed files with 4205 additions and 0 deletions

298
2025/2025.ipynb Normal file
View File

@@ -0,0 +1,298 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 21,
"metadata": {
"id": "2QuiuhUXUndl"
},
"outputs": [],
"source": [
"import numpy as np\n",
"import matplotlib.pyplot as plt\n",
"import scipy.io\n",
"import time\n",
"from scipy.sparse import csc_matrix, hstack"
]
},
{
"cell_type": "code",
"execution_count": 22,
"metadata": {
"id": "1xIVmvlUUvFZ"
},
"outputs": [],
"source": [
"def load_data(name, big_data=False):\n",
"    \"\"\"Load a .mat dataset and split its rows by label.\n",
"\n",
"    Returns (rows with y==1, rows with y==0). When big_data is True the\n",
"    feature matrix is converted to CSR (needed for \"Ybig_train_set.mat\").\n",
"    \"\"\"\n",
"    contents = scipy.io.loadmat(name)\n",
"    features = contents[\"a\"].tocsr() if big_data else contents[\"a\"]\n",
"    labels = contents[\"y\"]\n",
"    rows_pos = np.where(labels == 1)[0]\n",
"    rows_neg = np.where(labels == 0)[0]\n",
"    return features[rows_pos, :], features[rows_neg, :]"
]
},
{
"cell_type": "code",
"execution_count": 23,
"metadata": {
"id": "l4L0fk4YU1fE"
},
"outputs": [],
"source": [
"def optimisation(classe1,classe2,methode=\"gradient\",pas=\"wolfe\",maxiter=10000,maxtime=10,big_data=False):\n",
"    \"\"\"Minimise the logistic objective over (classe1, classe2) by gradient descent.\n",
"\n",
"    `pas` selects the step-size rule: \"wolfe\" (bisection on the Wolfe\n",
"    conditions) or \"back\"/\"backtracking\" (simple halving).\n",
"    Stops after `maxiter` iterations or `maxtime` seconds, whichever first.\n",
"    Returns (x, fobj, temps): the final iterate plus the objective-value and\n",
"    elapsed-time histories, truncated to the iterations actually run.\n",
"    \"\"\"\n",
"    def fct(x):\n",
"        # Objective value f and gradient g at x.\n",
"        # Clip the exponent to avoid overflow issues in np.exp.\n",
"        D = 50\n",
"\n",
"        # Contribution of the yi==0 rows\n",
"        exposant = np.clip(a1@x, -D , D)\n",
"        exp = np.exp(-exposant)\n",
"        f1 = sum(np.log(1+exp))\n",
"\n",
"        g1 = -a1.T@(exp/(1+exp))\n",
"\n",
"        # Contribution of the yi==1 rows\n",
"        exposant = np.clip(a2@x, -D, D)\n",
"        exp = np.exp(-exposant)\n",
"\n",
"        f2 = sum(exp) + sum(np.log(1+exp))\n",
"\n",
"        g2 = a2.T@(1/(1+exp))\n",
"\n",
"        f = f1 + f2\n",
"        g = g1 + g2\n",
"        return f, g\n",
"\n",
"    def wolfebissection(x,d,alpha=1,beta1=0.0001,beta2=0.9,maxls=100):\n",
"        # Bisection search for a step length satisfying both Wolfe conditions.\n",
"        # f0/g0 do not depend on alpha, so they are computed once (the\n",
"        # original re-evaluated fct(x) at every trial step).\n",
"        f0, g0 = fct(x)\n",
"        slope0 = np.dot(d, g0)\n",
"        aleft = 0\n",
"        aright = np.inf\n",
"        for _ in range(maxls):  # safety cap: the search could otherwise spin forever\n",
"            f1, g1 = fct(x+alpha*d)\n",
"            if f1 > f0 + alpha*beta1*slope0:\n",
"                # W1 (sufficient decrease) violated: step too long, bisect left\n",
"                aright = alpha\n",
"                alpha = (aleft+aright)/2\n",
"            elif np.dot(d, g1) < beta2*slope0:\n",
"                # W2 (curvature) violated: step too short, move right\n",
"                aleft = alpha\n",
"                if aright < np.inf:\n",
"                    alpha = (aleft+aright)/2\n",
"                else:\n",
"                    alpha = 2*alpha  # the \"2\" is arbitrary, anything >1 works\n",
"            else:\n",
"                break\n",
"        return alpha\n",
"\n",
"    def backtrackingsimple(x,d,alpha=1):\n",
"        # Halve alpha until the step no longer increases the objective.\n",
"        f0, g0 = fct(x)              # value and gradient at the current iterate\n",
"        f1, g1 = fct(x+alpha*d)      # value and gradient at the trial iterate\n",
"\n",
"        while f1>f0 and alpha>=1e-16:\n",
"            alpha /= 2\n",
"            f1, g1 = fct(x+alpha*d)  # fixed: was fct(alpha*d), missing the x offset\n",
"\n",
"        return alpha\n",
"\n",
"    def methgradient(x0):\n",
"        # Gradient descent with the selected line search.\n",
"        start_time = time.time()\n",
"        temps = np.zeros(maxiter)\n",
"        fobj = np.zeros(maxiter)\n",
"        x = x0.copy()\n",
"\n",
"        for k in range(maxiter):\n",
"            f, g = fct(x)\n",
"            d = -g\n",
"\n",
"            if pas==\"wolfe\":\n",
"                alpha = wolfebissection(x, d)\n",
"            elif pas in (\"back\", \"backtracking\"):  # accept both spellings\n",
"                alpha = backtrackingsimple(x, d)\n",
"            else:\n",
"                # previously an unknown value left alpha undefined (NameError)\n",
"                raise ValueError(f\"pas inconnu: {pas}\")\n",
"\n",
"            x = x + alpha*d\n",
"\n",
"            f, g = fct(x)\n",
"            fobj[k] = f\n",
"            temps[k] = time.time() - start_time\n",
"\n",
"            if temps[k] >= maxtime:\n",
"                break\n",
"        # fixed: return the recorded histories; fobj[k+1]/temps[k+1] was a\n",
"        # single (out-of-range after a full run) scalar, not plottable data\n",
"        return x, fobj[:k+1], temps[:k+1]\n",
"\n",
"    def methgradientacc(x0):\n",
"        # Accelerated gradient — not done in class.\n",
"        pass\n",
"\n",
"    # Design matrices a1/a2: a leading column of ones (intercept term)\n",
"    # followed by the class features.\n",
"    n1, m = classe1.shape\n",
"    n2, m = classe2.shape\n",
"    if big_data:\n",
"        # Sparse inputs (e.g. \"Ybig_train_set.mat\"): build the matrices with\n",
"        # hstack/csc_matrix as the original \"!! csc_matrix()\" notes intended\n",
"        # (dense np.ones + slice assignment fails on sparse inputs).\n",
"        a1 = hstack([csc_matrix(np.ones((n1, 1))), classe1]).tocsc()\n",
"        a2 = hstack([csc_matrix(np.ones((n2, 1))), classe2]).tocsc()\n",
"    else:\n",
"        a1 = np.ones((n1, 1+m))\n",
"        a2 = np.ones((n2, 1+m))  # fixed: row count was n1 instead of n2\n",
"        a1[:,1:] = classe1\n",
"        a2[:,1:] = classe2\n",
"\n",
"    # Initial iterate\n",
"    x0 = np.ones(1+m)\n",
"\n",
"    if methode==\"gradient\":\n",
"        x,f,t = methgradient(x0)\n",
"    else:\n",
"        # \"gradientacc\" was not implemented in class; fail loudly instead of\n",
"        # reaching the return with x/f/t undefined\n",
"        raise ValueError(f\"methode inconnue: {methode}\")\n",
"\n",
"    return x, f, t"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "DOjbhWDDgxW0"
},
"outputs": [],
"source": [
"# ############# Model training #############\n",
"classe1_train, classe2_train = load_data(\"train_set.mat\")\n",
"x_gd_wf, f_gd_wf, t_gd_wf = optimisation(classe1_train,classe2_train,maxtime=60)\n",
"x_gd_bt, f_gd_bt, t_gd_bt = optimisation(classe1_train,classe2_train,pas=\"back\",maxtime=60)\n",
"#x_ga_wf, f_ga_wf, t_ga_wf = optimisation(classe1_train,classe2_train,methode=\"gradientacc\", pas=\"wolfe\",maxtime=60)\n",
"#x_ga_bt, f_ga_bt, t_ga_bt = optimisation(classe1_train,classe2_train,methode=\"gradientacc\", pas=\"back\",maxtime=60)\n",
"\n",
"# Objective value against elapsed time for each step-size rule.\n",
"# (fixed: the exact same figure was drawn twice back to back; the\n",
"# duplicate block has been removed)\n",
"plt.loglog(t_gd_wf,f_gd_wf,label=\"Grad. Wolfe\")\n",
"plt.loglog(t_gd_bt,f_gd_bt,label=\"Grad. Backting\")\n",
"#plt.loglog(t_ga_wf,f_ga_wf,label=\"GradAcc. Wolfe\")\n",
"#plt.loglog(t_ga_bt,f_ga_bt,label=\"GradAcc. Backting\")\n",
"plt.xlabel(\"Temps\")\n",
"plt.ylabel(\"Erreur\")\n",
"plt.legend(loc=1)\n",
"plt.show()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "LLaiMIxoYG6P"
},
"outputs": [],
"source": [
"classe1_train, classe2_train = load_data(\"big_train_set.mat\", big_data=True)\n",
"# fixed: pas=\"backtracking\" was not a recognised value — optimisation only\n",
"# checks \"wolfe\" and \"back\", so alpha was left undefined (NameError)\n",
"x_gd_bt, f_gd_bt, t_gd_bt = optimisation(classe1_train,classe2_train,pas=\"back\",maxtime=180,big_data=True)\n",
"#x_ga_bt, f_ga_bt, t_ga_bt = optimisation(classe1_train,classe2_train,methode=\"gradientacc\", pas=\"back\",maxtime=180)\n",
"\n",
"plt.loglog(t_gd_bt,f_gd_bt,label=\"Grad. Backting\")\n",
"#plt.loglog(t_ga_bt,f_ga_bt,label=\"GradAcc. Backting\")\n",
"plt.xlabel(\"Temps\")\n",
"plt.ylabel(\"Erreur\")\n",
"plt.legend(loc=1)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "AmSlY7I-kH7X"
},
"outputs": [],
"source": [
"def entrainement(classe1,classe2):\n",
"    # TODO: training routine for this section — still to be written.\n",
"    # Placeholder body; returns None exactly like the original stub.\n",
"    ..."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "qE7YMoJ7lbJN"
},
"outputs": [],
"source": [
"############# Model training #############\n",
"# \"Ybig_train_set.mat\" stores a sparse matrix: per the note inside\n",
"# load_data, it must be loaded with big_data=True so .tocsr() is applied.\n",
"classe1_train, classe2_train = load_data(\"Ybig_train_set.mat\", big_data=True)\n",
"poids_reseau = entrainement(classe1_train,classe2_train)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "JEyEwFCcVQge"
},
"outputs": [],
"source": [
"def validation(poids_reseau,classe1_test,classe2_test):\n",
"    \"\"\"Count how many test rows the fitted weights classify correctly.\n",
"\n",
"    poids_reseau[0] is the intercept, poids_reseau[1:] the feature weights.\n",
"    Prints and returns the number of correct predictions.\n",
"    \"\"\"\n",
"    def pred_sigmoide(poids_reseau,data):\n",
"        # Clip the exponent (same D=50 trick as in the training objective)\n",
"        # to avoid np.exp overflow warnings on extreme scores; rounding the\n",
"        # sigmoid gives the 0/1 prediction.\n",
"        z = np.clip(poids_reseau[0]+data.dot(poids_reseau[1:]), -50, 50)\n",
"        return np.round(1/(1+np.exp(-z)))\n",
"    correct = 0\n",
"    nb1, nb2 = classe1_test.shape[0], classe2_test.shape[0]\n",
"    # Class-1 rows should be predicted 1, class-2 rows 0.\n",
"    for i in range(nb1): correct += pred_sigmoide(poids_reseau,classe1_test[i,:])\n",
"    for i in range(nb2): correct += 1-pred_sigmoide(poids_reseau,classe2_test[i,:])\n",
"    # int(): correct accumulates numpy floats, which printed as e.g. 123.0\n",
"    print(f\"{int(correct)}/{nb1+nb2} donnees classees correctement dans jeu test\")\n",
"    return correct"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "HgnXLoUmmY8h"
},
"outputs": [],
"source": [
"############# Model validation #############\n",
"# Same sparse dataset family as \"Ybig_train_set.mat\": load with\n",
"# big_data=True so the feature matrix is converted to CSR.\n",
"classe1_test, classe2_test = load_data(\"Ybig_test_set.mat\", big_data=True)\n",
"result = validation(poids_reseau,classe1_test,classe2_test)"
]
}
],
"metadata": {
"colab": {
"provenance": []
},
"kernelspec": {
"display_name": "optinonlin",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.13.2"
}
},
"nbformat": 4,
"nbformat_minor": 0
}

BIN
2025/big_test_set.mat Normal file

Binary file not shown.

BIN
2025/big_train_set.mat Normal file

Binary file not shown.

BIN
2025/test_set.mat Normal file

Binary file not shown.

BIN
2025/train_set.mat Normal file

Binary file not shown.