diff --git a/M2/TP-introduction-NN.ipynb b/M2/TP-introduction-NN.ipynb
new file mode 100644
index 0000000000000000000000000000000000000000..6ee9dd039518fa3376cf3678cb16de03b9dff284
--- /dev/null
+++ b/M2/TP-introduction-NN.ipynb
@@ -0,0 +1,128 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Neural Network\n",
+    "\n",
+    "Complete the constant-rate (fixed step size) descent function `my_descent` below."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "vscode": {
+     "languageId": "julia"
+    }
+   },
+   "outputs": [],
+   "source": [
+    "function my_descent(f,x0::Vector{<:Real};η=0.1,AbsTol= abs(eps()), RelTol = abs(eps()), ε=0.01, nbit_max = 0) # to complete\n",
+    "\n",
+    "    return xₖ, flag, fₖ, ∇fₖ, k\n",
+    "end"
+   ]
+  },
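+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The next cell is only an illustrative sketch of what a constant-rate descent *could* look like; it is not the required `my_descent` implementation. It assumes the gradient `∇f` is passed explicitly (unlike the signature above), and its `flag` convention (0 = gradient small enough, 1 = iteration budget reached) is our own choice."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "vscode": {
+     "languageId": "julia"
+    }
+   },
+   "outputs": [],
+   "source": [
+    "using LinearAlgebra  # for norm\n",
+    "\n",
+    "# Sketch only: fixed step size η, stop when ‖∇f(xₖ)‖ ≤ ε or after nbit_max iterations.\n",
+    "function my_descent_sketch(f, ∇f, x0::Vector{<:Real}; η=0.1, ε=0.01, nbit_max=100)\n",
+    "    xₖ = float.(copy(x0))\n",
+    "    ∇fₖ = ∇f(xₖ)\n",
+    "    k, flag = 0, 1\n",
+    "    while k < nbit_max && norm(∇fₖ) > ε\n",
+    "        xₖ -= η*∇fₖ        # constant-rate update\n",
+    "        ∇fₖ = ∇f(xₖ)\n",
+    "        k += 1\n",
+    "    end\n",
+    "    norm(∇fₖ) ≤ ε && (flag = 0)\n",
+    "    return xₖ, flag, f(xₖ), ∇fₖ, k\n",
+    "end\n",
+    "\n",
+    "# Example use on f(x) = x[1]^2 + 9x[2]^2 with its analytic gradient:\n",
+    "# my_descent_sketch(x -> x[1]^2 + 9x[2]^2, x -> [2x[1], 18x[2]], [9.0, 1.0])"
+   ]
+  },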
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "And apply this code to solve the following problem\n",
+    "\n",
+    "$$(P)\\left\\{\\begin{array}{l}\n",
+    "\\min\\; f(\\beta) = \\frac{1}{n}\\|X\\beta - y\\|^2\\\\\n",
+    "\\beta\\in\\mathbb{R}^2\n",
+    "\\end{array}\\right.\n",
+    "$$\n",
+    "with\n",
+    "$$X = \\begin{pmatrix}\n",
+    "1 & x_1\\\\\n",
+    "\\vdots & \\vdots\\\\\n",
+    "1 & x_5\n",
+    "\\end{pmatrix}\n",
+    "\\;\\;\\textrm{and}\\;\\;\n",
+    "y = \\begin{pmatrix}\n",
+    "y_1\\\\\n",
+    "\\vdots\\\\\n",
+    "y_5\n",
+    "\\end{pmatrix}\n",
+    "$$\n",
+    "\n",
+    "You'll print the first 7 iterations for the initial guess\n",
+    "$x_0 = (n/2)(9,1)$."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "vscode": {
+     "languageId": "julia"
+    }
+   },
+   "outputs": [],
+   "source": [
+    "using Plots\n",
+    "# n = 5\n",
+    "x1 = 1.; x2 = sqrt(5*9/2-x1^2);\n",
+    "x = [-x2,-x1,0.,x1,x2]\n",
+    "n = length(x)\n",
+    "X = [ones(n) x] # design matrix of the problem (P)\n",
+    "a₁ = 1; a₀ = 2\n",
+    "y = a₁*x .+ a₀ # model\n"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## "
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "vscode": {
+     "languageId": "julia"
+    }
+   },
+   "outputs": [],
+   "source": [
+    "function descent_backtrac(f,x0::Vector{<:Real};AbsTol= abs(eps()), RelTol = abs(eps()), ε=0.01, nbit_max = 0)\n",
+    "    # to complete\n",
+    "\n",
+    "    return xₖ, flag, fₖ, ∇fₖ, k\n",
+    "end"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "vscode": {
+     "languageId": "julia"
+    }
+   },
+   "outputs": [],
+   "source": [
+    "using Plots\n",
+    "# f and xsol (the objective and its minimiser) are assumed to be defined above\n",
+    "xx = range(-10*(n/2)-xsol[1],stop=10*(n/2)-xsol[1],length=100)\n",
+    "yy = range(-9*(n/2)-xsol[2],stop=9*(n/2)-xsol[2],length=100)\n",
+    "f_contour(x,y) = f([x,y])\n",
+    "z = @. f_contour(xx', yy)\n",
+    "nb_levels = 7\n",
+    "x0 = (n/2)*[9,1]"
+   ]
+  }
+ ],
+ "metadata": {
+  "language_info": {
+   "name": "julia"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/M2/TP-steepest-descent-quad.ipynb b/M2/TP-steepest-descent-quad.ipynb
new file mode 100644
index 0000000000000000000000000000000000000000..fb8a13ce5c7b6984490a2de8ec190abc1987ac5f
--- /dev/null
+++ b/M2/TP-steepest-descent-quad.ipynb
@@ -0,0 +1,158 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Steepest descent for a quadratic function\n",
+    "$f(x) = x^TAx + b^Tx + c$, where $A$ is symmetric and positive definite.\n",
+    "\n",
+    "Complete the `steepest_descent_quad` function."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "vscode": {
+     "languageId": "julia"
+    }
+   },
+   "outputs": [],
+   "source": [
+    "function steepest_descent_quad(A::Matrix{<:Real},b::Vector{<:Real},c::Real,x0::Vector{<:Real};AbsTol= abs(eps()), RelTol = abs(eps()), ε=0.01, nbit_max = 0)\n",
+    "    # to complete\n",
+    "\n",
+    "    return xₖ, flag, fₖ, ∇fₖ, k\n",
+    "end"
+   ]
+  },
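+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "As a hint, the cell below is a minimal sketch of an exact-line-search steepest descent for this quadratic, not necessarily the expected `steepest_descent_quad`: it uses $\\nabla f(x) = (A+A^T)x + b$ and the exact step $\\alpha_k = \\frac{g_k^Tg_k}{g_k^T(A+A^T)g_k}$ along $-g_k$, and its `flag`/stopping conventions (gradient norm vs. iteration budget) are assumptions."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "vscode": {
+     "languageId": "julia"
+    }
+   },
+   "outputs": [],
+   "source": [
+    "using LinearAlgebra  # for norm and dot\n",
+    "\n",
+    "# Sketch only: exact step along -∇f for f(x) = x'Ax + b'x + c, A symmetric positive definite.\n",
+    "function steepest_descent_quad_sketch(A, b, c, x0; ε=0.01, nbit_max=100)\n",
+    "    M = A + A'                    # ∇f(x) = M*x + b, M is the Hessian\n",
+    "    xₖ = float.(copy(x0))\n",
+    "    gₖ = M*xₖ + b\n",
+    "    k, flag = 0, 1\n",
+    "    while k < nbit_max && norm(gₖ) > ε\n",
+    "        αₖ = dot(gₖ, gₖ) / dot(gₖ, M*gₖ)   # exact minimiser of α ↦ f(xₖ - α gₖ)\n",
+    "        xₖ -= αₖ*gₖ\n",
+    "        gₖ = M*xₖ + b\n",
+    "        k += 1\n",
+    "    end\n",
+    "    norm(gₖ) ≤ ε && (flag = 0)\n",
+    "    fₖ = dot(xₖ, A*xₖ) + dot(b, xₖ) + c\n",
+    "    return xₖ, flag, fₖ, gₖ, k\n",
+    "end"
+   ]
+  },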
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "vscode": {
+     "languageId": "julia"
+    }
+   },
+   "outputs": [],
+   "source": [
+    "using Plots\n",
+    "# n = 5\n",
+    "x1 = 1.; x2 = sqrt(5*9/2-x1^2);\n",
+    "x = [-x2,-x1,0.,x1,x2]\n",
+    "n = length(x)\n",
+    "X = [ones(n) x]\n",
+    "a₁ = 1; a₀ = 2\n",
+    "y = a₁*x .+ a₀ # model\n",
+    "A = (1/n)*X'*X # A = [1 0 ; 0 9]\n",
+    "println(\"A = \", A)\n",
+    "b = -(2/n)*X'*y\n",
+    "c = (1/n)*y'*y\n",
+    "println(\"b = \", b)\n",
+    "println(\"n = \", n)\n",
+    "\n",
+    "f(x) = x'*A*x + b'*x + c\n",
+    "xsol = -(A' + A)\\b\n",
+    "xx = range(-10*(n/2)-xsol[1],stop=10*(n/2)-xsol[1],length=100)\n",
+    "yy = range(-9*(n/2)-xsol[2],stop=9*(n/2)-xsol[2],length=100)\n",
+    "f_contour(x,y) = f([x,y])\n",
+    "z = @. f_contour(xx', yy)\n",
+    "nb_levels = 7\n",
+    "x0 = (n/2)*[9,1]\n",
+    "xₖ = x0\n",
+    "Xsol = zeros(nb_levels+1,2)\n",
+    "Xsol[1,:] = x0\n",
+    "p1 = plot()\n",
+    "for nbit in 1:nb_levels\n",
+    "    xsol, flag, fsol, ∇f_xsol, nb_iter = steepest_descent_quad(A,b,c,x0,nbit_max=nbit)\n",
+    "    plot!(p1,[xₖ[1],xsol[1]],[xₖ[2],xsol[2]],arrow=true)\n",
+    "    xₖ = xsol\n",
+    "    Xsol[nbit+1,:] = xsol\n",
+    "end\n",
+    "\n",
+    "levels = [f(Xsol[k,:]) for k in nb_levels+1:-1:1]\n",
+    "contour!(p1,xx,yy,z,levels=levels,cbar=false,color=:turbo)\n",
+    "\n",
+    "# algo_Newton is assumed to be defined beforehand (e.g. in a previous session)\n",
+    "xsol, flag, fsol, ∇f_xsol, nb_iter = algo_Newton(f,x0)\n",
+    "println()\n",
+    "println(\"Result with Newton's algorithm:\")\n",
+    "println(\"xsol = \", xsol)\n",
+    "println(\"flag = \", flag)\n",
+    "println(\"fsol = \", fsol)\n",
+    "println(\"∇f_xsol = \", ∇f_xsol)\n",
+    "println(\"nb_iter = \", nb_iter)\n",
+    "scatter!(p1,[xsol[1]],[xsol[2]])\n",
+    "plot!(p1,[x0[1],xsol[1]],[x0[2],xsol[2]],arrow=true)\n",
+    "plot(p1,legend=false)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## "
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "vscode": {
+     "languageId": "julia"
+    }
+   },
+   "outputs": [],
+   "source": [
+    "function descent_backtrac(f,x0::Vector{<:Real};AbsTol= abs(eps()), RelTol = abs(eps()), ε=0.01, nbit_max = 0)\n",
+    "    # to complete\n",
+    "\n",
+    "    return xₖ, flag, fₖ, ∇fₖ, k\n",
+    "end"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "vscode": {
+     "languageId": "julia"
+    }
+   },
+   "outputs": [],
+   "source": [
+    "using Plots\n",
+    "xx = range(-10*(n/2)-xsol[1],stop=10*(n/2)-xsol[1],length=100)\n",
+    "yy = range(-9*(n/2)-xsol[2],stop=9*(n/2)-xsol[2],length=100)\n",
+    "f_contour(x,y) = f([x,y])\n",
+    "z = @. f_contour(xx', yy)\n",
+    "nb_levels = 7\n",
+    "x0 = (n/2)*[9,1]\n",
+    "xₖ = x0\n",
+    "p1 = plot()\n",
+    "for nbit in 1:nb_levels\n",
+    "    xsol, flag, fsol, ∇f_xsol, nb_iter = descent_backtrac(f,x0,nbit_max=nbit)\n",
+    "    #xsol, flag, fsol, ∇f_xsol, nb_iter = my_descent(f,x0,nbit_max=nbit)\n",
+    "    println(\"xsol = \", xsol)\n",
+    "    println(\"flag = \", flag)\n",
+    "    println(\"fsol = \", fsol)\n",
+    "    println(\"∇f_xsol = \", ∇f_xsol)\n",
+    "    println(\"nb_iter = \", nb_iter)\n",
+    "    plot!(p1,[xₖ[1],xsol[1]],[xₖ[2],xsol[2]],arrow=true)\n",
+    "    xₖ = xsol\n",
+    "end\n",
+    "levels = [f(Xsol[k,:]) for k in nb_levels+1:-1:1]\n",
+    "contour!(p1,xx,yy,z,levels=levels,cbar=false,color=:turbo)\n",
+    "plot(p1,legend=false)"
+   ]
+  }
+ ],
+ "metadata": {
+  "language_info": {
+   "name": "julia"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git "a/M2/nn \342\200\223 Cours.pdf" "b/M2/nn \342\200\223 Cours.pdf"
new file mode 100644
index 0000000000000000000000000000000000000000..6135fce7045798343f11f4e4b0bc2a249a9b4648
Binary files /dev/null and "b/M2/nn \342\200\223 Cours.pdf" differ
diff --git "a/M2/optimization \342\200\223 Cours.pdf" "b/M2/optimization \342\200\223 Cours.pdf"
index fbff3c0d0d15d823a9795001ea8675eaf5656122..594fecbd2fe146cbde6ad9016fc347604604e1ad 100644
Binary files "a/M2/optimization \342\200\223 Cours.pdf" and "b/M2/optimization \342\200\223 Cours.pdf" differ