
# -*- coding: utf-8 -*-
"""minimum.ipynb

Automatically generated by Colaboratory.

Original file is located at
    https://colab.research.google.com/drive/13gnmx6JkFtcjK5iAz8Uc8eIrjnSBesjg

# Minimum of a function

Imports
"""

import numpy as np
from matplotlib import pyplot as plt
import torch
from torch import nn

"""Device"""

# Get cpu or gpu device for training.
device = "cuda" if torch.cuda.is_available() else "cpu"
print(f"Using {device} device")

"""The function :

f(x) = x^4 + 0.5 * x^3 - 3 * x^2 - 5 * x + 1


"""

def f(x):
    return x**4 + 0.5 * x**3 - 3 * x**2 - 5 * x + 1

# plot f over [-3, 3]; f uses only NumPy-compatible ops, so it vectorizes
x = np.arange(-3.0, 3.0, 0.01)
plt.plot(x, f(x))
plt.show(block=False)
plt.pause(1)
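
"""Where is the true minimum?

Setting the derivative f'(x) = 4x^3 + 1.5x^2 - 6x - 5 to zero locates the
critical points. A quick NumPy cross-check that the descent methods below can
be compared against (a minimal sketch):
"""

coeffs = [4, 1.5, -6, -5]   # coefficients of f'(x), highest degree first
roots = np.roots(coeffs)
real_roots = roots[np.abs(roots.imag) < 1e-9].real
print(f"critical point(s): {real_roots} ; f there: {f(real_roots)}")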

"""Find the minimum by custom gradient descent"""

# start from a random point in [0, 1)
x = np.random.rand(1)[0]
eps = 1e-6   # finite-difference step; balances truncation and rounding error
g = 1.0      # dummy value so the loop runs at least once

while abs(g) > 0.001:
    # forward-difference approximation of f'(x)
    g = (f(x + eps) - f(x)) / eps
    # gradient-descent step with learning rate 0.01
    x = x - 0.01 * g
    print(f"x={x:6.4f}")

print(f"Minimum at x={x} : f(x)={f(x)}")

"""Find the minimum using autograd and custom optimization"""

# initialization: random starting point, with gradient tracking enabled
x = torch.tensor(np.random.rand(1)).requires_grad_(True)

while x.grad is None or torch.abs(x.grad) > 0.001:
    if x.grad is not None:
        # clear the gradient accumulated by the previous backward pass
        x.grad.zero_()
    # forward pass: evaluate the function
    y = f(x)
    # backward pass: fills x.grad with df/dx
    y.backward()
    # descent step: move opposite to the gradient, outside the autograd graph
    with torch.no_grad():
        x -= 0.01 * x.grad
    print(f"x = {x.item():6.4f}")

print(f"Minimum at x={x.item()} : f(x)={f(x).item()}")

"""Find the minimum using autograd and PyTorch optimizer"""

# initialization: a random starting point, created directly on the device
x = torch.tensor(np.random.rand(1), device=device)
print(f"x = {x}")

# wrap it as a Parameter so an optimizer can update it
# (a Parameter has requires_grad=True by default)
p = torch.nn.Parameter(x)
print(f"p = {p}")

# parameters_to_vector expects an iterable of parameters
x1 = torch.nn.utils.parameters_to_vector([p])
print(f"x1 = {x1}")

optimizer = torch.optim.SGD([p], lr=1e-2)

for step in range(50):
    # read the current value back as a scalar tensor (keeps the autograd link to p)
    x = torch.nn.utils.parameters_to_vector([p])[0]
    y = f(x)
    # compute grads and take one SGD step
    optimizer.zero_grad()
    y.backward()
    optimizer.step()
    print(f"x = {x.item():6.4f}")

x = torch.nn.utils.parameters_to_vector([p])[0]
print(f"Minimum at x={x.item()} : f(x)={f(x).item()}")

"""Finding the minimum using autograd and optimizer applied to a neuron

"""

# Define the model: a single linear neuron with no activation
class NeuralNetwork(nn.Module):
    def __init__(self):
        super().__init__()
        self.flatten = nn.Flatten()
        # a single Linear(1, 1) layer; despite the Colab template, no ReLU
        self.linear_stack = nn.Sequential(
            nn.Linear(1, 1),
        )

    def forward(self, x):
        x = self.flatten(x)
        return self.linear_stack(x)

model = NeuralNetwork().to(device)
print(model)
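
"""Since the input will be fixed at 0, the linear layer's output is exactly its
bias, so the whole network reduces to one trainable scalar. A quick check
(a minimal sketch):
"""

with torch.no_grad():
    bias = model.linear_stack[0].bias
    out = model(torch.zeros(1, 1, device=device))
    print(f"bias = {bias.item():.4f}, model(0) = {out.item():.4f}")  # identical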

optimizer = torch.optim.SGD(model.parameters(), lr=1e-2)

def train():
    model.train()
    # the network's output at the fixed input 0 (i.e. its bias) plays the role of x
    x = model(torch.tensor([[0.0]]).to(device))
    y = f(x)
    optimizer.zero_grad()
    y.backward()
    optimizer.step()
    print(f"x = {x.item():6.4f}")

epochs = 50
for e in range(epochs):
    train()

# final evaluation, outside the autograd graph
model.eval()
with torch.no_grad():
    x = model(torch.tensor([[0.0]]).to(device))
    y = f(x)
print(f"Minimum at x={x.item()} : f(x)={y.item()}")