Telereview/code/Reconnaissance Image/Fingers5.py

import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torchvision import datasets, transforms
import matplotlib.pyplot as plt
trainSet = datasets.ImageFolder(r'C:\Users\kesha\Desktop\TelecomParis\PACT\DownloadedDataset\train',
                                transform=transforms.ToTensor())
valSet = datasets.ImageFolder(r'C:\Users\kesha\Desktop\TelecomParis\PACT\DownloadedDataset\val',
                              transform=transforms.ToTensor())
trainloader = torch.utils.data.DataLoader(trainSet,
                                          batch_size=50,
                                          shuffle=True)
valloader = torch.utils.data.DataLoader(valSet,
                                        batch_size=50,
                                        shuffle=True)
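# Added sketch: ImageFolder infers the class labels from the subfolder names,
# so printing them confirms the 6 classes that the final fc2 layer must match.
print(trainSet.classes)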
class Net(nn.Module):
    def __init__(self):
        super().__init__()
        # nn.Conv2d(in_channels, out_channels (number of filters), kernel_size)
        self.conv1 = nn.Conv2d(3, 16, 3)
        self.pool = nn.MaxPool2d(2, 2)
        self.conv2 = nn.Conv2d(16, 32, 3)
        self.conv3 = nn.Conv2d(32, 64, 3)
        self.fc1 = nn.Linear(64 * 14 * 14, 16)
        self.fc2 = nn.Linear(16, 6)

    def forward(self, x):
        x = self.pool(F.relu(self.conv1(x)))
        # size: 16x126x126 after conv1, then 16x63x63 after pooling (input 3x128x128)
        x = self.pool(F.relu(self.conv2(x)))
        # size: 32x61x61 after conv2, then 32x30x30 after pooling
        x = self.pool(F.relu(self.conv3(x)))
        # size: 64x28x28 after conv3, then 64x14x14 after pooling
        x = torch.flatten(x, 1)
        x = F.relu(self.fc1(x))
        x = self.fc2(x)
        return x
net = Net()
print(net)
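# Added sketch: a quick sanity check that a 3x128x128 input really flattens to
# 64*14*14 features before fc1, matching the size comments in forward() above.
with torch.no_grad():
    x = torch.zeros(1, 3, 128, 128)
    x = net.pool(F.relu(net.conv1(x)))
    x = net.pool(F.relu(net.conv2(x)))
    x = net.pool(F.relu(net.conv3(x)))
    print(x.shape)  # expected: torch.Size([1, 64, 14, 14])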
criterion = nn.CrossEntropyLoss()
optimizer = optim.RMSprop(net.parameters(), lr=0.001)
# fall back to CPU when no GPU is available, and actually move the model there
# (the original defined the device but never used it)
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
net.to(device)
Epoch = []
Loss = []
for epoch in range(1, 7):
    print('Starting epoch ' + str(epoch))
    current_loss = 0
    for i, data in enumerate(trainloader, 0):
        inputs, labels = data
        inputs, labels = inputs.to(device), labels.to(device)
        # very important: clear the gradients accumulated by the previous batch
        optimizer.zero_grad()
        output = net(inputs)
        loss = criterion(output, labels)
        loss.backward()
        optimizer.step()
        current_loss += loss.item()
    # record the accumulated loss once per epoch; the lists are initialised
    # before the loop so the plot covers every epoch, not just the last one
    print('epoch: ', epoch, ' loss: ', current_loss)
    Loss.append(current_loss)
    Epoch.append(epoch)
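# Added sketch: the validation set loaded above is otherwise unused, so here is
# a minimal accuracy check on valloader over the same 6 ImageFolder classes.
net.eval()
correct, total = 0, 0
with torch.no_grad():
    for inputs, labels in valloader:
        inputs, labels = inputs.to(device), labels.to(device)
        predictions = net(inputs).argmax(dim=1)
        correct += (predictions == labels).sum().item()
        total += labels.size(0)
print('validation accuracy:', correct / total)
net.train()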
plt.plot(Epoch, Loss)
plt.title('Cost function value per epoch')
plt.show()
#to save a model: torch.save(net.state_dict(), file_location)
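# Added sketch expanding the note above: save the trained weights and reload
# them into a fresh Net instance; 'fingers5.pt' is a placeholder file name.
torch.save(net.state_dict(), 'fingers5.pt')
net2 = Net()
net2.load_state_dict(torch.load('fingers5.pt', map_location=device))
net2.eval()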