[MOD] defined the neural net as well as the loss function and optimization algorithm

Eduardo Cueto Mendoza 2020-04-23 17:36:20 -06:00
parent f1883d1a11
commit 21b5ce8dd4
1 changed file with 182 additions and 13 deletions


@@ -2,25 +2,19 @@
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"execution_count": 24,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"/Users/eddie/.pyenv/versions/3.7.6/envs/pytorch/lib/python3.7/site-packages/pandas/compat/__init__.py:117: UserWarning: Could not import the lzma module. Your installed Python is incomplete. Attempting to use lzma compression will result in a RuntimeError.\n",
" warnings.warn(msg)\n"
]
}
],
"outputs": [],
"source": [
"import numpy as np\n",
"import torch\n",
"import torch.nn as nn\n",
"import pandas as pd\n",
"from sklearn.preprocessing import StandardScaler\n",
"from torch.utils.data import Dataset"
"from torch.utils.data import Dataset\n",
"from torch.utils.data import DataLoader\n",
"from torch.nn import BCELoss\n",
"from torch.optim import SGD"
]
},
{
@@ -313,12 +307,187 @@
"y.shape"
]
},
{
"cell_type": "code",
"execution_count": 15,
"metadata": {},
"outputs": [],
"source": [
"class Dataset(Dataset):\n",
" def __init__(self,x,y):\n",
" self.x = x\n",
" self.y = y\n",
" \n",
" def __getitem__(self, index):\n",
" return self.x[index], self.y[index]\n",
" \n",
" def __len__(self):\n",
" return len(self.x)\n",
" "
]
},
{
"cell_type": "code",
"execution_count": 16,
"metadata": {},
"outputs": [],
"source": [
"dataset = Dataset(x,y)"
]
},
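{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Illustrative sketch (not part of the original commit): __getitem__ lets the\n",
"# dataset be indexed directly, returning one (features, label) pair;\n",
"# the names sample_x and sample_y are assumptions for this example only.\n",
"sample_x, sample_y = dataset[0]\n",
"sample_x.shape, sample_y.shape"
]
},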
{
"cell_type": "code",
"execution_count": 17,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"768"
]
},
"execution_count": 17,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"len(dataset)"
]
},
{
"cell_type": "code",
"execution_count": 18,
"metadata": {},
"outputs": [],
"source": [
"train_loader = DataLoader(dataset=dataset,\n",
" batch_size=32,\n",
" shuffle=True)"
]
},
{
"cell_type": "code",
"execution_count": 19,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"<torch.utils.data.dataloader.DataLoader at 0x12ced9810>"
]
},
"execution_count": 19,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"train_loader"
]
},
{
"cell_type": "code",
"execution_count": 20,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"There are 24 batches in in the dataset\n",
"For one iteration (baatch) there are:\n",
"Data: torch.Size([32, 7])\n",
"lables: torch.Size([32, 1])\n"
]
}
],
"source": [
"# visualization of the train loader\n",
"print(\"There are {} batches in in the dataset\".format(len(train_loader)))\n",
"for (x,y) in train_loader:\n",
" print(\"For one iteration (baatch) there are:\")\n",
" print(\"Data: {}\".format(x.shape))\n",
" print(\"lables: {}\".format(y.shape))\n",
" break\n",
" "
]
},
{
"cell_type": "code",
"execution_count": 21,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"24.0"
]
},
"execution_count": 21,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"768/32"
]
},
{
"cell_type": "code",
"execution_count": 22,
"metadata": {},
"outputs": [],
"source": [
"class Model(nn.Module):\n",
" def __init__(self, input_features,output_features): #,hidden_layer_1,hidden_layer_2,): caan be done this way or hard coded\n",
" super(Model, self).__init__()\n",
" self.fc1 = nn.Linear(input_features, 5)\n",
" self.fc2 = nn.Linear(5,4)\n",
" self.fc3 = nn.Linear(4,3)\n",
" self.fc4 = nn.Linear(3,output_features)\n",
" self.sigmoid = nn.Sigmoid()\n",
" self.tanh = nn.Tanh()\n",
" \n",
" def forward(self,x):\n",
" out = self.fc1(x)\n",
" out = self.tanh(out)\n",
" out = self.fc2(out)\n",
" out = self.tanh(out)\n",
" out = self.fc3(out)\n",
" out = self.tanh(out)\n",
" out = self.fc4(out)\n",
" out = self.sigmoid(out)\n",
" \n",
" return out\n",
" "
]
},
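{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Illustrative sketch (not part of the original commit): a quick shape check\n",
"# of the architecture with a random dummy batch; dummy_model and dummy_x are\n",
"# assumptions made for this example only.\n",
"dummy_model = Model(7, 1)\n",
"dummy_x = torch.randn(4, 7)\n",
"dummy_out = dummy_model(dummy_x)\n",
"dummy_out.shape  # expected: torch.Size([4, 1]), probabilities from the final sigmoid"
]
},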
{
"cell_type": "code",
"execution_count": 26,
"metadata": {},
"outputs": [],
"source": [
"# The definition of the class Model object nets\n",
"net = Model(7,1)\n",
"\n",
"# Binary cross entropy was chosen as a the loss function\n",
"criterion = BCELoss(reduction='mean')\n",
"\n",
"# Define the optimizer\n",
"optimizer = SGD(net.parameters(), lr=0.1, momentum=0.9)\n",
"\n"
]
},
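{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Illustrative sketch (not part of the original commit) of how net, criterion\n",
"# and optimizer could be wired together in a training loop; the epoch count and\n",
"# the float() casts on the batches are assumptions, not part of this commit.\n",
"epochs = 10\n",
"for epoch in range(epochs):\n",
"    for inputs, labels in train_loader:\n",
"        inputs = inputs.float()\n",
"        labels = labels.float()\n",
"        # forward pass\n",
"        outputs = net(inputs)\n",
"        loss = criterion(outputs, labels)\n",
"        # backward pass and parameter update\n",
"        optimizer.zero_grad()\n",
"        loss.backward()\n",
"        optimizer.step()\n",
"    print(\"Epoch {}/{}, loss: {:.3f}\".format(epoch + 1, epochs, loss.item()))"
]
},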
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
"source": [
" "
]
}
],
"metadata": {