Commit be09c6ba authored by TheRiPtide

chore: rebase 2

parent 272419d9
1 merge request: !23 feat: deep-learning poly(A) classifier
@@ -32,10 +32,14 @@
 "# importing the libraries\n",
 "import pandas as pd\n",
 "import numpy as np\n",
+<<<<<<< HEAD
 <<<<<<< HEAD
 "import matplotlib.pyplot as plt\n",
 =======
 >>>>>>> d2ef840 (chore: started cnn notebook)
+=======
+"import matplotlib.pyplot as plt\n",
+>>>>>>> 93ea318 (chore: added training function for cnn)
 "\n",
 "# for creating validation set\n",
 "from sklearn.model_selection import train_test_split\n",
@@ -100,6 +104,9 @@
 " x = x.view(x.size(0), -1)\n",
 " x = self.linear_layers(x)\n",
 <<<<<<< HEAD
+<<<<<<< HEAD
+=======
+>>>>>>> 93ea318 (chore: added training function for cnn)
 " return x\n",
 "\n",
 "# defining training function\n",
@@ -134,9 +141,12 @@
 " tr_loss = loss_train.item()\n",
 "\n",
 " return loss_train, loss_val"
+<<<<<<< HEAD
 =======
 " return x"
 >>>>>>> d2ef840 (chore: started cnn notebook)
+=======
+>>>>>>> 93ea318 (chore: added training function for cnn)
 ],
 "metadata": {
 "collapsed": false,
@@ -325,16 +335,25 @@
 "source": [
 "# defining the model\n",
 "model = Net()\n",
+"\n",
 "# defining the optimizer\n",
 "optimizer = Adam(model.parameters(), lr=0.07)\n",
+"\n",
 "# defining the loss function\n",
 "criterion = CrossEntropyLoss()\n",
+<<<<<<< HEAD
 >>>>>>> d2ef840 (chore: started cnn notebook)
+=======
+"\n",
+>>>>>>> 93ea318 (chore: added training function for cnn)
 "# checking if GPU is available\n",
 "if torch.cuda.is_available():\n",
 " model = model.cuda()\n",
 " criterion = criterion.cuda()\n",
 <<<<<<< HEAD
+<<<<<<< HEAD
+=======
+>>>>>>> 93ea318 (chore: added training function for cnn)
 "\n",
 "# defining the number of epochs\n",
 "n_epochs = 25\n",
@@ -346,6 +365,7 @@
 "val_losses = []\n",
 "\n",
 "# training the model\n",
+<<<<<<< HEAD
 "for epoch in tqdm(range(n_epochs)):\n",
 " train_loss, val_loss = train()\n",
 " train_losses.append(train_loss)\n",
@@ -465,6 +485,18 @@
 =======
 "\n"
 >>>>>>> d2ef840 (chore: started cnn notebook)
+=======
+"for epoch in range(n_epochs):\n",
+" train_loss, val_loss = train()\n",
+" train_losses.append(train_loss)\n",
+" val_losses.append(val_loss)\n",
+"\n",
+"# plotting the training and validation loss\n",
+"plt.plot(train_losses, label='Training loss')\n",
+"plt.plot(val_losses, label='Validation loss')\n",
+"plt.legend()\n",
+"plt.show()"
+>>>>>>> 93ea318 (chore: added training function for cnn)
 ],
 "metadata": {
 "collapsed": false,
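
The remaining conflict differs only in the loop header (HEAD wraps range(n_epochs) in tqdm; 93ea318 adds the plain loop together with the loss plot). A sketch of the resolved driver cell, keeping the tqdm variant and assuming the train() from the sketch above; the losses are converted to floats with .item(), since plotting live tensors that still require grad fails in current PyTorch:

from tqdm import tqdm
import matplotlib.pyplot as plt

# defining the number of epochs and the loss history, as in the diff
n_epochs = 25
train_losses = []
val_losses = []

# training the model (HEAD's tqdm progress bar kept)
for epoch in tqdm(range(n_epochs)):
    train_loss, val_loss = train()
    train_losses.append(train_loss.item())  # .item() detaches; the diff appends the raw tensors
    val_losses.append(val_loss.item())

# plotting the training and validation loss (93ea318 side)
plt.plot(train_losses, label='Training loss')
plt.plot(val_losses, label='Validation loss')
plt.legend()
plt.show()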