Anjana-S committed
Commit ff8f3d4 · 1 Parent(s): e254969

Upload 3 files

Files changed (3)
  1. Perceptron.py +46 -0
  2. __init__.py +46 -0
  3. imdb_perceptron.py +22 -0
Perceptron.py ADDED
@@ -0,0 +1,46 @@
import numpy as np
from tqdm import tqdm


class Perceptron:

    def __init__(self, learning_rate=0.01, epochs=100, activation_function='step'):
        self.bias = 0
        self.learning_rate = learning_rate
        self.max_epochs = epochs
        self.activation_function = activation_function

    def activate(self, x):
        # Every activation is thresholded so the perceptron always emits a 0/1 label.
        if self.activation_function == 'step':
            return 1 if x >= 0 else 0
        elif self.activation_function == 'sigmoid':
            return 1 if (1 / (1 + np.exp(-x))) >= 0.5 else 0
        elif self.activation_function == 'relu':
            return 1 if max(0, x) >= 0.5 else 0

    def fit(self, X, y):
        n_features = X.shape[1]
        # Start from zero (float) weights so the update rule below can accumulate fractional steps.
        self.weights = np.zeros(n_features)
        for epoch in tqdm(range(self.max_epochs)):
            for i in range(len(X)):
                inputs = X[i]
                target = y[i]
                weighted_sum = np.dot(inputs, self.weights) + self.bias
                prediction = self.activate(weighted_sum)
                # Perceptron learning rule: nudge weights and bias by the prediction error.
                error = target - prediction
                self.weights += self.learning_rate * error * inputs
                self.bias += self.learning_rate * error
        print("Training Completed")

    def predict(self, X):
        predictions = []
        for i in range(len(X)):
            inputs = X[i]
            weighted_sum = np.dot(inputs, self.weights) + self.bias
            prediction = self.activate(weighted_sum)
            predictions.append(prediction)
        return predictions
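For quick reference, a minimal usage sketch (not part of this commit) that exercises the class above on a toy AND-gate dataset; the data, hyperparameters, and variable names here are illustrative assumptions only:

import numpy as np
from Perceptron import Perceptron

# Toy AND-gate data: the model should learn to output 1 only for the input (1, 1).
X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
y = np.array([0, 0, 0, 1])

clf = Perceptron(learning_rate=0.1, epochs=20, activation_function='step')
clf.fit(X, y)
print(clf.predict(X))  # expected to converge to [0, 0, 0, 1]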
__init__.py CHANGED
@@ -0,0 +1,46 @@
import numpy as np
from tqdm import tqdm


class Perceptron:

    def __init__(self, learning_rate=0.01, epochs=100, activation_function='step'):
        self.bias = 0
        self.learning_rate = learning_rate
        self.max_epochs = epochs
        self.activation_function = activation_function

    def activate(self, x):
        # Every activation is thresholded so the perceptron always emits a 0/1 label.
        if self.activation_function == 'step':
            return 1 if x >= 0 else 0
        elif self.activation_function == 'sigmoid':
            return 1 if (1 / (1 + np.exp(-x))) >= 0.5 else 0
        elif self.activation_function == 'relu':
            return 1 if max(0, x) >= 0.5 else 0

    def fit(self, X, y):
        n_features = X.shape[1]
        # Start from zero (float) weights so the update rule below can accumulate fractional steps.
        self.weights = np.zeros(n_features)
        for epoch in tqdm(range(self.max_epochs)):
            for i in range(len(X)):
                inputs = X[i]
                target = y[i]
                weighted_sum = np.dot(inputs, self.weights) + self.bias
                prediction = self.activate(weighted_sum)
                # Perceptron learning rule: nudge weights and bias by the prediction error.
                error = target - prediction
                self.weights += self.learning_rate * error * inputs
                self.bias += self.learning_rate * error
        print("Training Completed")

    def predict(self, X):
        predictions = []
        for i in range(len(X)):
            inputs = X[i]
            weighted_sum = np.dot(inputs, self.weights) + self.bias
            prediction = self.activate(weighted_sum)
            predictions.append(prediction)
        return predictions
imdb_perceptron.py ADDED
@@ -0,0 +1,22 @@
from tensorflow.keras.datasets import imdb
from Perceptron import Perceptron
from tensorflow.keras.preprocessing.sequence import pad_sequences
from sklearn.metrics import accuracy_score
import pickle

# Keep only the 5,000 most frequent words of the IMDB review dataset.
top_words = 5000
(X_train, y_train), (X_test, y_test) = imdb.load_data(num_words=top_words)

# Pad/truncate every review to a fixed length so each sample has 500 features.
max_review_length = 500
X_train = pad_sequences(X_train, maxlen=max_review_length)
X_test = pad_sequences(X_test, maxlen=max_review_length)

percep = Perceptron(epochs=100)

percep.fit(X_train, y_train)
pred = percep.predict(X_test)

print(f"Accuracy : {accuracy_score(y_test, pred)}")

# Persist the trained model for later reuse.
with open(r'C:\Users\shahi\Desktop\My Projects\DeepPredictorHub\PP.pkl', 'wb') as file:
    pickle.dump(percep, file)
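A small follow-up sketch (not part of this commit) of how the pickled model could be reloaded and reused later; it assumes the same file path as above and that the Perceptron module is importable at load time:

import pickle
from tensorflow.keras.datasets import imdb
from tensorflow.keras.preprocessing.sequence import pad_sequences
from Perceptron import Perceptron  # the Perceptron module must be importable for pickle to reconstruct the object

# Reload the trained perceptron and score a handful of padded test reviews.
with open(r'C:\Users\shahi\Desktop\My Projects\DeepPredictorHub\PP.pkl', 'rb') as file:
    model = pickle.load(file)

(_, _), (X_test, y_test) = imdb.load_data(num_words=5000)
X_test = pad_sequences(X_test, maxlen=500)
print(model.predict(X_test[:5]), y_test[:5])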