-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathNeuron.py
More file actions
55 lines (47 loc) · 2.45 KB
/
Neuron.py
File metadata and controls
55 lines (47 loc) · 2.45 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
from tinyTensor.Operation import Operation
from tinyTensor.Node import Node
import tinyTensor.Graph
import random as rand
class Neuron(Node):
    """A single neuron node: bias + weighted inputs, summed, passed through an
    activation function, with optional dropout wrapping the activation."""

    def __init__(self, input_neurons: list = None, activation_fnct: str = "relu", dropout: float = 0.0):
        """Build the neuron's sub-graph (weights, weighted sum, activation, dropout).

        :param input_neurons: Nodes feeding this neuron; defaults to no inputs.
        :param activation_fnct: name of the activation function (e.g. "relu").
        :param dropout: dropout probability, in [0, 1] inclusive.
        :raises TypeError: if input_neurons contains a non-Node object.
        :raises ValueError: if dropout is outside [0, 1].
        """
        super().__init__()
        self.name = "Neuron output"
        # BUG FIX: the original default was a mutable `[]`, shared across every
        # call of __init__; use a None sentinel instead.
        if input_neurons is None:
            input_neurons = []
        # input validation — TypeError/ValueError are Exception subclasses, so
        # callers with `except Exception` handlers are unaffected.
        if not all(isinstance(x, Node) for x in input_neurons):
            raise TypeError("'input_neurons' list (parameter) should only contain objects of type 'Node'.")
        if dropout > 1 or dropout < 0:
            raise ValueError("'dropout' (parameter) should have a value between 0(inclusive) and 1(inclusive).")
        self.activation = activation_fnct
        self.dropout_percentage = dropout
        self.inputNeurons = input_neurons
        ########################################################################################################################
        # Neuron structure
        ########################################################################################################################
        self.weighted_inputs = []
        self.input_weights = []
        # bias term at index 0: a random weight multiplied by the constant -1
        self.input_weights.append(Node.variable(rand.randrange(0, 1000) / 1000))
        self.weighted_inputs.append(self.input_weights[0] * -1)
        # normal weighted inputs; enumerate from 1 because the bias occupies index 0
        for index, node in enumerate(input_neurons, start=1):
            self.input_weights.append(Node.variable(rand.randrange(0, 1000) / 1000))  # random initialisation of input weights
            self.weighted_inputs.append(self.input_weights[index] * node)
        # computing weighted sum of inputs
        self.weighted_sum_of_inputs = Operation.sum(self.weighted_inputs)
        # BUG FIX: the activation was hard-coded to "relu", silently ignoring
        # the activation_fnct parameter; use the stored choice instead.
        self.activation_function = Operation(self.weighted_sum_of_inputs, self.activation)
        # dropout wraps the activation output when requested
        if self.dropout_percentage > 0:
            self.dropoutNode = Node.dropout(dropout)
            self.dropoutNode.addInputs(self.activation_function)
            self.inputNodes = [self.dropoutNode]
        else:
            self.inputNodes = [self.activation_function]
        tinyTensor.Graph._default_graph.appendNode(self)

    def compute(self, step):
        """Forward pass with per-step caching: recompute only when `step` advances.

        :param step: the current graph evaluation step; used as a cache token.
        :returns: self. (BUG FIX: the original returned self on the cached
                  branch but None after recomputing; now consistent.)
        """
        if self.step != step:
            self.step = step
            # the single input node is either the dropout node or the activation
            self.value = self.inputNodes[0].value
        return self