<code>
import torch
import torch.nn as nn
import torch.nn.functional as F


class Netzwerk(nn.Module):
    def __init__(self):
        super(Netzwerk, self).__init__()
        # four conv/pool stages that shrink the sequence step by step
        self.conv1 = nn.Conv1d(12, 24, kernel_size=15, stride=1, padding=1)
        self.pool1 = nn.MaxPool1d(kernel_size=3, stride=2, padding=1)
        self.conv2 = nn.Conv1d(24, 32, kernel_size=10, padding=1)
        self.pool2 = nn.MaxPool1d(2, stride=2, padding=1)
        self.conv3 = nn.Conv1d(32, 24, kernel_size=5, padding=1)
        self.pool3 = nn.MaxPool1d(3, stride=2, padding=1)
        self.conv4 = nn.Conv1d(24, 12, kernel_size=3, padding=1)
        self.pool4 = nn.MaxPool1d(3, stride=2, padding=1)

        # fully connected classifier head
        self.lin1 = nn.Linear(12*61, 40)
        #self.lin2 = nn.Linear(800, 40)
        self.lin3 = nn.Linear(40, 10)
        self.lin4 = nn.Linear(10, 2)

        # bookkeeping for training statistics
        self.history_loss = []
        self.history_eval = []
        self.classific_accuracy_training = []
        self.current_epoch = 0

    def forward(self, x):
        x = self.pool1(F.relu(self.conv1(x)))
        x = self.pool2(F.relu(self.conv2(x)))
        x = self.pool3(F.relu(self.conv3(x)))
        x = self.pool4(F.relu(self.conv4(x)))
        # reshape so the conv output can be passed to the linear layers
        x = x.view(-1, 12*61)
        x = F.relu(self.lin1(x))
        #x = F.relu(self.lin2(x))
        x = F.relu(self.lin3(x))
        x = self.lin4(x)
        return x

    def num_flat_features(self, x):
        """Number of features per sample when flattening all
        dimensions except the batch dimension (probably unused here)."""
        size = x.size()[1:]
        num = 1
        for i in size:
            num *= i
        return num
</code>
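A quick sanity check of the flattened size (not part of the original page, and the input length of 1000 samples and the batch size of 4 are illustrative assumptions): with the kernel, stride, and padding values above, 1000 samples per channel shrink to 61 time steps after the four conv/pool stages, which matches the 12*61 in x.view(-1, 12*61).

<code>
import torch

# Minimal sketch: run a dummy batch through the network to confirm
# the shapes. Input length 1000 is an assumption; any length from
# 993 to 1008 reduces to 61 steps under the conv/pool arithmetic
# above, and other lengths make the view(-1, 12*61) call fail.
net = Netzwerk()
dummy = torch.randn(4, 12, 1000)  # (batch, channels, samples)
out = net(dummy)
print(out.shape)                  # torch.Size([4, 2])
</code>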