Once upon a time there was a mountain called 巴里赫德. All by itself it made up an entire mountain, and it felt great about it.

Ha, didn't see that coming, did you?

In this mountain there were woods, there was a temple, and there was that nimble dog of the mountains. On the mountain grew a tree. This tree was not called "the tall tree", because that meme is way too old. The tree had a rather odd shape; it looked roughly like this.
import torch
from torch import nn
import torch.nn.functional as F
import os
import tensorboardX
from torch.utils.data import Dataset, DataLoader
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
class VGGBaseSimpleS2(nn.Module):
    def __init__(self):
        super(VGGBaseSimpleS2, self).__init__()
        # input: 1 channel, 6*6
        self.conv1 = nn.Sequential(
            nn.Conv2d(1, 12, kernel_size=3, stride=1, padding=1),
            # nn.BatchNorm2d(12),
            nn.ReLU()
        )
        # 6*6
        self.max_pooling1 = nn.MaxPool2d(kernel_size=2, stride=1)
        # 5*5
        self.conv2_1 = nn.Sequential(
            nn.Conv2d(12, 24, kernel_size=3, stride=1, padding=1),
            nn.ReLU()
        )
        self.max_pooling2_1 = nn.MaxPool2d(kernel_size=2, stride=1)
        # 4*4
        self.conv2_2 = nn.Sequential(
            nn.Conv2d(24, 24, kernel_size=3, stride=1, padding=1),
            nn.ReLU()
        )
        self.max_pooling2 = nn.MaxPool2d(kernel_size=2, stride=2)
        # 2*2
        self.fc = nn.Linear(24 * 2 * 2, 2)
    def forward(self, x):
        batchsize = x.size(0)
        out = self.conv1(x)             # 6*6
        out = self.max_pooling1(out)    # 5*5
        out = self.conv2_1(out)
        out = self.max_pooling2_1(out)  # 4*4, matching the size comments above
        out = self.conv2_2(out)
        out = self.max_pooling2(out)    # 2*2
        out = out.view(batchsize, -1)   # flatten to (batch, 24*2*2)
        out = self.fc(out)
        out = F.log_softmax(out, dim=1) # log-probabilities over the 2 classes
        return out
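A quick shape check of the sizes traced in the comments (a minimal sketch; the dummy batch below is made up purely for illustration):

model = VGGBaseSimpleS2()
dummy = torch.randn(4, 1, 6, 6)  # hypothetical batch: four single-channel 6*6 inputs
print(model(dummy).shape)        # torch.Size([4, 2]): log-probabilities over 2 classes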
class TrainingDataSet(Dataset):
    def __init__(self):
        super(TrainingDataSet, self).__init__()
        # X_train / y_train are assumed to be defined earlier in the post
        self.data_dict_X = X_train
        self.data_dict_y = y_train

    def __getitem__(self, index):
        # take the 36 features of one sample and reshape them into a 6*6 grid
        t = self.data_dict_X[index, 0:36]
        t = torch.tensor(t).view(6, 6)
        return t, self.data_dict_y[index]

    def __len__(self):
        return len(self.data_dict_y)
class TestDataSet(Dataset):
    def __init__(self):
        super(TestDataSet, self).__init__()
        # X_validate / y_validate are assumed to be defined earlier in the post
        self.data_dict_X = X_validate
        self.data_dict_y = y_validate

    def __getitem__(self, index):
        t = self.data_dict_X[index, 0:36]
        t = torch.tensor(t).view(6, 6)
        return t, self.data_dict_y[index]

    def __len__(self):
        return len(self.data_dict_y)
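Both Dataset classes assume X_train, y_train, X_validate and y_validate already exist from earlier in the post, each row being a flat vector of 36 features (one 6*6 grid) with a binary label. To run this snippet on its own, a hypothetical stand-in could look like this (placeholder arrays, not the real features):

import numpy as np  # assumption: the real arrays come from the earlier preprocessing steps
X_train = np.random.rand(1000, 36).astype(np.float32)   # fake flattened 6*6 grids
y_train = np.random.randint(0, 2, size=1000)            # fake binary labels
X_validate = np.random.rand(200, 36).astype(np.float32)
y_validate = np.random.randint(0, 2, size=200)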
def cnn_classification():
    batch_size = 256
    # shuffle the training batches each epoch; validation order does not matter
    trainDataLoader = DataLoader(TrainingDataSet(), batch_size=batch_size, shuffle=True)
    testDataLoader = DataLoader(TestDataSet(), batch_size=batch_size, shuffle=False)

    epoch_num = 200
    lr = 0.001
    net = VGGBaseSimpleS2().to(device)
    print(net)

    # loss: the network already outputs log-probabilities via log_softmax,
    # so NLLLoss is the matching criterion (CrossEntropyLoss would apply
    # log_softmax a second time)
    loss_func = nn.NLLLoss()

    # optimizer
    optimizer = torch.optim.Adam(net.parameters(), lr=lr)
    # optimizer = torch.optim.SGD(net.parameters(), lr=lr, momentum=0.9, weight_decay=5e-4)
    scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=5, gamma=0.9)

    if not os.path.exists("logCNN"):
        os.mkdir("logCNN")
    writer = tensorboardX.SummaryWriter("logCNN")
    for epoch in range(epoch_num):
        # running totals of the loss and confusion-matrix counts for this epoch
        train_sum_loss = 0
        train_sum_correct = 0
        train_sum_fp = 0
        train_sum_fn = 0
        train_sum_tp = 0
        train_sum_tn = 0
        for i, data in enumerate(trainDataLoader):
            net.train()
            inputs, labels = data
            # add the channel dimension: (batch, 6, 6) -> (batch, 1, 6, 6)
            inputs = inputs.unsqueeze(1).to(torch.float32)
            labels = labels.type(torch.LongTensor)