
The Informer's Fate

  
Once upon a time there was a mountain called Balihede. It got to be an entire mountain all on its own, which felt pretty great.

Hah, bet you didn't see that coming.

In the mountain there were woods, there was a temple, and there were those famously nimble mountain dogs. On the mountain stood a tree. The tree was not named "Tall Tree", because that joke is far too old. The tree had a rather peculiar shape; it looked roughly like this.
  
import torch
from torch import nn
import torch.nn.functional as F
import os
import tensorboardX
from torch.utils.data import Dataset
from torch.utils.data import DataLoader

device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

class VGGBaseSimpleS2(nn.Module):
    """A small VGG-style CNN for 1-channel 6x6 inputs with 2 output classes."""

    def __init__(self):
        super(VGGBaseSimpleS2, self).__init__()
        # input: 6x6
        self.conv1 = nn.Sequential(
            nn.Conv2d(1, 12, kernel_size=3, stride=1, padding=1),
            # nn.BatchNorm2d(16),
            nn.ReLU()
        )
        self.max_pooling1 = nn.MaxPool2d(kernel_size=2, stride=1)  # 6x6 -> 5x5
        self.conv2_1 = nn.Sequential(
            nn.Conv2d(12, 24, kernel_size=3, stride=1, padding=1),
            nn.ReLU()
        )
        self.max_pooling2_1 = nn.MaxPool2d(kernel_size=2, stride=1)  # 5x5 -> 4x4, but unused in forward()
        self.conv2_2 = nn.Sequential(
            nn.Conv2d(24, 24, kernel_size=3, stride=1, padding=1),
            nn.ReLU()
        )
        self.max_pooling2 = nn.MaxPool2d(kernel_size=2, stride=2)  # 5x5 -> 2x2
        self.fc = nn.Linear(24 * 2 * 2, 2)  # flattened 24x2x2 features -> 2 classes

    def forward(self, x):
        batchsize = x.size(0)
        out = self.conv1(x)            # [B, 12, 6, 6]
        out = self.max_pooling1(out)   # [B, 12, 5, 5]
        out = self.conv2_1(out)        # [B, 24, 5, 5]
        out = self.conv2_2(out)        # [B, 24, 5, 5]
        out = self.max_pooling2(out)   # [B, 24, 2, 2]
        out = out.view(batchsize, -1)  # flatten to [B, 96]
        out = self.fc(out)
        out = F.log_softmax(out, dim=1)  # log-probabilities over the 2 classes
        return out
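A quick way to check the layer-size comments above (a sketch added here, not part of the chapter's listing) is to push a dummy batch through the net and look at the output shape.

net_check = VGGBaseSimpleS2()
out_check = net_check(torch.zeros(4, 1, 6, 6))  # a dummy batch of four 1-channel 6x6 inputs
print(out_check.shape)  # torch.Size([4, 2]): one pair of log-probabilities per sample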
  
# X_train / y_train and X_validate / y_validate are assumed to be defined at
# module level before these datasets are instantiated: each row holds 36
# features (reshaped below into a 6x6 "image") plus an integer class label.
class TrainingDataSet(Dataset):
    def __init__(self):
        super(TrainingDataSet, self).__init__()
        self.data_dict_X = X_train
        self.data_dict_y = y_train

    def __getitem__(self, index):
        t = self.data_dict_X[index, 0:36]
        t = torch.tensor(t).view(6, 6)  # 36 features -> 6x6 grid
        return t, self.data_dict_y[index]

    def __len__(self):
        return len(self.data_dict_y)

class TestDataSet(Dataset):
    def __init__(self):
        super(TestDataSet, self).__init__()
        self.data_dict_X = X_validate
        self.data_dict_y = y_validate

    def __getitem__(self, index):
        t = self.data_dict_X[index, 0:36]
        t = torch.tensor(t).view(6, 6)  # 36 features -> 6x6 grid
        return t, self.data_dict_y[index]

    def __len__(self):
        return len(self.data_dict_y)
  
def cnn_classification():
    batch_size = 256
    # note: shuffle=False keeps the original sample order even for training;
    # shuffle=True would be the more usual choice
    trainDataLoader = DataLoader(TrainingDataSet(), batch_size=batch_size, shuffle=False)
    testDataLoader = DataLoader(TestDataSet(), batch_size=batch_size, shuffle=False)

    epoch_num = 200
    lr = 0.001

    net = VGGBaseSimpleS2().to(device)
    print(net)

    # loss: forward() already returns log-probabilities (log_softmax), so
    # NLLLoss is the matching criterion; CrossEntropyLoss here would apply
    # log-softmax a second time
    loss_func = nn.NLLLoss()

    # optimizer
    optimizer = torch.optim.Adam(net.parameters(), lr=lr)
    # optimizer = torch.optim.SGD(net.parameters(), lr=lr, momentum=0.9, weight_decay=5e-4)
    scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=5, gamma=0.9)

    if not os.path.exists("logCNN"):
        os.mkdir("logCNN")
    writer = tensorboardX.SummaryWriter("logCNN")

    for epoch in range(epoch_num):
        # per-epoch running totals for loss, accuracy, and confusion-matrix counts
        train_sum_loss = 0
        train_sum_correct = 0
        train_sum_fp = 0
        train_sum_fn = 0
        train_sum_tp = 0
        train_sum_tn = 0

        for i, data in enumerate(trainDataLoader):
            net.train()
            inputs, labels = data
            inputs = inputs.unsqueeze(1).to(torch.float32)  # add a channel dim: [B, 1, 6, 6]
            labels = labels.type(torch.LongTensor)
            inputs, labels = inputs.to(device), labels.to(device)

            outputs = net(inputs)
            loss = loss_func(outputs, labels)

            optimizer.zero_grad()
            loss.backward()
            optimizer.step()

            _, pred = torch.max(outputs.data, dim=1)
            acc = pred.eq(labels.data).cpu().sum()

            # per-batch confusion-matrix counts (class 1 taken as positive)
            one = torch.ones_like(labels)
            zero = torch.zeros_like(labels)
            tn = ((labels == zero) & (pred == zero)).sum()
            tp = ((labels == one) & (pred == one)).sum()
            fp = ((labels == zero) & (pred == one)).sum()
            fn = ((labels == one) & (pred == zero)).sum()

            train_sum_fn += fn.item()
            train_sum_fp += fp.item()
            train_sum_tn += tn.item()
            train_sum_tp += tp.item()
            train_sum_loss += loss.item()
            train_sum_correct += acc.item()
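The listing above stops inside the epoch loop and leans on X_train, y_train, X_validate, and y_validate, which are never defined in this excerpt. A minimal sketch to exercise it with random stand-in arrays (the sizes here are illustrative guesses, not from the original):

import numpy as np

# hypothetical stand-in data: rows of 36 features with binary labels
X_train = np.random.rand(1000, 36).astype(np.float32)
y_train = np.random.randint(0, 2, size=1000)
X_validate = np.random.rand(200, 36).astype(np.float32)
y_validate = np.random.randint(0, 2, size=200)

cnn_classification()  # prints the net, then runs the (partial) training loop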
  
  
  
(This chapter is unfinished; it continues on the next page.)