```python
# Imports and device selection (set up in the earlier parts of this series)
import torch
import torch.nn as nn
import torch.optim as optim
import matplotlib.pyplot as plt
from torchvision import models

device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')


# 4. Define the MobileNetV2 model
def create_mobilenet_v2(pretrained=True, num_classes=10):
    # Note: newer torchvision releases prefer the weights=... argument over pretrained=...
    model = models.mobilenet_v2(pretrained=pretrained)

    # MobileNetV2's classifier structure:
    # (classifier): Sequential(
    #   (0): Dropout(p=0.2, inplace=False)
    #   (1): Linear(in_features=1280, out_features=1000, bias=True)
    # )

    # Replace the last fully connected layer:
    # read the input feature count of the classifier's final linear layer
    in_features = model.classifier[1].in_features
    model.classifier[1] = nn.Linear(in_features, num_classes)

    return model.to(device)


# 5. Freeze/unfreeze the model's layers
def freeze_model(model, freeze=True):
    """Freeze or unfreeze the parameters of the model's feature-extraction layers."""
    # The feature extractor of MobileNetV2 lives under 'features'
    for param in model.features.parameters():
        param.requires_grad = not freeze

    # Report the freeze status
    frozen_params = sum(p.numel() for p in model.parameters() if not p.requires_grad)
    total_params = sum(p.numel() for p in model.parameters())
    if freeze:
        print(f"Froze the feature-layer parameters ({frozen_params}/{total_params} parameters)")
    else:
        print(f"Unfroze all parameters ({total_params}/{total_params} parameters trainable)")

    return model
```
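The training step below assumes `train_loader` and `test_loader` already exist; they come from the data-preparation part of this series, which is not included in this excerpt. As a rough placeholder, here is a minimal sketch of what that pipeline could look like, assuming CIFAR-10 (consistent with `num_classes=10`) resized to 224×224 and normalized with ImageNet statistics; the dataset choice, batch size, and transform details are assumptions, not taken from this post.

```python
# Assumed data pipeline (sketch): CIFAR-10 loaders for the MobileNetV2 fine-tuning below.
# Dataset choice, batch size, and transforms are assumptions made for illustration.
from torch.utils.data import DataLoader
from torchvision import datasets, transforms

transform = transforms.Compose([
    transforms.Resize(224),                            # MobileNetV2 expects ImageNet-sized inputs
    transforms.ToTensor(),
    transforms.Normalize(mean=[0.485, 0.456, 0.406],   # ImageNet normalization statistics
                         std=[0.229, 0.224, 0.225]),
])

train_dataset = datasets.CIFAR10(root='./data', train=True,  download=True, transform=transform)
test_dataset  = datasets.CIFAR10(root='./data', train=False, download=True, transform=transform)

train_loader = DataLoader(train_dataset, batch_size=64, shuffle=True)
test_loader  = DataLoader(test_dataset,  batch_size=64, shuffle=False)
```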
```python
# 6. Training function
def train_model(model, train_loader, test_loader, criterion, optimizer,
                scheduler, device, epochs, freeze_epochs=1):
    train_loss_history = []
    test_acc_history = []

    # Start with the feature layers frozen
    if freeze_epochs > 0:
        model = freeze_model(model, freeze=True)

    for epoch in range(epochs):
        # Unfreeze once the warm-up epochs are done
        if epoch == freeze_epochs:
            print(f"Epoch {epoch}: unfreezing all parameters, starting fine-tuning...")
            model = freeze_model(model, freeze=False)
            # A smaller learning rate is usually used after unfreezing
            for param_group in optimizer.param_groups:
                param_group['lr'] *= 0.1

        model.train()
        running_loss = 0.0
        correct = 0
        total = 0

        for batch_idx, (data, target) in enumerate(train_loader):
            data, target = data.to(device), target.to(device)

            optimizer.zero_grad()
            output = model(data)
            loss = criterion(output, target)
            loss.backward()
            optimizer.step()

            running_loss += loss.item()
            _, predicted = output.max(1)
            total += target.size(0)
            correct += predicted.eq(target).sum().item()

            if (batch_idx + 1) % 200 == 0:
                print(f"Epoch {epoch+1} | Batch {batch_idx+1}/{len(train_loader)} | Loss: {loss.item():.4f}")

        epoch_loss = running_loss / len(train_loader)
        train_acc = 100. * correct / total

        # Evaluate on the test set
        model.eval()
        correct_test = 0
        total_test = 0
        with torch.no_grad():
            for data, target in test_loader:
                data, target = data.to(device), target.to(device)
                output = model(data)
                _, predicted = output.max(1)
                total_test += target.size(0)
                correct_test += predicted.eq(target).sum().item()
        test_acc = 100. * correct_test / total_test

        train_loss_history.append(epoch_loss)
        test_acc_history.append(test_acc)

        if scheduler:
            scheduler.step()

        print(f"Epoch {epoch+1} End | Train Loss: {epoch_loss:.4f} | "
              f"Train Acc: {train_acc:.2f}% | Test Acc: {test_acc:.2f}%")

    return train_loss_history, test_acc_history


# Main driver
def run_training():
    # Keep the epoch count small for a quick demonstration
    epochs = 5
    freeze_epochs = 2
    learning_rate = 0.001

    model = create_mobilenet_v2(pretrained=True, num_classes=10)
    criterion = nn.CrossEntropyLoss()
    optimizer = optim.Adam(model.parameters(), lr=learning_rate)
    scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=3, gamma=0.1)

    print("Starting MobileNetV2 training...")
    train_loss, test_acc = train_model(
        model, train_loader, test_loader, criterion, optimizer,
        scheduler, device, epochs, freeze_epochs
    )

    # Simple plots of the training curves
    plt.figure(figsize=(10, 4))
    plt.subplot(1, 2, 1)
    plt.plot(train_loss, label='Train Loss')
    plt.title('Training Loss')
    plt.legend()
    plt.subplot(1, 2, 2)
    plt.plot(test_acc, label='Test Acc')
    plt.title('Test Accuracy')
    plt.legend()
    plt.show()


if __name__ == "__main__":
    run_training()
```
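To confirm what `freeze_model` actually toggles, it can help to compare trainable-parameter counts on their own before launching a full run. The snippet below is a small illustrative check, not part of the original script; the helper `count_trainable` is introduced here only for this example.

```python
# Sanity check (illustrative): how many parameters are trainable with the
# feature extractor frozen vs. unfrozen.
def count_trainable(m):
    return sum(p.numel() for p in m.parameters() if p.requires_grad)

model = create_mobilenet_v2(pretrained=True, num_classes=10)
print("all layers trainable:", count_trainable(model))   # classifier head + features

freeze_model(model, freeze=True)
print("features frozen:     ", count_trainable(model))   # only the new classifier head

freeze_model(model, freeze=False)
print("unfrozen again:      ", count_trainable(model))
```

During the first `freeze_epochs` epochs only the new classifier head is updated; after unfreezing, the whole network is fine-tuned with the learning rate reduced by a factor of 10.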

@浙大疏锦行
