F1_Error_confusionMatrix.py
from sklearn.metrics import f1_score, confusion_matrix
from data import MyDataset
import torch
from torch.utils.data import DataLoader
from torchvision import transforms
import hbp_model
# Define the image preprocessing pipeline (the mean/std values below are the standard ImageNet statistics)
transform = transforms.Compose([
    transforms.Resize((224, 224)),
    transforms.ToTensor(),
    transforms.Normalize(mean=(0.485, 0.456, 0.406), std=(0.229, 0.224, 0.225)),
])
# Load the fine-tuned model weights onto the CPU (the loop below calls .numpy() on the tensors directly)
model = hbp_model.Net()
model.load_state_dict(torch.load("firststep.pth", map_location="cpu"))
model.eval()  # evaluation mode: disables dropout and uses running batch-norm statistics
# Load the test dataset
test_dataset = MyDataset("test_images_shuffle.txt", transform)
# Create a DataLoader for the test dataset (a larger batch_size would speed up evaluation)
test_loader = DataLoader(test_dataset, batch_size=1, shuffle=False)
# Initialize lists to store true labels and predicted labels
true_labels = []
predicted_labels = []
# Make predictions on the test set
with torch.no_grad():  # disable gradient tracking for faster, memory-efficient inference
    for images, labels in test_loader:
        outputs = model(images)
        _, predicted = torch.max(outputs, 1)  # index of the highest-scoring class
        true_labels.extend(labels.numpy())
        predicted_labels.extend(predicted.numpy())
# Compute the macro-averaged F1 score (unweighted mean of the per-class F1 scores)
f1 = f1_score(true_labels, predicted_labels, average='macro')
# Compute confusion matrix
conf_matrix = confusion_matrix(true_labels, predicted_labels)
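# By scikit-learn's convention, conf_matrix[i, j] counts samples whose true class
# is i and whose predicted class is j, so correct predictions lie on the diagonal.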
# Count misclassified samples
errors = sum(true != pred for true, pred in zip(true_labels, predicted_labels))
# Print results
print("F1 Score:", f1)
print("Confusion Matrix:")
print(conf_matrix)
print("Total Errors:", errors)