How can I calculate the accuracy for a Graph Convolutional Network?

111 views Asked by At

how can I calculate the Accuracy on a GCN based on an event log? With my code I get the following result:

Epoch: 0, Loss: 1.299316294690154, Accuracy: [tensor([5.]), tensor([9.]), tensor([7.]) .... tensor([6.])Validation loss : 0.9456889930794145

I would like to have the Accuracy output as a value.

# Learning-rate sweep: train the event-prediction GCN with three learning
# rates, `num_runs` independent runs each, tracking per-epoch training loss,
# validation loss, and training accuracy.
for lr_run in range(3):
  # Pick the learning rate for this sweep iteration.
  if lr_run == 0:
    lr_value = 1e-03
  elif lr_run == 1:
    lr_value = 1e-04
  else:
    lr_value = 1e-05

  for run in range(num_runs):
    print("Run: {}, Learning Rate: {}".format(run + 1, lr_value))

    # Fresh model, data loaders, and process graph (adjacency) for every run.
    model = EventPredictor(num_nodes, num_features)
    train_dl, valid_dl, test_dl = generate_input_and_labels(path)
    adj = generate_process_graph(path)
    criterion = nn.CrossEntropyLoss()
    optimizer = torch.optim.Adam(model.parameters(), lr=lr_value)

    print("************* Event Predictor ***************")
    print("Train size: {}, Validation size:{}, Test size: {}".format(len(train_dl.dataset),len(valid_dl.dataset),len(test_dl.dataset)))
    print(model)

    model = model.to(device)
    adj = adj.to(device)

    # Per-epoch history for plotting.
    epochs_plt = []
    acc_plt = []
    loss_plt = []
    valid_loss_plt = []

    for epoch in range(num_epochs):

        # ---- Training pass ----
        model.train()
        num_train = 0
        training_loss = 0
        predictions, actuals = list(), list()

        for i, (inputs, targets) in enumerate(train_dl):
          inputs, targets = inputs.to(device), targets.to(device)
          optimizer.zero_grad()

          yhat = model(inputs[0], adj)

          # CrossEntropyLoss wants (batch, classes) logits and a long target.
          loss = criterion(yhat.reshape((1, -1)), targets[0].to(torch.long))
          loss.backward()
          optimizer.step()

          training_loss += loss.item()

          # Store plain Python ints (not 0-dim tensors) so accuracy_score
          # returns a clean scalar instead of printing tensor objects.
          predictions.append(int(torch.argmax(yhat.to('cpu')).item()))
          actuals.append(int(targets.to('cpu')[0].item()))
          num_train += 1

        # ---- Validation pass (no gradients) ----
        with torch.no_grad():
          model.eval()
          num_valid = 0
          validation_loss = 0
          for i, (inputs, targets) in enumerate(valid_dl):
            inputs, targets = inputs.to(device), targets.to(device)
            yhat_valid = model(inputs[0], adj)
            loss_valid = criterion(yhat_valid.reshape((1, -1)), targets[0].to(torch.long))
            validation_loss += loss_valid.item()
            num_valid += 1

        # BUG FIX: the original `acc = accuracy_score = (actuals, predictions)`
        # never CALLED accuracy_score -- it rebound the name to a tuple, which
        # is why the "accuracy" printed as a list of tensors. Call it instead.
        acc = accuracy_score(actuals, predictions)
        avg_training_loss = training_loss / num_train
        avg_validation_loss = validation_loss / num_valid

        print("Epoch: {}, Loss: {}, Accuracy: {}, Validation loss : {}".format(epoch, avg_training_loss, acc, avg_validation_loss))
        epochs_plt.append(epoch + 1)
        acc_plt.append(acc)
        loss_plt.append(avg_training_loss)
        valid_loss_plt.append(avg_validation_loss)
1

There is 1 answer

2
Olavo Sampaio On

It seems that you are not actually calling the accuracy function. The line

acc = accuracy_score = (actuals, predictions)

should be

acc = accuracy_score(actuals, predictions)