import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch_geometric.data import Data
from torch_geometric.nn import GATConv
from torch_geometric.datasets import Planetoid
import torch_geometric.transforms as T
import warnings
warnings.filterwarnings("ignore")

# Seed for reproducible results
torch.manual_seed(2020)
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

# Dataset: the Cora citation network, with row-normalized node features
name_data = 'Cora'
dataset = Planetoid(root='/tmp/' + name_data, name=name_data)
dataset.transform = T.NormalizeFeatures()

print(f"Number of Classes in {name_data}:", dataset.num_classes)
print(f"Number of Node Features in {name_data}:", dataset.num_node_features)

# Model definition: a two-layer GAT
class GAT(torch.nn.Module):
    def __init__(self):
        super(GAT, self).__init__()
        self.hid = 8        # hidden units per attention head
        self.in_head = 8    # attention heads in the first layer
        self.out_head = 1   # attention heads in the output layer

        # First layer: multi-head attention; head outputs are concatenated,
        # so it produces hid * in_head features per node.
        self.conv1 = GATConv(dataset.num_features, self.hid,
                             heads=self.in_head, dropout=0.6)
        # Second layer: single head mapped to class scores (concat=False averages heads).
        self.conv2 = GATConv(self.hid * self.in_head, dataset.num_classes,
                             concat=False, heads=self.out_head, dropout=0.6)

    def forward(self, data):
        x, edge_index = data.x, data.edge_index

        x = F.dropout(x, p=0.6, training=self.training)
        x = self.conv1(x, edge_index)
        x = F.elu(x)
        x = F.dropout(x, p=0.6, training=self.training)
        x = self.conv2(x, edge_index)

        return F.log_softmax(x, dim=1)

# Training setup
model = GAT().to(device)
data = dataset[0].to(device)

# Adam optimizer with weight decay (L2 regularization)
optimizer = torch.optim.Adam(model.parameters(), lr=0.005, weight_decay=5e-4)

# Training loop
model.train()
for epoch in range(1000):
    optimizer.zero_grad()
    out = model(data)
    loss = F.nll_loss(out[data.train_mask], data.y[data.train_mask])
    if epoch % 200 == 0:
        print(loss)
    loss.backward()
    optimizer.step()

# Evaluation on the test split
model.eval()
_, pred = model(data).max(dim=1)
correct = float(pred[data.test_mask].eq(data.y[data.test_mask]).sum().item())
acc = correct / data.test_mask.sum().item()
print('Accuracy: {:.4f}'.format(acc))
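
# --- Optional: a minimal sketch of per-split accuracy reporting. ---
# The Planetoid data object for Cora exposes train_mask, val_mask, and test_mask;
# this helper reuses them to report accuracy on all three splits. The function name
# `accuracy_per_split` is illustrative and not part of the original script.
@torch.no_grad()
def accuracy_per_split(model, data):
    model.eval()
    logits = model(data)          # log-probabilities, shape [num_nodes, num_classes]
    pred = logits.argmax(dim=1)   # predicted class per node
    accs = {}
    for split in ('train_mask', 'val_mask', 'test_mask'):
        mask = data[split]
        accs[split] = (pred[mask] == data.y[mask]).float().mean().item()
    return accs

print(accuracy_per_split(model, data))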