# Install torch geometric
!pip install -q torch-scatter -f https://pytorch-geometric.com/whl/torch-1.10.2+cu102.html
!pip install -q torch-sparse -f https://pytorch-geometric.com/whl/torch-1.10.2+cu102.html
!pip install -q torch-geometric
import torch
import networkx as nx
import matplotlib.pyplot as plt
1. Built-in datasets (using KarateClub as an example)
from torch_geometric.datasets import KarateClub
dataset = KarateClub()
print(f'Dataset: {dataset}:')
print('======================')
# Number of graphs
print(f'Number of graphs: {len(dataset)}')
# Feature dimensionality of each node
print(f'Number of features: {dataset.num_features}')
# Number of node classes
print(f'Number of classes: {dataset.num_classes}')
# Get a specific graph
data = dataset[0]
print(data)
print('==============================================================')
# Print the graph's properties
print(f'Number of nodes: {data.num_nodes}')
print(f'Number of edges: {data.num_edges}')
# num_edges already counts both directions of every undirected edge, so the average degree is num_edges / num_nodes
print(f'Average node degree: {data.num_edges / data.num_nodes:.2f}')
print(f'Number of training nodes: {data.train_mask.sum()}')
print(f'Training node label rate: {int(data.train_mask.sum()) / data.num_nodes:.2f}')
print(f'Contains isolated nodes: {data.has_isolated_nodes()}')
print(f'Contains self-loops: {data.has_self_loops()}')
print(f'Is undirected: {data.is_undirected()}')
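As a cross-check (a small sketch not in the original article, relying on the torch_geometric.utils.degree helper), the same average degree can be recovered directly from edge_index:
from torch_geometric.utils import degree
# Count edge endpoints per source node; since the graph stores both directions
# of every undirected edge, this equals the node degree.
deg = degree(data.edge_index[0], num_nodes=data.num_nodes)
print(f'Average node degree: {deg.mean():.2f}')  # should match the value printed above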
# The graph object retrieved from the dataset is of type Data and has the following attributes
# 1. edge_index: a [2, num_edges] tensor holding the source and target node index of every edge
# 2. x: node features, shape [num_nodes, num_features]
# 3. y: node labels (classes); each node is assigned exactly one class
# 4. train_mask: marks which nodes are used for training
# A hand-built example of such an object is sketched after this block.
print(data)
# Expected output: Data(edge_index=[2, 156], x=[34, 34], y=[34], train_mask=[34])
# Print all edges (transposed so each row is one edge)
print(data.edge_index.T)
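To make the attributes listed above concrete, here is a minimal hand-built sketch (the tiny 3-node graph and its tensors are illustrative assumptions, not part of KarateClub):
from torch_geometric.data import Data
# A toy graph with 3 nodes and 2 undirected edges (each stored in both directions)
toy_edge_index = torch.tensor([[0, 1, 1, 2],
                               [1, 0, 2, 1]], dtype=torch.long)  # shape [2, num_edges]
toy_x = torch.randn(3, 34)        # node features, shape [num_nodes, num_features]
toy_y = torch.tensor([0, 1, 0])   # one class label per node
toy_data = Data(x=toy_x, edge_index=toy_edge_index, y=toy_y)
print(toy_data)  # Data(x=[3, 34], edge_index=[2, 4], y=[3])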
2. Visualization
def visualize(h, color, epoch=None, loss=None, accuracy=None):
    plt.figure(figsize=(7, 7))
    plt.xticks([])
    plt.yticks([])
    if torch.is_tensor(h):
        # h is an embedding tensor: scatter its first two dimensions
        h = h.detach().cpu().numpy()
        plt.scatter(h[:, 0], h[:, 1], s=140, c=color, cmap="Set2")
        if (epoch is not None and loss is not None and accuracy is not None
                and accuracy['train'] is not None and accuracy['val'] is not None):
            plt.xlabel((f'Epoch: {epoch}, Loss: {loss.item():.4f} \n'
                        f'Training Accuracy: {accuracy["train"]*100:.2f}% \n'
                        f'Validation Accuracy: {accuracy["val"]*100:.2f}%'),
                       fontsize=16)
    else:
        # h is a networkx graph: draw it with networkx's draw_networkx
        nx.draw_networkx(h, pos=nx.spring_layout(h, seed=42), with_labels=False,
                         node_color=color, cmap="Set2")
    plt.show()
from torch_geometric.utils import to_networkx
# Convert the Data object into a networkx graph
G = to_networkx(data, to_undirected=True)
# Visualize the graph, coloring each node by its class label
visualize(G, color=data.y)
3. Building a GNN (using GCN as an example)
import torch
from torch.nn import Linear
from torch_geometric.nn import GCNConv
class GCN(torch.nn.Module):
    def __init__(self):
        super().__init__()
        # Three graph-convolution layers shrink the 34-dim node features
        # down to a 2-dim embedding, followed by a linear classifier
        self.conv1 = GCNConv(dataset.num_features, 4)
        self.conv2 = GCNConv(4, 4)
        self.conv3 = GCNConv(4, 2)
        self.classifier = Linear(2, dataset.num_classes)

    def forward(self, x, edge_index):
        h = self.conv1(x, edge_index)
        h = h.tanh()
        h = self.conv2(h, edge_index)
        h = h.tanh()
        h = self.conv3(h, edge_index)
        h = h.tanh()
        out = self.classifier(h)
        # Return both the class logits and the 2-dim embedding for visualization
        return out, h
model = GCN()
print(model)
# Node classification: run the still-untrained model and inspect its embeddings
model = GCN()
out, h = model(data.x, data.edge_index)
print(f'Embedding shape: {list(h.shape)}')
visualize(h, color=data.y)
4. Training on the KarateClub dataset
import time
model = GCN()
# Cross-entropy loss and the Adam optimizer
criterion = torch.nn.CrossEntropyLoss()
optimizer = torch.optim.Adam(model.parameters())
def train(data):
    optimizer.zero_grad()
    out, h = model(data.x, data.edge_index)
    # Compute the loss only on the nodes in train_mask (semi-supervised setting)
    loss = criterion(out[data.train_mask], data.y[data.train_mask])
    loss.backward()
    optimizer.step()

    accuracy = {}
    # torch.argmax picks the class with the highest score, e.g. [0.6, 0.2, 0.7, 0.1] -> 2
    predicted_classes = torch.argmax(out[data.train_mask], dim=1)
    target_classes = data.y[data.train_mask]
    accuracy['train'] = torch.mean(torch.where(predicted_classes == target_classes, 1, 0).float())
    # "Validation" accuracy here is simply measured over all nodes
    predicted_classes = torch.argmax(out, dim=1)
    target_classes = data.y
    accuracy['val'] = torch.mean(torch.where(predicted_classes == target_classes, 1, 0).float())
    return loss, h, accuracy
for epoch in range(500):
    loss, h, accuracy = train(data)
    # Visualize the current embedding every 10 epochs
    if epoch % 10 == 0:
        visualize(h, color=data.y, epoch=epoch, loss=loss, accuracy=accuracy)
        time.sleep(0.3)
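After training finishes, the model can be switched to evaluation mode and scored once over all nodes; this final check is a small sketch added for completeness and is not part of the original article:
model.eval()
with torch.no_grad():
    out, h = model(data.x, data.edge_index)
    pred = out.argmax(dim=1)
    acc = (pred == data.y).float().mean()
print(f'Final accuracy over all nodes: {acc*100:.2f}%')
# Embeddings after training, colored by the true class labels
visualize(h, color=data.y)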