from __future__ import division
from __future__ import print_function

import argparse

import numpy as np
import scipy.sparse as sp
import torch
import torch.nn.functional as F

def sparse_mx_to_torch_sparse_tensor(sparse_mx):
    '''Convert a scipy sparse matrix to a torch sparse tensor.'''
    sparse_mx = sparse_mx.tocoo().astype(np.float32)
    indices = torch.from_numpy(
        np.vstack((sparse_mx.row, sparse_mx.col)).astype(np.int64))
    values = torch.from_numpy(sparse_mx.data)
    shape = torch.Size(sparse_mx.shape)
    return torch.sparse.FloatTensor(indices, values, shape)

def normalize_adj(mx, r_power):
    '''Normalize the adjacency matrix with added self-loops.

    With M = A + I and D = diag(M @ 1), this returns
    D^(r_power - 1) @ M.T @ D^(-r_power) as a torch sparse tensor;
    r_power = 0.5 gives the symmetric normalization D^(-1/2) @ M.T @ D^(-1/2).
    The input `mx` may be a dense array-like (e.g. a torch tensor) or a
    scipy sparse matrix.
    '''
    mx = sp.coo_matrix(mx) + sp.eye(mx.shape[0])
    rowsum = np.array(mx.sum(1))
    r_inv_sqrt_left = np.power(rowsum, r_power - 1).flatten()
    r_inv_sqrt_left[np.isinf(r_inv_sqrt_left)] = 0.
    r_mat_inv_sqrt_left = sp.diags(r_inv_sqrt_left)

    r_inv_sqrt_right = np.power(rowsum, -r_power).flatten()
    r_inv_sqrt_right[np.isinf(r_inv_sqrt_right)] = 0.
    r_mat_inv_sqrt_right = sp.diags(r_inv_sqrt_right)
    adj_normalized = mx.dot(r_mat_inv_sqrt_left).transpose().dot(r_mat_inv_sqrt_right).tocoo()
    return sparse_mx_to_torch_sparse_tensor(adj_normalized)

def run(args):
    # Set random seed
    torch.manual_seed(args.seed)

    # Load data
    adj = torch.tensor([
        [1., 1., 0., 0., 0., 0., 0., 1., 0., 0.],
        [1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
        [0., 0., 1., 0., 0., 0., 0., 0., 0., 0.],
        [0., 0., 0., 1., 0., 0., 0., 0., 0., 0.],
        [0., 0., 0., 1., 1., 0., 0., 0., 0., 0.],
        [0., 0., 0., 0., 0., 1., 0., 0., 0., 0.],
        [0., 0., 0., 0., 0., 0., 1., 0., 0., 0.],
        [0., 0., 0., 0., 0., 0., 0., 1., 0., 0.],
        [0., 0., 0., 0., 0., 0., 0., 0., 1., 0.],
        [0., 0., 0., 0., 0., 1., 0., 0., 0., 1.],
    ])
    features = torch.tensor([
        [1., 1.2, 0., 0., 0., 0., 0., 1., 0.2, 0.],
        [1., 1., 0., 0., 0., 0., 0., 0., 0., 0.],
        [0., 0., 1., 0., 0.5, 0., 0., 0., 0., 0.],
        [0., 0., 0., 1., 0., 0., 0., 0., 0., 0.],
        [0., 0., 0., 1., 1., 0., 0., 0., 0., 0.],
        [0., 0., 2., 0., 0., 1., 0., 0., 0., 0.],
        [0., 0., 1., 0., 0., 0., 1., 0., 0., 0.],
        [0., 0., 0., 0.4, 0., 0., 0., 1., 0., 0.],
        [0., 0., 0., 0., 0., 0., 0., 0., 1., 0.],
        [0., 0., 0., 0., 0., 1., 0., 0., 0., 1.],
    ])
    n_nodes, feat_dim = features.shape

    hops = args.hops

    for hop in range(hops, hops+1):
        # Set up input features
        input_features = 0.
        for r in [0, 0.1, 0.2, 0.3, 0.4, 0.5]:
            # Normalize adjacency matrix with exponent r
            adj_norm = normalize_adj(adj, r)

            # Compute smoothed features: features_list[k] holds the k-hop propagated features
            features_list = []
            features_list.append(features)
            for i in range(hop):
                features_list.append(torch.spmm(adj_norm, features_list[-1]))

            # Compute weights for feature aggregation: per-node cosine similarity
            # between the original features and each propagated version
            weight_list = []
            norm_fea = torch.norm(features, 2, 1).add(1e-10)
            for fea in features_list:
                norm_cur = torch.norm(fea, 2, 1).add(1e-10)
                temp = torch.div((features*fea).sum(1), norm_fea)
                temp = torch.div(temp, norm_cur)
                weight_list.append(temp.unsqueeze(-1))
            weight = F.softmax(torch.cat(weight_list, dim=1), dim=1)

            # Smooth the node features: weighted sum over the hop-wise representations
            input_feas = []
            for i in range(n_nodes):
                fea = 0.
                for j in range(hop+1):
                    fea += (weight[i][j]*features_list[j][i]).unsqueeze(0)
                input_feas.append(fea)
            input_feas = torch.cat(input_feas, dim=0)

            # Aggregate smoothed features with different r via element-wise max
            if r == 0:
                input_features = input_feas
            else:
                input_features = torch.cat([input_features.unsqueeze(0), input_feas.unsqueeze(0)], dim=0).max(0)[0]

        # Compute similarity matrix between all pairs of smoothed node features
        sim = torch.sigmoid(torch.matmul(input_features, input_features.T))

    # Expose the similarity matrix to callers
    return sim

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--seed', type=int, default=42, help='Random seed.')
    parser.add_argument('--dataset', type=str, default='cora', help='Type of dataset.')
    parser.add_argument('--hops', type=int, default=20, help='Number of hops.')
    args = parser.parse_args()
    run(args)
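A minimal usage sketch (the file name smooth_sim.py and the programmatic call are illustrative assumptions, not part of the original script):

    # From the command line:
    #   python smooth_sim.py --hops 20 --seed 42
    #
    # Or programmatically; run() returns the sigmoid similarity matrix:
    import argparse
    sim = run(argparse.Namespace(seed=42, dataset='cora', hops=20))
    print(sim.shape)  # torch.Size([10, 10]) for the 10-node toy graph above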

Graph Neural Network with Smooth Node Features for Similarity Calculation
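The per-node smoothing loop inside run() can equivalently be written in vectorized form; this is only a sketch, with `stacked` introduced here for illustration and `weight` / `features_list` assumed to be the tensors computed in the loop above:

    stacked = torch.stack(features_list, dim=1)           # (n_nodes, hop+1, feat_dim)
    input_feas = (weight.unsqueeze(-1) * stacked).sum(1)   # (n_nodes, feat_dim)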

Original source: https://www.cveoy.top/t/topic/nsIE. Copyright belongs to the author; please do not repost or scrape.
