''' Three different ways of computing the gradient of the infinite matrix product trace Z = Tr(T*T*...*T) '''
import torch

torch.manual_seed(42)

N = 10
Niter = 20
A = torch.rand(N, N, dtype=torch.float64)
T = torch.nn.Parameter(A@A.t())  # a symmetric positive definite transfer matrix

# compute lnZ = lim_{L->inf} (1/L) * log Tr(T^L) by repeated squaring of T,
# pulling out the norm at each step to avoid overflow
lnZ = 0.0
M = T.clone()  # M converges to the environment tensor after repeated squaring
for i in range(Niter):
    s = M.norm()
    lnZ = lnZ + torch.log(s)/2**i
    M = M/s
    M = M@M
lnZ = lnZ + torch.log(torch.trace(M))/(2**Niter)

# (1) impurity gradient: contract the converged environment M on both sides of T
impurity_grad = (M@M).t()/torch.trace(M@T@M)

# (2) autograd on lnZ
lnZ.backward()
lnZ_grad = T.grad.clone()

# (3) direct computation via the dominant eigenvalue: lnZ = log(lambda_max)
w, v = torch.linalg.eigh(T)  # replaces the deprecated torch.symeig; eigenvalues in ascending order
T.grad.zero_()
loss = torch.log(w[-1])
loss.backward()
exact_grad = (T.grad + T.grad.t())/2  # symmetrize the gradient (the raw autograd result need not be symmetric)

print((impurity_grad - exact_grad).abs().max().item())
print((lnZ_grad - exact_grad).abs().max().item())
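
# --- Optional numerical cross-check (not part of the original script) ---
# A minimal finite-difference sketch: it recomputes lnZ with the same
# repeated-squaring scheme in a small helper (lnZ_of, a hypothetical name)
# and compares a central-difference gradient against exact_grad above.
# It assumes the Niter squarings above are sufficient for convergence.
def lnZ_of(T_mat, niter=Niter):
    ''' lnZ = lim_{L->inf} (1/L) * log Tr(T^L) via repeated squaring '''
    val = 0.0
    M_ = T_mat.clone()
    for i in range(niter):
        s_ = M_.norm()
        val = val + torch.log(s_)/2**i
        M_ = M_/s_
        M_ = M_@M_
    return val + torch.log(torch.trace(M_))/(2**niter)

eps = 1e-6
fd_grad = torch.zeros(N, N, dtype=torch.float64)
with torch.no_grad():
    for i in range(N):
        for j in range(N):
            dT = torch.zeros(N, N, dtype=torch.float64)
            dT[i, j] = eps
            # central finite difference of lnZ with respect to T[i, j]
            fd_grad[i, j] = (lnZ_of(T + dT) - lnZ_of(T - dT))/(2*eps)
print((fd_grad - exact_grad).abs().max().item())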