Advanced Computing Platform for Theoretical Physics

Committing large files makes the server unstable; please commit only code, not large files.

Commit d9aea05f authored by Lei Wang's avatar Lei Wang
Browse files

compute physical obs in vmps

parent 94759d68
......@@ -51,7 +51,7 @@ def vmps(T, d, D, no_iter, Nepochs=5):
loss = optimizer.step(closure)
print (' epoch, free energy', epoch, loss.item())
return -loss, None
return -loss, A
if __name__=='__main__':
import time
......@@ -68,10 +68,27 @@ if __name__=='__main__':
device = torch.device("cpu" if args.cuda<0 else "cuda:"+str(args.cuda))
dtype = torch.float32 if args.float32 else torch.float64
K = torch.tensor([args.beta], dtype=torch.float64, device=device)
c = torch.sqrt(torch.cosh(K))
s = torch.sqrt(torch.sinh(K))
M = torch.stack([torch.cat([c, s]), torch.cat([c, -s])])
T = torch.einsum('ai,aj,ak,al->ijkl', (M, M, M, M))
def build_tensor(K):
    """Construct the rank-4 Boltzmann tensor of the 2D classical Ising model.

    K is a 1-element tensor holding the inverse temperature (coupling).
    Returns T of shape (2, 2, 2, 2) with
    T[i,j,k,l] = sum_a M[a,i] M[a,j] M[a,k] M[a,l],
    where M is the square-root factorization of the bond weight matrix
    built from sqrt(cosh K) and sqrt(sinh K).
    """
    cosh_root = torch.sqrt(torch.cosh(K))
    sinh_root = torch.sqrt(torch.sinh(K))
    row_up = torch.cat([cosh_root, sinh_root])
    row_down = torch.cat([cosh_root, -sinh_root])
    M = torch.stack([row_up, row_down])
    # Contract four copies of M over the shared spin index a.
    return torch.einsum('ai,aj,ak,al->ijkl', (M, M, M, M))
vmps(T, 2, args.Dcut, args.Niter, args.Nepochs)
# --- optimization: find the variational MPS tensor A at fixed temperature ---
K = torch.tensor([args.beta], dtype=torch.float64, device=device)
T = build_tensor(K)
loss, A = vmps(T, 2, args.Dcut, args.Niter, args.Nepochs)

# --- physical observables via automatic differentiation ---
# Rebuild T from a K that tracks gradients so lnZ is differentiable in beta.
K = torch.tensor([args.beta], dtype=torch.float64, device=device, requires_grad=True)
T = build_tensor(K)
# Symmetrize the optimized MPS tensor over its left/right bond indices.
Asymm = (A + A.permute(2, 1, 0)) * 0.5
D, d = Asymm.shape[0], Asymm.shape[1]
# Transfer matrix <A| T |A>, reshaped to a (D^2*d) x (D^2*d) symmetric matrix.
B = torch.einsum('ldr,adcb,icj->lairbj', (Asymm, T, Asymm)).contiguous().view(D**2 * d, D**2 * d)
# torch.symeig was deprecated and removed in PyTorch 1.11; use the supported,
# differentiable torch.linalg.eigvalsh (the eigenvectors were discarded anyway).
w = torch.linalg.eigvalsh(B)
# Free energy density comes from the dominant eigenvalue of the transfer matrix.
lnZ = torch.log(w.abs().max())
dlnZ = torch.autograd.grad(lnZ, K, create_graph=True)[0]  # En = -d lnZ / d beta
print(-dlnZ.item())
# NOTE(review): the second-order derivative evaluated this way seems incorrect --
# it includes no effect of the environment.
# dlnZ2 = torch.autograd.grad(dlnZ, K)[0]  # Cv = beta^2 * d^2 lnZ / d beta^2
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment