toonhasenack committed Jan 27, 2023
1 parent 1aa6494 commit 2cad351
Showing 37 changed files with 2,810 additions and 0 deletions.
158 changes: 158 additions & 0 deletions .ipynb_checkpoints/Exact_Diagonalization-checkpoint.ipynb

Large diffs are not rendered by default.

295 changes: 295 additions & 0 deletions .ipynb_checkpoints/Parameter_estimation-checkpoint.ipynb

Large diffs are not rendered by default.

191 changes: 191 additions & 0 deletions .ipynb_checkpoints/Phase_Transition-checkpoint.ipynb

Large diffs are not rendered by default.

190 changes: 190 additions & 0 deletions .ipynb_checkpoints/Simulation-checkpoint.ipynb

Large diffs are not rendered by default.

Binary file added BEP_FINAL.pdf
Binary file not shown.
Binary file added BEP_LATEX.zip
Binary file not shown.
158 changes: 158 additions & 0 deletions Exact_Diagonalization.ipynb

Large diffs are not rendered by default.

68 changes: 68 additions & 0 deletions Functions/.ipynb_checkpoints/Exact_Solver-checkpoint.py
@@ -0,0 +1,68 @@
from numpy import *
import matplotlib.pyplot as plt

def init_full(n, L, t, U, V, mu):
    # Single-site identity, annihilation (a) and creation (c) operators in the occupation basis.
    I = identity(n)
    a = zeros((n, n))
    c = zeros((n, n))
    for i in range(n):
        for j in range(n):
            if j == i + 1:
                a[i, j] = sqrt(i + 1)
            if j == i - 1:
                c[i, j] = sqrt(i)

    # Lift the single-site operators to the full n**L-dimensional Hilbert space
    # via Kronecker products with identities on all other sites.
    A = []
    C = []
    N = []
    for i in range(L):
        s = ["I"]*L
        s[i] = "a"

        string = s[0]
        for j in range(1, L):
            string += "," + s[j] + ")"

        exec("A.append(" + (L-1)*"kron(" + string + ")")
        C.append(A[-1].T)
        N.append(matmul(C[-1], A[-1]))

    # Bose-Hubbard Hamiltonian with periodic boundary conditions.
    H = zeros((n**L, n**L))
    for i in range(L):
        l = 1 + i
        for j in range(L):
            if (abs(j-i) == 1) or (abs(j-i) == L-1):
                H -= t*matmul(C[i], A[j])
            if j == i:
                H += 1/2*U(l)*multiply(N[i], N[i] - 1) + V(l)*N[i] - mu*N[i]

    return H

def diagonalize(n, L, t, U, V, mu):
    H = init_full(n, L, t, U, V, mu)

    E, psi = linalg.eig(H)

    # Ground state: eigenvector belonging to the smallest eigenvalue.
    k = where(E == min(E))[0][0]
    psi_k = psi[:, k].reshape([n]*L)

    # Marginal occupation probabilities per site, obtained by summing out all other sites.
    uncond_prob = zeros((L, n))
    for i in range(L):
        string = list(L*":,")
        string[2*i] = 'j'
        string = ''.join(string)
        for j in range(n):
            exec(f"uncond_prob[i,j] = sum(psi_k[{string}]**2)")

    return H, E, psi, k, uncond_prob

def plot_result(prob, n, L):
    # Heat map of the site-resolved occupation probabilities.
    fig = plt.figure(figsize = (9, 6))
    ax = plt.axes()
    plt.imshow(prob.T, extent = [0, L, n, 0])  # interpolation = 'bicubic'
    ax.invert_yaxis()
    plt.colorbar()
    plt.xticks(linspace(1, L, L));
    plt.yticks(linspace(0, n, n+1));
    ax.set_xlabel(r"Site $\ell$", size = 20)
    ax.set_ylabel(r"Occupancy $|g_\ell\rangle$", size = 20)
    return fig, ax
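
For orientation, a minimal usage sketch of the exact solver above (not part of the commit; the values of n, L, t, mu and the constant U and V below are illustrative assumptions):

import matplotlib.pyplot as plt

# Illustrative parameters: 4 sites, local Fock cutoff of 3, uniform interaction, no external potential.
n, L = 3, 4
t, mu = 0.1, 0.5
U = lambda l: 1.0   # site-dependent interaction, here constant
V = lambda l: 0.0   # site-dependent potential, here zero

H, E, psi, k, uncond_prob = diagonalize(n, L, t, U, V, mu)
fig, ax = plot_result(uncond_prob, n, L)
plt.show()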
138 changes: 138 additions & 0 deletions Functions/.ipynb_checkpoints/Hamiltonian_Solver-checkpoint.py
@@ -0,0 +1,138 @@
from numpy import *
import numpy as np
import scipy as sp
import matplotlib.pyplot as plt
from numba import jit, njit
import time
import cProfile
import io
import pstats

@jit(nopython = True)
def expm(a):
    # Matrix exponential via scaling-and-squaring with a Pade approximation (numba-compatible).
    n = a.shape[0]
    q = 6
    a2 = a.copy()
    a_norm = np.linalg.norm(a2, np.inf)
    ee = int(np.log2(a_norm)) + 1
    s = max(0, ee + 1)
    a2 = a2 / (2.0 ** s)
    x = a2.copy()
    c = 0.5
    e = np.eye(n, dtype = np.complex64) + c * a2
    d = np.eye(n, dtype = np.complex64) - c * a2
    p = True

    for k in range(2, q + 1):
        c = c * float(q - k + 1) / float(k * (2 * q - k + 1))
        x = np.dot(a2, x)
        e = e + c * x

        if (p):
            d = d + c * x
        else:
            d = d - c * x

        p = not p
    # E -> inverse(D) * E
    e = np.linalg.solve(d, e)
    # E -> E^(2*S)
    for k in range(0, s):
        e = np.dot(e, e)
    return e

@jit(nopython = True)
def init_i(n):
    # Single-site identity, annihilation, creation and number operators in the occupation basis.
    I = identity(n)
    a_hat = zeros((n, n))
    c_hat = zeros((n, n))
    for i in range(n):
        for j in range(n):
            if j == i + 1:
                a_hat[i, j] = sqrt(i + 1)
            if j == i - 1:
                c_hat[i, j] = sqrt(i)

    n_hat = c_hat @ a_hat
    return a_hat, c_hat, n_hat

@jit(nopython = True)
def Hamiltonian(t, z, U, V, mu, param, a_hat, c_hat, n_hat):
    # Single-site mean-field Hamiltonian; param plays the role of the order parameter <a>.
    return 1/2*U*multiply(n_hat, n_hat - 1) + V*n_hat - mu*n_hat - t*z*param*(param + a_hat + c_hat)

@jit(nopython = True)
def Perturbation(V_t, n_hat):
    # Time-dependent on-site potential acting as the perturbation.
    return V_t*n_hat

@jit(nopython = True)
def solve_i(H_i):
    # Ground state (lowest eigenvalue and its eigenvector) of a single-site Hamiltonian.
    E_i, phi_i = linalg.eig(H_i)
    k = where(E_i == min(E_i))[0][0]
    phi_k = phi_i[:, k].reshape(H_i.shape[0],)
    E_k = E_i[k]

    return phi_k, E_k

@jit(nopython = True)
def solve(n, dim, t, z, U, V, V_p, mu, it = 10):
    # Self-consistent mean-field loop: update the order parameter on every site of the L x M lattice.
    L, M = dim
    a_hat, c_hat, n_hat = init_i(n)
    param = zeros((L, M)) + 1
    psi = np.zeros((L, M, n))
    g = np.zeros((L, M))
    E = zeros(L*M)
    Es = zeros(it)
    for i in range(it):
        for j in range(L):
            for k in range(M):
                x = np.array([[j+1, k+1],])
                H_i = Hamiltonian(t, z, U(x), V(x, V_p), mu, param[j, k], a_hat, c_hat, n_hat).astype("float64")
                phi, E[j] = solve_i(H_i)
                param[j, k] = phi @ a_hat @ phi
        Es[i] = np.sum(E)

    # With the converged order parameter, store the ground state and the occupation <n> per site.
    for i in range(L):
        for j in range(M):
            x = np.array([[i+1, j+1],])
            H_i = Hamiltonian(t, z, U(x), V(x, V_p), mu, param[i, j], a_hat, c_hat, n_hat)
            phi, _ = solve_i(H_i)
            g[i, j] = phi @ n_hat @ phi
            psi[i, j] = phi

    return psi, g, param, Es

@jit(nopython = True)
def dynamics(n, dim, t, z, U, V, V_p, V_t, mu, psi, param, t_interval = [0, 1], t_points = 1000):
    L, M = dim
    a_hat, c_hat, n_hat = init_i(n)
    # First we generate the Hamiltonian of each lattice site.
    H = np.zeros((L, M, n, n))
    for i in range(L):
        for j in range(M):
            x = np.array([[i+1, j+1],])
            H[i, j] = Hamiltonian(t, z, U(x), V(x, V_p), mu, param[i, j], a_hat, c_hat, n_hat)

    # First-order time evolution of the interaction-picture state under the perturbation V_t.
    t_s = linspace(t_interval[0], t_interval[1], t_points)
    dt = (t_interval[1] - t_interval[0])/t_points
    psi_s = np.zeros((t_points, L, M, n), dtype = 'complex_')
    psi_I_s = np.zeros((t_points, L, M, n), dtype = 'complex_')
    g_s = np.zeros((t_points, L, M))
    for i in range(t_points):
        for l in range(L):
            for m in range(M):
                U_hat = expm(-1j*t_s[i]*H[l, m])
                if i == 0:
                    psi_I_s[i, l, m] = psi[l, m].astype('complex_')
                else:
                    x = np.array([[l + 1, m + 1],])
                    H_t = Perturbation(V_t(x, t_s[i]), n_hat)
                    H_t_I = np.conj(U_hat) @ H_t.astype('complex_') @ U_hat
                    psi_I_s[i, l, m] = psi_I_s[i-1, l, m] - 1j*dt*H_t_I @ psi[l, m].astype('complex_')

    # Transform back to the Schroedinger picture and evaluate the occupation <n> at every time step.
    for i in range(t_points):
        for l in range(L):
            for m in range(M):
                U_hat = expm(-1j*t_s[i]*H[l, m])
                psi_s[i, l, m] = U_hat @ psi_I_s[i, l, m]
                g_s[i, l, m] = np.real(np.conj(psi_s[i, l, m]) @ n_hat.astype('complex_') @ psi_s[i, l, m])
    return psi_s, g_s
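
For orientation, a minimal usage sketch of the mean-field solver above (not part of the commit; U_func, V_func and all numerical values are illustrative assumptions, written as numba-jitted functions because solve is compiled in nopython mode):

import numpy as np
from numba import njit
from Functions.Hamiltonian_Solver import solve

@njit
def U_func(x):
    # Uniform on-site interaction (illustrative).
    return 1.0

@njit
def V_func(x, V_p):
    # Quadratic trap; x holds the (row, column) site index, V_p the two trap strengths (illustrative).
    return V_p[0]*x[0, 0]**2 + V_p[1]*x[0, 1]**2

n = 4                       # local Fock cutoff
dim = (10, 10)              # L x M lattice
t, z, mu = 0.05, 4, 0.5     # hopping, coordination number, chemical potential
V_p = np.array([0.01, 0.01])

psi, g, param, Es = solve(n, dim, t, z, U_func, V_func, V_p, mu, it=20)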
45 changes: 45 additions & 0 deletions Functions/.ipynb_checkpoints/Parameter_Estimator-checkpoint.py
@@ -0,0 +1,45 @@
from numpy import *
import numpy as np
import matplotlib.pyplot as plt
from numba import jit, njit
from Functions.Hamiltonian_Solver import *
from tqdm import tqdm


@jit(nopython = True)
def d(g1, g2):
    # L1 distance between a simulated and a measured occupation profile.
    return np.sum(np.abs(subtract(g1, g2)))

@jit(nopython = True)
def GradientDescent(n, dim, t, z, U_func, V_func, mu, it, g_exp, times = 100):
    # Finite-difference gradient descent on the two potential parameters p,
    # minimising the distance between the mean-field occupation g and the measured g_exp.
    L, M = dim
    it = 10                      # the it argument is overridden here
    dpmax = 1e-2
    dpmin = 1e-10
    alpha = 1e-3
    p = zeros(2) + np.array([L/10, L/10])
    dp = zeros(2) + 1e-2

    _, g, _, _ = solve(n, dim, t, z, U_func, V_func, p, mu, it)
    g -= 1/2
    prev = d(g, g_exp)
    errors = np.zeros(times - 2)
    for i in range(1, times - 1):
        for j in range(2):
            p[j] += dp[j]
            _, g, _, _ = solve(n, dim, t, z, U_func, V_func, p, mu, it)
            g -= 1/2
            now = d(g, g_exp)

            # Adapt the step: reset to dpmin when stuck, cap at dpmax,
            # otherwise step proportional to the finite-difference gradient.
            if (now == prev) | (dp[j] < dpmin):
                dp[j] = dpmin
            elif abs(dp[j]) >= dpmax:
                grad = (now - prev)/dp[j]
                grad = grad/np.abs(grad)
                dp[j] = -grad*dpmax
            else:
                grad = (now - prev)/dp[j]
                dp[j] = -alpha*grad
            prev = now
        errors[i-1] = prev
    return p, errors
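
For orientation, a minimal usage sketch of the estimator above (not part of the commit; the measured profile g_exp, its file name, and the jitted U_func and V_func reused from the previous sketch are illustrative assumptions):

import numpy as np
import matplotlib.pyplot as plt

# Illustrative fit of the two potential parameters to a measured occupation profile.
n, dim, t, z, mu = 4, (10, 10), 0.05, 4, 0.5
g_exp = np.loadtxt("g_exp.csv", delimiter=",")   # hypothetical measured profile of shape dim

p, errors = GradientDescent(n, dim, t, z, U_func, V_func, mu, 10, g_exp, times=100)
plt.plot(errors)
plt.xlabel("iteration")
plt.ylabel("distance to g_exp")
plt.show()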
68 changes: 68 additions & 0 deletions Functions/.ipynb_checkpoints/Phase_Transition-checkpoint.py
@@ -0,0 +1,68 @@
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
from sociophysicsDataHandler import SociophysicsDataHandler
import os

def filter_on_length(df, min_length = 10):
    # Keep only trajectories with more than min_length tracked points.
    traj_length = df.groupby(by = ["tracked_object"])["tracked_object"].count()
    df_length = df.join(traj_length, on='tracked_object', rsuffix='_trajectory_length')
    return df_length[df_length["tracked_object_trajectory_length"] > min_length].reset_index(drop = True)

def vel(df):
    # Finite-difference velocity components and speed per tracked object.
    vel = df.groupby(by = ["tracked_object"])
    df_vel = df.join(vel[["x_pos", "y_pos"]].diff()/10, on='tracked_object', rsuffix='_dot')
    df_vel["speed"] = (df_vel["x_pos_dot"]**2 + df_vel["y_pos_dot"]**2)**(1/2)
    return df_vel

def transition(df, v_tresh = 0.1, dx = 10):
    # Rescale positions, bin them into dx-sized cells, and count distinct static (speed < v_tresh)
    # and dynamic (speed >= v_tresh) objects per cell and timestamp.
    filt_data = filter_on_length(df.copy())
    filt_data["x_pos"] = filt_data["x_pos"]/1000//dx
    filt_data["y_pos"] = filt_data["y_pos"]/1000//dx

    loc_low = np.where(filt_data["speed"] < v_tresh)[0]
    loc_high = np.where(filt_data["speed"] >= v_tresh)[0]
    static = filt_data.loc[loc_low].groupby(by = ["date_time_utc", "x_pos", "y_pos"])["tracked_object"].nunique()
    static = static.rename("<Nstat>")
    dynamic = filt_data.loc[loc_high].groupby(by = ["date_time_utc", "x_pos", "y_pos"])["tracked_object"].nunique()
    dynamic = dynamic.rename("<Ndyn>")
    correlation = pd.concat([static, dynamic], axis = 1)
    correlation = correlation.dropna()
    return correlation

def save(n, N_avg, N_std, bins):
    # Normalise the accumulated sums by the per-bin counts in n and write the results to Result/.
    N_avg_save = N_avg.copy()/n.copy()
    N_avg_save[np.isnan(N_avg)] = 0
    N_std_save = N_std.copy()/n.copy()
    N_std_save[np.isnan(N_std)] = 0

    np.savetxt("Result/N_avg.csv", N_avg_save, delimiter = ",")
    np.savetxt("Result/N_std.csv", N_std_save, delimiter = ",")
    np.savetxt("Result/n.csv", n, delimiter = ",")

def run():
    # Loop over the platform data files, accumulate the static/dynamic statistics and save the result.
    dh = SociophysicsDataHandler()
    os.mkdir("Result")
    string = 'ehv/platform2.1/'
    files = dh.list_files(string)["name"]
    N_tresh = 10
    min_file = 306
    bins = 50

    N_avg = np.zeros([bins])
    N_std = np.zeros([bins])
    n = np.zeros([bins])
    for f in files[min_file:]:
        subfiles = dh.list_files(string + f)["name"]
        for g in subfiles:
            dh.fetch_prorail_data_from_path(string + f + '/' + g, verbose = False)
            if dh.df["tracked_object"].nunique() > N_tresh:
                df = vel(dh.df)
                correlation = transition(df)
                Ns = correlation.groupby(by = "<Nstat>")["<Ndyn>"]
                N_avg[Ns.mean().index.astype("int")] += Ns.mean()
                N_std[Ns.std().index.astype("int")] += Ns.std()/len(Ns)
                n[Ns.mean().index.astype("int")] += 1
    save(n, N_avg, N_std, bins)

if __name__ == "__main__":
    run()
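
For orientation, a minimal sketch of applying the helpers above to a single tracking file (not part of the commit; the data path is a placeholder):

# Illustrative only: static/dynamic counts for one file instead of the full run() loop.
dh = SociophysicsDataHandler()
dh.fetch_prorail_data_from_path('ehv/platform2.1/<subfolder>/<file>', verbose = False)
df = vel(dh.df)
correlation = transition(df, v_tresh = 0.1, dx = 10)
print(correlation.groupby(by = "<Nstat>")["<Ndyn>"].mean())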
2 changes: 2 additions & 0 deletions Functions/.ipynb_checkpoints/auth-checkpoint.txt
@@ -0,0 +1,2 @@
guest
YDPGL-XREXC-QGMZS-UAKES