diff --git a/evaluation/1Layer Suscept.py b/evaluation/1Layer Suscept.py
index 14204b8..1f3d90d 100644
--- a/evaluation/1Layer Suscept.py
+++ b/evaluation/1Layer Suscept.py
@@ -51,6 +51,7 @@ def __calc_chi(mode='density'):
     lastkurt=None
     for dim in dims:
         try:
+            1/0
             with open(path+f"magnets/magnetisation_{mode}_dim={dim:02}.txt", 'r', encoding='utf-8') as f:
                 magnet = json.load(f)
             with open(path+f"suscepts/susceptibility_{mode}_dim={dim:02}.txt", 'r', encoding='utf-8') as f:
@@ -87,7 +88,7 @@ def __calc_chi(mode='density'):
         else:
             raise NotImplementedError

-        mag = round(abs(mag),6)
+        mag = round(abs(dmag),6)
         fluct = round(mag2-mag**2,6)
         tail = round(mag4/mag2**2,6)
diff --git a/evaluation/2Layer Sonic.py b/evaluation/2Layer Sonic.py
new file mode 100644
index 0000000..42cfeec
--- /dev/null
+++ b/evaluation/2Layer Sonic.py
@@ -0,0 +1,71 @@
+# -*- coding: utf-8 -*-
+"""
+Created on Tue Aug 30 14:25:12 2022
+
+@author: timof
+"""
+
+import sys
+import os
+import json
+
+# from plot import qtplot
+from scipy.io import wavfile
+
+
+import math as m
+import numpy as np
+
+
+vect = np.vectorize
+
+@vect
+def log2(x):
+    try:
+        return m.log2(x)
+    except ValueError:
+        if x==0:
+            return float(0)
+        else:
+            raise
+
+def new_folder(path):
+    if not os.path.exists(path):
+        os.makedirs(path)
+    return path
+
+phase = np.vectorize(lambda x,y: (m.atan2(y,x)+m.pi)%(2*m.pi)-m.pi)
+diff = np.vectorize(lambda x,y: (y-x+m.pi)%(2*m.pi)-m.pi)
+H2 = lambda x: -x*m.log2(x)-(1-x)*m.log2(1-x)
+
+
+path = '/cloud/Public/_data/neuropercolation/2lay/steps=500000_rising/'
+suffix = ''
+
+chi = chr(967)
+vareps = chr(949)
+
+vals = [[],[]]
+
+runsteps = 1000100
+
+eps_space = np.linspace(0.005, 0.5, 100)
+eps_space = eps_space[1::2]
+
+dims = [121]#+[16,49]
+
+mode='density'
+ma=[]
+s=[]
+k=[]
+mk=[]
+lastkurt=None
+for dim in dims:
+    dimpath = new_folder(path + f'dim={dim:03d}/')
+    for epsilon in [0.000]:
+        with open(dimpath+f"eps={round(epsilon,3):.3f}_activation.txt", 'r', encoding='utf-8') as f:
+            activation = np.array(json.load(f)[::-1])
+        # activation = list(zip(*activation))
+
+
+        wavfile.write(dimpath+f"eps={round(epsilon,3):.3f}_activation_48k.wav_rev",48000,activation)
diff --git a/evaluation/2Layer fft.py b/evaluation/2Layer fft.py
index d53c26a..a00b2d0 100644
--- a/evaluation/2Layer fft.py
+++ b/evaluation/2Layer fft.py
@@ -345,13 +345,13 @@ qtplot(f'Q facor for odd automaton sizes',
 #     FWHM=json.load(f)

 qtplot(f'Dominant frequency',
-       [[0]+list(eps_space[:-1])]*3,
-       [[0]+FMAX[-1],[0]+LHM[-1],[0]+UHM[-1]],
+       [[0]+list(eps_space[:20])]*3,
+       [[0]+FMAX[:20],[0]+LHM[:20],[0]+UHM[:20]],
        # [f'dim={dim}x{dim}' for dim in dims],
-       ['dominant frequency', 'lesser half maximum', 'upper half maximum'],
-       #colors=['g','r','r'],
+       ['FMAX', 'FMAX-FWHM/2', 'FMAX+FWHM/2'],
+       colors=['r','r','g'],
        y_tag = 'frequency f',
-       y_log = True,
+       y_log = False,
        export=True,
        path=sigpath,
        filename=f'FMAX+FWHMto50_plot_{parts}parts_accum={accum}.png',
diff --git a/evaluation/2Layer phiplot.py b/evaluation/2Layer phiplot.py
index a338e06..aeaf17f 100644
--- a/evaluation/2Layer phiplot.py
+++ b/evaluation/2Layer phiplot.py
@@ -89,6 +89,7 @@ qtplot(f"Mean effect integration over noise level",
        [[0]+phi for phi in PHI[::-1]],
        [f'dim={dim:02d}x{dim:02d}' for dim in dims[::-1]],
        y_tag = f'effect integration {varphi}',
+       x_tag = f'noise level {vareps}',
        export=True,
        path=dimpath+"",
        filename=f'eps={round(epsilon,3):.3f}_evolution.png',
diff --git a/evaluation/4Layer Sonic.py b/evaluation/4Layer Sonic.py
new file mode 100644
index 0000000..71416bf
--- /dev/null
+++ b/evaluation/4Layer Sonic.py
@@ -0,0 +1,77 @@
+# -*- coding: utf-8 -*-
+"""
+Created on Tue Aug 30 14:25:12 2022
+
+@author: timof
+"""
+
+import sys
+import os
+import json
+
+from plot import qtplot
+
+
+import math as m
+import numpy as np
+
+
+vect = np.vectorize
+
+@vect
+def log2(x):
+    try:
+        return m.log2(x)
+    except ValueError:
+        if x==0:
+            return float(0)
+        else:
+            raise
+
+def new_folder(path):
+    if not os.path.exists(path):
+        os.makedirs(path)
+    return path
+
+path = '/cloud/Public/_data/neuropercolation/4lay/cons=27-knight_steps=100100_causal/dim=09/batch=0/'
+suffix = ''
+
+chi = chr(967)
+vareps = chr(949)
+
+vals = [[],[]]
+
+runsteps = 1000100
+
+eps_space = np.linspace(0.005, 0.5, 100)
+eps_space = eps_space[1::2]
+
+dims = list(range(3,10))#+[16,49]
+
+mode='density'
+ma=[]
+s=[]
+k=[]
+mk=[]
+lastkurt=None
+for dim in dims[-1:]:
+    dimpath = new_folder(path + f'dim={dim:02}/')
+    for epsilon in eps_space[:]:
+        with open(path+f"eps={round(epsilon,3):.3f}_phase_diff.txt", 'r', encoding='utf-8') as f:
+            phase_diff = np.array(json.load(f)[:500])
+        with open(path+f"eps={round(epsilon,3):.3f}_ei.txt", 'r', encoding='utf-8') as f:
+            phase_diff = np.array(json.load(f)[:500])
+
+        qtplot(f"Phase relation time series for eps={round(epsilon,3):.3f}",
+               [list(range(500))]*2,
+               [phase_diff],
+               x_tag = 'time step',
+               y_tag = f'phase difference',
+               y_range = (-m.pi,m.pi),
+               export=True,
+               path=dimpath+"evolution/",
+               filename=f'eps={round(epsilon,3):.3f}_evolution.png',
+               close=False)
+
+mode = 'density'
+#%%
diff --git a/evaluation/4laysign.py b/evaluation/4laysign.py
index a09801c..d09acb3 100644
--- a/evaluation/4laysign.py
+++ b/evaluation/4laysign.py
@@ -71,20 +71,20 @@ for _eps in _epses[11:21]:
     # sign_p_list.append(sign_p)

-    # with open(savepath+f"phasediff_means.txt", 'w', encoding='utf-8') as f:
-    #     json.dump(meanlist, f, indent=1)
-    # with open(savepath+f"phasediff_stds.txt", 'w', encoding='utf-8') as f:
-    #     json.dump(stdlist, f, indent=1)
-    # with open(savepath+f"phasediff_integrals.txt", 'w', encoding='utf-8') as f:
-    #     json.dump(integrallist, f, indent=1)
-    # with open(savepath+f"phasediff_cohends.txt", 'w', encoding='utf-8') as f:
-    #     json.dump(cohendlist, f, indent=1)
-    # with open(savepath+f"phasediff_norm_ps.txt", 'w', encoding='utf-8') as f:
-    #     json.dump(norm_p_list, f, indent=1)
-    # with open(savepath+f"phasediff_ttest_ps.txt", 'w', encoding='utf-8') as f:
-    #     json.dump(ttest_p_list, f, indent=1)
-    # with open(savepath+f"phasediff_sign_ps.txt", 'w', encoding='utf-8') as f:
-    #     json.dump(sign_p_list, f, indent=1)
+    with open(savepath+f"phasediff_means.txt", 'w', encoding='utf-8') as f:
+        json.dump(meanlist, f, indent=1)
+    with open(savepath+f"phasediff_stds.txt", 'w', encoding='utf-8') as f:
+        json.dump(stdlist, f, indent=1)
+    with open(savepath+f"phasediff_integrals.txt", 'w', encoding='utf-8') as f:
+        json.dump(integrallist, f, indent=1)
+    with open(savepath+f"phasediff_cohends.txt", 'w', encoding='utf-8') as f:
+        json.dump(cohendlist, f, indent=1)
+    with open(savepath+f"phasediff_norm_ps.txt", 'w', encoding='utf-8') as f:
+        json.dump(norm_p_list, f, indent=1)
+    with open(savepath+f"phasediff_ttest_ps.txt", 'w', encoding='utf-8') as f:
+        json.dump(ttest_p_list, f, indent=1)
+    with open(savepath+f"phasediff_sign_ps.txt", 'w', encoding='utf-8') as f:
+        json.dump(sign_p_list, f, indent=1)

 # stdlowlist = [meanlist[eps] - stdlist[eps] for eps in range(len(meanlist))]
 # stdhighlist = [meanlist[eps] + stdlist[eps] for eps in range(len(meanlist))]
@@ -105,19 +105,19 @@ for _eps in _epses[11:21]:
            filename=f'Resultant reduction disconnect eps={_eps} dim={_dim} extremes={_ext}.png',
            close=True)

-# qtplot(f'Mean causal phase reduction for dt={_dt} dim={_dim} with 4 layers',
-#        [_epsilons]*3,
-#        [stdlowlist, stdhighlist, meanlist],
-#        ['Low standard deviation',
-#         'High standard deviation',
-#         'Mean'],
-#        colors=['r','r','g'],
-#        x_tag = f'noise level {chr(949)}',
-#        y_tag = 'abs phase reduction',
-#        export=True,
-#        path=savepath,
-#        filename=f'Mean phase reduction dim={_dim} extremes={_ext}.png',
-#        close=True)
+qtplot(f'Mean causal phase reduction for dt={_dt} dim={_dim} with 4 layers',
+       [_epsilons]*3,
+       [stdlowlist, stdhighlist, meanlist],
+       ['Low standard deviation',
+        'High standard deviation',
+        'Mean'],
+       colors=['r','r','g'],
+       x_tag = f'noise level {chr(949)}',
+       y_tag = 'abs phase reduction',
+       export=True,
+       path=savepath,
+       filename=f'Mean phase reduction dim={_dim} extremes={_ext}.png',
+       close=True)
 # qtplot(f'Phase reduction probability for dt={_dt} dim={_dim} with 4 layers',
 #        [_epsilons],
 #        [integrallist],
diff --git a/evaluation/phi.py b/evaluation/phi.py
index bcce633..45a6b7f 100644
--- a/evaluation/phi.py
+++ b/evaluation/phi.py
@@ -13,11 +13,12 @@ from numpy.linalg import norm
 from datetime import datetime
 from random import sample as choose
 from random import random
-from numba import jit, njit, prange
+import itertools
+#from numba import jit, njit, prange

-from plot import qtplot
+# from plot import qtplot

-from neuropercolation import Simulate4Layers
+# from neuropercolation import Simulate4Layers

 eps_space = list(np.linspace(0.01,0.2,20))
 def new_folder(path):
@@ -29,7 +30,7 @@ phase = np.vectorize(lambda x,y: (m.atan2(y,x)+m.pi)%(2*m.pi)-m.pi)
 diff = np.vectorize(lambda x,y: (y-x+m.pi)%(2*m.pi)-m.pi)
 H2 = lambda x: -x*m.log2(x)-(1-x)*m.log2(1-x)

-@njit
+
 def neighbor(digit0, digit1, lenght):
     layer = int(lenght)
     dim = int(np.sqrt(layer))
@@ -43,9 +44,11 @@ def neighbor(digit0, digit1, lenght):
     else:
         return False

-@njit
+
 def kcomb(zp,zm):
-    if zp>2:
+    if zp+zm>5:
+        val=None
+    elif zp>2:
         val=1
     elif zm>2:
         val=0
@@ -62,7 +65,24 @@
     else:
         raise NotImplementedError(zp,zm)
     return val
+#%%
+neighbourcomb = [[(i,j) for j in range(6)] for i in range(6)]
+probcombs = [[kcomb(*n) for n in row] for row in neighbourcomb]
+posscombs = {val for row in probcombs for val in row}
+probcombs1 = [kcomb(n,4-n) for n in range(5)]
+
+eta = lambda eps: 1-eps
+etastar = lambda eps,kp: kp*(1-eps)+(1-kp)*eps
+epsstar = lambda eps,kp: kp*eps+(1-kp)*(1-eps)
+KL = lambda eps,kp: eta(eps)*m.log2(eta(eps)/etastar(eps,kp)) + eps*m.log2(eps/epsstar(eps,kp)) if kp is not None else 0
+phi_mat = lambda eps: [[KL(eps,kcomb(i,j)) for j in range(6)] for i in range(6)]
+phi_vals = lambda eps,df=4: sorted({sum([KL(eps,kp) for kp in kps]) for kps in itertools.product(*[[1,0.5]]*df)})
+phi_sing = lambda eps: KL(eps,kcomb(1,0))
+phi_lims = lambda eps: [phi_sing(eps)+val for val in phi_vals(eps)]
+
+
+#%%

 path = new_folder('/cloud/Public/_data/neuropercolation/1lay/mips/')

@@ -125,22 +145,29 @@ def MIP(dim,statestr,eps):
     return mip,lophi

-def calc_mips(dim,eps):
-    mip = []
+def calc_mips(dim,eps,save=True):
+    mips = [[] for i in range(5)]
     statestr='0'*dim**2
+    lims = phi_lims(eps)
     # statestr=statestr.translate(str.maketrans('','','.-='))

     for parti in range(1,2**(dim**2-1)):
         partstr = bin(parti)[2:].zfill(dim**2)

         curphi = phi(dim,statestr,partstr,eps)

-        mip.append(round(curphi,6))
+        for cha,mip in enumerate(mips):
+            if curphi