Model of memory linking through memory allocation (Kastellakis et al. 2016)

Accession: 206249
Here, we present a simplified, biophysically inspired network model that incorporates multiple plasticity processes and explains linking of information at three different levels: (a) learning of a single associative memory, (b) rescuing of a weak memory when paired with a strong one, and (c) linking of multiple memories across time. By dissecting synaptic from intrinsic plasticity and neuron-wide from dendritically restricted protein capture, the model reveals a simple, unifying principle: linked memories share synaptic clusters within the dendrites of overlapping populations of neurons.
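As a minimal illustrative sketch (not part of the distributed code), the "shared synaptic clusters" principle can be quantified as the cosine similarity between the per-branch synaptic allocation vectors of two memories. The branch_overlap helper and the example vectors below are hypothetical; the analysis script shown further down applies the same cosine measure to the model's allocation output files.

import numpy

def branch_overlap(alloc_a, alloc_b):
	"""Cosine similarity between two per-branch synaptic allocation vectors.
	1.0 means the two memories cluster their synapses on the same branches,
	0.0 means they occupy disjoint branches. (Hypothetical summary vectors.)"""
	na = numpy.sqrt(numpy.dot(alloc_a, alloc_a))
	nb = numpy.sqrt(numpy.dot(alloc_b, alloc_b))
	if na == 0 or nb == 0:
		return 0.0
	return float(numpy.dot(alloc_a, alloc_b) / (na * nb))

# Example: memory 2 reuses the branches of memory 1, memory 3 does not
mem1 = numpy.array([5, 0, 3, 0, 2, 0])   # potentiated synapses per branch
mem2 = numpy.array([4, 1, 2, 0, 0, 0])
mem3 = numpy.array([0, 4, 0, 5, 0, 3])
print(branch_overlap(mem1, mem2))   # ~0.92 -> linked (shared clusters)
print(branch_overlap(mem1, mem3))   # 0.0   -> independent memories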
Reference:
1. Kastellakis G, Silva AJ, Poirazi P (2016) Linking Memories across Time via Neuronal and Dendritic Overlaps in Model Neurons with Active Dendrites. Cell Rep 17:1491-1504 [PubMed]
Model Information:
Model Type: Realistic Network;
Brain Region(s)/Organism:
Cell Type(s): Abstract integrate-and-fire leaky neuron with dendritic subunits;
Channel(s):
Gap Junctions:
Receptor(s):
Gene(s):
Transmitter(s):
Simulation Environment: C or C++ program (web link to model);
Model Concept(s): Active Dendrites;
Implementer(s): Kastellakis, George [gkastel at gmail.com];
/
stdmodel
distributionPlot
exportfig
figs
mtrand
README
allgraphs.m
allrun.m
an_brtest.m
an_stats.m
anmulti.py
ansims.py
barwitherr.m *
btagstats.m *
CImg.h *
constructs.cpp
constructs.h
defaults.m
dir2.m *
getspikedata.m *
getsynstate.m *
getsynstate2.m *
graphs.m *
hist_percents.m *
hist_with_errs.m *
interact.m *
intexp_constructs.cpp
job_sims.sh
kurtos.m *
lamodel.cpp
LICENSE *
make_graphs.m *
Makefile *
matlab.mat *
mtest.py
mtrand.cpp *
mtrand.h *
multi.py
multistats.m *
nextplot.m *
pairstrong.m *
repeated.m *
rotateXLabels.m *
run_1.sh
run_2strong.sh
run_2weak.sh
run_3.sh
run_all.sh
run_brov.sh
run_brtest.sh
run_btag.sh
run_dir.sh
run_ep.sh
run_gp.sh
run_gp2.sh
run_mult.sh
run_Nsparse.sh
run_pairstrong.sh
run_rep.sh
run_sims.sh
run_sparse.sh
run_sparseS2.sh
runloc.sh
runmany.sh
S2sparse.m *
savefig.m *
scratch.m *
sensitivity.m *
stats.m *
stats.py *
stderr.m *
strong2.m *
strongstrong.m *
submit_lamodel.sh *
three.m *
trevrolls.m *
vis.py *
weastrong.m *
wxglmodel *
wxglmodel.cpp *
wxglmodel.h *
wxmodel.cpp *
wxmodel.h *
                            
import matplotlib
import pylab
import numpy
import NeuroTools.signals as signals

def visalloc():
	"""Plot pairwise cosine similarities of input patterns (pcos) and of the
	resulting firing/allocation patterns (coss), for several numbers of
	memories (nm), inter-memory intervals (it) and branch counts (nbranches)."""
	for it in [60]:
		for nm in [10, 19]:
			for nbranches in range(10, 60, 16):

				coss = numpy.zeros((nm, nm))  # firing/allocation-pattern similarity
				pcos = numpy.zeros((nm, nm))  # input-pattern similarity

				# Input patterns, one row per memory
				pat = numpy.genfromtxt("./data/alloc/p_m%di%db%d.dat" % (nm, it, nbranches))

				# Network responses/allocations, one row per memory (keep first 64 columns)
				ar = numpy.genfromtxt("./data/alloc/r_m%di%db%d.dat" % (nm, it, nbranches))
				ar = ar[:, 0:64]


				scx = []  # input-pattern similarities (for the optional scatter plot below)
				scy = []  # response-pattern similarities
				# Lower-triangular pairwise cosine similarities
				for i in range(nm):
					for j in range(i+1):
						s1 = numpy.sqrt(numpy.dot(ar[i], ar[i]))
						s2 = numpy.sqrt(numpy.dot(ar[j], ar[j]))
						coss[i][j] = numpy.dot(ar[i], ar[j]) / (s1*s2)

						s1 = numpy.sqrt(numpy.dot(pat[i], pat[i]))
						s2 = numpy.sqrt(numpy.dot(pat[j], pat[j]))
						pcos[i][j] = numpy.dot(pat[i], pat[j]) / (s1*s2)

						scx.append( pcos[i][j])
						scy.append( coss[i][j])

						

				pylab.figure()
				pylab.title(" Mems %d intvl %d branches %d" % (nm, it, nbranches))
				# Note: the second imshow overwrites the first, so only pcos is displayed and saved
				pylab.imshow(coss, interpolation='nearest', aspect='auto', cmap='hot')
				pylab.imshow(pcos, interpolation='nearest', aspect='auto', cmap='hot')
				pylab.colorbar()
				pylab.savefig("cos_%di%db%d.png" % (nm, it, nbranches))

				#pylab.scatter(scx, scy)
				#pylab.title("m %d i %d B%d"%(nm, it, nbranches));
				#pylab.xlabel("Input Pattern similarity ")
				#pylab.ylabel("Firing pattern similarity ")
				#pylab.savefig("bn_%di%db%d.png"%(nm, it, nbranches));
	pylab.show()

			
			




def spikestats():
	"""Read ./data/0/spikes.dat (one line of spike times per neuron) and show a raster plot."""
	ff = open('./data/0/spikes.dat', 'r')
	fdata = ff.readlines()
	ff.close()

	# Build a list of (neuron_id, spike_time) pairs
	lst = []
	nid = 0
	for l in fdata:
		ar = numpy.fromstring(l, sep=' ')
		for a in ar:
			lst.append((nid, a))
		nid += 1

	slist = signals.SpikeList(lst, range(0, nid))
	slist.raster_plot()

	#pylab.figure()
	#pylab.imshow(spraster , interpolation='nearest', aspect='auto',cmap='hot')


	#pp = numpy.correlate(spraster[100, :], spraster[102,:], 'full');
	#print pp;
	#pylab.plot(pp)

	pylab.show()





def printltp():
	"""Plot the plasticity-related state variables logged in /tmp/eltp.dat (one row per variable after transposing)."""
	lar = numpy.genfromtxt('/tmp/eltp.dat')
	lar = lar.transpose()
	pylab.figure()
	print(len(lar))

	labels = ['Induction', 'ELTP', 'Rb', 'Pb', 'Rn', 'Pn', 'iltp', 'W', 'T']

	# Plot every logged variable except 'iltp' (index 6)
	for i in range(len(lar)):
		if i != 6:
			pylab.plot(lar[i, :], label=labels[i])


	pylab.legend()

	# Relabel the x-axis from simulation steps (tstep seconds each) to hours
	mmax = len(lar[0, :])
	tstep = 50
	th = 3600 / tstep   # steps per hour

	locs, ticklabels = pylab.xticks(numpy.arange(0, mmax, th), numpy.arange(0, mmax/th, 1))

	#pylab.plot(lar[1, :] + lar[3, :])

	pylab.show()




if __name__ == '__main__':
	spikestats()