from mpi4py import MPI
from neuron import h
import matplotlib
matplotlib.use('Agg')
import numpy
from pylab import *
import time
import scipy.io
import pickle
import sys
import mutation_stuff
import approxhaynetstuff
import mytools

def simseedburst_func(Nmc=1, tstop=10200, mutID=0, rdSeed=1, Econ=0.0004, Icon=0.001, nseg=5, rateCoeff=1.0, gNoiseCoeff=1.0, gSynCoeff=1.0, Ncells2save=1, sparsedt=1.0, Nsyns2save=1, connM=[], gToBlock=[], blockEfficiency=0.0):
  myrandsavemat = int(100000*gSynCoeff+1000*Nmc+rdSeed)
  rank = MPI.COMM_WORLD.Get_rank()
  dt = 0.025
  coeffCoeffs = [[0.25,0],[0.125,0],[0.5,0],[0.5,1.0/3],[0.5,2.0/3],[0.5,1.0],[-0.25,0],[-0.125,0],[-0.5,0]]

  MT = mutation_stuff.getMT(False)
  defVals = mutation_stuff.getdefvals()
  keyList = defVals.keys()
  for idefval in range(0,len(keyList)):
    if type(defVals[keyList[idefval]]) is not list:
      defVals[keyList[idefval]] = [defVals[keyList[idefval]], defVals[keyList[idefval]]] # make the dictionary values [somatic, apical]
  updatedVars = ['somatic','apical','basal'] # the possible classes of segments that defVals may apply to
  whichDefVal = [0,1,0]                      # use the defVal[0] for somatic and basal segments and defVal[1] for apical segments

  unpicklefile = open('scalings_cs.sav', 'r')
  unpickledlist = pickle.load(unpicklefile)
  unpicklefile.close()
  theseCoeffsAllAll = unpickledlist[0]

  filename = 'pars_withmids_combfs_final'
  unpicklefile = open(filename+".sav", 'r')
  unpickledlist = pickle.load(unpicklefile)
  unpicklefile.close()
  par_names = unpickledlist[0]
  par_values = unpickledlist[1]
  paramdict = {}
  for i in range(0,len(par_names)):
    paramdict[par_names[i]] = par_values[i]

  h("""
{load_file("stdlib.hoc")}
{load_file("stdrun.hoc")}

initialization_tstart = startsw()

strdef fileName
objref fileObj

fileObj = new File()

rdSeed = """+str(rdSeed)+"""
Nmc = """+str(Nmc)+"""
connectivity = 1
noisyst = 0

tstop = """+str(tstop)+"""
rcpWeightFactor = 1.5 // the factor by which reciprocal weights are stronger than unidirectional weights
pT2Tr = 0.06 // probability of reciprocating an existing connection to another L5bPC
pT2T = 0.13  // probability of a L5bPC being connected to another L5bPC
Econ = """+str(Econ)+""" // excitatory synaptic conductance
Icon = """+str(Icon)+""" // inhibitory synaptic conductance
NcontE = 5 // number of excitatory synaptic contacts per connection
NsynE = 10000 // number of excitatory synapses
NsynI = 2500 // number of inhibitory synapses
gNoiseCoeff = """+str(gNoiseCoeff)+""" // scaling of background synaptic conductances
rateE = """+str(0.72*rateCoeff)+""" // average rate of presynaptic excitatory cells
rateI = """+str(7.0*rateCoeff)+""" // average rate of presynaptic inhibitory cells
mainBifurcation = 650

{Ncells2save = """+str(Ncells2save)+"""}
sparsedt = """+str(sparsedt)+""" // recordings of [Ca], I_SK and vApical are done with low temporal resolution to save memory
{gSynCoeff = """+str(gSynCoeff)+"""}

objref tempvec
tempvec = new Vector()
{tempvec.append(Nmc)}
{Ncells2save = """+str(Ncells2save)+"""}
""")
  print "Params OK!"
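
  # The network itself is set up on the hoc side below: the thick-tufted layer-5b
  # pyramidal cell (TTC) template is loaded from models/TTC.hoc, the per-run random
  # streams are seeded from rdSeed, and an Nmc x Nmc matrix (conMat) stores the
  # microcircuit connectivity.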
h(""" {load_file("models/TTC.hoc")} objref MC_TTC objref sl //synaptic locations list objref rds1,rds2,rds3 {rds1 = new Random(1000*rdSeed)} {rds1.uniform(0,1)} //random for microcircuit connectivity and noisyst objref conMat conMat = new Matrix(Nmc,Nmc) for(i=0;i 0 and imutvar%2==0: mutText = mutText+"\n" mutvars = allmutvars[iallmutval][imutvar] mutvals = allmutvals[iallmutval][imutvar] if type(mutvars) is str: mutvars = [mutvars] mutText = mutText + str(mutvars) + ": " for kmutvar in range(0,len(mutvars)): mutvar = mutvars[kmutvar] if mutvar.find('offm') > -1 or mutvar.find('offh') > -1 or mutvar.find('ehcn') > -1: newVal = [x+mutvals*thisCoeff for x in defVals[mutvar]] if mutvals >= 0 and kmutvar==0: mutText = mutText + "+" + str(mutvals) +" mV" elif kmutvar==0: mutText = mutText + str(mutvals) +" mV" else: newVal = [x*(mutvals**thisCoeff) for x in defVals[mutvar]] if kmutvar==0: mutText = mutText + "*" + str(mutvals) if kmutvar < len(mutvars)-1: mutText = mutText + ", " if mutvar.find('_Ih') > -1: updateThese = [1,1,1] elif mutvar.find('_Ca_HVA') > -1 or mutvar.find('_Ca_LVAst') > -1 or mutvar.find('_SKv3.1') > -1 or mutvar.find('_Ca_HVA') > -1 or mutvar.find('_SK_E2') > -1 or mutvar.find('_NaTa_t') > -1 or mutvar.find('_CaDynamics_E2') > -1: updateThese = [1,1,0] elif mutvar.find('_K_Pst') > -1 or mutvar.find('_K_Tst') > -1 or mutvar.find('_Nap_Et2') > -1: updateThese = [1,0,0] elif mutvar.find('_Im') > -1: updateThese = [0,1,0] else: print "Error: str=" + str(mutvar) updatedThese = [0,0,0] for iupdated in range(0,3): if updateThese[iupdated]: print """forsec epnm.pc.gid2cell(i)."""+str(updatedVars[iupdated])+""" { """+mutvar+""" = """+str(newVal[whichDefVal[iupdated]])+""" }""" for i in range(0,Nmc): h(""" i = """+str(i)+""" if (epnm.gid_exists(i)) { forsec epnm.pc.gid2cell(i)."""+str(updatedVars[iupdated])+""" { """+mutvar+""" = """+str(newVal[whichDefVal[iupdated]])+""" } }""") print mutText h(""" thisCa = 0.0001 for(i=0;i 0: cellsSynRecorded = [next(i for i,x in enumerate(cumpNsyns) if x > randVec[j]) for j in range(0,Nsyns2save)] synIndsRecorded = [int(2*3*nseg+rand()*Nsyns[i]) for i in cellsSynRecorded] else: cellsSynRecorded = [] synIndsRecorded = [] for i in range(0,int(h.Ncells2save)): h(""" i = """+str(i)+""" if (epnm.gid_exists(i)) { {vSomaList.append(new Vector())} {caSomaList.append(new Vector())} {skSomaList.append(new Vector())} {cahvaSomaList.append(new Vector())} {calvaSomaList.append(new Vector())} {natSomaList.append(new Vector())} {napSomaList.append(new Vector())} {ihSomaList.append(new Vector())} {kv31SomaList.append(new Vector())} {ktSomaList.append(new Vector())} {kpSomaList.append(new Vector())} {tvecList.append(new Vector())} access epnm.pc.gid2cell(i).soma {vSomaList.o[vSomaList.count()-1].record(&v(0.5),dt)} {caSomaList.o[caSomaList.count()-1].record(&cai(0.5),sparsedt)} {skSomaList.o[skSomaList.count()-1].record(&ik_SK_E2(0.5),sparsedt)} {cahvaSomaList.o[skSomaList.count()-1].record(&ica_Ca_HVA(0.5),sparsedt)} {calvaSomaList.o[skSomaList.count()-1].record(&ica_Ca_LVAst(0.5),sparsedt)} {natSomaList.o[skSomaList.count()-1].record(&ina_NaTa_t(0.5),sparsedt)} {napSomaList.o[skSomaList.count()-1].record(&ina_Nap_Et2(0.5),sparsedt)} {ihSomaList.o[skSomaList.count()-1].record(&ihcn_Ih(0.5),sparsedt)} {kv31SomaList.o[skSomaList.count()-1].record(&ik_SKv3_1(0.5),sparsedt)} {ktSomaList.o[skSomaList.count()-1].record(&ik_K_Tst(0.5),sparsedt)} {kpSomaList.o[skSomaList.count()-1].record(&ik_K_Pst(0.5),sparsedt)} } """) indSynIndsRecorded = [ix for ix,x in 
    indSynIndsRecorded = [ix for ix,x in enumerate(cellsSynRecorded) if x==i]
    for isyn in range(0,len(indSynIndsRecorded)):
      h("""
if (epnm.gid_exists(i)) {
  {IList.append(new Vector())}
  {IList.o[IList.count()-1].record(&epnm.pc.gid2cell(i).synlist.o["""+str(synIndsRecorded[indSynIndsRecorded[isyn]])+"""].i, sparsedt)}
}
""")
    if i < Ncells2save:
      h("""
if (epnm.gid_exists(i)) {
  {vSomaList.append(new Vector())}
  {vSomaList.o[vSomaList.count()-1].record(&v(0.5),dt)}
}
""")

  for i in range(0,Nmc):
    h("""
i = """+str(i)+"""
if (epnm.gid_exists(i)) {
  access epnm.pc.gid2cell(i).soma
  {apcList.append(new APCount(0.5))}
  {apcvecList.append(new Vector())}
  apcList.o[apcList.count()-1].thresh = -40
  {apcList.o[apcList.count()-1].record(apcvecList.o[apcList.count()-1])}
  {netcon = new NetCon(&v(0.5), nil)}
  netcon.threshold = -20
  {netcon.record(spikes, spikedCells)}
}""")
    print "epnm.gid " + str(i) + " OK!"

  h("""
{epnm.set_maxstep(100)}
stdinit()
if (epnm.gid_exists(0)) {
  print \"\\n\"
  sim_tstart = startsw()
  initializationtime = (sim_tstart-initialization_tstart)/3600
  print \"Initialization completed. Initialization took \", initializationtime, \" hours\\n\"
  print \"Starting simulation\\n\"
  print \"\\n\"
}
""")

  h("""
{epnm.psolve(tstop)}
if (epnm.gid_exists(0)) {
  simruntime = (startsw() - sim_tstart)/3600
  print \"Simulation took \", simruntime, \" hours\\n\"
}
""")
  print "Simulation OK!"

  vSoma = numpy.array(h.vSomaList)
  spikes = numpy.array(h.spikes)
  spikedCells = numpy.array(h.spikedCells)

  picklelist = [spikes,spikedCells,vSoma]
  file = open('spikes_parallel_'+myMechToAdd+str(nseg)+'_'+str(tstop)+'_'+str(mutID)+'_'+str(Econ)+'_'+str(Icon)+'_'+str(rateCoeff)+'_'+str(gNoiseCoeff)+'_'+str(gSynCoeff)+'_'+str(rdSeed)+'_'+str(rank)+'_of_'+str(Nmc)+'.sav', 'w')
  pickle.dump(picklelist,file)
  file.close()

  h("""
{epnm.pc.runworker()}
{epnm.pc.done()}
""")
  return [spikes,spikedCells,vSoma]
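

# ------------------------------------------------------------------------------------
# Minimal usage sketch (added for illustration; not part of the original script). The
# parameter values below are assumptions chosen only to show the call signature, not
# the settings used in any published experiment. The function is meant to be launched
# under MPI together with NEURON's parallel machinery, e.g.
#   mpiexec -n 4 python <this file>
# with models/TTC.hoc, scalings_cs.sav and pars_withmids_combfs_final.sav available in
# the working directory.
# ------------------------------------------------------------------------------------
if __name__ == "__main__":
  spikes, spikedCells, vSoma = simseedburst_func(Nmc=10,        # assumed: small 10-cell microcircuit
                                                 tstop=2000,    # assumed: 2-second simulation
                                                 mutID=0,       # assumed: first variant index
                                                 rdSeed=1,
                                                 Ncells2save=1,
                                                 Nsyns2save=1)
  print "Recorded " + str(len(spikes)) + " spikes from " + str(len(numpy.unique(spikedCells))) + " cells"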