Parallel odor processing by mitral and middle tufted cells in the OB (Cavarretta et al 2016, 2018)

 Download zip file   Auto-launch 
Help downloading and running models
Accession:240116
"[...] experimental findings suggest that MC and mTC may encode parallel and complementary odor representations. We have analyzed the functional roles of these pathways by using a morphologically and physiologically realistic three-dimensional model to explore the MC and mTC microcircuits in the glomerular layer and deeper plexiform layers. [...]"
References:
1 . Cavarretta F, Burton SD, Igarashi KM, Shepherd GM, Hines ML, Migliore M (2018) Parallel odor processing by mitral and middle tufted cells in the olfactory bulb. Sci Rep 8:7625 [PubMed]
2 . Cavarretta F, Marasco A, Hines ML, Shepherd GM, Migliore M (2016) Glomerular and Mitral-Granule Cell Microcircuits Coordinate Temporal and Spatial Information Processing in the Olfactory Bulb. Front Comput Neurosci 10:67 [PubMed]
Model Information (Click on a link to find other models with that property)
Model Type: Realistic Network;
Brain Region(s)/Organism: Olfactory bulb;
Cell Type(s): Olfactory bulb main tufted middle GLU cell; Olfactory bulb main interneuron granule MC GABA cell; Olfactory bulb main interneuron granule TC GABA cell; Olfactory bulb (accessory) mitral cell; Olfactory bulb main tufted cell external; Olfactory bulb short axon cell;
Channel(s): I A; I Na,t; I_Ks; I K;
Gap Junctions: Gap junctions;
Receptor(s): AMPA; GabaA; NMDA;
Gene(s):
Transmitter(s): Glutamate; Gaba;
Simulation Environment: NEURON;
Model Concept(s): Action Potentials; Action Potential Initiation; Active Dendrites; Long-term Synaptic Plasticity; Synaptic Integration; Synchronization; Pattern Recognition; Spatio-temporal Activity Patterns; Temporal Pattern Generation; Sensory coding; Sensory processing; Olfaction;
Implementer(s): Cavarretta, Francesco [francescocavarretta at hotmail.it]; Hines, Michael [Michael.Hines at Yale.edu];
Search NeuronDB for information about:  Olfactory bulb main interneuron granule MC GABA cell; Olfactory bulb main tufted middle GLU cell; Olfactory bulb main interneuron granule TC GABA cell; GabaA; AMPA; NMDA; I Na,t; I A; I K; I_Ks; Gaba; Glutamate;
/
modeldb-bulb3d
sim
ampanmda.mod
distrt.mod *
fi.mod
fi_stdp.mod *
gap.mod
Gfluct.mod
kamt.mod
kdrmt.mod
ks.mod
naxn.mod
orn.mod
ThreshDetect.mod *
all.py
all2all.py *
assembly.py
balance.py *
bindict.py
binsave.py
binspikes.py
blanes.hoc
blanes.py
blanes_exc_conn.txt
blanes6.dic
bulb3dtest.py
cancel.py
catfiles.sh
cellreader.py
cellwriter.py
cfg27.py
common.py
complexity.py *
convertdic.py
destroy_model.py
determine_connections.py
distribute.py *
dsac.py
Eta.txt *
fillgloms.py
fixnseg.hoc *
g_conn_stats.py
gapjunc.py
gen_weights.py
geodist.py
geodist.txt
getmitral.py
gidfunc.py
GJ.py
gj_nrn.hoc
Glom.py *
granule.hoc
granules.py
graphmeat.py
grow.py
growdef.py *
growout.py
job
Kod.txt *
lateral_connections.py
loadbalutil.py *
lpt.py *
mcgrow.py
MCrealSoma.py *
mgrs.py
misc.py
mitral.hoc
mkassembly.py
mkmitral.py
modeldata.py
mtgrow.py
MTrealSoma.py
MTrealSoma2.py
mtufted.hoc
multisplit_distrib.py
net_mitral_centric.py
Nod.txt *
odors.py
odorstim.py
odstim2.txt *
pad.txt *
params.py
parrun.py
pathdist.py
realgloms.txt *
runsim.py
spike2file.hoc *
spk2weight.py
split.py
subsetsim.py
test_complexity.py
txt2bin.py
util.py *
vrecord.py
weightsave.py
                            
#subset of mitral and granule gids to construct in context of full network
import custom_params
# must be set before 'params' is imported so params picks up cfg27.py
custom_params.filename = 'cfg27'

#mitral and granule that generate the most spikes from a Nglom=10 full run

subset = [185,189,187,188,186]          # gids of the cells to simulate in isolation
connection_file = 'dummy.dic'           # mitral-granule connection info saved by the full model
spike_file = 'g37train.spk2'            # spike trains recorded from the full-network run
weights_file = ''                       # optional saved synaptic weights ('' = use defaults)

import params

params.tstop=1000
import odorstim
# activate only glomerulus 37 for the 'Mint' odor; zero every other glomerulus
odorstim.Fmax['Mint'][37]=0.7
for i in range(0, 37)+range(38,127):
  odorstim.Fmax['Mint'][i] = 0
import net_mitral_centric as nmc # nmc.dc sets up full model gids
from common import *
model = getmodel()
# no ORN excitatory noise (presumably so the subset run can be compared
# deterministically with the full-network run — confirm)
params.orn_std_e=0

''' can construct mitrals and connections dynamically. Granule
connections need to come from a file generated by the full model
'''
def subset_distrib(gids, model):
  '''Restrict the model's gid sets to the requested subset.
  Keeps only gids below ncell, then splits them into mitral gids
  (below nmitral) and granule gids (the remainder).'''
  valid = {g for g in gids if g < ncell}
  mitrals = {g for g in valid if g < nmitral}
  model.gids = valid
  model.mitral_gids = mitrals
  model.granule_gids = valid - mitrals

# restrict the model's gid sets to the chosen subset, then report the split
subset_distrib(subset, model)

print 'mitrals ', model.mitral_gids
print 'granules ', model.granule_gids

def mksubset(model):
  '''Build the subset network in the same order as the full model:
  mitral cells and their connection info, then granules, reciprocal
  synapses (from connection_file) and gap junctions.'''
  nmc.dc.mk_mitrals(model)
  nmc.dc.mk_mconnection_info(model)
  # re-derive granule gids: mk_mconnection_info appears to extend model.gids
  # with the granules connected to the subset mitrals (NOTE(review): confirm)
  model.granule_gids = model.gids - model.mitral_gids
  nmc.register_mitrals(model)
  nmc.build_granules(model)
  nmc.register_granules(model)
  # granule-side connections must come from the full-model dump, not be regrown
  get_and_make_subset_connections()
  nmc.build_synapses(model)
  import GJ
  GJ.init_gap_junctions()
  if rank == 0: print "network subset setuptime ", h.startsw() - nmc.t_begin

def get_and_make_subset_connections():
  '''Load mitral-granule connection records from connection_file and create
  the reciprocal synapse objects for every md_gid relevant to the subset,
  registering each in model.mgrss keyed by its md_gid.'''
  #get the needed info
  global connection_file
  import bindict
  bindict.load(connection_file)
  # NOTE: this local 'subset' (a set of md_gids) shadows the module-level
  # 'subset' list of cell gids
  subset = chk_consist(bindict.gid_dict, bindict.mgid_dict, bindict.ggid_dict)
  #make them
  import mgrs
  for mdgid in subset:
    c = bindict.gid_dict[mdgid]     # connection record for this synapse gid
    slot = mgrs.gid2mg(mdgid)[3]    # slot index decoded from the gid
    rs = mgrs.mk_mgrs(c[0], c[1], c[2], c[3], 0, c[4], slot)
    model.mgrss.update({rs.md_gid : rs})
  print "%d mgrs created"%len(model.mgrss)

def chk_consist(md_dict, m_dict, g_dict):
  '''Check that the dictionaries loaded from the full model are consistent
  with the subset's mitral/granule gids, and return the set of synapse
  (md) gids that involve the subset cells.'''
  #Incomplete consistency check
  subset = set()
  for mgid in model.mitral_gids:
    for gid in m_dict[mgid]:
      if gid < nmitral:
        # among mitral gids, a mitral's record should reference only itself
        assert(gid == mgid)
      elif gid < ncell:
        # a connected granule must appear in the granule dictionary
        assert(g_dict.has_key(gid))
      else:
        # gids >= ncell are synapse (md) gids
        assert(md_dict.has_key(gid))
        subset.add(gid)
  for ggid in model.granule_gids:
    if g_dict.has_key(ggid):
      for gid in g_dict[ggid]:
        # granule records hold gd_gids; the paired md_gid is gd_gid + 1
        # (NOTE(review): pairing convention assumed — confirm in mgrs.py)
        assert(md_dict.has_key(gid+1))
        subset.add(gid+1)
    else:
      print 'granule %d was not used in full model'%ggid
  return subset

def patstim(filename):
  '''Create a PatternStim that replays, from the full-model spike file
  *filename*, every spike needed to drive the subset: the subset cells
  themselves plus the partner gids of their reciprocal synapses.
  Returns (tvec, gidvec, ps, spk_standard); spk_standard maps each subset
  gid to its full-run spike times for later comparison by spkcompare().
  The caller must keep the returned tuple alive: ps plays from tvec and
  gidvec for the duration of the simulation.'''
  #only read the spikes used, ie mgid, ggid, md_gid, and gd_gid
  wanted = set()
  wanted.update(model.gids)
  for md_gid in model.mgrss:
    rs = model.mgrss[md_gid]
    if rs.md:
      # mitral side exists here; also need spikes from the paired gd_gid
      wanted.add(rs.gd_gid)
    if rs.gd:
      # granule side exists here; also need spikes from the paired md_gid
      wanted.add(rs.md_gid)
  print wanted
  # read the spiketimes
  from binspikes import SpikesReader
  sr = SpikesReader(filename)
  # get the output spikes for each of the cells we simulate in order to be able
  # to verify that the subset simulation is same as full network sim.
  spk_standard = {}
  for gid in model.gids:
    if sr.header.has_key(gid):
      spk_standard.update({gid : sr.retrieve(gid)})
    else:
      spk_standard.update({gid : []})
  # now get all the spikes we need for input (will include the cell output
  # spikes but that can also be helpful for debugging)
  tvec = h.Vector()
  gidvec = h.Vector()
  for gid in wanted:
    if not sr.header.has_key(gid):
      print 'no spikes from %d'%gid
      continue
    spikes = sr.retrieve(gid)
    for t in spikes:
      tvec.append(t)
      gidvec.append(gid)
  # put in spiketime order
  srt = tvec.sortindex()
  tvec.index(tvec, srt)
  gidvec.index(gidvec, srt)
  #make the PatternStim
  ps = h.PatternStim()
  ps.play(tvec, gidvec)
  return (tvec, gidvec, ps, spk_standard)

# construct the subset network (cells, synapses, gap junctions)
mksubset(model)


# inits the weights
if weights_file:
  import weightsave
  weightsave.weight_load(weights_file)

# create dummy synapses (empty list argument); presumably keeps synapse
# bookkeeping consistent with the full model — confirm against dummysyns.py
import dummysyns
dummysyns.mk_dummy_syns([])

import odorstim
odseq = odorstim.OdorSequence(params.odor_sequence)

# replay the full-network spike trains as input to the subset;
# keep the returned tuple alive (it holds the PatternStim and its Vectors)
ps = patstim(spike_file)

def spkrecord(model):
  '''Register spike recording for every subset cell so its output can be
  compared with the full-run spikes in ps[3] (spk_standard).
  Returns the (time Vector, gid Vector) pair being filled.'''
  times = h.Vector()
  cells = h.Vector()
  for cell_gid in model.gids:
    pc.spike_record(cell_gid, times, cells)
  return (times, cells)

# (spike-time Vector, gid Vector) filled during the run for comparison
simspikes = spkrecord(model)

def spkcompare():
  '''Compare the subset run's spikes (simspikes) against the full-network
  spikes (ps[3]) gid by gid, printing whether counts and times match up
  to h.tstop (times compared with 1e-6 ms tolerance).'''
  #compare simspikes with the spk_standard in ps[3]
  for gid in model.gids:
    tstd = h.Vector(ps[3][gid])
    # get the gid spikes
    ix = simspikes[1].c().indvwhere("==", gid)
    tsim = h.Vector().index(simspikes[0],ix)
    # is tstd same as tsim up to tstop?
    # first, same count of spikes
    nstd = int(tstd.indwhere(">", h.tstop))
    # indwhere returns -1 when no element exceeds tstop, i.e. all spikes count
    if nstd < 0: nstd = len(tstd)
    nsim = len(tsim)
    if nstd != nsim:
      print "\nFor gid %d, during interval 0 to tstop=%g, different numbers of spikes %d %d"%(gid, h.tstop, nstd, nsim)
      print "tstd"
      tstd.printf()
      print "tsim"
      tsim.printf()
      return nstd, nsim
    else:
      # counts match: all differences within 1e-6 means the trains agree
      if tstd.c().sub(tsim).abs().indwhere(">", 1.e-6) == -1.0:
        print "\n%d spike times for gid %d are the same"%(nstd, gid)
      else:
        print "\n%d spike times for gid %d are different, tsim-tstd:"%(nstd, gid)
        tsim.c().sub(tstd).printf()
        for i in range(nstd):
          print "%d %g %g"%(i, tstd.x[i], tsim.x[i])

h.load_file("nrngui.hoc")

#h('proc setdt(){}')
# fixed time step of 3/128 ms (presumably to match the full-network run's dt
# exactly for spike-for-spike comparison — confirm)
h.dt = 1./64. + 1./128.

rseq = []   # saved sequence positions of all Random streams (filled by saverand)
def saverand():
  '''Append the current sequence position of every hoc Random object
  to the module-level rseq list.'''
  for rng in h.List('Random'):
    rseq.append(rng.seq())
def restorerand():
  '''Rewind every hoc Random object to the sequence position saved in rseq
  (same List('Random') ordering as when saverand() ran).'''
  pos = 0
  for rng in h.List('Random'):
    rng.seq(rseq[pos])
    pos += 1
# type-3 FInitializeHandler: restore the Random streams during finitialize so
# every run replays identical random sequences (NOTE(review): handler-type
# semantics per NEURON docs — confirm)
rfih = h.FInitializeHandler(3, restorerand)
saverand()

# disabled interactive test harness: rename '__nothing__' to '__main__' to
# enable. Plots somatic voltage of each subset cell, runs the simulation,
# then compares spikes against the full-network run.
if __name__ == '__nothing__':
  #h.tstop = 1000
  grphs = {}
  for gid in model.gids:
    g = h.Graph()
    h.addplot(g, 0)
    g.size(0, h.tstop, -80, 60)
    g.addvar('gid%d.soma.v(.5)'%gid, pc.gid2cell(gid).soma(.5)._ref_v)
    grphs.update({gid : g})
  h.run()
  spkcompare()
h.load_file('subsetsim.ses')

Loading data, please wait...