From f790d11d4cfadb20c3535a44aed946355be9eeb4 Mon Sep 17 00:00:00 2001
From: Padraig Gleeson
Date: Wed, 31 Aug 2016 10:49:43 +0100
Subject: [PATCH 01/54] Improved connectivity import for multicompartmental
cells
---
netpyne/simFuncs.py | 50 +++++++++++++++++++++++++++++++--------------
1 file changed, 35 insertions(+), 15 deletions(-)
diff --git a/netpyne/simFuncs.py b/netpyne/simFuncs.py
index 584194e54..129ce12df 100644
--- a/netpyne/simFuncs.py
+++ b/netpyne/simFuncs.py
@@ -1627,6 +1627,7 @@ class NetPyNEBuilder(DefaultNetworkHandler):
pop_ids_vs_components = {}
pop_ids_vs_use_segment_groups_for_neuron = {}
pop_ids_vs_ordered_segs = {}
+ pop_ids_vs_cumulative_lengths = {}
projection_infos = OrderedDict()
connections = OrderedDict()
@@ -1709,7 +1710,7 @@ def handlePopulation(self, population_id, component, size, component_obj):
use_segment_groups_for_neuron = False
for seg_grp in cell.morphology.segment_groups:
- if hasattr(seg_grp,'neuro_lex_id') and seg_grp.neuro_lex_id == "sao864921383"+"xxxxxxxxxx":
+ if hasattr(seg_grp,'neuro_lex_id') and seg_grp.neuro_lex_id == "sao864921383":
use_segment_groups_for_neuron = True
cellRule['secs'][seg_grp.id] = {'geom': {'pt3d':[]}, 'mechs': {}, 'ions':{}}
for prop in seg_grp.properties:
@@ -1738,8 +1739,10 @@ def handlePopulation(self, population_id, component, size, component_obj):
else:
- ordered_segs = cell.get_ordered_segments_in_groups(cellRule['secs'].keys())
+ ordered_segs, cumulative_lengths = cell.get_ordered_segments_in_groups(cellRule['secs'].keys(),include_cumulative_lengths=True)
self.pop_ids_vs_ordered_segs[population_id] = ordered_segs
+ self.pop_ids_vs_cumulative_lengths[population_id] = cumulative_lengths
+
for section in cellRule['secs'].keys():
#print("ggg %s: %s"%(section,ordered_segs[section]))
for seg in ordered_segs[section]:
@@ -1778,6 +1781,8 @@ def handlePopulation(self, population_id, component, size, component_obj):
seg_grps_vs_nrn_sections[seg_grp.id].append(section_name)
else:
seg_grps_vs_nrn_sections[seg_grp.id].append(inc.segment_groups)
+ if not cellRule['secLists'].has_key(seg_grp.id): cellRule['secLists'][seg_grp.id] = []
+ cellRule['secLists'][seg_grp.id].append(inc.segment_groups)
if not seg_grp.neuro_lex_id or seg_grp.neuro_lex_id !="sao864921383":
cellRule['secLists'][seg_grp.id] = seg_grps_vs_nrn_sections[seg_grp.id]
@@ -1832,8 +1837,8 @@ def handlePopulation(self, population_id, component, size, component_obj):
self.cellParams[component] = cellRule
- for cp in self.cellParams.keys():
- pp.pprint(self.cellParams[cp])
+ #for cp in self.cellParams.keys():
+ # pp.pprint(self.cellParams[cp])
self.pop_ids_vs_seg_ids_vs_segs[population_id] = seg_ids_vs_segs
@@ -1875,11 +1880,25 @@ def _convert_to_nrn_section_location(self, population_id, seg_id, fract_along):
return self.pop_ids_vs_seg_ids_vs_segs[population_id][seg_id].name, fract_along
else:
-
+ fract_sec = -1
for sec in self.pop_ids_vs_ordered_segs[population_id].keys():
- if seg_id in [s.id for s in self.pop_ids_vs_ordered_segs[population_id][sec]]:
- nrn_sec = sec
- return nrn_sec, 0.777777
+ ind = 0
+ for seg in self.pop_ids_vs_ordered_segs[population_id][sec]:
+ if seg.id == seg_id:
+ nrn_sec = sec
+ if len(self.pop_ids_vs_ordered_segs[population_id][sec])==1:
+ fract_sec = fract_along
+ else:
+ lens = self.pop_ids_vs_cumulative_lengths[population_id][sec]
+ to_start = 0.0 if ind==0 else lens[ind-1]
+ to_end = lens[ind]
+ tot = lens[-1]
+ print to_start, to_end, tot, ind, seg, seg_id
+ fract_sec = (to_start + fract_along *(to_end-to_start))/(tot)
+
+ ind+=1
+ print("============= Converted %s:%s on pop %s to %s on %s"%(seg_id, fract_along, population_id, nrn_sec, fract_sec))
+ return nrn_sec, fract_sec
#
# Overridden from DefaultNetworkHandler
@@ -1916,15 +1935,16 @@ def handleConnection(self, projName, id, prePop, postPop, synapseType, \
delay = 0, \
weight = 1):
- self.log.info("A connection "+str(id)+" of: "+projName+": cell "+str(preCellId)+" in "+prePop \
- +" -> cell "+str(postCellId)+" in "+postPop+", syn: "+ str(synapseType) \
- +", weight: "+str(weight)+", delay: "+str(delay))
- pre_seg_name = self.pop_ids_vs_seg_ids_vs_segs[prePop][preSegId].name if self.pop_ids_vs_seg_ids_vs_segs.has_key(prePop) else 'soma'
- post_seg_name = self.pop_ids_vs_seg_ids_vs_segs[postPop][postSegId].name if self.pop_ids_vs_seg_ids_vs_segs.has_key(postPop) else 'soma'
+ pre_seg_name, pre_fract = self._convert_to_nrn_section_location(prePop,preSegId,preFract)
+ post_seg_name, post_fract = self._convert_to_nrn_section_location(postPop,postSegId,postFract)
- self.connections[projName].append( (self.gids[prePop][preCellId], pre_seg_name,preFract, \
- self.gids[postPop][postCellId], post_seg_name, postFract, \
+ self.log.info("A connection "+str(id)+" of: "+projName+": "+prePop+"["+str(preCellId)+"]."+pre_seg_name+"("+str(pre_fract)+")" \
+ +" -> "+postPop+"["+str(postCellId)+"]."+post_seg_name+"("+str(post_fract)+")"+", syn: "+ str(synapseType) \
+ +", weight: "+str(weight)+", delay: "+str(delay))
+
+ self.connections[projName].append( (self.gids[prePop][preCellId], pre_seg_name,pre_fract, \
+ self.gids[postPop][postCellId], post_seg_name, post_fract, \
delay, weight) )
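Note on PATCH 01: the new _convert_to_nrn_section_location maps a NeuroML (segment id, fract_along) pair onto a NEURON (section, x) location by finding the segment inside its section's ordered segment list and rescaling with the cumulative segment lengths. A minimal sketch of the same arithmetic, assuming plain lists in place of the handler's per-population dicts:

    def fract_along_section(seg_id, fract_along, ordered_seg_ids, cumulative_lengths):
        # ordered_seg_ids: segment ids of one section, in order;
        # cumulative_lengths: running length after each segment (same order)
        if len(ordered_seg_ids) == 1:              # single-segment section: fraction unchanged
            return fract_along
        ind = ordered_seg_ids.index(seg_id)        # position of the segment within the section
        to_start = 0.0 if ind == 0 else cumulative_lengths[ind - 1]
        to_end = cumulative_lengths[ind]
        tot = cumulative_lengths[-1]
        return (to_start + fract_along * (to_end - to_start)) / tot

    # e.g. segment 2 of a 3-segment section with lengths 10+10+20 um:
    # fract_along_section(2, 0.5, [0, 1, 2], [10.0, 20.0, 40.0]) -> (20 + 0.5*20)/40 = 0.75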
From 3c25209d8d1ee2bf129e03273406f8dd7009d002 Mon Sep 17 00:00:00 2001
From: Padraig Gleeson
Date: Thu, 1 Sep 2016 18:19:57 +0100
Subject: [PATCH 02/54] Improvements to import of inputs for nml2
---
netpyne/simFuncs.py | 10 +++++-----
1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/netpyne/simFuncs.py b/netpyne/simFuncs.py
index 129ce12df..27cc212c0 100644
--- a/netpyne/simFuncs.py
+++ b/netpyne/simFuncs.py
@@ -1632,10 +1632,10 @@ class NetPyNEBuilder(DefaultNetworkHandler):
projection_infos = OrderedDict()
connections = OrderedDict()
- popStimSources = {}
- stimSources = {}
- popStimLists = {}
- stimLists = {}
+ popStimSources = OrderedDict()
+ stimSources = OrderedDict()
+ popStimLists = OrderedDict()
+ stimLists = OrderedDict()
gids = OrderedDict()
next_gid = 0
@@ -1981,7 +1981,7 @@ def handleSingleInput(self, inputListId, id, cellId, segId = 0, fract = 0.5):
#seg_name = self.pop_ids_vs_seg_ids_vs_segs[pop_id][segId].name if self.pop_ids_vs_seg_ids_vs_segs.has_key(pop_id) else 'soma'
- stimId = "%s_%s_%s_%s_%s"%(inputListId,pop_id,cellId,nrn_sec,(str(fract)).replace('.','_'))
+ stimId = "%s_%s_%s_%s_%s_%s"%(inputListId, id,pop_id,cellId,nrn_sec,(str(fract)).replace('.','_'))
self.stimSources[stimId] = {'label': stimId, 'type': self.popStimSources[inputListId]['type']}
self.stimLists[stimId] = {'source': stimId,
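Note on PATCH 02: adding the single-input id to the stimulus label keeps two inputs that land on the same cell, section and fraction from overwriting each other in stimSources/stimLists, and the switch to OrderedDict keeps stimuli in parse order. A small illustration of the key layout, with made-up values:

    inputListId, input_id, pop_id, cellId, nrn_sec, fract = 'InputList0', 3, 'popA', 7, 'soma', 0.5
    stimId = "%s_%s_%s_%s_%s_%s" % (inputListId, input_id, pop_id, cellId, nrn_sec,
                                    str(fract).replace('.', '_'))
    # -> 'InputList0_3_popA_7_soma_0_5'; before this patch the input id ('3') was
    # missing, so repeated inputs onto the same location produced identical keys.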
From 5f03a10f6c660bbde06ca0d61c4e98332b0c97b1 Mon Sep 17 00:00:00 2001
From: Padraig Gleeson
Date: Wed, 7 Sep 2016 10:50:51 +0100
Subject: [PATCH 03/54] Changes to make sure tests run with merged master
---
netpyne/simFuncs.py | 93 +++++++++++++++++++++++----------------------
1 file changed, 48 insertions(+), 45 deletions(-)
diff --git a/netpyne/simFuncs.py b/netpyne/simFuncs.py
index 27cc212c0..4d1e08cf0 100644
--- a/netpyne/simFuncs.py
+++ b/netpyne/simFuncs.py
@@ -1298,43 +1298,46 @@ def exportNeuroML2 (reference, connections=True, stimulations=True):
for cell_name in net.params.cellParams.keys():
cell_param_set = net.params.cellParams[cell_name]
print("--------------- Adding a cell %s: \n%s"%(cell_name,cell_param_set))
- print("Adding a cell %s: \n%s"%(cell_name,pp.pprint(cell_param_set.todict())))
+ # print("===== Adding the cell %s: \n%s"%(cell_name,pp.pprint(cell_param_set)))
# Single section; one known mechanism...
- soma = cell_param_set.secs.soma
- if len(cell_param_set.secs) == 1 \
+ soma = cell_param_set['secs']['soma']
+ if len(cell_param_set['secs']) == 1 \
and soma is not None\
- and len(soma.mechs) == 0 \
- and len(soma.pointps) == 1:
+ and not soma.has_key('mechs') \
+ and len(soma['pointps']) == 1:
- pproc = soma.pointps.values()[0]
- cell_id = 'CELL_%s_%s'%(cell_param_set.conds.cellModel,cell_param_set.conds.cellType)
- if len(cell_param_set.conds.cellModel)==0:
- cell_id = 'CELL_%s_%s'%(pproc.mod,cell_param_set.conds.cellType)
+ pproc = soma['pointps'].values()[0]
+
+ if not cell_param_set['conds'].has_key('cellModel'):
+ cell_id = 'CELL_%s_%s'%(pproc['mod'],cell_param_set['conds']['cellType'])
+ else:
+ cell_id = 'CELL_%s_%s'%(cell_param_set['conds']['cellModel'],cell_param_set['conds']['cellType'])
+
print("Assuming abstract cell with behaviour set by single point process: %s!"%pproc)
- if pproc.mod == 'Izhi2007b':
+ if pproc['mod'] == 'Izhi2007b':
izh = neuroml.Izhikevich2007Cell(id=cell_id)
- izh.a = '%s per_ms'%pproc.a
- izh.b = '%s nS'%pproc.b
- izh.c = '%s mV'%pproc.c
- izh.d = '%s pA'%pproc.d
+ izh.a = '%s per_ms'%pproc['a']
+ izh.b = '%s nS'%pproc['b']
+ izh.c = '%s mV'%pproc['c']
+ izh.d = '%s pA'%pproc['d']
- izh.v0 = '%s mV'%pproc.vr # Note: using vr for v0
- izh.vr = '%s mV'%pproc.vr
- izh.vt = '%s mV'%pproc.vt
- izh.vpeak = '%s mV'%pproc.vpeak
- izh.C = '%s pF'%(pproc.C*100)
- izh.k = '%s nS_per_mV'%pproc.k
+ izh.v0 = '%s mV'%pproc['vr'] # Note: using vr for v0
+ izh.vr = '%s mV'%pproc['vr']
+ izh.vt = '%s mV'%pproc['vt']
+ izh.vpeak = '%s mV'%pproc['vpeak']
+ izh.C = '%s pF'%(pproc['C']*100)
+ izh.k = '%s nS_per_mV'%pproc['k']
nml_doc.izhikevich2007_cells.append(izh)
else:
- print("Unknown point process: %s; can't convert to NeuroML 2 equivalent!"%pproc.mod)
+ print("Unknown point process: %s; can't convert to NeuroML 2 equivalent!"%pproc['mod'])
exit(1)
else:
print("Assuming normal cell with behaviour set by ion channel mechanisms!")
- cell_id = 'CELL_%s_%s'%(cell_param_set.conds.cellModel,cell_param_set.conds.cellType)
+ cell_id = 'CELL_%s_%s'%(cell_param_set['conds']['cellModel'],cell_param_set['conds']['cellType'])
cell = neuroml.Cell(id=cell_id)
cell.notes = "Cell exported from NetPyNE:\n%s"%cell_param_set
@@ -1355,47 +1358,47 @@ def exportNeuroML2 (reference, connections=True, stimulations=True):
parentDistal = neuroml.Point3DWithDiam(x=0,y=0,z=0,diameter=0)
- for np_sec_name in cell_param_set.secs.keys():
+ for np_sec_name in cell_param_set['secs'].keys():
parent_seg = None
- np_sec = cell_param_set.secs[np_sec_name]
+ np_sec = cell_param_set['secs'][np_sec_name]
nml_seg = neuroml.Segment(id=count,name=np_sec_name)
nml_segs[np_sec_name] = nml_seg
- if len(np_sec.topol)>0:
- parent_seg = nml_segs[np_sec.topol.parentSec]
+ if np_sec.has_key('topol') and len(np_sec['topol'])>0:
+ parent_seg = nml_segs[np_sec['topol']['parentSec']]
nml_seg.parent = neuroml.SegmentParent(segments=parent_seg.id)
- if not (np_sec.topol.parentX == 1.0 and np_sec.topol.childX == 0):
+ if not (np_sec['topol']['parentX'] == 1.0 and np_sec['topol']['childX'] == 0):
print("Currently only support cell topol with parentX == 1.0 and childX == 0")
exit(1)
- if not (len(np_sec.geom.pt3d)==2 or len(np_sec.geom.pt3d)==0):
- print("Currently only support cell geoms with 2 pt3ds (or 0 and diam/L specified): %s"%np_sec.geom)
+ if not ( (not np_sec['geom'].has_key('pt3d')) or len(np_sec['geom']['pt3d'])==0 or len(np_sec['geom']['pt3d'])==2 ):
+ print("Currently only support cell geoms with 2 pt3ds (or 0 and diam/L specified): %s"%np_sec['geom'])
exit(1)
- if len(np_sec.geom.pt3d)==0:
+ if (not np_sec['geom'].has_key('pt3d') or len(np_sec['geom']['pt3d'])==0):
if parent_seg == None:
nml_seg.proximal = neuroml.Point3DWithDiam(x=parentDistal.x,
y=parentDistal.y,
z=parentDistal.z,
- diameter=np_sec.geom.diam)
+ diameter=np_sec['geom']['diam'])
nml_seg.distal = neuroml.Point3DWithDiam(x=parentDistal.x,
- y=(parentDistal.y+np_sec.geom.L),
+ y=(parentDistal.y+np_sec['geom']['L']),
z=parentDistal.z,
- diameter=np_sec.geom.diam)
+ diameter=np_sec['geom']['diam'])
else:
- prox = np_sec.geom.pt3d[0]
+ prox = np_sec['geom']['pt3d'][0]
nml_seg.proximal = neuroml.Point3DWithDiam(x=prox[0],
y=prox[1],
z=prox[2],
diameter=prox[3])
- dist = np_sec.geom.pt3d[1]
+ dist = np_sec['geom']['pt3d'][1]
nml_seg.distal = neuroml.Point3DWithDiam(x=dist[0],
y=dist[1],
z=dist[2],
@@ -1410,13 +1413,13 @@ def exportNeuroML2 (reference, connections=True, stimulations=True):
count+=1
- ip.resistivities.append(neuroml.Resistivity(value="%s ohm_cm"%np_sec.geom.Ra,
+ ip.resistivities.append(neuroml.Resistivity(value="%s ohm_cm"%np_sec['geom']['Ra'],
segment_groups=nml_seg_group.id))
'''
See https://github.com/Neurosim-lab/netpyne/issues/130
'''
- cm = np_sec.geom.cm
+ cm = np_sec['geom']['cm'] if np_sec['geom'].has_key('cm') else 1
if isinstance(cm,dict) and len(cm)==0:
cm = 1
mp.specific_capacitances.append(neuroml.SpecificCapacitance(value="%s uF_per_cm2"%cm,
@@ -1427,8 +1430,8 @@ def exportNeuroML2 (reference, connections=True, stimulations=True):
mp.spike_threshes.append(neuroml.SpikeThresh(value="%s mV"%'0'))
- for mech_name in np_sec.mechs.keys():
- mech = np_sec.mechs[mech_name]
+ for mech_name in np_sec['mechs'].keys():
+ mech = np_sec['mechs'][mech_name]
if mech_name == 'hh':
for chan in chans_doc.ion_channel_hhs:
@@ -1440,21 +1443,21 @@ def exportNeuroML2 (reference, connections=True, stimulations=True):
leak_cd = neuroml.ChannelDensity(id='leak_%s'%nml_seg_group.id,
ion_channel='leak_hh',
- cond_density='%s S_per_cm2'%mech.gl,
- erev='%s mV'%mech.el,
+ cond_density='%s S_per_cm2'%mech['gl'],
+ erev='%s mV'%mech['el'],
ion='non_specific')
mp.channel_densities.append(leak_cd)
k_cd = neuroml.ChannelDensity(id='k_%s'%nml_seg_group.id,
ion_channel='k_hh',
- cond_density='%s S_per_cm2'%mech.gkbar,
+ cond_density='%s S_per_cm2'%mech['gkbar'],
erev='%s mV'%'-77',
ion='k')
mp.channel_densities.append(k_cd)
na_cd = neuroml.ChannelDensity(id='na_%s'%nml_seg_group.id,
ion_channel='na_hh',
- cond_density='%s S_per_cm2'%mech.gnabar,
+ cond_density='%s S_per_cm2'%mech['gnabar'],
erev='%s mV'%'50',
ion='na')
mp.channel_densities.append(na_cd)
@@ -1469,8 +1472,8 @@ def exportNeuroML2 (reference, connections=True, stimulations=True):
leak_cd = neuroml.ChannelDensity(id='leak_%s'%nml_seg_group.id,
ion_channel='leak_hh',
- cond_density='%s mS_per_cm2'%mech.g,
- erev='%s mV'%mech.e,
+ cond_density='%s mS_per_cm2'%mech['g'],
+ erev='%s mV'%mech['e'],
ion='non_specific')
mp.channel_densities.append(leak_cd)
else:
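Note on PATCH 03: the exporter now reads cellParams as plain dicts (cell_param_set['secs'], pproc['a'], ...) instead of attribute access, matching what the merged master produces. A hedged sketch of the Izhi2007b branch with a hand-written pproc dict (illustrative values only), showing how the point-process parameters become NeuroML2 unit strings:

    import neuroml

    # assumed example of cellParams['secs']['soma']['pointps'].values()[0]
    pproc = {'mod': 'Izhi2007b', 'a': 0.03, 'b': -2, 'c': -50, 'd': 100,
             'vr': -60, 'vt': -40, 'vpeak': 35, 'C': 1.0, 'k': 0.7}

    izh = neuroml.Izhikevich2007Cell(id='CELL_Izhi2007b_PYR')
    izh.a = '%s per_ms' % pproc['a']
    izh.b = '%s nS' % pproc['b']
    izh.c = '%s mV' % pproc['c']
    izh.d = '%s pA' % pproc['d']
    izh.v0 = '%s mV' % pproc['vr']           # vr reused for v0, as in the patch
    izh.vr = '%s mV' % pproc['vr']
    izh.vt = '%s mV' % pproc['vt']
    izh.vpeak = '%s mV' % pproc['vpeak']
    izh.C = '%s pF' % (pproc['C'] * 100)     # factor of 100 copied from the export code above
    izh.k = '%s nS_per_mV' % pproc['k']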
From 486c9f60714fc452ddbb219ac4c7b98ae2574ae5 Mon Sep 17 00:00:00 2001
From: Padraig Gleeson
Date: Wed, 7 Sep 2016 20:09:07 +0100
Subject: [PATCH 04/54] Work towards supporting gap juncs
---
examples/NeuroMLImport/poissonFiringSyn.mod | 6 +++---
netpyne/simFuncs.py | 14 +++++++-------
2 files changed, 10 insertions(+), 10 deletions(-)
diff --git a/examples/NeuroMLImport/poissonFiringSyn.mod b/examples/NeuroMLImport/poissonFiringSyn.mod
index 256643943..ceb3514c7 100644
--- a/examples/NeuroMLImport/poissonFiringSyn.mod
+++ b/examples/NeuroMLImport/poissonFiringSyn.mod
@@ -123,11 +123,11 @@ NET_RECEIVE(flag) {
: This child is a synapse; defining weight
weight = 1
- : paramMappings: {syn0={g=syn0_g, tauDecay=syn0_tauDecay, waveformFactor=syn0_waveformFactor, A=syn0_A, B=syn0_B, erev=syn0_erev, gbase=syn0_gbase, peakTime=syn0_peakTime, tauRise=syn0_tauRise, i=syn0_i}, poissonFiringSyn={isi=isi, tsince=tsince, averageRate=averageRate, averageIsi=averageIsi, i=i}}
+ : paramMappings: {poissonFiringSyn={averageRate=averageRate, i=i, tsince=tsince, isi=isi, averageIsi=averageIsi}, syn0={A=syn0_A, tauRise=syn0_tauRise, gbase=syn0_gbase, erev=syn0_erev, B=syn0_B, peakTime=syn0_peakTime, g=syn0_g, i=syn0_i, tauDecay=syn0_tauDecay, waveformFactor=syn0_waveformFactor}}
? state_discontinuity(syn0_A, syn0_A + (weight * syn0_waveformFactor ))
syn0_A = syn0_A + (weight * syn0_waveformFactor )
- : paramMappings: {syn0={g=syn0_g, tauDecay=syn0_tauDecay, waveformFactor=syn0_waveformFactor, A=syn0_A, B=syn0_B, erev=syn0_erev, gbase=syn0_gbase, peakTime=syn0_peakTime, tauRise=syn0_tauRise, i=syn0_i}, poissonFiringSyn={isi=isi, tsince=tsince, averageRate=averageRate, averageIsi=averageIsi, i=i}}
+ : paramMappings: {poissonFiringSyn={averageRate=averageRate, i=i, tsince=tsince, isi=isi, averageIsi=averageIsi}, syn0={A=syn0_A, tauRise=syn0_tauRise, gbase=syn0_gbase, erev=syn0_erev, B=syn0_B, peakTime=syn0_peakTime, g=syn0_g, i=syn0_i, tauDecay=syn0_tauDecay, waveformFactor=syn0_waveformFactor}}
? state_discontinuity(syn0_B, syn0_B + (weight * syn0_waveformFactor ))
syn0_B = syn0_B + (weight * syn0_waveformFactor )
@@ -156,8 +156,8 @@ PROCEDURE rates() {
rate_tsince = 1 ? Note units of all quantities used here need to be consistent!
- rate_syn0_B = - syn0_B / syn0_tauDecay ? Note units of all quantities used here need to be consistent!
rate_syn0_A = - syn0_A / syn0_tauRise ? Note units of all quantities used here need to be consistent!
+ rate_syn0_B = - syn0_B / syn0_tauDecay ? Note units of all quantities used here need to be consistent!
diff --git a/netpyne/simFuncs.py b/netpyne/simFuncs.py
index 4d1e08cf0..2f5efe574 100644
--- a/netpyne/simFuncs.py
+++ b/netpyne/simFuncs.py
@@ -1655,7 +1655,7 @@ def finalise(self):
self.netParams.addCellParams(cellParam, self.cellParams[cellParam])
for proj_id in self.projection_infos.keys():
- projName, prePop, postPop, synapse = self.projection_infos[proj_id]
+ projName, prePop, postPop, synapse, ptype = self.projection_infos[proj_id]
self.netParams.addSynMechParams(synapse, {'mod': synapse})
@@ -1918,11 +1918,11 @@ def handleLocation(self, id, population_id, component, x, y, z):
#
# Overridden from DefaultNetworkHandler
#
- def handleProjection(self, projName, prePop, postPop, synapse, hasWeights=False, hasDelays=False):
+ def handleProjection(self, projName, prePop, postPop, synapse, hasWeights=False, hasDelays=False, type="projection"):
- self.log.info("A projection: "+projName+" from "+prePop+" -> "+postPop+" with syn: "+synapse)
- self.projection_infos[projName] = (projName, prePop, postPop, synapse)
+ self.log.info("A projection: %s (%s) from %s -> %s with syn: %s" % (projName, type, prePop, postPop, synapse))
+ self.projection_infos[projName] = (projName, prePop, postPop, synapse, type)
self.connections[projName] = []
#
@@ -2024,7 +2024,7 @@ def importNeuroML2(fileName, simConfig):
nmlHandler = NetPyNEBuilder(netParams)
- currParser = NeuroMLXMLParser(nmlHandler) # The HDF5 handler knows of the structure of NeuroML and calls appropriate functions in NetworkHandler
+ currParser = NeuroMLXMLParser(nmlHandler) # The XML handler knows of the structure of NeuroML and calls appropriate functions in NetworkHandler
currParser.parse(fileName)
@@ -2050,8 +2050,8 @@ def importNeuroML2(fileName, simConfig):
assert(gid in nmlHandler.gids[popLabel])
for proj_id in nmlHandler.projection_infos.keys():
- projName, prePop, postPop, synapse = nmlHandler.projection_infos[proj_id]
- print("Creating connections for %s: %s->%s via %s"%(projName, prePop, postPop, synapse))
+ projName, prePop, postPop, synapse, ptype = nmlHandler.projection_infos[proj_id]
+ print("Creating connections for %s (%s): %s->%s via %s"%(projName, ptype, prePop, postPop, synapse))
preComp = nmlHandler.pop_ids_vs_components[prePop]
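Note on PATCH 04: projection_infos entries now carry a fifth element, the projection type, so later stages can tell ordinary chemical projections apart from electrical/gap-junction ones. A hedged sketch of how a consumer unpacks it (the type strings here are illustrative):

    projection_infos = {'proj0': ('proj0', 'popA', 'popB', 'AMPA', 'projection'),
                        'gj0':   ('gj0',   'popA', 'popB', 'gapJunc0', 'electricalProjection')}

    for projName, prePop, postPop, synapse, ptype in projection_infos.values():
        if ptype == 'projection':
            pass   # chemical synapse handling, as before this patch
        else:
            pass   # gap-junction handling would branch here (not yet implemented in this patch)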
From f061ac25b153ca1fe6db0ca7070c99eaea5b2264 Mon Sep 17 00:00:00 2001
From: Padraig Gleeson
Date: Fri, 9 Sep 2016 19:33:44 +0100
Subject: [PATCH 05/54] Catching some not yet supported nml2 elements
---
netpyne/simFuncs.py | 39 +++++++++++++++++++++++++++++++++++++++
1 file changed, 39 insertions(+)
diff --git a/netpyne/simFuncs.py b/netpyne/simFuncs.py
index 2f5efe574..bd7820d73 100644
--- a/netpyne/simFuncs.py
+++ b/netpyne/simFuncs.py
@@ -1810,6 +1810,45 @@ def handlePopulation(self, population_id, component, size, component_obj):
cellRule['secs'][section_name]['ions'][cm.ion] = {}
cellRule['secs'][section_name]['ions'][cm.ion]['e'] = erev
+ for cm in cell.biophysical_properties.membrane_properties.channel_density_nernsts:
+ raise Exception(" not yet supported!")
+
+ group = 'all' if not cm.segment_groups else cm.segment_groups
+ for section_name in seg_grps_vs_nrn_sections[group]:
+ gmax = pynml.convert_to_units(cm.cond_density,'S_per_cm2')
+ if cm.ion_channel=='pas':
+ mech = {'g':gmax}
+ else:
+ mech = {'gmax':gmax}
+
+ cellRule['secs'][section_name]['mechs'][cm.ion_channel] = mech
+
+ #TODO: erev!!
+
+ if cm.ion and cm.ion == 'non_specific':
+ pass
+ ##mech['e'] = erev
+ else:
+ if not cellRule['secs'][section_name]['ions'].has_key(cm.ion):
+ cellRule['secs'][section_name]['ions'][cm.ion] = {}
+ ##cellRule['secs'][section_name]['ions'][cm.ion]['e'] = erev
+
+ for cm in cell.biophysical_properties.membrane_properties.channel_density_ghks:
+ raise Exception(" not yet supported!")
+
+ for cm in cell.biophysical_properties.membrane_properties.channel_density_ghk2s:
+ raise Exception(" not yet supported!")
+
+ for cm in cell.biophysical_properties.membrane_properties.channel_density_non_uniforms:
+ raise Exception(" not yet supported!")
+
+ for cm in cell.biophysical_properties.membrane_properties.channel_density_non_uniform_nernsts:
+ raise Exception(" not yet supported!")
+
+ for cm in cell.biophysical_properties.membrane_properties.channel_density_non_uniform_ghks:
+ raise Exception(" not yet supported!")
+
+
for vi in cell.biophysical_properties.membrane_properties.init_memb_potentials:
group = 'all' if not vi.segment_groups else vi.segment_groups
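Note on PATCH 05: the importer now fails fast as soon as a cell uses a channel-density flavour it cannot translate, rather than silently producing a wrong model. The same guard written compactly as a hedged sketch (assuming the membrane-properties object exposes these lists, as in the patch):

    def check_unsupported_channel_densities(cell):
        mp = cell.biophysical_properties.membrane_properties
        for attr in ('channel_density_nernsts', 'channel_density_ghks', 'channel_density_ghk2s',
                     'channel_density_non_uniforms', 'channel_density_non_uniform_nernsts',
                     'channel_density_non_uniform_ghks'):
            if getattr(mp, attr, []):
                raise Exception("%s not yet supported!" % attr)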
From 5d0d3dc9d6d434bc3c278e71f887b2a89e66a9af Mon Sep 17 00:00:00 2001
From: Padraig Gleeson
Date: Mon, 12 Sep 2016 18:09:23 +0100
Subject: [PATCH 06/54] Slightly less verbose
---
netpyne/simFuncs.py | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/netpyne/simFuncs.py b/netpyne/simFuncs.py
index bd7820d73..1804cd732 100644
--- a/netpyne/simFuncs.py
+++ b/netpyne/simFuncs.py
@@ -1935,11 +1935,11 @@ def _convert_to_nrn_section_location(self, population_id, seg_id, fract_along):
to_start = 0.0 if ind==0 else lens[ind-1]
to_end = lens[ind]
tot = lens[-1]
- print to_start, to_end, tot, ind, seg, seg_id
+ #print to_start, to_end, tot, ind, seg, seg_id
fract_sec = (to_start + fract_along *(to_end-to_start))/(tot)
ind+=1
- print("============= Converted %s:%s on pop %s to %s on %s"%(seg_id, fract_along, population_id, nrn_sec, fract_sec))
+ #print("============= Converted %s:%s on pop %s to %s on %s"%(seg_id, fract_along, population_id, nrn_sec, fract_sec))
return nrn_sec, fract_sec
#
@@ -2070,7 +2070,7 @@ def importNeuroML2(fileName, simConfig):
nmlHandler.finalise()
print('Finished import: %s'%nmlHandler.gids)
- print('Connections: %s'%nmlHandler.connections)
+ #print('Connections: %s'%nmlHandler.connections)
sim.initialize(netParams, simConfig) # create network object and set cfg and net params
From c551710033c17707693d606e1dcc345ef400d222 Mon Sep 17 00:00:00 2001
From: Padraig Gleeson
Date: Thu, 15 Sep 2016 19:13:01 +0100
Subject: [PATCH 07/54] More verbose warning about lack of NeuroML python libs
---
netpyne/simFuncs.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/netpyne/simFuncs.py b/netpyne/simFuncs.py
index 1804cd732..735ceaa8d 100644
--- a/netpyne/simFuncs.py
+++ b/netpyne/simFuncs.py
@@ -32,7 +32,7 @@
__all__.extend(['importNeuroML2']) # import
neuromlExists = True
except:
- print('(Note: NeuroML import failed; import/export functions will not be available)')
+ print('\n*******\n Note: NeuroML import failed; import/export functions for NeuroML will not be available. \n Install the pyNeuroML & libNeuroML Python packages: https://www.neuroml.org/getneuroml\n*******\n')
neuromlExists = False
import sim, specs
From 18689a065b5b441bcea4ae5b97c58c1d05d5b47f Mon Sep 17 00:00:00 2001
From: salvadord
Date: Sat, 29 Oct 2016 15:39:23 -0400
Subject: [PATCH 08/54] Fixed bug positioning cells with 3d geom
---
CHANGES.md | 5 +++++
netpyne/cell.py | 5 +----
2 files changed, 6 insertions(+), 4 deletions(-)
diff --git a/CHANGES.md b/CHANGES.md
index 03396c746..3876b4d3f 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,3 +1,8 @@
+# Version 0.6.4
+
+- Fixed bug positioning cells with 3d geom
+
+
# Version 0.6.3
- Added cvode_active simConfig option to set variable time step (issue #116)
diff --git a/netpyne/cell.py b/netpyne/cell.py
index f4636c868..19f6ec779 100644
--- a/netpyne/cell.py
+++ b/netpyne/cell.py
@@ -183,10 +183,7 @@ def createNEURONObj (self, prop):
if 'pt3d' in sectParams['geom']:
h.pt3dclear(sec=sec['hSec'])
x = self.tags['x']
- if 'ynorm' in self.tags and hasattr(sim.net.params, 'sizeY'):
- y = self.tags['ynorm'] * sim.net.params.sizeY/1e3 # y as a func of ynorm and cortical thickness
- else:
- y = self.tags['y']
+ y = self.tags['y']
z = self.tags['z']
for pt3d in sectParams['geom']['pt3d']:
h.pt3dadd(x+pt3d[0], y+pt3d[1], z+pt3d[2], pt3d[3], sec=sec['hSec'])
From 7592683c042acb589a961a22a40732ca28e64d16 Mon Sep 17 00:00:00 2001
From: salvadord
Date: Sun, 30 Oct 2016 19:28:19 -0400
Subject: [PATCH 09/54] Added option to skip batch sims if output file already
exists
---
CHANGES.md | 2 ++
netpyne/batch.py | 65 +++++++++++++++++++++++++++---------------------
2 files changed, 39 insertions(+), 28 deletions(-)
diff --git a/CHANGES.md b/CHANGES.md
index 3876b4d3f..cbd965fc5 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,5 +1,7 @@
# Version 0.6.4
+- Added option to skip batch sims if output file already exists
+
- Fixed bug positioning cells with 3d geom
diff --git a/netpyne/batch.py b/netpyne/batch.py
index b653e6334..1f8e54c7f 100644
--- a/netpyne/batch.py
+++ b/netpyne/batch.py
@@ -91,34 +91,43 @@ def run(self):
# run sim
if self.runCfg.get('type',None) == 'hpc_torque':
- # read params or set defaults
- jobName = self.saveFolder+'/'+simLabel # Customize your options here
- numproc = self.runCfg.get('numproc', 1)
- script = self.runCfg.get('script', 'init.py')
- walltime = self.runCfg.get('walltime', '00:30:00')
- queueName = self.runCfg.get('queueName', 'default')
- nodesppn = 'nodes=1:ppn=%d'%(numproc)
- sleepInterval = self.runCfg.get('sleepInterval', 1)
- command = 'mpiexec -np %d nrniv -python -mpi %s simConfig=%s' % (numproc, script, cfgSavePath)
-
- output, input = popen2('qsub') # Open a pipe to the qsub command.
-
- jobString = """#!/bin/bash
- #PBS -N %s
- #PBS -l walltime=%s
- #PBS -q %s
- #PBS -l %s
- #PBS -o %s.run
- #PBS -e %s.err
- cd $PBS_O_WORKDIR
- echo $PBS_O_WORKDIR
- %s""" % (jobName, walltime, queueName, nodesppn, jobName, jobName, command)
-
- # Send job_string to qsub
- input.write(jobString)
- print jobString
- input.close()
- sleep(sleepInterval)
+ jobName = self.saveFolder+'/'+simLabel
+
+ # skip if output file already exists
+ import glob
+ if self.runCfg.get('skip', False) and glob(jobName+'.*'):
+ print 'Skipping job %s since output file already exists...'
+ else:
+ # read params or set defaults
+ sleepInterval = self.runCfg.get('sleepInterval', 1)
+ sleep(sleepInterval)
+
+ numproc = self.runCfg.get('numproc', 1)
+ script = self.runCfg.get('script', 'init.py')
+ walltime = self.runCfg.get('walltime', '00:30:00')
+ queueName = self.runCfg.get('queueName', 'default')
+ nodesppn = 'nodes=1:ppn=%d'%(numproc)
+
+ command = 'mpiexec -np %d nrniv -python -mpi %s simConfig=%s' % (numproc, script, cfgSavePath)
+
+ output, input = popen2('qsub') # Open a pipe to the qsub command.
+
+ jobString = """#!/bin/bash
+ #PBS -N %s
+ #PBS -l walltime=%s
+ #PBS -q %s
+ #PBS -l %s
+ #PBS -o %s.run
+ #PBS -e %s.err
+ cd $PBS_O_WORKDIR
+ echo $PBS_O_WORKDIR
+ %s""" % (jobName, walltime, queueName, nodesppn, jobName, jobName, command)
+
+ # Send job_string to qsub
+ input.write(jobString)
+ print jobString+'\n'
+ input.close()
+
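Note on PATCH 09: the hpc_torque branch now checks for existing output before queueing; only when nothing is found does it build the PBS job string and pipe it to qsub. The skip test in isolation, as a hedged sketch (the glob call and the message formatting are tidied up in later patches of this series):

    import glob

    def should_skip(jobName, runCfg):
        # skip only when the option is on and an output file for this job already exists
        return runCfg.get('skip', False) and bool(glob.glob(jobName + '.*'))

    # e.g. should_skip('/data/batch1/sim_0_0', {'skip': True}) becomes True once
    # /data/batch1/sim_0_0.json (or .run/.err) has been written by a previous run.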
From c6b5d8f5248c49f064031db4374369dced778d02 Mon Sep 17 00:00:00 2001
From: salvadord
Date: Mon, 31 Oct 2016 12:01:01 -0400
Subject: [PATCH 10/54] added pyplot.close('all') to sim.clearAll (issue #168)
---
CHANGES.md | 1 +
netpyne/simFuncs.py | 2 +-
2 files changed, 2 insertions(+), 1 deletion(-)
diff --git a/CHANGES.md b/CHANGES.md
index cbd965fc5..2162b6368 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -4,6 +4,7 @@
- Fixed bug positioning cells with 3d geom
+- Fixed bug in sim.clearAll by closing all figures instead of current (issue #168)
# Version 0.6.3
diff --git a/netpyne/simFuncs.py b/netpyne/simFuncs.py
index 8619c128c..649472c5e 100644
--- a/netpyne/simFuncs.py
+++ b/netpyne/simFuncs.py
@@ -340,7 +340,7 @@ def clearAll():
import matplotlib
matplotlib.pyplot.clf()
- matplotlib.pyplot.close()
+ matplotlib.pyplot.close('all')
del sim.net
From bc2f65366a9af59980112ca59c63cbfeab37bcaf Mon Sep 17 00:00:00 2001
From: salvadord
Date: Mon, 31 Oct 2016 13:14:19 -0400
Subject: [PATCH 11/54] fixed bug in batch.py - glob.glob
---
netpyne/batch.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/netpyne/batch.py b/netpyne/batch.py
index 1f8e54c7f..98bab46ae 100644
--- a/netpyne/batch.py
+++ b/netpyne/batch.py
@@ -95,7 +95,7 @@ def run(self):
# skip if output file already exists
import glob
- if self.runCfg.get('skip', False) and glob(jobName+'.*'):
+ if self.runCfg.get('skip', False) and glob.glob(jobName+'.*'):
print 'Skipping job %s since output file already exists...'
else:
# read params or set defaults
From 2ed6d2eae5cc4ab72514ed3cea0f35ea26fe1cbe Mon Sep 17 00:00:00 2001
From: salvadord
Date: Mon, 31 Oct 2016 17:20:40 -0400
Subject: [PATCH 12/54] set cvode.active(0) if not selected
---
netpyne/simFuncs.py | 2 ++
1 file changed, 2 insertions(+)
diff --git a/netpyne/simFuncs.py b/netpyne/simFuncs.py
index 649472c5e..ab550f8e7 100644
--- a/netpyne/simFuncs.py
+++ b/netpyne/simFuncs.py
@@ -692,6 +692,8 @@ def preRun():
if sim.cfg.cvode_active:
h.cvode.active(1)
+ else:
+ h.cvode.active(0)
if sim.cfg.cache_efficient:
h.cvode.cache_efficient(0)
From 8e46d14e98bb018a36c543752583919db4c69cd0 Mon Sep 17 00:00:00 2001
From: salvadord
Date: Mon, 31 Oct 2016 17:24:48 -0400
Subject: [PATCH 13/54] Fixed bug: cache_efficient was not being turned on
---
CHANGES.md | 2 ++
netpyne/simFuncs.py | 2 ++
2 files changed, 4 insertions(+)
diff --git a/CHANGES.md b/CHANGES.md
index 2162b6368..10dd794fc 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -6,6 +6,8 @@
- Fixed bug in sim.clearAll by closing all figures instead of current (issue #168)
+- Fixed bug: cache_efficient was not being turned on
+
# Version 0.6.3
- Added cvode_active simConfig option to set variable time step (issue #116)
diff --git a/netpyne/simFuncs.py b/netpyne/simFuncs.py
index ab550f8e7..7cb70a4d2 100644
--- a/netpyne/simFuncs.py
+++ b/netpyne/simFuncs.py
@@ -696,6 +696,8 @@ def preRun():
h.cvode.active(0)
if sim.cfg.cache_efficient:
+ h.cvode.cache_efficient(1)
+ else:
h.cvode.cache_efficient(0)
h.dt = sim.cfg.dt # set time step
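Note on PATCHES 12-13: preRun now writes both CVode flags explicitly in either direction, so a setting left over from an earlier run in the same process cannot leak through. Equivalent behaviour as a hedged standalone sketch:

    from neuron import h
    h.load_file('stdrun.hoc')    # defines the global cvode object used below

    def apply_solver_flags(cvode_active, cache_efficient):
        h.cvode.active(1 if cvode_active else 0)              # variable time step on/off
        h.cvode.cache_efficient(1 if cache_efficient else 0)  # cache-efficient mode on/off

    # apply_solver_flags(sim.cfg.cvode_active, sim.cfg.cache_efficient)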
From 5b52e63c4d8773fb22a6511528c22de5df8a03d5 Mon Sep 17 00:00:00 2001
From: salvadord
Date: Tue, 1 Nov 2016 22:55:19 -0400
Subject: [PATCH 14/54] Added option to overlay pop labels and show avg rates
to plotRaster() (issue #111)
---
CHANGES.md | 2 ++
doc/source/reference.rst | 4 ++-
netpyne/analysis.py | 55 ++++++++++++++++++++++++++++++++--------
3 files changed, 50 insertions(+), 11 deletions(-)
diff --git a/CHANGES.md b/CHANGES.md
index 10dd794fc..dbe67abaf 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -2,6 +2,8 @@
- Added option to skip batch sims if output file already exists
+- Added option to overlay pop labels and show avg rates to plotRaster() (issue #111)
+
- Fixed bug positioning cells with 3d geom
- Fixed bug in sim.clearAll by closing all figures instead of current (issue #168)
diff --git a/doc/source/reference.rst b/doc/source/reference.rst
index e45249a82..c9377ed82 100644
--- a/doc/source/reference.rst
+++ b/doc/source/reference.rst
@@ -757,7 +757,7 @@ Misc/utilities:
Analysis-related functions
^^^^^^^^^^^^^^^^^^^^^^^^^^
-* **analysis.plotRaster** (include = ['allCells'], timeRange = None, maxSpikes = 1e8, orderBy = 'gid', orderInverse = False, spikeHist = None, spikeHistBin = 5, syncLines = False, figSize = (10,8), saveData = None, saveFig = None, showFig = True)
+* **analysis.plotRaster** (include = ['allCells'], timeRange = None, maxSpikes = 1e8, orderBy = 'gid', orderInverse = False, labels = 'legend', popRates = False, spikeHist = None, spikeHistBin = 5, syncLines = False, figSize = (10,8), saveData = None, saveFig = None, showFig = True)
Plot raster (spikes over time) of network cells. Optional arguments:
@@ -766,6 +766,8 @@ Analysis-related functions
- *maxSpikes*: maximum number of spikes that will be plotted (int)
- *orderBy*: Unique numeric cell property to order y-axis by, e.g. 'gid', 'ynorm', 'y' ('gid'|'y'|'ynorm'|...)
- *orderInverse*: Invert the y-axis order (True|False)
+ - *labels*: Show population labels in a legend or overlayed on one side of raster ('legend'|'overlay'))
+ - *popRates*: Include population rates ('legend'|'overlay')
- *spikeHist*: overlay line over raster showing spike histogram (spikes/bin) (None|'overlay'|'subplot')
- *spikeHistBin*: Size of bin in ms to use for histogram (int)
   - *syncLines*: calculate synchrony measure and plot vertical lines for each spike to evidence synchrony (True|False)
diff --git a/netpyne/analysis.py b/netpyne/analysis.py
index 1b1c0857b..cfc0a5726 100644
--- a/netpyne/analysis.py
+++ b/netpyne/analysis.py
@@ -10,10 +10,10 @@
if __gui__:
from matplotlib.pylab import transpose, nanmax, nanmin, errstate, bar, histogram, floor, ceil, yticks, arange, gca, scatter, figure, hold, subplot, axes, shape, imshow, \
- colorbar, plot, xlabel, ylabel, title, xlim, ylim, clim, show, zeros, legend, savefig, psd, ion, subplots_adjust, subplots, tight_layout, get_fignums
+ colorbar, plot, xlabel, ylabel, title, xlim, ylim, clim, show, zeros, legend, savefig, psd, ion, subplots_adjust, subplots, tight_layout, get_fignums, text
from matplotlib import gridspec
-from scipy import size, array, linspace, ceil
+from scipy import size, array, linspace, ceil, cumsum
from numbers import Number
import math
@@ -147,8 +147,8 @@ def getCellsInclude(include):
######################################################################################################################################################
## Raster plot
######################################################################################################################################################
-def plotRaster (include = ['allCells'], timeRange = None, maxSpikes = 1e8, orderBy = 'gid', orderInverse = False, spikeHist = None,
- spikeHistBin = 5, syncLines = False, figSize = (10,8), saveData = None, saveFig = None, showFig = True):
+def plotRaster (include = ['allCells'], timeRange = None, maxSpikes = 1e8, orderBy = 'gid', orderInverse = False, labels = 'legend', popRates = False,
+ spikeHist = None, spikeHistBin = 5, syncLines = False, figSize = (10,8), saveData = None, saveFig = None, showFig = True):
'''
Raster plot of network cells
- include (['all',|'allCells',|'allNetStims',|,120,|,'E1'|,('L2', 56)|,('L5',[4,5,6])]): Cells to include (default: 'allCells')
@@ -156,6 +156,8 @@ def plotRaster (include = ['allCells'], timeRange = None, maxSpikes = 1e8, order
- maxSpikes (int): maximum number of spikes that will be plotted (default: 1e8)
- orderBy ('gid'|'y'|'ynorm'|...): Unique numeric cell property to order y-axis by, e.g. 'gid', 'ynorm', 'y' (default: 'gid')
- orderInverse (True|False): Invert the y-axis order (default: False)
+ - labels = ('legend', 'overlay'): Show population labels in a legend or overlayed on one side of raster (default: 'legend')
+ - popRates = (True|False): Include population rates (default: False)
- spikeHist (None|'overlay'|'subplot'): overlay line over raster showing spike histogram (spikes/bin) (default: False)
- spikeHistBin (int): Size of bin in ms to use for histogram (default: 5)
    - syncLines (True|False): calculate synchrony measure and plot vertical lines for each spike to evidence synchrony (default: False)
@@ -169,6 +171,7 @@ def plotRaster (include = ['allCells'], timeRange = None, maxSpikes = 1e8, order
- Returns figure handle
'''
+
print('Plotting raster...')
colorList = [[0.42,0.67,0.84], [0.90,0.76,0.00], [0.42,0.83,0.59], [0.90,0.32,0.00],
@@ -252,6 +255,7 @@ def plotRaster (include = ['allCells'], timeRange = None, maxSpikes = 1e8, order
spkgidColors = spkgidColors[:maxSpikes]
timeRange[1] = max(spkts)
+
# Calculate spike histogram
if spikeHist:
histo = histogram(spkts, bins = arange(timeRange[0], timeRange[1], spikeHistBin))
@@ -269,11 +273,20 @@ def plotRaster (include = ['allCells'], timeRange = None, maxSpikes = 1e8, order
ax1.set_xlim(timeRange)
# Plot stats
+ gidPops = [cell['tags']['popLabel'] for cell in cells]
+ popNumCells = [float(gidPops.count(pop)) for pop in popLabels]
totalSpikes = len(spkts)
totalConnections = sum([len(cell['conns']) for cell in cells])
numCells = len(cells)
- firingRate = float(totalSpikes)/numCells/(timeRange[1]-timeRange[0])*1e3 if totalSpikes>0 else 0# Calculate firing rate
+ firingRate = float(totalSpikes)/numCells/(timeRange[1]-timeRange[0])*1e3 if totalSpikes>0 else 0 # Calculate firing rate
connsPerCell = totalConnections/float(numCells) if numCells>0 else 0 # Calculate the number of connections per cell
+
+ if popRates:
+ avgRates = {}
+ for pop, popNum in zip(popLabels, popNumCells):
+ if numCells > 0:
+ tsecs = (timeRange[1]-timeRange[0])/1e3
+ avgRates[pop] = len([spkid for spkid in spkinds if sim.net.allCells[int(spkid)]['tags']['popLabel']==pop])/popNum/tsecs
# Plot synchrony lines
if syncLines:
@@ -303,11 +316,33 @@ def plotRaster (include = ['allCells'], timeRange = None, maxSpikes = 1e8, order
ax1.set_ylim(-1, len(cells)+numNetStims+1)
# Add legend
- for popLabel in popLabels:
- plot(0,0,color=popColors[popLabel],label=popLabel)
- legend(fontsize=fontsiz, bbox_to_anchor=(1.04, 1), loc=2, borderaxespad=0.)
- maxLabelLen = max([len(l) for l in popLabels])
- subplots_adjust(right=(0.9-0.012*maxLabelLen))
+ if popRates:
+ popLabelRates = [popLabel + ' (%.3g Hz)'%(avgRates[popLabel]) for popLabel in popLabels]
+
+ if labels == 'legend':
+ for ipop,popLabel in enumerate(popLabels):
+ label = popLabelRates[ipop] if popRates else popLabel
+ plot(0,0,color=popColors[popLabel],label=label)
+ legend(fontsize=fontsiz, bbox_to_anchor=(1.04, 1), loc=2, borderaxespad=0.)
+ maxLabelLen = max([len(l) for l in popLabels])
+ rightOffset = 0.85 if popRates else 0.9
+ subplots_adjust(right=(rightOffset-0.012*maxLabelLen))
+
+ elif labels == 'overlay':
+ ax = gca()
+ color = 'k'
+ tx = 1.01
+ margin = 1.0/numCells/2
+ tys = [(popLen/numCells)*(1-2*margin) for popLen in popNumCells]
+ tysOffset = list(cumsum(tys))[:-1]
+ tysOffset.insert(0, 0)
+ labels = popLabelRates if popRates else popLabels
+ for ipop,(ty, tyOffset, popLabel) in enumerate(zip(tys, tysOffset, popLabels)):
+ label = popLabelRates[ipop] if popRates else popLabel
+ text(tx, tyOffset + ty/2.0 - 0.01, label, transform=ax.transAxes, fontsize=fontsiz, color=popColors[popLabel])
+ maxLabelLen = max([len(l) for l in labels])
+ subplots_adjust(right=(1.0-0.011*maxLabelLen))
+
# save figure data
if saveData:
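Note on PATCH 14: the per-population rate is simply the population's spike count divided by its cell count and by the plotted window in seconds, and the 'overlay' labels sit at the vertical centre of each population's band of raster rows. Both calculations as a standalone hedged sketch (plain lists/dicts stand in for sim.net.allCells):

    from numpy import cumsum

    def pop_avg_rates(spk_pops, pop_counts, t0, t1):
        # spk_pops: population label of every plotted spike; pop_counts: pop -> number
        # of cells; (t0, t1): plotted time window in ms
        tsecs = (t1 - t0) / 1e3
        return {pop: spk_pops.count(pop) / float(n) / tsecs for pop, n in pop_counts.items()}

    def overlay_label_positions(popLabels, pop_counts):
        # vertical centre (in axes fraction) of each population's band of rows
        numCells = float(sum(pop_counts.values()))
        margin = 1.0 / numCells / 2
        tys = [(pop_counts[p] / numCells) * (1 - 2 * margin) for p in popLabels]
        offsets = [0] + list(cumsum(tys))[:-1]
        return {p: off + ty / 2.0 for p, ty, off in zip(popLabels, tys, offsets)}

    # pop_avg_rates(['E', 'E', 'I'], {'E': 2, 'I': 1}, 0, 1000) -> {'E': 1.0, 'I': 1.0}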
From 17c713fecbbfca9646702cbd70be7297fe3fe6d9 Mon Sep 17 00:00:00 2001
From: salvadord
Date: Wed, 2 Nov 2016 11:27:28 -0400
Subject: [PATCH 15/54] fixed bug in batch checking what jobs to skip
---
CHANGES.md | 2 ++
netpyne/batch.py | 4 ++--
netpyne/simFuncs.py | 6 +++---
3 files changed, 7 insertions(+), 5 deletions(-)
diff --git a/CHANGES.md b/CHANGES.md
index dbe67abaf..928a20523 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -10,6 +10,8 @@
- Fixed bug: cache_efficient was not being turned on
+- Fixed bug setting simConfig loaded from file
+
# Version 0.6.3
- Added cvode_active simConfig option to set variable time step (issue #116)
diff --git a/netpyne/batch.py b/netpyne/batch.py
index 98bab46ae..35bd115a3 100644
--- a/netpyne/batch.py
+++ b/netpyne/batch.py
@@ -95,8 +95,8 @@ def run(self):
# skip if output file already exists
import glob
- if self.runCfg.get('skip', False) and glob.glob(jobName+'.*'):
- print 'Skipping job %s since output file already exists...'
+ if self.runCfg.get('skip', False) and glob.glob(jobName+'.json'):
+ print 'Skipping job %s since output file already exists...' % (jobName)
else:
# read params or set defaults
sleepInterval = self.runCfg.get('sleepInterval', 1)
diff --git a/netpyne/simFuncs.py b/netpyne/simFuncs.py
index 7cb70a4d2..841e81009 100644
--- a/netpyne/simFuncs.py
+++ b/netpyne/simFuncs.py
@@ -102,8 +102,8 @@ def setSimCfg (cfg):
else:
sim.cfg = specs.SimConfig() # create new object
- if cfg.simLabel and cfg.saveFolder:
- cfg.filename = cfg.saveFolder+'/'+cfg.simLabel
+ if sim.cfg.simLabel and sim.cfg.saveFolder:
+ sim.cfg.filename = sim.cfg.saveFolder+'/'+sim.cfg.simLabel
###############################################################################
@@ -122,7 +122,7 @@ def createParallelContext ():
###############################################################################
# Load netParams from cell
###############################################################################
-def loadNetParams (filename, data=None, setLoaded=False):
+def loadNetParams (filename, data=None, setLoaded=True):
if not data: data = _loadFile(filename)
print('Loading netParams...')
if 'net' in data and 'params' in data['net']:
From 57703498e71de63d7fd537744702d2a08b8cb6cd Mon Sep 17 00:00:00 2001
From: salvadord
Date: Wed, 2 Nov 2016 18:30:16 -0400
Subject: [PATCH 16/54] added plotShape to plot 3D morphology of cell and
synapse locations
---
CHANGES.md | 6 +++
examples/HybridTut/HybridTut.py | 2 +-
netpyne/analysis.py | 89 ++++++++++++++++++++++++++-------
netpyne/simFuncs.py | 2 +-
4 files changed, 79 insertions(+), 20 deletions(-)
diff --git a/CHANGES.md b/CHANGES.md
index 928a20523..382060655 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,5 +1,7 @@
# Version 0.6.4
+- Added plotShape to plot 3D morphology of cell and synapse locations
+
- Added option to skip batch sims if output file already exists
- Added option to overlay pop labels and show avg rates to plotRaster() (issue #111)
@@ -12,6 +14,10 @@
- Fixed bug setting simConfig loaded from file
+- Fixed bug in plotRaster inverseOrder option
+
+- Fixed str vs basestring in analysis.py and simFuncs.py
+
# Version 0.6.3
- Added cvode_active simConfig option to set variable time step (issue #116)
diff --git a/examples/HybridTut/HybridTut.py b/examples/HybridTut/HybridTut.py
index 9ad1bf953..3ae6437fa 100644
--- a/examples/HybridTut/HybridTut.py
+++ b/examples/HybridTut/HybridTut.py
@@ -122,6 +122,6 @@
# Analysis and plotting
-simConfig.analysis['plotRaster'] = True # Whether or not to plot a raster
+simConfig.analysis['plotRaster'] = {'orderInverse': False} #True # Whether or not to plot a raster
simConfig.analysis['plotTraces'] = {'include': [1,51]} # plot recorded traces for this list of cells
diff --git a/netpyne/analysis.py b/netpyne/analysis.py
index cfc0a5726..992e06cbc 100644
--- a/netpyne/analysis.py
+++ b/netpyne/analysis.py
@@ -63,7 +63,7 @@ def _showFigure():
## Save figure data
######################################################################################################################################################
def _saveFigData(figData, fileName, type=''):
- if not isinstance(fileName, str):
+ if not isinstance(fileName, basestring):
fileName = sim.cfg.filename+'_'+type+'.pkl'
fileName = fileName.split('.')
@@ -124,7 +124,7 @@ def getCellsInclude(include):
elif isinstance(condition, int): # cell gid
cellGids.append(condition)
- elif isinstance(condition, str): # entire pop
+ elif isinstance(condition, basestring): # entire pop
if condition in allNetStimPops:
netStimPops.append(condition)
else:
@@ -206,7 +206,7 @@ def plotRaster (include = ['allCells'], timeRange = None, maxSpikes = 1e8, order
else:
yorder = [cell['tags'][orderBy] for cell in cells]
- if orderInverse: yorder.reverse()
+ #if orderInverse: yorder.reverse()
sortedGids = {gid:i for i,(y,gid) in enumerate(sorted(zip(yorder,cellGids)))}
spkinds = [sortedGids[gid] for gid in spkgids]
@@ -315,6 +315,8 @@ def plotRaster (include = ['allCells'], timeRange = None, maxSpikes = 1e8, order
ax1.set_xlim(timeRange)
ax1.set_ylim(-1, len(cells)+numNetStims+1)
+ if orderInverse: gca().invert_yaxis()
+
# Add legend
if popRates:
popLabelRates = [popLabel + ' (%.3g Hz)'%(avgRates[popLabel]) for popLabel in popLabels]
@@ -339,7 +341,11 @@ def plotRaster (include = ['allCells'], timeRange = None, maxSpikes = 1e8, order
labels = popLabelRates if popRates else popLabels
for ipop,(ty, tyOffset, popLabel) in enumerate(zip(tys, tysOffset, popLabels)):
label = popLabelRates[ipop] if popRates else popLabel
- text(tx, tyOffset + ty/2.0 - 0.01, label, transform=ax.transAxes, fontsize=fontsiz, color=popColors[popLabel])
+ if orderInverse:
+ finalty = 1.0 - (tyOffset + ty/2.0 - 0.01)
+ else:
+ finalty = tyOffset + ty/2.0 - 0.01
+ text(tx, finalty, label, transform=ax.transAxes, fontsize=fontsiz, color=popColors[popLabel])
maxLabelLen = max([len(l) for l in labels])
subplots_adjust(right=(1.0-0.011*maxLabelLen))
@@ -354,7 +360,7 @@ def plotRaster (include = ['allCells'], timeRange = None, maxSpikes = 1e8, order
# save figure
if saveFig:
- if isinstance(saveFig, str):
+ if isinstance(saveFig, basestring):
filename = saveFig
else:
filename = sim.cfg.filename+'_'+'raster.png'
@@ -434,18 +440,20 @@ def plotSpikeHist (include = ['allCells', 'eachPop'], timeRange = None, binSize
else:
spkinds,spkts = [],[]
+
# Add NetStim spikes
spkts, spkinds = list(spkts), list(spkinds)
numNetStims = 0
for netStimPop in netStimPops:
- cellStims = [cellStim for cell,cellStim in sim.allSimData['stims'].iteritems() if netStimPop in cellStim]
- if len(cellStims) > 0:
- lastInd = max(spkinds) if len(spkinds)>0 else 0
- spktsNew = [spkt for cellStim in cellStims for spkt in cellStim[netStimPop] ]
- spkindsNew = [lastInd+1+i for i,cellStim in enumerate(cellStims) for spkt in cellStim[netStimPop]]
- spkts.extend(spktsNew)
- spkinds.extend(spkindsNew)
- numNetStims += len(cellStims)
+ if 'stims' in sim.allSimData:
+ cellStims = [cellStim for cell,cellStim in sim.allSimData['stims'].iteritems() if netStimPop in cellStim]
+ if len(cellStims) > 0:
+ lastInd = max(spkinds) if len(spkinds)>0 else 0
+ spktsNew = [spkt for cellStim in cellStims for spkt in cellStim[netStimPop] ]
+ spkindsNew = [lastInd+1+i for i,cellStim in enumerate(cellStims) for spkt in cellStim[netStimPop]]
+ spkts.extend(spktsNew)
+ spkinds.extend(spkindsNew)
+ numNetStims += len(cellStims)
histo = histogram(spkts, bins = arange(timeRange[0], timeRange[1], binSize))
histoT = histo[1][:-1]+binSize/2
@@ -496,7 +504,7 @@ def plotSpikeHist (include = ['allCells', 'eachPop'], timeRange = None, binSize
# save figure
if saveFig:
- if isinstance(saveFig, str):
+ if isinstance(saveFig, basestring):
filename = saveFig
else:
filename = sim.cfg.filename+'_'+'spikeHist.png'
@@ -645,7 +653,7 @@ def plotFigPerTrace(subGids):
# save figure
if saveFig:
- if isinstance(saveFig, str):
+ if isinstance(saveFig, basestring):
filename = saveFig
else:
filename = sim.cfg.filename+'_'+'traces.png'
@@ -669,6 +677,51 @@ def invertDictMapping(d):
inv_map[v].append(k)
return inv_map
+
+######################################################################################################################################################
+## Plot cell shape
+######################################################################################################################################################
+def plotShape (showSyns = True, figSize = (10,8), saveData = None, saveFig = None, showFig = True):
+ '''
+ Plot 3D cell shape using NEURON Interview PlotShape
+ - showSyns (True|False): Show synaptic connections in 3D
+ - figSize ((width, height)): Size of figure (default: (10,8))
+ - saveData (None|True|'fileName'): File name where to save the final data used to generate the figure;
+ if set to True uses filename from simConfig (default: None)
+ - saveFig (None|True|'fileName'): File name where to save the figure;
+ if set to True uses filename from simConfig (default: None)
+ - showFig (True|False): Whether to show the figure or not (default: True)
+
+ - Returns figure handles
+ '''
+
+ from neuron import h, gui
+
+ fig = h.Shape()
+ if showSyns:
+ color = 2 # red
+ style = 'o'
+ siz = 10
+ for cell in sim.net.cells:
+ for sec in cell.secs.values():
+ for synMech in sec['synMechs']:
+ fig.point_mark(synMech['hSyn'], color)
+
+ # save figure
+ if saveFig:
+ if isinstance(saveFig, basestring):
+ filename = saveFig
+ else:
+ filename = sim.cfg.filename+'_'+'shape.ps'
+ fig.printfile(filename)
+
+
+ return fig
+
+
+
+
+
######################################################################################################################################################
## Plot LFP (time-resolved or power spectra)
######################################################################################################################################################
@@ -978,7 +1031,7 @@ def plotConn (include = ['all'], feature = 'strength', orderBy = 'gid', figSize
# save figure
if saveFig:
- if isinstance(saveFig, str):
+ if isinstance(saveFig, basestring):
filename = saveFig
else:
filename = sim.cfg.filename+'_'+'conn.png'
@@ -1027,7 +1080,7 @@ def plot2Dnet (include = ['allCells'], figSize = (12,12), showConns = True, save
if showConns:
for postCell in cells:
for con in postCell['conns']: # plot connections between cells
- if not isinstance(con['preGid'], str) and con['preGid'] in cellGids:
+ if not isinstance(con['preGid'], basestring) and con['preGid'] in cellGids:
posXpre,posYpre = next(((cell['tags']['x'],cell['tags']['y']) for cell in cells if cell['gid']==con['preGid']), None)
posXpost,posYpost = postCell['tags']['x'], postCell['tags']['y']
color='red'
@@ -1056,7 +1109,7 @@ def plot2Dnet (include = ['allCells'], figSize = (12,12), showConns = True, save
# save figure
if saveFig:
- if isinstance(saveFig, str):
+ if isinstance(saveFig, basestring):
filename = saveFig
else:
filename = sim.cfg.filename+'_'+'2Dnet.png'
diff --git a/netpyne/simFuncs.py b/netpyne/simFuncs.py
index 841e81009..70891e49c 100644
--- a/netpyne/simFuncs.py
+++ b/netpyne/simFuncs.py
@@ -661,7 +661,7 @@ def getCellsList(include):
elif isinstance(condition, int): # cell gid
cellGids.append(condition)
- elif isinstance(condition, str): # entire pop
+ elif isinstance(condition, basestring): # entire pop
cellGids.extend(list(sim.net.pops[condition].cellGids))
#[c.gid for c in sim.net.cells if c.tags['popLabel']==condition])
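Note on PATCH 16: plotShape opens a NEURON Shape window for the instantiated morphologies and, with showSyns=True, marks every synapse location with a red point; when saveFig is given the plot is written out through Shape.printfile (PostScript). A hedged usage example, assuming a netParams/simConfig pair already exists:

    from netpyne import sim, analysis

    # sim.createSimulateAnalyze(netParams, simConfig)    # network must be instantiated first
    fig = analysis.plotShape(showSyns=True,              # mark each synapse location in red
                             saveFig='net_shape.ps')     # written via h.Shape().printfile(...)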
From 0d902ad2656229674b3a4bb9979137d961ef910e Mon Sep 17 00:00:00 2001
From: Padraig Gleeson
Date: Fri, 4 Nov 2016 16:46:28 +0000
Subject: [PATCH 17/54] Disabling gui in export examples
---
examples/HHTut/HHTut_export.py | 3 +++
examples/HybridTut/HybridTut_export.py | 3 +++
examples/M1/M1_export.py | 3 +++
examples/sandbox/sandbox_export.py | 3 +++
4 files changed, 12 insertions(+)
diff --git a/examples/HHTut/HHTut_export.py b/examples/HHTut/HHTut_export.py
index 7710b4080..1124450c1 100644
--- a/examples/HHTut/HHTut_export.py
+++ b/examples/HHTut/HHTut_export.py
@@ -1,6 +1,9 @@
import HHTut # import parameters file
from netpyne import sim # import netpyne sim module
+import netpyne
+netpyne.gui = False
+
np = HHTut.netParams
print("********************\n*\n* Note: setting noise to 1, since noise can only be 0 or 1 in NeuroML export currently!\n*\n********************")
np.popParams['background']['noise'] = 1
diff --git a/examples/HybridTut/HybridTut_export.py b/examples/HybridTut/HybridTut_export.py
index e637f3b49..a31f3bd29 100644
--- a/examples/HybridTut/HybridTut_export.py
+++ b/examples/HybridTut/HybridTut_export.py
@@ -1,6 +1,9 @@
import HybridTut # import parameters file
from netpyne import sim # import netpyne sim module
+import netpyne
+netpyne.gui = False
+
np = HybridTut.netParams
print("********************\n*\n* Note: setting noise to 1, since noise can only be 0 or 1 in NeuroML export currently!\n*\n********************")
np.popParams['background']['noise'] = 1
diff --git a/examples/M1/M1_export.py b/examples/M1/M1_export.py
index 7c26d1868..6ddf1d947 100644
--- a/examples/M1/M1_export.py
+++ b/examples/M1/M1_export.py
@@ -1,6 +1,9 @@
import M1 # import parameters file
from netpyne import sim # import netpyne sim module
+import netpyne
+netpyne.gui = False
+
np = M1.netParams
print("********************\n*\n* Note: setting noise to 1, since noise can only be 0 or 1 in NeuroML export currently!\n*\n********************")
np.popParams['background_E']['noise'] = 1
diff --git a/examples/sandbox/sandbox_export.py b/examples/sandbox/sandbox_export.py
index 027908589..20f7bfe71 100644
--- a/examples/sandbox/sandbox_export.py
+++ b/examples/sandbox/sandbox_export.py
@@ -1,6 +1,9 @@
import sandbox # import parameters file
from netpyne import sim # import netpyne sim module
+import netpyne
+netpyne.gui = False
+
sim.createExportNeuroML2(netParams = sandbox.netParams,
simConfig = sandbox.simConfig,
reference = 'sandbox') # create and export network to NeuroML 2
\ No newline at end of file
From 7775b204d19a2e5b435a4e35ed85a42089a3ff5d Mon Sep 17 00:00:00 2001
From: Padraig Gleeson
Date: Fri, 4 Nov 2016 18:30:15 +0000
Subject: [PATCH 18/54] Improved usage of netpyne.__gui__
---
examples/HHTut/HHTut_export.py | 3 ---
examples/HHTut/HHTut_run.py | 10 ++++++++--
examples/HybridTut/HybridTut_export.py | 3 ---
examples/HybridTut/HybridTut_run.py | 6 ++++++
examples/M1/M1_export.py | 3 ---
examples/M1/M1_run.py | 6 ++++++
examples/sandbox/sandbox_export.py | 9 ++++++---
7 files changed, 26 insertions(+), 14 deletions(-)
diff --git a/examples/HHTut/HHTut_export.py b/examples/HHTut/HHTut_export.py
index 1124450c1..7710b4080 100644
--- a/examples/HHTut/HHTut_export.py
+++ b/examples/HHTut/HHTut_export.py
@@ -1,9 +1,6 @@
import HHTut # import parameters file
from netpyne import sim # import netpyne sim module
-import netpyne
-netpyne.gui = False
-
np = HHTut.netParams
print("********************\n*\n* Note: setting noise to 1, since noise can only be 0 or 1 in NeuroML export currently!\n*\n********************")
np.popParams['background']['noise'] = 1
diff --git a/examples/HHTut/HHTut_run.py b/examples/HHTut/HHTut_run.py
index 8039812c5..232e89f7d 100644
--- a/examples/HHTut/HHTut_run.py
+++ b/examples/HHTut/HHTut_run.py
@@ -1,4 +1,10 @@
-import HHTut # import parameters file
+import sys
+
+if '-nogui' in sys.argv:
+ import netpyne
+ netpyne.__gui__ = False
+
+import HHTut # import parameters file
from netpyne import sim # import netpyne sim module
-sim.createSimulateAnalyze(netParams = HHTut.netParams, simConfig = HHTut.simConfig) # create and simulate network
\ No newline at end of file
+sim.createSimulateAnalyze(netParams = HHTut.netParams, simConfig = HHTut.simConfig) # create and simulate network
diff --git a/examples/HybridTut/HybridTut_export.py b/examples/HybridTut/HybridTut_export.py
index a31f3bd29..e637f3b49 100644
--- a/examples/HybridTut/HybridTut_export.py
+++ b/examples/HybridTut/HybridTut_export.py
@@ -1,9 +1,6 @@
import HybridTut # import parameters file
from netpyne import sim # import netpyne sim module
-import netpyne
-netpyne.gui = False
-
np = HybridTut.netParams
print("********************\n*\n* Note: setting noise to 1, since noise can only be 0 or 1 in NeuroML export currently!\n*\n********************")
np.popParams['background']['noise'] = 1
diff --git a/examples/HybridTut/HybridTut_run.py b/examples/HybridTut/HybridTut_run.py
index f3c6200b7..aecccf2d8 100644
--- a/examples/HybridTut/HybridTut_run.py
+++ b/examples/HybridTut/HybridTut_run.py
@@ -1,3 +1,9 @@
+import sys
+
+if '-nogui' in sys.argv:
+ import netpyne
+ netpyne.__gui__ = False
+
import HybridTut # import parameters file
from netpyne import sim # import netpyne init module
diff --git a/examples/M1/M1_export.py b/examples/M1/M1_export.py
index 6ddf1d947..7c26d1868 100644
--- a/examples/M1/M1_export.py
+++ b/examples/M1/M1_export.py
@@ -1,9 +1,6 @@
import M1 # import parameters file
from netpyne import sim # import netpyne sim module
-import netpyne
-netpyne.gui = False
-
np = M1.netParams
print("********************\n*\n* Note: setting noise to 1, since noise can only be 0 or 1 in NeuroML export currently!\n*\n********************")
np.popParams['background_E']['noise'] = 1
diff --git a/examples/M1/M1_run.py b/examples/M1/M1_run.py
index fb1c55c87..23fac54f3 100644
--- a/examples/M1/M1_run.py
+++ b/examples/M1/M1_run.py
@@ -1,3 +1,9 @@
+import sys
+
+if '-nogui' in sys.argv:
+ import netpyne
+ netpyne.__gui__ = False
+
import M1 # import parameters file
from netpyne import sim # import netpyne init module
diff --git a/examples/sandbox/sandbox_export.py b/examples/sandbox/sandbox_export.py
index 20f7bfe71..6b54c2143 100644
--- a/examples/sandbox/sandbox_export.py
+++ b/examples/sandbox/sandbox_export.py
@@ -1,9 +1,12 @@
+import sys
+
+if '-nogui' in sys.argv:
+ import netpyne
+ netpyne.__gui__ = False
+
import sandbox # import parameters file
from netpyne import sim # import netpyne sim module
-import netpyne
-netpyne.gui = False
-
sim.createExportNeuroML2(netParams = sandbox.netParams,
simConfig = sandbox.simConfig,
reference = 'sandbox') # create and export network to NeuroML 2
\ No newline at end of file
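Note on PATCH 18: the example scripts now accept a -nogui flag and turn the package-level GUI switch off before anything imports matplotlib's interactive machinery; the same guard can be reused in any user script:

    import sys
    if '-nogui' in sys.argv:        # same guard the example scripts use
        import netpyne
        netpyne.__gui__ = False     # must run before 'from netpyne import sim'

For example: nrniv -python HHTut_run.py -nogui (or plain 'python HHTut_run.py -nogui').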
From 75c3532d48129afd9da7b6642eac39ccb5a342ce Mon Sep 17 00:00:00 2001
From: salvadord
Date: Fri, 4 Nov 2016 23:29:01 -0400
Subject: [PATCH 19/54] added subcellular syn distribution methods
---
netpyne/analysis.py | 3 +-
netpyne/cell.py | 14 ++++-
netpyne/network.py | 137 +++++++++++++++++++++++++++++++++++---------
sdnotes.org | 4 +-
4 files changed, 125 insertions(+), 33 deletions(-)
diff --git a/netpyne/analysis.py b/netpyne/analysis.py
index 992e06cbc..80f947271 100644
--- a/netpyne/analysis.py
+++ b/netpyne/analysis.py
@@ -705,7 +705,8 @@ def plotShape (showSyns = True, figSize = (10,8), saveData = None, saveFig = Non
for cell in sim.net.cells:
for sec in cell.secs.values():
for synMech in sec['synMechs']:
- fig.point_mark(synMech['hSyn'], color)
+ if synMech['hSyn']:
+ fig.point_mark(synMech['hSyn'], color)
# save figure
if saveFig:
diff --git a/netpyne/cell.py b/netpyne/cell.py
index 19f6ec779..92de4dcb6 100644
--- a/netpyne/cell.py
+++ b/netpyne/cell.py
@@ -183,7 +183,7 @@ def createNEURONObj (self, prop):
if 'pt3d' in sectParams['geom']:
h.pt3dclear(sec=sec['hSec'])
x = self.tags['x']
- y = self.tags['y']
+ y = -self.tags['y'] # Neuron y-axis positive = upwards, so assume pia=0 and cortical depth = neg
z = self.tags['z']
for pt3d in sectParams['geom']['pt3d']:
h.pt3dadd(x+pt3d[0], y+pt3d[1], z+pt3d[2], pt3d[3], sec=sec['hSec'])
@@ -253,6 +253,16 @@ def createNEURONObj (self, prop):
sec['hSec'].connect(self.secs[sectParams['topol']['parentSec']]['hSec'], sectParams['topol']['parentX'], sectParams['topol']['childX']) # make topol connection
+ def addSynMechsNEURONObj(self):
+ # set params for all sections
+ for sectName,sectParams in self.secs.iteritems():
+ # add synMechs (only used when loading)
+ if 'synMechs' in sectParams:
+ for synMech in sectParams['synMechs']:
+ if 'label' in synMech and 'loc' in synMech:
+ self.addSynMech(synLabel=synMech['label'], secLabel=sectName, loc=synMech['loc'])
+
+
# Create NEURON objs for conns and syns if included in prop (used when loading)
def addStimsNEURONObj(self):
# assumes python structure exists
@@ -349,7 +359,7 @@ def addSynMech (self, synLabel, secLabel, loc):
synMech[paramName] = paramValue
sec['synMechs'].append(synMech)
- if sim.cfg.createNEURONObj:
+ if sim.cfg.createNEURONObj:
             # add synaptic mechanism NEURON objects
if 'synMechs' not in sec:
sec['synMechs'] = []
diff --git a/netpyne/network.py b/netpyne/network.py
index 4fd52a728..571face0b 100644
--- a/netpyne/network.py
+++ b/netpyne/network.py
@@ -7,7 +7,7 @@
Contributors: salvadordura@gmail.com
"""
-from matplotlib.pylab import array, sin, cos, tan, exp, sqrt, mean, inf, rand
+from matplotlib.pylab import array, sin, cos, tan, exp, sqrt, mean, inf, rand, dstack, unravel_index, argsort, zeros
from random import seed, random, randint, sample, uniform, triangular, gauss, betavariate, expovariate, gammavariate
from time import time
from numbers import Number
@@ -206,6 +206,68 @@ def fromtodistance(self, origin_segment, to_segment):
return h.distance(to_segment.x, sec=to_segment.sec)
+ ###############################################################################
+ # Calculate 2d point from segment location
+ ###############################################################################
+ def _posFromLoc(self, sec, x):
+ sec.push()
+ s = x * sec.L
+ numpts = int(h.n3d())
+ b = -1
+ for ii in range(numpts):
+ if h.arc3d(ii) >= s:
+ b = ii
+ break
+ if b == -1: print "an error occurred in pointFromLoc, SOMETHING IS NOT RIGHT"
+
+ if h.arc3d(b) == s: # shortcut
+ x, y, z = h.x3d(b), h.y3d(b), h.z3d(b)
+ else: # need to interpolate
+ a = b-1
+ t = (s - h.arc3d(a)) / (h.arc3d(b) - h.arc3d(a))
+ x = h.x3d(a) + t * (h.x3d(b) - h.x3d(a))
+ y = h.y3d(a) + t * (h.y3d(b) - h.y3d(a))
+ z = h.z3d(a) + t * (h.z3d(b) - h.z3d(a))
+
+ h.pop_section()
+ return x, y, z
+
+
+ ###############################################################################
+ # Calculate syn density for each segment from grid
+ ###############################################################################
+ def _interpolateSegmentSigma(self, cell, secList, gridX, gridY, gridSigma):
+ segNumSyn = {} #
+ for secName in secList:
+ sec = cell.secs[secName]
+ segNumSyn[secName] = []
+ for seg in sec['hSec']:
+ x, y, z = self._posFromLoc(sec['hSec'], seg.x)
+ distX = [abs(gx-x) for gx in gridX]
+ distY = [abs(gy-y) for gy in gridY]
+ ixs = array(distX).argsort()[:2]
+ jys = array(distY).argsort()[:2]
+ sigma = zeros((2,2))
+ i1,i2,j1,j2 = min(ixs), max(ixs), min(jys), max(jys)
+ x1,x2,y1,y2 = gridX[i1], gridX[i2], gridY[j1], gridY[j2]
+ sigma_x1_y1 = gridSigma[i1,j1]
+ sigma_x1_y2 = gridSigma[i1,j2]
+ sigma_x2_y1 = gridSigma[i2,j1]
+ sigma_x2_y2 = gridSigma[i2,j2]
+
+ if x1 == x2 or y1 == y2:
+ print "ERROR in closest grid points: ", secName, x1, x2, y1, y2
+ else:
+ # bilinear interpolation, see http://en.wikipedia.org/wiki/Bilinear_interpolation
+ sigma = ((sigma_x1_y1*abs(x2-x)*abs(y2-y) + sigma_x2_y1*abs(x-x1)*abs(y2-y) + sigma_x1_y2*abs(x2-x)*abs(y-y1) + sigma_x2_y2*abs(x-x1)*abs(y-y1))/(abs(x2-x1)*abs(y2-y1)))
+ #sigma = ((sigma_x1_y1*abs(x2-x)*abs(y2-y) + sigma_x2_y1*abs(x-x1)*abs(y2-y) + sigma_x1_y2*abs(x2-x)*abs(y-y1) + sigma_x2_y2*abs(x-x1)*abs(y-y1))/((x2-x1)*(y2-y1)))
+
+ numSyn = sigma * sec['hSec'].L / sec['hSec'].nseg # return num syns
+ segNumSyn[secName].append(numSyn)
+
+ return segNumSyn
+
+
###############################################################################
# Subcellular connectivity (distribution of synapses)
###############################################################################
@@ -245,21 +307,50 @@ def subcellularConn(self, allCellTags, allPopTags):
else:
secList = [subConnParam['sec']]
- # calculate new syn positions
- newSecs, newLocs = postCell._distributeSynsUniformly (secList=secList, numSyns=len(conns))
-
- postSynMechs = postCell.secs[conn['sec']].synMechs
+ if subConnParam.get('density', None) == 'uniform':
+ # calculate new syn positions
+ newSecs, newLocs = postCell._distributeSynsUniformly(secList=secList, numSyns=len(conns))
+
+ elif isinstance(subConnParam.get('density', None), dict) and subConnParam['density']['type'] == '2Dmap':
+ somaX, _, _ = self._posFromLoc(postCell.secs['soma']['hSec'], 0.5) # move method to Cell!
+ gridX = [x - somaX for x in subConnParam['density']['gridX']] # center x at cell soma
+ gridY = subConnParam['density']['gridY']
+ gridSigma = subConnParam['density']['gridValues']
+
+ segNumSyn = self._interpolateSegmentSigma(postCell, secList, gridX, gridY, gridSigma) # move method to Cell!
+ totSyn = sum([sum(nsyn) for nsyn in segNumSyn.values()])
+ scaleNumSyn = float(len(conns))/float(totSyn)
+ for sec in segNumSyn: segNumSyn[sec] = [int(round(x * scaleNumSyn)) for x in segNumSyn[sec]]
+
+ print gridSigma
+ print segNumSyn
+ print len(conns), totSyn, scaleNumSyn
+
+ # convert to list so can serialize and save
+ subConnParam['density']['gridY'] = list(subConnParam['density']['gridY'])
+ subConnParam['density']['gridValues'] = list(subConnParam['density']['gridValues'])
+
+ newSecs, newLocs = [], []
+ for sec, nsyns in segNumSyn.iteritems():
+ for i, seg in enumerate(postCell.secs[sec]['hSec']):
+ for isyn in range(nsyns[i]):
+ newSecs.append(sec)
+ newLocs.append(seg.x)
# modify syn positions
- # for conn,newSec,newLoc in zip(conns, newSecs, newLocs):
- # if newSec != conn['sec'] or newLoc != conn['loc']:
- # indexOld = next((i for i,synMech in enumerate(postSynMechs) if synMech['label']==conn['synMech'] and synMech['loc']==conn['loc']), None)
- # if indexOld: del postSynMechs[indexOld]
- # print conn['synMech']
- # postCell.addSynMech(conn['synMech'], newSec, newLoc)
+ for conn, newSec, newLoc in zip(conns, newSecs, newLocs):
+ postSynMechs = postCell.secs[conn['sec']].synMechs
+ if newSec != conn['sec'] or newLoc != conn['loc']:
+ indexOld = next((i for i,synMech in enumerate(postSynMechs) if synMech['label']==conn['synMech'] and synMech['loc']==conn['loc']), None)
+ if indexOld: del postSynMechs[indexOld]
+ postCell.addSynMech(conn['synMech'], newSec, newLoc)
+ conn['sec'] = newSec
+ conn['loc'] = newLoc
- # conn['sec'] = newSec
- # conn['loc'] = newLoc
+ # Add synMechs, stim and conn NEURON objects
+ postCell.addSynMechsNEURONObj()
+ postCell.addStimsNEURONObj()
+ postCell.addConnsNEURONObj()
#print self.fromtodistance(postCell.secs[secOrig](0.5), postCell.secs['secs'][conn['sec']](conn['loc']))
@@ -272,21 +363,6 @@ def subcellularConn(self, allCellTags, allPopTags):
# print seg.x, h.distance(seg.x)
- # print [(conn['sec'],conn['loc']) for conn in conns]
-
- # find postsyn cells
- # for each postsyn cell:
- # find syns from presyn cells
- # calculate new syn locations based on sec, yNormRange and density
- # get y location of synapse -- check Ben's code
- # move synapses
-
- # netParams['subConnParams'].append(
- # {'preConds': {'cellType': ['PYR']}, # 'cellType': ['IT', 'PT', 'CT']
- # 'postConds': {'popLabel': 'PYR3'}, # 'popLabel': 'L5_PT'
- # 'sec': 'all',
- # 'ynormRange': [0, 1.0],
- # 'density': [0.2, 0.1, 0.0, 0.0, 0.2, 0.5] }) # subcellulalr distribution
@@ -307,6 +383,10 @@ def connectCells (self):
allCellTags = {cell.gid: cell.tags for cell in self.cells}
allPopTags = {-i: pop.tags for i,pop in enumerate(self.pops.values())} # gather tags from pops so can connect NetStim pops
+ if self.params.subConnParams: # do not create NEURON objs until synapses are distributed based on subConnParams
+ origCreateNEURONObj = bool(sim.cfg.createNEURONObj)
+ sim.cfg.createNEURONObj = False
+
for connParamLabel,connParamTemp in self.params.connParams.iteritems(): # for each conn rule or parameter set
connParam = connParamTemp.copy()
connParam['label'] = connParamLabel
@@ -329,6 +409,7 @@ def connectCells (self):
         # apply subcellular connectivity params (distribution of synapses)
if self.params.subConnParams:
+ sim.cfg.createNEURONObj = origCreateNEURONObj # set to original value
self.subcellularConn(allCellTags, allPopTags)
diff --git a/sdnotes.org b/sdnotes.org
index 7ed3c2e75..b79324da5 100644
--- a/sdnotes.org
+++ b/sdnotes.org
@@ -1297,7 +1297,7 @@ after so not saved?)
- sCRACM maps were normalized to the largest pixels within a map and thus represent the relative strength of input within the dendritic tree.
-- Average sCRACM map of layer 2/3 inputs to M1-CSPs (n 23 neurons). L, Input profiles by soma depth. M, Location of perisomatic input from layer 2/3 relative to soma position, plotted as a function of soma depth. For each profile, the input depth was calculated as the center of mass across the perisomatic pixels.
+- Average sCRACM map of layer 2/3 inputs to M1-CSPs (n = 23 neurons). L, Input profiles by soma depth. M, Location of perisomatic input from layer 2/3 relative to soma position, plotted as a function of soma depth. For each profile, the input depth was calculated as the center of mass across the perisomatic pixels.
[[file+sys:/u/salvadord/Documents/ISB/Models/netpyne_repo/gif/20160313_212734.png][fig]]
@@ -1374,7 +1374,7 @@ eg an EPSP can end up being terminated by K chans or can be primarily and active
-- within given pixel stimulated with blue light, pixel = 50x50 um, num of syns/um/dendrite - homogeneous within 50x50um
-- convert scracm map -> map of how many syns per length of dendrite at each region of the map (at each pixel)
 -- need to know how much dendrite in that pixel, and how many syns were coactivated to get soma response
--- his method: put fixed number of dendrite, and then adjusted weight of these syns
+-- his method: put fixed number of syns in dendrite, and then adjusted weight of these syns
 -- couldn't really separate weights from num of synapses -- started with plausible weight and num of syns
-- length density for a specific assumption of conductances (used to convert map)
 -- input to netpyne should be free from Neuron peculiarities -- relates to something experimentally -- num of syn per length
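
The _interpolateSegmentSigma method added above evaluates the target synapse density at each
segment position by bilinear interpolation between the four closest grid points of the 2D density
map. The following standalone sketch reproduces that interpolation step on made-up grid values;
the real code takes the grid from subConnParams['density'] and the segment position from
_posFromLoc.

    # Standalone sketch of the bilinear interpolation used in _interpolateSegmentSigma
    # (gridX, gridY and gridSigma are illustrative values only).
    from numpy import array

    gridX = [-100.0, 0.0, 100.0]            # um, x relative to the soma
    gridY = [-300.0, -200.0, -100.0]        # um, cortical depth
    gridSigma = [[0.1, 0.2, 0.3],           # density (syns/um) at each (x, y) grid point,
                 [0.2, 0.4, 0.6],           # indexed as gridSigma[ix][jy]
                 [0.1, 0.3, 0.5]]

    def interp_sigma(x, y):
        ixs = array([abs(gx - x) for gx in gridX]).argsort()[:2]   # two closest x grid points
        jys = array([abs(gy - y) for gy in gridY]).argsort()[:2]   # two closest y grid points
        i1, i2, j1, j2 = min(ixs), max(ixs), min(jys), max(jys)
        x1, x2, y1, y2 = gridX[i1], gridX[i2], gridY[j1], gridY[j2]
        # weighted average of the four surrounding grid values (bilinear interpolation)
        return (gridSigma[i1][j1] * abs(x2 - x) * abs(y2 - y)
              + gridSigma[i2][j1] * abs(x - x1) * abs(y2 - y)
              + gridSigma[i1][j2] * abs(x2 - x) * abs(y - y1)
              + gridSigma[i2][j2] * abs(x - x1) * abs(y - y1)) / (abs(x2 - x1) * abs(y2 - y1))

    print(interp_sigma(25.0, -250.0))   # density at a segment located at x=25, y=-250 -> 0.275

In the real code the interpolated density is then multiplied by the segment length (sec.L / nseg)
to obtain the expected number of synapses per segment.
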
From c802abb75b9b7b8a9ff6d71eb81e9ef3bb2b65e9 Mon Sep 17 00:00:00 2001
From: salvadord
Date: Sat, 5 Nov 2016 19:56:56 -0400
Subject: [PATCH 20/54] kept grouped synapses together during subcell conn
rules
---
netpyne/network.py | 113 ++++++++++++++++++++++++++++++++-------------
1 file changed, 81 insertions(+), 32 deletions(-)
diff --git a/netpyne/network.py b/netpyne/network.py
index 571face0b..dfe0eb65c 100644
--- a/netpyne/network.py
+++ b/netpyne/network.py
@@ -7,7 +7,7 @@
Contributors: salvadordura@gmail.com
"""
-from matplotlib.pylab import array, sin, cos, tan, exp, sqrt, mean, inf, rand, dstack, unravel_index, argsort, zeros
+from matplotlib.pylab import array, sin, cos, tan, exp, sqrt, mean, inf, rand, dstack, unravel_index, argsort, zeros, ceil
from random import seed, random, randint, sample, uniform, triangular, gauss, betavariate, expovariate, gammavariate
from time import time
from numbers import Number
@@ -285,14 +285,24 @@ def subcellularConn(self, allCellTags, allPopTags):
for postCellGid in postCellsTags: # for each postsyn cell
if postCellGid in self.lid2gid:
postCell = self.cells[self.gid2lid[postCellGid]]
- conns = [conn for conn in postCell.conns if conn['preGid'] in preCellsTags]
- # find origin section
- if 'soma' in postCell.secs:
- secOrig = 'soma'
- elif any([secName.startswith('som') for secName in postCell.secs.keys()]):
- secOrig = next(secName for secName in postCell.secs.keys() if secName.startswith('soma'))
- else:
- secOrig = postCell.secs.keys()[0]
+ allConns = [conn for conn in postCell.conns if conn['preGid'] in preCellsTags]
+
+ # group synMechs so they are not distributed separately
+ if subConnParam.get('groupSynMechs', None):
+ conns = []
+ connsGroup = {}
+ iConn = -1
+ for conn in allConns:
+ if not conn['synMech'].startswith('__grouped__'):
+ conns.append(conn)
+ iConn = iConn + 1
+ if conn['synMech'] in subConnParam['groupSynMechs']:
+ for synMech in [s for s in subConnParam['groupSynMechs'] if s != conn['synMech']]:
+ connGroup = next(c for c in allConns if c['synMech'] == synMech and c['sec']==conn['sec'] and c['loc']==conn['loc'])
+ connGroup['synMech'] = '__grouped__'+connGroup['synMech']
+ connsGroup[iConn] = connGroup
+ else:
+ conns = allConns
# if sectionList
if isinstance(subConnParam.get('sec'), str) and subConnParam.get('sec') in postCell.secLists:
@@ -307,11 +317,13 @@ def subcellularConn(self, allCellTags, allPopTags):
else:
secList = [subConnParam['sec']]
+ # Uniform distribution
if subConnParam.get('density', None) == 'uniform':
# calculate new syn positions
newSecs, newLocs = postCell._distributeSynsUniformly(secList=secList, numSyns=len(conns))
- elif isinstance(subConnParam.get('density', None), dict) and subConnParam['density']['type'] == '2Dmap':
+ # 2D map and 1D map (radial)
+ elif isinstance(subConnParam.get('density', None), dict) and subConnParam['density']['type'] in ['2Dmap', '1Dmap']:
somaX, _, _ = self._posFromLoc(postCell.secs['soma']['hSec'], 0.5) # move method to Cell!
gridX = [x - somaX for x in subConnParam['density']['gridX']] # center x at cell soma
gridY = subConnParam['density']['gridY']
@@ -319,12 +331,21 @@ def subcellularConn(self, allCellTags, allPopTags):
segNumSyn = self._interpolateSegmentSigma(postCell, secList, gridX, gridY, gridSigma) # move method to Cell!
totSyn = sum([sum(nsyn) for nsyn in segNumSyn.values()])
- scaleNumSyn = float(len(conns))/float(totSyn)
+ scaleNumSyn = float(len(conns))/float(totSyn) if totSyn>0 else 0.0
for sec in segNumSyn: segNumSyn[sec] = [int(round(x * scaleNumSyn)) for x in segNumSyn[sec]]
-
- print gridSigma
- print segNumSyn
- print len(conns), totSyn, scaleNumSyn
+ totSynRescale = sum([sum(nsyn) for nsyn in segNumSyn.values()])
+
+ print len(conns), totSynRescale
+ if totSynRescale < len(conns): # if missing syns, add extra
+ extraSyns = len(conns)-totSynRescale
+ extraAdded = 0
+ for sec in segNumSyn.values():
+ if extraAdded == extraSyns: break
+ for nsyn in sec:
+ if nsyn > 0:
+ nsyn = nsyn + 1
+ extraAdded = extraAdded + 1
+ if extraAdded == extraSyns: break
# convert to list so can serialize and save
subConnParam['density']['gridY'] = list(subConnParam['density']['gridY'])
@@ -337,15 +358,54 @@ def subcellularConn(self, allCellTags, allPopTags):
newSecs.append(sec)
newLocs.append(seg.x)
- # modify syn positions
- for conn, newSec, newLoc in zip(conns, newSecs, newLocs):
+
+
+ # Distance-based
+ elif subConnParam.get('density', None) == 'distance':
+ # find origin section
+ if 'soma' in postCell.secs:
+ secOrig = 'soma'
+ elif any([secName.startswith('som') for secName in postCell.secs.keys()]):
+ secOrig = next(secName for secName in postCell.secs.keys() if secName.startswith('soma'))
+ else:
+ secOrig = postCell.secs.keys()[0]
+
+ #print self.fromtodistance(postCell.secs[secOrig](0.5), postCell.secs['secs'][conn['sec']](conn['loc']))
+
+ # different case if has vs doesn't have 3d points
+ # h.distance(sec=h.soma[0], seg=0)
+ # for sec in apical:
+ # print h.secname()
+ # for seg in sec:
+ # print seg.x, h.distance(seg.x)
+
+
+ for i,(conn, newSec, newLoc) in enumerate(zip(conns, newSecs, newLocs)):
postSynMechs = postCell.secs[conn['sec']].synMechs
+
+ # if need to reposition conn, remove syns of conn, add new syn, and set new loc and sec
if newSec != conn['sec'] or newLoc != conn['loc']:
- indexOld = next((i for i,synMech in enumerate(postSynMechs) if synMech['label']==conn['synMech'] and synMech['loc']==conn['loc']), None)
- if indexOld: del postSynMechs[indexOld]
+ indexOld = next((i for i,synMech in enumerate(postSynMechs) if synMech['label']==conn['synMech'] and synMech['loc']==conn['loc']), None)
+ if indexOld != None:
+ del postSynMechs[indexOld]
postCell.addSynMech(conn['synMech'], newSec, newLoc)
- conn['sec'] = newSec
- conn['loc'] = newLoc
+ conn['sec'] = newSec
+ conn['loc'] = newLoc
+
+ # find grouped conns
+ if subConnParam.get('groupSynMechs', None) and conn['synMech'] in subConnParam['groupSynMechs']:
+ connGroup = connsGroup[i] # get grouped conn from previously stored dict
+ connGroup['synMech']
+ connGroup['synMech'] = connGroup['synMech'].split('__grouped__')[1] # remove '__grouped__' label
+
+ # if need to reposition conn, remove syns of grouped conn, add new syn, and set new loc and sec
+ if newSec != connGroup['sec'] or newLoc != connGroup['loc']:
+ indexOld = next((i for i,synMech in enumerate(postSynMechs) if synMech['label']==connGroup['synMech'] and synMech['loc']==connGroup['loc']), None)
+ if indexOld != None:
+ del postSynMechs[indexOld]
+ connGroup['sec'] = newSec
+ connGroup['loc'] = newLoc
+ postCell.addSynMech(connGroup['synMech'], newSec, newLoc)
# Add synMechs, stim and conn NEURON objects
postCell.addSynMechsNEURONObj()
@@ -353,17 +413,6 @@ def subcellularConn(self, allCellTags, allPopTags):
postCell.addConnsNEURONObj()
- #print self.fromtodistance(postCell.secs[secOrig](0.5), postCell.secs['secs'][conn['sec']](conn['loc']))
-
- # different case if has vs doesn't have 3d points
- # h.distance(sec=h.soma[0], seg=0)
- # for sec in apical:
- # print h.secname()
- # for seg in sec:
- # print seg.x, h.distance(seg.x)
-
-
-
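
The groupSynMechs option introduced above keeps co-localized synaptic mechanisms (for example an
AMPA/NMDA pair created by the same connection) attached to the same section and location when
synapses are redistributed: one member of the group is relocated and the others are tagged
'__grouped__' and moved with it. A hypothetical rule showing the intended shape of such a
parameter set is sketched below; the labels, population names and grid values are illustrative,
and the exact call used to register the rule may differ in this version.

    # Hypothetical subcellular connectivity rule using groupSynMechs with a 2D density map.
    # Only the dictionary keys follow the patch above; all labels and numbers are illustrative.
    densityMap = [[0.0, 0.1, 0.2, 0.1],     # one row per gridX value, one entry per gridY value
                  [0.1, 0.3, 0.4, 0.2],     # (syns per um of dendrite)
                  [0.2, 0.5, 0.6, 0.3],
                  [0.1, 0.3, 0.4, 0.2],
                  [0.0, 0.1, 0.2, 0.1]]

    subConnRule = {
        'preConds':  {'popLabel': 'L2_E'},          # presynaptic cells
        'postConds': {'popLabel': 'L5_PT'},         # postsynaptic cells
        'sec': 'apical',                            # section or secList of the postsyn cell
        'groupSynMechs': ['AMPA', 'NMDA'],          # keep these synMechs at the same sec/loc
        'density': {'type': '2Dmap',                # per-segment density interpolated from a 2D map
                    'gridX': [-200, -100, 0, 100, 200],   # um, absolute x (re-centered on the soma)
                    'gridY': [-600, -450, -300, -150],    # um, cortical depth
                    'gridValues': densityMap}}
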
From daab2d1c6b02cca4b82754f4949d1cfc50b95f7d Mon Sep 17 00:00:00 2001
From: salvadord
Date: Sat, 5 Nov 2016 20:53:45 -0400
Subject: [PATCH 21/54] added 1Dmap (radial) syn distribution to subcell conn
rules
---
netpyne/network.py | 71 ++++++++++++++++++++++++++++------------------
1 file changed, 44 insertions(+), 27 deletions(-)
diff --git a/netpyne/network.py b/netpyne/network.py
index dfe0eb65c..9416fe646 100644
--- a/netpyne/network.py
+++ b/netpyne/network.py
@@ -243,24 +243,39 @@ def _interpolateSegmentSigma(self, cell, secList, gridX, gridY, gridSigma):
segNumSyn[secName] = []
for seg in sec['hSec']:
x, y, z = self._posFromLoc(sec['hSec'], seg.x)
- distX = [abs(gx-x) for gx in gridX]
- distY = [abs(gy-y) for gy in gridY]
- ixs = array(distX).argsort()[:2]
- jys = array(distY).argsort()[:2]
- sigma = zeros((2,2))
- i1,i2,j1,j2 = min(ixs), max(ixs), min(jys), max(jys)
- x1,x2,y1,y2 = gridX[i1], gridX[i2], gridY[j1], gridY[j2]
- sigma_x1_y1 = gridSigma[i1,j1]
- sigma_x1_y2 = gridSigma[i1,j2]
- sigma_x2_y1 = gridSigma[i2,j1]
- sigma_x2_y2 = gridSigma[i2,j2]
-
- if x1 == x2 or y1 == y2:
- print "ERROR in closest grid points: ", secName, x1, x2, y1, y2
- else:
- # bilinear interpolation, see http://en.wikipedia.org/wiki/Bilinear_interpolation
- sigma = ((sigma_x1_y1*abs(x2-x)*abs(y2-y) + sigma_x2_y1*abs(x-x1)*abs(y2-y) + sigma_x1_y2*abs(x2-x)*abs(y-y1) + sigma_x2_y2*abs(x-x1)*abs(y-y1))/(abs(x2-x1)*abs(y2-y1)))
- #sigma = ((sigma_x1_y1*abs(x2-x)*abs(y2-y) + sigma_x2_y1*abs(x-x1)*abs(y2-y) + sigma_x1_y2*abs(x2-x)*abs(y-y1) + sigma_x2_y2*abs(x-x1)*abs(y-y1))/((x2-x1)*(y2-y1)))
+ if gridX and gridY: # 2D
+ distX = [abs(gx-x) for gx in gridX]
+ distY = [abs(gy-y) for gy in gridY]
+ ixs = array(distX).argsort()[:2]
+ jys = array(distY).argsort()[:2]
+ i1,i2,j1,j2 = min(ixs), max(ixs), min(jys), max(jys)
+ x1,x2,y1,y2 = gridX[i1], gridX[i2], gridY[j1], gridY[j2]
+ sigma_x1_y1 = gridSigma[i1,j1]
+ sigma_x1_y2 = gridSigma[i1,j2]
+ sigma_x2_y1 = gridSigma[i2,j1]
+ sigma_x2_y2 = gridSigma[i2,j2]
+
+ if x1 == x2 or y1 == y2:
+ print "ERROR in closest grid points: ", secName, x1, x2, y1, y2
+ else:
+ # bilinear interpolation, see http://en.wikipedia.org/wiki/Bilinear_interpolation
+ sigma = ((sigma_x1_y1*abs(x2-x)*abs(y2-y) + sigma_x2_y1*abs(x-x1)*abs(y2-y) + sigma_x1_y2*abs(x2-x)*abs(y-y1) + sigma_x2_y2*abs(x-x1)*abs(y-y1))/(abs(x2-x1)*abs(y2-y1)))
+ #sigma = ((sigma_x1_y1*abs(x2-x)*abs(y2-y) + sigma_x2_y1*abs(x-x1)*abs(y2-y) + sigma_x1_y2*abs(x2-x)*abs(y-y1) + sigma_x2_y2*abs(x-x1)*abs(y-y1))/((x2-x1)*(y2-y1)))
+
+ elif gridY: # 1d = radial
+ distY = [abs(gy-y) for gy in gridY]
+ jys = array(distY).argsort()[:2]
+ sigma = zeros((1,2))
+ j1,j2 = min(jys), max(jys)
+ y1, y2 = gridY[j1], gridY[j2]
+ sigma_y1 = gridSigma[j1]
+ sigma_y2 = gridSigma[j2]
+
+ if y1 == y2:
+ print "ERROR in closest grid points: ", secName, y1, y2
+ else:
+ # linear interpolation, see http://en.wikipedia.org/wiki/Bilinear_interpolation
+ sigma = ((sigma_y1*abs(y2-y) + sigma_y2*abs(y-y1)) / abs(y2-y1))
numSyn = sigma * sec['hSec'].L / sec['hSec'].nseg # return num syns
segNumSyn[secName].append(numSyn)
@@ -272,7 +287,7 @@ def _interpolateSegmentSigma(self, cell, secList, gridX, gridY, gridSigma):
# Subcellular connectivity (distribution of synapses)
###############################################################################
def subcellularConn(self, allCellTags, allPopTags):
-
+ sim.timing('start', 'subConnectTime')
print(' Distributing synapses based on subcellular connectivity rules...')
for subConnParamTemp in self.params.subConnParams.values(): # for each conn rule or parameter set
subConnParam = subConnParamTemp.copy()
@@ -324,18 +339,22 @@ def subcellularConn(self, allCellTags, allPopTags):
# 2D map and 1D map (radial)
elif isinstance(subConnParam.get('density', None), dict) and subConnParam['density']['type'] in ['2Dmap', '1Dmap']:
- somaX, _, _ = self._posFromLoc(postCell.secs['soma']['hSec'], 0.5) # move method to Cell!
- gridX = [x - somaX for x in subConnParam['density']['gridX']] # center x at cell soma
+
gridY = subConnParam['density']['gridY']
gridSigma = subConnParam['density']['gridValues']
- segNumSyn = self._interpolateSegmentSigma(postCell, secList, gridX, gridY, gridSigma) # move method to Cell!
+ if subConnParam['density']['type'] == '2Dmap': # 2D
+ somaX, _, _ = self._posFromLoc(postCell.secs['soma']['hSec'], 0.5) # move method to Cell!
+ gridX = [x - somaX for x in subConnParam['density']['gridX']] # center x at cell soma
+ segNumSyn = self._interpolateSegmentSigma(postCell, secList, gridX, gridY, gridSigma) # move method to Cell!
+ elif subConnParam['density']['type'] == '1Dmap': # 1D
+ segNumSyn = self._interpolateSegmentSigma(postCell, secList, None, gridY, gridSigma) # move method to Cell!
+
totSyn = sum([sum(nsyn) for nsyn in segNumSyn.values()])
scaleNumSyn = float(len(conns))/float(totSyn) if totSyn>0 else 0.0
for sec in segNumSyn: segNumSyn[sec] = [int(round(x * scaleNumSyn)) for x in segNumSyn[sec]]
totSynRescale = sum([sum(nsyn) for nsyn in segNumSyn.values()])
- print len(conns), totSynRescale
if totSynRescale < len(conns): # if missing syns, add extra
extraSyns = len(conns)-totSynRescale
extraAdded = 0
@@ -412,9 +431,7 @@ def subcellularConn(self, allCellTags, allPopTags):
postCell.addStimsNEURONObj()
postCell.addConnsNEURONObj()
-
-
-
+ sim.pc.barrier()
###############################################################################
@@ -622,7 +639,7 @@ def fullConn (self, preCellsTags, postCellsTags, connParam):
# replace lambda function (with args as dict of lambda funcs) with list of values
seed(sim.id32('%d'%(sim.cfg.seeds['conn']+preCellsTags.keys()[0]+postCellsTags.keys()[0])))
connParam[paramStrFunc[:-4]+'List'] = {(preGid,postGid): connParam[paramStrFunc](**{k:v if isinstance(v, Number) else v(preCellTags,postCellTags) for k,v in connParam[paramStrFunc+'Vars'].iteritems()})
- for preGid,preCellTags in preCellsTags.iteritems() for postGid,postCellTags in postCellsTags.iteritems()}
+ for preGid,preCellTags in preCellsTags.iteritems() for postGid,postCellTags in postCellsTags.iteritems()}
for postCellGid in postCellsTags: # for each postsyn cell
if postCellGid in self.lid2gid: # check if postsyn is in this node's list of gids
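
With the new '1Dmap' density type the map only depends on the y (depth) coordinate, so the
interpolation above degenerates to linear interpolation between the two closest gridY points.
A standalone sketch with illustrative numbers:

    # Standalone sketch of the 1D (radial) interpolation branch added above
    # (gridY and gridSigma are illustrative values only).
    from numpy import array

    gridY = [-600.0, -400.0, -200.0, 0.0]   # um, cortical depth
    gridSigma = [0.05, 0.20, 0.40, 0.10]    # density (syns/um) at each gridY point

    def interp_sigma_1d(y):
        jys = array([abs(gy - y) for gy in gridY]).argsort()[:2]   # two closest grid points
        j1, j2 = min(jys), max(jys)
        y1, y2 = gridY[j1], gridY[j2]
        # linear interpolation between the two surrounding values
        return (gridSigma[j1] * abs(y2 - y) + gridSigma[j2] * abs(y - y1)) / abs(y2 - y1)

    seg_length = 10.0                       # um, sec.L / sec.nseg in the real code
    print(interp_sigma_1d(-250.0) * seg_length)   # expected number of syns on this segment (3.5)
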
From acba9fc97f58850b9495ea539cfa1d63e22ef9af Mon Sep 17 00:00:00 2001
From: salvadord
Date: Sun, 6 Nov 2016 19:19:25 -0500
Subject: [PATCH 22/54] added cfg.addSynMechs and fixed bug with multiple
 subcell conn rules
---
netpyne/cell.py | 42 ++++++++++++++++++++++++++----------------
netpyne/network.py | 46 ++++++++++++++++++----------------------------
netpyne/specs.py | 1 +
sdnotes.org | 1 -
4 files changed, 45 insertions(+), 45 deletions(-)
diff --git a/netpyne/cell.py b/netpyne/cell.py
index 92de4dcb6..d1999bf14 100644
--- a/netpyne/cell.py
+++ b/netpyne/cell.py
@@ -8,6 +8,7 @@
from numbers import Number
from copy import deepcopy
+from time import sleep
from neuron import h # Import NEURON
from specs import Dict
import sim
@@ -263,6 +264,7 @@ def addSynMechsNEURONObj(self):
self.addSynMech(synLabel=synMech['label'], secLabel=sectName, loc=synMech['loc'])
+
# Create NEURON objs for conns and syns if included in prop (used when loading)
def addStimsNEURONObj(self):
# assumes python structure exists
@@ -295,7 +297,9 @@ def addConnsNEURONObj(self):
for conn in self.conns:
# set postsyn target
synMech = next((synMech for synMech in self.secs[conn['sec']]['synMechs'] if synMech['label']==conn['synMech'] and synMech['loc']==conn['loc']), None)
- if not synMech: continue # go to next conn
+ if not synMech:
+ synMech = self.addSynMech(conn['synMech'], conn['sec'], conn['loc'])
+ #continue # go to next conn
postTarget = synMech['hSyn']
# create NetCon
@@ -347,22 +351,23 @@ def associateGid (self, threshold = 10.0):
def addSynMech (self, synLabel, secLabel, loc):
synMechParams = sim.net.params.synMechParams.get(synLabel) # get params for this synMech
sec = self.secs.get(secLabel, None)
+ # add synaptic mechanism to python struct
+ if 'synMechs' not in sec or not isinstance(sec['synMechs'], list):
+ sec['synMechs'] = []
+
if synMechParams and sec: # if both the synMech and the section exist
- if sim.cfg.createPyStruct:
- # add synaptic mechanism to python struct
- if 'synMechs' not in sec:
- sec['synMechs'] = []
+ if sim.cfg.createPyStruct and sim.cfg.addSynMechs:
synMech = next((synMech for synMech in sec['synMechs'] if synMech['label']==synLabel and synMech['loc']==loc), None)
if not synMech: # if synMech not in section, then create
synMech = Dict({'label': synLabel, 'loc': loc})
for paramName, paramValue in synMechParams.iteritems():
synMech[paramName] = paramValue
sec['synMechs'].append(synMech)
+ else:
+ synMech = None
- if sim.cfg.createNEURONObj:
+ if sim.cfg.createNEURONObj and sim.cfg.addSynMechs:
             # add synaptic mechanism NEURON objects
- if 'synMechs' not in sec:
- sec['synMechs'] = []
if not synMech: # if pointer not created in createPyStruct, then check
synMech = next((synMech for synMech in sec['synMechs'] if synMech['label']==synLabel and synMech['loc']==loc), None)
if not synMech: # if still doesnt exist, then create
@@ -384,6 +389,8 @@ def addSynMech (self, synLabel, secLabel, loc):
synMech['hNetcon'].weight[0] = paramValue
elif paramName not in ['sec', 'loc']:
setattr(synMech['hNetcon'], paramName, paramValue)
+ else:
+ synMech = None
return synMech
@@ -938,14 +945,17 @@ def _setConnSynMechs (self, params, secLabels):
def _distributeSynsUniformly (self, secList, numSyns):
from numpy import cumsum
- #secLengths = [self.secs[s]['hSec'].L for s in secList]
- secLengths = [self.secs[s]['geom']['L'] for s in secList]
- totLength = sum(secLengths)
- cumLengths = list(cumsum(secLengths))
- absLocs = [i*(totLength/numSyns)+totLength/numSyns/2 for i in range(numSyns)]
- inds = [cumLengths.index(next(x for x in cumLengths if x >= absLoc)) for absLoc in absLocs]
- secs = [secList[ind] for ind in inds]
- locs = [(cumLengths[ind] - absLoc) / secLengths[ind] for absLoc,ind in zip(absLocs,inds)]
+ secLengths = [self.secs[s]['hSec'].L for s in secList]
+ #secLengths = [self.secs[s]['geom']['L'] for s in secList]
+ try:
+ totLength = sum(secLengths)
+ cumLengths = list(cumsum(secLengths))
+ absLocs = [i*(totLength/numSyns)+totLength/numSyns/2 for i in range(numSyns)]
+ inds = [cumLengths.index(next(x for x in cumLengths if x >= absLoc)) for absLoc in absLocs]
+ secs = [secList[ind] for ind in inds]
+ locs = [(cumLengths[ind] - absLoc) / secLengths[ind] for absLoc,ind in zip(absLocs,inds)]
+ except:
+ secs, locs = [],[]
return secs, locs
diff --git a/netpyne/network.py b/netpyne/network.py
index 9416fe646..33a0d3fa1 100644
--- a/netpyne/network.py
+++ b/netpyne/network.py
@@ -9,7 +9,7 @@
from matplotlib.pylab import array, sin, cos, tan, exp, sqrt, mean, inf, rand, dstack, unravel_index, argsort, zeros, ceil
from random import seed, random, randint, sample, uniform, triangular, gauss, betavariate, expovariate, gammavariate
-from time import time
+from time import time, sleep
from numbers import Number
from copy import copy
from specs import ODict
@@ -289,6 +289,7 @@ def _interpolateSegmentSigma(self, cell, secList, gridX, gridY, gridSigma):
def subcellularConn(self, allCellTags, allPopTags):
sim.timing('start', 'subConnectTime')
print(' Distributing synapses based on subcellular connectivity rules...')
+
for subConnParamTemp in self.params.subConnParams.values(): # for each conn rule or parameter set
subConnParam = subConnParamTemp.copy()
@@ -378,7 +379,6 @@ def subcellularConn(self, allCellTags, allPopTags):
newLocs.append(seg.x)
-
# Distance-based
elif subConnParam.get('density', None) == 'distance':
# find origin section
@@ -400,36 +400,17 @@ def subcellularConn(self, allCellTags, allPopTags):
for i,(conn, newSec, newLoc) in enumerate(zip(conns, newSecs, newLocs)):
- postSynMechs = postCell.secs[conn['sec']].synMechs
-
- # if need to reposition conn, remove syns of conn, add new syn, and set new loc and sec
- if newSec != conn['sec'] or newLoc != conn['loc']:
- indexOld = next((i for i,synMech in enumerate(postSynMechs) if synMech['label']==conn['synMech'] and synMech['loc']==conn['loc']), None)
- if indexOld != None:
- del postSynMechs[indexOld]
- postCell.addSynMech(conn['synMech'], newSec, newLoc)
- conn['sec'] = newSec
- conn['loc'] = newLoc
+ conn['sec'] = newSec
+ conn['loc'] = newLoc
# find grouped conns
if subConnParam.get('groupSynMechs', None) and conn['synMech'] in subConnParam['groupSynMechs']:
connGroup = connsGroup[i] # get grouped conn from previously stored dict
- connGroup['synMech']
connGroup['synMech'] = connGroup['synMech'].split('__grouped__')[1] # remove '__grouped__' label
-
- # if need to reposition conn, remove syns of grouped conn, add new syn, and set new loc and sec
- if newSec != connGroup['sec'] or newLoc != connGroup['loc']:
- indexOld = next((i for i,synMech in enumerate(postSynMechs) if synMech['label']==connGroup['synMech'] and synMech['loc']==connGroup['loc']), None)
- if indexOld != None:
- del postSynMechs[indexOld]
- connGroup['sec'] = newSec
- connGroup['loc'] = newLoc
- postCell.addSynMech(connGroup['synMech'], newSec, newLoc)
-
- # Add synMechs, stim and conn NEURON objects
- postCell.addSynMechsNEURONObj()
- postCell.addStimsNEURONObj()
- postCell.addConnsNEURONObj()
+
+ connGroup['sec'] = newSec
+ connGroup['loc'] = newLoc
+
sim.pc.barrier()
@@ -451,7 +432,10 @@ def connectCells (self):
if self.params.subConnParams: # do not create NEURON objs until synapses are distributed based on subConnParams
origCreateNEURONObj = bool(sim.cfg.createNEURONObj)
+ origAddSynMechs = bool(sim.cfg.addSynMechs)
sim.cfg.createNEURONObj = False
+ sim.cfg.addSynMechs = False
+
for connParamLabel,connParamTemp in self.params.connParams.iteritems(): # for each conn rule or parameter set
connParam = connParamTemp.copy()
@@ -475,8 +459,14 @@ def connectCells (self):
         # apply subcellular connectivity params (distribution of synapses)
if self.params.subConnParams:
- sim.cfg.createNEURONObj = origCreateNEURONObj # set to original value
self.subcellularConn(allCellTags, allPopTags)
+ sim.cfg.createNEURONObj = origCreateNEURONObj # set to original value
+ sim.cfg.addSynMechs = origAddSynMechs # set to original value
+ for cell in sim.net.cells:
+ # Add synMechs, stim and conn NEURON objects
+ cell.addStimsNEURONObj()
+ #cell.addSynMechsNEURONObj()
+ cell.addConnsNEURONObj()
print(' Number of connections on node %i: %i ' % (sim.rank, sum([len(cell.conns) for cell in self.cells])))
diff --git a/netpyne/specs.py b/netpyne/specs.py
index f95197017..6edc90d56 100644
--- a/netpyne/specs.py
+++ b/netpyne/specs.py
@@ -360,6 +360,7 @@ def __init__(self, simConfigDict = None):
self.seeds = Dict({'conn': 1, 'stim': 1, 'loc': 1}) # Seeds for randomizers (connectivity, input stimulation and cell locations)
self.createNEURONObj = True # create HOC objects when instantiating network
self.createPyStruct = True # create Python structure (simulator-independent) when instantiating network
+        self.addSynMechs = True # whether to add synaptic mechanisms or not
self.includeParamsLabel = True # include label of param rule that created that cell, conn or stim
self.gatherOnlySimData = False # omits gathering of net+cell data thus reducing gatherData time
self.timing = True # show timing of each process
diff --git a/sdnotes.org b/sdnotes.org
index b79324da5..21345e6d4 100644
--- a/sdnotes.org
+++ b/sdnotes.org
@@ -2437,7 +2437,6 @@ Plotting 2D representation of network cell locations and connections...
*** 380x120x380 (less conns, with subcell)
-
* 16jul20 Matplotlib errors
- http://stackoverflow.com/questions/4130355/python-matplotlib-framework-under-macosx
- fixed by adding 'backend: Agg' to ~/.matplotlib/matplotlibrc
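
Two things change above for the 'uniform' path: _distributeSynsUniformly now reads section lengths
from the instantiated NEURON sections (hSec.L), and connectCells defers NEURON object creation
(cfg.createNEURONObj and the new cfg.addSynMechs are switched off) until subcellularConn has
settled the final section and location of every synapse. The placement itself spaces the synapses
evenly over the combined length of the target sections; a standalone sketch of that calculation
with made-up lengths:

    # Standalone sketch of the even spacing performed by _distributeSynsUniformly
    # (section names and lengths are illustrative).
    from numpy import cumsum

    secList = ['apic_0', 'apic_1', 'apic_2']    # hypothetical section names
    secLengths = [100.0, 50.0, 150.0]           # um (sec['hSec'].L in the real code)
    numSyns = 6

    totLength = sum(secLengths)
    cumLengths = list(cumsum(secLengths))
    # centers of numSyns equal slices of the total dendritic length
    absLocs = [i * (totLength / numSyns) + totLength / numSyns / 2 for i in range(numSyns)]
    # map each absolute position to the section containing it...
    inds = [cumLengths.index(next(x for x in cumLengths if x >= absLoc)) for absLoc in absLocs]
    secs = [secList[ind] for ind in inds]
    # ...and to a fractional location (measured from the cumulative end of the section, as in the patch)
    locs = [(cumLengths[ind] - absLoc) / secLengths[ind] for absLoc, ind in zip(absLocs, inds)]

    print(list(zip(secs, locs)))
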
From 6adb9af6268192da0222fb0ab581fb6d057323ec Mon Sep 17 00:00:00 2001
From: salvadord
Date: Mon, 7 Nov 2016 08:56:52 -0500
Subject: [PATCH 23/54] All sections now include argument cell=self.gid to
differentiate them
---
netpyne/cell.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/netpyne/cell.py b/netpyne/cell.py
index d1999bf14..1ffd94221 100644
--- a/netpyne/cell.py
+++ b/netpyne/cell.py
@@ -171,7 +171,7 @@ def createNEURONObj (self, prop):
if sectName not in self.secs:
self.secs[sectName] = Dict() # create sect dict if doesn't exist
if not self.secs[sectName].get('hSec'):
- self.secs[sectName]['hSec'] = h.Section(name=sectName) # create h Section object
+ self.secs[sectName]['hSec'] = h.Section(name=sectName, cell=self.gid) # create h Section object
sec = self.secs[sectName] # pointer to section
# set geometry params
From cd07f10ab758717972226fcc69082fac5b11100f Mon Sep 17 00:00:00 2001
From: salvadord
Date: Mon, 7 Nov 2016 09:10:32 -0500
Subject: [PATCH 24/54] All sections now include argument cell=self to
differentiate them
---
CHANGES.md | 2 ++
netpyne/cell.py | 2 +-
2 files changed, 3 insertions(+), 1 deletion(-)
diff --git a/CHANGES.md b/CHANGES.md
index 382060655..538904348 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -6,6 +6,8 @@
- Added option to overlay pop labels and show avg rates to plotRaster() (issue #111)
+- All sections now include argument cell=self to differentiate them
+
- Fixed bug positioning cells with 3d geom
- Fixed bug in sim.clearAll by closing all figures instead of current (issue #168)
diff --git a/netpyne/cell.py b/netpyne/cell.py
index 1ffd94221..aa3df0022 100644
--- a/netpyne/cell.py
+++ b/netpyne/cell.py
@@ -171,7 +171,7 @@ def createNEURONObj (self, prop):
if sectName not in self.secs:
self.secs[sectName] = Dict() # create sect dict if doesn't exist
if not self.secs[sectName].get('hSec'):
- self.secs[sectName]['hSec'] = h.Section(name=sectName, cell=self.gid) # create h Section object
+ self.secs[sectName]['hSec'] = h.Section(name=sectName, cell=self) # create h Section object
sec = self.secs[sectName] # pointer to section
# set geometry params
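
Passing cell=self when creating each h.Section (the change in the two patches above) associates
the section with its parent cell object in NEURON, so two cells can both own a section called
'soma' and still be told apart when printing or looking up section names. A minimal sketch,
assuming NEURON is installed:

    # Minimal sketch of the effect of the cell= argument to h.Section (assumes NEURON is installed).
    from neuron import h

    class ToyCell(object):
        def __init__(self, gid):
            self.gid = gid
            # with cell=self, NEURON qualifies the section name with this cell object,
            # so same-named sections from different cells remain distinguishable
            self.soma = h.Section(name='soma', cell=self)
        def __repr__(self):
            return 'ToyCell_%d' % self.gid

    cells = [ToyCell(0), ToyCell(1)]
    for c in cells:
        print(c.soma)    # e.g. ToyCell_0.soma and ToyCell_1.soma (exact form may vary)
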
From 66cb7492a3bad594278c5c5658b6053989c2fadb Mon Sep 17 00:00:00 2001
From: salvadord
Date: Mon, 7 Nov 2016 14:16:23 -0500
Subject: [PATCH 25/54] Added function plotSpikePSD to plot power spectral
density of spiking data
---
CHANGES.md | 2 +
doc/source/reference.rst | 17 ++++
examples/HybridTut/HybridTut.py | 3 +-
netpyne/analysis.py | 136 ++++++++++++++++++++++++++++++++
4 files changed, 157 insertions(+), 1 deletion(-)
diff --git a/CHANGES.md b/CHANGES.md
index 538904348..992717cf0 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,5 +1,7 @@
# Version 0.6.4
+- Added function plotSpikePSD to plot power spectral density of spiking data
+
- Added plotShape to plot 3D morphology of cell and synapse locations
- Added option to skip batch sims if output file already exists
diff --git a/doc/source/reference.rst b/doc/source/reference.rst
index c9377ed82..5f14ae03d 100644
--- a/doc/source/reference.rst
+++ b/doc/source/reference.rst
@@ -797,6 +797,23 @@ Analysis-related functions
- Returns figure handle
+* **analysis.plotSpikePSD** (include = ['allCells', 'eachPop'], timeRange = None, binSize = 5, Fs = 200, overlay=True, yaxis = 'rate', figSize = (10,8), saveData = None, saveFig = None, showFig = True)
+
+ Plot spikes power spectral density (PSD). Optional arguments:
+
+ - *include*: List of data series to include. Note: one line per item, not grouped (['all'|,'allCells'|,'allNetStims'|,120|,'L4'|,('L2', 56)|,('L5',[4,5,6])])
+ - *timeRange*: Time range of spikes shown; if None shows all ([start:stop])
+ - *binSize*: Size in ms of each bin (int)
+ - *Fs*: PSD sampling frequency used to calculate the Fourier frequencies (float)
+ - *overlay*: Whether to overlay the data lines or plot in separate subplots (True|False)
+ - *figSize*: Size of figure ((width, height))
+ - *saveData*: File name where to save the final data used to generate the figure (None|'fileName')
+ - *saveFig*: File name where to save the figure (None|'fileName')
+ - *showFig*: Whether to show the figure or not (True|False)
+
+ - Returns figure handle and power array
+
+
* **analysis.plotTraces** (include = [], timeRange = None, overlay = False, oneFigPer = 'cell', rerun = False, figSize = (10,8), saveData = None, saveFig = None, showFig = True)
     Plot recorded traces (specified in ``simConfig.recordTraces``). Optional arguments:
diff --git a/examples/HybridTut/HybridTut.py b/examples/HybridTut/HybridTut.py
index 3ae6437fa..3d1e4e633 100644
--- a/examples/HybridTut/HybridTut.py
+++ b/examples/HybridTut/HybridTut.py
@@ -123,5 +123,6 @@
# Analysis and plotting
simConfig.analysis['plotRaster'] = {'orderInverse': False} #True # Whether or not to plot a raster
-simConfig.analysis['plotTraces'] = {'include': [1,51]} # plot recorded traces for this list of cells
+#simConfig.analysis['plotTraces'] = {'include': [1,51]} # plot recorded traces for this list of cells
+simConfig.analysis['plotSpikePSD'] = {'include': ['allCells', 'PYR_HH', 'PYR_Izhi']} # plot spike PSD for these populations
diff --git a/netpyne/analysis.py b/netpyne/analysis.py
index 80f947271..717456402 100644
--- a/netpyne/analysis.py
+++ b/netpyne/analysis.py
@@ -515,7 +515,143 @@ def plotSpikeHist (include = ['allCells', 'eachPop'], timeRange = None, binSize
return fig
+
+
+######################################################################################################################################################
+## Plot spikes power spectral density (PSD)
+######################################################################################################################################################
+def plotSpikePSD (include = ['allCells', 'eachPop'], timeRange = None, binSize = 5, Fs = 200, overlay=True,
+ figSize = (10,8), saveData = None, saveFig = None, showFig = True):
+ '''
+ Plot spike histogram
+ - include (['all',|'allCells','allNetStims',|,120,|,'E1'|,('L2', 56)|,('L5',[4,5,6])]): List of data series to include.
+ Note: one line per item, not grouped (default: ['allCells', 'eachPop'])
+ - timeRange ([start:stop]): Time range of spikes shown; if None shows all (default: None)
+ - binSize (int): Size in ms of spike bins (default: 5)
+ - Fs (float): PSD sampling frequency used to calculate the Fourier frequencies (default: 200)
+ - overlay (True|False): Whether to overlay the data lines or plot in separate subplots (default: True)
+ - graphType ('line'|'bar'): Type of graph to use (line graph or bar plot) (default: 'line')
+ - yaxis ('rate'|'count'): Units of y axis (firing rate in Hz, or spike count) (default: 'rate')
+ - figSize ((width, height)): Size of figure (default: (10,8))
+ - saveData (None|True|'fileName'): File name where to save the final data used to generate the figure;
+ if set to True uses filename from simConfig (default: None)
+ - saveFig (None|True|'fileName'): File name where to save the figure;
+ if set to True uses filename from simConfig (default: None)
+ - showFig (True|False): Whether to show the figure or not (default: True)
+
+ - Returns figure handle
+ '''
+
+ print('Plotting spikes power spectral density (PSD) ...')
+
+ colorList = [[0.42,0.67,0.84], [0.90,0.76,0.00], [0.42,0.83,0.59], [0.90,0.32,0.00],
+ [0.34,0.67,0.67], [0.90,0.59,0.00], [0.42,0.82,0.83], [1.00,0.85,0.00],
+ [0.33,0.67,0.47], [1.00,0.38,0.60], [0.57,0.67,0.33], [0.5,0.2,0.0],
+ [0.71,0.82,0.41], [0.0,0.2,0.5]]
+
+
+ # Replace 'eachPop' with list of pops
+ if 'eachPop' in include:
+ include.remove('eachPop')
+ for pop in sim.net.allPops: include.append(pop)
+
+ # time range
+ if timeRange is None:
+ timeRange = [0,sim.cfg.duration]
+
+ histData = []
+
+ # create fig
+ fig,ax1 = subplots(figsize=figSize)
+ fontsiz = 12
+
+ # Plot separate line for each entry in include
+ for iplot,subset in enumerate(include):
+ cells, cellGids, netStimPops = getCellsInclude([subset])
+ numNetStims = 0
+
+ # Select cells to include
+ if len(cellGids) > 0:
+ try:
+ spkinds,spkts = zip(*[(spkgid,spkt) for spkgid,spkt in zip(sim.allSimData['spkid'],sim.allSimData['spkt']) if spkgid in cellGids])
+ except:
+ spkinds,spkts = [],[]
+ else:
+ spkinds,spkts = [],[]
+
+
+ # Add NetStim spikes
+ spkts, spkinds = list(spkts), list(spkinds)
+ numNetStims = 0
+ for netStimPop in netStimPops:
+ if 'stims' in sim.allSimData:
+ cellStims = [cellStim for cell,cellStim in sim.allSimData['stims'].iteritems() if netStimPop in cellStim]
+ if len(cellStims) > 0:
+ lastInd = max(spkinds) if len(spkinds)>0 else 0
+ spktsNew = [spkt for cellStim in cellStims for spkt in cellStim[netStimPop] ]
+ spkindsNew = [lastInd+1+i for i,cellStim in enumerate(cellStims) for spkt in cellStim[netStimPop]]
+ spkts.extend(spktsNew)
+ spkinds.extend(spkindsNew)
+ numNetStims += len(cellStims)
+
+ histo = histogram(spkts, bins = arange(timeRange[0], timeRange[1], binSize))
+ histoT = histo[1][:-1]+binSize/2
+ histoCount = histo[0]
+
+ histData.append(histoCount)
+
+ color = colorList[iplot%len(colorList)]
+
+ if not overlay:
+ subplot(len(include),1,iplot+1) # if subplot, create new subplot
+ title (str(subset), fontsize=fontsiz)
+ color = 'blue'
+ power = psd(histoCount, Fs=Fs, linewidth=1.0, color=color)
+ #h=axes()
+ #h.set_yticklabels([])
+
+ if iplot == 0:
+ xlabel('Frequency (Hz)', fontsize=fontsiz)
+ ylabel('Power', fontsize=fontsiz) # add yaxis in opposite side
+ xlim([0, Fs/2])
+
+ if len(include) < 5: # if apply tight_layout with many subplots it inverts the y-axis
+ try:
+ tight_layout()
+ except:
+ pass
+
+ # Add legend
+ if overlay:
+ for i,subset in enumerate(include):
+ plot(0,0,color=colorList[i%len(colorList)],label=str(subset))
+ legend(fontsize=fontsiz, bbox_to_anchor=(1.04, 1), loc=2, borderaxespad=0.)
+ maxLabelLen = min(10,max([len(str(l)) for l in include]))
+ subplots_adjust(right=(0.9-0.012*maxLabelLen))
+
+
+ # save figure data
+ if saveData:
+ figData = {'histData': histData, 'histT': histoT, 'include': include, 'timeRange': timeRange, 'binSize': binSize,
+ 'saveData': saveData, 'saveFig': saveFig, 'showFig': showFig}
+
+ _saveFigData(figData, saveData, 'spikeHist')
+
+ # save figure
+ if saveFig:
+ if isinstance(saveFig, basestring):
+ filename = saveFig
+ else:
+ filename = sim.cfg.filename+'_'+'spikePSD.png'
+ savefig(filename)
+
+ # show fig
+ if showFig: _showFigure()
+
+ return fig, power
+
+
######################################################################################################################################################
## Plot recorded cell traces (V, i, g, etc.)
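
plotSpikePSD above works in two steps: all selected spike times are binned into a histogram
(binSize ms per bin, so the binned signal has a sampling rate of 1000/binSize Hz), and the binned
counts are passed to matplotlib's psd() to estimate the spectrum, which is consistent with the
default Fs of 200 for the default 5 ms bins. A stripped-down sketch of the same pipeline on a
synthetic, 10 Hz modulated spike train (all numbers are illustrative):

    # Stripped-down sketch of the binning + PSD estimate used by plotSpikePSD
    # (synthetic spike times; binSize and Fs mirror the defaults above).
    from matplotlib.pylab import arange, histogram, psd, show
    import numpy as np

    duration = 2000.0                 # ms
    binSize = 5.0                     # ms per bin
    Fs = 1000.0 / binSize             # sampling rate of the binned signal (200 Hz)

    # synthetic population spike train with a ~10 Hz rate modulation
    rng = np.random.RandomState(1)
    spkts = [t for t in rng.uniform(0, duration, 5000)
             if rng.rand() < 0.5 * (1 + np.sin(2 * np.pi * 10.0 * t / 1000.0))]

    histoCount, edges = histogram(spkts, bins=arange(0, duration, binSize))
    Pxx, freqs = psd(histoCount, Fs=Fs)   # PSD of the binned counts; expect a peak near 10 Hz
    show()
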
From f5d111e6306ad18c36114741a50f2d8a9831a5ee Mon Sep 17 00:00:00 2001
From: salvadord
Date: Mon, 7 Nov 2016 14:40:25 -0500
Subject: [PATCH 26/54] renamed function to plotRatePSD - plot firing rate
power spectral density
---
CHANGES.md | 2 +-
examples/HybridTut/HybridTut.py | 2 +-
netpyne/analysis.py | 5 +++--
3 files changed, 5 insertions(+), 4 deletions(-)
diff --git a/CHANGES.md b/CHANGES.md
index 992717cf0..05e8635db 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,6 +1,6 @@
# Version 0.6.4
-- Added function plotSpikePSD to plot power spectral density of spiking data
+- Added function plotRatePSD to plot firing rate power spectral density
- Added plotShape to plot 3D morphology of cell and synapse locations
diff --git a/examples/HybridTut/HybridTut.py b/examples/HybridTut/HybridTut.py
index 3d1e4e633..2f99a2f69 100644
--- a/examples/HybridTut/HybridTut.py
+++ b/examples/HybridTut/HybridTut.py
@@ -124,5 +124,5 @@
# Analysis and plotting
simConfig.analysis['plotRaster'] = {'orderInverse': False} #True # Whether or not to plot a raster
#simConfig.analysis['plotTraces'] = {'include': [1,51]} # plot recorded traces for this list of cells
-simConfig.analysis['plotSpikePSD'] = {'include': ['allCells', 'PYR_HH', 'PYR_Izhi']} # plot recorded traces for this list of cells
+simConfig.analysis['plotRatePSD'] = {'include': ['allCells', 'PYR_HH', 'PYR_Izhi']} # plot firing rate PSD for these populations
diff --git a/netpyne/analysis.py b/netpyne/analysis.py
index 717456402..940b4f3cf 100644
--- a/netpyne/analysis.py
+++ b/netpyne/analysis.py
@@ -523,7 +523,7 @@ def plotSpikeHist (include = ['allCells', 'eachPop'], timeRange = None, binSize
def plotSpikePSD (include = ['allCells', 'eachPop'], timeRange = None, binSize = 5, Fs = 200, overlay=True,
figSize = (10,8), saveData = None, saveFig = None, showFig = True):
'''
- Plot spike histogram
+ Plot firing rate power spectral density (PSD)
- include (['all',|'allCells','allNetStims',|,120,|,'E1'|,('L2', 56)|,('L5',[4,5,6])]): List of data series to include.
Note: one line per item, not grouped (default: ['allCells', 'eachPop'])
- timeRange ([start:stop]): Time range of spikes shown; if None shows all (default: None)
@@ -542,7 +542,7 @@ def plotSpikePSD (include = ['allCells', 'eachPop'], timeRange = None, binSize =
- Returns figure handle
'''
- print('Plotting spikes power spectral density (PSD) ...')
+ print('Plotting firing rate power spectral density (PSD) ...')
colorList = [[0.42,0.67,0.84], [0.90,0.76,0.00], [0.42,0.83,0.59], [0.90,0.32,0.00],
[0.34,0.67,0.67], [0.90,0.59,0.00], [0.42,0.82,0.83], [1.00,0.85,0.00],
@@ -597,6 +597,7 @@ def plotSpikePSD (include = ['allCells', 'eachPop'], timeRange = None, binSize =
histo = histogram(spkts, bins = arange(timeRange[0], timeRange[1], binSize))
histoT = histo[1][:-1]+binSize/2
histoCount = histo[0]
+ histoCount = histoCount * (1000.0 / binSize) / (len(cellGids)+numNetStims) # convert to rates
histData.append(histoCount)
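
The single line added above converts each bin from a raw spike count to an average firing rate per
cell before the PSD is computed: with binSize in ms, multiplying by 1000/binSize turns the count
into spikes per second for the whole group, and dividing by the number of cells (plus NetStim
sources) gives Hz per cell. A quick worked example with made-up numbers:

    # Worked example of the count-to-rate conversion added above (illustrative numbers).
    binSize = 5.0          # ms
    numCells = 200         # len(cellGids) + numNetStims in the real code
    countInBin = 12        # spikes from the whole group in one 5 ms bin

    rate = countInBin * (1000.0 / binSize) / numCells
    print(rate)            # 12 spikes / 5 ms / 200 cells = 12.0 Hz average rate per cell
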
From 9e62ab044c1e9d696bd5047eef2f12c717399d21 Mon Sep 17 00:00:00 2001
From: salvadord
Date: Tue, 8 Nov 2016 10:11:32 -0500
Subject: [PATCH 27/54] Added sleep after batch so there is enough time to submit
 the last job
---
netpyne/analysis.py | 6 +++---
netpyne/batch.py | 2 ++
2 files changed, 5 insertions(+), 3 deletions(-)
diff --git a/netpyne/analysis.py b/netpyne/analysis.py
index 940b4f3cf..e2e6d8e18 100644
--- a/netpyne/analysis.py
+++ b/netpyne/analysis.py
@@ -10,7 +10,7 @@
if __gui__:
from matplotlib.pylab import transpose, nanmax, nanmin, errstate, bar, histogram, floor, ceil, yticks, arange, gca, scatter, figure, hold, subplot, axes, shape, imshow, \
- colorbar, plot, xlabel, ylabel, title, xlim, ylim, clim, show, zeros, legend, savefig, psd, ion, subplots_adjust, subplots, tight_layout, get_fignums, text
+ colorbar, plot, xlabel, ylabel, title, xlim, ylim, clim, show, zeros, legend, savefig, psd, ion, subplots_adjust, subplots, tight_layout, get_fignums, text, log10
from matplotlib import gridspec
from scipy import size, array, linspace, ceil, cumsum
@@ -520,7 +520,7 @@ def plotSpikeHist (include = ['allCells', 'eachPop'], timeRange = None, binSize
######################################################################################################################################################
 ## Plot spikes power spectral density (PSD)
######################################################################################################################################################
-def plotSpikePSD (include = ['allCells', 'eachPop'], timeRange = None, binSize = 5, Fs = 200, overlay=True,
+def plotRatePSD (include = ['allCells', 'eachPop'], timeRange = None, binSize = 5, Fs = 200, overlay=True,
figSize = (10,8), saveData = None, saveFig = None, showFig = True):
'''
Plot firing rate power spectral density (PSD)
@@ -615,7 +615,7 @@ def plotSpikePSD (include = ['allCells', 'eachPop'], timeRange = None, binSize =
if iplot == 0:
xlabel('Frequency (Hz)', fontsize=fontsiz)
ylabel('Power', fontsize=fontsiz) # add yaxis in opposite side
- xlim([0, Fs/2])
+ xlim([0, (Fs/2)])
if len(include) < 5: # if apply tight_layout with many subplots it inverts the y-axis
try:
diff --git a/netpyne/batch.py b/netpyne/batch.py
index 35bd115a3..32d82c4ee 100644
--- a/netpyne/batch.py
+++ b/netpyne/batch.py
@@ -127,6 +127,8 @@ def run(self):
input.write(jobString)
print jobString+'\n'
input.close()
+
+ sleep(10) # give time for last job to get on queue
From d8d51587b42c617256807c326b6d6ab789dcdf56 Mon Sep 17 00:00:00 2001
From: salvadord
Date: Wed, 9 Nov 2016 16:12:03 -0500
Subject: [PATCH 28/54] debugged subcellular syn distributions
---
netpyne/analysis.py | 21 ++++++++++++++-------
netpyne/cell.py | 8 +++++++-
netpyne/network.py | 44 +++++++++++++++++++++++++-------------------
3 files changed, 46 insertions(+), 27 deletions(-)
diff --git a/netpyne/analysis.py b/netpyne/analysis.py
index e2e6d8e18..a33991bef 100644
--- a/netpyne/analysis.py
+++ b/netpyne/analysis.py
@@ -148,7 +148,7 @@ def getCellsInclude(include):
## Raster plot
######################################################################################################################################################
def plotRaster (include = ['allCells'], timeRange = None, maxSpikes = 1e8, orderBy = 'gid', orderInverse = False, labels = 'legend', popRates = False,
- spikeHist = None, spikeHistBin = 5, syncLines = False, figSize = (10,8), saveData = None, saveFig = None, showFig = True):
+ spikeHist = None, spikeHistBin = 5, syncLines = False, lw = 2, marker = '|', figSize = (10,8), saveData = None, saveFig = None, showFig = True):
'''
Raster plot of network cells
- include (['all',|'allCells',|'allNetStims',|,120,|,'E1'|,('L2', 56)|,('L5',[4,5,6])]): Cells to include (default: 'allCells')
@@ -161,6 +161,8 @@ def plotRaster (include = ['allCells'], timeRange = None, maxSpikes = 1e8, order
- spikeHist (None|'overlay'|'subplot'): overlay line over raster showing spike histogram (spikes/bin) (default: False)
- spikeHistBin (int): Size of bin in ms to use for histogram (default: 5)
     - syncLines (True|False): calculate synchrony measure and plot vertical lines for each spike to evidence synchrony (default: False)
+ - lw (integer): Line width for each spike (default: 2)
+ - marker (char): Marker for each spike (default: '|')
- figSize ((width, height)): Size of figure (default: (10,8))
- saveData (None|True|'fileName'): File name where to save the final data used to generate the figure;
if set to True uses filename from simConfig (default: None)
@@ -269,7 +271,7 @@ def plotRaster (include = ['allCells'], timeRange = None, maxSpikes = 1e8, order
if spikeHist == 'subplot':
gs = gridspec.GridSpec(2, 1,height_ratios=[2,1])
ax1=subplot(gs[0])
- ax1.scatter(spkts, spkinds, 10, linewidths=2, marker='|', color = spkgidColors) # Create raster
+ ax1.scatter(spkts, spkinds, 10, linewidths=lw, marker=marker, color = spkgidColors) # Create raster
ax1.set_xlim(timeRange)
# Plot stats
@@ -818,7 +820,7 @@ def invertDictMapping(d):
######################################################################################################################################################
## Plot cell shape
######################################################################################################################################################
-def plotShape (showSyns = True, figSize = (10,8), saveData = None, saveFig = None, showFig = True):
+def plotShape (showSyns = True, include = [], style = '.', siz=10, figSize = (10,8), saveData = None, saveFig = None, showFig = True):
'''
Plot 3D cell shape using NEURON Interview PlotShape
- showSyns (True|False): Show synaptic connections in 3D
@@ -835,15 +837,20 @@ def plotShape (showSyns = True, figSize = (10,8), saveData = None, saveFig = Non
from neuron import h, gui
fig = h.Shape()
+ secList = h.SectionList()
if showSyns:
color = 2 # red
- style = 'o'
- siz = 10
- for cell in sim.net.cells:
+ for cell in [c for c in sim.net.cells if c.tags['popLabel'] in include]:
for sec in cell.secs.values():
+ sec['hSec'].push()
+ secList.append()
+ h.pop_section()
for synMech in sec['synMechs']:
if synMech['hSyn']:
- fig.point_mark(synMech['hSyn'], color)
+ fig.point_mark(synMech['hSyn'], color, style, siz)
+
+ fig.observe(secList)
+ fig.flush()
# save figure
if saveFig:
diff --git a/netpyne/cell.py b/netpyne/cell.py
index aa3df0022..be9a7ebce 100644
--- a/netpyne/cell.py
+++ b/netpyne/cell.py
@@ -300,7 +300,13 @@ def addConnsNEURONObj(self):
if not synMech:
synMech = self.addSynMech(conn['synMech'], conn['sec'], conn['loc'])
#continue # go to next conn
- postTarget = synMech['hSyn']
+ try:
+ postTarget = synMech['hSyn']
+ except:
+ print 'tags:',self.tags
+ print 'synMechs:',self.secs[conn['sec']]['synMechs']
+ print 'conn:', conn
+ exit()
# create NetCon
if conn['preGid'] == 'NetStim':
diff --git a/netpyne/network.py b/netpyne/network.py
index 33a0d3fa1..358c50b9e 100644
--- a/netpyne/network.py
+++ b/netpyne/network.py
@@ -7,7 +7,7 @@
Contributors: salvadordura@gmail.com
"""
-from matplotlib.pylab import array, sin, cos, tan, exp, sqrt, mean, inf, rand, dstack, unravel_index, argsort, zeros, ceil
+from matplotlib.pylab import array, sin, cos, tan, exp, sqrt, mean, inf, rand, dstack, unravel_index, argsort, zeros, ceil, copy
from random import seed, random, randint, sample, uniform, triangular, gauss, betavariate, expovariate, gammavariate
from time import time, sleep
from numbers import Number
@@ -250,10 +250,10 @@ def _interpolateSegmentSigma(self, cell, secList, gridX, gridY, gridSigma):
jys = array(distY).argsort()[:2]
i1,i2,j1,j2 = min(ixs), max(ixs), min(jys), max(jys)
x1,x2,y1,y2 = gridX[i1], gridX[i2], gridY[j1], gridY[j2]
- sigma_x1_y1 = gridSigma[i1,j1]
- sigma_x1_y2 = gridSigma[i1,j2]
- sigma_x2_y1 = gridSigma[i2,j1]
- sigma_x2_y2 = gridSigma[i2,j2]
+ sigma_x1_y1 = gridSigma[i1][j1]
+ sigma_x1_y2 = gridSigma[i1][j2]
+ sigma_x2_y1 = gridSigma[i2][j1]
+ sigma_x2_y2 = gridSigma[i2][j2]
if x1 == x2 or y1 == y2:
print "ERROR in closest grid points: ", secName, x1, x2, y1, y2
@@ -351,21 +351,28 @@ def subcellularConn(self, allCellTags, allPopTags):
elif subConnParam['density']['type'] == '1Dmap': # 1D
segNumSyn = self._interpolateSegmentSigma(postCell, secList, None, gridY, gridSigma) # move method to Cell!
- totSyn = sum([sum(nsyn) for nsyn in segNumSyn.values()])
- scaleNumSyn = float(len(conns))/float(totSyn) if totSyn>0 else 0.0
- for sec in segNumSyn: segNumSyn[sec] = [int(round(x * scaleNumSyn)) for x in segNumSyn[sec]]
+ totSyn = sum([sum(nsyn) for nsyn in segNumSyn.values()]) # summed density
+ scaleNumSyn = float(len(conns))/float(totSyn) if totSyn>0 else 0.0
+ diffList = []
+ for sec in segNumSyn:
+ for seg,x in enumerate(segNumSyn[sec]):
+ orig = float(x*scaleNumSyn)
+ scaled = int(round(x * scaleNumSyn))
+ segNumSyn[sec][seg] = scaled
+ diff = orig - scaled
+ if diff > 0:
+ diffList.append([diff,sec,seg])
+
totSynRescale = sum([sum(nsyn) for nsyn in segNumSyn.values()])
- if totSynRescale < len(conns): # if missing syns, add extra
+ # if syns are missing after rounding, add 1 to the segments with the largest rounding loss
+ if totSynRescale < len(conns):
extraSyns = len(conns)-totSynRescale
- extraAdded = 0
- for sec in segNumSyn.values():
- if extraAdded == extraSyns: break
- for nsyn in sec:
- if nsyn > 0:
- nsyn = nsyn + 1
- extraAdded = extraAdded + 1
- if extraAdded == extraSyns: break
+ diffList = sorted(diffList, key=lambda l:l[0], reverse=True)
+ for i in range(extraSyns):
+ sec = diffList[i][1]
+ seg = diffList[i][2]
+ segNumSyn[sec][seg] += 1
# convert to list so can serialize and save
subConnParam['density']['gridY'] = list(subConnParam['density']['gridY'])
@@ -411,8 +418,7 @@ def subcellularConn(self, allCellTags, allPopTags):
connGroup['sec'] = newSec
connGroup['loc'] = newLoc
-
- sim.pc.barrier()
+ sim.pc.barrier()
###############################################################################
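The hunk above replaces blind rounding with a largest-remainder scheme: per-segment densities are scaled so their sum matches the number of connections, rounded, and any shortfall is given to the segments that lost the most in rounding. A standalone sketch of the same idea (hypothetical names, independent of netpyne's data structures):

    def distribute_syns(seg_density, n_conns):
        # scale per-segment densities so the rounded counts sum to n_conns
        tot = float(sum(sum(dens) for dens in seg_density.values()))
        scale = n_conns / tot if tot > 0 else 0.0
        counts, remainders = {}, []
        for sec, dens in seg_density.items():
            counts[sec] = []
            for i, d in enumerate(dens):
                exact = d * scale
                counts[sec].append(int(round(exact)))
                remainders.append((exact - counts[sec][i], sec, i))  # rounding loss
        shortfall = n_conns - sum(sum(c) for c in counts.values())
        # hand the missing synapses to the segments with the largest rounding loss
        for _, sec, i in sorted(remainders, reverse=True)[:max(shortfall, 0)]:
            counts[sec][i] += 1
        return counts

    # e.g. distribute_syns({'dend_0': [0.2, 0.1, 0.5], 'dend_1': [0.7]}, 10)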
From 3629fa3206ae48d0b42e02750aa22f99f92c134c Mon Sep 17 00:00:00 2001
From: salvadord
Date: Thu, 10 Nov 2016 23:58:52 -0500
Subject: [PATCH 29/54] temporarily fixed y pt3d for SFN16 simulation
---
netpyne/cell.py | 13 +++++++++----
netpyne/network.py | 14 ++------------
2 files changed, 11 insertions(+), 16 deletions(-)
diff --git a/netpyne/cell.py b/netpyne/cell.py
index be9a7ebce..055a18bcf 100644
--- a/netpyne/cell.py
+++ b/netpyne/cell.py
@@ -184,7 +184,12 @@ def createNEURONObj (self, prop):
if 'pt3d' in sectParams['geom']:
h.pt3dclear(sec=sec['hSec'])
x = self.tags['x']
- y = -self.tags['y'] # Neuron y-axis positive = upwards, so assume pia=0 and cortical depth = neg
+ #y = -self.tags['y'] # Neuron y-axis positive = upwards, so assume pia=0 and cortical depth = neg
+
+ ############################################################################
+ y = -735 # TEMPORARILY FIX THIS FOR SFN16 EXPERIMENT!! - REMEMBER TO REMOVE!!!!!!!
+ ############################################################################
+
z = self.tags['z']
for pt3d in sectParams['geom']['pt3d']:
h.pt3dadd(x+pt3d[0], y+pt3d[1], z+pt3d[2], pt3d[3], sec=sec['hSec'])
@@ -303,9 +308,9 @@ def addConnsNEURONObj(self):
try:
postTarget = synMech['hSyn']
except:
- print 'tags:',self.tags
- print 'synMechs:',self.secs[conn['sec']]['synMechs']
- print 'conn:', conn
+ print '\nError: no synMech available for conn: ', conn
+ print ' cell tags: ',self.tags
+ print ' cell synMechs: ',self.secs[conn['sec']]['synMechs']
exit()
# create NetCon
diff --git a/netpyne/network.py b/netpyne/network.py
index 358c50b9e..321453eb1 100644
--- a/netpyne/network.py
+++ b/netpyne/network.py
@@ -320,18 +320,8 @@ def subcellularConn(self, allCellTags, allPopTags):
else:
conns = allConns
- # if sectionList
- if isinstance(subConnParam.get('sec'), str) and subConnParam.get('sec') in postCell.secLists:
- secList = list(postCell.secLists[subConnParam['sec']])
- elif isinstance(subConnParam['sec'], list):
- for item in subConnParam['sec']:
- secList = []
- if item in postCell.secLists:
- secList.extend(postCell.secLists[item])
- else:
- secList.append(item)
- else:
- secList = [subConnParam['sec']]
+ # set sections to be used
+ secList = postCell._setConnSections(subConnParam)
# Uniform distribution
if subConnParam.get('density', None) == 'uniform':
From 4b20850c450b8403913dbc967876778eee20cd29 Mon Sep 17 00:00:00 2001
From: salvadord
Date: Sun, 13 Nov 2016 11:43:31 -0800
Subject: [PATCH 30/54] added smoothing to psd plot
---
examples/HybridTut/HybridTut.py | 2 +-
netpyne/analysis.py | 92 +++++++++++++++++++++++++++++----
2 files changed, 83 insertions(+), 11 deletions(-)
diff --git a/examples/HybridTut/HybridTut.py b/examples/HybridTut/HybridTut.py
index 2f99a2f69..c300398c0 100644
--- a/examples/HybridTut/HybridTut.py
+++ b/examples/HybridTut/HybridTut.py
@@ -124,5 +124,5 @@
# Analysis and plotting
simConfig.analysis['plotRaster'] = {'orderInverse': False} #True # Whether or not to plot a raster
#simConfig.analysis['plotTraces'] = {'include': [1,51]} # plot recorded traces for this list of cells
-simConfig.analysis['plotRatePSD'] = {'include': ['allCells', 'PYR_HH', 'PYR_Izhi']} # plot recorded traces for this list of cells
+simConfig.analysis['plotRatePSD'] = {'include': ['allCells', 'PYR_HH', 'PYR_Izhi'], 'Fs': 200, 'smooth': 10} # plot firing rate PSD for this list of cells/pops
diff --git a/netpyne/analysis.py b/netpyne/analysis.py
index a33991bef..408f18779 100644
--- a/netpyne/analysis.py
+++ b/netpyne/analysis.py
@@ -10,9 +10,9 @@
if __gui__:
from matplotlib.pylab import transpose, nanmax, nanmin, errstate, bar, histogram, floor, ceil, yticks, arange, gca, scatter, figure, hold, subplot, axes, shape, imshow, \
- colorbar, plot, xlabel, ylabel, title, xlim, ylim, clim, show, zeros, legend, savefig, psd, ion, subplots_adjust, subplots, tight_layout, get_fignums, text, log10
+ colorbar, plot, xlabel, ylabel, title, xlim, ylim, clim, show, zeros, legend, savefig, ion, subplots_adjust, subplots, tight_layout, get_fignums, text, log10
from matplotlib import gridspec
-
+ from matplotlib import mlab
from scipy import size, array, linspace, ceil, cumsum
from numbers import Number
import math
@@ -84,6 +84,70 @@ def _saveFigData(figData, fileName, type=''):
print 'File extension to save figure data not recognized: %s'%(ext)
+import numpy
+
+
+######################################################################################################################################################
+## Smooth 1d signal
+######################################################################################################################################################
+def _smooth1d(x,window_len=11,window='hanning'):
+ """smooth the data using a window with requested size.
+
+ This method is based on the convolution of a scaled window with the signal.
+ The signal is prepared by introducing reflected copies of the signal
+ (with the window size) in both ends so that transient parts are minimized
+ in the beginning and end part of the output signal.
+
+ input:
+ x: the input signal
+ window_len: the dimension of the smoothing window; should be an odd integer
+ window: the type of window from 'flat', 'hanning', 'hamming', 'bartlett', 'blackman'
+ flat window will produce a moving average smoothing.
+
+ output:
+ the smoothed signal
+
+ example:
+
+ t=linspace(-2,2,41)
+ x=sin(t)+randn(len(t))*0.1
+ y=smooth(x)
+
+ see also:
+
+ numpy.hanning, numpy.hamming, numpy.bartlett, numpy.blackman, numpy.convolve
+ scipy.signal.lfilter
+
+ TODO: the window parameter could be the window itself if an array instead of a string
+ NOTE: length(output) != length(input), to correct this: return y[(window_len/2-1):-(window_len/2)] instead of just y.
+ """
+
+ if x.ndim != 1:
+ raise ValueError, "smooth only accepts 1 dimension arrays."
+
+ if x.size < window_len:
+ raise ValueError, "Input vector needs to be bigger than window size."
+
+
+ if window_len<3:
+ return x
+
+
+ if not window in ['flat', 'hanning', 'hamming', 'bartlett', 'blackman']:
+ raise ValueError, "Window is on of 'flat', 'hanning', 'hamming', 'bartlett', 'blackman'"
+
+
+ s=numpy.r_[x[window_len-1:0:-1],x,x[-1:-window_len:-1]]
+ #print(len(s))
+ if window == 'flat': #moving average
+ w=numpy.ones(window_len,'d')
+ else:
+ w=eval('numpy.'+window+'(window_len)')
+
+ y=numpy.convolve(w/w.sum(),s,mode='valid')
+ return y[(window_len/2-1):-(window_len/2)]
+
+
######################################################################################################################################################
## Synchrony measure
######################################################################################################################################################
@@ -522,7 +586,7 @@ def plotSpikeHist (include = ['allCells', 'eachPop'], timeRange = None, binSize
######################################################################################################################################################
## Plot firing rate power spectral density (PSD)
######################################################################################################################################################
-def plotRatePSD (include = ['allCells', 'eachPop'], timeRange = None, binSize = 5, Fs = 200, overlay=True,
+def plotRatePSD (include = ['allCells', 'eachPop'], timeRange = None, binSize = 5, Fs = 200, smooth = 0, overlay=True,
figSize = (10,8), saveData = None, saveFig = None, showFig = True):
'''
Plot firing rate power spectral density (PSD)
@@ -531,6 +595,7 @@ def plotRatePSD (include = ['allCells', 'eachPop'], timeRange = None, binSize =
- timeRange ([start:stop]): Time range of spikes shown; if None shows all (default: None)
- binSize (int): Size in ms of spike bins (default: 5)
- Fs (float): PSD sampling frequency used to calculate the Fourier frequencies (default: 200)
+ - smooth (int): Window size for smoothing; no smoothing if 0 (default: 0)
- overlay (True|False): Whether to overlay the data lines or plot in separate subplots (default: True)
- graphType ('line'|'bar'): Type of graph to use (line graph or bar plot) (default: 'line')
- yaxis ('rate'|'count'): Units of y axis (firing rate in Hz, or spike count) (default: 'rate')
@@ -610,14 +675,21 @@ def plotRatePSD (include = ['allCells', 'eachPop'], timeRange = None, binSize =
title (str(subset), fontsize=fontsiz)
color = 'blue'
- power = psd(histoCount, Fs=Fs, linewidth=1.0, color=color)
- #h=axes()
- #h.set_yticklabels([])
+ power = mlab.psd(histoCount, Fs=Fs, NFFT=256, detrend=mlab.detrend_none, window=mlab.window_hanning,
+ noverlap=0, pad_to=None, sides='default', scale_by_freq=None)
- if iplot == 0:
- xlabel('Frequency (Hz)', fontsize=fontsiz)
- ylabel('Power', fontsize=fontsiz) # add yaxis in opposite side
- xlim([0, (Fs/2)])
+ if smooth:
+ signal = _smooth1d(10*log10(power[0]), smooth)
+ else:
+ signal = 10*log10(power[0])
+ freqs = power[1]
+
+
+ plot(freqs, signal, linewidth=1.5, color=color)
+
+ xlabel('Frequency (Hz)', fontsize=fontsiz)
+ ylabel('Power Spectral Density (dB/Hz)', fontsize=fontsiz) # add yaxis in opposite side
+ xlim([0, (Fs/2)-1])
if len(include) < 5: # if apply tight_layout with many subplots it inverts the y-axis
try:
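In short, the replacement above computes the rate PSD with matplotlib's mlab.psd, converts power to dB (10*log10), optionally smooths it with the _smooth1d helper added earlier in this patch, and plots power vs frequency. A condensed sketch of that pipeline (histoCount is the binned spike-count signal, as in plotRatePSD):

    import numpy as np
    from matplotlib import mlab

    def rate_psd(histoCount, Fs=200, smooth=0):
        # power spectral density of the binned rate signal
        power, freqs = mlab.psd(histoCount, Fs=Fs, NFFT=256, detrend=mlab.detrend_none,
                                window=mlab.window_hanning, noverlap=0)
        signal = 10 * np.log10(power)           # convert to dB
        if smooth:
            signal = _smooth1d(signal, smooth)  # helper defined above; may trim ~1 sample
        return freqs, signal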
From c2056cefdfc33b525352478caeb75bff1f38af68 Mon Sep 17 00:00:00 2001
From: Padraig Gleeson
Date: Mon, 14 Nov 2016 12:14:42 +0000
Subject: [PATCH 31/54] Regenerated test NeuroML2 files with latest libNeuroML
& pyNeuroML libraries
---
.gitignore | 1 +
examples/NeuroMLImport/LEMS_SimpleNet.xml | 2 +-
examples/NeuroMLImport/SimpleNet.net.nml | 6 +++---
3 files changed, 5 insertions(+), 4 deletions(-)
diff --git a/.gitignore b/.gitignore
index 0efadb380..5956f0a85 100644
--- a/.gitignore
+++ b/.gitignore
@@ -38,3 +38,4 @@ umac
/examples/NeuroMLImport/LEMS_SimpleNet_nrn.py
/examples/NeuroMLImport/LEMS_SimpleNet_pynn.py
/examples/NeuroMLImport/LEMS_SimpleNet_pynn.py_main.json
+/nb-configuration.xml
diff --git a/examples/NeuroMLImport/LEMS_SimpleNet.xml b/examples/NeuroMLImport/LEMS_SimpleNet.xml
index f030ebade..c062a9d41 100644
--- a/examples/NeuroMLImport/LEMS_SimpleNet.xml
+++ b/examples/NeuroMLImport/LEMS_SimpleNet.xml
@@ -2,7 +2,7 @@
diff --git a/examples/NeuroMLImport/SimpleNet.net.nml b/examples/NeuroMLImport/SimpleNet.net.nml
index e7559304c..69952bb5f 100644
--- a/examples/NeuroMLImport/SimpleNet.net.nml
+++ b/examples/NeuroMLImport/SimpleNet.net.nml
@@ -1,9 +1,9 @@
-This NeuroML 2 file was generated by OpenCortex v0.0.6 using:
- libNeuroML v0.2.20
- pyNeuroML v0.2.0
+This NeuroML 2 file was generated by OpenCortex v0.0.7 using:
+ libNeuroML v0.2.23
+ pyNeuroML v0.2.2
From 70da43e4d36f86282ec2df0d3b4224d5abf95079 Mon Sep 17 00:00:00 2001
From: Padraig Gleeson
Date: Mon, 14 Nov 2016 12:23:02 +0000
Subject: [PATCH 32/54] Better support for PyNN cells specified in
LEMS/NeuroML2: https://www.neuroml.org/NeuroML2CoreTypes/PyNN.html
---
netpyne/cell.py | 4 ++--
netpyne/simFuncs.py | 9 +++++++++
2 files changed, 11 insertions(+), 2 deletions(-)
diff --git a/netpyne/cell.py b/netpyne/cell.py
index f4636c868..1a70c136e 100644
--- a/netpyne/cell.py
+++ b/netpyne/cell.py
@@ -590,8 +590,8 @@ def addConn (self, params, netStimParams = None):
sec = params['sec'] if pointp else synMechSecs[i]
loc = params['loc'] if pointp else synMechLocs[i]
preGid = netStimParams['source']+' NetStim' if netStimParams else params['preGid']
- print(' Created connection preGid=%s, postGid=%s, sec=%s, loc=%.4g, synMech=%s, weight=%.4g, delay=%.1f'%
- (preGid, self.gid, sec, loc, params['synMech'], weights[i], delays[i]))
+ print(' Created connection preGid=%s, postGid=%s, sec=%s, loc=%.4g, synMech=%s, weight=%.4g, delay=%.2f, threshold=%s'%
+ (preGid, self.gid, sec, loc, params['synMech'], weights[i], delays[i],params['threshold']))
def modifyConns (self, params):
diff --git a/netpyne/simFuncs.py b/netpyne/simFuncs.py
index 3dd8d42a1..a527d3b56 100644
--- a/netpyne/simFuncs.py
+++ b/netpyne/simFuncs.py
@@ -2019,6 +2019,13 @@ def handlePopulation(self, population_id, component, size, component_obj):
#print("c: %s, area: %s, sc: %s"%(capTotSI, area, specCapNeu))
+ soma['geom']['cm'] = specCapNeu
+ # PyNN cells
+ elif hasattr(component_obj,'cm') and 'IF_c' in str(type(component_obj)):
+ capTotSI = component_obj.cm * 1e-9
+ area = math.pi * default_diam * default_diam
+ specCapNeu = 10e13 * capTotSI / area
+
soma['geom']['cm'] = specCapNeu
else:
@@ -2226,6 +2233,8 @@ def importNeuroML2(fileName, simConfig):
threshold = 0
elif hasattr(preComp,'thresh'):
threshold = pynml.convert_to_units(preComp.thresh,'mV')
+ elif hasattr(preComp,'v_thresh'):
+ threshold = float(preComp.v_thresh) # PyNN cells...
else:
threshold = 0
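The PyNN branch above treats cm as a total capacitance in nF and converts it to a specific capacitance (uF/cm2) for a soma whose surface is approximated as pi*diam^2 (cylinder with L = diam, in um), and falls back to v_thresh for the spike threshold. A worked sketch of the capacitance conversion with hypothetical numbers (default_diam = 10 um, cm = 1 nF):

    import math

    default_diam = 10.0                       # um (hypothetical)
    cm_pynn = 1.0                             # nF, total cell capacitance (hypothetical)
    capTotSI = cm_pynn * 1e-9                 # nF -> F
    area_um2 = math.pi * default_diam ** 2    # cylinder with L = diam, in um^2
    specCap = 1e14 * capTotSI / area_um2      # F/um^2 -> uF/cm^2 (1 F/um^2 = 1e14 uF/cm^2)
    # specCap ~ 318 uF/cm2 for these hypothetical numbers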
From b918d47fde4d21ae009da6b42e5421ece9d64014 Mon Sep 17 00:00:00 2001
From: Padraig Gleeson
Date: Wed, 16 Nov 2016 10:16:08 +0000
Subject: [PATCH 33/54] Changes to travis.yml to test current examples
---
.travis.yml | 39 +++++++++++++++++++--------------------
1 file changed, 19 insertions(+), 20 deletions(-)
diff --git a/.travis.yml b/.travis.yml
index 4a01e4f24..2680f7cec 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -18,10 +18,6 @@ python: 2.7
env:
system_site_packages: true
-before_script:
- - "export DISPLAY=:99.0"
- - "sh -e /etc/init.d/xvfb start"
- - sleep 3 # give xvfb some time to start
install:
- pip install git+https://github.com/OpenSourceBrain/osb-model-validation
@@ -33,29 +29,32 @@ install:
script:
- - cp examples/HHTut/HHTut.py doc/source/code/
- - cd doc/source/code/
- - $NEURON_HOME/bin/nrnivmodl mod
- - python tut1.py
- - python tut2.py
- - python tut3.py
- - python tut5.py
- - python tut6.py
- - python tut_import.py
- - cd ../../../examples
- - python HHTut/HHTut_run.py
- - python HybridTut/HybridTut_run.py
- - cd M1
+ # Temporarily removed...
+ # - cp examples/HHTut/HHTut.py doc/source/code/
+ # - cd doc/source/code/
+ # - $NEURON_HOME/bin/nrnivmodl mod
+ # - python tut1.py
+ # - python tut2.py
+ # - python tut3.py
+ # - python tut5.py
+ # - python tut6.py
+ # - python tut_import.py
+ - cd examples/HHTut
+ - python HHTut_run.py -nogui
+ - cd ../HybridTut
+ - $NEURON_HOME/bin/nrnivmodl
+ - python HybridTut_run.py
+ - cd ../M1
- $NEURON_HOME/bin/nrnivmodl
- python M1/M1_run.py
- cd ../RL_arm
- $NEURON_HOME/bin/nrnivmodl
- - python RL_arm/main.py
+ - python main.py
notifications:
email: false
- slack: neurosim:pj4DaRn3CrmH6hSRV0zBhfjS
+ #slack: neurosim:pj4DaRn3CrmH6hSRV0zBhfjS
@@ -201,4 +200,4 @@ notifications:
# - $HOME/nrn-7.3
# - $HOME/build/nest-2.6.0
# - $HOME/build/nrn-7.3
-# - $HOME/.cache/pip
\ No newline at end of file
+# - $HOME/.cache/pip
From e8cb277ccad47afac8364bd63d9cf50f9a759a1d Mon Sep 17 00:00:00 2001
From: Padraig Gleeson
Date: Wed, 16 Nov 2016 10:43:20 +0000
Subject: [PATCH 34/54] Improved travis script
---
.travis.yml | 21 +++++++++++----------
1 file changed, 11 insertions(+), 10 deletions(-)
diff --git a/.travis.yml b/.travis.yml
index 2680f7cec..1d4612643 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -9,27 +9,28 @@ addons:
- python-numpy
- python-scipy
- python-matplotlib
+ - python-tk
- python-sympy
- python-tables
language: python
python: 2.7
-env:
+virtualenv:
system_site_packages: true
-
install:
+ # Install OMV to facilitate installation of packages below
- pip install git+https://github.com/OpenSourceBrain/osb-model-validation
-
# Need to pre install NEURON so nrnivmodl can be run targeting mod files in a different directory
- omv install NEURON
+ # Need to pre install pyNeuroML to test export to NeuroML 2
+ - omv install pyNeuroML
+ # Main install for NetPyNE
- python setup.py install
- export NEURON_HOME=/home/travis/neuron/nrn/x86_64/
-
script:
- # Temporarily removed...
# - cp examples/HHTut/HHTut.py doc/source/code/
# - cd doc/source/code/
# - $NEURON_HOME/bin/nrnivmodl mod
@@ -43,13 +44,13 @@ script:
- python HHTut_run.py -nogui
- cd ../HybridTut
- $NEURON_HOME/bin/nrnivmodl
- - python HybridTut_run.py
+ - python HybridTut_run.py -nogui
- cd ../M1
- $NEURON_HOME/bin/nrnivmodl
- - python M1/M1_run.py
- - cd ../RL_arm
- - $NEURON_HOME/bin/nrnivmodl
- - python main.py
+ - python M1_run.py -nogui
+ # - cd ../RL_arm
+ # - $NEURON_HOME/bin/nrnivmodl
+ # - python main.py
notifications:
From 6228615ad18f4ef062fed68149b69a36e35bb500 Mon Sep 17 00:00:00 2001
From: salvadord
Date: Thu, 17 Nov 2016 09:59:41 -0800
Subject: [PATCH 35/54] removed temporary fix in cell.py to set ypos of cell at
-735
---
netpyne/cell.py | 7 +------
1 file changed, 1 insertion(+), 6 deletions(-)
diff --git a/netpyne/cell.py b/netpyne/cell.py
index 055a18bcf..06da1c63e 100644
--- a/netpyne/cell.py
+++ b/netpyne/cell.py
@@ -184,12 +184,7 @@ def createNEURONObj (self, prop):
if 'pt3d' in sectParams['geom']:
h.pt3dclear(sec=sec['hSec'])
x = self.tags['x']
- #y = -self.tags['y'] # Neuron y-axis positive = upwards, so assume pia=0 and cortical depth = neg
-
- ############################################################################
- y = -735 # TEMPORARILY FIX THIS FOR SFN16 EXPERIMENT!! - REMEMBER TO REMOVE!!!!!!!
- ############################################################################
-
+ y = -self.tags['y'] # Neuron y-axis positive = upwards, so assume pia=0 and cortical depth = neg
z = self.tags['z']
for pt3d in sectParams['geom']['pt3d']:
h.pt3dadd(x+pt3d[0], y+pt3d[1], z+pt3d[2], pt3d[3], sec=sec['hSec'])
From 6e276708e3e6e6e46dff8d337a59bc8b1f105a89 Mon Sep 17 00:00:00 2001
From: salvadord
Date: Wed, 23 Nov 2016 11:57:25 -0500
Subject: [PATCH 36/54] added insertion of ions in cell.py
---
CHANGES.md | 2 ++
doc/source/code/tut_import.py | 19 +++++++++++++------
netpyne/cell.py | 2 +-
3 files changed, 16 insertions(+), 7 deletions(-)
diff --git a/CHANGES.md b/CHANGES.md
index 05e8635db..19780af1e 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -22,6 +22,8 @@
- Fixed str vs basestring in analysis.py and simFuncs.py
+- Fixed bug due to not inserting ions in section
+
# Version 0.6.3
- Added cvode_active simConfig option to set variable time step (issue #116)
diff --git a/doc/source/code/tut_import.py b/doc/source/code/tut_import.py
index f05309514..089143394 100644
--- a/doc/source/code/tut_import.py
+++ b/doc/source/code/tut_import.py
@@ -38,10 +38,10 @@
netParams.importCellParams(label='PYR_Mainen_rule', conds={'cellType': 'PYR', 'cellModel': 'Mainen'},
fileName='mainen.py', cellName='PYR2')
-### Friesen
-cellRule = netParams.importCellParams(label='PYR_Friesen_rule', conds={'cellType': 'PYR', 'cellModel': 'Friesen'},
- fileName='friesen.py', cellName='MakeRSFCELL')
-cellRule['secs']['axon']['spikeGenLoc'] = 0.5 # spike generator location.
+# ### Friesen
+# cellRule = netParams.importCellParams(label='PYR_Friesen_rule', conds={'cellType': 'PYR', 'cellModel': 'Friesen'},
+# fileName='friesen.py', cellName='MakeRSFCELL')
+# cellRule['secs']['axon']['spikeGenLoc'] = 0.5 # spike generator location.
### Izhi2003a (independent voltage)
cellRule = netParams.importCellParams(label='PYR_Izhi03a_rule', conds={'cellType': 'PYR', 'cellModel':'Izhi2003a'},
@@ -68,6 +68,13 @@
## Connectivity params
+# netParams.connParams['izhi07a->izhi07a'] = {
+# 'preConds': {'popLabel': 'Izhi07a_pop'}, 'postConds': {'cellModel': ['Izhi2003b']}, # background -> PYR (weight=0.1)
+# 'connFunc': 'fullConn', # connectivity function (all-to-all)
+# 'weight': 0.1, # synaptic weight
+# 'delay': 5, # transmission delay (ms)
+# 'sec': 'soma'}
+
netParams.connParams['bg1'] = {
'preConds': {'popLabel': 'background'}, 'postConds': {'cellType': 'PYR', 'cellModel': ['Traub', 'HH', 'HH3D', 'Mainen', 'Izhi2003b', 'Izhi2007b']}, # background -> PYR (weight=0.1)
'connFunc': 'fullConn', # connectivity function (all-to-all)
@@ -78,7 +85,7 @@
netParams.connParams['bg2'] = {
'preConds': {'popLabel': 'background'}, 'postConds': {'cellType': 'PYR', 'cellModel': ['Friesen','Izhi2003a', 'Izhi2007a']}, # background -> PYR (weight = 10)
'connFunc': 'fullConn', # connectivity function (all-to-all)
- 'weight': 5, # synaptic weight
+ 'weight': 0.1, # synaptic weight
'delay': 5, # transmission delay (ms)
'synMech':'AMPA',
'sec': 'soma'}
@@ -96,7 +103,7 @@
simConfig = specs.SimConfig() # object of class SimConfig to store simulation configuration
simConfig.duration = 1*1e3 # Duration of the simulation, in ms
simConfig.dt = 0.025 # Internal integration timestep to use
-simConfig.verbose = False # Show detailed messages
+simConfig.verbose = 1 # Show detailed messages
simConfig.recordTraces = {'V_soma':{'sec':'soma','loc':0.5,'var':'v'}} # Dict with traces to record
simConfig.recordStep = 1 # Step size in ms to save data (eg. V traces, LFP, etc)
simConfig.filename = 'model_output' # Set file output name
diff --git a/netpyne/cell.py b/netpyne/cell.py
index 06da1c63e..ec0265baf 100644
--- a/netpyne/cell.py
+++ b/netpyne/cell.py
@@ -208,7 +208,7 @@ def createNEURONObj (self, prop):
for ionName,ionParams in sectParams['ions'].iteritems():
if ionName not in sec['ions']:
sec['ions'][ionName] = Dict()
- # Assume a mechanism using this ion is already present...
+ sec['hSec'].insert(ionName+'_ion') # insert mechanism
for ionParamName,ionParamValue in ionParams.iteritems(): # add params of the mechanism
ionParamValueFinal = ionParamValue
for iseg,seg in enumerate(sec['hSec']): # set ion params for each segment
From dec766d583278d749338cc1b09717342b3a67a0c Mon Sep 17 00:00:00 2001
From: salvadord
Date: Wed, 23 Nov 2016 16:23:50 -0500
Subject: [PATCH 37/54] verified that tut_import.py works and artificial cell conns
work
---
doc/source/code/tut_import.py | 25 ++++++++++++-------------
1 file changed, 12 insertions(+), 13 deletions(-)
diff --git a/doc/source/code/tut_import.py b/doc/source/code/tut_import.py
index 089143394..dce601945 100644
--- a/doc/source/code/tut_import.py
+++ b/doc/source/code/tut_import.py
@@ -38,10 +38,10 @@
netParams.importCellParams(label='PYR_Mainen_rule', conds={'cellType': 'PYR', 'cellModel': 'Mainen'},
fileName='mainen.py', cellName='PYR2')
-# ### Friesen
-# cellRule = netParams.importCellParams(label='PYR_Friesen_rule', conds={'cellType': 'PYR', 'cellModel': 'Friesen'},
-# fileName='friesen.py', cellName='MakeRSFCELL')
-# cellRule['secs']['axon']['spikeGenLoc'] = 0.5 # spike generator location.
+### Friesen
+cellRule = netParams.importCellParams(label='PYR_Friesen_rule', conds={'cellType': 'PYR', 'cellModel': 'Friesen'},
+ fileName='friesen.py', cellName='MakeRSFCELL')
+cellRule['secs']['axon']['spikeGenLoc'] = 0.5 # spike generator location.
### Izhi2003a (independent voltage)
cellRule = netParams.importCellParams(label='PYR_Izhi03a_rule', conds={'cellType': 'PYR', 'cellModel':'Izhi2003a'},
@@ -68,13 +68,6 @@
## Connectivity params
-# netParams.connParams['izhi07a->izhi07a'] = {
-# 'preConds': {'popLabel': 'Izhi07a_pop'}, 'postConds': {'cellModel': ['Izhi2003b']}, # background -> PYR (weight=0.1)
-# 'connFunc': 'fullConn', # connectivity function (all-to-all)
-# 'weight': 0.1, # synaptic weight
-# 'delay': 5, # transmission delay (ms)
-# 'sec': 'soma'}
-
netParams.connParams['bg1'] = {
'preConds': {'popLabel': 'background'}, 'postConds': {'cellType': 'PYR', 'cellModel': ['Traub', 'HH', 'HH3D', 'Mainen', 'Izhi2003b', 'Izhi2007b']}, # background -> PYR (weight=0.1)
'connFunc': 'fullConn', # connectivity function (all-to-all)
@@ -85,7 +78,7 @@
netParams.connParams['bg2'] = {
'preConds': {'popLabel': 'background'}, 'postConds': {'cellType': 'PYR', 'cellModel': ['Friesen','Izhi2003a', 'Izhi2007a']}, # background -> PYR (weight = 10)
'connFunc': 'fullConn', # connectivity function (all-to-all)
- 'weight': 0.1, # synaptic weight
+ 'weight': 5, # synaptic weight
'delay': 5, # transmission delay (ms)
'synMech':'AMPA',
'sec': 'soma'}
@@ -98,12 +91,18 @@
'delay': 5, # transmission delay (ms)
'sec': 'soma'} # section to connect to
+# netParams.connParams['izhi07a->izhi07a'] = {
+# 'preConds': {'popLabel': 'HH_pop'}, 'postConds': {'popLabel': 'Izhi07a_pop'}, # background -> PYR (weight=0.1)
+# 'connFunc': 'fullConn', # connectivity function (all-to-all)
+# 'weight': 5, # synaptic weight
+# 'delay': 5, # transmission delay (ms)
+# 'sec': 'soma'}
# Simulation options
simConfig = specs.SimConfig() # object of class SimConfig to store simulation configuration
simConfig.duration = 1*1e3 # Duration of the simulation, in ms
simConfig.dt = 0.025 # Internal integration timestep to use
-simConfig.verbose = 1 # Show detailed messages
+simConfig.verbose = 0 # Show detailed messages
simConfig.recordTraces = {'V_soma':{'sec':'soma','loc':0.5,'var':'v'}} # Dict with traces to record
simConfig.recordStep = 1 # Step size in ms to save data (eg. V traces, LFP, etc)
simConfig.filename = 'model_output' # Set file output name
From 8bb2b5b9c3cc07c9c59f2a7bc5a4629a2a4f7e1c Mon Sep 17 00:00:00 2001
From: salvadord
Date: Wed, 23 Nov 2016 20:10:40 -0500
Subject: [PATCH 38/54] added HH->Izhi07a and Izhi07a->HH conns in
tut_import.py
---
doc/source/code/tut_import.py | 20 ++++++++++++++------
1 file changed, 14 insertions(+), 6 deletions(-)
diff --git a/doc/source/code/tut_import.py b/doc/source/code/tut_import.py
index dce601945..6cf2b4c99 100644
--- a/doc/source/code/tut_import.py
+++ b/doc/source/code/tut_import.py
@@ -91,12 +91,20 @@
'delay': 5, # transmission delay (ms)
'sec': 'soma'} # section to connect to
-# netParams.connParams['izhi07a->izhi07a'] = {
-# 'preConds': {'popLabel': 'HH_pop'}, 'postConds': {'popLabel': 'Izhi07a_pop'}, # background -> PYR (weight=0.1)
-# 'connFunc': 'fullConn', # connectivity function (all-to-all)
-# 'weight': 5, # synaptic weight
-# 'delay': 5, # transmission delay (ms)
-# 'sec': 'soma'}
+netParams.connParams['HH->izhi07a'] = {
+ 'preConds': {'popLabel': 'HH_pop'}, 'postConds': {'popLabel': 'Izhi07a_pop'}, # HH_pop -> Izhi07a_pop (weight=5)
+ 'connFunc': 'fullConn', # connectivity function (all-to-all)
+ 'weight': 5, # synaptic weight
+ 'delay': 5, # transmission delay (ms)
+ 'sec': 'soma'}
+
+netParams.connParams['izhi07a->HH'] = {
+ 'preConds': {'popLabel': 'Izhi07a_pop'}, 'postConds': {'popLabel': 'HH_pop'}, # Izhi07a_pop -> HH_pop (weight=0.1)
+ 'connFunc': 'fullConn', # connectivity function (all-to-all)
+ 'weight': 0.1, # synaptic weight
+ 'delay': 5, # transmission delay (ms)
+ 'sec': 'soma'}
+
# Simulation options
simConfig = specs.SimConfig() # object of class SimConfig to store simulation configuration
From c604baa036f8e0257811b88a9117bd6f7b53a412 Mon Sep 17 00:00:00 2001
From: salvadord
Date: Thu, 24 Nov 2016 19:05:54 -0500
Subject: [PATCH 39/54] added support for gap junctions (example in sandbox)
---
CHANGES.md | 2 +
README.md | 2 +-
examples/sandbox/sandbox.py | 267 ++++++------------------------------
netpyne/cell.py | 71 +++++++---
netpyne/network.py | 9 +-
netpyne/sim.py | 2 +-
netpyne/simFuncs.py | 11 +-
sdnotes.org | 33 ++++-
8 files changed, 145 insertions(+), 252 deletions(-)
diff --git a/CHANGES.md b/CHANGES.md
index 19780af1e..be108e22a 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,5 +1,7 @@
# Version 0.6.4
+- Added support for gap junction (electrical synapse) connections
+
- Added function plotRatePSD to plot firing rate power spectral density
- Added plotShape to plot 3D morphology of cell and synapse locations
diff --git a/README.md b/README.md
index 1c8090bd7..c2fb9d426 100644
--- a/README.md
+++ b/README.md
@@ -20,7 +20,7 @@ Several example model parameters are provided, including:
Additional details of the modelling framework can be found here:
* [NetPyNE Documentation](http://neurosimlab.org/netpyne/)
-* [SFN'15 poster](http://neurosimlab.org/salvadord/sfn15-sal-final.pdf)
+* [CNS'16 poster](http://neurosimlab.org/salvadord/CNS16_poster.pdf)
* [slides](https://drive.google.com/file/d/0B8v-knmZRjhtVl9BOFY2bzlWSWs/view?usp=sharing)
diff --git a/examples/sandbox/sandbox.py b/examples/sandbox/sandbox.py
index b5e6683aa..df012ff58 100644
--- a/examples/sandbox/sandbox.py
+++ b/examples/sandbox/sandbox.py
@@ -31,198 +31,52 @@
netParams.scaleConnWeightModels = {'HH': 1.0}
# Population parameters
-netParams.addPopParams('PYR', {'cellModel': 'HH', 'cellType': 'PYR', 'ynormRange': [0,0.5], 'numCells': 10}) # add dict with params for this pop
-netParams.addPopParams('PYR2', {'cellModel': 'HH', 'cellType': 'PYR', 'ynormRange': [0.3,0.6], 'numCells': 20}) # add dict with params for this pop
-netParams.addPopParams('PYR3', {'cellModel': 'HH', 'cellType': 'PYR', 'ynormRange': [0.2,1.0],'numCells': 20}) # add dict with params for this pop
-
-netParams.addPopParams('background', {'cellModel': 'NetStim', 'rate': 100, 'noise': 0.5, 'start': 1, 'seed': 2}) # background inputs
-netParams.addPopParams('background2', {'cellModel': 'NetStim', 'rate': 20, 'noise': 0.5, 'start': 1, 'seed': 2}) # background inputs
-netParams.addPopParams('microstim', {'rate': 50, 'noise': 0, 'cellModel': 'NetStim'})
+netParams.addPopParams('PYR1', {'cellModel': 'HH', 'cellType': 'PYR', 'numCells': 1}) # add dict with params for this pop
+netParams.addPopParams('PYR2', {'cellModel': 'HH', 'cellType': 'PYR', 'numCells': 1}) # add dict with params for this pop
+netParams.addPopParams('background', {'cellModel': 'NetStim', 'rate': 20, 'noise': 0.5, 'start': 1, 'seed': 2}) # background inputs
# Synaptic mechanism parameters
netParams.addSynMechParams('AMPA', {'mod': 'Exp2Syn', 'tau1': 0.1, 'tau2': 1.0, 'e': 0})
-netParams.addSynMechParams('NMDA', {'mod': 'Exp2Syn', 'tau1': 0.1, 'tau2': 5.0, 'e': 0})
-netParams.addSynMechParams('homSyn', {'mod':'hsyn','tau1':0.05,'tau2':5.3,'e':0, 'selfNetCon': {'threshold': -15, 'weight': -1, 'delay': 0, 'sec': 'soma', 'loc': 0.5}})
+netParams.addSynMechParams('esyn', {'mod': 'ElectSyn', 'g': 0.000049999999999999996})
# Cell parameters
## PYR cell properties
-# cellParams = Dict()
-# cellParams.secs.soma.geom = {'diam': 18.8, 'L': 18.8, 'Ra': 123.0}
-# cellParams.secs.soma.mechs.hh = {'gnabar': 0.12, 'gkbar': 0.036, 'gl': 0.003, 'el': -70}
-# cellParams.conds = {'cellType': 'PYR'}
-# netParams.addCellParams('PYR2sec', cellParams)
-
-
-## PYR2sec cell properties
-soma = {'geom': {}, 'mechs': {}} # soma params dict
-soma['geom'] = {'diam': 18.8, 'L': 18.8, 'cm':1} # soma geometry
-soma['mechs']['hh'] = {'gnabar': 0.12, 'gkbar': 0.036, 'gl': 0.0003, 'el': -54} # soma hh mechanisms
-dend = {'geom': {}, 'topol': {}, 'mechs': {}, 'synMechs': {}} # dend params dict
-dend['geom'] = {'diam': 5.0, 'L': 150.0, 'Ra': 150.0, 'cm': 1} # dend geometry
-dend['topol'] = {'parentSec': 'soma', 'parentX': 1.0, 'childX': 0} # dend topology
-dend['mechs']['pas'] = {'g': 0.0000357, 'e': -70} # dend mechanisms
-cellParams = {'conds': {'cellType': 'PYR'},
- 'secs': {'soma': soma, 'dend': dend},
- 'secLists': {'all': ['soma', 'dend']}} # cell rule dict
-netParams.addCellParams('PYR2sec', cellParams) # add dict to list of cell properties
-
-##
-# cellRule = Dict(conds={'cellType': 'PYR2sec', 'cellModel': 'HH'}, secs=Dict(), secLists=Dict())
-# cellRule.secs.soma.geom = Dict({'diam': 6.3, 'L': 5, 'Ra': 123.0, 'pt3d':[]})
-# cellRule.secs.soma.geom.pt3d.append((0, 0, 0, 20))
-# cellRule.secs.soma.geom.pt3d.append((0, 0, 20, 20))
-# cellRule.secs.soma.mechs.hh = {'gnabar': 0.12, 'gkbar': 0.036, 'gl': 0.003, 'el': -70}
-
-# cellRule.secs.dend.geom = Dict({'diam': 5.0, 'L': 150.0, 'Ra': 150.0, 'cm': 1, 'pt3d': []})
-# cellRule.secs.dend.geom.pt3d.append((0, 0, 0, 40))
-# cellRule.secs.dend.geom.pt3d.append((0, 0, 50, 40))
-# cellRule.secs.dend.topol = {'parentSec': 'soma', 'parentX': 1.0, 'childX': 0}
-# cellRule.secs.dend.mechs.pas = {'g': 0.0000357, 'e': -70}
-
-# cellRule.secLists.all = ['soma', 'dend']
-# netParams.addCellParams('PYR2sec', cellRule) # add dict to list of cell properties
-
-### HH
-# cellRule = {'label': 'PYR_HH_rule', 'conds': {'cellType': 'PYR', 'cellModel': 'HH'}} # cell rule dict
-# synMechParams = []
-# utils.importCell(cellRule=cellRule, synMechParams=netParams['synMechParams'], fileName='HHCellFile.py', cellName='HHCellClass')
-# netParams['cellParams'].append(cellRule) # add dict to list of cell parameters
-
-### Import from net
-
-# netParams.importCellParamsFromNet(
-# labelList = ['PYR', 'BAS'],
-# condsList = [{'cellType': 'PYR'}, {'cellType': 'BAS'}],
-# fileName = '/u/salvadord/Models/ca3ihdemo/simcells.py',
-# cellNameList = ['net.bas.cell[0]', 'net.bas.cell[0]'],
-# importSynMechs = True)
-
-
-#Stimulation parameters
-# netParams.addStimSourceParams('Input_1', {'type': 'IClamp', 'delay': 10, 'dur': 800, 'amp': 'uniform(0.05,0.5)'})
-# netParams.addStimSourceParams('Input_2', {'type': 'VClamp', 'dur':[0,1,1], 'amp': [1,1,1], 'gain': 1, 'rstim': 0, 'tau1': 1, 'tau2': 1, 'i': 1})
-# netParams.addStimSourceParams('Input_3', {'type': 'AlphaSynapse', 'onset': 'uniform(1,500)', 'tau': 5, 'gmax': 'post_ynorm', 'e': 0})
-# netParams.addStimSourceParams('Input_4', {'type': 'NetStim', 'interval': 'uniform(20,100)', 'number': 1000, 'start': 5, 'noise': 0.1})
-
-# netParams.addStimTargetParams('Input_1_PYR',
-# {'source': 'Input_1',
-# 'sec':'soma',
-# 'loc': 0.5,
-# 'conds': {'popLabel':'PYR', 'cellList': range(8)}})
-
-
-# netParams.addStimTargetParams('Input_3_PYR2',
-# {'source': 'Input_3',
-# 'sec':'soma',
-# 'loc': 0.5,
-# 'conds': {'popLabel':'PYR2', 'ynorm':[0.2,0.6]}})
-
-# netParams.addStimTargetParams('Input_4_PYR3',
-# {'source': 'Input_4',
-# 'sec':'soma',
-# 'loc': 0.5,
-# 'weight': '0.1+gauss(0.2,0.05)',
-# 'delay': 1,
-# 'conds': {'popLabel':'PYR3', 'cellList': [0,1,2,3,4,5,10,11,12,13,14,15]}})
-
-
-# # Connectivity parameters
-# netParams.addConnParams('PYRconn1',
-# {'preConds': {'popLabel': 'PYR'}, 'postConds': {'popLabel': 'PYR'},
-# 'weight': [[0.005, 0.02, 0.05, 0.04, 0.1], [0.11, 0.22, 0.33, 0.44, 0.55]], # weight of each connection
-# 'delay': '0.2+gauss(13.0,1.4)', # delay min=0.2, mean=13.0, var = 1.4
-# 'synsPerConn': 5,
-# 'sec': 'all',
-# 'synMech': ['AMPA', 'NMDA'],
-# 'threshold': 10}) # threshold
-
-# duration = 1*1e3
-# netParams.connParams['mist->PYR'] = {
-# 'preConds': {'popLabel': 'microstim'},
-# 'postConds': {'cellType': 'PYR'},
-# 'weight': 0.1,
-# 'shape': {'switchOnOff': [200,400, 600, 800],
-# 'pulseType': 'gaussian',
-# 'pulsePeriod': 100,
-# 'pulseWidth': 60},
-# 'synMech':'AMPA'}
-
-
-# netParams.addConnParams('PYRconn2',
-# {'preConds': {'popLabel': 'PYR'}, 'postConds': {'popLabel': 'PYR'},
-# 'weight': 0.005, # weight of each connection
-# 'delay': '0.2+gauss(13.0,1.4)', # delay min=0.2, mean=13.0, var = 1.4
-# 'threshold': 10, # threshold
-# 'convergence': 'uniform(1,15)',
-# 'synMech': 'homSyn',
-# 'sec': 'all',
-# 'synsPerConn': 2}) # convergence (num presyn targeting postsyn) is uniformly distributed between 1 and 15
-
-# netParams.addConnParams('PYR->PYR',
-# {'preConds': {'popLabel': 'PYR'}, 'postConds': {'popLabel': ['PYR','PYR2', 'PYR3']},
-# 'weight': 0.001, # weight of each connection
-# 'delay': '0.2+gauss(13.0,1.4)', # delay min=0.2, mean=13.0, var = 1.4
-# 'threshold': 10, # threshold
-# 'divergence': 'uniform(1,15)'}) # convergence (num presyn targeting postsyn) is uniformly distributed between 1 and 15
-
-# netParams.addConnParams(2,
-# {'preConds': {'popLabel': ['PYR']}, 'postConds': {'cellModel': 'HH', 'popLabel': 'PYR2'},
-# 'weight': 'uniform(0.01, 0.1)', # weight of each connection
-# 'delay': '0.2+gauss(13.0,1.4)', # delay min=0.2, mean=13.0, var = 1.4
-# 'threshold': 10, # threshold
-# 'convergence': 10})
-# #'probability': 'uniform(0.2,0.6)'}) # convergence (num presyn targeting postsyn) is uniformly distributed between 1 and 15
-
-
-# netParams.addConnParams(3,
-# {'preConds': {'popLabel': 'PYR'}, 'postConds': {'popLabel': 'PYR'},
-# 'connList': [[0,1],[3,1]], # list of connections
-# 'synMech': ['AMPA', 'NMDA'],
-# 'synsPerConn': 3,
-# 'weight': [[[0.1, 0.5, 0.7], [0.3, 0.4, 0.5]],[[0.1, 0.5, 0.7], [0.3, 0.4, 0.5]]], # weight of each connection
-# 'delay': 5,
-# 'loc': 0.2,
-# 'threshold': 10}) # threshold
-
-
-netParams.addConnParams('bg->PYR',
- {'preConds': {'popLabel': 'background2'}, 'postConds': {'cellType': 'PYR'}, # background -> PYR
+cellParams = Dict()
+cellParams.secs.soma.geom = {'diam': 18.8, 'L': 18.8, 'Ra': 123.0}
+cellParams.secs.soma.mechs.hh = {'gnabar': 0.12, 'gkbar': 0.036, 'gl': 0.003, 'el': -70}
+cellParams.conds = {'cellType': 'PYR'}
+netParams.addCellParams('PYR', cellParams)
+
+netParams.connParams['bg->PYR1'] = {
+ 'preConds': {'popLabel': 'background'}, 'postConds': {'popLabel': 'PYR1'}, # background -> PYR
'weight': 0.1, # fixed weight of 0.1
'synMech': 'AMPA', # target AMPA synapse
- 'delay': 4,
- 'sec': 'soma'}) # uniformly distributed delays between 1-5ms
-
-# netParams.addConnParams('PYRconn3',
-# {'preConds': {'popLabel': 'background2'}, 'postConds': {'cellType': 'PYR2sec'}, # background -> PYR
-# 'synMech': ['AMPA', 'NMDA'],
-# 'synsPerConn': 3,
-# 'weight': 0.2,
-# 'delay': [5, 10],
-# 'loc': [[0.1, 0.5, 0.7], [0.3, 0.4, 0.5]]}) # uniformly distributed delays between 1-5ms
-
-# netParams.addConnParams('PYRconn4',
-# {'preConds': {'popLabel': 'background2'}, 'postConds': {'cellType': 'PYR2sec'}, # background -> PYR
-# 'weight': 0.02, # fixed weight of 0.08
-# 'synMech': 'AMPA', # target NMDA synapse
-# 'synsPerConn': 2,
-# 'delay': 1}) # uniformly distributed delays between 1-5ms
-
-
-
-# netParams.connParams['PYRconn5']= {'preConds': {'popLabel': 'background2'}, 'postConds': {'cellType': 'PYR2sec'}, # background -> PYR
-# 'weight': 0.1, # fixed weight of 0.08
-# 'synMech': 'AMPA', # target NMDA synapse
-# 'delay': 'uniform(1,5)'} # uniformly distributed delays between 1-5ms
-
-
-netParams.addSubConnParams('PYRsub1',
- {'preConds': {'cellType': ['PYR2sec']}, # 'cellType': ['IT', 'PT', 'CT']
- 'postConds': {'popLabel': 'PYR'}, # 'popLabel': 'L5_PT'
- 'sec': 'all',
- 'ynormRange': [0, 1.0],
- 'density': [0.2, 0.1, 0.0, 0.0, 0.2, 0.5] }) # subcellulalr distribution
-
+ 'delay': 'uniform(1,5)'} # uniformly distributed delays between 1-5ms
+
+netParams.addConnParams('PYR1->PYR2',
+ {'preConds': {'popLabel': 'PYR1'}, 'postConds': {'popLabel': 'PYR2'}, # PYR1 -> PYR2
+ 'weight': 200.0, # fixed weight of 0.08
+ 'synMech': 'esyn', # target NMDA synapse
+ 'gapJunction': True,
+ 'sec': 'soma',
+ 'loc': 0.5,
+ 'preSec': 'soma',
+ 'preLoc': 0.5})
+
+
+# if (isCellOnNode("SampleCellGroup", 1)) {
+# a_SampleCellGroup[1].Soma { elecsyn_NetConn_SampleCellGroup_SampleCellGroup_ElectSyn_A[0] = new ElectSyn(0.5) }
+# elecsyn_NetConn_SampleCellGroup_SampleCellGroup_ElectSyn_A[0].weight = 1.0
+# pnm.pc.target_var(&elecsyn_NetConn_SampleCellGroup_SampleCellGroup_ElectSyn_A[0].vgap, 100000000)
+# pnm.pc.source_var(&a_SampleCellGroup[1].Soma.v(0.5), 200000000)
+# }
+# if (isCellOnNode("SampleCellGroup", 0)) {
+# a_SampleCellGroup[0].Soma { elecsyn_NetConn_SampleCellGroup_SampleCellGroup_ElectSyn_B[0] = new ElectSyn(0.5) }
+# elecsyn_NetConn_SampleCellGroup_SampleCellGroup_ElectSyn_B[0].weight = 1.0
+# pnm.pc.target_var(&elecsyn_NetConn_SampleCellGroup_SampleCellGroup_ElectSyn_B[0].vgap, 200000000)
+# pnm.pc.source_var(&a_SampleCellGroup[0].Soma.v(0.5), 100000000)
+# }
###############################################################################
@@ -231,7 +85,7 @@
# Simulation parameters
simConfig.duration = 1*1e3 # Duration of the simulation, in ms
-simConfig.dt = 0.025 # Internal integration timestep to use
+simConfig.dt = 0.1 # Internal integration timestep to use
simConfig.seeds = {'conn': 2, 'stim': 2, 'loc': 2} # Seeds for randomizers (connectivity, input stimulation and cell locations)
simConfig.createNEURONObj = 1 # create HOC objects when instantiating network
simConfig.createPyStruct = 1 # create Python structure (simulator-independent) when instantiating network
@@ -249,7 +103,7 @@
# Saving
simConfig.filename = 'mpiHHTut' # Set file output name
simConfig.saveFileStep = 1000 # step size in ms to save data to disk
-simConfig.savePickle = 1 # Whether or not to write spikes etc. to a .mat file
+simConfig.savePickle = 0 # Whether or not to write spikes etc. to a .mat file
simConfig.saveJson = 0 # Whether or not to write spikes etc. to a .mat file
simConfig.saveMat = 0 # Whether or not to write spikes etc. to a .mat file
simConfig.saveDpk = 0 # save to a .dpk pickled file
@@ -258,23 +112,13 @@
# # Analysis and plotting
simConfig.addAnalysis('plotRaster', {'spikeHist': 'subplot'})
-simConfig.analysis['plotSpikeHist'] = {'overlay': False}
-simConfig.addAnalysis('plotTraces', {'include': [0,1,2], 'oneFigPer':'cell', 'saveFig':True})
-# simConfig.addAnalysis('plotSpikeHist', {'include': ['PYR', 'allNetStims', 'background2', ('PYR',[5,6,7,8])],
-# 'timeRange': [400,600], 'binSize': 10, 'overlay':True, 'graphType': 'line', 'yaxis': 'count', 'saveData': True, 'saveFig': True, 'showFig': True})
-# simConfig.addAnalysis('plot2Dnet', {'include': ['allCells']})
-# simConfig.addAnalysis('plotConn', True)
+simConfig.addAnalysis('plotTraces', {'include': [0,1]})
###############################################################################
# RUN SIM
###############################################################################
-#sim.createSimulateAnalyze(netParams = netParams, simConfig = simConfig) # create and simulate network
-# sim.createSimulate(netParams = netParams, simConfig = simConfig) # create and simulate network
-# sim.saveData()
-# sim.loadSimulateAnalyze('mpiHHTut.pkl')
-# sim.analysis.plotData()
sim.initialize(netParams = netParams, simConfig = simConfig)
sim.net.createPops()
@@ -287,35 +131,4 @@
sim.analyze()
-# ###############################################################################
-# # MODIFY and RUN SIM
-# ###############################################################################
-
-# sim.net.modifyCells({'conds': {'label': 'PYR2sec'},
-# 'secs': {'soma': {'geom': {'L': 100}}}})
-
-# sim.net.modifyConns({'conds': {'label': 'PYR->PYR', 'weight': [0,0.001], 'loc': 0.5},
-# 'postConds': {'popLabel': 'PYR2', 'ynorm': [0.4,0.6]},
-# 'weight': 0.01})
-
-# sim.net.modifyStims({'conds': {'source': 'Input_1', 'label': 'Input_1_PYR', 'dur': [600, 900]},
-# 'cellConds': {'popLabel': 'PYR', 'ynorm': [0.0,0.5]},
-# 'delay': 300})
-
-# sim.net.modifySynMechs({'conds': {'label':'homSyn', 'sec': 'soma', 'loc': [0,1]},
-# 'cellConds': {'cellType': 'PYR2sec'},
-# 'targetrate': 0.6})
-
-
-# for c in sim.net.cells:
-# if c.tags['cellType'] == 'PYR':
-# for sec in c.secs:
-# for synMech in sec.synMechs:
-# if synMech.label == 'homSyn':
-# synMech.hSyn.targetrate = 0.6
-
-
-# sim.simulate() # create and simulate network
-# sim.analyze()
-
diff --git a/netpyne/cell.py b/netpyne/cell.py
index ec0265baf..ab536c86f 100644
--- a/netpyne/cell.py
+++ b/netpyne/cell.py
@@ -389,7 +389,7 @@ def addSynMech (self, synLabel, secLabel, loc):
secLabelNetCon = synParamValue.get('sec', 'soma')
locNetCon = synParamValue.get('loc', 0.5)
secNetCon = self.secs.get(secLabelNetCon, None)
- synMech['hNetcon'] = h.NetCon(secNetCon['hSec'](locNetCon)._ref_v, synMech['hSyn'], sec=secNetCon['hSec'])
+ synMech['hNetcon'] = h.NetCon(secNetCon['hSec'](locNetCon)._ref_v, synMech['hSyn'], sec=secNetCon['hSec'])
for paramName,paramValue in synParamValue.iteritems():
if paramName == 'weight':
synMech['hNetcon'].weight[0] = paramValue
@@ -534,7 +534,24 @@ def addConn (self, params, netStimParams = None):
if netStimParams:
netstim = self.addNetStim(netStimParams)
-
+
+ if params.get('gapJunction', False) == True: # only run for post gap junc (not pre)
+ preGapId = 10e9*sim.rank + sim.net.lastGapId # global index for presyn gap junc
+ postGapId = preGapId + 1 # global index for postsyn gap junc
+ sim.net.lastGapId += 2 # keep track of num of gap juncs in this node
+ if not getattr(sim.net, 'preGapJunctions', False):
+ sim.net.preGapJunctions = [] # if doesn't exist, create list to store presynaptic cell gap junctions
+ preGapParams = {'gid': params['preGid'],
+ 'preGid': self.gid,
+ 'sec': params.get('preSec', 'soma'),
+ 'loc': params.get('preLoc', 0.5),
+ 'weight': params['weight'],
+ 'gapId': preGapId,
+ 'preGapId': postGapId,
+ 'synMech': params['synMech'],
+ 'gapJunction': 'pre'}
+ sim.net.preGapJunctions.append(preGapParams) # add conn params to add pre gap junction later
+
# Python Structure
if sim.cfg.createPyStruct:
connParams = {k:v for k,v in params.iteritems() if k not in ['synsPerConn']}
@@ -546,28 +563,46 @@ def addConn (self, params, netStimParams = None):
if netStimParams:
connParams['preGid'] = 'NetStim'
connParams['preLabel'] = netStimParams['source']
+ if params.get('gapJunction', False) == True: # only run for post gap junc (not pre)
+ connParams['gapId'] = postGapId
+ connParams['preGapId'] = preGapId
+ connParams['gapJunction'] = 'post'
self.conns.append(Dict(connParams))
else: # do not fill in python structure (just empty dict for NEURON obj)
self.conns.append(Dict())
# NEURON objects
if sim.cfg.createNEURONObj:
- if pointp:
- sec = self.secs[secLabels[0]]
- postTarget = sec['pointps'][pointp]['hPointp'] # local point neuron
- else:
+ # gap junctions
+ if params.get('gapJunction', False) in [True, 'pre', 'post']: # create NEURON obj for pre and post
+ synMechs[i]['hSyn'].weight = weights[i]
+ sourceVar = self.secs[synMechSecs[i]]['hSec'](synMechLocs[i])._ref_v
+ targetVar = synMechs[i]['hSyn']._ref_vgap # assumes variable is vgap -- make a parameter
sec = self.secs[synMechSecs[i]]
- postTarget = synMechs[i]['hSyn'] # local synaptic mechanism
+ sim.pc.target_var(targetVar, connParams['gapId'])
+ self.secs[synMechSecs[i]]['hSec'].push()
+ sim.pc.source_var(sourceVar, connParams['preGapId'])
+ h.pop_section()
+ netcon = None
+
+ # connections using NetCons
+ else:
+ if pointp:
+ sec = self.secs[secLabels[0]]
+ postTarget = sec['pointps'][pointp]['hPointp'] # local point neuron
+ else:
+ sec = self.secs[synMechSecs[i]]
+ postTarget = synMechs[i]['hSyn'] # local synaptic mechanism
- if netStimParams:
- netcon = h.NetCon(netstim, postTarget) # create Netcon between netstim and target
- else:
- netcon = sim.pc.gid_connect(params['preGid'], postTarget) # create Netcon between global gid and target
-
- netcon.weight[weightIndex] = weights[i] # set Netcon weight
- netcon.delay = delays[i] # set Netcon delay
- netcon.threshold = params['threshold'] # set Netcon threshold
- self.conns[-1]['hNetcon'] = netcon # add netcon object to dict in conns list
+ if netStimParams:
+ netcon = h.NetCon(netstim, postTarget) # create Netcon between netstim and target
+ else:
+ netcon = sim.pc.gid_connect(params['preGid'], postTarget) # create Netcon between global gid and target
+
+ netcon.weight[weightIndex] = weights[i] # set Netcon weight
+ netcon.delay = delays[i] # set Netcon delay
+ netcon.threshold = params['threshold'] # set Netcon threshold
+ self.conns[-1]['hNetcon'] = netcon # add netcon object to dict in conns list
# Add time-dependent weight shaping
@@ -1073,11 +1108,11 @@ def __getstate__ (self):
###############################################################################
#
-# POINT NEURON CLASS (v not from Section)
+# ARTIFICIAL CELL CLASS (no sections)
#
###############################################################################
-class PointNeuron (Cell):
+class ArtifCell (Cell):
'''
Artificial cell class for point processes without sections - TO DO
'''
diff --git a/netpyne/network.py b/netpyne/network.py
index 321453eb1..ab1495437 100644
--- a/netpyne/network.py
+++ b/netpyne/network.py
@@ -38,7 +38,7 @@ def __init__ (self, params = None):
self.lid2gid = [] # Empty list for storing local index -> GID (index = local id; value = gid)
self.gid2lid = {} # Empty dict for storing GID -> local index (key = gid; value = local id) -- ~x6 faster than .index()
self.lastGid = 0 # keep track of last cell gid
-
+ self.lastGapId = 0 # keep track of last gap junction gid
###############################################################################
@@ -453,6 +453,12 @@ def connectCells (self):
self._connStrToFunc(preCellsTags, postCellsTags, connParam) # convert strings to functions (for the delay, and probability params)
connFunc(preCellsTags, postCellsTags, connParam) # call specific conn function
+ # add gap junctions of presynaptic cells (need to do separately because could be in different ranks)
+ for preGapParams in getattr(sim.net, 'preGapJunctions', []):
+ if preGapParams['gid'] in self.lid2gid: # only cells in this rank
+ cell = self.cells[self.gid2lid[preGapParams['gid']]]
+ cell.addConn(preGapParams)
+
# apply subcellular connectivity params (distribution of synaspes)
if self.params.subConnParams:
self.subcellularConn(allCellTags, allPopTags)
@@ -823,6 +829,7 @@ def _addCellConn (self, connParam, preCellGid, postCellGid):
'plast': connParam.get('plast')}
if sim.cfg.includeParamsLabel: params['label'] = connParam.get('label')
+ if connParam.get('gapJunction', False): params['gapJunction'] = connParam.get('gapJunction')
postCell.addConn(params=params, netStimParams=connParam.get('netStimParams'))
diff --git a/netpyne/sim.py b/netpyne/sim.py
index 7bcfb652c..c838b42c7 100644
--- a/netpyne/sim.py
+++ b/netpyne/sim.py
@@ -12,7 +12,7 @@
from wrappers import *
import analysis
from network import Network
-from cell import Cell, PointNeuron
+from cell import Cell
from pop import Pop
import utils
from neuron import h
diff --git a/netpyne/simFuncs.py b/netpyne/simFuncs.py
index 70891e49c..286f6c1a4 100644
--- a/netpyne/simFuncs.py
+++ b/netpyne/simFuncs.py
@@ -686,6 +686,7 @@ def preRun():
for cell in sim.net.cells:
sim.fih.append(h.FInitializeHandler(cell.initV))
+ # cvode variables
if not getattr(h, 'cvode', None):
h('objref cvode')
h('cvode = new CVode()')
@@ -700,17 +701,23 @@ def preRun():
else:
h.cvode.cache_efficient(0)
- h.dt = sim.cfg.dt # set time step
+ # time vars
+ h.dt = sim.cfg.dt
h.tstop = sim.cfg.duration
+
+ # h params
for key,val in sim.cfg.hParams.iteritems():
try:
setattr(h, key, val) # set other h global vars (celsius, clamp_resist)
except:
print '\nError: could not set %s = %s' % (key, str(val))
+
+ # parallelcontext vars
sim.pc.set_maxstep(10)
mindelay = sim.pc.allreduce(sim.pc.set_maxstep(10), 2) # flag 2 returns minimum value
if sim.rank==0 and sim.cfg.verbose: print('Minimum delay (time-step for queue exchange) is %.2f'%(mindelay))
-
+ sim.pc.setup_transfer() # setup transfer of source_var to target_var
+
# handler for printing out time during simulation run
if sim.rank == 0 and sim.cfg.printRunTime:
def printRunTime():
diff --git a/sdnotes.org b/sdnotes.org
index 21345e6d4..ba0050420 100644
--- a/sdnotes.org
+++ b/sdnotes.org
@@ -2436,7 +2436,6 @@ Plotting 2D representation of network cell locations and connections...
*** 380x120x380 (less conns, with subcell)
-
* 16jul20 Matplotlib errors
- http://stackoverflow.com/questions/4130355/python-matplotlib-framework-under-macosx
- fixed by adding 'backend: Agg' to ~/.matplotlib/matplotlibrc
@@ -2717,6 +2716,37 @@ type 1094
- huge leakage with plotSpikeHist
+* 16nov23 Adding support for gap junctions
+** useful links
+http://www.neuron.yale.edu/neuron/static/new_doc/modelspec/programmatic/network/parcon.html?highlight=gap%20junction
+https://senselab.med.yale.edu/ModelDB/ShowModel.cshtml?model=97917
+
+** example from padraig
+if (isCellOnNode("SampleCellGroup", 1)) {
+ a_SampleCellGroup[1].Soma { elecsyn_NetConn_SampleCellGroup_SampleCellGroup_ElectSyn_A[0] = new ElectSyn(0.5) }
+ elecsyn_NetConn_SampleCellGroup_SampleCellGroup_ElectSyn_A[0].weight = 1.0
+ pnm.pc.target_var(&elecsyn_NetConn_SampleCellGroup_SampleCellGroup_ElectSyn_A[0].vgap, 100000000)
+ pnm.pc.source_var(&a_SampleCellGroup[1].Soma.v(0.5), 200000000)
+}
+if (isCellOnNode("SampleCellGroup", 0)) {
+ a_SampleCellGroup[0].Soma { elecsyn_NetConn_SampleCellGroup_SampleCellGroup_ElectSyn_B[0] = new ElectSyn(0.5) }
+ elecsyn_NetConn_SampleCellGroup_SampleCellGroup_ElectSyn_B[0].weight = 1.0
+ pnm.pc.target_var(&elecsyn_NetConn_SampleCellGroup_SampleCellGroup_ElectSyn_B[0].vgap, 200000000)
+ pnm.pc.source_var(&a_SampleCellGroup[0].Soma.v(0.5), 100000000)
+}
+
+** issues
+- bidirectional
+- requires creating synMechs both in presyn and postsyn
+- requires creating conns from pre->post and post->pre
+- need to do at the network.py level so can call createConn func for both cells
+- requires specifying sec and loc of source cell as well as of target cell -- add preSec and preLoc
+
+- create a temporary list with the presyn cells and ids, and create the synMech, target_var and source_var with the
+ appropriate ids after connections have been created.
+- check that it plays well with subcellular distributions
+- check that no duplicates are created
* Netpyne Models/Users
** Github examples folder
@@ -2742,7 +2772,6 @@ Christian G. Fink, PhD
Assistant Professor of Physics and Neuroscience
Ohio Wesleyan University
-
* Related tools
** NEURON
Multiscale simulator
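To summarize the mechanism used throughout this patch: each side of a gap junction publishes its membrane potential under a global id with pc.source_var, points its ElectSyn vgap at the other side's id with pc.target_var, and pc.setup_transfer() (added to preRun above) activates the exchange. A hedged two-cell sketch on a single rank, assuming an ElectSyn point process with weight and vgap variables (compiled from the mod file added in a later patch):

    from neuron import h

    pc = h.ParallelContext()
    somaA, somaB = h.Section(name='somaA'), h.Section(name='somaB')
    gapA, gapB = h.ElectSyn(somaA(0.5)), h.ElectSyn(somaB(0.5))   # needs ElectSyn.mod compiled
    gapA.weight = gapB.weight = 1.0

    ID_A, ID_B = 1, 2                     # arbitrary global ids for the two voltage sources
    somaA.push(); pc.source_var(somaA(0.5)._ref_v, ID_A); h.pop_section()
    somaB.push(); pc.source_var(somaB(0.5)._ref_v, ID_B); h.pop_section()
    pc.target_var(gapA._ref_vgap, ID_B)   # A reads B's voltage
    pc.target_var(gapB._ref_vgap, ID_A)   # B reads A's voltage
    pc.setup_transfer()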
From b334399297fc5449c282f6e421021fc9b7ece845 Mon Sep 17 00:00:00 2001
From: salvadord
Date: Thu, 24 Nov 2016 19:08:05 -0500
Subject: [PATCH 40/54] cleaned sandbox.py
---
examples/sandbox/sandbox.py | 28 ++++------------------------
1 file changed, 4 insertions(+), 24 deletions(-)
diff --git a/examples/sandbox/sandbox.py b/examples/sandbox/sandbox.py
index df012ff58..334d3cc4b 100644
--- a/examples/sandbox/sandbox.py
+++ b/examples/sandbox/sandbox.py
@@ -48,6 +48,7 @@
cellParams.conds = {'cellType': 'PYR'}
netParams.addCellParams('PYR', cellParams)
+# Connections
netParams.connParams['bg->PYR1'] = {
'preConds': {'popLabel': 'background'}, 'postConds': {'popLabel': 'PYR1'}, # background -> PYR
'weight': 0.1, # fixed weight of 0.1
@@ -55,9 +56,9 @@
'delay': 'uniform(1,5)'} # uniformly distributed delays between 1-5ms
netParams.addConnParams('PYR1->PYR2',
- {'preConds': {'popLabel': 'PYR1'}, 'postConds': {'popLabel': 'PYR2'}, # PYR1 -> PYR2
- 'weight': 200.0, # fixed weight of 0.08
- 'synMech': 'esyn', # target NMDA synapse
+ {'preConds': {'popLabel': 'PYR1'}, 'postConds': {'popLabel': 'PYR2'}, # PYR1 -> PYR2 (gap junction)
+ 'weight': 200.0,
+ 'synMech': 'esyn',
'gapJunction': True,
'sec': 'soma',
'loc': 0.5,
@@ -65,20 +66,6 @@
'preLoc': 0.5})
-# if (isCellOnNode("SampleCellGroup", 1)) {
-# a_SampleCellGroup[1].Soma { elecsyn_NetConn_SampleCellGroup_SampleCellGroup_ElectSyn_A[0] = new ElectSyn(0.5) }
-# elecsyn_NetConn_SampleCellGroup_SampleCellGroup_ElectSyn_A[0].weight = 1.0
-# pnm.pc.target_var(&elecsyn_NetConn_SampleCellGroup_SampleCellGroup_ElectSyn_A[0].vgap, 100000000)
-# pnm.pc.source_var(&a_SampleCellGroup[1].Soma.v(0.5), 200000000)
-# }
-# if (isCellOnNode("SampleCellGroup", 0)) {
-# a_SampleCellGroup[0].Soma { elecsyn_NetConn_SampleCellGroup_SampleCellGroup_ElectSyn_B[0] = new ElectSyn(0.5) }
-# elecsyn_NetConn_SampleCellGroup_SampleCellGroup_ElectSyn_B[0].weight = 1.0
-# pnm.pc.target_var(&elecsyn_NetConn_SampleCellGroup_SampleCellGroup_ElectSyn_B[0].vgap, 200000000)
-# pnm.pc.source_var(&a_SampleCellGroup[0].Soma.v(0.5), 100000000)
-# }
-
-
###############################################################################
# SIMULATION PARAMETERS
###############################################################################
@@ -94,8 +81,6 @@
# Recording
simConfig.recordCells = []# [1,2] # which cells to record from
simConfig.recordTraces = {'Vsoma':{'sec':'soma','loc':0.5,'var':'v'}}
-#'AMPA_i': {'synMech':'homSyn', 'var':'i'}}
-#'AMPA_i': {'synMech':'homSyn', 'sec': 'dend', 'loc': 0.775, 'var':'i'}}
simConfig.recordStim = True # record spikes of cell stims
simConfig.recordStep = 0.1 # Step size in ms to save data (eg. V traces, LFP, etc)
@@ -104,11 +89,6 @@
simConfig.filename = 'mpiHHTut' # Set file output name
simConfig.saveFileStep = 1000 # step size in ms to save data to disk
simConfig.savePickle = 0 # Whether or not to write spikes etc. to a .mat file
-simConfig.saveJson = 0 # Whether or not to write spikes etc. to a .mat file
-simConfig.saveMat = 0 # Whether or not to write spikes etc. to a .mat file
-simConfig.saveDpk = 0 # save to a .dpk pickled file
-simConfig.saveHDF5 = 0
-simConfig.saveCSV = 0
# # Analysis and plotting
simConfig.addAnalysis('plotRaster', {'spikeHist': 'subplot'})
From efcc2b610bcdc9be257f1b2610f11284584699e6 Mon Sep 17 00:00:00 2001
From: salvadord
Date: Mon, 28 Nov 2016 16:50:12 -0500
Subject: [PATCH 41/54] temporary fix so subcellular distributions can include
netstims
---
netpyne/network.py | 6 ++++--
1 file changed, 4 insertions(+), 2 deletions(-)
diff --git a/netpyne/network.py b/netpyne/network.py
index ab1495437..9b994637e 100644
--- a/netpyne/network.py
+++ b/netpyne/network.py
@@ -302,6 +302,8 @@ def subcellularConn(self, allCellTags, allPopTags):
if postCellGid in self.lid2gid:
postCell = self.cells[self.gid2lid[postCellGid]]
allConns = [conn for conn in postCell.conns if conn['preGid'] in preCellsTags]
+ if 'NetStim' in [x['cellModel'] for x in preCellsTags.values()]: # temporary fix to include netstim conns
+ allConns.extend([conn for conn in postCell.conns if conn['preGid'] == 'NetStim'])
# group synMechs so they are not distributed separately
if subConnParam.get('groupSynMechs', None):
@@ -327,7 +329,7 @@ def subcellularConn(self, allCellTags, allPopTags):
if subConnParam.get('density', None) == 'uniform':
# calculate new syn positions
newSecs, newLocs = postCell._distributeSynsUniformly(secList=secList, numSyns=len(conns))
-
+
# 2D map and 1D map (radial)
elif isinstance(subConnParam.get('density', None), dict) and subConnParam['density']['type'] in ['2Dmap', '1Dmap']:
@@ -526,7 +528,7 @@ def _findPrePostCellsCondition(self, allCellTags, allPopTags, preConds, postCond
if not 'start' in prePop: prePop['start'] = 1 # add default start time
if not 'number' in prePop: prePop['number'] = 1e9 # add default number
preCellsTags = prePops
-
+
if preCellsTags: # only check post if there are pre
postCellsTags = allCellTags
for condKey,condValue in postConds.iteritems(): # Find subset of cells that match postsyn criteria
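Note: the fix above lets subcellular redistribution rules pick up connections whose presynaptic source is a NetStim. Below is a minimal sketch of such a rule, assuming a netParams.subConnParams dictionary (analogous to connParams) read by network.subcellularConn(); the rule label, population names and mechanism names are hypothetical.

from netpyne import specs
netParams = specs.NetParams()

# Hypothetical rule: redistribute background (NetStim-driven) synapses uniformly
# over the dendritic sections of PYR cells; all labels are illustrative only.
netParams.subConnParams['bg->PYR_dend'] = {
    'preConds': {'popLabel': 'background'},   # NetStim population as the source
    'postConds': {'cellType': 'PYR'},
    'sec': ['dend'],                          # candidate section list
    'groupSynMechs': ['AMPA', 'NMDA'],        # keep co-located mechanisms together
    'density': 'uniform'}                     # density option handled in subcellularConn()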
From df66ebfe0762a7be3e308126ba3c4e2e3085a7b8 Mon Sep 17 00:00:00 2001
From: Padraig Gleeson
Date: Tue, 29 Nov 2016 16:59:21 +0000
Subject: [PATCH 42/54] Adding mod file required for gap junction example
Works fine here @salvadord after adding this file to the sandbox
directory
---
examples/sandbox/ElectSyn.mod | 77 +++++++++++++++++++++++++++++++++++
1 file changed, 77 insertions(+)
create mode 100644 examples/sandbox/ElectSyn.mod
diff --git a/examples/sandbox/ElectSyn.mod b/examples/sandbox/ElectSyn.mod
new file mode 100644
index 000000000..86db62b1c
--- /dev/null
+++ b/examples/sandbox/ElectSyn.mod
@@ -0,0 +1,77 @@
+COMMENT
+
+ **************************************************
+ File generated by: neuroConstruct v1.7.1
+ **************************************************
+
+ This file holds the implementation in NEURON of the Cell Mechanism:
+ ElectSyn (Type: Gap junction, Model: Template based ChannelML file)
+
+ with parameters:
+ /channelml/@units = Physiological Units
+ /channelml/notes = ChannelML file describing a single synaptic mechanism
+ /channelml/synapse_type/@name = ElectSyn
+ /channelml/synapse_type/status/@value = stable
+ /channelml/synapse_type/status/contributor/name = Padraig Gleeson
+ /channelml/synapse_type/notes = Description of an electrical synapse at a gap junction
+ /channelml/synapse_type/electrical_syn/@conductance = 5e-8
+
+// File from which this was generated: /home/padraig/nC_projects/Gaps/cellMechanisms/ElectSyn/ElectSyn.xml
+
+// XSL file with mapping to simulator: /home/padraig/nC_projects/Gaps/cellMechanisms/ElectSyn/ChannelML_v1.8.1_NEURONmod.xsl
+
+ENDCOMMENT
+
+
+? This is a NEURON mod file generated from a ChannelML file
+
+? Unit system of original ChannelML file: Physiological Units
+
+COMMENT
+ ChannelML file describing a single synaptic mechanism
+ENDCOMMENT
+
+? Creating synaptic mechanism for an electrical synapse
+
+
+TITLE Channel: ElectSyn
+
+COMMENT
+ Description of an electrical synapse at a gap junction
+ENDCOMMENT
+
+
+UNITS {
+ (nA) = (nanoamp)
+ (mV) = (millivolt)
+ (uS) = (microsiemens)
+}
+
+
+NEURON {
+ POINT_PROCESS ElectSyn
+ NONSPECIFIC_CURRENT i
+ RANGE g, i
+ RANGE weight
+
+ RANGE vgap : Using a RANGE variable as opposed to POINTER for parallel mode
+
+
+}
+
+PARAMETER {
+ v (millivolt)
+ vgap (millivolt)
+ g = 0.000049999999999999996 (microsiemens)
+ weight = 1
+
+}
+
+
+ASSIGNED {
+ i (nanoamp)
+}
+
+BREAKPOINT {
+ i = weight * g * (v - vgap)
+}
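Note: a quick numerical check of the BREAKPOINT equation above, i = weight * g * (v - vgap); g and weight come from the PARAMETER block, while the two membrane potentials are hypothetical values.

# Sanity check of the ElectSyn gap-junction current equation
g = 5e-5                 # uS, conductance from the PARAMETER block
weight = 1.0             # scaling factor from the PARAMETER block
v, vgap = -60.0, -50.0   # mV on the local and coupled compartments (hypothetical)
i = weight * g * (v - vgap)
print(i)                 # -0.0005 nA: inward current that pulls the lower-potential side up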
From 3c67735fba838007dcd0a7a5f2eaf766d217aba9 Mon Sep 17 00:00:00 2001
From: salvadord
Date: Wed, 30 Nov 2016 16:58:27 -0500
Subject: [PATCH 43/54] fixed some bugs in tutorial.rst suggested by apdavison
---
doc/source/tutorial.rst | 6 ++++--
1 file changed, 4 insertions(+), 2 deletions(-)
diff --git a/doc/source/tutorial.rst b/doc/source/tutorial.rst
index c336c2146..f27f28024 100644
--- a/doc/source/tutorial.rst
+++ b/doc/source/tutorial.rst
@@ -110,7 +110,7 @@ The image below illustrates this process:
:width: 50%
:align: center
-We will now create a new model file (call it ``tut2.py``) where we will specify from scratch all the network parameters. To create the structures that will hold the network parameters add the following code:
+We will now create a new model file (call it ``tut2.py``) where we will specify from scratch all the network parameters. To create the structures that will hold the network parameters add the following code::
from netpyne import specs, sim
@@ -129,7 +129,9 @@ First, we need to create some populations for our network, by adding items to th
* ``cellModel`` - an attribute or tag that will be assigned to cells in this population, can later be used to set specific cell model implementation for cells with this tag. Eg. 'HH' (standard Hodgkin-Huxley type cell model) or 'Izhi2007b' (Izhikevich 2007 point neuron model). Cell models can be defined by the user or imported.
-We will start by creating 2 populations labeled ``S`` (sensory) and ``M`` (motor), with ``20`` cells each, of type ``PYR`` (pyramidal), and using ``HH`` cell model (standard compartmental Hodgkin-Huxley type cell).
+We will start by creating 2 populations labeled ``S`` (sensory) and ``M`` (motor), with ``20`` cells each, of type ``PYR`` (pyramidal), and using ``HH`` cell model (standard compartmental Hodgkin-Huxley type cell).
+
+::
## Population parameters
netParams.popParams['S'] = {'cellType': 'PYR', 'numCells': 20, 'cellModel': 'HH'}
From 94c62cea3730de87bd599f253d0309cf3f6932d1 Mon Sep 17 00:00:00 2001
From: salvadord
Date: Wed, 30 Nov 2016 17:55:14 -0500
Subject: [PATCH 44/54] Added option to fix soma depth for subcellular
distributions based on 1d and 2d maps
---
CHANGES.md | 4 ++++
netpyne/network.py | 8 +++++---
2 files changed, 9 insertions(+), 3 deletions(-)
diff --git a/CHANGES.md b/CHANGES.md
index be108e22a..1ffde8391 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,5 +1,9 @@
# Version 0.6.4
+- Added option to fix soma depth for subcellular distributions based on 1d and 2d maps
+
+- Improved import of multicompartmental cells from NeuroML2
+
- Added support for gap junction (electrical synapse) connections
- Added function plotRatePSD to plot firing rate power spectral density
diff --git a/netpyne/network.py b/netpyne/network.py
index 9b994637e..ced092c6c 100644
--- a/netpyne/network.py
+++ b/netpyne/network.py
@@ -335,9 +335,11 @@ def subcellularConn(self, allCellTags, allPopTags):
gridY = subConnParam['density']['gridY']
gridSigma = subConnParam['density']['gridValues']
-
- if subConnParam['density']['type'] == '2Dmap': # 2D
- somaX, _, _ = self._posFromLoc(postCell.secs['soma']['hSec'], 0.5) # move method to Cell!
+ somaX, somaY, _ = self._posFromLoc(postCell.secs['soma']['hSec'], 0.5) # get cell pos move method to Cell!
+ if subConnParam['density'].get('fixedSomaY', None): # is fixed cell soma y, adjust y grid accordingly
+ fixedSomaY = subConnParam['density'].get('fixedSomaY')
+ gridY = [y+(somaY-fixedSomaY) for y in gridY] # adjust grid so cell soma is at fixedSomaY
+ if subConnParam['density']['type'] == '2Dmap': # 2D
gridX = [x - somaX for x in subConnParam['density']['gridX']] # center x at cell soma
segNumSyn = self._interpolateSegmentSigma(postCell, secList, gridX, gridY, gridSigma) # move method to Cell!
elif subConnParam['density']['type'] == '1Dmap': # 1D
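Note: a minimal sketch of a rule using the new fixedSomaY option. The density keys ('type', 'gridY', 'gridValues', 'fixedSomaY') are the ones read in the diff above; the rule label, conditions and numeric values are hypothetical.

from netpyne import specs
netParams = specs.NetParams()

# Hypothetical 1D (depth-based) synapse density map with a fixed soma depth.
netParams.subConnParams['PT->PYR_1Dmap'] = {
    'preConds': {'popLabel': 'PT'},
    'postConds': {'cellType': 'PYR'},
    'sec': ['apic_trunk', 'apic_tuft'],
    'density': {
        'type': '1Dmap',
        'gridY': [0, -100, -200, -300, -400],     # depths (um) at which density is given
        'gridValues': [0.0, 0.2, 0.5, 0.2, 0.1],  # relative synapse density at each depth
        'fixedSomaY': -350}}                      # assume every soma sits at this depth, so the
                                                  # grid is shifted per cell (see diff above)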
From 93cd57538057c67887731e421b74f6fdf927349b Mon Sep 17 00:00:00 2001
From: salvadord
Date: Thu, 1 Dec 2016 17:53:05 -0500
Subject: [PATCH 45/54] Added analysis.nTE() function to calculate normalized
transfer entropy (issue #42)
---
CHANGES.md | 2 +
netpyne/analysis.py | 150 ++++++++-
netpyne/support/__init__.py | 0
netpyne/support/nte.hoc | 98 ++++++
netpyne/support/nte.mod | 613 ++++++++++++++++++++++++++++++++++++
netpyne/utils.py | 2 +-
6 files changed, 861 insertions(+), 4 deletions(-)
create mode 100644 netpyne/support/__init__.py
create mode 100644 netpyne/support/nte.hoc
create mode 100644 netpyne/support/nte.mod
diff --git a/CHANGES.md b/CHANGES.md
index 1ffde8391..22c30bb9e 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,5 +1,7 @@
# Version 0.6.4
+- Added analysis.nTE() function to calculate normalized transfer entropy (issue #42)
+
- Added option to fix soma depth for subcellular distributions based on 1d and 2d maps
- Improved import of multicompartmental cells from NeuroML2
diff --git a/netpyne/analysis.py b/netpyne/analysis.py
index 408f18779..0ddca6b4c 100644
--- a/netpyne/analysis.py
+++ b/netpyne/analysis.py
@@ -936,9 +936,6 @@ def plotShape (showSyns = True, include = [], style = '.', siz=10, figSize = (10
return fig
-
-
-
######################################################################################################################################################
## Plot LFP (time-resolved or power spectra)
######################################################################################################################################################
@@ -1338,6 +1335,153 @@ def plot2Dnet (include = ['allCells'], figSize = (12,12), showConns = True, save
return fig
+######################################################################################################################################################
+## Calculate normalized transfer entropy
+######################################################################################################################################################
+def nTE(cells1 = [], cells2 = [], spks1 = None, spks2 = None, trange = [0,1000], binSize = 20, numShuffle = 30):
+ '''
+ Calculate normalized transfer entropy
+ - cells1 (['all',|'allCells','allNetStims',|,120,|,'E1'|,('L2', 56)|,('L5',[4,5,6])]): Subset of cells from which to obtain spike train 1 (default: [])
+ - cells2 (['all',|'allCells','allNetStims',|,120,|,'E1'|,('L2', 56)|,('L5',[4,5,6])]): Subset of cells from which to obtain spike train 2 (default: [])
+ subset of cells in network
+ - spks1 (list): Spike train 1; list of spike times; if omitted then obtains spikes from cells1 (default: None)
+ - spks2 (list): Spike train 2; list of spike times; if omitted then obtains spikes from cells2 (default: None)
+ - trange ([min, max]): Range of time to calculate nTE in ms (default: [0,1000])
+ - binSize (int): Bin size used to convert spike times into histogram
+ - numShuffle (int): Number of times to shuffle spike train 1 to calculate TEshuffled; note: nTE = (TE - TEShuffled)/H(X2F|X2P)
+
+ - Returns nTE (float): normalized transfer entropy
+ '''
+
+ from neuron import h
+ import numpy as np
+ import netpyne
+ import os
+
+ root = os.path.dirname(netpyne.__file__)
+
+ if 'nte' not in dir(h):
+ try:
+ print ' Warning: support/nte.mod not compiled; attempting to compile from %s via "nrnivmodl support"'%(root)
+ os.system('cd ' + root + '; nrnivmodl support')
+ from neuron import load_mechanisms
+ load_mechanisms(root)
+ print ' Compilation of support folder mod files successful'
+ except:
+ print ' Error compiling support folder mod files'
+ return
+
+ h.load_file(root+'/support/nte.hoc') # nTE code (also requires support/net.mod)
+
+ if not spks1: # if doesnt contain a list of spk times, obtain from cells specified
+ cells, cellGids, netStimPops = getCellsInclude(cells1)
+ numNetStims = 0
+
+ # Select cells to include
+ if len(cellGids) > 0:
+ try:
+ spkts = [spkt for spkgid,spkt in zip(sim.allSimData['spkid'],sim.allSimData['spkt']) if spkgid in cellGids]
+ except:
+ spkts = []
+ else:
+ spkts = []
+
+ # Add NetStim spikes
+ spkts = list(spkts)
+ numNetStims = 0
+ for netStimPop in netStimPops:
+ if 'stims' in sim.allSimData:
+ cellStims = [cellStim for cell,cellStim in sim.allSimData['stims'].iteritems() if netStimPop in cellStim]
+ if len(cellStims) > 0:
+ spktsNew = [spkt for cellStim in cellStims for spkt in cellStim[netStimPop] ]
+ spkts.extend(spktsNew)
+ numNetStims += len(cellStims)
+
+ spks1 = list(spkts)
+
+ if not spks2: # if doesnt contain a list of spk times, obtain from cells specified
+ cells, cellGids, netStimPops = getCellsInclude(cells2)
+ numNetStims = 0
+
+ # Select cells to include
+ if len(cellGids) > 0:
+ try:
+ spkts = [spkt for spkgid,spkt in zip(sim.allSimData['spkid'],sim.allSimData['spkt']) if spkgid in cellGids]
+ except:
+ spkts = []
+ else:
+ spkts = []
+
+ # Add NetStim spikes
+ spkts = list(spkts)
+ numNetStims = 0
+ for netStimPop in netStimPops:
+ if 'stims' in sim.allSimData:
+ cellStims = [cellStim for cell,cellStim in sim.allSimData['stims'].iteritems() if netStimPop in cellStim]
+ if len(cellStims) > 0:
+ spktsNew = [spkt for cellStim in cellStims for spkt in cellStim[netStimPop] ]
+ spkts.extend(spktsNew)
+ numNetStims += len(cellStims)
+
+ spks2 = list(spkts)
+
+
+ inputVec = h.Vector()
+ outputVec = h.Vector()
+ histo1 = histogram(spks1, bins = np.arange(trange[0], trange[1], binSize))
+ histoCount1 = histo1[0]
+ histo2 = histogram(spks2, bins = np.arange(trange[0], trange[1], binSize))
+ histoCount2 = histo2[0]
+
+ inputVec.from_python(histoCount1)
+ outputVec.from_python(histoCount2)
+ out = h.normte(inputVec, outputVec, numShuffle)
+ TE, H, nTE, _, _ = out.to_python()
+ return nTE
+
+
+######################################################################################################################################################
+## Calculate granger causality
+######################################################################################################################################################
+def granger(spk1, spk2, binSize=5, trange=[0,2000]):
+ """
+ Typical usage is as follows:
+ from bsmart import pwcausalr
+ F,pp,cohe,Fx2y,Fy2x,Fxy=pwcausalr(x,ntrls,npts,p,fs,freq);
+
+ Outputs:
+ F is the frequency vector for the remaining quantities
+ pp is the spectral power
+ cohe is the coherence
+ Fx2y is the causality of channel X to channel Y
+ Fy2x is the causality of channel Y to channel X
+ Fxy is the "instantaneous" causality (cohe-Fx2y-Fy2x I think)
+ Inputs:
+ x is the data for at least two channels, e.g. a 2x8000 array consisting of two LFP time series
+ ntrls is the number of trials (whatever that means -- just leave it at 1)
+ npts is the number of points in the data (in this example, 8000)
+ p is the order of the polynomial fit (e.g. 10 for a smooth fit, 20 for a less smooth fit)
+ fs is the sampling rate (e.g. 200 Hz)
+ freq is the maximum frequency to calculate (e.g. fs/2=100, which will return 0:100 Hz)
+ """
+
+ from pylab import histogram, plot, show
+ import numpy as np
+ from support.bsmart import pwcausalr
+
+ histo1 = histogram(spk1, bins = np.arange(trange[0], trange[1], binSize))
+ histoCount1 = histo1[0]
+
+ histo2 = histogram(spk2, bins = np.arange(trange[0], trange[1], binSize))
+ histoCount2 = histo2[0]
+
+ fs = 1000/binSize
+ F,pp,cohe,Fx2y,Fy2x,Fxy = pwcausalr(np.array([histoCount1, histoCount2]), 1, len(histoCount1), 10, fs, fs/2)
+
+ return F, Fx2y[0],Fy2x[0], Fxy[0]
+
+
+
######################################################################################################################################################
## Plot weight changes
######################################################################################################################################################
diff --git a/netpyne/support/__init__.py b/netpyne/support/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/netpyne/support/nte.hoc b/netpyne/support/nte.hoc
new file mode 100644
index 000000000..7c498a4d5
--- /dev/null
+++ b/netpyne/support/nte.hoc
@@ -0,0 +1,98 @@
+// $Id: infot.hoc,v 1.43 2009/12/04 01:25:55 samn Exp $
+
+print "Loading infot.hoc..."
+
+
+install_nte()
+verbose_infot = 0
+
+
+{ MSONUM=100 MSOSIZ=100 msomax=0 msoptr=0 objref mso[MSONUM] }
+
+
+//** vrsz(VEC or NUM,VEC1,VEC2...,VECn or NUM) -- vector resize -- to size of first arg
+// vrsz(VEC or NUM,VEC1,NUM,VEC2,etc) -- vector resize -- to size of first arg (vec or num)
+// or prior NUM
+// optional final number is fill
+func vrsz () { local i,sz,max,fill,flag,rsz0
+ max=numarg()
+ flag=rsz0=0
+ if (argtype(1)==1) {
+ if (isobj($o1,"Vector")) sz=$o1.size else if (isobj($o1,"List")) sz=$o1.count
+ if (argtype(2)==0) {printf("vrsz(vec,num) backwards ERR\n") return -1}
+ } else sz=$1
+ if (argtype(max)==0) {i=max max-=1 fill=$i flag=1}
+ if (argtype(max)==2) {max-=1 rsz0=1} // string means resize(0)
+ if (sz<0) sz+=$o2.size // reduce size
+ if (sz<0) {printf("vrsz ERR: can't resize %s to %d\n",$o2,sz) return sz}
+ for i=2, max {
+ if (argtype(i)==0) sz=$i else {
+ $oi.resize(sz)
+ if (rsz0) $oi.resize(0) else if (flag) $oi.fill(fill)
+ }
+ }
+ return sz
+}
+
+func allocvecs () { local i,ii,llen,sz,newv,aflg,lflg,na localobj o
+ if (numarg()==0) {
+ print "p=allocvecs(#) or p=allocvecs(v1,v2,...), access with mso[p], mso[p+1]..." return 0 }
+ sz=MSOSIZ na=numarg()
+ lflg=0
+ if (argtype(1)==0) {
+ aflg=0 newv=$1
+ if (na>=2) if (argtype(2)==0) sz=$2 else {lflg=1 o=$o2} // append to list in arg2
+ if (na>=3) if (argtype(3)==0) sz=$3 else {lflg=1 o=$o3}
+ if (lflg) o.remove_all
+ } else {
+ aflg=1
+ if (argtype(na)==0) {
+ i=na sz=$i newv=i-1
+ } else newv=na
+ }
+ llen = msoptr
+ for ii=msomax,msoptr+newv-1 { // may need new vectors
+ if (ii>=MSONUM) { print "alloc ERROR: MSONUM exceeded." return 0 }
+ mso[ii] = new Vector(sz)
+ }
+ for ii=0,newv-1 {
+ mso[msoptr].resize(sz)
+ mso[msoptr].resize(0)
+ msoptr = msoptr+1
+ }
+  if (msomax<msoptr) msomax=msoptr
+  return llen
+}
+
+//** normte($o1,$o2,$3) -- transfer entropy between two time-binned spike trains
+//vo.x(0)=transfer entropy from $o1->$o2
+//vo.x(1)=H($o2Future|$o2Past)
+//vo.x(2)=normalized transfer entropy in 0,1 range
+//$3==number of shuffles
+//$o1,$o2 should both have same size and non-negative values. this func is meant for time-binned spike train data
+obfunc normte () { local a localobj ve,vo
+ a=allocvecs(ve) vo=new Vector()
+ nshuf=0
+ nshuf=$3 vrsz(3+nshuf,vo)
+ te=$o1.tentropspks($o2,vo,nshuf)
+ if(verbose_infot>2) vo.printf
+ if(vo.x(1)<=0 && verbose_infot>0){printf("WARNING H(X2F|X2P)==%g<=0\n",vo.x(1)) vo.x(1)=1 }
+ if (nshuf>0) {
+ ve.copy(vo,3,vo.size-1)
+ vo.resize(4)
+ if (ve.mean!=vo.x[2]) printf("normte ERRA\n")
+ vo.append(ve.stdev)
+ }
+ vo.x[2]=te
+ dealloc(a)
+ return vo
+}
diff --git a/netpyne/support/nte.mod b/netpyne/support/nte.mod
new file mode 100644
index 000000000..2fa050863
--- /dev/null
+++ b/netpyne/support/nte.mod
@@ -0,0 +1,613 @@
+: $Id: infot.mod,v 1.161 2010/08/19 20:02:27 samn Exp $
+
+NEURON {
+ SUFFIX nte
+ GLOBAL installed,beg,end
+ GLOBAL verbose
+ GLOBAL MINEXP,MAXEXP,MINLOG2,MAXLOG2,count,cutoff,binmin,binmax
+}
+
+PARAMETER {
+ installed = 0
+ verbose = 0.5
+ useslice = 0 : whether to look @ beg,end variables for vector slices
+ beg = 0 : for doing vector slices from [beg,end)
+ end = 0
+ MINEXP=-20
+ MAXEXP=20
+ MINLOG2=0.001
+ MAXLOG2=32
+ count=0
+ cutoff=0.2 : ignore hist pairs that are not similar
+ binmin=0 : ignore hists not filling out this % of the entries
+ binmax=0 : ignore hists filling out more than this % of the entries
+ KTProb=0 : use Krichevsky-Trofimov probability estimates p(n)=(n+.5)/(N+.5*jmax)
+}
+
+VERBATIM
+// misc.h copied here
+// $Id: misc.h,v 1.38 2011/11/02 15:26:48 billl Exp $
+
+#include
+#include
+#include /* contains LONG_MAX */
+#include
+#include
+#include
+#include
+
+#if !defined(t)
+ #define _pval pval
+#endif
+
+typedef struct LISTVEC {
+ int isz;
+ Object* pL;
+ double** pv;
+ unsigned int* plen;
+ unsigned int* pbuflen;
+} ListVec;
+
+typedef struct BVEC {
+ int size;
+ int bufsize;
+ short *x;
+ Object* o;
+} bvec;
+
+#define BYTEHEADER int _II__; char *_IN__; char _OUT__[16]; int BYTESWAP_FLAG=0;
+#define BYTESWAP(_X__,_TYPE__) \
+ if (BYTESWAP_FLAG == 1) { \
+ _IN__ = (char *) &(_X__); \
+ for (_II__=0;_II__ (Y) ? (X) : (Y))
+
+//square root of 2 * PI
+#define SQRT2PI 2.5066282746310002416
+//ln(2), base e log of 2
+#define LG2 0.69314718055994530941723212145818
+#define VRRY 200
+#define ISVEC(_OB__) (strncmp(hoc_object_name(_OB__),"Vector",6)==0)
+#define dmaxuint 4294967295. // for 32 bits
+
+// Andre Fentons cast designations
+typedef unsigned char ui1; /* one byte unsigned integer */
+typedef char si1; /* one byte signed integer */
+typedef unsigned short ui2; /* two byte unsigned integer */
+typedef short si2; /* two byte signed integer */
+typedef unsigned int ui4; /* four byte unsigned integer */
+typedef int si4; /* four byte signed integer */
+typedef float sf4; /* four byte signed floating point number */
+typedef double sf8; /* eight byte signed floating point number */
+
+extern double ERR,GET,SET,OK,NOP,ALL,NEG,POS,CHK,NOZ,GTH,GTE,LTH,LTE,EQU;
+extern double EQV,EQW,EQX,NEQ,SEQ,RXP,IBE,EBI,IBI,EBE;
+
+extern double *vector_newsize();
+extern unsigned int dcrsz;
+extern double *dcr;
+extern double *dcrset(int);
+extern unsigned int scrsz;
+extern unsigned int *scr;
+extern unsigned int *scrset(int);
+extern unsigned int iscrsz;
+extern int *iscr;
+extern int *iscrset(int);
+extern double BVBASE;
+extern double* hoc_pgetarg();
+extern void hoc_notify_iv();
+extern double hoc_call_func(Symbol*, int narg);
+extern FILE* hoc_obj_file_arg(int narg);
+extern Object** hoc_objgetarg();
+char *gargstr();
+char** hoc_pgargstr();
+extern void vector_resize();
+extern int vector_instance_px();
+extern void* vector_arg();
+extern double* vector_vec();
+extern int vector_buffer_size(void*);
+extern double hoc_epsilon;
+extern int stoprun;
+extern void set_seed();
+extern void dshuffle(double* x,int nx);
+extern void mcell_ran4_init(u_int32_t);
+extern double mcell_ran4(u_int32_t *idx1, double *x, unsigned int n, double range);
+extern int nrn_mlh_gsort();
+extern int ivoc_list_count(Object*);
+extern Object* ivoc_list_item(Object*, int);
+extern int list_vector_px2();
+extern int hoc_is_double_arg(int narg);
+extern int hoc_is_str_arg(int narg);
+extern int hoc_is_object_arg(int narg);
+extern int hoc_is_pdouble_arg(int narg);
+extern Symbol *hoc_get_symbol(char *);
+extern Symbol *hoc_lookup(const char*);
+extern Point_process* ob2pntproc(Object*);
+
+extern char* hoc_object_name(Object*);
+extern int cmpdfn();
+extern int openvec(int, double **);
+int list_vector_px();
+double *list_vector_resize();
+static void hxe() { hoc_execerror("",0); }
+extern void FreeListVec(ListVec** pp);
+extern ListVec* AllocListVec(Object* p);
+extern ListVec* AllocILV(Object*, int, double *);
+void FillListVec(ListVec* p,double dval);
+void ListVecResize(ListVec* p,int newsz);
+extern short *nrn_artcell_qindex_;
+extern double nrn_event_queue_stats(double*);
+extern void clear_event_queue();
+
+static double sc[6];
+static FILE* testout;
+
+//* in vecst.mod
+extern int** getint2D(int rows,int cols);
+extern void freeint2D(int*** ppp,int rows);
+extern double** getdouble2D(int rows,int cols);
+extern void freedouble2D(double*** ppp,int rows);
+extern double ismono1 (double *x, int n, int flag);
+
+//* in stats.mod
+double kcorfast(double* input1, double* input2, double* i1d , double* i2d,int n,double* ps);
+double Rktau (double* x, double* y, int n); // R version
+double kcorfast (double* input1, double* input2, double* i1d , double* i2d,int n,double* ps);
+
+// end of misc.h
+
+
+// from vecst.mod: Maintain parallel int vector to avoid slowness of repeated casts
+int cmpdfn (double a, double b) {return ((a)<=(b))?(((a) == (b))?0:-1):1;}
+static unsigned int bufsz=0;
+unsigned int scrsz=0;
+unsigned int *scr=0x0;
+unsigned int dcrsz=0;
+double *dcr=0x0;
+
+int *iscr=0x0;
+unsigned int iscrsz=0;
+
+int *iscrset (int nx) {
+ if (nx>iscrsz) {
+ iscrsz=nx+10000;
+ if (iscrsz>0) { iscr=(int *)realloc((void*)iscr,(size_t)iscrsz*sizeof(int));
+ } else { iscr=(int *)ecalloc(iscrsz, sizeof(int)); }
+ }
+ return iscr;
+}
+
+// end of from vecst.mod
+
+
+// from stats.mod
+static u_int32_t ilow=0;
+static u_int32_t ihigh=0;
+
+//shuffle array of unsigned ints
+void ishuffle(int* x,int nx) {
+ int n,k,temp; double y[1];
+ for (n=nx;n>1;) {
+ mcell_ran4(&ihigh, y, 1, n);
+ n--;
+ k=(int)y[0]; // random int(n) // 0 <= k < n.
+ temp = x[n];
+ x[n] = x[k];
+ x[k] = temp;
+ }
+}
+
+// end of stats.mod
+
+static const double* ITsortdata = NULL; /* used in the quicksort algorithm */
+static double tetrospks2(), pdfpr(), tetrospks3();
+static int dbxi[10];
+
+
+double log2d ( double d ) {
+ return log(d) / LG2;
+}
+
+// for debugging -- print out a pdf
+static double pdfpr (double* pdf,int szp,int dim, char* name) {
+ double x,ds; int i,j,k,l,m,cnt,*nonzero;
+ ds=0.;
+ printf("Contents of PDF %s\n",name);
+ if (dim>2) { // may also use for higher dims if ever both with these
+ nonzero=(int *)calloc(szp,sizeof(int));
+ for(k=0,cnt=0;k0.) cnt++;
+ if (cnt>0) nonzero[k]=1; // will need to print this slice
+ }
+ }
+ if (dim==1) {
+ for(m=0;m X2, values in vecs must be discrete & non-negative
+// output vec will store 0) transfer entropy, 1) H(X2F|X2P) , X2's entropy conditioned on its past
+// can be used to calculate the normalized transfer entropy (NTE), which is
+// (TE - TEShuffled)/H(X2F|X2P) and is in range 0,1 inclusive
+// when X3spikecounts is present, calculated transfer entropy
+// of X1spikecounts,X2spikecounts onto X3spikecounts
+static double tentropspks (void* vv) {
+ double *X1,*X2,*XO,*X3; int szX1,szX2,szXO,shuf,szX3;
+ szX1 = vector_instance_px(vv,&X1);
+ if((szX2=vector_arg_px(1,&X2))!=szX1) {
+ printf("tentropspks ERRA: X1,X2 must have same size (%d,%d)\n",szX1,szX2); return -1.0; }
+ szXO=ifarg(2)?vector_arg_px(2,&XO):0;
+ shuf=ifarg(3)?((int)*getarg(3)):0;
+ szX3=ifarg(4)?vector_arg_px(4,&X3):0;
+ if(szX3) tetrospks3(X1,X2,X3,XO,szX1,szXO,shuf);
+ else return tetrospks2(X1,X2,XO,szX1,szXO,shuf);
+}
+
+//** entropxfgxpd - get conditional entropy of X future given its past: H(XF|XP)
+// H(XF|XP)=-sum(p(XF,XP)*log(p(XF|XP)))=-sum(p(XF,XP)*log(p(XF,XP)/p(XP)))
+// XF = future value of X, XP = past value of X
+double entropxfgxpd (double* pXP, double* pXFXP,int minv,int maxv,int szp) {
+ static double tmp[4];
+ int k,l;
+ //*** normalize on unshuffled with H(X2F|X2P); NB only X1 is being shuffled anyway:
+ for(tmp[0]=0.,k=minv;k<=maxv;k++) for(l=minv;l<=maxv;l++) {
+ tmp[1]=pXP[l]; tmp[2]=pXFXP[k*szp+l];
+ if(tmp[1]>0. && tmp[2]>0.) { tmp[3] = tmp[2]/tmp[1];
+ if (usetable && tmp[3]>=MINLOG2 && tmp[3]<=MAXLOG2) {
+ tmp[0] -=tmp[2]*_n_LOG2(tmp[3]);
+ } else { tmp[0] -=tmp[2]* log2d(tmp[3]); if (usetable&&verbose>0.4) {
+ printf("WARNA:%g outside of [%g,%g] TABLE\n",tmp[3],MINLOG2,MAXLOG2); }}}}
+ return tmp[0];
+}
+
+//** entropxfgxp - get conditional entropy of X future given its past: H(XF|XP)
+// Vector has elements of X
+static double entropxfgxp (void* vv) {
+ double *x,*pXP,*pXFXP,dret;
+ int sz,minv,maxv,cnt,i,j,szp,*X;
+ sz = vector_instance_px(vv,&x);
+ cnt=0;
+ X=scrset(sz);
+ minv=1e9; maxv=-1e9;
+ for (i=0;i0) cnt++;
+ if (X[i]>maxv) maxv=X[i]; if (X[i]= 0:%d\n",minv);hxe();}
+ szp = maxv + 1;
+ pXFXP = (double*) calloc(szp*szp,sizeof(double));
+ pXP = (double*) calloc(szp,sizeof(double));
+ for(i=1;i2) pdfpr(pXP,szp,1,"pXP");
+ for(i=0;i3) pdfpr(pXFXP,szp,2,"pXFXP");
+ dret = entropxfgxpd(pXP,pXFXP,minv,maxv,szp);
+ free(pXP); free(pXFXP);
+ return dret;
+}
+
+//** entropx2fgx2px1p - get conditional entropy of X2 future given its past and X1's past:H(X2F|X2P,X1P)
+// H(X2F|X2P,X1P) = -sum( p(X2F,X2P,X1P) * log( p(X2F,X2P,X1P) / p(X2P,X1P) ) )
+double entropx2fgx2px1pd (double* pX2FX2PX1P, double* pX2PX1P, int minv1, int maxv1, int minv2, int maxv2, int szp) {
+ static double tmp[4];
+ double ent = 0.0;
+ int l,k,m;
+ for(l=minv2;l<=maxv2;l++) for(k=minv2;k<=maxv2;k++) for(m=minv1;m<=maxv1;m++) {
+ tmp[0]=pX2FX2PX1P[k*szp*szp+l*szp+m]; tmp[1]=pX2PX1P[l*szp+m];
+ if (tmp[0]>1e-9 && tmp[1]>1e-9) {
+ tmp[2] = tmp[0] / tmp[1];
+ if (usetable && tmp[2]>=MINLOG2 && tmp[2]<=MAXLOG2) {
+ ent -= tmp[0]*_n_LOG2(tmp[2]);
+ } else { ent -= tmp[0] * log2d(tmp[2]);
+ if (usetable&&verbose>0.4) {
+ printf("WARNB:%g outside of [%g,%g] TABLE (",tmp[2],MINLOG2,MAXLOG2) ;
+ printf("%g, %g, %g)\n",tmp[0],tmp[1],tmp[2]); }
+ }
+ if(verbose>2){printf("tmp0=%g, tmp1=%g, tmp2=%g\n",tmp[0],tmp[1],tmp[2]);
+ printf("l2d:%g\n",log2d(tmp[2])); printf("ent:%g\n",ent); }
+ }
+ }
+ return ent;
+}
+
+//** entropx3fgx1px2px3p - get conditional entropy of X3 future given its past and X1,X2's past:H(X3F|X1P,X2P,X3P)
+// H(X3F|X1P,X2P,X3P) = -sum( p(X3F,X1P,X2P,X3P) * log( p(X3F,X1P,X2P,X3P) / p(X1P,X2P,X3P) )
+double entropx3fgx1px2px3pd (double* pX3FX1PX2PX3P, double* pX1PX2PX3P,
+ int minv1, int maxv1, int minv2, int maxv2, int minv3, int maxv3, int szp) {
+ static double tmp[4];
+ double ent = 0.0;
+ int l,k,m,n;
+ for(l=minv3;l<=maxv3;l++) for(k=minv1;k<=maxv1;k++) for(m=minv2;m<=maxv2;m++) for(n=minv3;n<=maxv3;n++) {
+ tmp[0]=pX3FX1PX2PX3P[l*szp*szp*szp+k*szp*szp+m*szp+n]; tmp[1]=pX1PX2PX3P[k*szp*szp+m*szp+n];
+ if (tmp[0]>1e-9 && tmp[1]>1e-9) {
+ tmp[2] = tmp[0] / tmp[1];
+ if (usetable && tmp[2]>=MINLOG2 && tmp[2]<=MAXLOG2) {
+ ent -= tmp[0]*_n_LOG2(tmp[2]);
+ } else { ent -= tmp[0] * log2d(tmp[2]);
+ if (usetable&&verbose>0.4) {
+ printf("WARNB:%g outside of [%g,%g] TABLE (",tmp[2],MINLOG2,MAXLOG2) ;
+ printf("%g, %g, %g)\n",tmp[0],tmp[1],tmp[2]); }
+ }
+ if(verbose>2){printf("tmp0=%g, tmp1=%g, tmp2=%g\n",tmp[0],tmp[1],tmp[2]);
+ printf("l2d:%g\n",log2d(tmp[2])); printf("ent:%g\n",ent); }
+ }
+ }
+ return ent;
+}
+
+// count # of non-zero elements in a pdf
+double pdfnz (double* pdf, int szp, int dim) {
+ double x,ds,cnt; int i,j,k,l,m,n;
+ cnt = 0.0;
+ if(dim==1) {
+ for(m=0;m0.0) cnt+=1.0;
+ } else if(dim==2) {
+ for(l=0;l0.0) cnt+=1.0;
+ } else if(dim==3) {
+ for(k=0;k0.0) cnt+=1.0;
+ } else if(dim==4) {
+ for(k=0;k0.0) cnt+=1.0; }
+ } else {
+ printf("pdfnz WARNA: invalid dim=%d for pdf!\n",dim);
+ }
+ return cnt;
+}
+
+//** tetrospks3() -- another helper function for tentrospks()
+// calculates H(X3F|X3P) - H(X3F|X1P,X2P,X3P)
+// H(X3F|X1P,X2P,X3P) = -sum( p(X3F,X1P,X2P,X3P) * log( p(X3F,X1P,X2P,X3P) / p(X1P,X2P,X3P) )
+// H(X3F|X3P) = -sum( p(X3F,X3P) * log(p(X3F,X3P) / p(X3P) ) )
+//
+// pX3FX3P , pX3P, pX3FX1PX2PX3P, pX1PX2PX3P
+//
+static double tetrospks3 (double* X1d,double* X2d,double* X3d,double* XO,int szX1,int szXO,int shuf) {
+
+ double *pX3FX3P , *pX3P, *pX3FX1PX2PX3P, *pX1PX2PX3P, te, ds, tmp[5], mout[200], mean, norm, teout;
+ double cnt1,cnt2,cnt3,jmax,N;
+ int i,j,k,l,m,n,sz,szp,*X1,*X2,*X3,minv1,maxv1,minv2,maxv2,minv3,maxv3;
+
+ if (shuf>200) {printf("tetrospks3 INTERR nshuf (%d) >200\n",shuf); hxe();}
+ if(useslice) { // if doing a slice of the vector
+ if (end>0.0 && end<=szX1) szX1=(int)end;
+ printf("WARNING: using newly modified useslice capability\n");
+ } else end=beg=0;
+ sz=szX1-(int)beg; // max index
+ X1=iscrset(sz*3); X2=X1+sz; X3=X1+2*sz;// move into integer arrays
+ if(verbose>3) printf("X1:%p , X2:%p, X3:%p:%p\n",X1,X2,X3);
+ minv1=minv2=minv3=INT_MAX; maxv1=maxv2=maxv3=INT_MIN; cnt1=cnt2=cnt3=0;
+ for (i=0;i0) cnt1++; if (X2[i]>0) cnt2++; if(X3[i]>0) cnt3++;
+ if (X1[i]>maxv1) maxv1=X1[i]; if (X1[i]maxv2) maxv2=X2[i]; if (X2[i]maxv3) maxv3=X3[i]; if (X3[i]= 0:%d,%d,%d\n",minv1,minv2,minv3);hxe();}
+ count+=1;
+ if (minv1==maxv1 || minv2==maxv2 || minv3==maxv3) { // no variation return 0,1
+ if(verbose>0) printf("tetrospks3 WARN0: #1:%d,%d,#2:%d,%d,#3:%d,%d)\n",minv1,maxv1,minv2,maxv2,minv3,maxv3);
+ for (i=0;i=4+shuf)XO[shuf+3]=1.0; return 0.; }
+ szp=(maxv1>maxv2)?(maxv1+1):(maxv2+1); if(maxv3+1>szp) szp=maxv3+1;
+ if(verbose>1){printf("minv1:%d,maxv1:%d,cnt1:%g\n",minv1,maxv1,cnt1);
+ printf("minv2:%d,maxv2:%d,cnt2:%g\n",minv2,maxv2,cnt2);
+ printf("minv3:%d,maxv3:%d,cnt3:%g\n",minv3,maxv3,cnt3);}
+ pX3P = (double*) calloc(szp,sizeof(double));
+ pX3FX3P = (double*) calloc(szp*szp,sizeof(double));
+ pX1PX2PX3P = (double*) calloc(szp*szp*szp,sizeof(double));
+ pX3FX1PX2PX3P = (double*) calloc(szp*szp*szp*szp,sizeof(double));
+
+ // only need to do the X3 stuff once since only shuffle X1
+ for(k=1;k0.0) {
+ pX3P[k] = (0.5+pX3P[k]) / ( sz-1.0 + 0.5*jmax );}
+ } else for(k=minv3;k<=maxv3;k++) pX3P[k] /= (sz-1);
+
+ if (verbose>2) pdfpr(pX3P,szp,1,"pX3P");
+
+ for(k=0;k0.0){
+ pX3FX3P[k*szp+l] = (pX3FX3P[k*szp+l]+0.5) / ( sz-1.0 + 0.5*jmax ); }
+ } else for(k=minv3;k<=maxv3;k++) for(l=minv3;l<=maxv3;l++) pX3FX3P[k*szp+l]/=(sz-1);
+ if (verbose>3) pdfpr(pX3FX3P,szp,2,"pX3FX3P");
+
+ //*** normalize on unshuffled with H(X3F|X3P); NB only X1,X2 is being shuffled anyway:
+
+ norm=entropxfgxpd(pX3P,pX3FX3P,minv3,maxv3,szp);
+
+ if (verbose>2) printf("H(X3F|X3P)=%g\n",norm);
+
+ for (j=0,mean=0.;j<=shuf;j++) {
+
+ //*** create X1 requiring pdfs: pX2PX1P, pX2FX2PX1P
+ if (j>0) { ishuffle(X1,sz); ishuffle(X2,sz); // shuffle and then reset pdfs
+ memset(pX1PX2PX3P,0,sizeof(double)*szp*szp*szp);
+ memset(pX3FX1PX2PX3P,0,sizeof(double)*szp*szp*szp*szp); }
+ for(l=1;l0.0){
+ pX1PX2PX3P[l*szp*szp+m*szp+n] = ( pX1PX2PX3P[l*szp*szp+m*szp+n] + 0.5 ) / ( sz-1.0 + 0.5*jmax ); }
+ } else for(l=minv1;l<=maxv1;l++) for(m=minv2;m<=maxv2;m++) for(n=minv3;n<=maxv3;n++) pX1PX2PX3P[l*szp*szp+m*szp+n]/=(sz-1);
+ if (verbose>3) pdfpr(pX1PX2PX3P,szp,3,"pX1PX2PX3P");
+
+ //*** init X3 future, X1 past, X2 past
+ for(k=0;k0.0){
+ pX3FX1PX2PX3P[k*szp*szp*szp+l*szp*szp+m*szp+n]=(pX3FX1PX2PX3P[k*szp*szp*szp+l*szp*szp+m*szp+n]+0.5)/(sz-1.0 + 0.5*jmax);}}
+ } else for(k=minv3;k<=maxv3;k++) for(l=minv1;l<=maxv1;l++) for(m=minv2;m<=maxv2;m++) for(n=minv3;n<=maxv3;n++) {
+ pX3FX1PX2PX3P[k*szp*szp*szp+l*szp*szp+m*szp+n]/=(sz-1); }
+ if (verbose>3) pdfpr(pX3FX1PX2PX3P,szp,4,"pX3FX1PX2PX3P");
+
+ //*** calculate log2
+ te = norm - entropx3fgx1px2px3pd(pX3FX1PX2PX3P,pX1PX2PX3P,minv1,maxv1,minv2,maxv2,minv3,maxv3,szp);
+ if (j>0) {mean+=te; mout[j-1]=te;} else teout=te; // saving these for now -- don't need to
+ }
+ if (shuf>0) mean/=shuf;
+ if (szXO>0) XO[0]=teout; if (szXO>1) XO[1]=norm; if (szXO>2) XO[2]=mean;
+ if (szXO>=3+shuf) for (i=0;i=4+shuf && shuf>0) { //get p-value, low means unlikely te due to chance
+ cnt1 = 0.0;
+ for(i=0;i2)printf("teout:%g, XO[%d]=%g\n",teout,i+3,XO[i+3]);}
+ XO[shuf+3] = cnt1 / (double)shuf;
+ if(verbose>2) printf("cnt1=%g, shuf=%d, XO[%d]=%g\n",cnt1,shuf,shuf+3,XO[shuf+3]);
+ }
+ if(verbose>2) printf("te=%g\n",te);
+ free(pX3P); free(pX3FX3P); free(pX1PX2PX3P); free(pX3FX1PX2PX3P);
+ return (teout-mean)/norm;
+}
+
+//** tetrospks2() -- helper function for tentrospks()
+// sum( p(X2F, X2P, X1P) * log( p(X2F, X2P, X1P) * p(X2P) / ( p(X2P, X1P) * p(X2F, X2P) ) ) )
+ // H(X2F|X2P)=-sum(p(X2F,X2P)*log(p(X2F|X2P)))=-sum(p(X2F,X2P)*log(p(X2F,X2P)/p(X2P)))
+ // calculate p(X2F,X2P) and p(X2P), so can just sum up their entropy
+ // tmp[0] will store the conditional entropy below
+static double tetrospks2 (double* X1d,double* X2d,double* XO,int szX1,int szXO,int shuf) {
+ double *pX2P,*pX2FX2PX1P,*pX2PX1P,*pX2FX2P,te,ds,tmp[5],mout[200],mean,norm,teout;
+ double cnt1,cnt2,jmax,N; int i,j,k,l,m,sz,szp,*X1,*X2,minv1,maxv1,minv2,maxv2;
+ if (shuf>200) {printf("tetrospks2 INTERR nshuf (%d) >200\n",shuf); hxe();}
+ if(useslice) { // if doing a slice of the vector
+ if (end>0.0 && end<=szX1) szX1=(int)end;
+ printf("WARNING: using newly modified useslice capability\n");
+ } else end=beg=0;
+ sz=szX1-(int)beg; // max index
+ X1=iscrset(sz*2); X2=X1+sz; // move into integer arrays
+ if(verbose>3) printf("X1:%p , X2:%p\n",X1,X2);
+ minv1=minv2=INT_MAX; maxv1=maxv2=INT_MIN; cnt1=cnt2=0;
+ if(verbose>2) printf("before: minv1=%d ,maxv1=%d, minv2=%d, maxv2=%d\n",minv1,maxv1,minv2,maxv2);
+ for (i=0;i0) cnt1++; if (X2[i]>0) cnt2++;
+ if (X1[i]>maxv1) maxv1=X1[i]; if (X1[i]maxv2) maxv2=X2[i]; if (X2[i]2) printf("after: minv1=%d ,maxv1=%d, minv2=%d, maxv2=%d\n",minv1,maxv1,minv2,maxv2);
+ if (minv1<0 || minv2<0) {
+ printf("tentropspks ERRB: minimum value must be >= 0:%d,%d\n",minv1,minv2);hxe();}
+ if (binmin) {
+ cnt1/=sz; cnt2/=sz; // ignore if not enough of the windows are filled
+ if (cnt1binmax) return -11.; else if (cnt2>binmax) return -12.;
+ if (abs(cnt1-cnt2)>cutoff) return -13.;
+ }
+ if(verbose>2)printf("tentropspks:minv1=%d,maxv1=%d,minv2=%d,maxv2=%d\n",minv1,maxv1,minv2,maxv2);
+ count+=1;
+ if (minv1==maxv1 || minv2==maxv2) { // no variation return 0,1
+ if(verbose>0) printf("tentropspk WARN0: #1:%d,%d,#2:%d,%d)\n",minv1,maxv1,minv2,maxv2);
+ for (i=0;i=4+shuf)XO[shuf+3]=1.0; return 0.; }
+ szp=(maxv1>maxv2)?(maxv1+1):(maxv2+1);
+ pX2P = (double*) calloc(szp,sizeof(double));
+ pX2PX1P = (double*) calloc(szp*szp,sizeof(double));
+ pX2FX2P = (double*) calloc(szp*szp,sizeof(double));
+ pX2FX2PX1P = (double*) calloc(szp*szp*szp,sizeof(double));
+ // only need to do the X2 stuff once since only shuffle X1
+ for(k=1;k0.0) {
+ pX2P[k] = (0.5+pX2P[k]) / ( sz-1.0 + 0.5*jmax );}
+ } else for(k=minv2;k<=maxv2;k++) pX2P[k] /= (sz-1);
+ if (verbose>2) pdfpr(pX2P,szp,1,"pX2P");
+ for(k=0;k0.0){
+ pX2FX2P[k*szp+l] = (pX2FX2P[k*szp+l]+0.5) / ( sz-1.0 + 0.5*jmax ); }
+ } else for(k=minv2;k<=maxv2;k++) for(l=minv2;l<=maxv2;l++) pX2FX2P[k*szp+l]/=(sz-1);
+ if (verbose>3) pdfpr(pX2FX2P,szp,2,"pX2FX2P");
+ //*** normalize on unshuffled with H(X2F|X2P); NB only X1 is being shuffled anyway:
+ norm=entropxfgxpd(pX2P,pX2FX2P,minv2,maxv2,szp);
+ if (verbose>2) printf("H(X2F|X2P)=%g\n",tmp[0]);
+ for (j=0,mean=0.;j<=shuf;j++) {
+ //*** create X1 requiring pdfs: pX2PX1P, pX2FX2PX1P
+ if (j>0) { ishuffle(X1,sz); // shuffle and then reset pdfs
+ memset(pX2PX1P,0,sizeof(double)*szp*szp);
+ memset(pX2FX2PX1P,0,sizeof(double)*szp*szp*szp); }
+ for(l=1;l0.0){
+ pX2PX1P[l*szp+m] = ( pX2PX1P[l*szp+m] + 0.5 ) / ( sz-1.0 + 0.5*jmax ); }
+ } else for(l=minv2;l<=maxv2;l++) for(m=minv1;m<=maxv1;m++) pX2PX1P[l*szp+m]/=(sz-1);
+ if (verbose>3) pdfpr(pX2PX1P,szp,2,"pX2PX1P");
+ //*** init X2 future, X2 past, X1 past
+ for(k=0;k0.0){
+ pX2FX2PX1P[k*szp*szp+l*szp+m]=(pX2FX2PX1P[k*szp*szp+l*szp+m]+0.5)/(sz-1.0 + 0.5*jmax);}}
+ } else for(k=minv2;k<=maxv2;k++) for(l=minv2;l<=maxv2;l++) for(m=minv1;m<=maxv1;m++) {
+ pX2FX2PX1P[k*szp*szp+l*szp+m]/=(sz-1); }
+ if (verbose>3) pdfpr(pX2FX2PX1P,szp,3,"pX2FX2PX1P");
+ //*** calculate log2
+ te = norm - entropx2fgx2px1pd(pX2FX2PX1P,pX2PX1P,minv1,maxv1,minv2,maxv2,szp);
+ if (j>0) {mean+=te; mout[j-1]=te;} else teout=te; // saving these for now -- don't need to
+ }
+ if (shuf>0) mean/=shuf;
+ if (szXO>0) XO[0]=teout; if (szXO>1) XO[1]=norm; if (szXO>2) XO[2]=mean;
+ if (szXO>=3+shuf) for (i=0;i=4+shuf && shuf>0) { //get p-value, low means unlikely te due to chance
+ cnt1 = 0.0;
+ for(i=0;i2)printf("teout:%g, XO[%d]=%g\n",teout,i+3,XO[i+3]);}
+ XO[shuf+3] = cnt1 / (double)shuf;
+ if(verbose>2) printf("cnt1=%g, shuf=%d, XO[%d]=%g\n",cnt1,shuf,shuf+3,XO[shuf+3]);
+ }
+ if(verbose>2) printf("te=%g\n",te);
+ free(pX2FX2P); free(pX2PX1P); free(pX2P); free(pX2FX2PX1P);
+ return (teout-mean)/norm;
+}
+
+
+ENDVERBATIM
+
+
+FUNCTION EXP (x) {
+ TABLE DEPEND MINEXP,MAXEXP FROM MINEXP TO MAXEXP WITH 50000
+ EXP = exp(x)
+}
+
+FUNCTION LOG2 (x) {
+ TABLE DEPEND MINLOG2,MAXLOG2 FROM MINLOG2 TO MAXLOG2 WITH 50000
+ LOG2 = log(x)/LG2
+}
+
+PROCEDURE install () {
+ if (installed==1) {
+ printf("infot.mod version %s\n","$Id: infot.mod,v 1.161 2010/08/19 20:02:27 samn Exp $")
+ } else {
+ installed = 1
+ VERBATIM
+ _check_LOG2(); _check_EXP();
+ install_vector_method("tentropspks",tentropspks);
+ ENDVERBATIM
+ }
+}
+
diff --git a/netpyne/utils.py b/netpyne/utils.py
index 3045454e5..bfaeecdaf 100644
--- a/netpyne/utils.py
+++ b/netpyne/utils.py
@@ -1,7 +1,7 @@
"""
utils.py
-Useful functions related to the parameters file, eg. create params file from excel table
+Useful functions
Contributors: salvador dura@gmail.com
"""
From 978fd9f885cb053f4161baef2139656ea0fdb935 Mon Sep 17 00:00:00 2001
From: salvadord
Date: Thu, 1 Dec 2016 18:35:08 -0500
Subject: [PATCH 46/54] Added analysis.granger() function to calculate and plot
Spectral Granger Causality (issue #42)
---
CHANGES.md | 2 +
netpyne/analysis.py | 161 ++++++++++++++++----
netpyne/support/bsmart.py | 310 ++++++++++++++++++++++++++++++++++++++
3 files changed, 442 insertions(+), 31 deletions(-)
create mode 100644 netpyne/support/bsmart.py
diff --git a/CHANGES.md b/CHANGES.md
index 22c30bb9e..6bf4b9b9c 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -2,6 +2,8 @@
- Added analysis.nTE() function to calculate normalized transfer entropy (issue #42)
+- Added analysis.granger() function to calculate and plot Spectral Granger Causality (issue #42)
+
- Added option to fix soma depth for subcellular distributions based on 1d and 2d maps
- Improved import of multicompartmental cells from NeuroML2
diff --git a/netpyne/analysis.py b/netpyne/analysis.py
index 0ddca6b4c..956c62285 100644
--- a/netpyne/analysis.py
+++ b/netpyne/analysis.py
@@ -35,7 +35,7 @@ def plotData ():
if kwargs == True: kwargs = {}
elif kwargs == False: continue
func = getattr(sim.analysis, funcName) # get pointer to function
- func(**kwargs) # call function with user arguments
+ out = func(**kwargs) # call function with user arguments
# Print timings
if sim.cfg.timing:
@@ -1338,7 +1338,7 @@ def plot2Dnet (include = ['allCells'], figSize = (12,12), showConns = True, save
######################################################################################################################################################
## Calculate normalized transfer entropy
######################################################################################################################################################
-def nTE(cells1 = [], cells2 = [], spks1 = None, spks2 = None, trange = [0,1000], binSize = 20, numShuffle = 30):
+def nTE(cells1 = [], cells2 = [], spks1 = None, spks2 = None, timeRange = None, binSize = 20, numShuffle = 30):
'''
Calculate normalized transfer entropy
- cells1 (['all',|'allCells','allNetStims',|,120,|,'E1'|,('L2', 56)|,('L5',[4,5,6])]): Subset of cells from which to obtain spike train 1 (default: [])
@@ -1346,7 +1346,7 @@ def nTE(cells1 = [], cells2 = [], spks1 = None, spks2 = None, trange = [0,1000],
subset of cells in network
- spks1 (list): Spike train 1; list of spike times; if omitted then obtains spikes from cells1 (default: None)
- spks2 (list): Spike train 2; list of spike times; if omitted then obtains spikes from cells2 (default: None)
- - trange ([min, max]): Range of time to calculate nTE in ms (default: [0,1000])
+ - timeRange ([min, max]): Range of time to calculate nTE in ms (default: [0,cfg.duration])
- binSize (int): Bin size used to convert spike times into histogram
- numShuffle (int): Number of times to shuffle spike train 1 to calculate TEshuffled; note: nTE = (TE - TEShuffled)/H(X2F|X2P)
@@ -1425,12 +1425,17 @@ def nTE(cells1 = [], cells2 = [], spks1 = None, spks2 = None, trange = [0,1000],
spks2 = list(spkts)
+ # time range (defaults are only used if no timeRange argument was passed)
+ if timeRange is None and getattr(sim, 'cfg', None):
+ timeRange = [0,sim.cfg.duration]
+ elif timeRange is None:
+ timeRange = [0, max(spks1+spks2)]
inputVec = h.Vector()
outputVec = h.Vector()
- histo1 = histogram(spks1, bins = np.arange(trange[0], trange[1], binSize))
+ histo1 = histogram(spks1, bins = np.arange(timeRange[0], timeRange[1], binSize))
histoCount1 = histo1[0]
- histo2 = histogram(spks2, bins = np.arange(trange[0], trange[1], binSize))
+ histo2 = histogram(spks2, bins = np.arange(timeRange[0], timeRange[1], binSize))
histoCount2 = histo2[0]
inputVec.from_python(histoCount1)
@@ -1443,42 +1448,136 @@ def nTE(cells1 = [], cells2 = [], spks1 = None, spks2 = None, trange = [0,1000],
######################################################################################################################################################
## Calculate granger causality
######################################################################################################################################################
-def granger(spk1, spk2, binSize=5, trange=[0,2000]):
- """
- Typical usage is as follows:
- from bsmart import pwcausalr
- F,pp,cohe,Fx2y,Fy2x,Fxy=pwcausalr(x,ntrls,npts,p,fs,freq);
-
- Outputs:
- F is the frequency vector for the remaining quantities
- pp is the spectral power
- cohe is the coherence
- Fx2y is the causality of channel X to channel Y
- Fy2x is the causality of channel Y to channel X
- Fxy is the "instantaneous" causality (cohe-Fx2y-Fy2x I think)
- Inputs:
- x is the data for at least two channels, e.g. a 2x8000 array consisting of two LFP time series
- ntrls is the number of trials (whatever that means -- just leave it at 1)
- npts is the number of points in the data (in this example, 8000)
- p is the order of the polynomial fit (e.g. 10 for a smooth fit, 20 for a less smooth fit)
- fs is the sampling rate (e.g. 200 Hz)
- freq is the maximum frequency to calculate (e.g. fs/2=100, which will return 0:100 Hz)
- """
+def granger(cells1 = [], cells2 = [], spks1 = None, spks2 = None, label1 = 'spkTrain1', label2 = 'spkTrain2', timeRange = None, binSize=5, plotFig = True,
+ saveData = None, saveFig = None, showFig = True):
+ '''
+ Calculate and optionally plot Granger Causality
+ - cells1 (['all',|'allCells','allNetStims',|,120,|,'E1'|,('L2', 56)|,('L5',[4,5,6])]): Subset of cells from which to obtain spike train 1 (default: [])
+ - cells2 (['all',|'allCells','allNetStims',|,120,|,'E1'|,('L2', 56)|,('L5',[4,5,6])]): Subset of cells from which to obtain spike train 2 (default: [])
+ subset of cells in network
+ - spks1 (list): Spike train 1; list of spike times; if omitted then obtains spikes from cells1 (default: None)
+ - spks2 (list): Spike train 2; list of spike times; if omitted then obtains spikes from cells2 (default: None)
+ - label1 (string): Label for spike train 1 to use in plot
+ - label2 (string): Label for spike train 2 to use in plot
+ - timeRange ([min, max]): Range of time to calculate the Granger causality in ms (default: [0,cfg.duration])
+ - binSize (int): Bin size used to convert spike times into histogram
+ - plotFig (True|False): Whether to plot a figure showing Granger Causality Fx2y and Fy2x
+ - saveData (None|'fileName'): File name where to save the final data used to generate the figure (default: None)
+ - saveFig (None|'fileName'): File name where to save the figure;
+ if set to True uses filename from simConfig (default: None)
+ - showFig (True|False): Whether to show the figure or not (default: True)
+
+ - Returns
+ F: list of freqs
+ Fx2y: causality measure from x to y
+ Fy2x: causality from y to x
+ Fxy: instantaneous causality between x and y
+ fig: Figure handle
+ '''
- from pylab import histogram, plot, show
import numpy as np
- from support.bsmart import pwcausalr
+ from netpyne.support.bsmart import pwcausalr
+
+ if not spks1: # if doesnt contain a list of spk times, obtain from cells specified
+ cells, cellGids, netStimPops = getCellsInclude(cells1)
+ numNetStims = 0
+
+ # Select cells to include
+ if len(cellGids) > 0:
+ try:
+ spkts = [spkt for spkgid,spkt in zip(sim.allSimData['spkid'],sim.allSimData['spkt']) if spkgid in cellGids]
+ except:
+ spkts = []
+ else:
+ spkts = []
- histo1 = histogram(spk1, bins = np.arange(trange[0], trange[1], binSize))
+ # Add NetStim spikes
+ spkts = list(spkts)
+ numNetStims = 0
+ for netStimPop in netStimPops:
+ if 'stims' in sim.allSimData:
+ cellStims = [cellStim for cell,cellStim in sim.allSimData['stims'].iteritems() if netStimPop in cellStim]
+ if len(cellStims) > 0:
+ spktsNew = [spkt for cellStim in cellStims for spkt in cellStim[netStimPop] ]
+ spkts.extend(spktsNew)
+ numNetStims += len(cellStims)
+
+ spks1 = list(spkts)
+
+ if not spks2: # if doesnt contain a list of spk times, obtain from cells specified
+ cells, cellGids, netStimPops = getCellsInclude(cells2)
+ numNetStims = 0
+
+ # Select cells to include
+ if len(cellGids) > 0:
+ try:
+ spkts = [spkt for spkgid,spkt in zip(sim.allSimData['spkid'],sim.allSimData['spkt']) if spkgid in cellGids]
+ except:
+ spkts = []
+ else:
+ spkts = []
+
+ # Add NetStim spikes
+ spkts = list(spkts)
+ numNetStims = 0
+ for netStimPop in netStimPops:
+ if 'stims' in sim.allSimData:
+ cellStims = [cellStim for cell,cellStim in sim.allSimData['stims'].iteritems() if netStimPop in cellStim]
+ if len(cellStims) > 0:
+ spktsNew = [spkt for cellStim in cellStims for spkt in cellStim[netStimPop] ]
+ spkts.extend(spktsNew)
+ numNetStims += len(cellStims)
+
+ spks2 = list(spkts)
+
+
+ # time range
+ if timeRange is None:
+ if getattr(sim, 'cfg', None):
+ timeRange = [0,sim.cfg.duration]
+ else:
+ timeRange = [0, max(spks1+spks2)]
+
+ histo1 = histogram(spks1, bins = np.arange(timeRange[0], timeRange[1], binSize))
histoCount1 = histo1[0]
- histo2 = histogram(spk2, bins = np.arange(trange[0], trange[1], binSize))
+ histo2 = histogram(spks2, bins = np.arange(timeRange[0], timeRange[1], binSize))
histoCount2 = histo2[0]
fs = 1000/binSize
F,pp,cohe,Fx2y,Fy2x,Fxy = pwcausalr(np.array([histoCount1, histoCount2]), 1, len(histoCount1), 10, fs, fs/2)
- return F, Fx2y[0],Fy2x[0], Fxy[0]
+
+ # plot granger
+ fig = -1
+ if plotFig:
+ fig = figure()
+ plot(F, Fy2x[0], label = label2 + ' -> ' + label1)
+ plot(F, Fx2y[0], 'r', label = label1 + ' -> ' + label2)
+ xlabel('Frequency (Hz)')
+ ylabel('Granger Causality')
+ legend()
+
+ # save figure data
+ if saveData:
+ figData = {'cells1': cells1, 'cells2': cells2, 'spks1': spks1, 'spks2': spks2, 'binSize': binSize, 'Fy2x': Fy2x[0], 'Fx2y': Fx2y[0],
+ 'saveData': saveData, 'saveFig': saveFig, 'showFig': showFig}
+
+ _saveFigData(figData, saveData, 'granger')
+
+ # save figure
+ if saveFig:
+ if isinstance(saveFig, basestring):
+ filename = saveFig
+ else:
+ filename = sim.cfg.filename+'_'+'granger.png'
+ savefig(filename)
+
+ # show fig
+ if showFig: _showFigure()
+
+ return F, Fx2y[0],Fy2x[0], Fxy[0], fig
diff --git a/netpyne/support/bsmart.py b/netpyne/support/bsmart.py
new file mode 100644
index 000000000..94de1cfd9
--- /dev/null
+++ b/netpyne/support/bsmart.py
@@ -0,0 +1,310 @@
+"""
+This file contains all the function definitions necessary for running spectral
+Granger causality. It is based on Mingzhou Ding's Matlab code package BSMART,
+available from www.brain-smart.org.
+
+Typical usage is as follows:
+from bsmart import pwcausalr
+F,pp,cohe,Fx2y,Fy2x,Fxy=pwcausalr(x,ntrls,npts,p,fs,freq);
+
+Outputs:
+ F is the frequency vector for the remaining quantities
+ pp is the spectral power
+ cohe is the coherence
+ Fx2y is the causality of channel X to channel Y
+ Fy2x is the causality of channel Y to channel X
+ Fxy is the "instantaneous" causality (cohe-Fx2y-Fy2x I think)
+Inputs:
+ x is the data for at least two channels, e.g. a 2x8000 array consisting of two LFP time series
+ ntrls is the number of trials (whatever that means -- just leave it at 1)
+ npts is the number of points in the data (in this example, 8000)
+ p is the order of the polynomial fit (e.g. 10 for a smooth fit, 20 for a less smooth fit)
+ fs is the sampling rate (e.g. 200 Hz)
+ freq is the maximum frequency to calculate (e.g. fs/2=100, which will return 0:100 Hz)
+
+The other two functions (armorf and spectrum_AR) can also be called directly, but
+more typically they are used by pwcausalr in intermediate calculations. Note that the
+sampling rate of the returned quantities is calculated as fs/2.
+
+To calculate the power spectrum powspec of a single time series x over the frequency range 0:freq,
+use the following (NB: now accessible via "from spectrum import ar")
+from bsmart import armorf, spectrum_AR
+[A,Z,tmp]=armorf(x,ntrls,npts,p) # Calculate autoregressive fit
+for i in range(freq+1): # Loop over frequencies
+ [S,H]=spectrum_AR(A,Z,p,i,fs) # Calculate spectrum
+ powspec[i]=abs(S**2) # Calculate and store power
+
+In either case (pwcausalr or spectrum_AR), the smoothness of the spectra is determined by the
+polynomial order p. Larger values of p give less-smooth spectra.
+
+Version: 2011feb11 by Cliff Kerr (cliffk@neurosim.downstate.edu)
+"""
+
+# ARMORF -- AR parameter estimation via LWR method modified by Morf.
+#
+# X is a matrix whose every row is one variable's time series
+# ntrls is the number of realizations, npts is the length of every realization
+# If the time series are stationary long, just let ntrls=1, npts=length(x)
+#
+# A = ARMORF(X,NR,NL,ORDER) returns the polynomial coefficients A corresponding to
+# the AR model estimate of matrix X using Morf's method.
+# ORDER is the order of the AR model.
+#
+# [A,E] = ARMORF(...) returns the final prediction error E (the variance
+# estimate of the white noise input to the AR model).
+#
+# [A,E,K] = ARMORF(...) returns the vector K of reflection coefficients (parcor coefficients).
+#
+# Ref: M. Morf, etal, Recursive Multichannel Maximum Entropy Spectral Estimation,
+# IEEE trans. GeoSci. Elec., 1978, Vol.GE-16, No.2, pp85-94.
+# S. Haykin, Nonlinear Methods of Spectral Analysis, 2nd Ed.
+# Springer-Verlag, 1983, Chapter 2
+
+def timefreq(x,fs=200):
+ """
+ TIMEFREQ
+
+ This function takes the time series and the sampling rate and calculates the
+ total number of points, the maximum frequency, the minimum (or change in)
+ frequency, and the vector of frequency points F.
+
+ Version: 2011may04
+ """
+ from numpy import size, shape, arange, append
+
+ maxfreq=float(fs)/2.0 # Maximum frequency
+ minfreq=float(fs)/float(size(x,0)) # Minimum and delta frequency -- simply the inverse of the length of the recording in seconds
+ F=arange(minfreq,maxfreq+minfreq,minfreq) # Create frequencies evenly spaced from 0:minfreq:maxfreq
+ F=append(0,F) # Add zero-frequency component
+
+ return F
+
+
+def ckchol(M):
+ """
+ CKCHOL
+
+ This function computes the Cholesky decomposition of the matrix if it's
+ positive-definite; else it returns the identity matrix. It was written
+ to handle the "matrix must be positive definite" error in linalg.cholesky.
+
+ Version: 2011may03
+ """
+ from numpy import linalg, matrix, eye, size
+
+ try: # First, try the Cholesky decomposition
+ output=linalg.cholesky(M)
+ except: # If not, just return garbage
+ print 'WARNING: Cholesky failed, so returning (invalid) identity matrix!'
+ output=matrix(eye(size(M,0)))
+
+ return output
+
+
+
+def armorf(x,ntrls,npts,p):
+ from scipy import shape, array, matrix, zeros, disp, concatenate, eye, dstack
+ from numpy import linalg # for inverse and Cholesky factorization;
+ import numpy as np
+ inv=linalg.inv; # Make name consistent with Matlab
+
+ # Initialization
+ x=matrix(x)
+ [L,N]=shape(x); # L is the number of channels, N is the npts*ntrls
+ R0=R0f=R0b=pf=pb=pfb=ap=bp=En=matrix(zeros((L,L,1))); # covariance matrix at 0,
+
+ # calculate the covariance matrix?
+ for i in range(ntrls):
+ En=En+x[:,i*npts:(i+1)*npts]*x[:,i*npts:(i+1)*npts].H;
+ ap=ap+x[:,i*npts+1:(i+1)*npts]*x[:,i*npts+1:(i+1)*npts].H;
+ bp=bp+x[:,i*npts:(i+1)*npts-1]*x[:,i*npts:(i+1)*npts-1].H;
+
+ ap = inv((ckchol(ap/ntrls*(npts-1)).T).H);
+ bp = inv((ckchol(bp/ntrls*(npts-1)).T).H);
+
+ for i in range(ntrls):
+ efp = ap*x[:,i*npts+1:(i+1)*npts];
+ ebp = bp*x[:,i*npts:(i+1)*npts-1];
+ pf = pf + efp*efp.H;
+ pb = pb + ebp*ebp.H;
+ pfb = pfb + efp*ebp.H;
+
+ En = (ckchol(En/N).T).H; # Covariance of the noise
+
+ # Initial output variables
+ tmp=[]
+ for i in range(L): tmp.append([]) # In Matlab, coeff=[], and anything can be appended to that.
+ coeff = matrix(tmp);# Coefficient matrices of the AR model
+ kr = matrix(tmp); # reflection coefficients
+ aparr=array(ap) # Convert AP matrix to an array, so it can be dstacked
+ bparr=array(bp)
+
+ for m in range(p):
+ # Calculate the next order reflection (parcor) coefficient
+ ck = inv((ckchol(pf).T).H)*pfb*inv(ckchol(pb).T);
+ kr=concatenate((kr,ck),1);
+ # Update the forward and backward prediction errors
+ ef = eye(L)- ck*ck.H;
+ eb = eye(L)- ck.H*ck;
+
+ # Update the prediction error
+ En = En*(ckchol(ef).T).H;
+ E = (ef+eb)/2;
+
+ # Update the coefficients of the forward and backward prediction errors
+ Z=zeros((L,L)) # Make it easier to define this
+ aparr=dstack((aparr,Z))
+ bparr=dstack((bparr,Z))
+ pf = pb = pfb = Z
+ # Do some variable juggling to handle Python's array/matrix limitations
+ a=b=zeros((L,L,0))
+
+ for i in range(m+2):
+ tmpap1=matrix(aparr[:,:,i]) # Need to convert back to matrix to perform operations
+ tmpbp1=matrix(bparr[:,:,i])
+ tmpap2=matrix(aparr[:,:,m+1-i])
+ tmpbp2=matrix(bparr[:,:,m+1-i])
+ tmpa = inv((ckchol(ef).T).H)*(tmpap1-ck*tmpbp2);
+ tmpb = inv((ckchol(eb).T).H)*(tmpbp1-ck.H*tmpap2);
+ a=dstack((a,array(tmpa)))
+ b=dstack((b,array(tmpb)))
+
+ for k in range(ntrls):
+ efp = zeros((L,npts-m-2));
+ ebp = zeros((L,npts-m-2));
+ for i in range(m+2):
+ k1=m+2-i+k*npts;
+ k2=npts-i+k*npts;
+ efp = efp+matrix(a[:,:,i])*matrix(x[:,k1:k2]);
+ ebp = ebp+matrix(b[:,:,m+1-i])*matrix(x[:,k1-1:k2-1]);
+ pf = pf + efp*efp.H;
+ pb = pb + ebp*ebp.H;
+ pfb = pfb + efp*ebp.H;
+
+ aparr = a;
+ bparr = b;
+
+ for j in range(p):
+ coeff = concatenate((coeff,inv(matrix(a[:,:,0]))*matrix(a[:,:,j+1])),1);
+
+ return coeff, En*En.H, kr
+
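An editorial sketch of a call to armorf (not part of the commit; assumes numpy and data arranged as channels x (ntrls*npts), as in the comments above):

    import numpy as np
    np.random.seed(0)
    y = np.random.randn(2, 500)           # 2 channels, 1 trial of 500 points
    coeff, Z, kr = armorf(y, 1, 500, 3)   # order-3 AR fit
    # coeff is 2 x 6, i.e. the coefficient blocks [A1 A2 A3];
    # Z is the 2x2 residual (noise) covariance; kr holds the reflection coefficients.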
+
+#Port of spectrum_AR.m
+# Version: 2010jan18
+
+def spectrum_AR(A,Z,M,f,fs): # Get the spectrum in one specific frequency-f
+ from scipy import eye, size, exp, pi, real, disp
+ from numpy import linalg; inv=linalg.inv
+ N = size(Z,0); H = eye(N,N); # identity matrix
+ for m in range(M):
+ H = H + A[:,m*N:(m+1)*N]*exp(-1j*(m+1)*2*pi*f/fs); # Multiply f in the exponent by sampling interval (=1/fs). See Richard Shiavi
+
+ H = inv(H);
+ S = H*Z*H.H/fs;
+
+ return S,H
+
+
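Continuing the armorf sketch above (editorial; fs = 200 Hz and f = 10 Hz are chosen purely for illustration), the parametric spectrum of the fitted model at a single frequency is obtained with:

    S, H = spectrum_AR(coeff, Z, 3, 10.0, 200)   # model order 3, f = 10 Hz, fs = 200 Hz
    power_ch1 = abs(S[0, 0])                     # spectral power of channel 1 at 10 Hz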
+
+# Using Geweke's method to compute the causality between any two channels
+#
+# x is a two-dimensional matrix in which each row is one variable's time series
+# Nr is the number of realizations,
+# Nl is the length of every realization
+# If the time series consist of a single long, stationary realization, just let Nr=1, Nl=length(x)
+# porder is the order of AR model
+# fs is sampling frequency
+# freq is a vector of frequencies of interest, usually freq=0:fs/2
+# CK: note that in this port freq must be a scalar, else the for loop below doesn't work.
+#
+# Fx2y is the causality measure from x to y
+# Fy2x is causality from y to x
+# Fxy is instantaneous causality between x and y
+# the order of Fx2y/Fy2x is 1 to 2:L, 2 to 3:L,....,L-1 to L. That is,
+# 1st column: 1&2; 2nd: 1&3; ...; (L-1)th: 1&L; ...; (L(L-1))th: (L-1)&L.
+
+# revised Jan. 2006 by Yonghong Chen
+# Note: remove the ensemble mean before using this code
+
+def pwcausalr(x,Nr,Nl,porder,fs,freq=0): # Note: freq determines whether the frequency points are calculated or chosen
+ from pylab import size, shape, real, log, conj, zeros, arange, disp, array
+ from numpy import linalg; det=linalg.det
+ import numpy as np # Just for "sum"; can't remember what's wrong with pylab's sum
+ [L,N] = shape(x); #L is the number of channels, N is the total points in every channel
+
+ if freq==0: F=timefreq(x[0,:],fs) # Define the frequency points
+ else: F=array(range(0,freq+1)) # Or just pick them
+ npts=size(F,0)
+ # Initialize arrays
+ maxindex=np.sum(arange(1,L))
+ pp=zeros((L,npts))
+    # Define these separately; chained assignment on a single line would leave them all referencing the same array
+ cohe=zeros((maxindex,npts))
+ Fy2x=zeros((maxindex,npts))
+ Fx2y=zeros((maxindex,npts))
+ Fxy=zeros((maxindex,npts))
+ index = 0;
+
+ for i in range(1,L):
+ for j in range(i+1,L+1):
+ y=zeros((2,N)) # Initialize y
+ index = index + 1;
+ y[0,:] = x[i-1,:];
+ y[1,:] = x[j-1,:];
+ A2,Z2,tmp = armorf(y,Nr,Nl,porder); #fitting a model on every possible pair
+ eyx = Z2[1,1] - Z2[0,1]**2/Z2[0,0]; #corrected covariance
+ exy = Z2[0,0] - Z2[1,0]**2/Z2[1,1];
+ f_ind = 0;
+ for f in F:
+ f_ind = f_ind + 1;
+ S2,H2 = spectrum_AR(A2,Z2,porder,f,fs);
+ pp[i-1,f_ind-1] = abs(S2[0,0]*2); # revised
+ if (i==L-1) & (j==L):
+ pp[j-1,f_ind-1] = abs(S2[1,1]*2); # revised
+ cohe[index-1,f_ind-1] = real(abs(S2[0,1])**2 / S2[0,0]/S2[1,1]);
+ Fy2x[index-1,f_ind-1] = log(abs(S2[0,0])/abs(S2[0,0]-(H2[0,1]*eyx*conj(H2[0,1]))/fs)); #Geweke's original measure
+ Fx2y[index-1,f_ind-1] = log(abs(S2[1,1])/abs(S2[1,1]-(H2[1,0]*exy*conj(H2[1,0]))/fs));
+ Fxy[index-1,f_ind-1] = log(abs(S2[0,0]-(H2[0,1]*eyx*conj(H2[0,1]))/fs)*abs(S2[1,1]-(H2[1,0]*exy*conj(H2[1,0]))/fs)/abs(det(S2)));
+
+ return F,pp,cohe,Fx2y,Fy2x,Fxy
+
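The flattened row ordering described in the header above can be awkward to index into; this small editorial helper (a restatement of the double loop in pwcausalr, not part of the patch) returns the 0-based row of Fx2y/Fy2x/Fxy/cohe for a given channel pair:

    def pair_index(i, j, L):
        # pairs are enumerated (1,2),...,(1,L),(2,3),...,(L-1,L), matching the loops above
        return sum(L - k for k in range(1, i)) + (j - i - 1)

    # e.g. for L = 4 channels: pair_index(1,2,4) == 0, pair_index(2,3,4) == 3, pair_index(3,4,4) == 5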
+
+
+
+
+
+
+def granger(vec1,vec2,order=10,rate=200,maxfreq=0):
+ """
+ GRANGER
+
+ Provide a simple way of calculating the key quantities.
+
+ Usage:
+ F,pp,cohe,Fx2y,Fy2x,Fxy=granger(vec1,vec2,order,rate,maxfreq)
+ where:
+ F is a 1xN vector of frequencies
+ pp is a 2xN array of power spectra
+ cohe is the coherence between vec1 and vec2
+ Fx2y is the causality from vec1->vec2
+ Fy2x is the causality from vec2->vec1
+ Fxy is non-directional causality (cohe-Fx2y-Fy2x)
+
+ vec1 is a time series of length N
+    vec2 is another time series of length N
+    order is the order of the autoregressive (AR) model used for the fit (default 10)
+    rate is the sampling rate, in Hz
+ maxfreq is the maximum frequency to be returned, in Hz
+
+ Version: 2011jul18
+ """
+ from bsmart import timefreq, pwcausalr
+ from scipy import array, size
+
+ if maxfreq==0: F=timefreq(vec1,rate) # Define the frequency points
+ else: F=array(range(0,maxfreq+1)) # Or just pick them
+ npts=size(F,0)
+
+ data=array([vec1,vec2])
+ F,pp,cohe,Fx2y,Fy2x,Fxy=pwcausalr(data,1,npts,order,rate,maxfreq)
+ return F,pp[0,:],cohe[0,:],Fx2y[0,:],Fy2x[0,:],Fxy[0,:]
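An editorial usage sketch for granger (assuming this module is importable as bsmart, which is how the function resolves its own helpers; the synthetic signals are illustrative only):

    import numpy as np
    np.random.seed(1)
    v1 = np.random.randn(2000)
    v2 = np.roll(v1, 5) + 0.5*np.random.randn(2000)   # v2 lags v1 by 5 samples
    F, pp, cohe, Fx2y, Fy2x, Fxy = granger(v1, v2, order=10, rate=200, maxfreq=100)
    # Fx2y is the v1 -> v2 causality spectrum over F; Fy2x is the reverse direction.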
From 754eccd9ce5ad51312fe9affd6432dff0374902c Mon Sep 17 00:00:00 2001
From: salvadord
Date: Thu, 1 Dec 2016 18:40:50 -0500
Subject: [PATCH 47/54] updated travis script to test all doc tutorials
---
.travis.yml | 22 +++++++++++++---------
examples/saveLoadV1/load_run.py | 1 -
2 files changed, 13 insertions(+), 10 deletions(-)
diff --git a/.travis.yml b/.travis.yml
index 1d4612643..97614e65e 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -31,15 +31,15 @@ install:
- export NEURON_HOME=/home/travis/neuron/nrn/x86_64/
script:
- # - cp examples/HHTut/HHTut.py doc/source/code/
- # - cd doc/source/code/
- # - $NEURON_HOME/bin/nrnivmodl mod
- # - python tut1.py
- # - python tut2.py
- # - python tut3.py
- # - python tut5.py
- # - python tut6.py
- # - python tut_import.py
+ - cd doc/source/code/
+ - $NEURON_HOME/bin/nrnivmodl mod
+ - python tut1.py
+ - python tut2.py
+ - python tut3.py
+ - python tut5.py
+ - python tut6.py
+ - python tut7.py
+ - python tut_import.py
- cd examples/HHTut
- python HHTut_run.py -nogui
- cd ../HybridTut
@@ -48,6 +48,10 @@ script:
- cd ../M1
- $NEURON_HOME/bin/nrnivmodl
- python M1_run.py -nogui
+ - cd ../M1
+ - $NEURON_HOME/bin/nrnivmodl
+ - python M1_run.py -nogui
+
# - cd ../RL_arm
# - $NEURON_HOME/bin/nrnivmodl
# - python main.py
diff --git a/examples/saveLoadV1/load_run.py b/examples/saveLoadV1/load_run.py
index b5678f596..249847b22 100644
--- a/examples/saveLoadV1/load_run.py
+++ b/examples/saveLoadV1/load_run.py
@@ -28,4 +28,3 @@
###############################################################################
sim.loadSimulateAnalyze(filename='V1.json', simConfig=simConfig)
-#sim.load(filename='V1.json', simConfig=simConfig)
From f5ff5d76b4f023b1f18e6dfe8629b298b1314fc3 Mon Sep 17 00:00:00 2001
From: salvadord
Date: Thu, 1 Dec 2016 18:56:29 -0500
Subject: [PATCH 48/54] added -nogui to travis; updated doc with nTE+granger
---
.travis.yml | 12 +++++------
doc/source/reference.rst | 45 ++++++++++++++++++++++++++++++++++++++++
netpyne/analysis.py | 5 ++---
3 files changed, 53 insertions(+), 9 deletions(-)
diff --git a/.travis.yml b/.travis.yml
index 97614e65e..791883543 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -33,12 +33,12 @@ install:
script:
- cd doc/source/code/
- $NEURON_HOME/bin/nrnivmodl mod
- - python tut1.py
- - python tut2.py
- - python tut3.py
- - python tut5.py
- - python tut6.py
- - python tut7.py
+ - python tut1.py -nogui
+ - python tut2.py -nogui
+ - python tut3.py -nogui
+ - python tut5.py -nogui
+ - python tut6.py -nogui
+ - python tut7.py -nogui
- python tut_import.py
- cd examples/HHTut
- python HHTut_run.py -nogui
diff --git a/doc/source/reference.rst b/doc/source/reference.rst
index 5f14ae03d..0c3aa2fbb 100644
--- a/doc/source/reference.rst
+++ b/doc/source/reference.rst
@@ -861,6 +861,51 @@ Analysis-related functions
- Returns figure handles
+* **analysis.nTE** (cells1 = [], cells2 = [], spks1 = None, spks2 = None, timeRange = None, binSize = 20, numShuffle = 30)
+
+ Calculate normalized transfer entropy
+
+ - *cells1*: Subset of cells from which to obtain spike train 1 (['all',|'allCells','allNetStims',|,120,|,'E1'|,('L2', 56)|,('L5',[4,5,6])])
+ - *cells2*: Subset of cells from which to obtain spike train 2 (['all',|'allCells','allNetStims',|,120,|,'E1'|,('L2', 56)|,('L5',[4,5,6])])
+ subset of cells in network
+ - spks1: Spike train 1; list of spike times; if omitted then obtains spikes from cells1 (list)
+ - spks2: Spike train 2; list of spike times; if omitted then obtains spikes from cells2 (list)
+ - timeRange: Range of time to calculate nTE in ms ([min, max])
+ - binSize: Bin size used to convert spike times into histogram (int)
+ - numShuffle: Number of times to shuffle spike train 1 to calculate TEshuffled; note: nTE = (TE - TEShuffled)/H(X2F|X2P) (int)
+
+ - Returns nTE: normalized transfer entropy (float)
+
+
+* **analysis.granger** (cells1 = [], cells2 = [], spks1 = None, spks2 = None, label1 = 'spkTrain1', label2 = 'spkTrain2',
+ timeRange = None, binSize=5, plotFig = True, saveData = None, saveFig = None, showFig = True):
+
+ Calculate and optionally plot Granger Causality
+
+ - cells1: Subset of cells from which to obtain spike train 1 (['all',|'allCells','allNetStims',|,120,|,'E1'|,('L2', 56)|,('L5',[4,5,6])])
+ - cells2: Subset of cells from which to obtain spike train 2 (['all',|'allCells','allNetStims',|,120,|,'E1'|,('L2', 56)|,('L5',[4,5,6])])
+ - spks1 (list): Spike train 1; list of spike times; if omitted then obtains spikes from cells1 (default: None)
+ - spks2 (list): Spike train 2; list of spike times; if omitted then obtains spikes from cells2 (default: None)
+ - label1 (string): Label for spike train 1 to use in plot
+ - label2 (string): Label for spike train 2 to use in plot
+ - timeRange ([min, max]): Range of time to calculate nTE in ms (default: [0,cfg.duration])
+ - binSize (int): Bin size used to convert spike times into histogram
+ - plotFug (True|False): Whether to plot a figure showing Granger Causality Fx2y and Fy2x
+ - saveData (None|'fileName'): File name where to save the final data used to generate the figure (default: None)
+ - saveFig (None|'fileName'): File name where to save the figure;
+ if set to True uses filename from simConfig (default: None)(default: None)
+ - showFig (True|False): Whether to show the figure or not;
+ if set to True uses filename from simConfig (default: None)
+
+ Returns:
+ - F: list of freqs
+ - Fx2y: causality measure from x to y
+ - Fy2x: causality from y to x
+ - Fxy: instantaneous causality between x and y
+ - fig: Figure handle
+
+
+
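A minimal editorial usage sketch for the two functions documented above (not part of the committed docs; the population labels 'E2' and 'I2' are hypothetical, and a network is assumed to have been created and simulated already):

    from netpyne import sim
    # ... after sim.createSimulateAnalyze(netParams, simConfig) ...
    nte = sim.analysis.nTE(cells1=['E2'], cells2=['I2'], binSize=20, numShuffle=30)
    F, Fx2y, Fy2x, Fxy, fig = sim.analysis.granger(cells1=['E2'], cells2=['I2'],
                                                   binSize=5, plotFig=True)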
NOTE: The *include* argument can have the following values:
- 'all': all cells and netstims
- 'allCells': only all cells
diff --git a/netpyne/analysis.py b/netpyne/analysis.py
index 956c62285..a1b3bdbb5 100644
--- a/netpyne/analysis.py
+++ b/netpyne/analysis.py
@@ -1346,7 +1346,7 @@ def nTE(cells1 = [], cells2 = [], spks1 = None, spks2 = None, timeRange = None,
subset of cells in network
- spks1 (list): Spike train 1; list of spike times; if omitted then obtains spikes from cells1 (default: None)
- spks2 (list): Spike train 2; list of spike times; if omitted then obtains spikes from cells2 (default: None)
- - trange ([min, max]): Range of time to calculate nTE in ms (default: [0,cfg.duration])
+ - timeRange ([min, max]): Range of time to calculate nTE in ms (default: [0,cfg.duration])
- binSize (int): Bin size used to convert spike times into histogram
- numShuffle (int): Number of times to shuffle spike train 1 to calculate TEshuffled; note: nTE = (TE - TEShuffled)/H(X2F|X2P)
@@ -1453,8 +1453,7 @@ def granger(cells1 = [], cells2 = [], spks1 = None, spks2 = None, label1 = 'spkT
'''
Calculate and optionally plot Granger Causality
- cells1 (['all',|'allCells','allNetStims',|,120,|,'E1'|,('L2', 56)|,('L5',[4,5,6])]): Subset of cells from which to obtain spike train 1 (default: [])
- - cells2 (['all',|'allCells','allNetStims',|,120,|,'E1'|,('L2', 56)|,('L5',[4,5,6])]): Subset of cells from which to obtain spike train 1 (default: [])
- subset of cells in network
+ - cells2 (['all',|'allCells','allNetStims',|,120,|,'E1'|,('L2', 56)|,('L5',[4,5,6])]): Subset of cells from which to obtain spike train 2 (default: [])
- spks1 (list): Spike train 1; list of spike times; if omitted then obtains spikes from cells1 (default: None)
- spks2 (list): Spike train 2; list of spike times; if omitted then obtains spikes from cells2 (default: None)
- label1 (string): Label for spike train 1 to use in plot
From 9637a4a7a3c5300b4ded5efd1fd81f665b7d9c1f Mon Sep 17 00:00:00 2001
From: salvadord
Date: Thu, 1 Dec 2016 18:57:51 -0500
Subject: [PATCH 49/54] debugged doc
---
doc/source/reference.rst | 7 ++-----
netpyne/analysis.py | 1 -
2 files changed, 2 insertions(+), 6 deletions(-)
diff --git a/doc/source/reference.rst b/doc/source/reference.rst
index 0c3aa2fbb..a85453733 100644
--- a/doc/source/reference.rst
+++ b/doc/source/reference.rst
@@ -867,7 +867,6 @@ Analysis-related functions
- *cells1*: Subset of cells from which to obtain spike train 1 (['all',|'allCells','allNetStims',|,120,|,'E1'|,('L2', 56)|,('L5',[4,5,6])])
- *cells2*: Subset of cells from which to obtain spike train 2 (['all',|'allCells','allNetStims',|,120,|,'E1'|,('L2', 56)|,('L5',[4,5,6])])
- subset of cells in network
- spks1: Spike train 1; list of spike times; if omitted then obtains spikes from cells1 (list)
- spks2: Spike train 2; list of spike times; if omitted then obtains spikes from cells2 (list)
- timeRange: Range of time to calculate nTE in ms ([min, max])
@@ -892,10 +891,8 @@ Analysis-related functions
- binSize (int): Bin size used to convert spike times into histogram
- plotFug (True|False): Whether to plot a figure showing Granger Causality Fx2y and Fy2x
- saveData (None|'fileName'): File name where to save the final data used to generate the figure (default: None)
- - saveFig (None|'fileName'): File name where to save the figure;
- if set to True uses filename from simConfig (default: None)(default: None)
- - showFig (True|False): Whether to show the figure or not;
- if set to True uses filename from simConfig (default: None)
+ - saveFig (None|'fileName'): File name where to save the figure; if set to True uses filename from simConfig (default: None)(default: None)
+ - showFig (True|False): Whether to show the figure or not; if set to True uses filename from simConfig (default: None)
Returns:
- F: list of freqs
diff --git a/netpyne/analysis.py b/netpyne/analysis.py
index a1b3bdbb5..986c21355 100644
--- a/netpyne/analysis.py
+++ b/netpyne/analysis.py
@@ -1343,7 +1343,6 @@ def nTE(cells1 = [], cells2 = [], spks1 = None, spks2 = None, timeRange = None,
Calculate normalized transfer entropy
- cells1 (['all',|'allCells','allNetStims',|,120,|,'E1'|,('L2', 56)|,('L5',[4,5,6])]): Subset of cells from which to obtain spike train 1 (default: [])
- cells2 (['all',|'allCells','allNetStims',|,120,|,'E1'|,('L2', 56)|,('L5',[4,5,6])]): Subset of cells from which to obtain spike train 1 (default: [])
- subset of cells in network
- spks1 (list): Spike train 1; list of spike times; if omitted then obtains spikes from cells1 (default: None)
- spks2 (list): Spike train 2; list of spike times; if omitted then obtains spikes from cells2 (default: None)
- timeRange ([min, max]): Range of time to calculate nTE in ms (default: [0,cfg.duration])
From dd7685bd7bf79d35f8db66b77f7f525a2052112d Mon Sep 17 00:00:00 2001
From: salvadord
Date: Thu, 1 Dec 2016 19:09:13 -0500
Subject: [PATCH 50/54] debugged doc
---
CHANGES.md | 8 +++---
doc/source/reference.rst | 55 +++++++++++++++++++++++++---------------
netpyne/analysis.py | 2 +-
3 files changed, 39 insertions(+), 26 deletions(-)
diff --git a/CHANGES.md b/CHANGES.md
index 6bf4b9b9c..d34ee2d9a 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -4,16 +4,16 @@
- Added analysis.granger() function to calculate and plot Spectral Granger Causality (issue #42)
+- Added analysis.plotRatePSD() to plot firing rate power spectral density
+
+- Added analysis.plotShape() to plot 3D morphology of cell and synapse locations
+
- Added option to fix soma depth for subcellular distributions based on 1d and 2d maps
- Improved import of multicompartmental cells from NeuroML2
- Added support for gap junction (electrical synapse) connections
-- Added function plotRatePSD to plot firing rate power spectral density
-
-- Added plotShape to plot 3D morphology of cell and synapse locations
-
- Added option to skip batch sims if output file already exists
- Added option to overlay pop labels and show avg rates to plotRaster() (issue #111)
diff --git a/doc/source/reference.rst b/doc/source/reference.rst
index a85453733..725388cd1 100644
--- a/doc/source/reference.rst
+++ b/doc/source/reference.rst
@@ -831,6 +831,19 @@ Analysis-related functions
- Returns figure handles
+* **plotShape** (showSyns = True, include = [], style = '.', siz=10, figSize = (10,8), saveData = None, saveFig = None, showFig = True):
+
+  Plot 3D cell shape using NEURON's InterViews-based PlotShape
+
+ - *showSyns*: Show synaptic connections in 3D (True|False)
+ - *figSize*: Size of figure ((width, height))
+ - *saveData*: File name where to save the final data used to generate the figure (None|'fileName')
+ - *saveFig*: File name where to save the figure (None|'fileName')
+ - *showFig*: Whether to show the figure or not (True|False)
+
+ - Returns figure handles
+
+
* **analysis.plotConn** (include = ['all'], feature = 'strength', orderBy = 'gid', figSize = (10,10), groupBy = 'pop', saveData = None, saveFig = None, showFig = True)
Plot network connectivity. Optional arguments:
@@ -867,11 +880,11 @@ Analysis-related functions
- *cells1*: Subset of cells from which to obtain spike train 1 (['all',|'allCells','allNetStims',|,120,|,'E1'|,('L2', 56)|,('L5',[4,5,6])])
- *cells2*: Subset of cells from which to obtain spike train 2 (['all',|'allCells','allNetStims',|,120,|,'E1'|,('L2', 56)|,('L5',[4,5,6])])
- - spks1: Spike train 1; list of spike times; if omitted then obtains spikes from cells1 (list)
- - spks2: Spike train 2; list of spike times; if omitted then obtains spikes from cells2 (list)
- - timeRange: Range of time to calculate nTE in ms ([min, max])
- - binSize: Bin size used to convert spike times into histogram (int)
- - numShuffle: Number of times to shuffle spike train 1 to calculate TEshuffled; note: nTE = (TE - TEShuffled)/H(X2F|X2P) (int)
+ - *spks1*: Spike train 1; list of spike times; if omitted then obtains spikes from cells1 (list)
+ - *spks2*: Spike train 2; list of spike times; if omitted then obtains spikes from cells2 (list)
+ - *timeRange*: Range of time to calculate nTE in ms ([min, max])
+ - *binSize*: Bin size used to convert spike times into histogram (int)
+ - *numShuffle*: Number of times to shuffle spike train 1 to calculate TEshuffled; note: nTE = (TE - TEShuffled)/H(X2F|X2P) (int)
- Returns nTE: normalized transfer entropy (float)
@@ -881,25 +894,25 @@ Analysis-related functions
Calculate and optionally plot Granger Causality
- - cells1: Subset of cells from which to obtain spike train 1 (['all',|'allCells','allNetStims',|,120,|,'E1'|,('L2', 56)|,('L5',[4,5,6])])
+ - *cells1*: Subset of cells from which to obtain spike train 1 (['all',|'allCells','allNetStims',|,120,|,'E1'|,('L2', 56)|,('L5',[4,5,6])])
- cells2: Subset of cells from which to obtain spike train 2 (['all',|'allCells','allNetStims',|,120,|,'E1'|,('L2', 56)|,('L5',[4,5,6])])
- - spks1 (list): Spike train 1; list of spike times; if omitted then obtains spikes from cells1 (default: None)
- - spks2 (list): Spike train 2; list of spike times; if omitted then obtains spikes from cells2 (default: None)
- - label1 (string): Label for spike train 1 to use in plot
- - label2 (string): Label for spike train 2 to use in plot
- - timeRange ([min, max]): Range of time to calculate nTE in ms (default: [0,cfg.duration])
- - binSize (int): Bin size used to convert spike times into histogram
- - plotFug (True|False): Whether to plot a figure showing Granger Causality Fx2y and Fy2x
- - saveData (None|'fileName'): File name where to save the final data used to generate the figure (default: None)
- - saveFig (None|'fileName'): File name where to save the figure; if set to True uses filename from simConfig (default: None)(default: None)
- - showFig (True|False): Whether to show the figure or not; if set to True uses filename from simConfig (default: None)
+ - *spks1*: Spike train 1; list of spike times; if omitted then obtains spikes from cells1 (list)
+ - *spks2*: Spike train 2; list of spike times; if omitted then obtains spikes from cells2 (list)
+ - *label1*: Label for spike train 1 to use in plot (string)
+ - *label2*: Label for spike train 2 to use in plot (string)
+ - *timeRange*: Range of time to calculate nTE in ms ([min, max])
+ - *binSize*: Bin size used to convert spike times into histogram
+ - *plotFig*: Whether to plot a figure showing Granger Causality Fx2y and Fy2x (True|False)
+ - *saveData*: File name where to save the final data used to generate the figure (None|'fileName')
+ - *saveFig*: File name where to save the figure (None|'fileName')
+ - *showFig*: Whether to show the figure or not (True|False)
Returns:
- - F: list of freqs
- - Fx2y: causality measure from x to y
- - Fy2x: causality from y to x
- - Fxy: instantaneous causality between x and y
- - fig: Figure handle
+ - *F*: list of freqs
+ - *Fx2y*: causality measure from x to y
+ - *Fy2x*: causality from y to x
+ - *Fxy*: instantaneous causality between x and y
+ - *fig*: Figure handle
diff --git a/netpyne/analysis.py b/netpyne/analysis.py
index 986c21355..6dfc6fa46 100644
--- a/netpyne/analysis.py
+++ b/netpyne/analysis.py
@@ -1459,7 +1459,7 @@ def granger(cells1 = [], cells2 = [], spks1 = None, spks2 = None, label1 = 'spkT
- label2 (string): Label for spike train 2 to use in plot
- timeRange ([min, max]): Range of time to calculate nTE in ms (default: [0,cfg.duration])
- binSize (int): Bin size used to convert spike times into histogram
- - plotFug (True|False): Whether to plot a figure showing Granger Causality Fx2y and Fy2x
+ - plotFig (True|False): Whether to plot a figure showing Granger Causality Fx2y and Fy2x
- saveData (None|'fileName'): File name where to save the final data used to generate the figure (default: None)
- saveFig (None|'fileName'): File name where to save the figure;
if set to True uses filename from simConfig (default: None)(default: None)
From 590681daf1c5a863271eb06d0add5ae011939d6b Mon Sep 17 00:00:00 2001
From: salvadord
Date: Thu, 1 Dec 2016 19:18:36 -0500
Subject: [PATCH 51/54] Added option to turn off graphics by adding -nogui from
command line
---
.travis.yml | 3 +--
CHANGES.md | 6 +++++-
examples/HHTut/HHTut_run.py | 6 ------
netpyne/sim.py | 6 ++++++
4 files changed, 12 insertions(+), 9 deletions(-)
diff --git a/.travis.yml b/.travis.yml
index 791883543..314c3f9a9 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -33,14 +33,13 @@ install:
script:
- cd doc/source/code/
- $NEURON_HOME/bin/nrnivmodl mod
- - python tut1.py -nogui
- python tut2.py -nogui
- python tut3.py -nogui
- python tut5.py -nogui
- python tut6.py -nogui
- python tut7.py -nogui
- python tut_import.py
- - cd examples/HHTut
+ - cd ../../../examples/HHTut
- python HHTut_run.py -nogui
- cd ../HybridTut
- $NEURON_HOME/bin/nrnivmodl
diff --git a/CHANGES.md b/CHANGES.md
index d34ee2d9a..cfdbd1361 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -10,9 +10,11 @@
- Added option to fix soma depth for subcellular distributions based on 1d and 2d maps
+- Added support for gap junction (electrical synapse) connections
+
- Improved import of multicompartmental cells from NeuroML2
-- Added support for gap junction (electrical synapse) connections
+- Added option to turn off graphics by adding -nogui from command line
- Added option to skip batch sims if output file already exists
@@ -20,6 +22,8 @@
- All section now include argument cell=self to differentiate them
+- Fixed travis script so checks all tuts and examples after each commit (issue #37)
+
- Fixed bug positioning cells with 3d geom
- Fixed bug in sim.clearAll by closing all figures instead of current (issue #168)
diff --git a/examples/HHTut/HHTut_run.py b/examples/HHTut/HHTut_run.py
index 232e89f7d..96755d89f 100644
--- a/examples/HHTut/HHTut_run.py
+++ b/examples/HHTut/HHTut_run.py
@@ -1,9 +1,3 @@
-import sys
-
-if '-nogui' in sys.argv:
- import netpyne
- netpyne.__gui__ = False
-
import HHTut # import parameters file
from netpyne import sim # import netpyne sim module
diff --git a/netpyne/sim.py b/netpyne/sim.py
index c838b42c7..1742e5f58 100644
--- a/netpyne/sim.py
+++ b/netpyne/sim.py
@@ -7,7 +7,13 @@
Contributors: salvadordura@gmail.com
"""
+# check for -nogui option
+import sys
+if '-nogui' in sys.argv:
+ import netpyne
+ netpyne.__gui__ = False
+# import all required modules
from simFuncs import *
from wrappers import *
import analysis
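For user scripts, the same guard that this series removes from the bundled examples can still be written explicitly when needed; a sketch mirroring the check added to sim.py above:

    import sys
    if '-nogui' in sys.argv:
        import netpyne
        netpyne.__gui__ = False   # disable graphics before importing netpyne.sim
    from netpyne import sim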
From 14e7730e6d1eab8df98b2d684a0696d6de61f21b Mon Sep 17 00:00:00 2001
From: salvadord
Date: Thu, 1 Dec 2016 19:20:00 -0500
Subject: [PATCH 52/54] removed hardcoded -nogui from examples
---
examples/HybridTut/HybridTut_run.py | 6 ------
examples/M1/M1_run.py | 6 ------
2 files changed, 12 deletions(-)
diff --git a/examples/HybridTut/HybridTut_run.py b/examples/HybridTut/HybridTut_run.py
index aecccf2d8..f3c6200b7 100644
--- a/examples/HybridTut/HybridTut_run.py
+++ b/examples/HybridTut/HybridTut_run.py
@@ -1,9 +1,3 @@
-import sys
-
-if '-nogui' in sys.argv:
- import netpyne
- netpyne.__gui__ = False
-
import HybridTut # import parameters file
from netpyne import sim # import netpyne init module
diff --git a/examples/M1/M1_run.py b/examples/M1/M1_run.py
index 23fac54f3..fb1c55c87 100644
--- a/examples/M1/M1_run.py
+++ b/examples/M1/M1_run.py
@@ -1,9 +1,3 @@
-import sys
-
-if '-nogui' in sys.argv:
- import netpyne
- netpyne.__gui__ = False
-
import M1 # import parameters file
from netpyne import sim # import netpyne init module
From 9f8e3db1910b867c38daff9e8204b6585ea1ecb9 Mon Sep 17 00:00:00 2001
From: salvadord
Date: Thu, 1 Dec 2016 19:28:08 -0500
Subject: [PATCH 53/54] fixed tut7.py so checks for gui; removed tut_import.py
from travis
---
.travis.yml | 2 +-
doc/source/code/tut7.py | 7 +++++--
netpyne/__init__.py | 2 +-
3 files changed, 7 insertions(+), 4 deletions(-)
diff --git a/.travis.yml b/.travis.yml
index 314c3f9a9..0df06e3b0 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -38,7 +38,7 @@ script:
- python tut5.py -nogui
- python tut6.py -nogui
- python tut7.py -nogui
- - python tut_import.py
+ # - python tut_import.py
- cd ../../../examples/HHTut
- python HHTut_run.py -nogui
- cd ../HybridTut
diff --git a/doc/source/code/tut7.py b/doc/source/code/tut7.py
index 88374241d..5373af84e 100644
--- a/doc/source/code/tut7.py
+++ b/doc/source/code/tut7.py
@@ -103,8 +103,11 @@
'secs': {'soma': {'geom': {'L': 160}}}})
sim.simulate()
-sim.analysis.plotRaster(syncLines=True)
-sim.analysis.plotTraces(include = [1])
+
+from netpyne import gui
+if gui:
+ sim.analysis.plotRaster(syncLines=True)
+ sim.analysis.plotTraces(include = [1])
diff --git a/netpyne/__init__.py b/netpyne/__init__.py
index 661b0eed8..80862d914 100644
--- a/netpyne/__init__.py
+++ b/netpyne/__init__.py
@@ -1,3 +1,3 @@
-__version__ = '0.6.3'
+__version__ = '0.6.4'
__gui__ = True # global option to enable/disable graphics
\ No newline at end of file
From 07208e4d54641e39cacfa7b7a72d85eb5bcb7d93 Mon Sep 17 00:00:00 2001
From: salvadord
Date: Thu, 1 Dec 2016 19:28:30 -0500
Subject: [PATCH 54/54] VERSION 0.6.4
---
netpyne/__init__.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/netpyne/__init__.py b/netpyne/__init__.py
index 80862d914..72aab188a 100644
--- a/netpyne/__init__.py
+++ b/netpyne/__init__.py
@@ -1,3 +1,3 @@
-__version__ = '0.6.4'
+__version__ = '0.6.4'
__gui__ = True # global option to enable/disable graphics
\ No newline at end of file