Commit

First commit of Alvaro's xtcav code
chrisvam committed Oct 25, 2014
1 parent b68590a commit 89b7e1b
Showing 15 changed files with 3,008 additions and 0 deletions.
32 changes: 32 additions & 0 deletions SConscript
@@ -0,0 +1,32 @@
#--------------------------------------------------------------------------
# File and Version Information:
# $Id$
#
# Description:
# SConscript file for package xtcav
#------------------------------------------------------------------------

# Do not delete following line, it must be present in
# SConscript file for any SIT project
Import('*')

#
# For the standard SIT packages which build libraries, applications,
# and Python modules it is usually sufficient to call
# standardSConscript() function which defines rules for all
# above targets. Many standard packages do not need any special options,
# but those which need can modify standardSConscript() behavior using
# a number of arguments, here is a complete list:
#
# LIBS - list of additional libraries needed by this package
# LIBPATH - list of directories for additional libraries
# BINS - dictionary of executables and their corresponding source files
# TESTS - dictionary of test applications and their corresponding source files
# SCRIPTS - list of scripts in app/ directory
# UTESTS - names of the unit tests to run; if not given, all tests are run as unit tests
# PYEXTMOD - name of the Python extension module, package name used by default
# CCFLAGS - additional flags passed to C/C++ compilers
# NEED_QT - set to True to enable Qt support
#
#
standardSConscript()
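
For illustration, a hypothetical call that passes a few of the options listed above (the values are invented and not part of this commit; the SConscript above calls standardSConscript() with no arguments):

standardSConscript(LIBS = ['hdf5'],
                   BINS = {'xtcavExample': 'app/xtcavExample.cpp'},
                   SCRIPTS = ['xtcavDark', 'xtcavLasingOff'],
                   CCFLAGS = '-O2')
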
12 changes: 12 additions & 0 deletions app/xtcavDark
@@ -0,0 +1,12 @@
#!/usr/bin/env python

from xtcav.GenerateDarkBackground import *

GDB=GenerateDarkBackground();

GDB.experiment='amoc8114'
GDB.runs='85'
GDB.maxshots=150
GDB.SetValidityRange(85,109)

GDB.Generate();
13 changes: 13 additions & 0 deletions app/xtcavLasingOff
@@ -0,0 +1,13 @@
#!/usr/bin/env python

from xtcav.GenerateLasingOffReference import *

GLOC=GenerateLasingOffReference();
GLOC.experiment='amoc8114'
GLOC.runs='86'
GLOC.maxshots=401
GLOC.nb=1
GLOC.groupsize=5
GLOC.SetValidityRange(86,91)

GLOC.Generate();
16 changes: 16 additions & 0 deletions doc/ChangeLog
@@ -0,0 +1,16 @@
#--------------------------------------------------------------------------
# File and Version Information:
# $Id: ChangeLog 8463 2014-06-24 01:53:04Z [email protected] $
#
# Description:
# ChangeLog file for package xtcav
#------------------------------------------------------------------------

Package author: Alvaro Sanchez-Gonzalez

Please describe any modifications that you made to the package in the
reverse time order.

Tag: V00-00-01
2014-10-24 Christopher O'Grady
- Initial tag
33 changes: 33 additions & 0 deletions examples/xtcavShotToShot.py
@@ -0,0 +1,33 @@
import numpy as np   #needed for np.amax below
import psana
from xtcav.ShotToShotCharacterization import *

maxshots=5 #Maximum number of valid shots to process
experiment='amoc8114' #Experiment label
runs='87' #Runs

#Load the dataset; this way of working should be compatible with both xtc and hdf5 files
dataSource=psana.DataSource("exp=%s:run=%s:idx" % (experiment,runs))

#XTCAV Retrieval (setting the data source is useful to get information such as experiment name)
XTCAVRetrieval=ShotToShotCharacterization();
XTCAVRetrieval.SetDataSource(dataSource)

for r,run in enumerate(dataSource.runs()):
    n_r=0 #Counter for the total number of xtcav images processed within the run
    times = run.times()
    for t in times:
        evt = run.event(t)

        if not XTCAVRetrieval.SetCurrentEvent(evt):
            continue

        time,power=XTCAVRetrieval.XRayPower()  #time axis and reconstructed X-ray power
        agreement=XTCAVRetrieval.ReconstructionAgreement()

        print "%d/%d" % (n_r,maxshots) #For debugging purposes; will be removed
        print 'Agreement: %g %% Maximum power: %g GW' % (agreement*100,np.amax(power))

        n_r=n_r+1

        if n_r>=maxshots: #Stop after a certain number of shots (ideally this would be an argument rather than a hardcoded value)
            break
37 changes: 37 additions & 0 deletions src/CalibrationPaths.py
@@ -0,0 +1,37 @@
import os
from psana import *
from PSCalib.CalibFileFinder import CalibFileFinder

class CalibrationPaths:
    def __init__(self,datasource):
        self.ds = datasource
        self.calibgroup = 'Xtcav::CalibV1'
        self.src = 'XrayTransportDiagnostic.0:Opal1000.0'
        self.cdir = self.ds.env().calibDir()

    def findCalFileName(self,type,rnum):
        """
        Returns the calibration file name for a given run number and type.
        """
        cff = CalibFileFinder(self.cdir, self.calibgroup, pbits=0)
        fname = cff.findCalibFile(self.src, type, rnum)
        return fname

    def newCalFileName(self,type,runBegin,runEnd='end'):
        """
        Returns a new calibration file name for a given type
        (either 'pedestals' or 'nolasing' for XTCAV) and validity run range.
        """

        path=os.path.join(self.cdir)
        if not os.path.exists(path):
            os.mkdir(path)
        path=os.path.join(self.cdir,self.calibgroup)
        if not os.path.exists(path):
            os.mkdir(path)
        path=os.path.join(self.cdir,self.calibgroup,self.src)
        if not os.path.exists(path):
            os.mkdir(path)
        path=os.path.join(self.cdir,self.calibgroup,self.src,type)
        if not os.path.exists(path):
            os.mkdir(path)
        return path+'/'+str(runBegin)+'-'+str(runEnd)+'.data'
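
A rough usage sketch for CalibrationPaths (the run numbers and calibration type are illustrative; the import mirrors the xtcav.* imports used elsewhere in this commit):

import psana
from xtcav.CalibrationPaths import CalibrationPaths

ds = psana.DataSource("exp=amoc8114:run=85:idx")
cp = CalibrationPaths(ds)
existing = cp.findCalFileName('pedestals',85)      #look up existing constants for run 85
newname = cp.newCalFileName('pedestals',85,109)    #path for new constants valid for runs 85-109
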
98 changes: 98 additions & 0 deletions src/Constants.py
@@ -0,0 +1,98 @@
import h5py
import numpy
import logging

class Empty(object):
    pass

class ConstantsStore(object):
    def __init__(self,obj,file):
        self.f = h5py.File(file,'w')
        self.cwd = ''
        for name in obj.__dict__:
            subobj = getattr(obj,name)
            self.dispatch(subobj,name)
        self.f.close()
    def pushdir(self,dir):
        '''move down a level and keep track of what hdf directory level we are in'''
        self.cwd += '/'+dir
    def popdir(self):
        '''move up a level and keep track of what hdf directory level we are in'''
        self.cwd = self.cwd[:self.cwd.rfind('/')]
    def typeok(self,obj,name):
        '''check if we support serializing this type to hdf'''
        allowed = [dict,int,float,str,numpy.ndarray]
        return type(obj) in allowed
    def storevalue(self,v,name):
        '''persist one of the supported types to the hdf file'''
        self.f[self.cwd+'/'+name] = v
    def dict(self,d,name):
        '''called for every dictionary level to create a new hdf group under
        the current group.  it then looks into the dictionary to see if
        other groups need to be created'''
        self.f.create_group(self.cwd+'/'+name)
        self.pushdir(name)
        for k in d.keys():
            self.dispatch(d[k],k)
        self.popdir()
    def dispatch(self,obj,name):
        '''either persist a supported object, or look into a dictionary
        to see what objects need to be persisted'''
        if type(obj) is dict:
            self.dict(obj,name)
        else:
            if self.typeok(obj,name):
                self.storevalue(obj,name)
            else:
                logging.warning('XTCAV Constants.py: variable "'+name+'" of type "'+type(obj).__name__+'" not supported')

class ConstantsLoad(object):
    def __init__(self,file):
        self.obj = Empty()
        self.f = h5py.File(file,'r')
        self.f.visititems(self.loadCallBack)
        self.f.close()
    def setval(self,name,obj):
        '''see if this hdf name has a '/' in it.  if so, create the
        intermediate dictionary (if needed) and recurse to fill in the
        deeper levels.  if not, store the dataset value as an attribute
        or dictionary entry.'''
        if '/' in name:
            dictname=name[:name.find('/')]
            remainder=name[name.find('/')+1:]
            if type(obj) is dict:
                if dictname not in obj:
                    obj[dictname]={}
                self.setval(remainder,obj[dictname])
            else:
                if not hasattr(obj,dictname):
                    setattr(obj,dictname,{})
                self.setval(remainder,getattr(obj,dictname))
        else:
            if type(obj) is dict:
                obj[name]=self.f[self.fullname].value
            else:
                setattr(obj,name,self.f[self.fullname].value)
    def loadCallBack(self,name,obj):
        '''called back by the h5py routine visititems for each
        item (group/dataset) in the h5 file'''
        if isinstance(obj,h5py._hl.group.Group):
            return
        self.fullname = name
        self.setval(name,self.obj)

def Load(file):
    '''takes a string filename, and returns a constants object.'''
    c = ConstantsLoad(file)
    return c.obj

def Save(obj,file):
    '''store a constants object in an hdf5 file.  the object
    can be a hierarchy (defined by python dictionaries) of
    hdf5-supported types (int, float, numpy.ndarray, string).
    the hierarchy can be created by having one value of
    a dictionary itself be a dictionary.'''
    c = ConstantsStore(obj,file)

if __name__ == "__main__":

    import cPickle
    f = open("/reg/d/psdm/amo/amoc8114/calib/XTCAV/XrayTransportDiagnostic.0:Opal1000.0/lasingoffreference/151-159.data")
    data = cPickle.load(f)
    Save(data,'junk.h5')
    data = Load('junk.h5')
    print data.parameters
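
A minimal round-trip sketch of the Save/Load API defined above (the object and file name are made up):

from xtcav.Constants import Save, Load

class Config(object):
    pass

c = Config()
c.n = 3
c.roi = {'x0': 10, 'xN': 500}   #a nested dictionary becomes an hdf5 group
Save(c,'config.h5')
c2 = Load('config.h5')
print c2.n, c2.roi['x0']
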
37 changes: 37 additions & 0 deletions src/CustomLoadmat.py
@@ -0,0 +1,37 @@
#Code extracted from http://stackoverflow.com/questions/7008608/scipy-io-loadmat-nested-structures-i-e-dictionaries

import scipy.io

def customloadmat(filename):
    '''
    this function should be called instead of scipy.io.loadmat directly,
    as it cures the problem of not properly recovering python dictionaries
    from mat files.  It calls _check_keys to cure all entries
    which are still mat-objects
    '''
    data = scipy.io.loadmat(filename, struct_as_record=False, squeeze_me=True)
    return _check_keys(data)

def _check_keys(dict):
    '''
    checks if entries in the dictionary are mat-objects. If yes,
    _todict is called to change them to nested dictionaries
    '''
    for key in dict:
        if isinstance(dict[key], scipy.io.matlab.mio5_params.mat_struct):
            print key
            dict[key] = _todict(dict[key])
    return dict

def _todict(matobj):
    '''
    A recursive function which constructs nested dictionaries from mat-objects
    '''
    dict = {}
    for strg in matobj._fieldnames:
        elem = matobj.__dict__[strg]
        if isinstance(elem, scipy.io.matlab.mio5_params.mat_struct):
            dict[strg] = _todict(elem)
        else:
            dict[strg] = elem
    return dict
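
For reference, a hypothetical call (the .mat file name is invented):

constants = customloadmat('lasingoffreference.mat')   #nested mat structs come back as nested python dictionaries
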
21 changes: 21 additions & 0 deletions src/DarkBackground.py
@@ -0,0 +1,21 @@

import scipy.io
import cPickle
import numpy
from xtcav.CustomLoadmat import customloadmat
from xtcav.Constants import Load as constLoad
from xtcav.Constants import Save as constSave

class DarkBackground(object):
    def __init__(self):
        self.image=[]
        self.ROI=[]
        self.runs=numpy.array([],dtype=int)
        self.n=0

    def Save(self,path):
        constSave(self,path)

    @staticmethod
    def Load(path):
        return constLoad(path)
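
A short sketch of how this container might be used (the constants file path is hypothetical):

from xtcav.DarkBackground import DarkBackground

db = DarkBackground.Load('/path/to/calib/pedestals/85-109.data')
print db.n, db.runs
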