Skip to content

Commit

Permalink
Merge pull request #104 from ZryletTC/cleanup
Browse files Browse the repository at this point in the history
MNT: Cleanup of a few files
  • Loading branch information
silkenelson authored Feb 11, 2022
2 parents 59c9bd3 + 01e133a commit 6c27ec3
Show file tree
Hide file tree
Showing 4 changed files with 63 additions and 70 deletions.
2 changes: 1 addition & 1 deletion rc/bashrc
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ fi

source /reg/g/pcds/setup/pcds_shortcuts.sh

export PATH=$PATH:/reg/g/pcds/engineering_tools/latest/scripts/
export PATH=$PATH:/reg/g/pcds/engineering_tools/latest-released/scripts/

########################################
### Specific PCDS environment logins ###
Expand Down
1 change: 0 additions & 1 deletion scripts/ami_offline_psana
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,6 @@ fi

#plugin_path & plugin string.
pp=`grep plugin_path /reg/g/pcds/dist/pds/$HUTCH/scripts/$CNF | grep ami_base | grep -v '#' | awk 'BEGIN { FS = "= " }; { print $2}' | sed s/ami_base_path+//g | sed s/\'//g`
plugin_path=$ami_base_path`grep plugin_path /reg/g/pcds/dist/pds/$HUTCH/scripts/$CNF | grep ami_base | grep -v '#' | awk 'BEGIN { FS = "= " }; { print $2}' | sed s/ami_base_path+//g | sed s/\'//g`
plugin_path=$ami_base_path$pp
plugin_str=$plugin_path'/libtimetooldbd.so'

Expand Down
121 changes: 58 additions & 63 deletions scripts/epicsArchChecker
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,9 @@ epicsArch Checker
This script helps the engineers and scientists to verify
if the epicsArch files have errors by creating 3 types
of reports:
1. Pvs and Aliases duplicated.
2. Pvs with no alias and Aliases with no PVS.
3. Pvs no connected.
1. PVs and Aliases duplicated.
2. PVs with no alias and Aliases with no PVs.
3. PVs not connected.
4. Files that do not exist.
"""
import os
Expand All @@ -20,25 +20,26 @@ def main():
' mismatches of PVs and aliases,'
' missing files, and unconnected PVs.')
parser.add_argument('filepath',
help='Full filepath of the file to check'
help='Path of the file to check'
' e.g /reg/g/pcds/dist/pds/xpp/misc/epicsArch.txt',
type=str)
parser.add_argument('-s', '--status', action='store_true',
help='Displays Pvs not connected (default:False.)', default=False)
help='Displays PVs not connected (default:False.)',
default=False)
args = parser.parse_args()

fullpath = args.filepath
fullpath = os.path.abspath(args.filepath)
dirpath = os.path.dirname(fullpath)
filename = os.path.basename(fullpath)
os.chdir(dirpath)
entries, extraKeys, noKeyPVs = read_file(filename)

myKeys, myPvs, myFiles, lineNumbers = create_Lists(entries)
indKeys, indPvs = find_index(myKeys, myPvs, myFiles)
report_duplicates(indKeys, indPvs, myKeys, myPvs, myFiles, lineNumbers)
myKeys, myPVs, myFiles, lineNumbers = create_Lists(entries)
indKeys, indPVs = find_index(myKeys, myPVs, myFiles)
report_duplicates(indKeys, indPVs, myKeys, myPVs, myFiles, lineNumbers)
report_warnings(extraKeys, noKeyPVs)
if args.status:
report_statusPv(myKeys, myPvs, myFiles)
report_statusPV(myKeys, myPVs, myFiles)


def read_file(filename):
Expand Down Expand Up @@ -88,7 +89,7 @@ def read_file(filename):
key = line[1:].strip()
keyline = lineNum
elif line[0].isalnum():
pv = line.replace(' ca','').replace(' pva','').strip()
pv = line.replace(' ca', '').replace(' pva', '').strip()
if key == '':
noKeyPVs.append((pv, filename, lineNum))
else:
Expand All @@ -109,61 +110,55 @@ def create_Lists(entries):
"""

myKeys = []
myPvs = []
myPVs = []
myFiles = []
lineNumbers = []
for entry in entries:
myKeys.append(entry[0])
myPvs.append(entry[1])
myPVs.append(entry[1])
myFiles.append(entry[2])
lineNumbers.append('№ ' + str(entry[3]))
return(myKeys, myPvs, myFiles, lineNumbers)
return(myKeys, myPVs, myFiles, lineNumbers)


def find_index(myKeys, myPvs, myFiles):
def find_index(myKeys, myPVs, myFiles):
"""
This function will check the duplicated elements inside of
the lists myKeys, and myPvs.It will also find the
the lists myKeys and myPVs. It will also find the
index of each duplicated element inside of the lists
myKeys, and myPvs.
myKeys, and myPVs.
"""
indKeys = []
indPvs = []
indPVs = []

sKeys = sorted(myKeys)
dmyKeys = [dkey for dkey in sKeys if sKeys.count(dkey) > 1]
sPvs = sorted(myPvs)
dmyPvs = [dpv for dpv in sPvs if sPvs.count(dpv) > 1]
sPVs = sorted(myPVs)
dmyPVs = [dpv for dpv in sPVs if sPVs.count(dpv) > 1]

for dkey in range(len(dmyKeys)):
for key in range(len(myKeys)):
if dmyKeys[dkey] == myKeys[key]:
indKeys.append(key)
for dpv in range(len(dmyPvs)):
for pv in range(len(myPvs)):
if dmyPvs[dpv] == myPvs[pv]:
indPvs.append(pv)
for dpv in range(len(dmyPVs)):
for pv in range(len(myPVs)):
if dmyPVs[dpv] == myPVs[pv]:
indPVs.append(pv)

return(indKeys, indPvs)
return(indKeys, indPVs)


def report_duplicates(indKeys, indPvs, myKeys, myPvs, myFiles, numLines):
"""This function will display the duplicate Pvs and/or Aliases."""
def report_duplicates(indKeys, indPVs, myKeys, myPVs, myFiles, numLines):
"""This function will display the duplicate PVs and/or Aliases."""

duplikey = [myKeys[key] for key in indKeys]
duplipvs = [myPvs[pv] for pv in indPvs]
duplipvs = [myPVs[pv] for pv in indPVs]
copyKey = sorted(set(duplikey))
copyPv = sorted(set(duplipvs))
if copyKey and copyPv:
print("Report of duplicate Aliases and Pvs from the input file: \n")
elif copyPv:
print("Report of duplicate Pvs from the input file: \n")
elif copyKey:
print("Report of duplicate Aliases from the input file: \n")
copyPV = sorted(set(duplipvs))
size_per_col = 35
nameA = "Alias"
nameL = "Location"
nameP = "Pv"
nameP = "PV"
nLine = "Line number error"
ms1 = "Alias name has an space!!!!!"
if copyKey:
Expand All @@ -183,29 +178,29 @@ def report_duplicates(indKeys, indPvs, myKeys, myPvs, myFiles, numLines):
print(110*"=")
for value in range(len(myKeys)):
if nameKey == myKeys[value]:
if nameKey == myPvs[value]:
print(str(myPvs[value]).center(size_per_col),
if nameKey == myPVs[value]:
print(str(myPVs[value]).center(size_per_col),
str(myFiles[value]).center(size_per_col),
str(numLines[value]).center(size_per_col))
else:
print(str(myPvs[value]).center(size_per_col),
print(str(myPVs[value]).center(size_per_col),
str(myFiles[value]).center(size_per_col),
str(numLines[value]).center(size_per_col))
print(105*"=")
print("\n")
if copyPv:
if copyPV:
print("--------------------------------------------",
"Duplicate by Pvs",
"Duplicate by PVs",
"--------------------------------------------\n")
for namePv in copyPv:
print("Duplicate Pv: ", namePv)
for namePV in copyPV:
print("Duplicate PV: ", namePV)
print("\n")
print(str(nameA).center(size_per_col),
str(nameL).center(size_per_col),
str(nLine).center(size_per_col))
print(105*"=")
for value in range(len(myPvs)):
if namePv == myPvs[value]:
for value in range(len(myPVs)):
if namePV == myPVs[value]:
if " " in myKeys[value]:
print(str(myKeys[value]).center(size_per_col),
str(myFiles[value]).center(size_per_col),
Expand All @@ -222,7 +217,7 @@ def report_duplicates(indKeys, indPvs, myKeys, myPvs, myFiles, numLines):

def report_warnings(extraKeys, noKeyPVs):
"""
This function will display the Pvs with no alias
This function will display the PVs with no alias
and/or Aliases with no PV.
"""
if extraKeys:
Expand All @@ -235,37 +230,37 @@ def report_warnings(extraKeys, noKeyPVs):
print(table)
if noKeyPVs:
print("PVs with no Alias, WARNING!!!!:")
sortedListPvs = sorted(noKeyPVs, key=lambda x: x[1])
sortedListPVs = sorted(noKeyPVs, key=lambda x: x[1])
table = PrettyTable()
table.field_names = ["Pv name", "Location",
table.field_names = ["PV name", "Location",
"Line number error"]
table.add_rows(sortedListPvs)
table.add_rows(sortedListPVs)
print(table)


def report_statusPv(myKeys, myPvs, myFiles):
"""This function will display the no connected Pvs."""
def report_statusPV(myKeys, myPVs, myFiles):
"""This function will display the no connected PVs."""
totalInfo = []
for pv in range(len(myPvs)):
statusPv = []
for pv in range(len(myPVs)):
statusPV = []
try:
ophyd.signal.EpicsSignal(myPvs[pv]).get()
ophyd.signal.EpicsSignal(myPVs[pv]).get()
except Exception:
statusPv.append(myPvs[pv])
statusPv.append("No connected!")
statusPv.append(myKeys[pv])
statusPv.append(myFiles[pv])
if statusPv:
totalInfo.append(statusPv)
statusPV.append(myPVs[pv])
statusPV.append("No connected!")
statusPV.append(myKeys[pv])
statusPV.append(myFiles[pv])
if statusPV:
totalInfo.append(statusPV)
if totalInfo:
sortedList = sorted(totalInfo, key=lambda x: x[3])
table = PrettyTable()
print("Pvs NO connected:")
table.field_names = ["Pv Name", "Status", "Alias", "Location"]
print("PVs NO connected:")
table.field_names = ["PV Name", "Status", "Alias", "Location"]
table.add_rows(sortedList)
print(table)
else:
print("All the Pvs are connected!")
print("All the PVs are connected!")


if __name__ == "__main__":
Expand Down
9 changes: 4 additions & 5 deletions scripts/restartdaq
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,6 @@ elif [[ $AIMHOST == 'cxi-monitor' ]]; then
fi

HUTCH=`get_hutch_name`
HNAME=`cat /etc/hostname`

#go to hutches DAQ scripts directory (puts pid file in consistent location)
cd /reg/g/pcds/dist/pds/$HUTCH/scripts/
Expand All @@ -85,7 +84,7 @@ else
PROCMGR="/reg/g/pcds/dist/pds/$HUTCH/current/tools/procmgr/procmgr"
fi

IS_DAQ_HOST=`netconfig search $HNAME-$DAQNETWORK --brief | grep $DAQNETWORK | wc -l`
IS_DAQ_HOST=`netconfig search $AIMHOST-$DAQNETWORK --brief | grep $DAQNETWORK | wc -l`
if [ $IS_DAQ_HOST == 0 ]; then
HOSTS=`netconfig search $HUTCH-*-$DAQNETWORK --brief | sed s/-$DAQNETWORK//g`
WORKINGHOSTS=''
Expand All @@ -96,8 +95,8 @@ if [ $IS_DAQ_HOST == 0 ]; then
WORKINGHOSTS=$WORKINGHOSTS' '$HOST
fi
done
echo $AIMHOST does not have $DADQNETWORK, please choose one of the following machines to run the DAQ: $WORKINGHOSTS
echo '"restartdaq <machine_with_$DAQNETWORK>"'
echo $AIMHOST does not have $DAQNETWORK, please choose one of the following machines to run the DAQ: $WORKINGHOSTS
echo "restartdaq -m <machine_with_$DAQNETWORK>"
exit
fi

Expand Down Expand Up @@ -148,7 +147,7 @@ fi

DAQHOST=`wheredaq`
if [[ $DAQHOST == *$NOTRUNNING* ]]; then
echo 'we tried restarting the DAQ, but wheredaq does not return a host!'
echo 'We tried restarting the DAQ, but wheredaq says its still off!'
if [[ $DAQHOST != $AIMHOST ]]; then
echo 'We tried to run the DAQ on another host: target '$AIMHOST' from '$HOSTNAME
fi
Expand Down

0 comments on commit 6c27ec3

Please sign in to comment.