Commit: pluginization done
andreatramacere committed Mar 8, 2018
1 parent a54c3a8 commit c120ccc
Showing 20 changed files with 1,212 additions and 447 deletions.
84 changes: 84 additions & 0 deletions README.md
@@ -0,0 +1,84 @@
# OpenStack machine
http://10.194.169.75

# Sentry
invitation
- `http://10.194.169.75:9000/accept/2/afddf62f254a471bb3d858d200a5c6dd19a96317f4444e27bad2fbea176da748/`

monitor
- `http://10.194.169.75:9000/sentry/python/`

# Logstash
`http://openstack-compute01:5601/`

## tunnels

to logger
- `ssh -t -L 5001:localhost:5001 nx ssh -t -L 5001:localhost:5001 tramacer@openstack-compute01`

to monitor
- `ssh -t -L 5601:localhost:5601 nx ssh -t -L 5601:localhost:5601 tramacer@openstack-compute01`

## url from laptop
`http://localhost:5601`




# Setup on Mac

- mount ISDC using FUSE:

  `PATH` is the local cache directory (`dispatcher_mnt_point` in the new `conf_env.yml`),

  typically `PATH=/Users/orion/astro/Integral_Web_Analysis/TEST_DISPATCHER/ddosa_cache`

- `mkdir PATH`

- `sshfs tramacer@nx:/unsaved_data/neronov/data PATH`

- tunnel to the intggcn01 machine:
  `ssh -t -L 32778:localhost:32778 nx ssh -t -L 32778:localhost:32778 tramacer@intggcn01.isdc.unige.ch`

# Setup cdci machine
- `cd /var/cdci/dispatcher`
- `source conf/set_environment_dev_disp.sh`
- cd to work dir `/var/cdci/dispatcher/online_analysis/XXX/workdir`
- single thread: `run_osa_cdci_server.py -conf_file conf_env.yml`
- multi thread: `run_osa_cdci_server.py -conf_file conf_env.yml -use_gunicorn`
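
For reference, this commit moves `conf_env.yml` to a per-instrument layout (see the `config_dir/conf_env.yml` diff below). A minimal sketch with placeholder paths and hosts:

```yaml
# sketch of the new-format conf_env.yml (placeholder values)
data_server:
  isgri:
    dispatcher_mnt_point: /path/to/ddosa_cache   # local mount point of the data-server cache
    data_server_cache: reduced/ddcache           # cache path relative to the mount point
    dummy_cache: dummy_prods
    data_server_url: intggcn01.isdc.unige.ch
    data_server_port: 32778

dispatcher:
  dispatcher_url: 0.0.0.0
  dispatcher_port: 5000
  sentry_url:    # read by ConfigEnv.set_conf_dispatcher; an empty value loads as None
```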

# on every machine
- `cp .secret-ddosa-client ~`


# XSPEC python

- login to cdciweb01

- always run on the cdciweb01 machine!

- `bash`
- `heainit`

## Xspec python install

- set the include flags from `python-config --cflags`
- set the lib flags from `python-config --ldflags`

in `heasoft-<ver>/Xspec/BUILD_DIR/hmakerc`, e.g.:

    PYTHON_INC="-I/home/isdc/tramacer/anaconda2/include/python2.7"
    PYTHON_LIB="-lpython2.7"


- `cd /path/to/heasoft-<ver>/Xspec/src/XSUser/Python/xspec`
- `hmake clean`
- `hmake`
- `hmake install`
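
To smoke-test the build (assuming `heainit` has been run so the HEASoft environment is loaded):

- `python -c "import xspec; print(xspec.__file__)"`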


# To run the test examples, from the run_test directory:

`pytest ../tests/test_plugins.py::test_image -s -v`


192 changes: 86 additions & 106 deletions cdci_data_analysis/analysis/instrument.py
@@ -23,24 +23,20 @@
from builtins import (bytes, str, open, super, range,
zip, round, input, int, pow, object, map, zip)

from werkzeug.utils import secure_filename

from pathlib import Path
import json
import logging
import re
#logger = logging.getLogger(__name__)
import yaml

import os
import numpy as np
from astropy.table import Table

from cdci_data_analysis.analysis.queries import _check_is_base_query
from .catalog import BasicCatalog
from .products import QueryOutput
from .io_helper import view_traceback

import os

from .io_helper import FilePath
__author__ = "Andrea Tramacere"

# Standard library
@@ -54,13 +50,16 @@
# Project
# relative import eg: from .mod import f



class Instrument(object):
def __init__(self,
instr_name,
src_query,
instrumet_query,
input_product_query=None,
catalog=None,
data_serve_conf_file=None,
product_queries_list=None,
data_server_query_class=None,
query_dictionary={}):
@@ -76,6 +75,7 @@ def __init__(self,
self.instrumet_query=instrumet_query


self.set_data_server_conf_dict(data_serve_conf_file)

self.product_queries_list=product_queries_list

@@ -94,6 +94,22 @@ def __init__(self,
self.query_dictionary = query_dictionary


def set_data_server_conf_dict(self,data_serve_conf_file):
conf_dict=None
if data_serve_conf_file is not None:
with open(data_serve_conf_file, 'r') as ymlfile:
cfg_dict = yaml.load(ymlfile)
for k in cfg_dict['instruments'].keys():
#print ('name',k)
if self.name ==k:
#print('name', k,cfg_dict['instruments'][k])
conf_dict=cfg_dict['instruments'][k]

self.data_server_conf_dict=conf_dict

def get_logger(self):
logger = logging.getLogger(__name__)
return logger


def _check_names(self):
@@ -127,64 +143,43 @@ def get_query_by_name(self,prod_name):

return p

def test_communication(self,config):
def test_communication(self,config,logger=None):
if self.data_server_query_class is not None:
return self.data_server_query_class(config=config).test_connection()
return self.data_server_query_class(config=config).test_communication(logger=logger)

def test_busy(self, config):
def test_busy(self, config,logger=None):
if self.data_server_query_class is not None:
return self.data_server_query_class(config=config).test_busy()
return self.data_server_query_class(config=config).test_busy(logger=logger)

def test_has_input_products(self, config,instrument):
def test_has_input_products(self, config,instrument,logger=None):
if self.data_server_query_class is not None:
return self.data_server_query_class(config=config).test_has_input_products(instrument)



# def set_query_exception(query_out,excep,failed_operation,message_prepend_str='',extra_message=None,logger_prepend_str='==>',logger=None ):
#
# if excep.__repr__ is None:
# e_message=''
# else:
# e_message=excep.__repr__
#
#
# print('!!! >>>Exception<<<', e_message)
# print('!!! failed operation',failed_operation)
# view_traceback()
# if logger is not None:
# logger.exception(e_message)
# status = 1
#
# message = '%s'%message_prepend_str
# message += 'failed: %s' % (failed_operation)
# if extra_message is not None:
# message += 'message: %s' % (extra_message)
#
# debug_message = e_message
#
# msg_str = '%s'%logger_prepend_str
# msg_str += 'failed%s:', failed_operation
# msg_str += ' error%s:', e_message
# if extra_message is not None:
# msg_str += ' message: %s' % (extra_message)
#
# logger.info(msg_str)
#
# query_out.set_status(status, message, debug_message=str(debug_message))


def run_query(self,product_type,par_dic,request,back_end_query,job,prompt_delegate,config=None,out_dir=None,query_type='Real',verbose=False,logger=None,**kwargs):
return self.data_server_query_class(config=config).test_has_input_products(instrument,logger=logger)



def run_query(self,product_type,
par_dic,
request,
back_end_query,
job,
run_asynch,
config=None,
out_dir=None,
query_type='Real',
verbose=False,
logger=None,
sentry_client=None,
**kwargs):

#prod_dictionary={}


if logger is None:
logger = logging.getLogger(__name__)
logger = self.get_logger()

#set pars
query_out=self.set_pars_from_form(par_dic,verbose=verbose,sentry_client=sentry_client)

query_out=self.set_pars_from_form(par_dic,verbose=verbose)

if verbose ==True:
self.show_parameters_list()
@@ -193,12 +188,12 @@ def run_query(self,product_type,par_dic,request,back_end_query,job,prompt_delega

#set catalog
if query_out.status_dictionary['status']==0:
query_out=self.set_catalog_from_fronted(par_dic, request,back_end_query,logger=logger,verbose=verbose)
query_out=self.set_catalog_from_fronted(par_dic, request,back_end_query,logger=logger,verbose=verbose,sentry_client=sentry_client)


#set input products
if query_out.status_dictionary['status'] == 0:
query_out=self.set_input_products_from_fronted(par_dic, request,back_end_query,logger=logger,verbose=verbose)
query_out=self.set_input_products_from_fronted(par_dic, request,back_end_query,logger=logger,verbose=verbose,sentry_client=sentry_client)



@@ -214,34 +209,18 @@ def run_query(self,product_type,par_dic,request,back_end_query,job,prompt_delega
try:
query_name = self.query_dictionary[product_type]
print ('=======> query_name',query_name)
query_out = self.get_query_by_name(query_name).run_query(self, out_dir, job, prompt_delegate,
query_out = self.get_query_by_name(query_name).run_query(self, out_dir, job, run_asynch,
query_type=query_type, config=config,
logger=logger)
logger=logger,
sentry_client=sentry_client)
if query_out.status_dictionary['status'] == 0:
query_out.set_status(status, message, debug_message=str(debug_message))
else:
pass

except Exception as e:

#print('!!! >>>Exception<<<', e)
query_out.set_query_exception(e,product_type,logger=logger)

#print('!!! >>>Exception<<<', e)
#print("product error", e)
#view_traceback()
#logger.exception(e)
#status = 1
#message = 'product error: %s'%(product_type)
#debug_message = e

#msg_str = '==>product error:',e
#logger.info(msg_str)

#

#print ('ciccio failed',query_out.tatus_dictionary['status'])

query_out.set_query_exception(e,product_type,logger=logger,sentry_client=sentry_client)



@@ -286,7 +265,7 @@ def get_parameters_list_as_json(self):



def set_pars_from_form(self,par_dic,logger=None,verbose=False):
def set_pars_from_form(self,par_dic,logger=None,verbose=False,sentry_client=None):
print('---------------------------------------------')
print('setting form paramters')
q=QueryOutput()
@@ -300,7 +279,7 @@ def set_pars_from_form(self,par_dic,logger=None,verbose=False):
self.set_pars_from_dic(par_dic,verbose=verbose)
q.set_status(status, error_message, str(debug_message))
except Exception as e:
q.set_query_exception(e,'setting form parameters',logger=logger)
q.set_query_exception(e,'setting form parameters',logger=logger,sentry_client=sentry_client)

#status=1
#error_message= 'error in form parameter'
@@ -312,7 +291,7 @@ def set_pars_from_form(self,par_dic,logger=None,verbose=False):
return q


def set_input_products_from_fronted(self,par_dic,request,back_end_query,verbose=False,logger=None):
def set_input_products_from_fronted(self,par_dic,request,back_end_query,verbose=False,logger=None,sentry_client=None):
print('---------------------------------------------')
print('setting user input prods')
input_prod_list_name = self.instrumet_query.input_prod_list_name
@@ -331,20 +310,21 @@ def set_input_products_from_fronted(self,par_dic,request,back_end_query,verbose=

q.set_status(status, error_message, str(debug_message))
except Exception as e:
q.set_query_exception(e,'failed to upload scw_list file',
extra_message='failed to upload %s'%self.input_prod_name )
#error_message = 'failed to upload %s'%self.input_prod_name
#status = 1
#debug_message = e
#logger.exception(e)
q.set_query_exception(e,
'failed to upload scw_list file',
extra_message='failed to upload %s' % self.input_prod_name,
sentry_client=sentry_client)


try:
has_input=self.set_input_products(par_dic,input_file_path,input_prod_list_name)
q.set_status(status, error_message, str(debug_message))
except Exception as e :
q.set_query_exception(e,'scw_list file is not valid',
extra_message='scw_list file is not valid',
logger=logger)
q.set_query_exception(e,
'scw_list file is not valid',
extra_message='scw_list file is not valid',
logger=logger,
sentry_client=sentry_client)


#error_message = 'scw_list file is not valid'
@@ -363,7 +343,10 @@ def set_input_products_from_fronted(self,par_dic,request,back_end_query,verbose=
q.set_status(status, error_message, str(debug_message))

except:
q.set_query_exception(e,'setting input scw_list',extra_message='No scw_list from file accepted')
q.set_query_exception(e,
'setting input scw_list',
extra_message='No scw_list from file accepted',
sentry_client=sentry_client)

#error_message = 'No scw_list from file accepted'
#status = 1
@@ -394,7 +377,7 @@ def set_input_products(self, par_dic, input_file_path,input_prod_list_name):
return len(acceptList)>=1


def set_catalog_from_fronted(self,par_dic,request,back_end_query,logger=None,verbose=False):
def set_catalog_from_fronted(self,par_dic,request,back_end_query,logger=None,verbose=False,sentry_client=None):
print('---------------------------------------------')
print('setting user catalog')

@@ -415,29 +398,26 @@ def set_catalog_from_fronted(self,par_dic,request,back_end_query,logger=None,ver
print('set_catalog_from_fronted,request.method', request.method, par_dic['user_catalog_file'],cat_file_path)
q.set_status(status, error_message, str(debug_message))
except Exception as e:
q.set_query_exception(e,'upload catalog file',
extra_message='failed to upload catalog file',
logger=logger)
q.set_query_exception(e,
'upload catalog file',
extra_message='failed to upload catalog file',
logger=logger,
sentry_client=sentry_client)


#error_message = 'failed to upload catalog file'
#status = 1
#debug_message=e
#logger.exception(e)

try:
self.set_catalog(par_dic, scratch_dir=back_end_query.scratch_dir)
q.set_status(status, error_message, str(debug_message))
except Exception as e:

q.set_query_exception(e,'set catalog file',
extra_message='failed to set catalog',
logger=logger)
q.set_query_exception(e,
'set catalog file',
extra_message='failed to set catalog',
logger=logger,
sentry_client=sentry_client)


#error_message = 'failed to set catalog '
#status = 1
#debug_message = e
#print(e)
#logger.exception(e)

self.set_pars_from_dic(par_dic,verbose=verbose)

@@ -507,11 +487,11 @@ def build_catalog(cat_dic,catalog_selected_objects=None):
meta_ids = user_catalog._table['meta_ID']
IDs=[]
for ID,cat_ID in enumerate(meta_ids):
#print ("ID,cat_id",ID,cat_ID,catalog_selected_objects)

if cat_ID in catalog_selected_objects:
IDs.append(ID)
#print('selected')

user_catalog.select_IDs(IDs)
#TODO: check this indentation
user_catalog.select_IDs(IDs)

return user_catalog
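
The new `set_data_server_conf_dict` above is central to the pluginization: each `Instrument` pulls its own section out of a per-plugin YAML file. A condensed, standalone sketch of that lookup (the file name is hypothetical):

```python
import yaml

def get_instrument_conf(conf_file, instr_name):
    # mirrors Instrument.set_data_server_conf_dict: return the sub-dict
    # under 'instruments' that matches this instrument, or None if absent
    with open(conf_file, 'r') as ymlfile:
        cfg_dict = yaml.load(ymlfile)
    return cfg_dict.get('instruments', {}).get(instr_name)

# e.g.: get_instrument_conf('my_plugin_data_server_conf.yml', 'isgri')
```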
17 changes: 12 additions & 5 deletions cdci_data_analysis/analysis/io_helper.py
@@ -44,7 +44,7 @@


@decorator.decorator
def check_exist(func,self):
def check_exist(func,self,**kwargs):
if self.file_path.exists()==False:
raise RuntimeError('file %s does not exist' % self.file_path.path)
else:
@@ -73,16 +73,23 @@ class FitsFile(File):
def __init__(self,file_path):
super(FitsFile,self).__init__(file_path)

@check_exist
#@check_exist
def open(self):
#print('ciccio r', self.file_path)
return pf.open(self.file_path.path)
#print ('ciccio r',r)
#return r

@check_exist
def writeto(self,filename, data, header=None, output_verify='exception', overwrite=False, checksum=False):
pf.writeto(filename,data,header=header,output_verify=output_verify,overwrite=overwrite,checksum=checksum,)
#@check_exist
def writeto(self,out_filename=None, data=None, header=None, output_verify='exception', overwrite=False, checksum=False):
if out_filename is None:
out_filename=self.file_path.path

if data is None:

pf.open(self.file_path.path).writeto(out_filename,output_verify=output_verify,overwrite=overwrite,checksum=checksum)
else:
pf.writeto(out_filename,data,header=header,output_verify=output_verify,overwrite=overwrite,checksum=checksum)

class FilePath(object):
def __init__(self,file_name='',file_dir=u'./',name_prefix=None):
57 changes: 45 additions & 12 deletions cdci_data_analysis/analysis/products.py
@@ -44,46 +44,76 @@
from .parameters import *
from .io_helper import FilePath
from .io_helper import view_traceback,FitsFile

import sys



class QueryOutput(object):
def __init__(self):
self.prod_dictionary={}
self.status_dictionary={}

self.set_status(0,job_status='unknown')

def set_products(self,keys,values):
for k,v in zip(keys,values):
self.prod_dictionary[k] =v

def set_status(self,status,error_message='',debug_message=''):
def set_status(self,status,error_message='',debug_message='',job_status=None):


if job_status is not None:
self.status_dictionary['job_status'] = job_status

self.status_dictionary['status']=status
self.status_dictionary['error_message']=str(error_message)
self.status_dictionary['debug_message']=str(debug_message)

def set_query_exception(self, excep, failed_operation, message_prepend_str='', extra_message=None,
logger_prepend_str='==>', logger=None,status=1):
def get_status(self):
return self.status_dictionary['status']

def get_job_status(self):
print ('ciccio job_status',self.status_dictionary['job_status'])
return self.status_dictionary['job_status']

def set_query_exception(self, excep,
failed_operation,
message_prepend_str='',
extra_message=None,
message=None,
logger_prepend_str='==>',
logger=None,
status=1,
sentry_client=None):



if excep.__repr__ is None:
e_message = ''
else:
e_message = excep.__repr__()


if sentry_client is not None:
sentry_client.capture('raven.events.Message', message=e_message)

print('!!! >>>Exception<<<', e_message)
print('!!! failed operation', failed_operation)

view_traceback()

if logger is not None:
logger.exception(e_message)


message = '%s' % message_prepend_str
message += 'failed: %s' % (failed_operation)
if extra_message is not None:
message += 'message: %s' % (extra_message)
if message is None:
message = '%s' % message_prepend_str
message += 'failed: %s' % (failed_operation)
if extra_message is not None:
message += 'message: %s' % (extra_message)
else:
pass



debug_message = e_message

@@ -93,7 +123,8 @@ def set_query_exception(self, excep, failed_operation, message_prepend_str='', e
if extra_message is not None:
msg_str += ' message: %s' % (extra_message)

logger.info(msg_str)
if logger is not None:
logger.info(msg_str)

self.set_status(status, message, debug_message=str(debug_message))

@@ -408,6 +439,7 @@ def set_arf_file(self, in_arf_file=None,arf_kw=None, out_arf_file=None, overwrit
print("-->", self.header[arf_kw])
self.header[arf_kw] = 'NONE'
if out_arf_file is not None and in_arf_file is not None:
#print('in_arf_file', in_arf_file,out_arf_file)
#pf.open(in_arf_file).writeto(out_arf_file, overwrite=overwrite)
FitsFile(in_arf_file).writeto(out_arf_file, overwrite=overwrite)
print('arf written to', out_arf_file)
@@ -474,8 +506,9 @@ def from_fits_file(cls, file_name, prod_name, ext=0,arf_file_name=None,rmf_file_

def write(self,file_name=None,overwrite=True,file_dir=None):
file_path = self.file_path.get_file_path(file_name=file_name, file_dir=file_dir)

pf.writeto(file_path, data=self.data, header=self.header,overwrite=overwrite)
#print('ciccio')
FitsFile(file_path).writeto( data=self.data, header=self.header,overwrite=overwrite)
#pf.writeto(file_path, data=self.data, header=self.header,overwrite=overwrite)



283 changes: 117 additions & 166 deletions cdci_data_analysis/analysis/queries.py

Large diffs are not rendered by default.

20 changes: 13 additions & 7 deletions cdci_data_analysis/config_dir/conf_env.yml
@@ -1,7 +1,13 @@
local_cache: isdc
ddcache_root: unsaved_data/savchenk/data/reduced/ddcache
dummy_cache: dummy_prods
data_server_url: 127.0.0.1
data_server_port: 32778
dispatcher_url: 0.0.0.0
dispatcher_port: 5000
data_server:
isgri:
dispatcher_mnt_point: /Users/orion/astro/Integral_Web_Analysis/TEST_DISPATCHER/ddosa_cache
data_server_cache: reduced/ddcache
dummy_cache: dummy_prods
data_server_url: intggcn01.isdc.unige.ch
data_server_port: 32778



dispatcher:
dispatcher_url: 0.0.0.0
dispatcher_port: 5000
140 changes: 108 additions & 32 deletions cdci_data_analysis/configurer.py
@@ -1,21 +1,18 @@




from __future__ import absolute_import, division, print_function

from builtins import (bytes, str, open, super, range,
zip, round, input, int, pow, object, map, zip)

from cdci_data_analysis import conf_dir

from cdci_data_analysis import conf_dir
from cdci_data_analysis.analysis.io_helper import FilePath
import yaml

import sys
import os

__author__ = "Andrea Tramacere"


# Standard library
# eg copy
# absolute import rg:from copy import deepcopy
@@ -28,42 +25,121 @@
# relative import eg: from .mod import f


# ----------------------------------------
# launch
# ----------------------------------------


#----------------------------------------
# launch
#----------------------------------------
class DataServerConf(object):

def __init__(self, data_server_url, data_server_port, data_server_remote_cache, dispatcher_mnt_point,
dummy_cache):
# dataserver port
self.data_server_port = data_server_port

# dataserver url
self.data_server_url = data_server_url
self.dataserver_url = 'http://%s:%d' % (self.data_server_url, self.data_server_port)

# dummy prods local cache
self.dummy_cache = dummy_cache

# path to dataserver cache
self.data_server_remote_path = data_server_remote_cache

self.dispatcher_mnt_point = os.path.abspath(dispatcher_mnt_point)

FilePath(file_dir=self.dispatcher_mnt_point).mkdir()

self.dataserver_cache = os.path.join(self.dispatcher_mnt_point, self.data_server_remote_path)

@classmethod
def from_conf_dict(cls,conf_dict):

# dataserver port
data_server_port = conf_dict['data_server_port']

# dataserver url
data_server_url = conf_dict['data_server_url']

# dummy prods local cache
dummy_cache = conf_dict['dummy_cache']

# path to dataserver cache
data_server_remote_cache = conf_dict['data_server_cache']

dispatcher_mnt_point = conf_dict['dispatcher_mnt_point']

return DataServerConf(data_server_url,data_server_port,data_server_remote_cache,dispatcher_mnt_point,dummy_cache)

@classmethod
def from_conf_file(cls, conf_file):

with open(conf_file, 'r') as ymlfile:
cfg_dict = yaml.load(ymlfile)

return DataServerConf.from_conf_dict(cfg_dict)


class ConfigEnv(object):
def __init__(self,
cfg_dict):


self._data_server_conf_dict={}
print (cfg_dict.keys())

if 'data_server' in cfg_dict.keys():
for instr_name in cfg_dict['data_server']:
self.add_data_server_conf_dict(instr_name,cfg_dict)



if 'dispatcher' in cfg_dict.keys():

disp_dict=cfg_dict['dispatcher']

self.set_conf_dispatcher(disp_dict['dispatcher_url'],
disp_dict['dispatcher_port'],
disp_dict['sentry_url'])






def get_data_server_conf_dict(self,instr_name):
if instr_name in self._data_server_conf_dict.keys():
return self._data_server_conf_dict[instr_name]

def add_data_server_conf_dict(self,instr_name,data_server_conf_dict):
self._data_server_conf_dict[instr_name] = data_server_conf_dict
#self._data_server_conf_dict[instr_name] = DataServerConf.from_conf_dict(data_server_conf_dict)

def __init__(self,local_cache,ddcache_root,dummy_cache,data_server_url,data_server_port,dispatcher_url,dispatcher_port):
self.local_cache=os.path.abspath(local_cache)
self.ddcache_root=ddcache_root
self.dummy_cache=dummy_cache
self.data_server_url=data_server_url
self.data_server_port=data_server_port
self.dispatcher_url=dispatcher_url
self.dispatcher_port = dispatcher_port
def set_conf_dispatcher(self,dispatcher_url,dispatcher_port,sentry_url):
# Generic to dispatcher
print(dispatcher_url, dispatcher_port)
self.dispatcher_url = dispatcher_url
self.dispatcher_port = dispatcher_port
self.sentry_url=sentry_url

self.dataserver_url = 'http://%s:%d' % (self.data_server_url, self.data_server_port)
self.dataserver_cache = '%s/%s' % (self.local_cache, self.ddcache_root)

@classmethod
def from_conf_file(cls,conf_file_path):
if conf_file_path is None:
conf_file_path = conf_dir+'/conf_env.yml'

with open(conf_file_path, 'r') as ymlfile:
cfg = yaml.load(ymlfile)

def get_data_serve_conf(self,instr_name):
if instr_name in self.data_server_conf_dict.keys():
c= self._data_server_conf_dict[instr_name]
else:
c=None

return ConfigEnv(local_cache=cfg['local_cache'],
ddcache_root=cfg['ddcache_root'],
dummy_cache=cfg['dummy_cache'],
data_server_url=cfg['data_server_url'],
data_server_port=cfg['data_server_port'],
dispatcher_url=cfg['dispatcher_url'],
dispatcher_port=cfg['dispatcher_port'])
return c

@classmethod
def from_conf_file(cls, conf_file_path):
if conf_file_path is None:
conf_file_path = conf_dir + '/conf_env.yml'

with open(conf_file_path, 'r') as ymlfile:
cfg_dict = yaml.load(ymlfile)
#print('cfg_dict',cfg_dict)
return ConfigEnv(cfg_dict)
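
A short usage sketch of the new `DataServerConf` (placeholder values; the keys match `from_conf_dict` above, and note that the constructor creates `dispatcher_mnt_point` on disk):

```python
from cdci_data_analysis.configurer import DataServerConf

conf_dict = {
    'data_server_url': 'intggcn01.isdc.unige.ch',
    'data_server_port': 32778,
    'dummy_cache': 'dummy_prods',
    'data_server_cache': 'reduced/ddcache',
    'dispatcher_mnt_point': '/tmp/ddosa_cache',  # created if missing
}

c = DataServerConf.from_conf_dict(conf_dict)
print(c.dataserver_url)    # http://intggcn01.isdc.unige.ch:32778
print(c.dataserver_cache)  # /tmp/ddosa_cache/reduced/ddcache
```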
179 changes: 126 additions & 53 deletions cdci_data_analysis/flask_app/app.py
@@ -21,20 +21,20 @@
from flask import Flask, request


from cdci_data_analysis.plugins import OSA_ISGRI
from cdci_data_analysis.plugins import OSA_JEMX
from ..plugins import importer

from ..analysis.queries import *
from ..analysis.job_manager import Job
from ..analysis.io_helper import FilePath
from .mock_data_server import mock_query
from ..analysis.products import QueryOutput
from ..configurer import DataServerConf
import tempfile
import tarfile
import gzip
import logging
import socket

import logstash

#UPLOAD_FOLDER = '/path/to/the/uploads'
#ALLOWED_EXTENSIONS = set(['txt', 'fits', 'fits.gz'])
@@ -81,6 +81,7 @@ def __init__(self,instrument_name=None,par_dic=None,config=None,data_server_call
self.set_scratch_dir(self.par_dic['session_id'],job_id=self.job_id,verbose=verbose)

self.set_session_logger(self.scratch_dir,verbose=verbose)
self.set_sentry_client()

if data_server_call_back is False:
self.set_instrument(self.instrument_name)
@@ -90,9 +91,7 @@ def __init__(self,instrument_name=None,par_dic=None,config=None,data_server_call
except Exception as e:
print ('e',e)

#status = -1
#message = 'failed InstrumentQueryBackEnd constructor '
#debug_message = e


query_out = QueryOutput()
query_out.set_query_exception(e,'InstrumentQueryBackEnd constructor',extra_message='InstrumentQueryBackEnd constructor failed')
@@ -110,25 +109,10 @@ def __init__(self,instrument_name=None,par_dic=None,config=None,data_server_call
def generate_job_id(self):
print("!!! GENERATING JOB ID")
self.job_id=u''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(16))
#number = '0123456789'
#alpha = 'abcdefghijklmnopqrstuvwxyz'.capitalize()
#ID = ''
#for i in range(0, 16, 2):
# ID += random.choice(number)
# ID += random.choice(alpha)
#self.job_id=ID
#print ('------->str check',type(self.job_id),self.job_id)


def set_instrument(self,instrument_name):
if instrument_name == 'isgri':
self.instrument = OSA_ISGRI()
elif instrument_name=='jemx':
self.instrument=OSA_JEMX()
elif instrument_name=='mock':
self.instrument='mock'
else:
raise Exception("instrument not recognized".format(instrument_name))





def set_session_logger(self,scratch_dir,verbose=False):
logger = logging.getLogger(__name__)
@@ -141,11 +125,33 @@ def set_session_logger(self,scratch_dir,verbose=False):
log.removeHandler(hdlr)
log.addHandler(fileh) # set the new handler
logger.setLevel(logging.INFO)



if verbose==True:
print('logfile set to dir=', scratch_dir, ' with name=session.log')

host='10.194.169.75'
port=5001
logger.addHandler(logstash.TCPLogstashHandler(host, port))

extra = {
'origin': 'cdici_dispatcher',
}
logger = logging.LoggerAdapter(logger, extra)
self.logger=logger

def set_sentry_client(self,sentry_url=None):

if sentry_url is not None:
from raven import Client

client= Client(sentry_url)
else:
client=None


self.sentry_client=client

def get_current_ip(self):
return socket.gethostbyname(socket.gethostname())
@@ -274,10 +280,17 @@ def get_meta_data(self,name=None):

def run_call_back(self,status_kw_name='action'):

if self.config is None:
config = app.config.get('osaconf')
else:
config = self.config
try:
config, config_data_server = self.set_config()
print('dispatcher port', config.dispatcher_port)
except Exception as e:
query_out = QueryOutput()
query_out.set_query_exception(e, 'run_query failed in ', self.__class__.__name__,
extra_message='configuration failed')





job = Job(work_dir=self.scratch_dir,
server_url=self.get_current_ip(),
@@ -317,10 +330,10 @@ def run_query_mock(self, off_line=False):
log_str = 'parameters dictionary, key=' + key + ' value=' + str(self.par_dic[key])
self.logger.info(log_str)

if self.config is None:
config = app.config.get('osaconf')
else:
config = self.config
#if self.config is None:
# config_disp = app.config.get('dispatcher_conf')
#else:
# config_disp = self.config['dispatcher_conf']

out_dict=mock_query(self.par_dic,session_id,self.job_id,self.scratch_dir)

@@ -377,21 +390,70 @@ def build_dispatcher_response(self, out_dict=None,query_new_status=None,query_ou

return jsonify(out_dict)

def set_instrument(self, instrument_name):
new_instrument=None
if instrument_name == 'mock':
new_instrument = 'mock'

else:
for instrument_factory in importer.instrument_factory_list:
instrument = instrument_factory()
if instrument.name == instrument_name:
#print('setting instr',instrument_name,instrument.name)
new_instrument = instrument




if new_instrument is None:

raise Exception("instrument not recognized".format(instrument_name))
else:
self.instrument=new_instrument

def set_config(self):
if self.config is None:
config = app.config.get('conf')
else:
config = self.config

disp_data_server_conf_dict=config.get_data_server_conf_dict(self.instrument_name)

#print('pre',self.instrument.data_server_conf_dict)
if disp_data_server_conf_dict is not None:
if 'data_server' in disp_data_server_conf_dict.keys():
if self.instrument.name in disp_data_server_conf_dict['data_server'].keys():
for k in disp_data_server_conf_dict['data_server'][self.instrument.name].keys():
if k in self.instrument.data_server_conf_dict.keys():
#print (k,self.instrument.data_server_conf_dict.keys(),disp_data_server_conf_dict['data_server'][self.instrument.name].keys())
self.instrument.data_server_conf_dict[k] = disp_data_server_conf_dict['data_server'][self.instrument.name][k]

#print('post',self.instrument.data_server_conf_dict)
config_data_server=DataServerConf.from_conf_dict(self.instrument.data_server_conf_dict)
else:
config_data_server=None

return config,config_data_server

def run_query(self,off_line=False):

print ('==============================> run query <==============================')
query_type = self.par_dic['query_type']
product_type = self.par_dic['product_type']

#JOBID=PID+RAND
query_status=self.par_dic['query_status']

try:
query_type = self.par_dic['query_type']
product_type = self.par_dic['product_type']
query_status=self.par_dic['query_status']

except Exception as e:
query_out = QueryOutput()
query_out.set_query_exception(e, 'run_query failed in ',self.__class__.__name__,
extra_message='InstrumentQueryBackEnd constructor failed')


if 'instrumet' in self.par_dic:
self.par_dic.pop('instrumet')
#prod_dictionary = self.instrument.set_pars_from_from(par_dic)




@@ -407,12 +469,16 @@ def run_query(self,off_line=False):
log_str = 'parameters dictionary, key=' + key + ' value=' + str(self.par_dic[key])
self.logger.info(log_str)

if self.config is None:
config = app.config.get('osaconf')
else:
config=self.config
try:
config, config_data_server=self.set_config()
print('dispatcher port', config.dispatcher_port)
except Exception as e:
query_out = QueryOutput()
query_out.set_query_exception(e, 'run_query failed in ', self.__class__.__name__,
extra_message='configuration failed')

print('conf', config.dispatcher_port)
if config.sentry_url is not None:
self.set_sentry_client(config.sentry_url)

job = Job(work_dir=self.scratch_dir,
server_url=self.get_current_ip(),
@@ -430,24 +496,28 @@ def run_query(self,off_line=False):
out_dict=None
query_out=None


if query_status=='new' or query_status=='ready':
if query_status=='new':
prompt_delegate=True
else:
prompt_delegate=True
run_asynch = True

#if query_status=='new':
# run_asynch=True
#else:
# run_asynch=True

print ('*** prompt_delegate',prompt_delegate)
print ('*** run_asynch',run_asynch)
query_out = self.instrument.run_query(product_type,
self.par_dic,
request,
self,
job,
prompt_delegate,
run_asynch,
out_dir=self.scratch_dir,
config=config,
config=config_data_server,
query_type=query_type,
logger=self.logger,
verbose=False)
sentry_client=self.sentry_client,
verbose=False)


print('-----------------> job status after query:', job.status)
@@ -460,9 +530,10 @@ def run_query(self,off_line=False):
else:
query_new_status = 'failed'

job.write_dataserver_status()
print('-----------------> query status new 1: ', query_new_status)

elif query_status=='progress' or query_status=='unaccessible' or query_status=='unknown':
elif query_status=='progress' or query_status=='unaccessible' or query_status=='unknown' or query_status=='submitted':

job_monitor = job.get_dataserver_status()
print('-----------------> job status from data server', job_monitor['status'])
@@ -474,6 +545,8 @@ def run_query(self,off_line=False):
query_new_status='progress'
elif job_monitor['status'] == 'unaccessible':
query_new_status='unaccessible'
elif job_monitor['status'] == 'submitted':
query_new_status='submitted'
else:
query_new_status='progress'

@@ -622,7 +695,7 @@ def dataserver_call_back():


def run_app(conf,debug=False,threaded=False):
app.config['osaconf'] = conf
app.config['conf'] = conf
app.run(host=conf.dispatcher_url, port=conf.dispatcher_port, debug=debug,threaded=threaded)


41 changes: 0 additions & 41 deletions cdci_data_analysis/plugins/__init__.py
@@ -1,41 +0,0 @@
"""
Overview
--------
general info about this module
Classes and Inheritance Structure
----------------------------------------------
.. inheritance-diagram::
Summary
---------
.. autosummary::
list of the module you want
Module API
----------
"""

from __future__ import absolute_import, division, print_function

from builtins import (bytes, str, open, super, range,
zip, round, input, int, pow, object, map, zip)

__author__ = "Andrea Tramacere"

# Standard library
# eg copy
# absolute import rg:from copy import deepcopy

# Dependencies
# eg numpy
# absolute import eg: import numpy as np

# Project
# relative import eg: from .mod import f


from cdci_osa_plugin import OSA_ISGRI
from cdci_osa_plugin import OSA_JEMX
44 changes: 44 additions & 0 deletions cdci_data_analysis/plugins/dummy_instrument/__init__.py
@@ -0,0 +1,44 @@
"""
Overview
--------
general info about this module
Classes and Inheritance Structure
----------------------------------------------
.. inheritance-diagram::
Summary
---------
.. autosummary::
list of the module you want
Module API
----------
"""

from __future__ import absolute_import, division, print_function

from builtins import (bytes, str, open, super, range,
zip, round, input, int, pow, object, map, zip)

__author__ = "Andrea Tramacere"

# Standard library
# eg copy
# absolute import rg:from copy import deepcopy

# Dependencies
# eg numpy
# absolute import eg: import numpy as np

# Project
# relative import eg: from .mod import f


from .my_instrument import my_instr_factory



instr_factory_list=[my_instr_factory]
@@ -0,0 +1,7 @@
data_server:
isgri:
dispatcher_mnt_point:
data_server_cache: reduced/ddcache
dummy_cache: dummy_prods
data_server_url: intggcn01.isdc.unige.ch
data_server_port: 32778
104 changes: 104 additions & 0 deletions cdci_data_analysis/plugins/dummy_instrument/data_server_dispatcher.py
@@ -0,0 +1,104 @@
"""
Overview
--------
general info about this module
Classes and Inheritance Structure
----------------------------------------------
.. inheritance-diagram::
Summary
---------
.. autosummary::
list of the module you want
Module API
----------
"""

from __future__ import absolute_import, division, print_function

from builtins import (bytes, str, open, super, range,
zip, round, input, int, pow, object, map, zip)

__author__ = "Andrea Tramacere"

# Standard library
# eg copy
# absolute import rg:from copy import deepcopy

# Dependencies
# eg numpy
# absolute import eg: import numpy as np

# Project
# relative import eg: from .mod import f

import simple_logger
import logging
from cdci_data_analysis.analysis.job_manager import Job



class AsynchExcept(Exception):
pass

class DataServerQuery(object):

def __init__(self):
pass




def test_connection(self):
pass

def test_has_input_products(self):
pass


def run_query(self, job, prompt_delegate=True):
res = None
try:
# redirect_out('./')
# with silence_stdout():
#simple_logger.logger.setLevel(logging.ERROR)

if isinstance(job, Job):
pass
else:
raise RuntimeError('job object not passed')

print('--osa disp--')
print('call_back_url', job.get_call_back_url())
print('*** prompt_delegate', prompt_delegate)

#call to dataserver to get products

print('--> url for call_back', job.get_call_back_url())
print("--> cached object in", res, res.ddcache_root_local)
job.set_done()
except Exception as e:

job.set_failed()
print("ERROR->")
print(type(e), e)
print("e", e)
e.display()
raise RuntimeWarning('ddosa connection or processing failed', e)

except AsynchExcept as e:

if isinstance(job, Job):
print('--> url for call_back', job.get_call_back_url())
else:
raise RuntimeError('job object not passed')

return res




195 changes: 195 additions & 0 deletions cdci_data_analysis/plugins/dummy_instrument/image_query.py
@@ -0,0 +1,195 @@
"""
Overview
--------
general info about this module
Classes and Inheritance Structure
----------------------------------------------
.. inheritance-diagram::
Summary
---------
.. autosummary::
list of the module you want
Module API
----------
"""

from __future__ import absolute_import, division, print_function

from builtins import (bytes, str, open, super, range,
zip, round, input, int, pow, object, map, zip)

from cdci_data_analysis.analysis.catalog import BasicCatalog
from cdci_data_analysis.analysis.io_helper import FitsFile
from cdci_data_analysis.analysis.products import ImageProduct, QueryProductList, QueryOutput, CatalogProduct
from cdci_data_analysis.analysis.queries import ImageQuery
from .data_server_dispatcher import DataServerQuery
from .instr_catalog import MyInstrCatalog

__author__ = "Andrea Tramacere"

# Standard library
# eg copy
# absolute import rg:from copy import deepcopy

# Dependencies
# eg numpy
# absolute import eg: import numpy as np

# Project
# relative import eg: from .mod import f


class MyInstrImageProduct(ImageProduct):

def __init__(self,name,file_name,skyima,out_dir=None,prod_prefix=None):
header = skyima.header
data = skyima.data
super(MyInstrImageProduct, self).__init__(name,data=data,header=header,name_prefix=prod_prefix,file_dir=out_dir,file_name=file_name)
#check if you need to copy!





@classmethod
def build_from_ddosa_skyima(cls,name,file_name,skyima,out_dir=None,prod_prefix=None):
#skyima = pf.open(skyima)
skyima = FitsFile(skyima).open()
return cls(name,skyima=skyima[4],out_dir=out_dir,prod_prefix=prod_prefix,file_name=file_name)





class MosaicQuery(ImageQuery):

def __init__(self,name):

super(MosaicQuery, self).__init__(name)


def get_products(self,instrument,job,prompt_delegate,dump_json=False,use_dicosverer=False,config=None,out_dir=None,prod_prefix='query_spectrum'):
scwlist_assumption, cat, extramodules, inject=DataServerQuery.get_osa_query_base(instrument)
E1=instrument.get_par_by_name('E1_keV').value
E2=instrument.get_par_by_name('E2_keV').value
target, modules, assume=self.set_instr_dictionaries(extramodules,scwlist_assumption,E1,E2)
q=DataServerQuery(config=config, target=target, modules=modules, assume=assume, inject=inject)

#import sys
#print ('ciccio',target,modules,assume,inject)

res = q.run_query( job=job, prompt_delegate=prompt_delegate)

if job.status != 'done':
prod_list = QueryProductList(prod_list=[], job=job)
return prod_list
else:
return self.build_product_list(job,res,out_dir,prod_prefix)


def process_product(self, instrument, job, prod_list):

query_image = prod_list.get_prod_by_name('mosaic_image')
query_catalog = prod_list.get_prod_by_name('mosaic_catalog')
detection_significance = instrument.get_par_by_name('detection_threshold').value

if detection_significance is not None:
query_catalog.catalog.selected = query_catalog.catalog._table['significance'] > float(
detection_significance)

print('--> query was ok')
# file_path = Path(scratch_dir, 'query_mosaic.fits')
query_image.write(overwrite=True)
# file_path = Path(scratch_dir, 'query_catalog.fits')
query_catalog.write(overwrite=True)

html_fig = query_image.get_html_draw(catalog=query_catalog.catalog,
vmin=instrument.get_par_by_name('image_scale_min').value,
vmax=instrument.get_par_by_name('image_scale_max').value)

query_out = QueryOutput()

query_out.prod_dictionary['image'] = html_fig
query_out.prod_dictionary['catalog'] = query_catalog.catalog.get_dictionary()
# TODO: use query_image.file_path.path -> DONE AND PASSED
# print ("########## TESTING TODO: use query_image.file_path.path ", query_image.file_path.path)
query_out.prod_dictionary['file_name'] = str(query_image.file_path.name)

query_out.prod_dictionary['session_id'] = job.session_id
query_out.prod_dictionary['job_id'] = job.job_id

query_out.prod_dictionary['download_file_name'] = 'image.gz'
query_out.prod_dictionary['prod_process_maessage'] = ''

return query_out


def set_instr_dictionaries(self,extramodules,scwlist_assumption,E1,E2):
raise RuntimeError('Must be specified for each instrument')


class MyInstrMosaicQuery(MosaicQuery):
def __init__(self,name ):
super(MyInstrMosaicQuery, self).__init__(name)


def get_dummy_products(self, instrument, config=None, **kwargs):
pass


def set_instr_dictionaries(self,extramodules,scwlist_assumption,E1,E2):
target = "mosaic_jemx"
modules = ["git://ddosa", "git://ddosadm", "git://ddjemx", 'git://rangequery'] + extramodules

assume = ['ddjemx.JMXScWImageList(input_scwlist=%s)' % scwlist_assumption,
'ddjemx.JEnergyBins(use_bins=[(%(E1)s,%(E2)s)])' % dict(E1=E1, E2=E2),
'ddjemx.JEMX(use_num=2)']

return target, modules, assume

def build_product_list(self, job, res, out_dir, prod_prefix):

image = MyInstrImageProduct.build_from_ddosa_skyima('mosaic_image', 'jemx_query_mosaic.fits', res.skyima,
out_dir=out_dir, prod_prefix=prod_prefix)
osa_catalog = CatalogProduct('mosaic_catalog', catalog=MyInstrCatalog.build_from_ddosa_srclres(res.srclres),
file_name='query_catalog.fits', name_prefix=prod_prefix, file_dir=out_dir)

prod_list = QueryProductList(prod_list=[image, osa_catalog], job=job)

return prod_list




def get_dummy_products(self, instrument, config, out_dir='./'):

dummy_cache = config.dummy_cache

failed = False
image = None
catalog = None

user_catalog = instrument.get_par_by_name('user_catalog').value

image = ImageProduct.from_fits_file(in_file='%s/isgri_query_mosaic.fits' % dummy_cache,
out_file_name='isgri_query_mosaic.fits',
prod_name='mosaic_image',
ext=0,
file_dir=out_dir)

catalog = CatalogProduct(name='mosaic_catalog',
catalog=BasicCatalog.from_fits_file('%s/query_catalog.fits' % dummy_cache),
file_name='query_catalog.fits',
file_dir=out_dir)

if user_catalog is not None:
print('setting from user catalog', user_catalog, catalog)
catalog.catalog = user_catalog

prod_list = QueryProductList(prod_list=[image, catalog])
return prod_list
112 changes: 112 additions & 0 deletions cdci_data_analysis/plugins/dummy_instrument/instr_catalog.py
@@ -0,0 +1,112 @@
"""
Overview
--------
general info about this module
Classes and Inheritance Structure
----------------------------------------------
.. inheritance-diagram::
Summary
---------
.. autosummary::
list of the module you want
Module API
----------
"""

from __future__ import absolute_import, division, print_function

from builtins import (bytes, str, open, super, range,
zip, round, input, int, pow, object, map, zip)

__author__ = "Andrea Tramacere"

# Standard library
# eg copy
# absolute import rg:from copy import deepcopy

# Dependencies
# eg numpy
# absolute import eg: import numpy as np

# Project
# relative import eg: from .mod import f
import numpy as np

from cdci_data_analysis.analysis.io_helper import FitsFile
from cdci_data_analysis.analysis.catalog import BasicCatalog



class MyInstrCatalog(BasicCatalog):

def __init__(self,
src_names,
lon,
lat,
significance,
unit='deg',
frame='FK5',
NEW_SOURCE=None,
ISGRI_FLAG=None,
FLAG=None,
ERR_RAD=None):

super(MyInstrCatalog, self).__init__(src_names,
lon,
lat,
significance,
unit=unit,
frame=frame,)



self.add_column(data=NEW_SOURCE, name='NEW_SOURCE')
self.add_column(data=ISGRI_FLAG, name='ISGRI_FLAG', dtype=np.int)
self.add_column(data=FLAG, name='FLAG', dtype=np.int)
self.add_column(data=ERR_RAD, name='ERR_RAD', dtype=np.float)

@classmethod
def build_from_dict_list(cls, distlist):
frame = "FK5"

get_key_column = lambda key, default=None: [de.get(key, default) for de in distlist]

print(get_key_column('name'), cls)

return cls(get_key_column('name'),
get_key_column('ra'),
get_key_column('dec'),
significance=get_key_column('DETSIG', 0),
frame="fk5",
ISGRI_FLAG=get_key_column("ISGRI_FLAG", 1),
NEW_SOURCE=get_key_column("NEW_SOURCE", 0),
FLAG=get_key_column("FLAG", 1),
ERR_RAD=get_key_column('err_rad', 0.01))

@classmethod
def build_from_ddosa_srclres(cls, srclres,prod_prefix=None):
#catalog = pf.open(srclres)[1]
catalog=FitsFile(srclres).open()[1]

print ('cat file',srclres)
frame = catalog.header['RADECSYS'].lower()
catalog=catalog.data
return cls( [n.strip() for n in catalog['NAME']],
catalog['RA_FIN'],
catalog['DEC_FIN'],
significance=catalog['DETSIG'],
frame=frame,
NEW_SOURCE=catalog['NEW_SOURCE'],
ISGRI_FLAG=catalog['ISGRI_FLAG'],
FLAG=catalog['FLAG'],
ERR_RAD=catalog['ERR_RAD'] )





85 changes: 85 additions & 0 deletions cdci_data_analysis/plugins/dummy_instrument/my_instrument.py
@@ -0,0 +1,85 @@
"""
Overview
--------
general info about this module
Classes and Inheritance Structure
----------------------------------------------
.. inheritance-diagram::
Summary
---------
.. autosummary::
list of the module you want
Module API
----------
"""

from __future__ import absolute_import, division, print_function

from builtins import (bytes, str, open, super, range,
zip, round, input, int, pow, object, map, zip)

from cdci_data_analysis.plugins.dummy_instrument.image_query import MyInstrMosaicQuery

__author__ = "Andrea Tramacere"

# Standard library
# eg copy
# absolute import rg:from copy import deepcopy

# Dependencies
# eg numpy
# absolute import eg: import numpy as np

# Project
# relative import eg: from .mod import f


from cdci_data_analysis.analysis.instrument import Instrument
from cdci_data_analysis.analysis.queries import *

from .data_server_dispatcher import DataServerQuery
from .image_query import MyInstrMosaicQuery

def my_instr_factory():
src_query = SourceQuery('src_query')

max_pointings = Integer(value=50, name='max_pointings')

radius = Angle(value=5.0, units='deg', name='radius')
E1_keV = SpectralBoundary(value=10., E_units='keV', name='E1_keV')
E2_keV = SpectralBoundary(value=40., E_units='keV', name='E2_keV')
spec_window = ParameterRange(E1_keV, E2_keV, 'spec_window')

instr_query_pars = [radius, max_pointings, spec_window]

instr_query = InstrumentQuery(
name='my_instr_parameters',
extra_parameters_list=instr_query_pars,
input_prod_list_name='scw_list',
input_prod_value=None,
catalog=None,
catalog_name='user_catalog')

#
#my_instr_image_query -> name given to this query
image= MyInstrMosaicQuery('my_instr_image_query')

# this dict binds the product name used by the frontend to the product query name
# e.g. my_instr_image is the parameter passed by the frontend to access
# MyInstrMosaicQuery via the query named 'my_instr_image_query'
query_dictionary={}
query_dictionary['my_instr_image'] = 'my_instr_image_query'

return Instrument('OSA_MYINSTR',
src_query=src_query,
instrumet_query=instr_query,
product_queries_list=[image],
data_server_query_class=DataServerQuery,
query_dictionary=query_dictionary)
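
Once a factory like this is registered, the dispatcher resolves it by name in `set_instrument` (see `flask_app/app.py` above). A hypothetical direct use:

```python
from cdci_data_analysis.plugins.dummy_instrument.my_instrument import my_instr_factory

instr = my_instr_factory()
print(instr.name)  # OSA_MYINSTR
# product queries are reachable through the query dictionary:
q = instr.get_query_by_name(instr.query_dictionary['my_instr_image'])
```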

49 changes: 49 additions & 0 deletions cdci_data_analysis/plugins/importer.py
@@ -0,0 +1,49 @@
"""
Overview
--------
general info about this module
Classes and Inheritance Structure
----------------------------------------------
.. inheritance-diagram::
Summary
---------
.. autosummary::
list of the module you want
Module API
----------
"""

from __future__ import absolute_import, division, print_function

from builtins import (bytes, str, open, super, range,
zip, round, input, int, pow, object, map, zip)

__author__ = "Andrea Tramacere"

# Standard library
# eg copy
# absolute import rg:from copy import deepcopy

# Dependencies
# eg numpy
# absolute import eg: import numpy as np

# Project
# relative import eg: from .mod import f
import importlib

plugin_list=['cdci_osa_plugin']


instrument_factory_list=[]
for plugin_name in plugin_list:
e=importlib.import_module(plugin_name+'.exposer')
instrument_factory_list.extend(e.instr_factory_list)
#for p in plugin.instr_factory_list:
# print ('p',p)
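
Plugin discovery is purely name-based: for each package in `plugin_list`, the importer loads `<package>.exposer` and extends the factory list. A minimal sketch of the `exposer` module a plugin must provide (mirroring `dummy_instrument/__init__.py` above; the package name is hypothetical):

```python
# my_plugin/exposer.py -- the module importer.py looks for
from .my_instrument import my_instr_factory

# importer.py extends its global instrument_factory_list with this list
instr_factory_list = [my_instr_factory]
```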

17 changes: 17 additions & 0 deletions instructions.txt
@@ -1,3 +1,18 @@
#open stack machine
http://10.194.169.75

#Sentry
open stack machine
http://10.194.169.75:9000/accept/2/afddf62f254a471bb3d858d200a5c6dd19a96317f4444e27bad2fbea176da748/
http://10.194.169.75:9000/sentry/python/

#Python-stash
http://openstack-compute01:5601/
ssh -t -L 5001:localhost:5001 nx ssh -t -L 5001:localhost:5001 tramacer@openstack-compute01
ssh -t -L 5601:localhost:5601 nx ssh -t -L 5601:localhost:5601 tramacer@openstack-compute01
http://localhost:5601




#On Mac
@@ -27,6 +42,8 @@
cp .secret-ddosa-client ~




cdciweb01
testing machine: http://cdciweb01.isdc.unige.ch
http://cdciweb01.isdc.unige.ch/cdci/online-data-analysis
4 changes: 3 additions & 1 deletion requirements.txt
@@ -7,4 +7,6 @@ flask
astropy
pathlib
gunicorn
decorator
decorator
python-logstash
raven
1 change: 1 addition & 0 deletions setup.py
@@ -37,6 +37,7 @@
author='Andrea Tramacere',
author_email='andrea.tramacere@unige.ch',
scripts=scripts_list,
package_data={'cdci_data_analysis':['config_dir/*']},
packages=packs,
include_package_data=True,
install_requires=install_req,
28 changes: 4 additions & 24 deletions tests/test_plugins.py
@@ -8,7 +8,7 @@
logger = logging.getLogger(__name__)

from cdci_data_analysis.configurer import ConfigEnv
osaconf = ConfigEnv.from_conf_file('./conf_env.yml')
osaconf = ConfigEnv.from_conf_file('./conf_env_test.yml')

import time
from flask import Flask, request
@@ -23,26 +23,6 @@



def test_instr(use_scw_list=True):

from cdci_data_analysis.plugins.ddosa.osa_isgri import OSA_ISGRI

instr= OSA_ISGRI()

parameters_dic=dict(E1_keV=20.,E2_keV=40.,T1='2003-03-15T23:27:40.0', T2='2003-03-16T00:03:15.0',RA=257.815417,DEC=257.815417,radius=25,scw_list=None,T_format='isot')


instr.set_pars_from_dic(parameters_dic)

if use_scw_list==True:
instr.set_par('scw_list',cookbook_scw_list)
else:
instr.set_par('scw_list', [])
instr.set_par('time_group_selector','time_range_iso')






def build_user_catalog(RA_user_cat,Dec_user_cat):
@@ -361,9 +341,9 @@ def test_asynch_full():
"""

instrument_name='isgri'
parameters_dic,upload_data=set_lc_query(instrument_name=instrument_name,
scw_list=None,
E1_keV=22.5,
parameters_dic,upload_data=set_mosaic_query(instrument_name=instrument_name,
scw_list=asynch_scw_list,
E1_keV=23.0,
RA_user_cat=[80.63168334960938],
Dec_user_cat=[20.01494598388672],
user_catalog=False,
