From b7356f051f99e56a5bb2b76d7f1ec5e0bc6018da Mon Sep 17 00:00:00 2001 From: "dr.hualinxiao@gmail.com" Date: Sun, 10 Jul 2022 10:56:30 +0200 Subject: [PATCH] updated documentation --- docs/stixdcpy/net.html | 692 +++++++++++++++++++++++++++-------------- stixdcpy/science.py | 214 ++++++------- 2 files changed, 554 insertions(+), 352 deletions(-) diff --git a/docs/stixdcpy/net.html b/docs/stixdcpy/net.html index f227860..18b703b 100644 --- a/docs/stixdcpy/net.html +++ b/docs/stixdcpy/net.html @@ -40,22 +40,22 @@

Module stixdcpy.net

""" import hashlib - import numpy as np import pandas as pd import pprint from pathlib import Path, PurePath from dateutil import parser as dtparser - import requests from astropy.io import fits from tqdm import tqdm +import logging +logger = logging.getLogger(__name__) DOWNLOAD_PATH = Path.cwd() / 'downloads' DOWNLOAD_PATH.mkdir(parents=False, exist_ok=True) -HOST = 'https://datacenter.stix.i4ds.net' -#HOST='http://localhost:5000' -URLS_POST = { +#HOST = 'https://datacenter.stix.i4ds.net' +HOST='http://localhost:5000' +ENDPOINTS = { 'LC': f'{HOST}/api/request/ql/lightcurves', 'HK': f'{HOST}/api/request/housekeeping', 'HK2': f'{HOST}/api/request/hk2', @@ -66,6 +66,7 @@

Module stixdcpy.net

'TRANSMISSION': f'{HOST}/api/request/transmission', 'FLARE_LIST': f'{HOST}/api/request/flare-list', 'STIX_POINTING': f'{HOST}/api/request/stixfov', + 'FITS': f'{HOST}/api/query/fits', 'CFL_SOLVER': f'{HOST}/api/request/solve/cfl' } @@ -76,6 +77,9 @@

Module stixdcpy.net

class FitsQueryResult(object): + """ + FITS query result manager + """ def __init__(self, resp): self.hdu_objects = [] self.result = resp @@ -95,38 +99,68 @@

Module stixdcpy.net

def __len__(self): return len(self.result) - - def pprint(self): - pprint.pprint(self.result) - def to_pandas(self): + """ + Convert FitsQueryResult to pandas dataframe + Returns: + pandas data frame + """ return pd.DataFrame(self.result) def open_fits(self): + """ + Open all the FITS files + + Returns: + hdu_objcts: list + A list of HDU objects + + """ self.hdu_objects = [] for filename in self.downloaded_fits_files: self.hdu_objects.append(fits.open(filename)) return self.hdu_objects def fits_info(self): + """ + Print out information of the loaded specified FITS files + """ for hdu in self.hdu_objects: - print(hdu.info()) + logger.info(hdu.info()) def get_fits_ids(self): + """ + Get FITS file IDs + Return + ids: list + FITS file ids + """ return [row['fits_id'] for row in self.result] def fetch(self): + """ + Download fits files from STIX data center + FITS files will be stored in the folder download/ in the current directory + + Returns: + + fits_filenames: list + List of downloaded FITS filenames + + + """ if self.result: self.downloaded_fits_files = FitsQuery.fetch(self.result) return self.downloaded_fits_files else: - print('WARNING: Nothing to be downloaded from stix data center!') + logger.warning('WARNING: Nothing to be downloaded from stix data center!') class FitsQuery(object): """ - Request FITS format data from STIX data center + Query or Fetch FITS products from STIX data center """ + def __init__(self): self.fits_file_list = [] @@ -135,7 +169,7 @@

Module stixdcpy.net

"""Download a file from the link and save the file to a temporary file. Downloading progress will be shown in a progress bar - Args: + Parameters: url (str): URL desc (str): description to be shown on the progress bar @@ -146,7 +180,7 @@

Module stixdcpy.net

resp = requests.get(url, stream=stream) content_type = resp.headers.get('content-type') if content_type != 'binary/x-fits': - print('ERROR:', resp.content) + logger.error(resp.content) return None folder = DOWNLOAD_PATH @@ -159,7 +193,7 @@

Module stixdcpy.net

filename = PurePath(folder, fname) file_path = Path(filename) if file_path.is_file(): - print(f'Found the data in local storage. Filename: {filename} ...') + logger.info(f'Found the data in local storage. Filename: {filename} ...') return str(file_path) f = open(filename, 'wb') chunk_size = 1024 @@ -179,28 +213,56 @@

Module stixdcpy.net

return name @staticmethod - def query(start_utc, stop_utc, product_type='lc'): + def query(begin_utc, end_utc, product_type='lc', filter=None): + """Query FITS products from STIX data center + + Args: + start_utc (str): start time + stop_utc (str): end time + product_type (str, optional): + FITS product type. Defaults to 'lc'. + + + Returns: + results: FitsQueryResult + file result object + """ if product_type not in FITS_TYPES: raise TypeError( f'Invalid product type! product_type can be one of {str(FITS_TYPES)}' ) - url = f'{HOST}/query/fits/{start_utc}/{stop_utc}/{product_type}' - r = requests.get(url).json() - res = [] + form = {'start_utc': begin_utc, 'end_utc': end_utc, 'type':product_type, 'filter':str(filter)} + url = ENDPOINTS['FITS'] + res=[] + r=JSONRequest.post(url, form) if isinstance(r, list): res = r elif 'error' in r: - print(r['error']) + logger.error(r['error']) return FitsQueryResult(res) @staticmethod def fetch_bulk_science_by_request_id(request_id): url = f'{HOST}/download/fits/bsd/{request_id}' - fname = FitsQuery.wget(url, f'Downloading STIX Science data #{request_id}') + fname = FitsQuery.wget(url, + f'Downloading STIX Science data #{request_id}') return fname @staticmethod def fetch(query_results): + """ + Download FITS files + Arguments + ---- + query_results: FitsQueryResult + FitsQueryResult object + + Returns + ------- + filenames: list + A list of fits filenames + + """ fits_ids = [] if isinstance(query_results, FitsQueryResult): fits_ids = query_results.get_fits_ids() @@ -234,7 +296,7 @@

Module stixdcpy.net

@staticmethod def get_fits(fits_id, progress_bar=True): """Download FITS data products from STIX data center. - Args: + Parameters: fits_id: FITS file ID progress_bar: show the progress bar if it is true @@ -271,7 +333,7 @@

Module stixdcpy.net

def fetch_light_curves(begin_utc: str, end_utc: str, ltc: bool): """ Request light curve from STIX data center - Args: + Parameters: begin_utc: str Observation start time end_utc: str @@ -284,14 +346,14 @@

Module stixdcpy.net

""" form = {'begin': begin_utc, 'ltc': ltc, 'end': end_utc} - url = URLS_POST['LC'] + url = ENDPOINTS['LC'] return JSONRequest.post(url, form) @staticmethod def fetch_housekeeping(begin_utc: str, end_utc: str): """Fetch housekeeping data from STIX data center - Args: + Parameters: begin_utc: Data start time end_utc: data end time @@ -311,14 +373,14 @@

Module stixdcpy.net

'start_unix': start_unix, 'duration': duration, } - url = URLS_POST['HK'] + url = ENDPOINTS['HK'] return JSONRequest.post(url, form) @staticmethod def solve_cfl(cfl_counts, cfl_counts_err, fluence, fluence_err): """compute flare location using the online flare location solver - Args: + Parameters: cfl_counts: numpy array or list counts recorded by the 12 CFL pixels cfl_counts_err: numpy array or list @@ -332,51 +394,62 @@

Module stixdcpy.net

CFL location, ephemeris and the chisquare map """ - form = {'counts': cfl_counts, 'counts_err':cfl_counts_err, - 'fluence':fluence,'fluence_err':fluence_err} - url = URLS_POST['CFL_SOLVER'] + form = { + 'counts': cfl_counts, + 'counts_err': cfl_counts_err, + 'fluence': fluence, + 'fluence_err': fluence_err + } + url = ENDPOINTS['CFL_SOLVER'] return JSONRequest.post(url, form) @staticmethod def fetch_elut(utc): """Download ELUT from STIX data center - Args: + Parameters: utc: Time Returns: dict object: a diction string containing elut information """ form = {'utc': utc} - url = URLS_POST['ELUT'] + url = ENDPOINTS['ELUT'] return JSONRequest.post(url, form) @staticmethod - def request_ephemeris(start_utc: str, end_utc: str, steps=1): - return JSONRequest.post(URLS_POST['EPHEMERIS'], { - 'start_utc': start_utc, + def request_ephemeris(begin_utc: str, end_utc: str, steps=1): + return JSONRequest.post(ENDPOINTS['EPHEMERIS'], { + 'start_utc': begin_utc, 'end_utc': end_utc, 'steps': steps }) + @staticmethod def request_pointing(utc: str): - return JSONRequest.post(URLS_POST['STIX_POINTING'], { + return JSONRequest.post(ENDPOINTS['STIX_POINTING'], { 'utc': utc, }) + @staticmethod - def request_attitude(start_utc: str, end_utc: str, steps=1, instrument_frame='SOLO_SRF', ref_frame='SOLO_SUN_RTN'): - form={ - 'start_utc': start_utc, + def request_attitude(begin_utc: str, + end_utc: str, + steps=1, + instrument_frame='SOLO_SRF', + ref_frame='SOLO_SUN_RTN'): + form = { + 'start_utc': begin_utc, 'end_utc': end_utc, 'steps': steps, - 'frame1':instrument_frame, - 'frame2':ref_frame + 'frame1': instrument_frame, + 'frame2': ref_frame } - ret=JSONRequest.post(URLS_POST['ATTITUDE'], form) + ret = JSONRequest.post(ENDPOINTS['ATTITUDE'], form) return ret + @staticmethod def fetch_science_data(_id: int): """fetch science data from stix data center - Args: + Parameters: _id: int science data unique ID, which can be found on STIX data center bulk science data web page @@ -386,16 +459,17 @@

Module stixdcpy.net

science data received from data center if success or None if failed """ - return JSONRequest.post(URLS_POST['SCIENCE'], { + return JSONRequest.post(ENDPOINTS['SCIENCE'], { 'id': _id, }) @staticmethod - def fetch_flare_list(start_utc: str, end_utc: str, sort: str = 'time'): + def fetch_flare_list(begin_utc: str, end_utc: str, sort: str = 'time'): """ query and download flare list from stix data center - Args: - start_utc: str + Parameters: + ------ + begin_utc: str flare start UTC end_utc: str flare end UTC @@ -404,12 +478,13 @@

Module stixdcpy.net

Returns: + ----- flare_list: dict or None flare list if success or None if failed. """ - return JSONRequest.post(URLS_POST['FLARE_LIST'], { - 'start_utc': start_utc, + return JSONRequest.post(ENDPOINTS['FLARE_LIST'], { + 'start_utc': begin_utc, 'end_utc': end_utc, 'sort': sort }) @@ -428,15 +503,16 @@

Classes

class FitsQuery
-

Request FITS format data from STIX data center

+

Query or Fetch FITS products from STIX data center

Expand source code
class FitsQuery(object):
     """
-    Request FITS format data from STIX data center
+    Query or Fetch FITS products from STIX data center
     """
+
     def __init__(self):
         self.fits_file_list = []
 
@@ -445,7 +521,7 @@ 

Classes

"""Download a file from the link and save the file to a temporary file. Downloading progress will be shown in a progress bar - Args: + Parameters: url (str): URL desc (str): description to be shown on the progress bar @@ -456,7 +532,7 @@

Classes

resp = requests.get(url, stream=stream) content_type = resp.headers.get('content-type') if content_type != 'binary/x-fits': - print('ERROR:', resp.content) + logger.error(resp.content) return None folder = DOWNLOAD_PATH @@ -469,7 +545,7 @@

Classes

filename = PurePath(folder, fname) file_path = Path(filename) if file_path.is_file(): - print(f'Found the data in local storage. Filename: {filename} ...') + logger.info(f'Found the data in local storage. Filename: {filename} ...') return str(file_path) f = open(filename, 'wb') chunk_size = 1024 @@ -489,28 +565,56 @@

Classes

return name @staticmethod - def query(start_utc, stop_utc, product_type='lc'): + def query(begin_utc, end_utc, product_type='lc', filter=None): + """Query FITS products from STIX data center + + Args: + start_utc (str): start time + stop_utc (str): end time + product_type (str, optional): + FITS product type. Defaults to 'lc'. + + + Returns: + results: FitsQueryResult + file result object + """ if product_type not in FITS_TYPES: raise TypeError( f'Invalid product type! product_type can be one of {str(FITS_TYPES)}' ) - url = f'{HOST}/query/fits/{start_utc}/{stop_utc}/{product_type}' - r = requests.get(url).json() - res = [] + form = {'start_utc': begin_utc, 'end_utc': end_utc, 'type':product_type, 'filter':str(filter)} + url = ENDPOINTS['FITS'] + res=[] + r=JSONRequest.post(url, form) if isinstance(r, list): res = r elif 'error' in r: - print(r['error']) + logger.error(r['error']) return FitsQueryResult(res) @staticmethod def fetch_bulk_science_by_request_id(request_id): url = f'{HOST}/download/fits/bsd/{request_id}' - fname = FitsQuery.wget(url, f'Downloading STIX Science data #{request_id}') + fname = FitsQuery.wget(url, + f'Downloading STIX Science data #{request_id}') return fname @staticmethod def fetch(query_results): + """ + Download FITS files + Arguments + ---- + query_results: FitsQueryResult + FitsQueryResult object + + Returns + ------- + filenames: list + A list of fits filenames + + """ fits_ids = [] if isinstance(query_results, FitsQueryResult): fits_ids = query_results.get_fits_ids() @@ -544,7 +648,7 @@

Classes

@staticmethod def get_fits(fits_id, progress_bar=True): """Download FITS data products from STIX data center. - Args: + Parameters: fits_id: FITS file ID progress_bar: show the progress bar if it is true @@ -570,13 +674,37 @@

Static methods

def fetch(query_results)
-
+

Download FITS files +Arguments

+
+
+
query_results : FitsQueryResult
+
FitsQueryResult object
+
+

Returns

+
+
filenames :  list
+
A list of fits filenames
+
Expand source code
@staticmethod
 def fetch(query_results):
+    """
+    Download FITS files
+    Arguments
+    ----
+    query_results: FitsQueryResult 
+            FitsQueryResult object
+
+    Returns
+    -------
+    filenames: list
+        A list of FITS filenames
+
+    """
     fits_ids = []
     if isinstance(query_results, FitsQueryResult):
         fits_ids = query_results.get_fits_ids()
@@ -620,7 +748,8 @@ 

Static methods

@staticmethod
 def fetch_bulk_science_by_request_id(request_id):
     url = f'{HOST}/download/fits/bsd/{request_id}'
-    fname = FitsQuery.wget(url, f'Downloading STIX Science data #{request_id}')
+    fname = FitsQuery.wget(url,
+                           f'Downloading STIX Science data #{request_id}')
     return fname
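A brief usage sketch for the bulk-science download helper shown above; the request ID below is a placeholder, not taken from the patch, and the call assumes the configured HOST is reachable:

from stixdcpy.net import FitsQuery

# Download the bulk science FITS file for a hypothetical request ID into ./downloads
fname = FitsQuery.fetch_bulk_science_by_request_id(123456)
print(fname)  # local path of the downloaded file, or None if the server returned an error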
@@ -647,13 +776,9 @@

Static methods

Download FITS data products from STIX data center.

-

Args

-
-
fits_id
-
FITS file ID
-
progress_bar
-
show the progress bar if it is true
-
+

Parameters

+

fits_id: FITS file ID +progress_bar: show the progress bar if it is true

Returns

A FITS hdulist object if success; None if failed

@@ -664,7 +789,7 @@

Returns

@staticmethod
 def get_fits(fits_id, progress_bar=True):
     """Download FITS data products from STIX data center.
-    Args:
+    Parameters:
         fits_id: FITS file ID
         progress_bar: show the progress bar if it is true
 
@@ -678,27 +803,57 @@ 

Returns

-def query(start_utc, stop_utc, product_type='lc') +def query(begin_utc, end_utc, product_type='lc', filter=None)
-
+

Query FITS products from STIX data center

+

Args

+
+
begin_utc : str
+
start time
+
end_utc : str
+
end time
+
product_type : str, optional
+
 
+
+

FITS product type. Defaults to 'lc'.

+

Returns

+
+
results
+
FitsQueryResult +file result object
+
Expand source code
@staticmethod
-def query(start_utc, stop_utc, product_type='lc'):
+def query(begin_utc, end_utc, product_type='lc', filter=None):
+    """Query FITS products from STIX data center
+
+    Args:
+        begin_utc (str): start time
+        end_utc (str): end time
+        product_type (str, optional):
+            FITS product type. Defaults to 'lc'.
+        filter (str, optional):
+            additional query filter; it is converted to a string and sent to the server.
+
+
+    Returns:
+        results: FitsQueryResult
+            file result object
+    """
     if product_type not in FITS_TYPES:
         raise TypeError(
             f'Invalid product type! product_type can be one of {str(FITS_TYPES)}'
         )
-    url = f'{HOST}/query/fits/{start_utc}/{stop_utc}/{product_type}'
-    r = requests.get(url).json()
-    res = []
+    form = {'start_utc': begin_utc, 'end_utc': end_utc, 'type':product_type, 'filter':str(filter)}
+    url = ENDPOINTS['FITS']
+    res=[]
+    r=JSONRequest.post(url, form)
     if isinstance(r, list):
         res = r
     elif 'error' in r:
-        print(r['error'])
+        logger.error(r['error'])
     return FitsQueryResult(res)
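A minimal usage sketch for the reworked query interface above, assuming the configured HOST is reachable; the time range is illustrative:

from stixdcpy.net import FitsQuery

# Query light-curve FITS products for an illustrative one-day window
results = FitsQuery.query('2022-01-01T00:00:00', '2022-01-02T00:00:00', product_type='lc')
print(len(results))          # number of matching FITS products
df = results.to_pandas()     # inspect the metadata as a pandas DataFrame
files = results.fetch()      # download the files into ./downloads
hduls = results.open_fits()  # open the downloaded files with astropy.io.fits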
@@ -708,13 +863,9 @@

Returns

Download a file from the link and save the file to a temporary file. Downloading progress will be shown in a progress bar

-

Args

-
-
url : str
-
URL
-
desc : str
-
description to be shown on the progress bar
-
+

Parameters

+

url (str): URL +desc (str): description to be shown on the progress bar

Returns

temporary filename

@@ -726,7 +877,7 @@

Returns

"""Download a file from the link and save the file to a temporary file. Downloading progress will be shown in a progress bar - Args: + Parameters: url (str): URL desc (str): description to be shown on the progress bar @@ -737,7 +888,7 @@

Returns

resp = requests.get(url, stream=stream) content_type = resp.headers.get('content-type') if content_type != 'binary/x-fits': - print('ERROR:', resp.content) + logger.error(resp.content) return None folder = DOWNLOAD_PATH @@ -750,7 +901,7 @@

Returns

filename = PurePath(folder, fname) file_path = Path(filename) if file_path.is_file(): - print(f'Found the data in local storage. Filename: {filename} ...') + logger.info(f'Found the data in local storage. Filename: {filename} ...') return str(file_path) f = open(filename, 'wb') chunk_size = 1024 @@ -777,12 +928,15 @@

Returns

(resp)
-
+

FITS query result manager

Expand source code
class FitsQueryResult(object):
+    """
+        FITS query result manager 
+    """
     def __init__(self, resp):
         self.hdu_objects = []
         self.result = resp
@@ -802,32 +956,61 @@ 

Returns

def __len__(self): return len(self.result) - - def pprint(self): - pprint.pprint(self.result) - def to_pandas(self): + """ + Convert FitsQueryResult to pandas dataframe + Returns: + pandas data frame + """ return pd.DataFrame(self.result) def open_fits(self): + """ + Open all the FITS files + + Returns: + hdu_objcts: list + A list of HDU objects + + """ self.hdu_objects = [] for filename in self.downloaded_fits_files: self.hdu_objects.append(fits.open(filename)) return self.hdu_objects def fits_info(self): + """ + Print out information of the loaded specified FITS files + """ for hdu in self.hdu_objects: - print(hdu.info()) + logger.info(hdu.info()) def get_fits_ids(self): + """ + Get FITS file IDs + Return + ids: list + FITS file ids + """ return [row['fits_id'] for row in self.result] def fetch(self): + """ + Download fits files from STIX data center + FITS files will be stored in the folder download/ in the current directory + + Returns: + + fits_filenames: list + List of downloaded FITS filenames + + + """ if self.result: self.downloaded_fits_files = FitsQuery.fetch(self.result) return self.downloaded_fits_files else: - print('WARNING: Nothing to be downloaded from stix data center!')
+ logger.warning('WARNING: Nothing to be downloaded from stix data center!')

Methods

@@ -835,43 +1018,71 @@

Methods

def fetch(self)
-
+

Download FITS files from STIX data center +FITS files will be stored in the downloads/ folder in the current directory

+

Returns:

+

fits_filenames: +list +List of downloaded FITS filenames

Expand source code
def fetch(self):
+    """
+    Download FITS files from STIX data center
+    FITS files will be stored in the downloads/ folder in the current directory
+    
+    Returns:
+
+    fits_filenames:  list
+        List of downloaded FITS filenames
+    
+
+    """
     if self.result:
         self.downloaded_fits_files = FitsQuery.fetch(self.result)
         return self.downloaded_fits_files
     else:
-        print('WARNING: Nothing to be downloaded from stix data center!')
+ logger.warning('WARNING: Nothing to be downloaded from stix data center!')
def fits_info(self)
-
+

Print out information about the loaded FITS files

Expand source code
def fits_info(self):
+    """
+    Print out information about the loaded FITS files
+    """
     for hdu in self.hdu_objects:
-        print(hdu.info())
+ logger.info(hdu.info())
def get_fits_ids(self)
-
+

Get FITS file IDs +Returns +ids: list +FITS file IDs

Expand source code
def get_fits_ids(self):
+    """
+    Get FITS file IDs
+    Returns
+    ids: list
+        FITS file ids
+    """
     return [row['fits_id'] for row in self.result]
@@ -879,41 +1090,49 @@

Methods

def open_fits(self)
-
+

Open all the FITS files

+

Returns

+
+
hdu_objects
+
list +A list of HDU objects
+
Expand source code
def open_fits(self):
+    """
+     Open all the FITS files 
+    
+    Returns:
+        hdu_objects: list
+            A list of HDU objects
+
+    """
     self.hdu_objects = []
     for filename in self.downloaded_fits_files:
         self.hdu_objects.append(fits.open(filename))
     return self.hdu_objects
-
-def pprint(self)
-
-
-
-Expand source code
-
-def pprint(self):
-    pprint.pprint(self.result)
-
-
def to_pandas(self)
-
+

Convert FitsQueryResult to pandas dataframe

+

Returns

+

pandas data frame

Expand source code
def to_pandas(self):
+    """
+    Convert FitsQueryResult to pandas dataframe
+    Returns:
+        pandas data frame
+    """
     return pd.DataFrame(self.result)
@@ -944,7 +1163,7 @@

Methods

def fetch_light_curves(begin_utc: str, end_utc: str, ltc: bool): """ Request light curve from STIX data center - Args: + Parameters: begin_utc: str Observation start time end_utc: str @@ -957,14 +1176,14 @@

Methods

""" form = {'begin': begin_utc, 'ltc': ltc, 'end': end_utc} - url = URLS_POST['LC'] + url = ENDPOINTS['LC'] return JSONRequest.post(url, form) @staticmethod def fetch_housekeeping(begin_utc: str, end_utc: str): """Fetch housekeeping data from STIX data center - Args: + Parameters: begin_utc: Data start time end_utc: data end time @@ -984,14 +1203,14 @@

Methods

'start_unix': start_unix, 'duration': duration, } - url = URLS_POST['HK'] + url = ENDPOINTS['HK'] return JSONRequest.post(url, form) @staticmethod def solve_cfl(cfl_counts, cfl_counts_err, fluence, fluence_err): """compute flare location using the online flare location solver - Args: + Parameters: cfl_counts: numpy array or list counts recorded by the 12 CFL pixels cfl_counts_err: numpy array or list @@ -1005,51 +1224,62 @@

Methods

CFL location, ephemeris and the chisquare map """ - form = {'counts': cfl_counts, 'counts_err':cfl_counts_err, - 'fluence':fluence,'fluence_err':fluence_err} - url = URLS_POST['CFL_SOLVER'] + form = { + 'counts': cfl_counts, + 'counts_err': cfl_counts_err, + 'fluence': fluence, + 'fluence_err': fluence_err + } + url = ENDPOINTS['CFL_SOLVER'] return JSONRequest.post(url, form) @staticmethod def fetch_elut(utc): """Download ELUT from STIX data center - Args: + Parameters: utc: Time Returns: dict object: a diction string containing elut information """ form = {'utc': utc} - url = URLS_POST['ELUT'] + url = ENDPOINTS['ELUT'] return JSONRequest.post(url, form) @staticmethod - def request_ephemeris(start_utc: str, end_utc: str, steps=1): - return JSONRequest.post(URLS_POST['EPHEMERIS'], { - 'start_utc': start_utc, + def request_ephemeris(begin_utc: str, end_utc: str, steps=1): + return JSONRequest.post(ENDPOINTS['EPHEMERIS'], { + 'start_utc': begin_utc, 'end_utc': end_utc, 'steps': steps }) + @staticmethod def request_pointing(utc: str): - return JSONRequest.post(URLS_POST['STIX_POINTING'], { + return JSONRequest.post(ENDPOINTS['STIX_POINTING'], { 'utc': utc, }) + @staticmethod - def request_attitude(start_utc: str, end_utc: str, steps=1, instrument_frame='SOLO_SRF', ref_frame='SOLO_SUN_RTN'): - form={ - 'start_utc': start_utc, + def request_attitude(begin_utc: str, + end_utc: str, + steps=1, + instrument_frame='SOLO_SRF', + ref_frame='SOLO_SUN_RTN'): + form = { + 'start_utc': begin_utc, 'end_utc': end_utc, 'steps': steps, - 'frame1':instrument_frame, - 'frame2':ref_frame + 'frame1': instrument_frame, + 'frame2': ref_frame } - ret=JSONRequest.post(URLS_POST['ATTITUDE'], form) + ret = JSONRequest.post(ENDPOINTS['ATTITUDE'], form) return ret + @staticmethod def fetch_science_data(_id: int): """fetch science data from stix data center - Args: + Parameters: _id: int science data unique ID, which can be found on STIX data center bulk science data web page @@ -1059,16 +1289,17 @@

Methods

science data received from data center if success or None if failed """ - return JSONRequest.post(URLS_POST['SCIENCE'], { + return JSONRequest.post(ENDPOINTS['SCIENCE'], { 'id': _id, }) @staticmethod - def fetch_flare_list(start_utc: str, end_utc: str, sort: str = 'time'): + def fetch_flare_list(begin_utc: str, end_utc: str, sort: str = 'time'): """ query and download flare list from stix data center - Args: - start_utc: str + Parameters: + ------ + begin_utc: str flare start UTC end_utc: str flare end UTC @@ -1077,12 +1308,13 @@

Methods

Returns: + ----- flare_list: dict or None flare list if success or None if failed. """ - return JSONRequest.post(URLS_POST['FLARE_LIST'], { - 'start_utc': start_utc, + return JSONRequest.post(ENDPOINTS['FLARE_LIST'], { + 'start_utc': begin_utc, 'end_utc': end_utc, 'sort': sort }) @@ -1094,12 +1326,9 @@

Static methods

Download ELUT from STIX data center

-

Args

-
-
utc
-
Time
-
-

Returns: dict +

Parameters

+

utc: Time +Returns: dict object: a diction string containing elut information

@@ -1108,49 +1337,44 @@

Args

@staticmethod
 def fetch_elut(utc):
     """Download ELUT from STIX data center
-    Args:
+    Parameters:
         utc: Time
     Returns: dict
         object: a diction string containing elut information
     """
     form = {'utc': utc}
-    url = URLS_POST['ELUT']
+    url = ENDPOINTS['ELUT']
     return JSONRequest.post(url, form)
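For example, the ELUT valid at a given (illustrative) time can be requested as follows; the return value is whatever JSON the server sends back, or None on failure:

from stixdcpy.net import JSONRequest

elut = JSONRequest.fetch_elut('2022-01-01T00:00:00')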
-def fetch_flare_list(start_utc: str, end_utc: str, sort: str = 'time') +def fetch_flare_list(begin_utc: str, end_utc: str, sort: str = 'time')

query and download flare list from stix data center

-

Args

-
-
start_utc
-
str -flare start UTC
-
end_utc
-
str -flare end UTC
-
sort
-
str -key to sort flares. It can be one of ['goes','time', 'LC0','LC1','LC2','LC3','LC4], LCi here means the i-th QL light curve
-
-

Returns

-
-
flare_list
-
dict or None -flare list if success or None if failed.
-
+

Parameters:

+
begin_utc: str
+    flare start UTC
+end_utc: str
+    flare end UTC
+sort: str
+    key to sort flares. It can be one of ['goes', 'time', 'LC0', 'LC1', 'LC2', 'LC3', 'LC4'], where LCi means the i-th QL light curve
+
+

Returns:

+
flare_list: dict or None
+    flare list if success or None if failed.
+
Expand source code
@staticmethod
-def fetch_flare_list(start_utc: str, end_utc: str, sort: str = 'time'):
+def fetch_flare_list(begin_utc: str, end_utc: str, sort: str = 'time'):
     """ query and download flare list from stix data center
 
-    Args:
-        start_utc: str
+    Parameters:
+    ------
+        begin_utc: str
             flare start UTC
         end_utc: str
             flare end UTC
@@ -1159,12 +1383,13 @@ 

Returns

Returns: + ----- flare_list: dict or None flare list if success or None if failed. """ - return JSONRequest.post(URLS_POST['FLARE_LIST'], { - 'start_utc': start_utc, + return JSONRequest.post(ENDPOINTS['FLARE_LIST'], { + 'start_utc': begin_utc, 'end_utc': end_utc, 'sort': sort })
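A usage sketch for the renamed fetch_flare_list interface above; the time range is illustrative and 'goes' is one of the documented sort keys:

from stixdcpy.net import JSONRequest

# Fetch flares detected in an illustrative one-week window, sorted by GOES class
flares = JSONRequest.fetch_flare_list('2022-01-01T00:00:00',
                                      '2022-01-08T00:00:00',
                                      sort='goes')
if flares is None:
    print('Flare list request failed')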
@@ -1175,13 +1400,9 @@

Returns

Fetch housekeeping data from STIX data center

-

Args

-
-
begin_utc
-
Data start time
-
end_utc
-
data end time
-
+

Parameters

+

begin_utc: Data start time +end_utc: data end time

Returns

result
@@ -1196,7 +1417,7 @@

Returns

def fetch_housekeeping(begin_utc: str, end_utc: str): """Fetch housekeeping data from STIX data center - Args: + Parameters: begin_utc: Data start time end_utc: data end time @@ -1216,7 +1437,7 @@

Returns

'start_unix': start_unix, 'duration': duration, } - url = URLS_POST['HK'] + url = ENDPOINTS['HK'] return JSONRequest.post(url, form)
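A usage sketch for fetch_housekeeping, with an illustrative six-hour window:

from stixdcpy.net import JSONRequest

# Housekeeping data for an illustrative time range
hk = JSONRequest.fetch_housekeeping('2022-01-01T00:00:00', '2022-01-01T06:00:00')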
@@ -1225,19 +1446,15 @@

Returns

Request light curve from STIX data center

-

Args

-
-
begin_utc
-
str -Observation start time
-
end_utc
-
str -Observation end time
-
ltc
-
bool, optional +

Parameters

+

begin_utc: +str +Observation start time +end_utc: str +Observation end time +ltc: bool, optional Light time correction enabling flag. -Do light time correction if True

-
+Do light time correction if True

Returns

lightcurve
@@ -1252,7 +1469,7 @@

Returns

def fetch_light_curves(begin_utc: str, end_utc: str, ltc: bool): """ Request light curve from STIX data center - Args: + Parameters: begin_utc: str Observation start time end_utc: str @@ -1265,7 +1482,7 @@

Returns

""" form = {'begin': begin_utc, 'ltc': ltc, 'end': end_utc} - url = URLS_POST['LC'] + url = ENDPOINTS['LC'] return JSONRequest.post(url, form)
@@ -1274,12 +1491,9 @@

Returns

fetch science data from stix data center

-

Args

-
-
_id
-
int -science data unique ID, which can be found on STIX data center bulk science data web page
-
+

Parameters

+

_id: int +science data unique ID, which can be found on STIX data center bulk science data web page

Returns

science_data
@@ -1294,7 +1508,7 @@

Returns

def fetch_science_data(_id: int): """fetch science data from stix data center - Args: + Parameters: _id: int science data unique ID, which can be found on STIX data center bulk science data web page @@ -1304,7 +1518,7 @@

Returns

science data received from data center if success or None if failed """ - return JSONRequest.post(URLS_POST['SCIENCE'], { + return JSONRequest.post(ENDPOINTS['SCIENCE'], { 'id': _id, }) @@ -1330,7 +1544,7 @@

Returns

-def request_attitude(start_utc: str, end_utc: str, steps=1, instrument_frame='SOLO_SRF', ref_frame='SOLO_SUN_RTN') +def request_attitude(begin_utc: str, end_utc: str, steps=1, instrument_frame='SOLO_SRF', ref_frame='SOLO_SUN_RTN')
@@ -1339,20 +1553,24 @@

Returns

Expand source code
@staticmethod
-def request_attitude(start_utc: str, end_utc: str,  steps=1, instrument_frame='SOLO_SRF', ref_frame='SOLO_SUN_RTN'):
-    form={
-        'start_utc': start_utc,
+def request_attitude(begin_utc: str,
+                     end_utc: str,
+                     steps=1,
+                     instrument_frame='SOLO_SRF',
+                     ref_frame='SOLO_SUN_RTN'):
+    form = {
+        'start_utc': begin_utc,
         'end_utc': end_utc,
         'steps': steps,
-        'frame1':instrument_frame,
-        'frame2':ref_frame
+        'frame1': instrument_frame,
+        'frame2': ref_frame
     }
-    ret=JSONRequest.post(URLS_POST['ATTITUDE'], form)
+    ret = JSONRequest.post(ENDPOINTS['ATTITUDE'], form)
     return ret
-def request_ephemeris(start_utc: str, end_utc: str, steps=1) +def request_ephemeris(begin_utc: str, end_utc: str, steps=1)
@@ -1361,9 +1579,9 @@

Returns

Expand source code
@staticmethod
-def request_ephemeris(start_utc: str, end_utc: str, steps=1):
-    return JSONRequest.post(URLS_POST['EPHEMERIS'], {
-        'start_utc': start_utc,
+def request_ephemeris(begin_utc: str, end_utc: str, steps=1):
+    return JSONRequest.post(ENDPOINTS['EPHEMERIS'], {
+        'start_utc': begin_utc,
         'end_utc': end_utc,
         'steps': steps
     })
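A combined usage sketch for the renamed ephemeris and attitude helpers above; the time range and step count are illustrative:

from stixdcpy.net import JSONRequest

# Ephemeris and attitude for an illustrative one-day window, sampled in 24 steps
eph = JSONRequest.request_ephemeris('2022-01-01T00:00:00', '2022-01-02T00:00:00', steps=24)
att = JSONRequest.request_attitude('2022-01-01T00:00:00', '2022-01-02T00:00:00', steps=24)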
@@ -1380,7 +1598,7 @@

Returns

@staticmethod
 def request_pointing(utc: str):
-    return JSONRequest.post(URLS_POST['STIX_POINTING'], {
+    return JSONRequest.post(ENDPOINTS['STIX_POINTING'], {
         'utc': utc,
     })
@@ -1390,24 +1608,19 @@

Returns

compute flare location using the online flare location solver

-

Args

-
-
cfl_counts
-
numpy array or list -counts recorded by the 12 CFL pixels
-
cfl_counts_err
-
numpy array or list +

Parameters

+

cfl_counts: numpy array or list +counts recorded by the 12 CFL pixels +cfl_counts_err: +numpy array or list standard deviations of -the counts recorded by the 12 CFL pixels

-
fluence
-
float +the counts recorded by the 12 CFL pixels +fluence: float X-ray fluence in units of counts/mm2, -calculated using counts recorded by other detectors
-
fluence_err
-
float -Errors in fluence in counts/mm2 units
-
+calculated using counts recorded by other detectors +fluence_err: float +Errors in fluence in counts/mm2 units

Returns

location
@@ -1422,7 +1635,7 @@

Returns

def solve_cfl(cfl_counts, cfl_counts_err, fluence, fluence_err): """compute flare location using the online flare location solver - Args: + Parameters: cfl_counts: numpy array or list counts recorded by the 12 CFL pixels cfl_counts_err: numpy array or list @@ -1436,9 +1649,13 @@

Returns

CFL location, ephemeris and the chisquare map """ - form = {'counts': cfl_counts, 'counts_err':cfl_counts_err, - 'fluence':fluence,'fluence_err':fluence_err} - url = URLS_POST['CFL_SOLVER'] + form = { + 'counts': cfl_counts, + 'counts_err': cfl_counts_err, + 'fluence': fluence, + 'fluence_err': fluence_err + } + url = ENDPOINTS['CFL_SOLVER'] return JSONRequest.post(url, form)
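A usage sketch for solve_cfl with made-up numbers; the docstring accepts numpy arrays or lists, and plain lists are passed here so the payload is trivially JSON-serializable:

import numpy as np
from stixdcpy.net import JSONRequest

# Illustrative inputs: counts in the 12 CFL pixels, their errors, and a fluence estimate
cfl_counts = np.full(12, 1000.0)
cfl_counts_err = np.sqrt(cfl_counts)
solution = JSONRequest.solve_cfl(cfl_counts.tolist(), cfl_counts_err.tolist(),
                                 fluence=50.0, fluence_err=5.0)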
@@ -1473,12 +1690,11 @@

FitsQ
  • FitsQueryResult

    -
      + diff --git a/stixdcpy/science.py b/stixdcpy/science.py index c995020..b9a0a48 100644 --- a/stixdcpy/science.py +++ b/stixdcpy/science.py @@ -260,12 +260,12 @@ class ScienceL1(ScienceData): """ Tools to analyze L1 science data """ - def __init__(self, fname, request_id): + def __init__(self, fname, request_id, ltc=False): super().__init__(fname, request_id) self.data_type = 'ScienceL1' self.pixel_count_rates = None self.correct_pixel_count_rates = None - self.read_fits() + self.read_fits(light_time_correction=ltc) self.make_spectra() def make_spectra(self, pixel_counts=None): @@ -285,7 +285,7 @@ def make_spectra(self, pixel_counts=None): self.pixel_total_counts = np.sum(self.pixel_counts, axis=(0, 3)) - def correct_dead_time(self ): + def correct_dead_time(self): """ dead time correction Returns: corrected_counts: tuple @@ -295,9 +295,52 @@ def correct_dead_time(self ): photon_in: np.array live_ratio: np.array """ - self.corrected=LiveTimeCorrection.correct(self.triggers, self.pixel_counts, self.timedel) + + def correct(triggers, counts_arr, time_bins): + """ Live time correction + Args + triggers: ndarray + triggers in the spectrogram + counts_arr:ndarray + counts in the spectrogram + time_bins: ndarray + time_bins in the spectrogram + Returns + live_time_ratio: ndarray + live time ratio of detectors + count_rate: + corrected count rate + photons_in: + rate of photons illuminating the detector group + + """ + + fpga_tau = 10.1e-6 + asic_tau = 2.63e-6 + beta= 0.94 + trig_tau = fpga_tau + asic_tau + + time_bins = time_bins[:, None] + photons_in = triggers / (time_bins - trig_tau * triggers) + #photon rate calculated using triggers + + live_ratio= np.zeros((time_bins.size, 32)) + time_bins = time_bins[:, :, None, None] + + count_rate = counts_arr / time_bins + # print(counts_arr.shape) + for det in range(32): + trig_idx = inst.detector_id_to_trigger_index(det) + nin = photons_in[:, trig_idx] + live_ratio[:, det] = np.exp( + -beta* nin * asic_tau * 1e-6) / (1 + nin * trig_tau) + corrected_rate=count_rate/live_ratio[:, :, None, None] + return {'corrected_rates': corrected_rate, 'count_rate': count_rate, 'photons_in': photons_in, 'live_ratio':live_ratio} + self.corrected=correct(self.triggers, self.pixel_counts, self.timedel) return self.corrected + + def peek(self, plots=['spg', 'lc', 'spec', 'tbin', 'qllc'], ax0=None, @@ -389,65 +432,6 @@ def peek(self, return ax0, ax1, ax2, ax3 -class Spectrogram(ScienceData): - def __init__(self, fname, request_id): - super().__init__(fname,request_id) - self.data_type = 'Spectrogram' - - self.read_fits() - - self.spectrum = np.sum(self.counts, axis=0) - - def peek(self, ax0=None, ax1=None, ax2=None, ax3=None): - """ - preivew Science data - Arguments: - ax0: matplotlib axe - ax0: matplotlib axe - """ - if not self.hdul: - logger.error(f'Data not loaded. 
') - return None - - #((ax0, ax1), (ax2, ax3))=axs - if not any([ax0, ax1, ax2, ax3]): - _, ((ax0, ax1), (ax2, ax3)) = plt.subplots(2, 2, figsize=(8, 6)) - - if ax0: - X, Y = np.meshgrid(self.time, - self.hdul['ENERGIES'].data['channel']) - im = ax0.pcolormesh(X, Y, np.transpose( - self.counts)) #pixel summed energy spectrum - ax0.set_yticks(self.hdul['ENERGIES'].data['channel'][::2]) - ax0.set_yticklabels(self.energy_bin_names[::2]) - fig = plt.gcf() - cbar = fig.colorbar(im, ax=ax0) - cbar.set_label('Counts') - ax0.set_title('Spectrogram') - ax0.set_ylabel('Energy range(keV') - ax0.set_xlabel(f"Seconds since {self.T0}s ") - if ax1: - #convert to 2d - ax1.plot(self.time, self.count_rates) - ax1.set_yscale('log') - ax1.set_ylabel('Counts / sec') - ax1.set_xlabel(f"Seconds since {self.T0}s ") - if ax2: - ax2.plot(self.ebins_low, self.spectrum, drawstyle='steps-post') - ax2.set_xscale('log') - ax2.set_yscale('log') - ax2.set_xlabel('Energy (keV)') - ax2.set_ylabel('Counts') - if ax3: - ax3.plot(self.time, self.timedel) - ax3.set_xlabel(f"Seconds since {self.T0}s ") - ax3.set_ylabel('Integration time (sec)') - plt.suptitle(f'L4 request #{self.request_id}') - plt.tight_layout() - return fig, ((ax0, ax1), (ax2, ax3)) - - - class BackgroundSubtraction(object): def __init__(self, l1sig: ScienceL1, l1bkg: ScienceL1): """ @@ -462,9 +446,7 @@ def __init__(self, l1sig: ScienceL1, l1bkg: ScienceL1): dmask = self.l1bkg.energy_bin_mask - self.l1sig.energy_bin_mask if np.any(dmask < 0): - logger.error( - 'Background subtraction failed due to the background energy range does not cover the signal energy range ' - ) + ValueError('Inconsistent energy bins') return #mean_pixel_rate_clip = self.l1bkg.mean_pixel_rate_spectra * self.l1sig.inverse_energy_bin_mask @@ -548,58 +530,62 @@ def get_background_subtracted_spectrum(self, start_utc=None, end_utc=None): return bkg_sub_spectra, bkg_sub_spectra_err -class LiveTimeCorrection(object): - """ - #counts is np.array time_bins, detector, pixel, energy bins - trigger_rates=l1data['triggers'][1:,:]/l1data['timedel'][:-1,None] - # delta time is off by 1 time bin due a bug in the - out=np.copy(trigger_rates) - tau=11e-6 - live_time=1 - tau*trig - photo_in=trig/(live_time) - """ - @staticmethod - def correct(triggers, counts_arr, time_bins): - """ Live time correction - Args - triggers: ndarray - triggers in the spectrogram - counts_arr:ndarray - counts in the spectrogram - time_bins: ndarray - time_bins in the spectrogram - Returns - live_time_ratio: ndarray - live time ratio of detectors - count_rate: - corrected count rate - photons_in: - rate of photons illuminating the detector group +class Spectrogram(ScienceData): + def __init__(self, fname, request_id, ltc=False): + super().__init__(fname,request_id) + self.data_type = 'Spectrogram' - """ + self.read_fits(light_time_correction=ltc) - fpga_tau = 10.1e-6 - asic_tau = 2.63e-6 - beta= 0.94 - trig_tau = fpga_tau + asic_tau + self.spectrum = np.sum(self.counts, axis=0) - time_bins = time_bins[:, None] - photons_in = triggers / (time_bins - trig_tau * triggers) - #photon rate calculated using triggers + def peek(self, ax0=None, ax1=None, ax2=None, ax3=None): + """ + preivew Science data + Arguments: + ax0: matplotlib axe + ax0: matplotlib axe + """ + if not self.hdul: + logger.error(f'Data not loaded. 
') + return None + + #((ax0, ax1), (ax2, ax3))=axs + if not any([ax0, ax1, ax2, ax3]): + _, ((ax0, ax1), (ax2, ax3)) = plt.subplots(2, 2, figsize=(8, 6)) - live_ratio= np.zeros((time_bins.size, 32)) - time_bins = time_bins[:, :, None, None] + if ax0: + X, Y = np.meshgrid(self.time, + self.hdul['ENERGIES'].data['channel']) + im = ax0.pcolormesh(X, Y, np.transpose( + self.counts)) #pixel summed energy spectrum + ax0.set_yticks(self.hdul['ENERGIES'].data['channel'][::2]) + ax0.set_yticklabels(self.energy_bin_names[::2]) + fig = plt.gcf() + cbar = fig.colorbar(im, ax=ax0) + cbar.set_label('Counts') + ax0.set_title('Spectrogram') + ax0.set_ylabel('Energy range(keV') + ax0.set_xlabel(f"Seconds since {self.T0}s ") + if ax1: + #convert to 2d + ax1.plot(self.time, self.count_rates) + ax1.set_yscale('log') + ax1.set_ylabel('Counts / sec') + ax1.set_xlabel(f"Seconds since {self.T0}s ") + if ax2: + ax2.plot(self.ebins_low, self.spectrum, drawstyle='steps-post') + ax2.set_xscale('log') + ax2.set_yscale('log') + ax2.set_xlabel('Energy (keV)') + ax2.set_ylabel('Counts') + if ax3: + ax3.plot(self.time, self.timedel) + ax3.set_xlabel(f"Seconds since {self.T0}s ") + ax3.set_ylabel('Integration time (sec)') + plt.suptitle(f'L4 request #{self.request_id}') + plt.tight_layout() + return fig, ((ax0, ax1), (ax2, ax3)) - count_rate = counts_arr / time_bins - # print(counts_arr.shape) - for det in range(32): - trig_idx = inst.detector_id_to_trigger_index(det) - nin = photons_in[:, trig_idx] - live_ratio[:, det] = np.exp( - -beta* nin * asic_tau * 1e-6) / (1 + nin * trig_tau) - corrected_rate=count_rate/live_ratio[:, :, None, None] - return {'corrected_rates': corrected_rate, 'count_rate': count_rate, 'photons_in': photons_in, 'live_ratio':live_ratio} -class TransmissionCorrection(object): - pass
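A usage sketch for the science.py changes above: the ScienceL1 constructor now accepts an ltc flag, and the live-time correction is inlined in correct_dead_time, which returns a dict with 'corrected_rates', 'count_rate', 'photons_in' and 'live_ratio'. The filename and request ID below are placeholders:

from stixdcpy import science

# Load an L1 pixel-data FITS file (placeholder name and ID) without light time correction
l1 = science.ScienceL1('stix_l1_example.fits', request_id=123456, ltc=False)

corrected = l1.correct_dead_time()
live_ratio = corrected['live_ratio']    # per-time-bin, per-detector live-time fraction
rates = corrected['corrected_rates']    # dead-time corrected count rates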