Update config message.

NMC-DAVE 2022-07-07 00:45:54 +08:00
parent aebe672c49
commit b5c8e15ca7
3 changed files with 65 additions and 18 deletions

@@ -26,17 +26,27 @@ def _get_config_dir():
CONFIG_DIR = _get_config_dir()
def _ConfigFetchError(Exception):
class _ConfigFetchError(Exception):
pass
def _get_config_from_rcfile():
"""
Get configure information from config_dk_met_io.ini file.
Get configuration information from the config.ini file in the .nmcdev directory.
"""
rc = CONFIG_DIR / "config.ini"
if not rc.is_file():
rc = Path("~/config_met_io.ini").expanduser()
# check the configuration file
if not rc.is_file():
msg = (
"The {} doese not exist. Please create it with text editor. "
"\n Refer to https://github.com/nmcdev/nmc_met_io ".format(
CONFIG_DIR / "config.ini")
)
raise _ConfigFetchError(msg)
try:
config = configparser.ConfigParser()
config.read(rc)
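For reference, the config.ini that this message points to can be quite small; a minimal sketch is below. The [MICAPS] section keys GDS_IP and GDS_PORT match the lookups changed later in this commit, while the host and port values are placeholders.

[MICAPS]
GDS_IP = 127.0.0.1
GDS_PORT = 8080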

@@ -171,7 +171,8 @@ def get_latest_initTime(directory, suffix="*.006"):
def get_model_grid(directory, filename=None, suffix="*.024",
varname='data', varattrs={'units':''}, scale_off=None,
levattrs={'long_name':'pressure_level', 'units':'hPa',
'_CoordinateAxisType':'Pressure'}, cache=True, cache_clear=True):
'_CoordinateAxisType':'Pressure'},
cache=True, cache_clear=True, check_file_first=True):
"""
Retrieve numeric model grid forecast from MICAPS cassandra service.
Support ensemble member forecast.
@@ -185,6 +186,7 @@ def get_model_grid(directory, filename=None, suffix="*.024",
:param scale_off: [scale, offset], return values = values*scale + offset.
:param levattrs: set level coordinate attributes, dictionary type.
:param cache: cache retrieved data to local directory, default is True.
:param check_file_first: check whether the file exists first. Default is True.
:return: data, xarray type
:Examples:
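A rough usage sketch of the new check_file_first flag; the import path, directory, and filename below are assumptions for illustration only:

# assumed import path; directory and filename are illustrative
from nmc_met_io.retrieve_micaps_server import get_model_grid

# the caller already knows the file exists, so skip the extra directory listing
data = get_model_grid('ECMWF_HR/TMP/850', filename='22070708.024',
                      check_file_first=False)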
@@ -207,12 +209,18 @@ def get_model_grid(directory, filename=None, suffix="*.024",
filename = response
if filename == '':
return None
check_file_first = False  # the file is already known to exist
else:
return None
# retrieve data from cached file
if cache:
cache_file = CONFIG.get_cache_file(directory, filename, name="MICAPS_DATA", cache_clear=cache_clear)
cache_file = CONFIG.get_cache_file(
directory,
filename,
name="MICAPS_DATA",
cache_clear=cache_clear
)
if cache_file.is_file():
with open(cache_file, 'rb') as f:
data = pickle.load(f)
@@ -220,9 +228,11 @@ def get_model_grid(directory, filename=None, suffix="*.024",
# get data contents
try:
file_list = get_file_list(directory)
if filename not in file_list:
return None
# get the file list and check that the file exists
if check_file_first:
file_list = get_file_list(directory)
if filename not in file_list:
return None
service = CassandraDB()
status, response = service.getData(directory, filename)
except ValueError:
@@ -463,10 +473,19 @@ def get_model_grids(directory, filenames, allExists=True, pbar=False, **kargs):
tqdm_filenames = tqdm(filenames, desc=directory + ": ")
else:
tqdm_filenames = filenames
# get the file list once for existence checks
file_list = get_file_list(directory)
for filename in tqdm_filenames:
data = get_model_grid(directory, filename=filename, **kargs)
if data:
dataset.append(data)
# check that the file exists
if filename in file_list:
data = get_model_grid(directory, filename=filename, check_file_first=False, **kargs)
if data:
dataset.append(data)
else:
if allExists:
warnings.warn("{} does not exist.".format(directory+'/'+filename))
return None
else:
if allExists:
warnings.warn("{} does not exist.".format(directory+'/'+filename))
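This is the pattern the change enables: get_model_grids lists the directory once and then calls get_model_grid with check_file_first=False for each file, instead of issuing one listing per file. A hedged usage sketch, where the import path, directory, and filenames are illustrative:

# assumed import path; directory and filenames are illustrative
from nmc_met_io.retrieve_micaps_server import get_model_grids

filenames = ['22070708.024', '22070708.048']
dataset = get_model_grids('ECMWF_HR/TMP/850', filenames, allExists=False)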

@@ -50,10 +50,23 @@ def get_http_result(host, port, url):
class GDSDataService:
def __init__(self):
def __init__(self, gdsIP=None, gdsPort=None):
"""
Args:
gdsIP (str, optional):
MICAPS cassandra server IP. Defaults to the value in config.ini.
gdsPort (str, optional):
MICAPS cassandra server port. Defaults to the value in config.ini.
"""
# set MICAPS GDS server address
self.gdsIp = CONFIG.CONFIG['MICAPS']['GDS_IP']
self.gdsPort = CONFIG.CONFIG['MICAPS']['GDS_PORT']
if gdsIP is None:
self.gdsIp = CONFIG.CONFIG['MICAPS']['GDS_IP']
else:
self.gdsIp = gdsIP
if gdsPort is None:
self.gdsPort = CONFIG.CONFIG['MICAPS']['GDS_PORT']
else:
self.gdsPort = gdsPort
def getLatestDataName(self, directory, filter):
return get_http_result(
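A minimal sketch of the new constructor arguments; the import path and server address are placeholders, and leaving both arguments as None keeps the previous behaviour of reading config.ini:

# assumed import path; IP/port values and directory are placeholders
from nmc_met_io.retrieve_micaps_server import GDSDataService

service = GDSDataService(gdsIP='192.168.1.100', gdsPort='8080')
status, response = service.getLatestDataName('ECMWF_HR/TMP/850', '*.024')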
@@ -156,7 +169,8 @@ def get_model_grid(directory, filename=None, suffix="*.024",
varname='data', varattrs={'units':''}, scale_off=None,
levattrs={'long_name':'pressure_level', 'units':'hPa',
'_CoordinateAxisType':'Pressure'},
cache=True, cache_clear=True, check_file_first=True):
cache=True, cache_clear=True, check_file_first=True,
gdsIP=None, gdsPort=None):
"""
Retrieve numeric model grid forecast from MICAPS cassandra service.
Support ensemble member forecast.
@@ -171,7 +185,7 @@ def get_model_grid(directory, filename=None, suffix="*.024",
:param levattrs: set level coordinate attributes, dictionary type.
:param cache: cache retrieved data to local directory. Default is True.
:param cache_clear: if cache clearing is enabled, cached files are stored in weekly folders and folders from past weeks are deleted.
:param check_file_first: check file exists firstly, do not recommend. Default is False.
:param check_file_first: check whether the file exists first. Default is True.
:return: data, xarray type
:Examples:
@@ -186,7 +200,7 @@ def get_model_grid(directory, filename=None, suffix="*.024",
if filename is None:
try:
# connect to data service
service = GDSDataService()
service = GDSDataService(gdsIP=gdsIP, gdsPort=gdsPort)
status, response = service.getLatestDataName(directory, suffix)
except ValueError:
print('Can not retrieve data from ' + directory)
@@ -204,7 +218,11 @@ def get_model_grid(directory, filename=None, suffix="*.024",
# retrieve data from cached file
if cache:
cache_file = CONFIG.get_cache_file(directory, filename, name="MICAPS_DATA", cache_clear=cache_clear)
cache_file = CONFIG.get_cache_file(
directory,
filename,
name="MICAPS_DATA",
cache_clear=cache_clear)
if cache_file.is_file():
with open(cache_file, 'rb') as f:
data = pickle.load(f)
@@ -217,7 +235,7 @@ def get_model_grid(directory, filename=None, suffix="*.024",
file_list = get_file_list(directory)
if filename not in file_list:
return None
service = GDSDataService()
service = GDSDataService(gdsIP=gdsIP, gdsPort=gdsPort)
status, response = service.getData(directory, filename)
except ValueError:
print('Can not retrieve data ' + filename + ' from ' + directory)
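The same server override is threaded through the high-level call; a sketch in which the import path, server address, and file name are placeholders, and omitting gdsIP/gdsPort falls back to config.ini as before:

# assumed import path; server address, directory, and filename are placeholders
from nmc_met_io.retrieve_micaps_server import get_model_grid

data = get_model_grid('ECMWF_HR/TMP/850', filename='22070708.024',
                      gdsIP='192.168.1.100', gdsPort='8080')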