Update get_station_data function.

This commit is contained in:
NMC-DAVE 2019-07-03 22:07:05 +08:00
parent 755c8d0587
commit 2663565490
15 changed files with 3978 additions and 3957 deletions

208
.gitignore vendored

@ -1,104 +1,104 @@
# Byte-compiled / optimized / DLL files # Byte-compiled / optimized / DLL files
__pycache__/ __pycache__/
*.py[cod] *.py[cod]
*$py.class *$py.class
# C extensions # C extensions
*.so *.so
# Distribution / packaging # Distribution / packaging
.Python .Python
build/ build/
develop-eggs/ develop-eggs/
dist/ dist/
downloads/ downloads/
eggs/ eggs/
.eggs/ .eggs/
lib/ lib/
lib64/ lib64/
parts/ parts/
sdist/ sdist/
var/ var/
wheels/ wheels/
*.egg-info/ *.egg-info/
.installed.cfg .installed.cfg
*.egg *.egg
MANIFEST MANIFEST
# PyInstaller # PyInstaller
# Usually these files are written by a python script from a template # Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it. # before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest *.manifest
*.spec *.spec
# Installer logs # Installer logs
pip-log.txt pip-log.txt
pip-delete-this-directory.txt pip-delete-this-directory.txt
# Unit test / coverage reports # Unit test / coverage reports
htmlcov/ htmlcov/
.tox/ .tox/
.coverage .coverage
.coverage.* .coverage.*
.cache .cache
nosetests.xml nosetests.xml
coverage.xml coverage.xml
*.cover *.cover
.hypothesis/ .hypothesis/
.pytest_cache/ .pytest_cache/
# Translations # Translations
*.mo *.mo
*.pot *.pot
# Django stuff: # Django stuff:
*.log *.log
local_settings.py local_settings.py
db.sqlite3 db.sqlite3
# Flask stuff: # Flask stuff:
instance/ instance/
.webassets-cache .webassets-cache
# Scrapy stuff: # Scrapy stuff:
.scrapy .scrapy
# Sphinx documentation # Sphinx documentation
docs/_build/ docs/_build/
# PyBuilder # PyBuilder
target/ target/
# Jupyter Notebook # Jupyter Notebook
.ipynb_checkpoints .ipynb_checkpoints
# pyenv # pyenv
.python-version .python-version
# celery beat schedule file # celery beat schedule file
celerybeat-schedule celerybeat-schedule
# SageMath parsed files # SageMath parsed files
*.sage.py *.sage.py
# Environments # Environments
.env .env
.venv .venv
env/ env/
venv/ venv/
ENV/ ENV/
env.bak/ env.bak/
venv.bak/ venv.bak/
# Spyder project settings # Spyder project settings
.spyderproject .spyderproject
.spyproject .spyproject
# Rope project settings # Rope project settings
.ropeproject .ropeproject
# mkdocs documentation # mkdocs documentation
/site /site
# mypy # mypy
.mypy_cache/ .mypy_cache/

1348
LICENSE

File diff suppressed because it is too large Load Diff

@ -1,6 +1,6 @@
include README.md include README.md
include LICENSE include LICENSE
recursive-exclude * __pycache__ recursive-exclude * __pycache__
recursive-exclude * *.pyc recursive-exclude * *.pyc
recursive-exclude * *.pyo recursive-exclude * *.pyo
recursive-exclude * *.orig recursive-exclude * *.orig

@ -1,43 +1,43 @@
# 气象数据读写及访问程序库 # 气象数据读写及访问程序库
提供对MICAPS文件, 卫星云图, 天气雷达等数据的读写, 并访问CIMISS和MICAPS CASSANDRA数据库文件等. 提供对MICAPS文件, 卫星云图, 天气雷达等数据的读写, 并访问CIMISS和MICAPS CASSANDRA数据库文件等.
Only Python 3 is supported. Only Python 3 is supported.
## Dependencies ## Dependencies
Other required packages: Other required packages:
- numpy - numpy
- scipy - scipy
- xarray - xarray
- pandas - pandas
- pyproj - pyproj
- protobuf - protobuf
- urllib3 - urllib3
- python-dateutil - python-dateutil
## Install ## Install
Using the fellowing command to install packages: Using the fellowing command to install packages:
``` ```
pip install git+git://github.com/nmcdev/nmc_met_io.git pip install git+git://github.com/nmcdev/nmc_met_io.git
``` ```
or download the package and install: or download the package and install:
``` ```
git clone --recursive https://github.com/nmcdev/nmc_met_io.git git clone --recursive https://github.com/nmcdev/nmc_met_io.git
cd nmc_met_io cd nmc_met_io
python setup.py install python setup.py install
``` ```
## 设置CIMISS和MICAPS服务器的地址及用户信息 ## 设置CIMISS和MICAPS服务器的地址及用户信息
在系统用户目录下("C:\Users\用户名"), 新建文本文件config_met_io.ini, 里面内容模板为: 在系统用户目录下("C:\Users\用户名"), 新建文本文件config_met_io.ini, 里面内容模板为:
``` ```
[CIMISS] [CIMISS]
DNS = xx.xx.xx.xx DNS = xx.xx.xx.xx
USER_ID = xxxxxxxxx USER_ID = xxxxxxxxx
PASSWORD = xxxxxxxx PASSWORD = xxxxxxxx
[MICAPS] [MICAPS]
GDS_IP = xx.xx.xx.xx GDS_IP = xx.xx.xx.xx
GDS_PORT = xxxx GDS_PORT = xxxx
``` ```
这里xxxx用相应的地址, 接口和用户信息代替. 这里xxxx用相应的地址, 接口和用户信息代替.

@ -1,359 +1,359 @@
# Generated by the protocol buffer compiler. DO NOT EDIT! # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: DataBlock.proto # source: DataBlock.proto
import sys import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message from google.protobuf import message as _message
from google.protobuf import reflection as _reflection from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2 from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports) # @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default() _sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor( DESCRIPTOR = _descriptor.FileDescriptor(
name='DataBlock.proto', name='DataBlock.proto',
package='', package='',
syntax='proto3', syntax='proto3',
serialized_pb=_b('\n\x0f\x44\x61taBlock.proto\"E\n\x0cStringResult\x12\x11\n\terrorCode\x18\x01 \x01(\x05\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\"M\n\x0f\x42yteArrayResult\x12\x11\n\terrorCode\x18\x01 \x01(\x05\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\x11\n\tbyteArray\x18\x03 \x01(\x0c\"h\n\x18StringAndByteArrayResult\x12\x11\n\terrorCode\x18\x01 \x01(\x05\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\x10\n\x08\x64\x61taName\x18\x03 \x01(\t\x12\x11\n\tbyteArray\x18\x04 \x01(\x0c\"[\n\x0e\x46ileInfoResult\x12\x11\n\terrorCode\x18\x01 \x01(\x05\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\x0e\n\x06isFile\x18\x03 \x01(\x08\x12\x10\n\x08\x66ileSize\x18\x04 \x01(\x03\"\x94\x01\n\tMapResult\x12\x11\n\terrorCode\x18\x01 \x01(\x05\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12,\n\tresultMap\x18\x03 \x03(\x0b\x32\x19.MapResult.ResultMapEntry\x1a\x30\n\x0eResultMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42)\n\x1c\x63n.gov.cma.cimiss.gds.serverB\tDataBlockb\x06proto3') serialized_pb=_b('\n\x0f\x44\x61taBlock.proto\"E\n\x0cStringResult\x12\x11\n\terrorCode\x18\x01 \x01(\x05\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\"M\n\x0f\x42yteArrayResult\x12\x11\n\terrorCode\x18\x01 \x01(\x05\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\x11\n\tbyteArray\x18\x03 \x01(\x0c\"h\n\x18StringAndByteArrayResult\x12\x11\n\terrorCode\x18\x01 \x01(\x05\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\x10\n\x08\x64\x61taName\x18\x03 \x01(\t\x12\x11\n\tbyteArray\x18\x04 \x01(\x0c\"[\n\x0e\x46ileInfoResult\x12\x11\n\terrorCode\x18\x01 \x01(\x05\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\x0e\n\x06isFile\x18\x03 \x01(\x08\x12\x10\n\x08\x66ileSize\x18\x04 \x01(\x03\"\x94\x01\n\tMapResult\x12\x11\n\terrorCode\x18\x01 \x01(\x05\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12,\n\tresultMap\x18\x03 
\x03(\x0b\x32\x19.MapResult.ResultMapEntry\x1a\x30\n\x0eResultMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42)\n\x1c\x63n.gov.cma.cimiss.gds.serverB\tDataBlockb\x06proto3')
) )
_sym_db.RegisterFileDescriptor(DESCRIPTOR) _sym_db.RegisterFileDescriptor(DESCRIPTOR)
_STRINGRESULT = _descriptor.Descriptor( _STRINGRESULT = _descriptor.Descriptor(
name='StringResult', name='StringResult',
full_name='StringResult', full_name='StringResult',
filename=None, filename=None,
file=DESCRIPTOR, file=DESCRIPTOR,
containing_type=None, containing_type=None,
fields=[ fields=[
_descriptor.FieldDescriptor( _descriptor.FieldDescriptor(
name='errorCode', full_name='StringResult.errorCode', index=0, name='errorCode', full_name='StringResult.errorCode', index=0,
number=1, type=5, cpp_type=1, label=1, number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0, has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None, message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None, is_extension=False, extension_scope=None,
options=None), options=None),
_descriptor.FieldDescriptor( _descriptor.FieldDescriptor(
name='errorMessage', full_name='StringResult.errorMessage', index=1, name='errorMessage', full_name='StringResult.errorMessage', index=1,
number=2, type=9, cpp_type=9, label=1, number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'), has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None, message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None, is_extension=False, extension_scope=None,
options=None), options=None),
_descriptor.FieldDescriptor( _descriptor.FieldDescriptor(
name='name', full_name='StringResult.name', index=2, name='name', full_name='StringResult.name', index=2,
number=3, type=9, cpp_type=9, label=1, number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'), has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None, message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None, is_extension=False, extension_scope=None,
options=None), options=None),
], ],
extensions=[ extensions=[
], ],
nested_types=[], nested_types=[],
enum_types=[ enum_types=[
], ],
options=None, options=None,
is_extendable=False, is_extendable=False,
syntax='proto3', syntax='proto3',
extension_ranges=[], extension_ranges=[],
oneofs=[ oneofs=[
], ],
serialized_start=19, serialized_start=19,
serialized_end=88, serialized_end=88,
) )
_BYTEARRAYRESULT = _descriptor.Descriptor( _BYTEARRAYRESULT = _descriptor.Descriptor(
name='ByteArrayResult', name='ByteArrayResult',
full_name='ByteArrayResult', full_name='ByteArrayResult',
filename=None, filename=None,
file=DESCRIPTOR, file=DESCRIPTOR,
containing_type=None, containing_type=None,
fields=[ fields=[
_descriptor.FieldDescriptor( _descriptor.FieldDescriptor(
name='errorCode', full_name='ByteArrayResult.errorCode', index=0, name='errorCode', full_name='ByteArrayResult.errorCode', index=0,
number=1, type=5, cpp_type=1, label=1, number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0, has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None, message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None, is_extension=False, extension_scope=None,
options=None), options=None),
_descriptor.FieldDescriptor( _descriptor.FieldDescriptor(
name='errorMessage', full_name='ByteArrayResult.errorMessage', index=1, name='errorMessage', full_name='ByteArrayResult.errorMessage', index=1,
number=2, type=9, cpp_type=9, label=1, number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'), has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None, message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None, is_extension=False, extension_scope=None,
options=None), options=None),
_descriptor.FieldDescriptor( _descriptor.FieldDescriptor(
name='byteArray', full_name='ByteArrayResult.byteArray', index=2, name='byteArray', full_name='ByteArrayResult.byteArray', index=2,
number=3, type=12, cpp_type=9, label=1, number=3, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""), has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None, message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None, is_extension=False, extension_scope=None,
options=None), options=None),
], ],
extensions=[ extensions=[
], ],
nested_types=[], nested_types=[],
enum_types=[ enum_types=[
], ],
options=None, options=None,
is_extendable=False, is_extendable=False,
syntax='proto3', syntax='proto3',
extension_ranges=[], extension_ranges=[],
oneofs=[ oneofs=[
], ],
serialized_start=90, serialized_start=90,
serialized_end=167, serialized_end=167,
) )
_STRINGANDBYTEARRAYRESULT = _descriptor.Descriptor( _STRINGANDBYTEARRAYRESULT = _descriptor.Descriptor(
name='StringAndByteArrayResult', name='StringAndByteArrayResult',
full_name='StringAndByteArrayResult', full_name='StringAndByteArrayResult',
filename=None, filename=None,
file=DESCRIPTOR, file=DESCRIPTOR,
containing_type=None, containing_type=None,
fields=[ fields=[
_descriptor.FieldDescriptor( _descriptor.FieldDescriptor(
name='errorCode', full_name='StringAndByteArrayResult.errorCode', index=0, name='errorCode', full_name='StringAndByteArrayResult.errorCode', index=0,
number=1, type=5, cpp_type=1, label=1, number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0, has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None, message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None, is_extension=False, extension_scope=None,
options=None), options=None),
_descriptor.FieldDescriptor( _descriptor.FieldDescriptor(
name='errorMessage', full_name='StringAndByteArrayResult.errorMessage', index=1, name='errorMessage', full_name='StringAndByteArrayResult.errorMessage', index=1,
number=2, type=9, cpp_type=9, label=1, number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'), has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None, message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None, is_extension=False, extension_scope=None,
options=None), options=None),
_descriptor.FieldDescriptor( _descriptor.FieldDescriptor(
name='dataName', full_name='StringAndByteArrayResult.dataName', index=2, name='dataName', full_name='StringAndByteArrayResult.dataName', index=2,
number=3, type=9, cpp_type=9, label=1, number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'), has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None, message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None, is_extension=False, extension_scope=None,
options=None), options=None),
_descriptor.FieldDescriptor( _descriptor.FieldDescriptor(
name='byteArray', full_name='StringAndByteArrayResult.byteArray', index=3, name='byteArray', full_name='StringAndByteArrayResult.byteArray', index=3,
number=4, type=12, cpp_type=9, label=1, number=4, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""), has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None, message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None, is_extension=False, extension_scope=None,
options=None), options=None),
], ],
extensions=[ extensions=[
], ],
nested_types=[], nested_types=[],
enum_types=[ enum_types=[
], ],
options=None, options=None,
is_extendable=False, is_extendable=False,
syntax='proto3', syntax='proto3',
extension_ranges=[], extension_ranges=[],
oneofs=[ oneofs=[
], ],
serialized_start=169, serialized_start=169,
serialized_end=273, serialized_end=273,
) )
_FILEINFORESULT = _descriptor.Descriptor( _FILEINFORESULT = _descriptor.Descriptor(
name='FileInfoResult', name='FileInfoResult',
full_name='FileInfoResult', full_name='FileInfoResult',
filename=None, filename=None,
file=DESCRIPTOR, file=DESCRIPTOR,
containing_type=None, containing_type=None,
fields=[ fields=[
_descriptor.FieldDescriptor( _descriptor.FieldDescriptor(
name='errorCode', full_name='FileInfoResult.errorCode', index=0, name='errorCode', full_name='FileInfoResult.errorCode', index=0,
number=1, type=5, cpp_type=1, label=1, number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0, has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None, message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None, is_extension=False, extension_scope=None,
options=None), options=None),
_descriptor.FieldDescriptor( _descriptor.FieldDescriptor(
name='errorMessage', full_name='FileInfoResult.errorMessage', index=1, name='errorMessage', full_name='FileInfoResult.errorMessage', index=1,
number=2, type=9, cpp_type=9, label=1, number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'), has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None, message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None, is_extension=False, extension_scope=None,
options=None), options=None),
_descriptor.FieldDescriptor( _descriptor.FieldDescriptor(
name='isFile', full_name='FileInfoResult.isFile', index=2, name='isFile', full_name='FileInfoResult.isFile', index=2,
number=3, type=8, cpp_type=7, label=1, number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False, has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None, message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None, is_extension=False, extension_scope=None,
options=None), options=None),
_descriptor.FieldDescriptor( _descriptor.FieldDescriptor(
name='fileSize', full_name='FileInfoResult.fileSize', index=3, name='fileSize', full_name='FileInfoResult.fileSize', index=3,
number=4, type=3, cpp_type=2, label=1, number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0, has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None, message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None, is_extension=False, extension_scope=None,
options=None), options=None),
], ],
extensions=[ extensions=[
], ],
nested_types=[], nested_types=[],
enum_types=[ enum_types=[
], ],
options=None, options=None,
is_extendable=False, is_extendable=False,
syntax='proto3', syntax='proto3',
extension_ranges=[], extension_ranges=[],
oneofs=[ oneofs=[
], ],
serialized_start=275, serialized_start=275,
serialized_end=366, serialized_end=366,
) )
_MAPRESULT_RESULTMAPENTRY = _descriptor.Descriptor( _MAPRESULT_RESULTMAPENTRY = _descriptor.Descriptor(
name='ResultMapEntry', name='ResultMapEntry',
full_name='MapResult.ResultMapEntry', full_name='MapResult.ResultMapEntry',
filename=None, filename=None,
file=DESCRIPTOR, file=DESCRIPTOR,
containing_type=None, containing_type=None,
fields=[ fields=[
_descriptor.FieldDescriptor( _descriptor.FieldDescriptor(
name='key', full_name='MapResult.ResultMapEntry.key', index=0, name='key', full_name='MapResult.ResultMapEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1, number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'), has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None, message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None, is_extension=False, extension_scope=None,
options=None), options=None),
_descriptor.FieldDescriptor( _descriptor.FieldDescriptor(
name='value', full_name='MapResult.ResultMapEntry.value', index=1, name='value', full_name='MapResult.ResultMapEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1, number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'), has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None, message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None, is_extension=False, extension_scope=None,
options=None), options=None),
], ],
extensions=[ extensions=[
], ],
nested_types=[], nested_types=[],
enum_types=[ enum_types=[
], ],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False, is_extendable=False,
syntax='proto3', syntax='proto3',
extension_ranges=[], extension_ranges=[],
oneofs=[ oneofs=[
], ],
serialized_start=469, serialized_start=469,
serialized_end=517, serialized_end=517,
) )
_MAPRESULT = _descriptor.Descriptor( _MAPRESULT = _descriptor.Descriptor(
name='MapResult', name='MapResult',
full_name='MapResult', full_name='MapResult',
filename=None, filename=None,
file=DESCRIPTOR, file=DESCRIPTOR,
containing_type=None, containing_type=None,
fields=[ fields=[
_descriptor.FieldDescriptor( _descriptor.FieldDescriptor(
name='errorCode', full_name='MapResult.errorCode', index=0, name='errorCode', full_name='MapResult.errorCode', index=0,
number=1, type=5, cpp_type=1, label=1, number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0, has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None, message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None, is_extension=False, extension_scope=None,
options=None), options=None),
_descriptor.FieldDescriptor( _descriptor.FieldDescriptor(
name='errorMessage', full_name='MapResult.errorMessage', index=1, name='errorMessage', full_name='MapResult.errorMessage', index=1,
number=2, type=9, cpp_type=9, label=1, number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'), has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None, message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None, is_extension=False, extension_scope=None,
options=None), options=None),
_descriptor.FieldDescriptor( _descriptor.FieldDescriptor(
name='resultMap', full_name='MapResult.resultMap', index=2, name='resultMap', full_name='MapResult.resultMap', index=2,
number=3, type=11, cpp_type=10, label=3, number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[], has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None, message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None, is_extension=False, extension_scope=None,
options=None), options=None),
], ],
extensions=[ extensions=[
], ],
nested_types=[_MAPRESULT_RESULTMAPENTRY, ], nested_types=[_MAPRESULT_RESULTMAPENTRY, ],
enum_types=[ enum_types=[
], ],
options=None, options=None,
is_extendable=False, is_extendable=False,
syntax='proto3', syntax='proto3',
extension_ranges=[], extension_ranges=[],
oneofs=[ oneofs=[
], ],
serialized_start=369, serialized_start=369,
serialized_end=517, serialized_end=517,
) )
_MAPRESULT_RESULTMAPENTRY.containing_type = _MAPRESULT _MAPRESULT_RESULTMAPENTRY.containing_type = _MAPRESULT
_MAPRESULT.fields_by_name['resultMap'].message_type = _MAPRESULT_RESULTMAPENTRY _MAPRESULT.fields_by_name['resultMap'].message_type = _MAPRESULT_RESULTMAPENTRY
DESCRIPTOR.message_types_by_name['StringResult'] = _STRINGRESULT DESCRIPTOR.message_types_by_name['StringResult'] = _STRINGRESULT
DESCRIPTOR.message_types_by_name['ByteArrayResult'] = _BYTEARRAYRESULT DESCRIPTOR.message_types_by_name['ByteArrayResult'] = _BYTEARRAYRESULT
DESCRIPTOR.message_types_by_name['StringAndByteArrayResult'] = _STRINGANDBYTEARRAYRESULT DESCRIPTOR.message_types_by_name['StringAndByteArrayResult'] = _STRINGANDBYTEARRAYRESULT
DESCRIPTOR.message_types_by_name['FileInfoResult'] = _FILEINFORESULT DESCRIPTOR.message_types_by_name['FileInfoResult'] = _FILEINFORESULT
DESCRIPTOR.message_types_by_name['MapResult'] = _MAPRESULT DESCRIPTOR.message_types_by_name['MapResult'] = _MAPRESULT
StringResult = _reflection.GeneratedProtocolMessageType('StringResult', (_message.Message,), dict( StringResult = _reflection.GeneratedProtocolMessageType('StringResult', (_message.Message,), dict(
DESCRIPTOR = _STRINGRESULT, DESCRIPTOR = _STRINGRESULT,
__module__ = 'DataBlock_pb2' __module__ = 'DataBlock_pb2'
# @@protoc_insertion_point(class_scope:StringResult) # @@protoc_insertion_point(class_scope:StringResult)
)) ))
_sym_db.RegisterMessage(StringResult) _sym_db.RegisterMessage(StringResult)
ByteArrayResult = _reflection.GeneratedProtocolMessageType('ByteArrayResult', (_message.Message,), dict( ByteArrayResult = _reflection.GeneratedProtocolMessageType('ByteArrayResult', (_message.Message,), dict(
DESCRIPTOR = _BYTEARRAYRESULT, DESCRIPTOR = _BYTEARRAYRESULT,
__module__ = 'DataBlock_pb2' __module__ = 'DataBlock_pb2'
# @@protoc_insertion_point(class_scope:ByteArrayResult) # @@protoc_insertion_point(class_scope:ByteArrayResult)
)) ))
_sym_db.RegisterMessage(ByteArrayResult) _sym_db.RegisterMessage(ByteArrayResult)
StringAndByteArrayResult = _reflection.GeneratedProtocolMessageType('StringAndByteArrayResult', (_message.Message,), dict( StringAndByteArrayResult = _reflection.GeneratedProtocolMessageType('StringAndByteArrayResult', (_message.Message,), dict(
DESCRIPTOR = _STRINGANDBYTEARRAYRESULT, DESCRIPTOR = _STRINGANDBYTEARRAYRESULT,
__module__ = 'DataBlock_pb2' __module__ = 'DataBlock_pb2'
# @@protoc_insertion_point(class_scope:StringAndByteArrayResult) # @@protoc_insertion_point(class_scope:StringAndByteArrayResult)
)) ))
_sym_db.RegisterMessage(StringAndByteArrayResult) _sym_db.RegisterMessage(StringAndByteArrayResult)
FileInfoResult = _reflection.GeneratedProtocolMessageType('FileInfoResult', (_message.Message,), dict( FileInfoResult = _reflection.GeneratedProtocolMessageType('FileInfoResult', (_message.Message,), dict(
DESCRIPTOR = _FILEINFORESULT, DESCRIPTOR = _FILEINFORESULT,
__module__ = 'DataBlock_pb2' __module__ = 'DataBlock_pb2'
# @@protoc_insertion_point(class_scope:FileInfoResult) # @@protoc_insertion_point(class_scope:FileInfoResult)
)) ))
_sym_db.RegisterMessage(FileInfoResult) _sym_db.RegisterMessage(FileInfoResult)
MapResult = _reflection.GeneratedProtocolMessageType('MapResult', (_message.Message,), dict( MapResult = _reflection.GeneratedProtocolMessageType('MapResult', (_message.Message,), dict(
ResultMapEntry = _reflection.GeneratedProtocolMessageType('ResultMapEntry', (_message.Message,), dict( ResultMapEntry = _reflection.GeneratedProtocolMessageType('ResultMapEntry', (_message.Message,), dict(
DESCRIPTOR = _MAPRESULT_RESULTMAPENTRY, DESCRIPTOR = _MAPRESULT_RESULTMAPENTRY,
__module__ = 'DataBlock_pb2' __module__ = 'DataBlock_pb2'
# @@protoc_insertion_point(class_scope:MapResult.ResultMapEntry) # @@protoc_insertion_point(class_scope:MapResult.ResultMapEntry)
)) ))
, ,
DESCRIPTOR = _MAPRESULT, DESCRIPTOR = _MAPRESULT,
__module__ = 'DataBlock_pb2' __module__ = 'DataBlock_pb2'
# @@protoc_insertion_point(class_scope:MapResult) # @@protoc_insertion_point(class_scope:MapResult)
)) ))
_sym_db.RegisterMessage(MapResult) _sym_db.RegisterMessage(MapResult)
_sym_db.RegisterMessage(MapResult.ResultMapEntry) _sym_db.RegisterMessage(MapResult.ResultMapEntry)
DESCRIPTOR.has_options = True DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\034cn.gov.cma.cimiss.gds.serverB\tDataBlock')) DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\034cn.gov.cma.cimiss.gds.serverB\tDataBlock'))
_MAPRESULT_RESULTMAPENTRY.has_options = True _MAPRESULT_RESULTMAPENTRY.has_options = True
_MAPRESULT_RESULTMAPENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) _MAPRESULT_RESULTMAPENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
# @@protoc_insertion_point(module_scope) # @@protoc_insertion_point(module_scope)

@ -1,7 +1,7 @@
""" """
dk_Met_IO is a package to read or retrieve meteorological data dk_Met_IO is a package to read or retrieve meteorological data
from various data sources. from various data sources.
""" """
__author__ = "The R & D Center for Weather Forecasting Technology in NMC, CMA" __author__ = "The R & D Center for Weather Forecasting Technology in NMC, CMA"
__version__ = '0.1.0' __version__ = '0.1.0'

@ -1,31 +1,31 @@
# _*_ coding: utf-8 _*_ # _*_ coding: utf-8 _*_
# Copyright (c) 2019 NMC Developers. # Copyright (c) 2019 NMC Developers.
# Distributed under the terms of the GPL V3 License. # Distributed under the terms of the GPL V3 License.
""" """
Read configure file. Read configure file.
""" """
import os import os
import configparser import configparser
def ConfigFetchError(Exception): def ConfigFetchError(Exception):
pass pass
def _get_config_from_rcfile(): def _get_config_from_rcfile():
""" """
Get configure information from config_dk_met_io.ini file. Get configure information from config_dk_met_io.ini file.
""" """
rc = os.path.normpath(os.path.expanduser("~/config_met_io.ini")) rc = os.path.normpath(os.path.expanduser("~/config_met_io.ini"))
try: try:
config = configparser.ConfigParser() config = configparser.ConfigParser()
config.read(rc) config.read(rc)
except IOError as e: except IOError as e:
raise ConfigFetchError(str(e)) raise ConfigFetchError(str(e))
except Exception as e: except Exception as e:
raise ConfigFetchError(str(e)) raise ConfigFetchError(str(e))
return config return config

@ -1,66 +1,66 @@
# _*_ coding: utf-8 _*_ # _*_ coding: utf-8 _*_
# Copyright (c) 2019 NMC Developers. # Copyright (c) 2019 NMC Developers.
# Distributed under the terms of the GPL V3 License. # Distributed under the terms of the GPL V3 License.
""" """
Read grads data file. Read grads data file.
""" """
import os import os
import re import re
from datetime import datetime from datetime import datetime
import numpy as np import numpy as np
def read_cmp_pre_hour_grid(files, start_lon=70.05, start_lat=15.05):
    """
    Read SURF_CLI_CHN_MERGE_CMP_PRE_HOUR_GRID_0.1 binary data files.

    Fix: the time-stamp regex is now a raw string; ``'\\d{10}'`` written as a
    plain string is an invalid escape sequence on modern Python.

    :param files: a single data filename or a sequence of filenames.
    :param start_lon: region lower-left corner longitude.
    :param start_lat: region lower-left corner latitude.
    :return: ``(data, time, lon, lat)``; ``(None, None, None, None)`` when
             any of the files is missing.

    :Examples:
    >>> files = ("F:/201607/SURF_CLI_CHN_MERGE_CMP_"
                 "PRE_HOUR_GRID_0.10-2016070100.grd")
    >>> data, time, lon, lat = read_cmp_pre_hour_grid(files)
    """
    # normalize the input to an array of filenames
    if isinstance(files, str):
        files = np.array([files])
    else:
        files = np.array(files)

    # grid coordinates: 0.1-degree spacing, 700 x 440 points
    lon = np.arange(700) * 0.1 + start_lon
    lat = np.arange(440) * 0.1 + start_lat

    # one 2-D field per file; NaN marks unread/missing values
    data = np.full((len(files), lat.size, lon.size), np.nan)
    time = []

    # loop over every data file
    for i, f in enumerate(files):
        # original contract: bail out completely if any file is missing
        if not os.path.isfile(f):
            return None, None, None, None

        # extract the 10-digit stamp (YYYYMMDDHH) from the file name
        ttime = re.search(r'\d{10}', os.path.basename(f))
        time.append(datetime.strptime(ttime.group(0), "%Y%m%d%H"))

        # each file holds two float32 fields; only the first is kept
        # (second field's meaning not shown here -- presumably gauge/density,
        # TODO confirm against the dataset documentation)
        try:
            tdata = np.fromfile(
                f, dtype=np.dtype('float32')).reshape(2, len(lat), len(lon))
            tdata[tdata == -999.0] = np.nan  # missing value
            data[i, :, :] = tdata[0, :, :]
        except IOError:
            print("Can not read data from "+f)
            continue

    # return value
    return data, time, lon, lat

File diff suppressed because it is too large Load Diff

@ -1,114 +1,114 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# Copyright (c) 2019 NMC Developers. # Copyright (c) 2019 NMC Developers.
# Distributed under the terms of the GPL V3 License. # Distributed under the terms of the GPL V3 License.
""" """
Read CMA radar format file. Read CMA radar format file.
""" """
from array import array from array import array
import numpy as np import numpy as np
def read_cma_sa_radar(fname):
    """
    Read a CMA SA radar base-data file.

    refer to
    https://github.com/smft/CMA_SA_RADAR_READER/blob/master/read_RADAR.py
    https://github.com/smft/CMA_SA_RADAR_READER/blob/master/barb_plot.py
    for plot map.

    Fixes: the file is binary, so it must be opened in ``'rb'`` mode
    (``array("B", str)`` fails on Python 3), and the record count must use
    integer division (``reshape`` rejects a float on Python 3).

    :param fname: file path name.
    :return: ``(yj, fwj, jx, fsl)`` 2-D arrays -- elevation angle, azimuth,
             radial range and reflectivity -- for the n = 3 sweep.
    """
    # read the raw bytes and split into fixed 2432-byte radial records
    with open(fname, "rb") as fh:
        data = np.asarray(array("B", fh.read()))
    data = data.reshape([len(data) // 2432, 2432])

    # elevation-angle table selected by the VCP code stored at byte 72
    # NOTE(review): an unrecognized VCP code leaves ``phi`` undefined and the
    # loop below raises NameError -- original behavior intentionally kept.
    if data[0, 72] == 11:
        phi = [0.50, 0.50, 1.45, 1.45, 2.40, 3.35, 4.30,
               5.25, 6.2, 7.5, 8.7, 10, 12, 14, 16.7, 19.5]
    if data[0, 72] == 21:
        phi = [0.50, 0.50, 1.45, 1.45, 2.40, 3.35, 4.30,
               6.00, 9.00, 14.6, 19.5]
    if data[0, 72] == 31:
        phi = [0.50, 0.50, 1.50, 1.50, 2.50, 2.50, 3.50, 4.50]
    if data[0, 72] == 32:
        phi = [0.50, 0.50, 2.50, 3.50, 4.50]

    # per-radial output arrays, 460 range gates each:
    # g1 elevation, h1 azimuth, i1 radial range, j1 reflectivity
    g1 = np.zeros([len(data), 460])
    h1 = np.zeros([len(data), 460])
    i1 = np.zeros([len(data), 460])
    j1 = np.zeros([len(data), 460])

    # decode every radial record
    count = 0
    while count < len(data):
        print("径向数据编号 : ", count)
        b1 = data[count, 44] + 256 * data[count, 45]  # elevation index
        c1 = ((data[count, 36] + 256 * data[count, 37]) /
              8 * 180 / 4096)                         # azimuth angle
        d1 = data[count, 54] + 256 * data[count, 55]  # number of range gates
        print("仰角序数,方位角,径向库 : ", b1, c1, d1)
        if d1 == 0:
            # empty radial: nothing to decode
            count += 1
            continue
        else:
            count += 1
            i = 0
            while i < 460:
                g1[count - 1, i] = phi[b1 - 1]   # elevation angle
                h1[count - 1, i] = c1            # azimuth angle
                i1[count - 1, i] = 0.5 + i - 1   # radial range
                if i > d1:                       # beyond valid gates
                    j1[count - 1, i] = 0
                else:
                    if data[count - 1, 128 + i] == 0:    # no data
                        j1[count - 1, i] = 0
                    else:
                        if data[count - 1, 128 + i] == 1:  # range folded
                            j1[count - 1, i] = 0
                        else:                              # valid sample
                            j1[count - 1, i] = ((data[count - 1, 128 + i] - 2) /
                                                2 - 32)
                i += 1

    # locate the record span of elevation sweep n = 3
    n = 3
    a2 = 0  # first record of the sweep
    while a2 < len(data):
        if data[a2, 44] > (n - 1):
            break
        a2 += 1
    a3 = a2  # one past the last record of the sweep
    while a3 < len(data):
        if data[a3, 44] > n:
            break
        a3 += 1

    # slice out the sweep
    yj = g1[a2:a3, :]   # elevation angle
    fwj = h1[a2:a3, :]  # azimuth angle
    jx = i1[a2:a3, :]   # radial range
    fsl = j1[a2:a3, :]  # reflectivity

    # return data
    return yj, fwj, jx, fsl
def sph2cart(elevation, azimuth, r):
    """
    Convert spherical coordinates (elevation and azimuth in degrees,
    radius r) to cartesian (x, y, z).
    """
    ele_rad, az_rad = np.deg2rad([elevation, azimuth])
    # horizontal projection of the radius onto the x-y plane
    horiz = r * np.cos(ele_rad)
    return horiz * np.cos(az_rad), horiz * np.sin(az_rad), r * np.sin(ele_rad)

@ -1,179 +1,179 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# Copyright (c) 2019 NMC Developers. # Copyright (c) 2019 NMC Developers.
# Distributed under the terms of the GPL V3 License. # Distributed under the terms of the GPL V3 License.
""" """
Read FY satellite awx format file. Read FY satellite awx format file.
""" """
import numpy as np import numpy as np
def read_awx_cloud(fname):
    """
    Read a satellite AWX format file.

    NOTE(review): not implemented yet -- this is a placeholder stub that
    always returns None.

    :param fname: file pathname.
    :return: data list (currently None).
    """
    # read part of binary
    # refer to
    # https://stackoverflow.com/questions/14245094/how-to-read-part-of-binary-file-with-numpy
    pass
def read_himawari(fname, resolution):
    """
    Read the japan himawari satellite standard data file.

    refere to
    https://github.com/smft/Read_Himawari_binary_data/blob/master/read_Himawari.py

    :param fname: data file pathname.
    :param resolution: data resolution selector; 1 -> 1100 x 11000 grid,
                       2 -> 550 x 5500 grid, any other value -> 2200 x 22000.
    :return: data list (one array per scan line).
    """
    # pick total sample count (res = nlin * ncol) and grid shape for the
    # requested resolution
    if resolution == 1:
        res = 12100000
        nlin = 1100
        ncol = 11000
    elif resolution == 2:
        res = 3025000
        nlin = 550
        ncol = 5500
    else:
        res = 48400000
        nlin = 2200
        ncol = 22000
    # numpy structured dtype describing the Himawari standard binary layout;
    # field names follow the upstream reference reader.
    # NOTE(review): the per-field meanings are inferred from the upstream
    # reader's abbreviations -- confirm against the official Himawari
    # Standard Data format description before relying on header fields.
    formation = [('bn', 'i1', 1),          # block 1: basic information
                 ('bl', 'i2', 1),
                 ('thb', 'i2', 1),
                 ('bo', 'i1', 1),
                 ('sn', 'S1', 16),
                 ('pcn', 'S1', 16),
                 ('oa', 'S1', 4),
                 ('obf', 'S1', 2),
                 ('ot', 'i2', 1),
                 ('ost', 'float64', 1),
                 ('oet', 'float64', 1),
                 ('fct', 'float64', 1),
                 ('thl', 'i4', 1),
                 ('tdl', 'i4', 1),
                 ('qf1', 'i1', 1),
                 ('qf2', 'i1', 1),
                 ('qf3', 'i1', 1),
                 ('qf4', 'i1', 1),
                 ('ffv', 'S1', 32),
                 ('fn', 'S1', 128),
                 ('null1', 'S1', 40),
                 ('bn2', 'i1', 1),         # block 2: data information
                 ('bl2', 'i2', 1),
                 ('nbpp', 'i2', 1),
                 ('noc', 'i2', 1),
                 ('nol', 'i2', 1),
                 ('cffdb', 'i1', 1),
                 ('null2', 'S1', 40),
                 ('bn3', 'i1', 1),         # block 3: projection information
                 ('bl3', 'i2', 1),
                 ('sl', 'float64', 1),
                 ('CFAC', 'i4', 1),
                 ('LFAC', 'i4', 1),
                 ('COFF', 'float32', 1),
                 ('LOFF', 'float32', 1),
                 ('dfectvs', 'float64', 1),
                 ('eer', 'float64', 1),
                 ('epr', 'float64', 1),
                 ('var1', 'float64', 1),
                 ('var2', 'float64', 1),
                 ('var3', 'float64', 1),
                 ('cfsd', 'float64', 1),
                 ('rt', 'i2', 1),
                 ('rs', 'i2', 1),
                 ('null3', 'S1', 40),
                 ('bn4', 'i1', 1),         # block 4: navigation information
                 ('bl4', 'i2', 1),
                 ('ni', 'float64', 1),
                 ('ssplon', 'float64', 1),
                 ('ssplat', 'float64', 1),
                 ('dfects4', 'float64', 1),
                 ('nlat', 'float64', 1),
                 ('nlon', 'float64', 1),
                 ('sp', 'float64', 3),
                 ('mp', 'float64', 3),
                 ('null4', 'S1', 40),
                 ('bn5', 'i1', 1),         # block 5: calibration information
                 ('bl5', 'i2', 1),
                 ('bdn', 'i2', 1),
                 ('cwl', 'float64', 1),
                 ('vnobpp', 'i2', 1),
                 ('cvoep', 'uint16', 1),
                 ('cvoposa', 'uint16', 1),
                 ('gfcce', 'float64', 1),
                 ('cfcce', 'float64', 1),
                 ('c0', 'float64', 1),
                 ('c1', 'float64', 1),
                 ('c2', 'float64', 1),
                 ('C0', 'float64', 1),
                 ('C1', 'float64', 1),
                 ('C2', 'float64', 1),
                 ('sol', 'float64', 1),
                 ('pc', 'float64', 1),
                 ('bc', 'float64', 1),
                 ('null5', 'S1', 40),
                 ('b06n01', 'i1', 1),      # block 6
                 ('b06n02', 'i2', 1),
                 ('b06n03', 'float64', 1),
                 ('b06n04', 'float64', 1),
                 ('b06n05', 'float64', 1),
                 ('b06n06', 'float64', 1),
                 ('b06n07', 'float64', 1),
                 ('b06n08', 'float64', 1),
                 ('b06n09', 'float64', 1),
                 ('b06n10', 'float64', 1),
                 ('b06n11', 'float32', 1),
                 ('b06n12', 'float32', 1),
                 ('b06n13', 'S1', 128),
                 ('b06n14', 'S1', 56),
                 ('b07n01', 'i1', 1),      # block 7
                 ('b07n02', 'i2', 1),
                 ('b07n03', 'i1', 1),
                 ('b07n04', 'i1', 1),
                 ('b07n05', 'i2', 1),
                 ('b07n06', 'S1', 40),
                 ('b08n01', 'i1', 1),      # block 8
                 ('b08n02', 'i2', 1),
                 ('b08n03', 'float32', 1),
                 ('b08n04', 'float32', 1),
                 ('b08n05', 'float64', 1),
                 ('b08n06', 'i2', 1),
                 ('b08n07', 'i2', 1),
                 ('b08n08', 'float32', 1),
                 ('b08n09', 'float32', 1),
                 ('b08n10', 'S1', 50),
                 ('b09n01', 'i1', 1),      # block 9
                 ('b09n02', 'i2', 1),
                 ('b09n03', 'i2', 1),
                 ('b09n04', 'i2', 1),
                 ('b09n05', 'float64', 1),
                 ('b09n06', 'S1', 70),
                 ('b10n01', 'i1', 1),      # block 10
                 ('b10n02', 'i4', 1),
                 ('b10n03', 'i2', 1),
                 ('b10n04', 'i2', 1),
                 ('b10n05', 'i2', 1),
                 ('b10n06', 'S1', 36),
                 ('b11n01', 'i1', 1),      # block 11
                 ('b11n02', 'i2', 1),
                 ('b11n03', 'S1', 256),
                 ('b12n01', 'i2', res)]    # block 12: the image samples
    # read the whole file as one structured record, keep only the image
    # payload and reshape it into the 2-D grid
    data = np.fromfile(fname, dtype=formation)['b12n01'].reshape(nlin, ncol)
    return list(data)

@ -1,166 +1,166 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# Copyright (c) 2019 NMC Developers. # Copyright (c) 2019 NMC Developers.
# Distributed under the terms of the GPL V3 License. # Distributed under the terms of the GPL V3 License.
""" """
Retrieve historical data from CIMISS service. Retrieve historical data from CIMISS service.
""" """
import os import os
import calendar import calendar
import urllib.request import urllib.request
import numpy as np import numpy as np
from nmc_met_io.retrieve_cimiss_server import cimiss_obs_by_time_range from nmc_met_io.retrieve_cimiss_server import cimiss_obs_by_time_range
from nmc_met_io.retrieve_cimiss_server import cimiss_obs_in_rect_by_time_range from nmc_met_io.retrieve_cimiss_server import cimiss_obs_in_rect_by_time_range
from nmc_met_io.retrieve_cimiss_server import cimiss_obs_file_by_time_range from nmc_met_io.retrieve_cimiss_server import cimiss_obs_file_by_time_range
def get_day_hist_obs(years=np.arange(2000, 2011, 1),
                     month_range=(1, 12),
                     elements=None,
                     sta_levels=None,
                     outfname='day_rain_obs',
                     outdir='.'):
    """
    Download historical daily observations from CIMISS and write each
    month to its own pickle file.

    :param years: years for historical data.
    :param month_range: inclusive month range each year, like (1, 12).
    :param elements: elements for retrieve, 'ele1,ele2,...'
    :param sta_levels: station levels.
    :param outfname: output file name, '_year' + '_month' is appended.
    :param outdir: output file directory.
    :return: list of written file names.

    :Example:
    >>> get_day_hist_obs(years=np.arange(2000, 2016, 1), outdir="D:/")
    """
    # default element list
    if elements is None:
        elements = "Station_Id_C,Station_Name,Datetime,Lat,Lon,PRE_Time_0808"

    # make sure the output directory exists
    if not os.path.exists(outdir):
        os.makedirs(outdir)

    # CIMISS limits the data amount per request, so fetch month by month
    out_files = []
    for year in years:
        for month in range(1, 13):
            if not (month_range[0] <= month <= month_range[1]):
                continue

            # last calendar day of this month (leap years handled)
            n_days = calendar.monthrange(year, month)[1]
            start_time = '{}{:02d}01000000'.format(year, month)
            end_time = '{}{:02d}{}000000'.format(year, month, n_days)
            time_range = '[{},{}]'.format(start_time, end_time)

            # retrieve observations from CIMISS server
            data = cimiss_obs_by_time_range(
                time_range, sta_levels=sta_levels,
                data_code="SURF_CHN_MUL_DAY", elements=elements)
            if data is None:
                continue

            # save observation data to file
            outfile = os.path.join(
                outdir, '{}_{}_{:02d}.pkl'.format(outfname, year, month))
            out_files.append(outfile)
            data.to_pickle(outfile)

    return out_files
def get_mon_hist_obs(years=np.arange(2000, 2011, 1),
                     limit=(3, 73, 54, 136),
                     elements=None,
                     outfname='mon_surface_obs',
                     outdir='.'):
    """
    Download historical monthly observations from CIMISS and write each
    year to its own pickle file.

    :param years: years for historical data.
    :param limit: spatial limit [min_lat, min_lon, max_lat, max_lon].
    :param elements: elements for retrieve, 'ele1,ele2,...'
    :param outfname: output file name, '_year' is appended.
    :param outdir: output file directory.
    :return: list of written file names.
    """
    # default element list
    if elements is None:
        elements = ("Station_Id_C,Station_Name,Year,"
                    "Mon,Lat,Lon,Alti,PRE_Time_0808")

    # make sure the output directory exists
    if not os.path.exists(outdir):
        os.makedirs(outdir)

    # fetch one year per request
    out_files = []
    for year in years:
        # skip years that have already been downloaded
        outfile = os.path.join(outdir, '{}_{}.pkl'.format(outfname, year))
        if os.path.isfile(outfile):
            continue

        # whole-year window, January through December
        time_range = '[{0}0101000000,{0}1201000000]'.format(year)

        # retrieve observations from CIMISS server
        data = cimiss_obs_in_rect_by_time_range(
            time_range, limit, data_code='SURF_CHN_MUL_MON',
            elements=elements)
        if data is None:
            continue

        # save observation data to file
        out_files.append(outfile)
        data.to_pickle(outfile)

    return out_files
def get_cmpas_hist_files(time_range, outdir='.'):
    """
    Download CMPAS QPE gridded data files from CIMISS into a directory,
    skipping files that already exist on disk.

    Arguments:
        time_range {string} -- time range for retrieve,
                               "[YYYYMMDDHHMISS,YYYYMMDDHHMISS]"
        outdir {string} -- output directory.

    :Exampels:
    >>> time_range = "[20180101000000,20180331230000]"
    >>> get_cmpas_hist_files(time_range, outdir='G:/CMAPS')
    """
    # make sure the output directory exists
    if not os.path.exists(outdir):
        os.makedirs(outdir)

    # list the archive files covering the requested period
    file_info = cimiss_obs_file_by_time_range(
        time_range, data_code="SURF_CMPA_NRT_NC")

    # fetch every file that is not already present locally
    for entry in file_info['DS']:
        target = os.path.join(outdir, entry['FILE_NAME'])
        if not os.path.isfile(target):
            urllib.request.urlretrieve(entry['FILE_URL'], target)

@ -1,434 +1,434 @@
# _*_ coding: utf-8 _*_ # _*_ coding: utf-8 _*_
# Copyright (c) 2019 NMC Developers. # Copyright (c) 2019 NMC Developers.
# Distributed under the terms of the GPL V3 License. # Distributed under the terms of the GPL V3 License.
""" """
Retrieve the CIMISS data using REST API with pure python code. Retrieve the CIMISS data using REST API with pure python code.
refer to: refer to:
http://10.20.76.55/cimissapiweb/MethodData_list.action http://10.20.76.55/cimissapiweb/MethodData_list.action
https://github.com/babybearming/CIMISSDataGet/blob/master/cimissRead_v0.1.py https://github.com/babybearming/CIMISSDataGet/blob/master/cimissRead_v0.1.py
""" """
import json import json
from datetime import datetime, timedelta from datetime import datetime, timedelta
import urllib3 import urllib3
import numpy as np import numpy as np
import pandas as pd import pandas as pd
import xarray as xr import xarray as xr
from nmc_met_io.config import _get_config_from_rcfile from nmc_met_io.config import _get_config_from_rcfile
def get_http_result(interface_id, params, data_format='json'):
    """
    Perform a CIMISS REST-API request and return the raw response body.

    :param interface_id: MUSIC interface id.
    :param params: dictionary of MUSIC query parameters.
    :param data_format: MUSIC server data format.
    :return: response bytes, or None when the request fails.
    """
    # server address and credentials come from the user's rc file
    config = _get_config_from_rcfile()
    dns = config['CIMISS']['DNS']
    user_id = config['CIMISS']['USER_ID']
    pwd = config['CIMISS']['PASSWORD']

    # assemble the query url: credentials, interface id, user parameters,
    # then the requested data format
    pieces = ['http://', dns, '/cimiss-web/api?userId=', user_id,
              '&pwd=', pwd, '&interfaceId=', interface_id]
    for key, value in params.items():
        pieces.extend(['&', key, '=', value])
    pieces.extend(['&dataFormat=', data_format])
    url = ''.join(pieces)

    # issue the request and hand back the raw payload
    http = urllib3.PoolManager()
    response = http.request('GET', url)
    if response.status != 200:
        print('Can not access the url: ' + url)
        return None
    return response.data
def cimiss_obs_by_time_range(time_range, sta_levels=None,
                             data_code="SURF_CHN_MUL_HOR_N",
                             elements="Station_Id_C,Datetime,Lat,Lon,TEM"):
    """
    Retrieve station records from CIMISS for a time range.

    :param time_range: retrieval window,
                       "[YYYYMMDDHHMISS, YYYYMMDDHHMISS]",
                       like "[201509010000,20150903060000]".
    :param sta_levels: station levels, like "011,012,013" for standard,
                       base and general stations.
    :param data_code: dataset code, like "SURF_CHN_MUL_HOR",
                      "SURF_CHN_MUL_HOR_N", and so on.
    :param elements: elements for retrieve, 'ele1,ele2,...'
    :return: observation records as a pandas DataFrame, or None on failure.

    :Example:
    >>> data = cimiss_obs_by_time_range(
    ...     "[20180219000000,20180221000000]", sta_levels="011,012,013",
    ...     data_code="SURF_CHN_MUL_DAY",
    ...     elements="Station_Id_C,Station_Name,Datetime,Lat,Lon,PRE_Time_0808")
    """
    # build the MUSIC request parameters; station levels are optional
    params = {'dataCode': data_code,
              'elements': elements,
              'timeRange': time_range}
    if sta_levels is not None:
        params['staLevels'] = sta_levels

    # call the time-range interface and decode the JSON payload
    contents = get_http_result("getSurfEleByTimeRange", params)
    if contents is None:
        return None
    payload = json.loads(contents.decode('utf-8'))
    if payload['returnCode'] != '0':
        return None

    # the records arrive under the 'DS' key
    return pd.DataFrame(payload['DS'])
def cimiss_obs_by_time_and_id(times, data_code="SURF_CHN_MUL_HOR_N",
                              elements="Station_Id_C,Datetime,TEM",
                              sta_ids="54511"):
    """
    Retrieve station records from CIMISS by observation times and
    station ids.

    :param times: time for retrieve, 'YYYYMMDDHHMISS,YYYYMMDDHHMISS,...'
    :param data_code: dataset code, like "SURF_CHN_MUL_HOR",
                      "SURF_CHN_MUL_HOR_N", and so on.
    :param elements: elements for retrieve, 'ele1,ele2,...'
    :param sta_ids: station ids, 'xxxxx,xxxxx,...'
    :return: observation records as a pandas DataFrame, or None on failure.

    :Example:
    >>> data = cimiss_obs_by_time_and_id('20170318000000')
    """
    # build the MUSIC request parameters, ordered by observation time
    params = {'dataCode': data_code,
              'elements': elements,
              'times': times,
              'staIds': sta_ids,
              'orderby': "Datetime:ASC"}

    # call the time-and-station interface and decode the JSON payload
    contents = get_http_result("getSurfEleByTimeAndStaID", params)
    if contents is None:
        return None
    payload = json.loads(contents.decode('utf-8'))
    if payload['returnCode'] != '0':
        return None

    # the records arrive under the 'DS' key
    return pd.DataFrame(payload['DS'])
def cimiss_obs_in_rect_by_time(times, limit, data_code="SURF_CHN_MUL_HOR_N",
                               elements="Station_Id_C,Datetime,Lat,Lon,TEM"):
    """
    Retrieve station records from CIMISS inside a lat-lon rectangle
    at the given observation times.

    :param times: times for retrieve, 'YYYYMMDDHHMISS,YYYYMMDDHHMISS,...'
    :param limit: [min_lat, min_lon, max_lat, max_lon]
    :param data_code: dataset code, like "SURF_CHN_MUL_HOR",
                      "SURF_CHN_MUL_HOR_N", and so on
    :param elements: comma-separated element names, 'ele1,ele2,...'
    :return: observation records as a pandas DataFrame, or None when the
             request fails or the service reports an error code.

    :Example:
    >>> data = cimiss_obs_in_rect_by_time('20170320000000', [35, 110, 45, 120])
    """
    # query parameters; region bounds are fixed-point formatted strings
    query = {
        'dataCode': data_code,
        'elements': elements,
        'times': times,
        'minLat': '{:.10f}'.format(limit[0]),
        'minLon': '{:.10f}'.format(limit[1]),
        'maxLat': '{:.10f}'.format(limit[2]),
        'maxLon': '{:.10f}'.format(limit[3]),
        'orderby': "Datetime:ASC",
    }

    # perform the request; bail out early on transport failure
    raw = get_http_result("getSurfEleInRectByTime", query)
    if raw is None:
        return None

    # decode the JSON payload and check the service return code
    payload = json.loads(raw.decode('utf-8'))
    if payload['returnCode'] != '0':
        return None

    # wrap the record list in a DataFrame
    return pd.DataFrame(payload['DS'])
def cimiss_obs_in_rect_by_time_range(
        time_range, limit,
        data_code="SURF_CHN_MUL_HOR_N",
        elements="Station_Id_C,Datetime,Lat,Lon,TEM"):
    """
    Retrieve observation records from CIMISS inside a lat-lon rectangle
    over a time range.

    :param time_range: time range for retrieve,
                       "[YYYYMMDDHHMISS,YYYYMMDDHHMISS]"
    :param limit: (min_lat, min_lon, max_lat, max_lon)
    :param data_code: dataset code, like "SURF_CHN_MUL_HOR",
                      "SURF_CHN_MUL_HOR_N", and so on.
    :param elements: comma-separated element names, 'ele1,ele2,...'
    :return: observation records as a pandas DataFrame, or None when the
             request fails or the service reports an error code.

    :Example:
    >>> elements = ("Station_Id_C,Station_Id_d,Station_Name,"
                    "Station_levl,Datetime,Lat,Lon,PRE_Time_0808")
    >>> time_range = "[20160801000000,20160801000000]"
    >>> data_code = "SURF_CHN_MUL_DAY"
    >>> data = cimiss_obs_in_rect_by_time_range(
            time_range,[35,110,45,120], data_code=data_code,
            elements=elements)
    """
    # query parameters; region bounds are fixed-point formatted strings
    query = {
        'dataCode': data_code,
        'elements': elements,
        'timeRange': time_range,
        'minLat': '{:.10f}'.format(limit[0]),
        'minLon': '{:.10f}'.format(limit[1]),
        'maxLat': '{:.10f}'.format(limit[2]),
        'maxLon': '{:.10f}'.format(limit[3]),
        'orderby': "Datetime:ASC",
    }

    # perform the request; bail out early on transport failure
    raw = get_http_result("getSurfEleInRectByTimeRange", query)
    if raw is None:
        return None

    # decode the JSON payload and check the service return code
    payload = json.loads(raw.decode('utf-8'))
    if payload['returnCode'] != '0':
        return None

    # wrap the record list in a DataFrame
    return pd.DataFrame(payload['DS'])
def cimiss_obs_grid_by_time(
        time_str, data_code="SURF_CMPA_RT_NC", fcst_ele="PRE"):
    """
    Retrieve surface analysis grid products,
    like CMPAS-V2.1 merged precipitation analysis (NC).
    For SURF_CMPA_RT_NC, this function will retrieve
    the 0.01 resolution data and take a long time.

    :param time_str: analysis time string, like "2017100800"
    :param data_code: data code
    :param fcst_ele: element name
    :return: xarray.DataArray with dims (time, lat, lon), or None when the
             request fails or the service reports an error code.

    :Example:
    >>> time_str = "2017110612"
    >>> data_code = "SURF_CMPA_RT_NC"
    >>> data = cimiss_obs_grid_by_time(time_str, data_code=data_code,
                                       fcst_ele="PRE")
    """
    # set retrieve parameters (service expects YYYYMMDDHHMISS)
    params = {'dataCode': data_code,
              'time': time_str + "0000",
              'fcstEle': fcst_ele}

    # set interface id
    interface_id = "getSurfEleGridByTime"

    # retrieve data contents
    contents = get_http_result(interface_id, params)
    if contents is None:
        return None
    contents = json.loads(contents.decode('utf-8'))
    if contents['returnCode'] != '0':
        return None

    # get time information
    time = datetime.strptime(time_str, '%Y%m%d%H')

    # extract coordinates and data
    start_lat = float(contents['startLat'])
    start_lon = float(contents['startLon'])
    nlon = int(contents['lonCount'])
    nlat = int(contents['latCount'])
    dlon = float(contents['lonStep'])
    dlat = float(contents['latStep'])
    lon = start_lon + np.arange(nlon) * dlon
    lat = start_lat + np.arange(nlat) * dlat
    name = contents['fieldNames']
    units = contents['fieldUnits']

    # construct xarray DataArray.
    # BUG FIX: the scalar analysis time must be wrapped in a length-1 list
    # so it matches the size-1 leading 'time' dimension added by np.newaxis;
    # passing the bare datetime makes xarray reject the coordinate.
    data = np.array(contents['DS'])
    data = data[np.newaxis, ...]
    data = xr.DataArray(data, coords=[[time], lat, lon],
                        dims=['time', 'lat', 'lon'], name=name)

    # add attributes
    data.attrs['units'] = units
    data.attrs['organization'] = 'Created by NMC.'

    # return data
    return data
def cimiss_obs_file_by_time_range(time_range,
                                  data_code="SURF_CMPA_RT_NC"):
    """
    Query CIMISS for data-file information within a time range.

    :param time_range: time range for retrieve,
                       "[YYYYMMDDHHMISS,YYYYMMDDHHMISS]"
    :param data_code: data code
    :return: dictionary holding the file records, or None when the
             request fails or the service reports an error code.

    :Examples:
    >>> time_range = "[20180401000000,20180402000000]"
    >>> files = cimiss_obs_file_by_time_range(time_range)
    >>> filenames = files['DS']
    >>> print(files['DS'][0]['FILE_URL'])
    """
    # request the file listing from the MUSIC interface
    raw = get_http_result(
        "getSurfFileByTimeRange",
        {'dataCode': data_code, 'timeRange': time_range})
    if raw is None:
        return None

    # decode the JSON payload and validate the service return code
    payload = json.loads(raw.decode('utf-8'))
    if payload['returnCode'] != '0':
        return None

    # hand back the full response dictionary
    return payload
def cimiss_model_by_time(init_time_str, limit=None,
                         data_code='NAFP_FOR_FTM_HIGH_EC_GLB',
                         fcst_level=0, valid_time=0, fcst_ele="TEF2"):
    """
    Retrieve model grid data from the CIMISS service.

    :param init_time_str: model run time, like "2016081712"
    :param limit: [min_lat, min_lon, max_lat, max_lon] sub-region,
                  or None for the full grid.
    :param data_code: MUSIC data code, default is "NAFP_FOR_FTM_HIGH_EC_GLB"
    :param fcst_level: vertical level, default is 0.
    :param valid_time: forecast hour, default is 0
    :param fcst_ele: forecast element, default is 2m temperature "TEF2"
    :return: xarray.DataArray with dims (time, [level,] lat, lon), or None
             when the request fails or the service reports an error code.
    """
    # set retrieve parameters (with or without a lat-lon sub-region)
    if limit is None:
        params = {'dataCode': data_code,
                  'time': init_time_str + '0000',
                  'fcstLevel': '{:d}'.format(fcst_level),
                  'validTime': '{:d}'.format(valid_time),
                  'fcstEle': fcst_ele}
        interface_id = 'getNafpEleGridByTimeAndLevelAndValidtime'
    else:
        params = {'dataCode': data_code,
                  'time': init_time_str + '0000',
                  'minLat': '{:.10f}'.format(limit[0]),
                  "minLon": '{:.10f}'.format(limit[1]),
                  "maxLat": '{:.10f}'.format(limit[2]),
                  "maxLon": '{:.10f}'.format(limit[3]),
                  'fcstLevel': '{:d}'.format(fcst_level),
                  'validTime': '{:d}'.format(valid_time),
                  'fcstEle': fcst_ele}
        interface_id = 'getNafpEleGridInRectByTimeAndLevelAndValidtime'

    # retrieve data contents
    contents = get_http_result(interface_id, params)
    if contents is None:
        return None
    contents = json.loads(contents.decode('utf-8'))
    if contents['returnCode'] != '0':
        return None

    # valid time = run time plus forecast hour
    init_time = datetime.strptime(init_time_str, '%Y%m%d%H')
    fhour = valid_time
    time = init_time + timedelta(hours=fhour)

    # extract coordinates and data
    start_lat = float(contents['startLat'])
    start_lon = float(contents['startLon'])
    nlon = int(contents['lonCount'])
    nlat = int(contents['latCount'])
    dlon = float(contents['lonStep'])
    dlat = float(contents['latStep'])
    lon = start_lon + np.arange(nlon)*dlon
    lat = start_lat + np.arange(nlat)*dlat
    name = contents['fieldNames']
    units = contents['fieldUnits']

    # construct xarray DataArray.
    # BUG FIX: scalar coordinates (time, fcst_level) must be wrapped in
    # length-1 lists so they match the size-1 dimensions added with
    # np.newaxis; bare scalars make xarray reject the coordinates.
    data = np.array(contents['DS'])
    if fcst_level == 0:
        data = data[np.newaxis, ...]
        data = xr.DataArray(data, coords=[[time], lat, lon],
                            dims=['time', 'lat', 'lon'], name=name)
    else:
        data = data[np.newaxis, np.newaxis, ...]
        data = xr.DataArray(data, coords=[[time], [fcst_level], lat, lon],
                            dims=['time', 'level', 'lat', 'lon'], name=name)

    # add auxiliary time coordinates.
    # BUG FIX: values assigned along the 'time' dim must be length-1
    # sequences, not bare scalars.
    data.coords['init_time'] = ('time', [init_time])
    data.coords['fhour'] = ('time', [fhour])

    # add attributes
    data.attrs['units'] = units
    data.attrs['organization'] = 'Created by NMC.'

    # return data
    return data

File diff suppressed because it is too large Load Diff

126
setup.py

@ -1,63 +1,63 @@
# _*_ coding: utf-8 _*_

from os import path
from setuptools import find_packages, setup
# NOTE(review): codecs.open shadows the builtin open; kept for legacy
# Python 2 compatibility — on Python 3 the builtin would suffice.
from codecs import open

# Package identity: author/version are read from the package itself so
# they are defined in exactly one place (nmc_met_io/__init__.py).
name = 'nmc_met_io'
author = __import__(name).__author__
version = __import__(name).__version__

# Directory containing this setup.py, used to locate sibling files.
here = path.abspath(path.dirname(__file__))

# Get the long description from the README file
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
    long_description = f.read()

setup(
    name=name,

    version=version,

    description=("Collection of tools for I/O or"
                 "accessing meteorological data."),
    long_description=long_description,

    # author
    author=author,
    author_email='kan.dai@foxmail.com',

    # LICENSE
    license='GPL3',

    classifiers=[
      'Development Status :: 3 - Alpha',
      'Intended Audience :: Developers',
      'Programming Language :: Python :: 3',
      ],

    packages=find_packages(exclude=['docs', 'tests', 'build', 'dist']),
    include_package_data=True,
    exclude_package_data={'': ['.gitignore']},

    install_requires=['numpy>=1.12.1',
                      'scipy>=0.19.0',
                      'xarray>=0.9.6',
                      'pandas>=0.20.0',
                      'pyproj>=1.9.5.1',
                      'protobuf>=3.5.0',
                      'urllib3>=1.20',
                      'python-dateutil']
)

# Usage notes:
# development mode (DOS command):
#   python setup.py develop
#   python setup.py develop --uninstall

# build mode
#   python setup.py build --build-base=D:/test/python/build

# distribution mode:
#   python setup.py sdist             # create source tar.gz file in /dist
#   python setup.py bdist_wheel       # create wheel binary in /dist