Fix get_fy_awx's data latitude order.

NMC-DAVE 2021-12-09 23:21:24 +08:00
parent c77dd90422
commit 1d6bf1a738
5 changed files with 323 additions and 190 deletions

@@ -39,37 +39,59 @@ Using the following command to install packages:
 - [arm_pyart](http://arm-doe.github.io/pyart/), `conda install -c conda-forge arm_pyart`
 ## Set up the configuration file
-To access CIMISS, CMADaaS or MICAPS servers etc., the addresses and user information have to be set in a configuration file (items that are not needed can be left unconfigured).
+To access CMADaaS (the big-data cloud), MICAPS servers etc., the addresses and user information have to be set in a configuration file (items that are not needed can be left unconfigured).
 * In the user's home directory ("C:\Users\USERNAME" on Windows or "/home/USERNAME/" on Linux), create a folder named ".nmcdev" (if Windows will not create it directly, run `mkdir .nmcdev` in a command window).
 * Inside ".nmcdev", create a text file "config.ini" with the following template:
 ```
+# Configuration file used by nmc_met_io to read from the big-data cloud, MICAPS servers, etc.
+# If a server is not used, leave its section unset or delete it.
+# Note: when setting IP addresses, do not add prefixes such as http.
+# CMADaaS big-data cloud platform settings:
+# DNS is the IP address, PORT is the port
+# USER_ID and PASSWORD are the user name and password
+# serviceNodeId is the service node name (usually NMIC_MUSIC_CMADAAS)
+[CMADaaS]
+DNS = xx.xx.xx.xx
+PORT = xx
+USER_ID = xxxxxxxx
+PASSWORD = xxxxxxxx
+serviceNodeId = NMIC_MUSIC_CMADAAS
+# MICAPS Cassandra server settings (access usually has to be granted by the operations team)
+# GDS_IP is the IP address, GDS_PORT is the port
+# CACHE_DIR can be set manually to a local data cache directory; it defaults to the cache folder under the configuration directory
+[MICAPS]
+GDS_IP = xx.xx.xx.xx
+GDS_PORT = 8080
+# Cached file directory, if not set,
+# /user_home/.nmcdev/cache will be used.
+# CACHE_DIR = ~
+# CIMISS address, user ID and PASSWORD; CIMISS stops service at the end of 2021
+# DNS is the IP address, PORT is the port
+# USER_ID and PASSWORD are the user name and password
 [CIMISS]
 DNS = xx.xx.xx.xx
 USER_ID = xxxxxxxxx
 PASSWORD = xxxxxxxx
-[CMADaaS]
-DNS = xx.xx.xx.xx
-PORT = xx
-USER_ID = xxxxxxxxx
-PASSWORD = xxxxxxxx
-serviceNodeId = NMIC_MUSIC_CMADAAS
-[MICAPS]
-GDS_IP = xx.xx.xx.xx
-GDS_PORT = xxxx
-# Cached file directory, if not set,
-# /home/USERNAME/.nmcdev/cache (linux) or C:/Users/USERNAME/.nmcdev/cache (windows) will be used.
-[CACHE]
-# CACHE_DIR = ~
+# Access token for the Caiyun Weather API
+[CAIY]
+token = xxxxxxxxxxxxxx
+# Access token for MAPBOX map data (can be used by nmc_met_graphics plotting)
+# apply at https://docs.mapbox.com/help/glossary/access-token
 [MAPBOX]
-token = pk.xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+token = xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+# Access token for Tianditu map data (can be used by nmc_met_graphics plotting)
+# apply at http://lbs.tianditu.gov.cn/server/MapService.html
+[TIANDITU]
+token = xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
 ```
-Replace the xxxx placeholders with the corresponding addresses, ports and user information. To use MAPBOX maps, apply for an [access token](https://docs.mapbox.com/help/glossary/access-token).
 ---
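The template above is a plain INI file. As a minimal sketch, it can be read with Python's standard configparser; this is for illustration only and is not necessarily how nmc_met_io itself loads its settings:

```python
# Minimal sketch: read the ~/.nmcdev/config.ini template above with the
# standard library.  Section and option names follow the template; this is
# an illustration, not nmc_met_io's own loader.
import configparser
from pathlib import Path

config = configparser.ConfigParser()
config.read(Path.home() / ".nmcdev" / "config.ini")

if config.has_section("MICAPS"):
    gds_ip = config.get("MICAPS", "GDS_IP")
    gds_port = config.getint("MICAPS", "GDS_PORT")
    print(f"MICAPS GDS server: {gds_ip}:{gds_port}")
```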

@@ -65,7 +65,7 @@
 "from metpy.units import units\n",
 "\n",
 "from nmc_met_io.read_micaps import read_micaps_1\n",
-"from nmc_met_graphics.plot.china_map import add_china_map_2cartopy"
+"from nmc_met_graphics.plot.mapview import add_china_map_2cartopy"
 ]
 },
 {
@@ -276,7 +276,7 @@
 "name": "python",
 "nbconvert_exporter": "python",
 "pygments_lexer": "ipython3",
-"version": "3.7.7"
+"version": "3.9.7"
 },
 "latex_envs": {
 "LaTeX_envs_menu_present": true,

@@ -77,7 +77,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 19,
+"execution_count": 5,
 "id": "dcad8a15-0b25-435b-a1bf-c780652f907f",
 "metadata": {},
 "outputs": [
@@ -123,92 +123,92 @@
@@ -229,131 +229,131 @@
[DataFrame preview output of this cell (HTML and text/plain): hourly surface observations at station 54511, now with Lat 39.8061, Lon 116.4694, Alti 32.8 (previously 39.8, 116.4667, 31.3), covering November 2021 with 720 rows x 15 columns in place of the earlier full-year 2010 preview of 8760 rows x 15 columns; the PRE_12h and PRE_24h columns, previously NaN, now contain 0.0.]
 ]
 },
-"execution_count": 19,
+"execution_count": 5,
 "metadata": {},
 "output_type": "execute_result"
 }
@@ -364,7 +364,7 @@
 "\n",
 "# set retrieve parameters\n",
 "data_code = \"SURF_CHN_MUL_HOR_N\"   # China surface hourly observations (national stations)\n",
-"time_range = \"[20100101000000,20101231230000]\"\n",
+"time_range = \"[20211101000000,20211130230000]\"\n",
 "elements = \"Station_Id_C,Lat,Lon,Alti,Year,Mon,Day,Hour,PRS_sea,TEM,DPT,RHU,PRE_1h,PRE_12h,PRE_24h\"\n",
 "sta_ids = \"54511\"\n",
 "\n",
@@ -939,7 +939,7 @@
 "name": "python",
 "nbconvert_exporter": "python",
 "pygments_lexer": "ipython3",
-"version": "3.7.10"
+"version": "3.9.7"
 },
 "widgets": {
 "application/vnd.jupyter.widget-state+json": {

File diff suppressed because one or more lines are too long

@@ -821,7 +821,7 @@ def get_fy_awx(directory, filename=None, suffix="*.AWX", units='', cache=True, c
     :return: satellite information and data.
     :Examples:
-    >>> directory = "SATELLITE/FY2E/L1/IR1/EQUAL"
+    >>> directory = "SATELLITE/FY4A/L1/CHINA/C004"
     >>> data = get_fy_awx(directory)
     """
@@ -868,7 +868,7 @@ def get_fy_awx(directory, filename=None, suffix="*.AWX", units='', cache=True, c
         print('There is no data ' + filename + ' in ' + directory)
         return None
-    # the first class file head
+    # the first class file head; the first-level header record has a fixed length of 40 bytes in total
     head1_dtype = [
         ('SAT96', 'S12'),           # SAT96 filename
         ('byteSequence', 'i2'),     # byte order of integers: 0 = low byte first, high byte last; != 0 = high byte first, low byte last.
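The fixed-length header is decoded with a numpy structured dtype. A self-contained sketch of the same mechanism, using a shortened two-field dtype and synthetic bytes rather than a real AWX file, is:

```python
# Minimal, self-contained sketch of parsing a fixed-length binary header with
# a numpy structured dtype, the same mechanism get_fy_awx uses for its 40-byte
# first-level header.  The two-field dtype and the fake byte string are
# illustrative only.
import numpy as np

demo_dtype = [('SAT96', 'S12'),         # 12-byte file name field
              ('byteSequence', '<i2')]  # 2-byte integer byte-order flag
record = b'SAT96 DEMO  ' + np.int16(0).tobytes()   # 14 bytes of fake header

head = np.frombuffer(record, dtype=demo_dtype, count=1)
print(head['SAT96'][0], head['byteSequence'][0])   # b'SAT96 DEMO  ' 0
```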
@@ -886,7 +886,7 @@ def get_fy_awx(directory, filename=None, suffix="*.AWX", units='', cache=True, c
     ind = 40
     if head1_info['productCategory']:
-        # the second class file head
+        # the second class file head; the second-level header has a variable length and its content depends on the product.
         head2_dtype = [
             ('satelliteName', 'S8'),    # satellite name
             ('year', 'i2'), ('month', 'i2'),
@@ -961,13 +961,16 @@ def get_fy_awx(directory, filename=None, suffix="*.AWX", units='', cache=True, c
         ind += head1_info['padDataLength'][0]
     # retrieve data records
-    data_len = (head1_info['dataRecordNumber'][0].astype(np.int) *
+    data_len = (head1_info['dataRecordNumber'][0].astype(int) *
                 head1_info['recordLength'][0])
     data = np.frombuffer(byteArray[ind:(ind + data_len)], dtype='u1', count=data_len)
     if calibration_table is not None:
         data = calibration_table[data]
     data.shape = (head1_info['dataRecordNumber'][0], head1_info['recordLength'][0])
+    # the data records are stored starting from the top-left corner, so the latitude order has to be reversed
+    data = np.flip(data, axis=0)
     # construct longitude and latitude coordinates
     # if the verticalResolution and horizontalResolution are used, lon and lat will not be correct.
     #lat = (
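The added `np.flip` is the heart of the fix: AWX image records are stored starting at the top-left corner, so the first data row is the northernmost scan line, and the rows are reversed so that the row index increases together with an ascending latitude coordinate. A tiny numpy illustration with synthetic values only:

```python
# Illustration of the latitude-order fix: a synthetic "image" whose first row
# is the northernmost scan line, flipped so that the row index increases with
# an ascending (south-to-north) latitude coordinate.
import numpy as np

grid = np.array([[9, 9, 9, 9],     # northernmost scan line (stored first in the file)
                 [5, 5, 5, 5],
                 [1, 1, 1, 1]])    # southernmost scan line (stored last)
lat = np.array([30.0, 35.0, 40.0])  # ascending latitude coordinate

grid = np.flip(grid, axis=0)       # now grid[i] matches lat[i]
print(lat[0], grid[0])             # 30.0 [1 1 1 1]  (southern row)
print(lat[-1], grid[-1])           # 40.0 [9 9 9 9]  (northern row)
```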