bes Updated for version 3.21.1
The Backend Server (BES) is the lower two tiers of the Hyrax data server.
HDFEOS5CFSpecialCVArray.cc
1// This file is part of the hdf5_handler, implementing the CF-compliant option.
2// Copyright (c) 2011-2023 The HDF Group, Inc. and OPeNDAP, Inc.
3//
4// This is free software; you can redistribute it and/or modify it under the
5// terms of the GNU Lesser General Public License as published by the Free
6// Software Foundation; either version 2.1 of the License, or (at your
7// option) any later version.
8//
9// This software is distributed in the hope that it will be useful, but
10// WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
11// or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
12// License for more details.
13//
14// You should have received a copy of the GNU Lesser General Public
15// License along with this library; if not, write to the Free Software
16// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17//
18// You can contact OPeNDAP, Inc. at PO Box 112, Saunderstown, RI. 02874-0112.
19// You can contact The HDF Group, Inc. at 410 E University Ave,
20// Suite 200, Champaign, IL 61820
21
31
32#include <iostream>
33#include <memory>
34#include <cassert>
35#include <BESDebug.h>
36#include <libdap/InternalErr.h>
37
38#include "HDF5RequestHandler.h"
39#include "HDFEOS5CFSpecialCVArray.h"
 40
41using namespace std;
42using namespace libdap;
43
44BaseType *HDFEOS5CFSpecialCVArray::ptr_duplicate()
45{
46 auto HDFEOS5CFSpecialCVArray_unique = make_unique<HDFEOS5CFSpecialCVArray>(*this);
47 return HDFEOS5CFSpecialCVArray_unique.release();
48}
49
50bool HDFEOS5CFSpecialCVArray::read(){
51
52 BESDEBUG("h5","Coming to HDFEOS5CFSpecialCVArray read "<<endl);
53
54 read_data_NOT_from_mem_cache(false,nullptr);
55
56 return true;
57}
58
59void HDFEOS5CFSpecialCVArray::read_data_NOT_from_mem_cache(bool /*add_cache*/, void*/*buf*/) {
60
61 BESDEBUG("h5","Coming to HDFEOS5CFSpecialCVArray: read_data_NOT_from_mem_cache "<<endl);
62
63 bool check_pass_fileid_key = HDF5RequestHandler::get_pass_fileid();
64
65 vector<int64_t> offset;
66 vector<int64_t> count;
67 vector<int64_t> step;
68 int64_t nelms = 0;
69
70 if (rank <= 0)
71 throw InternalErr (__FILE__, __LINE__,
72 "The number of dimension of the variable is <=0 for this array.");
73
74 else {
75 offset.resize(rank);
76 count.resize(rank);
77 step.resize(rank);
78
79 nelms = format_constraint (offset.data(), step.data(), count.data());
80
81 }
82
83 if(false == check_pass_fileid_key) {
84 if ((fileid = H5Fopen(filename.c_str(),H5F_ACC_RDONLY,H5P_DEFAULT))<0) {
85
86 ostringstream eherr;
87 eherr << "HDF5 File " << filename
88 << " cannot be opened. "<<endl;
89 throw InternalErr (__FILE__, __LINE__, eherr.str ());
90 }
91 }
92 string cv_name = HDF5CFUtil::obtain_string_after_lastslash(varname);
93 if ("" == cv_name) {
94 throw InternalErr (__FILE__, __LINE__, "Cannot obtain the TES CV name from the variable name");
95 }
96
97 string group_name = varname.substr(0,varname.size()-cv_name.size());
98
99 size_t cv_name_sep_pos = cv_name.find_first_of('_',0);
100
101 if (string::npos == cv_name_sep_pos) {
102 HDF5CFUtil::close_fileid(fileid,check_pass_fileid_key);
103 throw InternalErr (__FILE__, __LINE__, "Cannot obtain the TES CV attribute name");
104 }
105 string cv_attr_name = cv_name.substr(0,cv_name_sep_pos);
106
107 htri_t swath_link_exist = H5Lexists(fileid,group_name.c_str(),H5P_DEFAULT);
108
109 if (swath_link_exist <= 0) {
110 HDF5CFUtil::close_fileid(fileid,check_pass_fileid_key);
111 throw InternalErr (__FILE__, __LINE__, "The TES swath link doesn't exist");
112 }
113
114 htri_t swath_exist = H5Oexists_by_name(fileid,group_name.c_str(),H5P_DEFAULT);
115 if (swath_exist <= 0) {
116 HDF5CFUtil::close_fileid(fileid,check_pass_fileid_key);
117 throw InternalErr (__FILE__, __LINE__, "The TES swath doesn't exist");
118 }
119
120 htri_t cv_attr_exist = H5Aexists_by_name(fileid,group_name.c_str(),cv_attr_name.c_str(),H5P_DEFAULT);
121 if (cv_attr_exist <= 0) {
122 HDF5CFUtil::close_fileid(fileid,check_pass_fileid_key);
123 throw InternalErr (__FILE__, __LINE__, "The TES swath CV attribute doesn't exist");
124 }
125
126 hid_t cv_attr_id = H5Aopen_by_name(fileid,group_name.c_str(),cv_attr_name.c_str(),H5P_DEFAULT,H5P_DEFAULT);
127 if (cv_attr_id <0) {
128 HDF5CFUtil::close_fileid(fileid,check_pass_fileid_key);
129 throw InternalErr (__FILE__, __LINE__, "Cannot obtain the TES CV attribute id");
130 }
131
132 hid_t attr_type = -1;
133 if ((attr_type = H5Aget_type(cv_attr_id)) < 0) {
134 string msg = "cannot get the attribute datatype for the attribute ";
135 msg += cv_attr_name;
136 H5Aclose(cv_attr_id);
137 HDF5CFUtil::close_fileid(fileid,check_pass_fileid_key);
138 throw InternalErr(__FILE__, __LINE__, msg);
139 }
140
141 hid_t attr_space = -1;
142 if ((attr_space = H5Aget_space(cv_attr_id)) < 0) {
143 string msg = "cannot get the hdf5 dataspace id for the attribute ";
144 msg += cv_attr_name;
145 H5Tclose(attr_type);
146 H5Aclose(cv_attr_id);
147 HDF5CFUtil::close_fileid(fileid,check_pass_fileid_key);
148 throw InternalErr(__FILE__, __LINE__, msg);
149 }
150
151 auto attr_num_elm = (int)(H5Sget_simple_extent_npoints(attr_space));
152 if (0 == attr_num_elm ) {
153 string msg = "the number of elements is 0 for the attribute ";
154 msg += cv_attr_name;
155 H5Tclose(attr_type);
156 H5Aclose(cv_attr_id);
157 H5Sclose(attr_space);
158 HDF5CFUtil::close_fileid(fileid,check_pass_fileid_key);
159 throw InternalErr(__FILE__, __LINE__, msg);
160 }
161
162 if (attr_num_elm != (total_num_elm -1)) {
163 string msg = "unexpected number of elements for the attribute ";
164 msg += cv_attr_name;
165 H5Tclose(attr_type);
166 H5Aclose(cv_attr_id);
167 H5Sclose(attr_space);
168 HDF5CFUtil::close_fileid(fileid,check_pass_fileid_key);
169 throw InternalErr(__FILE__, __LINE__, msg);
170 }
171
172 if (dtype != H5FLOAT32 || dtype != HDF5CFUtil::H5type_to_H5DAPtype(attr_type)) {
173 string msg = "unexpected datatype (expected 32-bit floating-point) for the attribute ";
174 msg += cv_attr_name;
175 H5Tclose(attr_type);
176 H5Aclose(cv_attr_id);
177 H5Sclose(attr_space);
178 HDF5CFUtil::close_fileid(fileid,check_pass_fileid_key);
179 throw InternalErr(__FILE__, __LINE__, msg);
180 }
181
182 hid_t attr_mem_type = -1;
183 if ((attr_mem_type = H5Tget_native_type(attr_type,H5T_DIR_ASCEND)) < 0) {
184 string msg = "cannot get the attribute datatype for the attribute ";
185 msg += cv_attr_name;
186 H5Tclose(attr_type);
187 H5Aclose(cv_attr_id);
188 H5Sclose(attr_space);
189 HDF5CFUtil::close_fileid(fileid,check_pass_fileid_key);
190 throw InternalErr(__FILE__, __LINE__, msg);
191 }
192
193 if (nelms <= 0 || (total_num_elm -1) <=0 ||total_num_elm < 0) {
194 H5Tclose(attr_mem_type);
195 H5Tclose(attr_type);
196 H5Aclose(cv_attr_id);
197 H5Sclose(attr_space);
198 HDF5CFUtil::close_fileid(fileid,check_pass_fileid_key);
199 throw InternalErr(__FILE__,__LINE__,
200 "Number of elements must be greater than 0");
201 }
202
203 vector<float> val;
204 val.resize(nelms);
205
206 vector<float> orig_val;
207 orig_val.resize(total_num_elm-1);
208
209 vector<float> total_val;
210 total_val.resize(total_num_elm);
211
212
213 if (H5Aread(cv_attr_id,attr_mem_type, (void*)orig_val.data())<0){
214 string msg = "cannot retrieve the value of the attribute ";
215 msg += cv_attr_name;
216 H5Tclose(attr_mem_type);
217 H5Tclose(attr_type);
218 H5Aclose(cv_attr_id);
219 H5Sclose(attr_space);
220 HDF5CFUtil::close_fileid(fileid,check_pass_fileid_key);
221 throw InternalErr(__FILE__, __LINE__, msg);
222 }
223
224
225 // Panoply cannot accept repeated values in a coordinate variable,
 226 // so a unique value has to be created for the added level.
 227 // The first data value is 1211.53 hPa, which already corresponds to a level below
 228 // mean sea level (1013.25 hPa), so more weight (0.9) is given to the first data
 229 // value and less weight (0.1) to the second data value in the calculation.
 230 // As far as I know, no real data exist at the added level (the data value is the fill value -999.0),
 231 // so it will not affect any visualization results.
 232 // The ordering is: added_value, first_value, second_value.
 233 // The equal-weight equation would be first_value = 0.5*(added_value + second_value);
 234 // with weights 0.9 and 0.1 it becomes first_value = 0.9*added_value + 0.1*second_value.
 235 // Solving for the added value gives the approximate formula
 236 // added_value = 1.1*first_value - 0.1*second_value;
 237 // KY 2012-2-20
238
239 total_val[0] = 1.1F*orig_val[0] - 0.1F*orig_val[1];
240 for (int i = 1; i < total_num_elm; i++)
241 total_val[i] = orig_val[i-1];
242
243
244 // Note: offset and step in this case will never be nullptr since the total number of elements will be
245 // greater than 1. This is enforced by the check at line 193 above. KY 2014-02-27
246 for (int i = 0; i <nelms; i++)
247 val[i] = total_val[offset[0]+i*step[0]];
248
249 set_value_ll(val.data(), nelms);
250 H5Tclose(attr_type);
251 H5Tclose(attr_mem_type);
252 H5Aclose(cv_attr_id);
253 H5Sclose(attr_space);
254 HDF5CFUtil::close_fileid(fileid,check_pass_fileid_key);
255
256 return;
257
258}
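The comment at listing lines 225-237 describes how the extra coordinate value is extrapolated and how the constrained subset is then copied into val. The following standalone sketch restates that calculation outside the handler; it is illustrative only (it does not use the handler's classes, and every level value except the 1211.53 hPa first level is made up):

// A minimal, standalone sketch of the level-extrapolation and subsetting logic
// described above (illustrative only; not the handler's actual code path).
#include <cstdint>
#include <iostream>
#include <vector>

// Prepend one extrapolated level using the approximate weighting
// added_value = 1.1*first_value - 0.1*second_value.
static std::vector<float> build_full_levels(const std::vector<float> &orig)
{
    std::vector<float> total(orig.size() + 1);
    total[0] = 1.1F * orig[0] - 0.1F * orig[1];   // the added (extrapolated) level
    for (size_t i = 1; i < total.size(); i++)
        total[i] = orig[i - 1];                   // the stored levels, shifted by one
    return total;
}

// Apply a DAP-style hyperslab constraint (offset/step/count) along one dimension.
static std::vector<float> subset(const std::vector<float> &total,
                                 int64_t offset, int64_t step, int64_t count)
{
    std::vector<float> out(count);
    for (int64_t i = 0; i < count; i++)
        out[i] = total[offset + i * step];
    return out;
}

int main()
{
    // Hypothetical pressure levels (hPa). Only 1211.53 comes from the comment
    // in the source above; the remaining values are made up for illustration.
    const std::vector<float> orig = {1211.53F, 1100.00F, 1000.00F, 900.00F};

    const std::vector<float> total = build_full_levels(orig);
    // total[0] = 1.1*1211.53 - 0.1*1100.00 = 1222.683

    // A constraint such as [0:2:4] maps to offset = 0, step = 2, count = 3.
    for (float v : subset(total, 0, 2, 3))
        std::cout << v << '\n';
    return 0;
}

Compiled and run, this prints the extrapolated level (1222.683 hPa) followed by every other remaining value (1100 and 900), mirroring how val is filled from total_val with offset and step in read_data_NOT_from_mem_cache().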