bes 3.21.1
The Backend Server (BES) is the lower two tiers of the Hyrax data server.
heos5cfdap.cc
1// This file is part of hdf5_handler: an HDF5 file handler for the OPeNDAP
2// data server.
3
4// Copyright (c) 2011-2023 The HDF Group, Inc. and OPeNDAP, Inc.
5//
6// This is free software; you can redistribute it and/or modify it under the
7// terms of the GNU Lesser General Public License as published by the Free
8// Software Foundation; either version 2.1 of the License, or (at your
9// option) any later version.
10//
11// This software is distributed in the hope that it will be useful, but
12// WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13// or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
14// License for more details.
15//
16// You should have received a copy of the GNU Lesser General Public
17// License along with this library; if not, write to the Free Software
18// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19//
20// You can contact OPeNDAP, Inc. at PO Box 112, Saunderstown, RI. 02874-0112.
21// You can contact The HDF Group, Inc. at 410 E University Ave,
22// Suite 200, Champaign, IL 61820
23
32
33#include <fcntl.h>
34#include <unistd.h>
35#include <iostream>
36#include <memory>
37
38#include <BESLog.h>
39#include <BESDebug.h>
40
41#include <libdap/parser.h>
42#include "heos5cfdap.h"
43#include "h5cfdaputil.h"
44#include "HDF5CFByte.h"
45#include "HDF5CFInt8.h"
46#include "HDF5CFUInt16.h"
47#include "HDF5CFInt16.h"
48#include "HDF5CFUInt32.h"
49#include "HDF5CFInt32.h"
50#include "HDF5CFUInt64.h"
51#include "HDF5CFInt64.h"
52#include "HDF5CFFloat32.h"
53#include "HDF5CFFloat64.h"
54#include "HDF5CFStr.h"
55#include "HDF5CFArray.h"
59#include "HDF5CFGeoCFProj.h"
60#include "HDF5RequestHandler.h"
61#include "h5apicompatible.h"
62
63#include "HE5Parser.h"
64#include "HE5Checker.h"
65#include "he5das.tab.hh"
66
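// Forward declarations for the flex/bison generated scanner and parser entry points:
// the he5dds* functions handle the StructMetadata grammar and the he5das* functions
// handle the ECS metadata that is parsed into DAS attribute tables.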
67struct yy_buffer_state;
68
69yy_buffer_state *he5dds_scan_string(const char *str);
70int he5ddsparse(HE5Parser *he5parser);
71int he5dasparse(libdap::parser_arg *arg);
72int he5ddslex_destroy();
73int he5daslex_destroy();
74
76yy_buffer_state *he5das_scan_string(const char *str);
77
78using namespace HDF5CF;
79
80// Map EOS5 to DAP DDS
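// Overview of the steps below: read and concatenate the ECS StructMetadata, parse it
// with the HE5 parser, validate the grid projection parameters, build an EOS5File
// object that holds the variable/dimension information, and finally generate the DDS
// from that object.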
81void map_eos5_cfdds(DDS &dds, hid_t file_id, const string & filename) {
82
83 BESDEBUG("h5","Coming to HDF-EOS5 products DDS mapping function map_eos5_cfdds "<<endl);
84
85
86 string st_str;
87 string core_str;
88 string arch_str;
89 string xml_str;
90 string subset_str;
91 string product_str;
92 string other_str;
93 bool st_only = true;
94
95 // Read ECS metadata: merge them into one C++ string
96 read_ecs_metadata(file_id,st_str,core_str,arch_str,xml_str, subset_str,product_str,other_str,st_only);
97 if(""==st_str) {
98 string msg =
99 "unable to obtain the HDF-EOS5 struct metadata ";
100 throw InternalErr(__FILE__, __LINE__, msg);
101 }
102
103 bool is_check_nameclashing = HDF5RequestHandler::get_check_name_clashing();
104
105 EOS5File *f = nullptr;
106
107 try {
108 f = new EOS5File(filename.c_str(),file_id);
109 }
110 catch(...) {
111 throw InternalErr(__FILE__,__LINE__,"Cannot allocate the file object.");
112 }
113
114 bool include_attr = false;
115
116 // This first "try-catch" block will use the parsed info
117 try {
118
119 // Parse the structmetadata
120 HE5Parser p;
121 HE5Checker c;
122 he5dds_scan_string(st_str.c_str());
123 he5ddsparse(&p);
124 he5ddslex_destroy();
125
126 // Retrieve ProjParams from StructMetadata
127 p.add_projparams(st_str);
128#if 0
129 //p.print();
130#endif
131
132 // Check if the HDF-EOS5 grid has the valid parameters, projection codes.
133 if (c.check_grids_unknown_parameters(&p)) {
134             throw InternalErr("Unknown HDF-EOS5 grid parameters found in the file");
135 }
136
137 if (c.check_grids_missing_projcode(&p)) {
138             throw InternalErr("The HDF-EOS5 file is missing the projection code.");
139 }
140
141 // We gradually add the support of different projection codes
142 if (c.check_grids_support_projcode(&p)) {
143             throw InternalErr("The current projection code is not supported");
144 }
145
146 // HDF-EOS5 provides default pixel and origin values if they are not defined.
147 c.set_grids_missing_pixreg_orig(&p);
148
149 // Check if this multi-grid file shares the same grid.
150 bool grids_mllcv = c.check_grids_multi_latlon_coord_vars(&p);
151
152 // Retrieve all HDF5 info(Not the values)
153 f->Retrieve_H5_Info(filename.c_str(),file_id,include_attr);
154
155 // Adjust EOS5 Dimension names/sizes based on the parsed results
156 f->Adjust_EOS5Dim_Info(&p);
157
158 // Translate the parsed output to HDF-EOS5 grids/swaths/zonal.
159         // Several maps related to dimension and coordinates are set up here.
160 f->Add_EOS5File_Info(&p, grids_mllcv);
161
162 // Add the dimension names
163 f->Add_Dim_Name(&p);
164 }
165 catch (HDF5CF::Exception &e){
166 delete f;
167 throw InternalErr(e.what());
168 }
169 catch(...) {
170 delete f;
171 throw;
172 }
173
174     // The parsed struct is no longer used in this "try-catch" block.
175 try {
176
177 // NASA Aura files need special handling. So first check if this file is an Aura file.
179
180 // Adjust the variable name
182
183 // Handle coordinate variables
184 f->Handle_CVar();
185
186 // Adjust variable and dimension names again based on the handling of coordinate variables.
188
189
190         // We need to use the CV units to distinguish lat/lon from the 3rd CV when
191 // memory cache is turned on.
192 if((HDF5RequestHandler::get_lrdata_mem_cache() != nullptr) ||
193 (HDF5RequestHandler::get_srdata_mem_cache() != nullptr)){
194
195 // Handle unsupported datatypes including the attributes
197
198 // Handle unsupported dataspaces including the attributes
200
201 // We need to retrieve coordinate variable attributes for memory cache use.
203
204 }
205 else {
206
207 // Handle unsupported datatypes
208 f->Handle_Unsupported_Dtype(include_attr);
209
210 // Handle unsupported dataspaces
211 f->Handle_Unsupported_Dspace(include_attr);
212
213 }
214
215
216 // Need to retrieve the units of CV when memory cache is turned on.
217 // The units of CV will be used to distinguish whether this CV is
218 // latitude/longitude or a third-dimension CV.
219 // isLatLon() will use the units value.
220 if((HDF5RequestHandler::get_lrdata_mem_cache() != nullptr) ||
221 (HDF5RequestHandler::get_srdata_mem_cache() != nullptr))
222 f->Adjust_Attr_Info();
223
224 // May need to adjust the object names for special objects. Currently, no operations
225 // are done in this routine.
226 f->Adjust_Obj_Name();
227
228 // Flatten the object name
229 f->Flatten_Obj_Name(include_attr);
230
231 // Handle name clashing
232 if(true == is_check_nameclashing)
233 f->Handle_Obj_NameClashing(include_attr);
234
235         // Check if this should follow COARDS; if yes, set the COARDS flag.
237
238 // For COARDS, the dimension name needs to be changed.
239 f->Adjust_Dim_Name();
240         if(true == is_check_nameclashing)
241            f->Handle_DimNameClashing();
242
243         // We need to turn off the very long string in the TES file to avoid
244         // choking the netCDF Java tools. So this special variable routine
245         // is listed last. We may remove this workaround if netCDF can handle
246         // long strings better.
247 f->Handle_SpVar();
248 }
249 catch (HDF5CF::Exception &e){
250 delete f;
251 throw InternalErr(e.what());
252 }
253
254 // Generate EOS5 DDS
255 try {
256 gen_eos5_cfdds(dds,f);
257 }
258 catch(...) {
259 delete f;
260 throw;
261 }
262
263 delete f;
264}
265
266// Map EOS5 to DAP DAS
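// The DAS path repeats the StructMetadata parsing done for the DDS, but with
// include_attr set to true so that attributes are also retrieved; supplemental
// and coordinate attributes are added before the DAS is generated.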
267void map_eos5_cfdas(DAS &das, hid_t file_id, const string &filename) {
268
269 BESDEBUG("h5","Coming to HDF-EOS5 products DAS mapping function map_eos5_cfdas "<<endl);
270 string st_str;
271 string core_str;
272 string arch_str;
273 string xml_str;
274 string subset_str;
275 string product_str;
276 string other_str;
277 bool st_only = true;
278
279 read_ecs_metadata(file_id,st_str,core_str,arch_str,xml_str, subset_str,product_str,other_str,st_only);
280 if(""==st_str) {
281 string msg =
282 "unable to obtain the HDF-EOS5 struct metadata ";
283 throw InternalErr(__FILE__, __LINE__, msg);
284 }
285
286 bool is_check_nameclashing = HDF5RequestHandler::get_check_name_clashing();
287
288 bool is_add_path_attrs = HDF5RequestHandler::get_add_path_attrs();
289
290 EOS5File *f = nullptr;
291 try {
292 f = new EOS5File(filename.c_str(),file_id);
293 }
294 catch(...) {
295 throw InternalErr(__FILE__,__LINE__,"Cannot allocate the file object.");
296 }
297 bool include_attr = true;
298
299 // The first "try-catch" block will use the parsed info.
300 try {
301
302 HE5Parser p;
303 HE5Checker c;
304 he5dds_scan_string(st_str.c_str());
305
306 he5ddsparse(&p);
307 he5ddslex_destroy();
308 p.add_projparams(st_str);
309#if 0
310 //p.print();
311 // cerr<<"main loop p.za_list.size() = "<<p.za_list.size() <<endl;
312#endif
313
314 if (c.check_grids_unknown_parameters(&p)) {
315             throw InternalErr("Unknown HDF-EOS5 grid parameters found in the file");
316 }
317
318 if (c.check_grids_missing_projcode(&p)) {
319             throw InternalErr("The HDF-EOS5 file is missing the projection code.");
320 }
321 if (c.check_grids_support_projcode(&p)) {
322             throw InternalErr("The current projection code is not supported");
323 }
324 c.set_grids_missing_pixreg_orig(&p);
325
326 bool grids_mllcv = c.check_grids_multi_latlon_coord_vars(&p);
327
328 f->Retrieve_H5_Info(filename.c_str(),file_id,include_attr);
329 f->Adjust_EOS5Dim_Info(&p);
330 f->Add_EOS5File_Info(&p, grids_mllcv);
331 f->Add_Dim_Name(&p);
332 }
333 catch (HDF5CF::Exception &e){
334 delete f;
335 throw InternalErr(e.what());
336 }
337 catch(...) {
338 delete f;
339 throw;
340 }
341
342 try {
345 f->Handle_CVar();
347 f->Handle_Unsupported_Dtype(include_attr);
348
349 // Remove unsupported dataspace
350 f->Handle_Unsupported_Dspace(include_attr);
351
352 // Need to retrieve the attribute values.
354
355
356 // Handle other unsupported objects,
357 // currently it mainly generates the info. for the
358         // unsupported objects other than datatype, dataspace, links and named datatype.
359 // This function needs to be called after retrieving supported attributes.
360 f->Handle_Unsupported_Others(include_attr);
361
362 // Add/adjust CF attributes
363 f->Adjust_Attr_Info();
364 f->Adjust_Obj_Name();
365 f->Flatten_Obj_Name(include_attr);
366 if (true == is_check_nameclashing)
367 f->Handle_Obj_NameClashing(include_attr);
369
370#if 0
371 //f->Adjust_Dim_Name();
372 //if(true == is_check_nameclashing)
373 // f->Handle_DimNameClashing();
374#endif
375
376 // Add supplemental attributes
377 f->Add_Supplement_Attrs(is_add_path_attrs);
378
379 // Handle coordinate attributes
380 f->Handle_Coor_Attr();
382 }
383 catch (HDF5CF::Exception &e){
384 delete f;
385 throw InternalErr(e.what());
386 }
387
388 // Generate DAS for the EOS5
389 try {
390 gen_eos5_cfdas(das,file_id,f);
391 }
392 catch(...) {
393 delete f;
394 throw;
395 }
396
397 delete f;
398
399}
400
401// Generate DDS for the EOS5
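// Add a DAP variable for every regular variable and coordinate variable collected in
// the EOS5File object; for grids whose latitude is missing and whose projection is not
// geographic, also add the CF grid_mapping variables.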
402void gen_eos5_cfdds(DDS &dds, const HDF5CF::EOS5File *f) {
403
404 BESDEBUG("h5","Coming to HDF-EOS5 products DDS generation function gen_eos5_cfdds "<<endl);
405 const vector<HDF5CF::Var *>& vars = f->getVars();
406 const vector<HDF5CF::EOS5CVar *>& cvars = f->getCVars();
407 const string filename = f->getPath();
408 const hid_t file_id = f->getFileID();
409
410 // Read Variable info.
411 for (const auto &var:vars) {
412 BESDEBUG("h5","variable full path= "<< var->getFullPath() <<endl);
413 gen_dap_onevar_dds(dds,var,file_id,filename);
414 }
415
416 for (const auto &cvar:cvars) {
417 BESDEBUG("h5","variable full path= "<< cvar->getFullPath() <<endl);
418 gen_dap_oneeos5cvar_dds(dds,cvar,file_id,filename);
419 }
420
421 // We need to provide grid_mapping info. for multiple grids.
422     // Here cv_lat_miss_index represents the missing latitude (HDF-EOS grid without the latitude field) CV index.
423 // This index is used to create the grid_mapping variable for different grids.
424 unsigned short cv_lat_miss_index = 1;
425 for (const auto &cvar:cvars) {
426 if(cvar->getCVType() == CV_LAT_MISS) {
427 if(cvar->getProjCode() != HE5_GCTP_GEO) {
428 // Here we need to add grid_mapping variables for each grid
429                 // for projections other than sinusoidal, since attribute values for LAMAZ and PS
430 // are different for each grid.
431 gen_dap_oneeos5cf_dds(dds,cvar);
432 add_cf_grid_mapinfo_var(dds,cvar->getProjCode(),cv_lat_miss_index);
433 cv_lat_miss_index++;
434 }
435 }
436 }
437}
438
439void gen_dap_oneeos5cf_dds(DDS &dds,const HDF5CF::EOS5CVar* cvar) {
440
441 BESDEBUG("h5","Coming to gen_dap_oneeos5cf_dds() "<<endl);
442
443 float cv_point_lower = cvar->getPointLower();
444 float cv_point_upper = cvar->getPointUpper();
445 float cv_point_left = cvar->getPointLeft();
446 float cv_point_right = cvar->getPointRight();
447 EOS5GridPCType cv_proj_code = cvar->getProjCode();
448 const vector<HDF5CF::Dimension *>& dims = cvar->getDimensions();
449 if(dims.size() !=2)
450 throw InternalErr(__FILE__,__LINE__,"Currently we only support the 2-D CF coordinate projection system.");
451 add_cf_grid_cvs(dds,cv_proj_code,cv_point_lower,cv_point_upper,cv_point_left,cv_point_right,dims);
452
453}
454
455void gen_dap_oneeos5cf_das(DAS &das,const vector<HDF5CF::Var*>& vars, const HDF5CF::EOS5CVar* cvar,const unsigned short g_suffix) {
456
457 BESDEBUG("h5","Coming to gen_dap_oneeos5cf_das() "<<endl);
458#if 0
459 float cv_point_lower = cvar->getPointLower();
460 float cv_point_upper = cvar->getPointUpper();
461 float cv_point_left = cvar->getPointLeft();
462 float cv_point_right = cvar->getPointRight();
463#endif
464 EOS5GridPCType cv_proj_code = cvar->getProjCode();
465 const vector<HDF5CF::Dimension *>& dims = cvar->getDimensions();
466
467#if 0
468cerr<<"cv_point_lower is "<<cv_point_lower <<endl;
469cerr<<"cvar name is "<<cvar->getName() <<endl;
470for(vector<HDF5CF::Dimension*>::const_iterator it_d = dims.begin(); it_d != dims.end(); ++it_d)
471 cerr<<"dim name das is "<<(*it_d)->getNewName() <<endl;
472#endif
473
474 if(dims.size() !=2)
475 throw InternalErr(__FILE__,__LINE__,"Currently we only support the 2-D CF coordinate projection system.");
476#if 0
477 add_cf_grid_cv_attrs(das,vars,cv_proj_code,cv_point_lower,cv_point_upper,cv_point_left,cv_point_right,dims,cvar->getParams(),g_suffix);
478#endif
479 add_cf_grid_cv_attrs(das,vars,cv_proj_code,dims,cvar->getParams(),g_suffix);
480
481}
482
483//For EOS5, generate the ignored object info. for the CF option
484void gen_eos5_cf_ignored_obj_info(DAS &das, HDF5CF::EOS5File *f) {
485
486 BESDEBUG("h5","Coming to gen_eos5_cf_ignored_obj_info() "<<endl);
487 AttrTable *at = das.get_table("Ignored_Object_Info");
488 if (nullptr == at)
489 at = das.add_table("Ignored_Object_Info", obtain_new_attr_table()) ;
490
491 at->append_attr("Message","String",f->Get_Ignored_Msg());
492
493
494}
495
496// Generate DDS for EOS5 coordinate variables
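// Coordinate variable (CV) types handled below:
//   CV_EXIST                - the CV exists in the file and is read like a normal array
//   CV_LAT_MISS/CV_LON_MISS - latitude/longitude must be calculated from the projection
//                             parameters stored in StructMetadata
//   CV_NONLATLON_MISS       - a missing 1-D (non-lat/lon) dimension CV generated by the handler
//   CV_SPECIAL              - special products (currently Aura TES) that need their own array class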
497void gen_dap_oneeos5cvar_dds(DDS &dds,const HDF5CF::EOS5CVar* cvar, const hid_t file_id, const string & filename) {
498
499 BESDEBUG("h5","Coming to gen_dap_oneeos5cvar_dds() "<<endl);
500 BaseType *bt = nullptr;
501
502 // TODO: need to handle 64-bit integer for DAP4 CF
503 if(cvar->getType()==H5INT64 || cvar->getType() == H5UINT64)
504 return;
505 switch(cvar->getType()) {
506#define HANDLE_CASE(tid,type) \
507 case tid: \
508 bt = new (type)(cvar->getNewName(),cvar->getFullPath()); \
509 break;
510
511 HANDLE_CASE(H5FLOAT32, HDF5CFFloat32)
512 HANDLE_CASE(H5FLOAT64, HDF5CFFloat64)
513 HANDLE_CASE(H5CHAR,HDF5CFInt16)
514 HANDLE_CASE(H5UCHAR, HDF5CFByte)
515 HANDLE_CASE(H5INT16, HDF5CFInt16)
516 HANDLE_CASE(H5UINT16, HDF5CFUInt16)
517 HANDLE_CASE(H5INT32, HDF5CFInt32)
518 HANDLE_CASE(H5UINT32, HDF5CFUInt32)
519 HANDLE_CASE(H5FSTRING, Str)
520 HANDLE_CASE(H5VSTRING, Str)
521 default:
522 throw InternalErr(__FILE__,__LINE__,"unsupported data type.");
523#undef HANDLE_CASE
524 }
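    // Note: H5CHAR is mapped to HDF5CFInt16 above because DAP2 has no signed 8-bit integer type.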
525
526 if (bt) {
527
528 const vector<HDF5CF::Dimension *>& dims = cvar->getDimensions();
529 vector <HDF5CF::Dimension*>:: const_iterator it_d;
530 vector <size_t> dimsizes;
531 dimsizes.resize(cvar->getRank());
532 for(int i = 0; i <cvar->getRank();i++)
533 dimsizes[i] = (dims[i])->getSize();
534
535
536 if(dims.empty())
537 throw InternalErr(__FILE__,__LINE__,"the coordinate variables cannot be scalar.");
538 switch(cvar->getCVType()) {
539
540 case CV_EXIST:
541 {
542
543#if 0
544for(vector<HDF5CF::Attribute *>::const_iterator it_ra = cvar->getAttributes().begin();
545 it_ra != cvar->getAttributes().end(); ++it_ra) {
546cerr<<"cvar attribute name is "<<(*it_ra)->getNewName() <<endl;
547cerr<<"cvar attribute value type is "<<(*it_ra)->getType() <<endl;
548}
549cerr<<"cvar new name exist at he s5cfdap.cc is "<<cvar->getNewName() <<endl;
550#endif
551 bool is_latlon = cvar->isLatLon();
552 auto ar_unique = make_unique<HDF5CFArray>(cvar->getRank(),
553 file_id,
554 filename,
555 cvar->getType(),
556 dimsizes,
557 cvar->getFullPath(),
558 cvar->getTotalElems(),
559 CV_EXIST,
560 is_latlon,
561 cvar->getCompRatio(),
562 false,
563 cvar->getNewName(),
564 bt);
565 auto ar = ar_unique.get();
566 delete bt;
567
568 for(it_d = dims.begin(); it_d != dims.end(); ++it_d) {
569 if (""==(*it_d)->getNewName())
570 ar->append_dim((int)((*it_d)->getSize()));
571 else
572 ar->append_dim((int)((*it_d)->getSize()), (*it_d)->getNewName());
573 }
574
575 dds.add_var(ar);
576 }
577 break;
578
579 case CV_LAT_MISS:
580 case CV_LON_MISS:
581 {
582
583 auto ar_unique = make_unique<HDFEOS5CFMissLLArray>(cvar->getRank(),
584 filename,
585 file_id,
586 cvar->getFullPath(),
587 cvar->getCVType(),
588 cvar->getPointLower(),
589 cvar->getPointUpper(),
590 cvar->getPointLeft(),
591 cvar->getPointRight(),
592 cvar->getPixelReg(),
593 cvar->getOrigin(),
594 cvar->getProjCode(),
595 cvar->getParams(),
596 cvar->getZone(),
597 cvar->getSphere(),
598 cvar->getXDimSize(),
599 cvar->getYDimSize(),
600 cvar->getNewName(),
601 bt);
602 auto ar = ar_unique.get();
603 delete bt;
604#if 0
605cerr<<"cvar zone here is "<<cvar->getZone() <<endl;
606cerr<<"cvar Sphere here is "<<cvar->getSphere() <<endl;
607cerr<<"cvar getParams here 1 is "<<cvar->getParams()[0]<<endl;
608#endif
609 for(it_d = dims.begin(); it_d != dims.end(); ++it_d) {
610 if (""==(*it_d)->getNewName())
611 ar->append_dim((int)((*it_d)->getSize()));
612 else
613 ar->append_dim((int)((*it_d)->getSize()), (*it_d)->getNewName());
614 }
615
616 dds.add_var(ar);
617 }
618 break;
619
620 case CV_NONLATLON_MISS:
621 {
622
623 if (cvar->getRank() !=1) {
624 delete bt;
625 throw InternalErr(__FILE__, __LINE__, "The rank of missing Z dimension field must be 1");
626 }
627 auto nelem = (int)((cvar->getDimensions()[0])->getSize());
628
629 auto ar_unique = make_unique<HDFEOS5CFMissNonLLCVArray>(cvar->getRank(),
630 nelem,
631 cvar->getNewName(),
632 bt);
633 auto ar = ar_unique.get();
634 delete bt;
635
636 for(it_d = dims.begin(); it_d != dims.end(); it_d++) {
637 if (""==(*it_d)->getNewName())
638 ar->append_dim((int)((*it_d)->getSize()));
639 else
640 ar->append_dim((int)((*it_d)->getSize()), (*it_d)->getNewName());
641 }
642 dds.add_var(ar);
643 }
644 break;
645 case CV_SPECIAL:
646         // Currently we only support Aura TES files. May need to revise when more
647         // special products appear. KY 2012-2-3
648 {
649
650 if (cvar->getRank() !=1) {
651 delete bt;
652 throw InternalErr(__FILE__, __LINE__, "The rank of missing Z dimension field must be 1");
653 }
654 auto nelem = (int)((cvar->getDimensions()[0])->getSize());
655 auto ar_unique = make_unique<HDFEOS5CFSpecialCVArray>(
656 cvar->getRank(),
657 filename,
658 file_id,
659 cvar->getType(),
660 nelem,
661 cvar->getFullPath(),
662 cvar->getNewName(),
663 bt);
664 auto ar = ar_unique.get();
665 delete bt;
666
667 for(it_d = dims.begin(); it_d != dims.end(); ++it_d){
668 if (""==(*it_d)->getNewName())
669 ar->append_dim((int)((*it_d)->getSize()));
670 else
671 ar->append_dim((int)((*it_d)->getSize()), (*it_d)->getNewName());
672 }
673 dds.add_var(ar);
674 }
675 break;
676 case CV_MODIFY:
677 default:
678 delete bt;
679 throw InternalErr(__FILE__,__LINE__,"Unsupported coordinate variable type.");
680 }
681
682 }
683
684}
685
686// Generate EOS5 DAS
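// Besides mapping group and variable attributes to DAS attribute tables, this routine
// parses the ECS metadata strings (StructMetadata, CoreMetadata, etc.) into attribute
// tables unless disabled by the corresponding BES keys, and records unlimited
// dimension names under DODS_EXTRA.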
687void gen_eos5_cfdas(DAS &das, hid_t file_id, HDF5CF::EOS5File *f) {
688
689 BESDEBUG("h5","Coming to HDF-EOS5 products DAS generation function gen_eos5_cfdas "<<endl);
690
691 // First check if this is for generating the ignored object info.
692 if(true == f->Get_IgnoredInfo_Flag()) {
693 gen_eos5_cf_ignored_obj_info(das, f);
694 return;
695 }
696
697 const vector<HDF5CF::Var *>& vars = f->getVars();
698 const vector<HDF5CF::EOS5CVar *>& cvars = f->getCVars();
699 const vector<HDF5CF::Group *>& grps = f->getGroups();
700 const vector<HDF5CF::Attribute *>& root_attrs = f->getAttributes();
701
702#if 0
705#endif
706
707     // Handling the file attributes (attributes under the root group).
708 // The table name is "HDF_GLOBAL".
709 if (false == root_attrs.empty()) {
710 AttrTable *at = das.get_table(FILE_ATTR_TABLE_NAME);
711 if (nullptr == at)
712 at = das.add_table(FILE_ATTR_TABLE_NAME, obtain_new_attr_table());
713
714 for (const auto &root_attr:root_attrs)
715 gen_dap_oneobj_das(at,root_attr,nullptr);
716
717 }
718
719 if (false == grps.empty()) {
720 for (const auto &grp:grps) {
721 AttrTable *at = das.get_table(grp->getNewName());
722 if (nullptr == at)
723 at = das.add_table(grp->getNewName(), obtain_new_attr_table());
724
725 for (const auto &attr:grp->getAttributes()) {
726#if 0
727 //gen_dap_oneobj_das(at,attr,nullptr);
728#endif
729 // TODO: ADDING a BES KEY
730 if(attr->getNewName()=="Conventions" &&(grp->getNewName() == "HDFEOS_ADDITIONAL_FILE_ATTRIBUTES")
731 && (true==HDF5RequestHandler::get_eos5_rm_convention_attr_path())) {
732 AttrTable *at_das = das.get_table(FILE_ATTR_TABLE_NAME);
733 if (nullptr == at_das)
734 at_das = das.add_table(FILE_ATTR_TABLE_NAME, obtain_new_attr_table());
735 gen_dap_oneobj_das(at_das,attr,nullptr);
736 }
737 else
738 gen_dap_oneobj_das(at,attr,nullptr);
739 }
740 }
741 }
742
743 for (const auto &var:vars) {
744 if (false == (var->getAttributes().empty())) {
745
746 // TODO: Need to handle 64-bit int support for DAP4 CF.
747 if(H5INT64 == var->getType() || H5UINT64 == var->getType()){
748 continue;
749 }
750
751 AttrTable *at = das.get_table(var->getNewName());
752 if (nullptr == at)
753 at = das.add_table(var->getNewName(), obtain_new_attr_table());
754
755 for (const auto &attr:var->getAttributes())
756 gen_dap_oneobj_das(at,attr,var);
757 }
758 }
759
760 for (const auto &cvar:cvars) {
761
762 if (false == (cvar->getAttributes().empty())) {
763
764 // TODO: Need to handle 64-bit int support for DAP4 CF.
765 if(H5INT64 == cvar->getType() || H5UINT64 == cvar->getType()){
766 continue;
767 }
768
769 AttrTable *at = das.get_table(cvar->getNewName());
770 if (nullptr == at)
771 at = das.add_table(cvar->getNewName(), obtain_new_attr_table());
772
773 for (const auto &attr:cvar->getAttributes())
774 gen_dap_oneobj_das(at,attr,cvar);
775
776 }
777 }
778
779 // Add CF 1-D projection variables
780 unsigned short cv_lat_miss_index = 1;
781 // This code block will add grid_mapping attribute info. to corresponding variables.
782 for (const auto &cvar:cvars) {
783 if(cvar->getCVType() == CV_LAT_MISS) {
784 if(cvar->getProjCode() != HE5_GCTP_GEO) {
785 gen_dap_oneeos5cf_das(das,vars,cvar,cv_lat_miss_index);
786 cv_lat_miss_index++;
787 }
788 }
789 }
790
791 for (const auto &cvar:cvars) {
792 if(cvar->getProjCode() == HE5_GCTP_LAMAZ) {
793 if(cvar->getCVType() == CV_LAT_MISS || cvar->getCVType() == CV_LON_MISS) {
794 AttrTable *at = das.get_table(cvar->getNewName());
795 if (nullptr == at)
796 at = das.add_table(cvar->getNewName(), obtain_new_attr_table());
797 if(cvar->getCVType() == CV_LAT_MISS)
798 add_ll_valid_range(at,true);
799 else
800 add_ll_valid_range(at,false);
801 }
802 }
803 }
804
805
806 bool disable_ecsmetadata = HDF5RequestHandler::get_disable_ecsmeta();
807
808 if(disable_ecsmetadata == false) {
809
810 // To keep the backward compatibility with the old handler,
811 // we parse the special ECS metadata to DAP attributes
812
813 string st_str;
814 string core_str;
815 string arch_str;
816 string xml_str;
817 string subset_str;
818 string product_str;
819 string other_str;
820 bool st_only = false;
821
822 read_ecs_metadata(file_id, st_str, core_str, arch_str, xml_str,
823 subset_str, product_str, other_str, st_only);
824
825#if 0
826if(st_str!="") "h5","Final structmetadata "<<st_str <<endl;
827if(core_str!="") "h5","Final coremetadata "<<core_str <<endl;
828if(arch_str!="") "h5","Final archivedmetadata "<<arch_str <<endl;
829if(xml_str!="") "h5","Final xmlmetadata "<<xml_str <<endl;
830if(subset_str!="") "h5","Final subsetmetadata "<<subset_str <<endl;
831if(product_str!="") "h5","Final productmetadata "<<product_str <<endl;
832if(other_str!="") "h5","Final othermetadata "<<other_str <<endl;
833
834#endif
835 if(st_str != ""){
836
837#if 0
838 string check_disable_smetadata_key ="H5.DisableStructMetaAttr";
839 bool is_check_disable_smetadata = false;
840#endif
841 bool is_check_disable_smetadata = HDF5RequestHandler::get_disable_structmeta();
842
843 if (false == is_check_disable_smetadata) {
844
845 AttrTable *at = das.get_table("StructMetadata");
846 if (nullptr == at)
847 at = das.add_table("StructMetadata", obtain_new_attr_table());
848 parser_arg arg(at);
849
850 he5das_scan_string(st_str.c_str());
851 if (he5dasparse(&arg) != 0
852 || false == arg.status()){
853
854 ERROR_LOG("HDF-EOS5 parse error while processing a StructMetadata HDFEOS attribute.");
855 }
856
857 he5daslex_destroy();
858
859 }
860 }
861
862 if(core_str != ""){
863 AttrTable *at = das.get_table("CoreMetadata");
864 if (nullptr == at)
865 at = das.add_table("CoreMetadata", obtain_new_attr_table());
866 parser_arg arg(at);
867 he5das_scan_string(core_str.c_str());
868 if (he5dasparse(&arg) != 0
869 || false == arg.status()){
870
871 ERROR_LOG("HDF-EOS5 parse error while processing a CoreMetadata HDFEOS attribute.");
872 }
873
874 he5daslex_destroy();
875 }
876 if(arch_str != ""){
877 AttrTable *at = das.get_table("ArchiveMetadata");
878 if (nullptr == at)
879 at = das.add_table("ArchiveMetadata", obtain_new_attr_table());
880 parser_arg arg(at);
881 he5das_scan_string(arch_str.c_str());
882 if (he5dasparse(&arg) != 0 || false == arg.status()){
883 ERROR_LOG("HDF-EOS5 parse error while processing a ArchiveMetadata HDFEOS attribute.");
884 }
885 he5daslex_destroy();
886 }
887
888     // The XML attribute includes double quotes("), which will choke the netCDF Java library.
889     // So we replace the double quote(") with &quote. This is currently the OPeNDAP way.
890     // The XML attribute cannot be parsed, so we just pass the string.
891 if(xml_str != ""){
892 AttrTable *at = das.get_table("XMLMetadata");
893 if (nullptr == at)
894 at = das.add_table("XMLMetadata", obtain_new_attr_table());
895 HDF5CFDAPUtil::replace_double_quote(xml_str);
896 at->append_attr("Contents","String",xml_str);
897 }
898
899 // SubsetMetadata and ProductMetadata exist in HDF-EOS2 files.
900     // So far we haven't found any such metadata in NASA HDF-EOS5 files,
901 // but will keep an eye on it. KY 2012-3-6
902 if(subset_str != ""){
903 AttrTable *at = das.get_table("SubsetMetadata");
904 if (nullptr == at)
905 at = das.add_table("SubsetMetadata", obtain_new_attr_table());
906 parser_arg arg(at);
907 he5das_scan_string(subset_str.c_str());
908 if (he5dasparse(&arg) != 0 || false == arg.status()) {
909 ERROR_LOG("HDF-EOS5 parse error while processing a SubsetMetadata HDFEOS attribute.");
910 }
911 he5daslex_destroy();
912 }
913 if(product_str != ""){
914 AttrTable *at = das.get_table("ProductMetadata");
915 if (nullptr == at)
916 at = das.add_table("ProductMetadata", obtain_new_attr_table());
917 parser_arg arg(at);
918 he5das_scan_string(product_str.c_str());
919 if (he5dasparse(&arg) != 0 || false == arg.status()){
920 ERROR_LOG("HDF-EOS5 parse error while processing a ProductMetadata HDFEOS attribute.");
921 }
922 he5daslex_destroy();
923 }
924
925 // All other metadata under "HDF-EOS Information" will not be
926 // parsed since we don't know how to parse them.
927 // We will simply pass a string to the DAS.
928 if (other_str != ""){
929 AttrTable *at = das.get_table("OtherMetadata");
930 if (nullptr == at)
931 at = das.add_table("OtherMetadata", obtain_new_attr_table());
932 at->append_attr("Contents","String",other_str);
933 }
934
935 }
936 // CHECK ALL UNLIMITED DIMENSIONS from the coordinate variables based on the names.
937 if(f->HaveUnlimitedDim() == true) {
938
939 AttrTable *at = das.get_table("DODS_EXTRA");
940 if (nullptr == at)
941 at = das.add_table("DODS_EXTRA", obtain_new_attr_table());
942 string unlimited_names;
943
944 for (const auto &cvar: cvars) {
945#if 0
946 bool has_unlimited_dim = false;
947#endif
948 // Check unlimited dimension names.
949 for (const auto &dim:cvar->getDimensions()) {
950
951 // Currently we only check one unlimited dimension, which is the most
952                 // common case. When we receive the conventions from JG, we will add
953                 // support for multiple unlimited dimensions. KY 2016-02-09
954 if(dim->HaveUnlimitedDim() == true) {
955
956 if(unlimited_names=="") {
957 unlimited_names = dim->getNewName();
958 at->append_attr("Unlimited_Dimension","String",unlimited_names);
959 }
960 else {
961 if(unlimited_names.rfind(dim->getNewName()) == string::npos) {
962 unlimited_names = unlimited_names+" "+dim->getNewName();
963 at->append_attr("Unlimited_Dimension","String",dim->getNewName());
964 }
965 }
966 }
967
968 }
969
970#if 0
971 //if(true == has_unlimited_dim)
972 // break;
973#endif
974 }
975#if 0
976 //if(unlimited_names!="")
977 // at->append_attr("Unlimited_Dimension","String",unlimited_names);
978#endif
979 }
980
981}
982
983// Read ECS metadata
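// The routine makes two passes over the "/HDFEOS INFORMATION" group: the first pass
// counts how many objects fall into each metadata category (StructMetadata,
// CoreMetadata, ...) and whether they carry a numeric suffix; the second pass reads
// each dataset and concatenates the pieces, in suffix order, into one string per
// category.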
984void read_ecs_metadata(hid_t s_file_id,
985 string &total_strmeta_value,
986 string &total_coremeta_value,
987 string &total_archmeta_value,
988 string &total_xmlmeta_value,
989 string &total_submeta_value,
990 string &total_prometa_value,
991 string &total_othermeta_value,
992 bool s_st_only) {
993
994 BESDEBUG("h5","Coming to read_ecs_metadata() "<<endl);
995 string ecs_group = "/HDFEOS INFORMATION";
996 hid_t ecs_grp_id = -1;
997 if ((ecs_grp_id = H5Gopen(s_file_id, ecs_group.c_str(),H5P_DEFAULT))<0) {
998 string msg =
999 "h5_ecs_meta: unable to open the HDF5 group ";
1000 msg +=ecs_group;
1001 throw InternalErr(__FILE__, __LINE__, msg);
1002 }
1003
1004 H5G_info_t g_info;
1005 hsize_t nelems = 0;
1006
1007 if (H5Gget_info(ecs_grp_id,&g_info) <0) {
1008 string msg =
1009 "h5_ecs_meta: unable to obtain the HDF5 group info. for ";
1010 msg +=ecs_group;
1011 H5Gclose(ecs_grp_id);
1012 throw InternalErr(__FILE__, __LINE__, msg);
1013 }
1014
1015 nelems = g_info.nlinks;
1016
1017 ssize_t oname_size = 0;
1018#if 0
1019 int cur_archmeta_suffix = 0;
1020 int cur_coremeta_suffix = 0;
1021 int cur_strmeta_suffix = 0;
1022 int cur_xmlmeta_suffix = 0;
1023#endif
1024
1025 int archmeta_num = -1;
1026 int coremeta_num = -1;
1027 int xmlmeta_num = -1;
1028 int strmeta_num = -1;
1029 int submeta_num = -1;
1030 int prometa_num = -1;
1031
1032    // Initialize the total number for each metadata type.
1033 int archmeta_num_total = 0;
1034 int coremeta_num_total = 0;
1035 int xmlmeta_num_total = 0;
1036 int strmeta_num_total = 0;
1037 int submeta_num_total = 0;
1038 int prometa_num_total = 0;
1039 int othermeta_num_total = 0;
1040
1041 bool archmeta_no_suffix = true;
1042 bool coremeta_no_suffix = true;
1043 bool strmeta_no_suffix = true;
1044 bool xmlmeta_no_suffix = true;
1045 bool submeta_no_suffix = true;
1046 bool prometa_no_suffix = true;
1047
1048 // Define a vector of string to hold all dataset names.
1049 vector<string> s_oname(nelems);
1050
1051    // Define an EOS5Metadata array that describes the metadata type for each object.
1052 // We initialize the value to OtherMeta.
1053 EOS5Metadata metatype[nelems];
1054
1055 for (unsigned int i =0; i<nelems; i++)
1056 metatype[i] = OtherMeta;
1057
1058 for (hsize_t i = 0; i < nelems; i++) {
1059
1060 // Query the length of the object name.
1061 oname_size =
1062 H5Lget_name_by_idx(ecs_grp_id,".",H5_INDEX_NAME,H5_ITER_NATIVE,i,nullptr,
1063 0, H5P_DEFAULT);
1064 if (oname_size <= 0) {
1065 string msg = "hdf5 object name error from: ";
1066 msg += ecs_group;
1067 H5Gclose(ecs_grp_id);
1068 throw InternalErr(__FILE__, __LINE__, msg);
1069 }
1070
1071 // Obtain the name of the object.
1072 vector<char> oname(oname_size + 1);
1073 if (H5Lget_name_by_idx(ecs_grp_id,".",H5_INDEX_NAME,H5_ITER_NATIVE,i,oname.data(),
1074 (size_t)(oname_size+1), H5P_DEFAULT)<0){
1075 string msg = "hdf5 object name error from: ";
1076 msg += ecs_group;
1077 H5Gclose(ecs_grp_id);
1078 throw InternalErr(__FILE__, __LINE__, msg);
1079 }
1080
1081        // Check if this object is an HDF5 dataset; if not, throw an error.
1082 // First, check if it is the hard link or the soft link
1083 H5L_info_t linfo;
1084 if (H5Lget_info(ecs_grp_id,oname.data(),&linfo,H5P_DEFAULT)<0) {
1085 string msg = "hdf5 link name error from: ";
1086 msg += ecs_group;
1087 H5Gclose(ecs_grp_id);
1088 throw InternalErr(__FILE__, __LINE__, msg);
1089 }
1090
1091        // If this is a soft link, it is not supported; throw an error.
1092 if (linfo.type == H5L_TYPE_SOFT){
1093 string msg = "hdf5 link name error from: ";
1094 msg += ecs_group;
1095 H5Gclose(ecs_grp_id);
1096 throw InternalErr(__FILE__, __LINE__, msg);
1097 }
1098
1099 // Obtain the object type
1100 H5O_info_t oinfo;
1101 if (H5OGET_INFO_BY_IDX(ecs_grp_id, ".", H5_INDEX_NAME, H5_ITER_NATIVE,
1102 i, &oinfo, H5P_DEFAULT)<0) {
1103 string msg = "Cannot obtain the object info ";
1104 msg += ecs_group;
1105 H5Gclose(ecs_grp_id);
1106 throw InternalErr(__FILE__, __LINE__, msg);
1107 }
1108
1109 if(oinfo.type != H5O_TYPE_DATASET) {
1110 string msg = "hdf5 link name error from: ";
1111 msg += ecs_group;
1112 H5Gclose(ecs_grp_id);
1113 throw InternalErr(__FILE__, __LINE__, msg);
1114 }
1115
1116        // We want to remove the last '\0' character added by C.
1117 string s_one_oname(oname.begin(),oname.end()-1);
1118 s_oname[i] = s_one_oname;
1119
1120 // Calculate how many elements we have for each category(StructMetadata, CoreMetadata, etc.)
1121 if (((s_one_oname.find("StructMetadata"))==0) ||
1122 ((s_one_oname.find("structmetadata"))==0)){
1123
1124 metatype[i] = StructMeta;
1125
1126            // Do we have a suffix for the metadata?
1127            // If this metadata doesn't have any suffix, it should only reach this branch once.
1128            // That's why, when checking the first time, no_suffix is always true.
1129            // If we have already found that it doesn't have any suffix,
1130            // it should not reach this branch again; if it does, throw an error.
1131 if (false == strmeta_no_suffix) {
1132 string msg = "StructMetadata/structmetadata without suffix should only appear once. ";
1133 H5Gclose(ecs_grp_id);
1134 throw InternalErr(__FILE__, __LINE__, msg);
1135 }
1136
1137 else if(strmeta_num_total >0)
1138 strmeta_num_total++;
1139 else { // either no suffix or the first time to loop the one having the suffix.
1140 if ((0 == s_one_oname.compare("StructMetadata"))||
1141 (0 == s_one_oname.compare("structmetadata")))
1142 strmeta_no_suffix = false;
1143 else strmeta_num_total++;
1144 }
1145#if 0
1146"h5","strmeta_num_total= "<<strmeta_num_total <<endl;
1147if(strmeta_no_suffix) "h5","structmeta data has the suffix" <<endl;
1148else "h5","structmeta data doesn't have the suffix" <<endl;
1149#endif
1150 }
1151
1152 if(false == s_st_only) {
1153
1154 if ((0 == (s_one_oname.find("CoreMetadata"))) ||
1155 (0 == (s_one_oname.find("coremetadata")))){
1156
1157 metatype[i] = CoreMeta;
1158
1159 // Do we have suffix for the metadata?
1160 // When checking the first time, no_suffix is always true.
1161 // If we have already found that it doesn't have any suffix,
1162                // it should not reach this branch again; if it does, throw an error.
1163 if (false == coremeta_no_suffix) {
1164 string msg = "CoreMetadata/coremetadata without suffix should only appear once. ";
1165 H5Gclose(ecs_grp_id);
1166 throw InternalErr(__FILE__, __LINE__, msg);
1167 }
1168
1169 else if(coremeta_num_total >0)
1170 coremeta_num_total++;
1171 else { // either no suffix or the first time to loop the one having the suffix.
1172 // If no suffix is true, it should be out of the loop. In case it comes
1173                    // to the loop again, we set "coremeta_no_suffix" to false so an error
1174 // can be thrown. This is counter-intuitive. Hopefully people can understand it.
1175 if ((0 == s_one_oname.compare("CoreMetadata")) ||
1176 (0 == s_one_oname.compare("coremetadata")))
1177 coremeta_no_suffix = false;
1178 else coremeta_num_total++;
1179 }
1180#if 0
1181"h5","coremeta_num_total= "<<coremeta_num_total <<endl;
1182if(coremeta_no_suffix) "h5","coreuctmeta data has the suffix" <<endl;
1183else "h5","coremeta data doesn't have the suffix" <<endl;
1184#endif
1185 }
1186
1187 // OMI has the metadata name as "ArchiveMetadata.0"
1188 else if ((0 == (s_one_oname.find("ArchivedMetadata"))) ||
1189 (0 == (s_one_oname.find("archivedmetadata"))) ||
1190 (0 == (s_one_oname.find("ArchiveMetadata"))) ||
1191 (0 == (s_one_oname.find("archivemetadata")))){
1192
1193 metatype[i] = ArchivedMeta;
1194 // Do we have suffix for the metadata?
1195 // When checking the first time, no_suffix is always true.
1196 // If we have already found that it doesn't have any suffix,
1197                // it should not reach this branch again; if it does, throw an error.
1198 if (false == archmeta_no_suffix) {
1199 string msg = "archivedmetadata/ArchivedMetadata without suffix should only appear once. ";
1200 H5Gclose(ecs_grp_id);
1201 throw InternalErr(__FILE__, __LINE__, msg);
1202 }
1203
1204 else if(archmeta_num_total >0)
1205 archmeta_num_total++;
1206 else { // either no suffix or the first time to loop the one having the suffix.
1207 if ((0 == s_one_oname.compare("ArchivedMetadata"))||
1208 (0 == s_one_oname.compare("archivedmetadata")) ||
1209 (0 == s_one_oname.compare("archivemetadata")) ||
1210 (0 == s_one_oname.compare("ArchiveMetadata")))
1211 archmeta_no_suffix = false;
1212 else
1213 archmeta_num_total++;
1214 }
1215#if 0
1216"h5","archmeta_num_total= "<<archmeta_num_total <<endl;
1217if(archmeta_no_suffix) "h5","archuctmeta data has the suffix" <<endl;
1218else "h5","archmeta data doesn't have the suffix" <<endl;
1219#endif
1220
1221 }
1222
1223 else if (((s_one_oname.find("SubsetMetadata"))==0) ||
1224 ((s_one_oname.find("subsetmetadata"))==0)){
1225
1226 metatype[i] = SubsetMeta;
1227 // Do we have suffix for the metadata?
1228 // When checking the first time, no_suffix is always true.
1229 // If we have already found that it doesn't have any suffix,
1230                // it should not reach this branch again; if it does, throw an error.
1231 if (false == submeta_no_suffix) {
1232 H5Gclose(ecs_grp_id);
1233 string msg = "submetadata/SubMetadata without suffix should only appear once. ";
1234 throw InternalErr(__FILE__, __LINE__, msg);
1235 }
1236
1237 else if(submeta_num_total >0)
1238 submeta_num_total++;
1239 else { // either no suffix or the first time to loop the one having the suffix.
1240 if ((0 == s_one_oname.compare("SubsetMetadata"))||
1241 (0 == s_one_oname.compare("subsetmetadata")))
1242 submeta_no_suffix = false;
1243 else submeta_num_total++;
1244 }
1245#if 0
1246"h5","submeta_num_total= "<<submeta_num_total <<endl;
1247if(submeta_no_suffix) "h5","subuctmeta data has the suffix" <<endl;
1248else "h5","submeta data doesn't have the suffix" <<endl;
1249#endif
1250
1251 }
1252
1253 else if ((0 == (s_one_oname.find("XmlMetadata"))) ||
1254 (0 == (s_one_oname.find("xmlmetadata")))){
1255
1256 metatype[i] = XMLMeta;
1257
1258 // Do we have suffix for the metadata?
1259 // When checking the first time, no_suffix is always true.
1260 // If we have already found that it doesn't have any suffix,
1261                // it should not reach this branch again; if it does, throw an error.
1262 if (false == xmlmeta_no_suffix) {
1263 H5Gclose(ecs_grp_id);
1264 string msg = "xmlmetadata/Xmlmetadata without suffix should only appear once. ";
1265 throw InternalErr(__FILE__, __LINE__, msg);
1266 }
1267
1268 else if(xmlmeta_num_total >0)
1269 xmlmeta_num_total++;
1270 else { // either no suffix or the first time to loop the one having the suffix.
1271 if ((0 == s_one_oname.compare("XmlMetadata"))||
1272 (0 == s_one_oname.compare("xmlmetadata")))
1273 xmlmeta_no_suffix = false;
1274 else xmlmeta_num_total++;
1275 }
1276#if 0
1277"h5","xmlmeta_num_total= "<<xmlmeta_num_total <<endl;
1278if(xmlmeta_no_suffix) "h5","xmluctmeta data doesn't have the suffix" <<endl;
1279else "h5","xmlmeta data has the suffix" <<endl;
1280#endif
1281
1282 }
1283
1284 else if ((0 == (s_one_oname.find("ProductMetadata"))) ||
1285 (0 == (s_one_oname.find("productmetadata")))){
1286
1287 metatype[i] = ProductMeta;
1288 // Do we have suffix for the metadata?
1289 // When checking the first time, no_suffix is always true.
1290 // If we have already found that it doesn't have any suffix,
1291                // it should not reach this branch again; if it does, throw an error.
1292 if (!prometa_no_suffix) {
1293 H5Gclose(ecs_grp_id);
1294 string msg = "productmetadata/ProductMetadata without suffix should only appear once. ";
1295 throw InternalErr(__FILE__, __LINE__, msg);
1296 }
1297
1298 else if(prometa_num_total >0) prometa_num_total++;
1299 else { // either no suffix or the first time to loop the one having the suffix.
1300 if ((0 == s_one_oname.compare("ProductMetadata"))||
1301 (0 == s_one_oname.compare("productmetadata")))
1302 prometa_no_suffix = false;
1303 else prometa_num_total++;
1304 }
1305
1306 }
1307
1308 // All other metadata will be merged to one string, no need to check the name.
1309 else othermeta_num_total++;
1310 }
1311
1312 oname.clear();
1313 s_one_oname.clear();
1314 }
1315
1316 // Define a vector of string to hold StructMetadata.
1317 // StructMetadata must exist for a valid HDF-EOS5 file.
1318 vector<string> strmeta_value;
1319 if (strmeta_num_total <= 0) {
1320        string msg = "Cannot find StructMetadata in the HDF-EOS5 file. ";
1321 H5Gclose(ecs_grp_id);
1322 throw InternalErr(__FILE__, __LINE__, msg);
1323 }
1324 else {
1325 strmeta_value.resize(strmeta_num_total);
1326 for (int i = 0; i < strmeta_num_total; i++)
1327 strmeta_value[i]="";
1328 }
1329
1330 // All other metadata are optional.
1331 // Define a vector of string to hold archivedmetadata.
1332 vector<string> archmeta_value;
1333 if (archmeta_num_total >0) {
1334 archmeta_value.resize(archmeta_num_total);
1335 for (int i = 0; i < archmeta_num_total; i++)
1336 archmeta_value[i]="";
1337 }
1338
1339 // Define a vector of string to hold coremetadata.
1340 vector<string> coremeta_value;
1341 if (coremeta_num_total >0) {
1342 coremeta_value.resize(coremeta_num_total);
1343 for (int i = 0; i < coremeta_num_total; i++)
1344 coremeta_value[i]="";
1345 }
1346
1347 // Define a vector of string to hold xmlmetadata.
1348 vector<string> xmlmeta_value;
1349 if (xmlmeta_num_total >0) {
1350 xmlmeta_value.resize(xmlmeta_num_total);
1351 for (int i = 0; i < xmlmeta_num_total; i++)
1352 xmlmeta_value[i]="";
1353 }
1354
1355 // Define a vector of string to hold subsetmetadata.
1356 vector<string> submeta_value;
1357 if (submeta_num_total >0) {
1358 submeta_value.resize(submeta_num_total);
1359 for (int i = 0; i < submeta_num_total; i++)
1360 submeta_value[i]="";
1361 }
1362
1363 // Define a vector of string to hold productmetadata.
1364 vector<string> prometa_value;
1365 if (prometa_num_total >0) {
1366 prometa_value.resize(prometa_num_total);
1367 for (int i = 0; i < prometa_num_total; i++)
1368 prometa_value[i]="";
1369 }
1370
1371 // For all other metadata, we don't need to calculate the value, just append them.
1372
1373 // Now we want to retrieve the metadata value and combine them into one string.
1374 // Here we have to remember the location of every element of the metadata if
1375 // this metadata has a suffix.
1376 for (hsize_t i = 0; i < nelems; i++) {
1377
1378        // The DDS parser only needs to parse the StructMetadata. So if the
1379        // st_only flag is true, we will only read the StructMetadata string.
1380        // StructMetadata is generated by the HDF-EOS5 library, so the
1381        // name "StructMetadata.??" won't change for real struct metadata.
1382        // However, we still assume that somebody may not use the HDF-EOS5
1383        // library to add StructMetadata; the name may be "structmetadata".
1384 if (true == s_st_only &&
1385 (((s_oname[i].find("StructMetadata"))!=0) &&
1386 ((s_oname[i].find("structmetadata"))!=0))){
1387 continue;
1388 }
1389
1390 // Open the dataset, dataspace, datatype, number of elements etc. for this metadata
1391 hid_t s_dset_id = -1;
1392 hid_t s_space_id = -1;
1393 hid_t s_ty_id = -1;
1394 hssize_t s_nelms = -1;
1395 size_t dtype_size = -1;
1396
1397 if ((s_dset_id = H5Dopen(ecs_grp_id,s_oname[i].c_str(),H5P_DEFAULT))<0){
1398 string msg = "Cannot open HDF5 dataset ";
1399 msg += s_oname[i];
1400 H5Gclose(ecs_grp_id);
1401 throw InternalErr(__FILE__, __LINE__, msg);
1402 }
1403
1404 if ((s_space_id = H5Dget_space(s_dset_id))<0) {
1405 string msg = "Cannot open the data space of HDF5 dataset ";
1406 msg += s_oname[i];
1407 H5Dclose(s_dset_id);
1408 H5Gclose(ecs_grp_id);
1409 throw InternalErr(__FILE__, __LINE__, msg);
1410 }
1411
1412 if ((s_ty_id = H5Dget_type(s_dset_id)) < 0) {
1413 string msg = "Cannot get the data type of HDF5 dataset ";
1414 msg += s_oname[i];
1415 H5Sclose(s_space_id);
1416 H5Dclose(s_dset_id);
1417 H5Gclose(ecs_grp_id);
1418 throw InternalErr(__FILE__, __LINE__, msg);
1419 }
1420 if ((s_nelms = H5Sget_simple_extent_npoints(s_space_id))<0) {
1421 string msg = "Cannot get the number of points of HDF5 dataset ";
1422 msg += s_oname[i];
1423 H5Tclose(s_ty_id);
1424 H5Sclose(s_space_id);
1425 H5Dclose(s_dset_id);
1426 H5Gclose(ecs_grp_id);
1427 throw InternalErr(__FILE__, __LINE__, msg);
1428 }
1429 if ((dtype_size = H5Tget_size(s_ty_id))==0) {
1430
1431 string msg = "Cannot get the data type size of HDF5 dataset ";
1432 msg += s_oname[i];
1433 H5Tclose(s_ty_id);
1434 H5Sclose(s_space_id);
1435 H5Dclose(s_dset_id);
1436 H5Gclose(ecs_grp_id);
1437 throw InternalErr(__FILE__, __LINE__, msg);
1438 }
1439
1440 // Obtain the real value of the metadata
1441 vector<char> s_buf(dtype_size*s_nelms +1);
1442
1443 if ((H5Dread(s_dset_id,s_ty_id,H5S_ALL,H5S_ALL,H5P_DEFAULT,s_buf.data()))<0) {
1444
1445 string msg = "Cannot read HDF5 dataset ";
1446 msg += s_oname[i];
1447 H5Tclose(s_ty_id);
1448 H5Sclose(s_space_id);
1449 H5Dclose(s_dset_id);
1450 H5Gclose(ecs_grp_id);
1451 throw InternalErr(__FILE__, __LINE__, msg);
1452 }
1453
1454 // Now we can safely close datatype, data space and dataset IDs.
1455 H5Tclose(s_ty_id);
1456 H5Sclose(s_space_id);
1457 H5Dclose(s_dset_id);
1458
1459
1460 // Convert from the vector<char> to a C++ string.
1461 string tempstr(s_buf.begin(),s_buf.end());
1462 s_buf.clear();
1463 size_t temp_null_pos = tempstr.find_first_of('\0');
1464
1465        // temp_null_pos is the position of the null character, which is the last character of the string,
1466        // so the length of the string before the null is equal to
1467        // temp_null_pos since positions start at 0.
1468 string finstr = tempstr.substr(0,temp_null_pos);
1469
1470 // For the DDS parser, only return StructMetadata
1471 if (StructMeta == metatype[i]) {
1472
1473 // Now obtain the corresponding value in integer type for the suffix. '0' to 0 etc.
1474 try {
1475 strmeta_num = get_metadata_num(s_oname[i]);
1476 }
1477 catch(...) {
1478 H5Gclose(ecs_grp_id);
1479 throw InternalErr(__FILE__,__LINE__,"Obtain structmetadata suffix error.");
1480
1481 }
1482 // This is probably not necessary, since structmetadata may always have a suffix.
1483        // Leave it here just in case the rules change or a file is generated without the HDF-EOS5 library.
1484        // When strmeta_num is -1, it means no suffix for this metadata. So the total structmetadata
1485 // is this string only.
1486 if (-1 == strmeta_num)
1487 total_strmeta_value = finstr;
1488 // strmeta_value at this point should be empty before assigning any values.
1489 else if (strmeta_value[strmeta_num]!="") {
1490 string msg = "The structmeta value array at this index should be empty string ";
1491 H5Gclose(ecs_grp_id);
1492 throw InternalErr(__FILE__, __LINE__, msg);
1493 }
1494 // assign the string vector to this value.
1495 else
1496 strmeta_value[strmeta_num] = finstr;
1497 }
1498
1499 // DAS parser needs all metadata.
1500 if (false == s_st_only &&
1501 (metatype[i] != StructMeta)) {
1502
1503 switch (metatype[i]) {
1504
1505 case CoreMeta:
1506 {
1507 if (coremeta_num_total < 0) {
1508 string msg = "There may be no coremetadata or coremetadata is not counted ";
1509 H5Gclose(ecs_grp_id);
1510 throw InternalErr(__FILE__, __LINE__, msg);
1511
1512 }
1513
1514 try {
1515 coremeta_num = get_metadata_num(s_oname[i]);
1516 }
1517 catch(...) {
1518 H5Gclose(ecs_grp_id);
1519 throw InternalErr(__FILE__,__LINE__,"Obtain coremetadata suffix error.");
1520
1521 }
1522
1523 // when coremeta_num is -1, it means no suffix for this metadata. So the total coremetadata
1524                    // is this string only. Similar cases apply for the rest of the metadata.
1525 if ( -1 == coremeta_num )
1526 total_coremeta_value = finstr;
1527 else if (coremeta_value[coremeta_num]!="") {
1528 string msg = "The coremeta value array at this index should be empty string ";
1529 H5Gclose(ecs_grp_id);
1530 throw InternalErr(__FILE__, __LINE__, msg);
1531 }
1532
1533 // assign the string vector to this value.
1534 else
1535 coremeta_value[coremeta_num] = finstr;
1536 }
1537 break;
1538
1539 case ArchivedMeta:
1540 {
1541 if (archmeta_num_total < 0) {
1542 string msg = "There may be no archivemetadata or archivemetadata is not counted ";
1543 H5Gclose(ecs_grp_id);
1544 throw InternalErr(__FILE__, __LINE__, msg);
1545 }
1546 try {
1547 archmeta_num = get_metadata_num(s_oname[i]);
1548 }
1549 catch(...) {
1550 H5Gclose(ecs_grp_id);
1551 throw InternalErr(__FILE__,__LINE__,"Obtain archivemetadata suffix error.");
1552 }
1553 if (-1 == archmeta_num )
1554 total_archmeta_value = finstr;
1555 else if (archmeta_value[archmeta_num]!="") {
1556 string msg = "The archivemeta value array at this index should be empty string ";
1557 H5Gclose(ecs_grp_id);
1558 throw InternalErr(__FILE__, __LINE__, msg);
1559
1560 }
1561 // assign the string vector to this value.
1562 else
1563 archmeta_value[archmeta_num] = finstr;
1564 }
1565 break;
1566 case SubsetMeta:
1567 {
1568 if (submeta_num_total < 0) {
1569                    string msg = "There may be no subsetmetadata or subsetmetadata is not counted ";
1570 H5Gclose(ecs_grp_id);
1571 throw InternalErr(__FILE__, __LINE__, msg);
1572 }
1573 try {
1574 submeta_num = get_metadata_num(s_oname[i]);
1575 }
1576 catch(...) {
1577 H5Gclose(ecs_grp_id);
1578 throw InternalErr(__FILE__,__LINE__,"Obtain subsetmetadata suffix error.");
1579 }
1580 if (-1 == submeta_num )
1581 total_submeta_value = finstr;
1582 else if (submeta_value[submeta_num]!="") {
1583 string msg = "The submeta value array at this index should be empty string ";
1584 H5Gclose(ecs_grp_id);
1585 throw InternalErr(__FILE__, __LINE__, msg);
1586 }
1587 // assign the string vector to this value.
1588 else
1589 submeta_value[submeta_num] = finstr;
1590 }
1591 break;
1592 case ProductMeta:
1593 {
1594 if (prometa_num_total < 0) {
1595 string msg = "There may be no productmetadata or productmetadata is not counted ";
1596 H5Gclose(ecs_grp_id);
1597 throw InternalErr(__FILE__, __LINE__, msg);
1598 }
1599 try {
1600 prometa_num = get_metadata_num(s_oname[i]);
1601 }
1602 catch(...) {
1603 H5Gclose(ecs_grp_id);
1604 throw InternalErr(__FILE__,__LINE__,"Obtain productmetadata suffix error.");
1605 }
1606 if (prometa_num == -1)
1607 total_prometa_value = finstr;
1608 else if (prometa_value[prometa_num]!="") {
1609 string msg = "The productmeta value array at this index should be empty string ";
1610 H5Gclose(ecs_grp_id);
1611 throw InternalErr(__FILE__, __LINE__, msg);
1612 }
1613 // assign the string vector to this value.
1614 else
1615 prometa_value[prometa_num] = finstr;
1616 }
1617 break;
1618 case XMLMeta:
1619 {
1620 if (xmlmeta_num_total < 0) {
1621 string msg = "There may be no xmlmetadata or xmlmetadata is not counted ";
1622 H5Gclose(ecs_grp_id);
1623 throw InternalErr(__FILE__, __LINE__, msg);
1624 }
1625 try {
1626 xmlmeta_num = get_metadata_num(s_oname[i]);
1627 }
1628 catch(...) {
1629 H5Gclose(ecs_grp_id);
1630 throw InternalErr(__FILE__,__LINE__,"Obtain XMLmetadata suffix error.");
1631 }
1632 if (-1 == xmlmeta_num )
1633 total_xmlmeta_value = finstr;
1634 else if (xmlmeta_value[xmlmeta_num]!="") {
1635 string msg = "The xmlmeta value array at this index should be empty string ";
1636 H5Gclose(ecs_grp_id);
1637 throw InternalErr(__FILE__, __LINE__, msg);
1638 }
1639 // assign the string vector to this value.
1640 else
1641 xmlmeta_value[xmlmeta_num] = finstr;
1642 }
1643 break;
1644 case OtherMeta:
1645 {
1646 if (othermeta_num_total < 0) {
1647 string msg = "There may be no othermetadata or other metadata is not counted ";
1648 H5Gclose(ecs_grp_id);
1649 throw InternalErr(__FILE__, __LINE__, msg);
1650 }
1651 total_othermeta_value = total_othermeta_value + finstr;
1652 }
1653 break;
1654 default :
1655 {
1656 string msg = "Unsupported metadata type ";
1657 H5Gclose(ecs_grp_id);
1658 throw InternalErr(__FILE__, __LINE__, msg);
1659 }
1660 }
1661 }
1662 tempstr.clear();
1663 finstr.clear();
1664 }
1665
1666 // Now we need to handle the concatenation of the metadata
1667 // first StructMetadata
1668 if (strmeta_num_total > 0) {
1669        // The no-suffix one has already been taken care of.
1670 if (strmeta_num != -1) {
1671 for (int i = 0; i <strmeta_num_total; i++)
1672 total_strmeta_value +=strmeta_value[i];
1673 }
1674 }
1675
1676 // For the DAS handler
1677 if ( false == s_st_only) {
1678
1679 if (coremeta_num_total >0) {
1680 if (coremeta_num != -1) {
1681 for(int i = 0; i <coremeta_num_total; i++)
1682 total_coremeta_value +=coremeta_value[i];
1683 }
1684 }
1685
1686 if (archmeta_num_total >0) {
1687 if (archmeta_num != -1) {
1688 for(int i = 0; i <archmeta_num_total; i++)
1689 total_archmeta_value +=archmeta_value[i];
1690 }
1691 }
1692
1693 if (submeta_num_total >0) {
1694 if (submeta_num != -1) {
1695 for(int i = 0; i <submeta_num_total; i++)
1696 total_submeta_value +=submeta_value[i];
1697 }
1698 }
1699
1700 if (xmlmeta_num_total >0) {
1701 if (xmlmeta_num != -1) {
1702 for(int i = 0; i <xmlmeta_num_total; i++)
1703 total_xmlmeta_value +=xmlmeta_value[i];
1704 }
1705 }
1706
1707 if (prometa_num_total >0) {
1708 if (prometa_num != -1) {
1709 for(int i = 0; i <prometa_num_total; i++)
1710 total_prometa_value +=prometa_value[i];
1711 }
1712 }
1713 }
1714 H5Gclose(ecs_grp_id);
1715}
1716
1717// Helper function for read_ecs_metadata. Get the number after metadata.
1718int get_metadata_num(const string & meta_str) {
1719
1720 // The normal metadata names should be like coremetadata.0, coremetadata.1 etc.
1721    // We just found some not-so-nice coremetadata names such as coremetadata.0, coremetadata.0.1 for a HIRDLS-MLS-Aura-L3 file.
1722 // We need to handle them. Here we assume no more than two dots in a name series. KY 2012-11-08
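    // For example: "coremetadata" has no dot and returns -1; "coremetadata.0"
    // returns 0; "coremetadata.0.1" returns the number after the second dot, i.e. 1.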
1723 size_t dot_pos = meta_str.find(".");
1724 if (dot_pos == string::npos) // No dot
1725 return -1;
1726 else if (meta_str.find_first_of(".") == meta_str.find_last_of(".")) { // One dot
1727 string num_str = meta_str.substr(dot_pos+1);
1728 stringstream ssnum(num_str);
1729 int num;
1730 ssnum >> num;
1731 if (ssnum.fail())
1732 throw InternalErr(__FILE__,__LINE__,"Suffix after dots is not a number.");
1733 return num;
1734 }
1735 else { // Two dots
1736 string str_after_first_dot = meta_str.substr(dot_pos+1);
1737 if (str_after_first_dot.find_first_of(".") != str_after_first_dot.find_last_of("."))
1738 throw InternalErr(__FILE__,__LINE__,"Currently don't support metadata names containing more than two dots.");
1739        // Here we don't check whether names like coremetadata.0 and coremetadata.0.0 both appear; having ".0" and ".0.0"
1740        // together is, if not mistaken, unreasonable.
1741        // Instead, we hope that the data producers will produce data like coremetadata.0, coremetadata.0.1, coremetadata.0.2.
1742 // KY 2012-11-08
1743 size_t second_dot_pos = str_after_first_dot.find(".");
1744 string num_str = str_after_first_dot.substr(second_dot_pos+1);
1745 stringstream ssnum(num_str);
1746 int num;
1747 ssnum >> num;
1748 return num;
1749 }
1750
1751}
1752
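// Map EOS5 to DAP4 DMR (the CF option). Unlike the DDS/DAS path, the ECS metadata
// strings are attached directly as attributes of the DMR root group; the rest of the
// mapping reuses the same StructMetadata parsing and EOS5File machinery.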
1753void map_eos5_cfdmr(D4Group *d4_root, hid_t file_id, const string &filename) {
1754
1755    BESDEBUG("h5","Coming to HDF-EOS5 products DMR mapping function map_eos5_cfdmr "<<endl);
1756
1757 string st_str;
1758 string core_str;
1759 string arch_str;
1760 string xml_str;
1761 string subset_str;
1762 string product_str;
1763 string other_str;
1764 bool st_only = false;
1765
1766 // Read ECS metadata: merge them into one C++ string
1767 read_ecs_metadata(file_id,st_str,core_str,arch_str,xml_str, subset_str,product_str,other_str,st_only);
1768 if(""==st_str) {
1769 string msg =
1770 "unable to obtain the HDF-EOS5 struct metadata ";
1771 throw InternalErr(__FILE__, __LINE__, msg);
1772 }
1773
1774 bool disable_ecsmetadata = HDF5RequestHandler::get_disable_ecsmeta();
1775 if(disable_ecsmetadata == false) {
1776
1777 bool is_check_disable_smetadata = HDF5RequestHandler::get_disable_structmeta();
1778
1779 if (false == is_check_disable_smetadata)
1780 add_grp_dap4_attr(d4_root,"StructMetadata",attr_str_c,st_str);
1781
1782 if(core_str != "")
1783 add_grp_dap4_attr(d4_root,"CoreMetadata",attr_str_c,core_str);
1784
1785 if(arch_str != "")
1786 add_grp_dap4_attr(d4_root,"ArchiveMetadata",attr_str_c,arch_str);
1787
1788 if(xml_str != "")
1789 add_grp_dap4_attr(d4_root,"XMLMetadata",attr_str_c,xml_str);
1790
1791 if(subset_str !="")
1792 add_grp_dap4_attr(d4_root,"SubsetMetadata",attr_str_c,subset_str);
1793
1794 if(product_str != "")
1795 add_grp_dap4_attr(d4_root,"ProductMetadata",attr_str_c,product_str);
1796
1797 if(other_str !="")
1798 add_grp_dap4_attr(d4_root,"OtherMetadata",attr_str_c,other_str);
1799 }
1800
1801 bool is_check_nameclashing = HDF5RequestHandler::get_check_name_clashing();
1802
1803 bool is_add_path_attrs = HDF5RequestHandler::get_add_path_attrs();
1804
1805 EOS5File *f = nullptr;
1806
1807 try {
1808 f = new EOS5File(filename.c_str(),file_id);
1809 }
1810 catch(...) {
1811 throw InternalErr(__FILE__,__LINE__,"Cannot allocate the file object.");
1812 }
1813
1814 bool include_attr = true;
1815
1816 // This first "try-catch" block will use the parsed info
1817 try {
1818
1819 // Parse the structmetadata
1820 // Note: he5dds_scan_string() only retrieves the variable info.
1821 // It can still be used to handle the DMR, so there is no need to write another parser.
1822 // KY 2021-05-21
1823 HE5Parser p;
1824 HE5Checker c;
1825 he5dds_scan_string(st_str.c_str());
1826 he5ddsparse(&p);
1827 he5ddslex_destroy();
1828
1829 // Retrieve ProjParams from StructMetadata
1830 p.add_projparams(st_str);
1831#if 0
1832 //p.print();
1833#endif
1834
1835 // Check if the HDF-EOS5 grid has valid parameters and projection codes.
1836 if (c.check_grids_unknown_parameters(&p)) {
1837 throw InternalErr("Unknown HDF-EOS5 grid parameters found in the file");
1838 }
1839
1840 if (c.check_grids_missing_projcode(&p)) {
1841 throw InternalErr("The HDF-EOS5 file is missing the projection code");
1842 }
1843
1844 // We are gradually adding support for more projection codes.
1845 if (c.check_grids_support_projcode(&p)) {
1846 throw InternalErr("The current projection code is not supported");
1847 }
1848
1849 // HDF-EOS5 provides default pixel and origin values if they are not defined.
1850 c.set_grids_missing_pixreg_orig(&p);
1851
1852 // Check whether this multi-grid file needs multiple sets of lat/lon coordinate variables.
1853 bool grids_mllcv = c.check_grids_multi_latlon_coord_vars(&p);
1854
1855 // Retrieve all HDF5 info (not the values)
1856 f->Retrieve_H5_Info(filename.c_str(),file_id,include_attr);
1857
1858 // Adjust EOS5 Dimension names/sizes based on the parsed results
1859 f->Adjust_EOS5Dim_Info(&p);
1860
1861 // Translate the parsed output to HDF-EOS5 grids/swaths/zonal.
1862 // Several maps related to dimension and coordinates are set up here.
1863 f->Add_EOS5File_Info(&p, grids_mllcv);
1864
1865 // Add the dimension names
1866 f->Add_Dim_Name(&p);
1867 }
1868 catch (HDF5CF::Exception &e){
1869 delete f;
1870 throw InternalErr(e.what());
1871 }
1872 catch(...) {
1873 delete f;
1874 throw;
1875 }
1876
1877 // The parsed struct is no longer needed in this "try-catch" block.
1878 try {
1879
1880 // NASA Aura files need special handling. So first check if this file is an Aura file.
1881        f->Check_Aura_Product_Status();
1882
1883 // Adjust the variable names.
1884        f->Adjust_Var_NewName_After_Parsing();
1885
1886 // Handle coordinate variables
1887 f->Handle_CVar();
1888
1889 // Adjust variable and dimension names again based on the handling of coordinate variables.
1890        f->Adjust_Var_Dim_NewName_Before_Flattening();
1891
1892
1893 // Old comments, leave them for the time being:
1894 // We need to use the CV units to distinguish lat/lon from th 3rd CV when
1895 // memory cache is turned on.
1896#if 0
1897 //if((HDF5RequestHandler::get_lrdata_mem_cache() != nullptr) ||
1898 // (HDF5RequestHandler::get_srdata_mem_cache() != nullptr)){
1899#endif
1900
1901 // Handle unsupported datatypes including the attributes
1902 f->Handle_Unsupported_Dtype(true);
1903
1904 // Handle unsupported dataspaces, including the attributes.
1905        f->Handle_Unsupported_Dspace(true);
1906
1907 // We need to retrieve coordinate variable attributes for memory cache use.
1908        f->Retrieve_H5_CVar_Supported_Attr_Values();
1909
1910        f->Retrieve_H5_Supported_Attr_Values();
1911
1912 // Handle other unsupported objects.
1913 // Currently this mainly generates the info for unsupported objects
1914 // other than datatypes, dataspaces, links and named datatypes.
1915 // This function needs to be called after retrieving the supported attributes.
1916 f->Handle_Unsupported_Others(include_attr);
1917
1918#if 0
1919 else {
1920
1921 // Handle unsupported datatypes
1922 f->Handle_Unsupported_Dtype(include_attr);
1923
1924 // Handle unsupported dataspaces
1925 f->Handle_Unsupported_Dspace(include_attr);
1926
1927 }
1928#endif
1929
1930
1931 // Need to retrieve the units of CV when memory cache is turned on.
1932 // The units of CV will be used to distinguish whether this CV is
1933 // latitude/longitude or a third-dimension CV.
1934 // isLatLon() will use the units value.
1935#if 0
1936 //if((HDF5RequestHandler::get_lrdata_mem_cache() != nullptr) ||
1937 // (HDF5RequestHandler::get_srdata_mem_cache() != nullptr))
1938#endif
1939 f->Adjust_Attr_Info();
1940
1941 // May need to adjust the object names for special objects. Currently, no operations
1942 // are done in this routine.
1943 f->Adjust_Obj_Name();
1944
1945 // Flatten the object name
1946 f->Flatten_Obj_Name(include_attr);
1947
1948 // Handle name clashing
1949 if(true == is_check_nameclashing)
1950 f->Handle_Obj_NameClashing(include_attr);
1951
1952 // Check if this file should follow COARDS; if so, set the COARDS flag.
1953 f->Set_COARDS_Status();
1954
1955 // For COARDS, the dimension name needs to be changed.
1956 f->Adjust_Dim_Name();
1957 if(true == is_check_nameclashing)
1958            f->Handle_DimNameClashing();
1959
1960 f->Add_Supplement_Attrs(is_add_path_attrs);
1961
1962 // We need to turn off the very long string in the TES file to avoid
1963 // choking the netCDF Java tools. So this special variable routine
1964 // is listed last. We may need to turn this off if netCDF can handle
1965 // long strings better.
1966 f->Handle_SpVar_DMR();
1967
1968 // Handle coordinate attributes
1969 f->Handle_Coor_Attr();
1970#if 0
1971 //f->Handle_SpVar_Attr();
1972#endif
1973 }
1974 catch (HDF5CF::Exception &e){
1975 delete f;
1976 throw InternalErr(e.what());
1977 }
1978
1979 // Generate EOS5 DMR
1980 try {
1981 gen_eos5_cfdmr(d4_root,f);
1982 }
1983 catch(...) {
1984 delete f;
1985 throw;
1986 }
1987
1988 delete f;
1989
1990}
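The explicit delete-then-rethrow blocks above keep the EOS5File object from leaking on error paths. The same effect could be sketched with a smart pointer; the snippet below is an alternative style under the assumption that the same EOS5File and gen_eos5_cfdmr() interfaces are used, not how this file is actually written:

#include <memory>
#include <string>
#include <libdap/D4Group.h>
#include "HDF5CF.h"      // HDF5CF::EOS5File (handler header)
#include "heos5cfdap.h"  // gen_eos5_cfdmr()

// Sketch only: let unique_ptr delete the EOS5File on every exit path,
// which would remove the explicit delete/throw blocks shown above.
void map_eos5_cfdmr_sketch(libdap::D4Group *d4_root, hid_t file_id, const std::string &filename) {
    auto f = std::make_unique<HDF5CF::EOS5File>(filename.c_str(), file_id);
    // ... parse StructMetadata and populate f exactly as in map_eos5_cfdmr() ...
    gen_eos5_cfdmr(d4_root, f.get());
    // f is released automatically here, or earlier if an exception propagates.
}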
1991
1992void gen_eos5_cfdmr(D4Group *d4_root, const HDF5CF::EOS5File *f) {
1993
1994 BESDEBUG("h5","Coming to HDF-EOS5 products DMR generation function gen_eos5_cfdmr() "<<endl);
1995 const vector<HDF5CF::Var *>& vars = f->getVars();
1996 const vector<HDF5CF::EOS5CVar *>& cvars = f->getCVars();
1997 const string filename = f->getPath();
1998 const hid_t file_id = f->getFileID();
1999 const vector<HDF5CF::Group *>& grps = f->getGroups();
2000 const vector<HDF5CF::Attribute *>& root_attrs = f->getAttributes();
2001
2002
2003 if (false == root_attrs.empty()) {
2004 for (const auto &root_attr:root_attrs)
2005 map_cfh5_grp_attr_to_dap4(d4_root,root_attr);
2006 }
2007
2008 // We use an attribute container for each group since the output claims to have no hierarchy.
2009 if (false == grps.empty()) {
2010 for (const auto &grp:grps) {
2011 auto tmp_grp_unique = make_unique<D4Attribute>();
2012 auto tmp_grp = tmp_grp_unique.release();
2013 tmp_grp->set_name(grp->getNewName());
2014 tmp_grp->set_type(attr_container_c);
2015
2016 for (const auto &attr:grp->getAttributes())
2017 map_cfh5_attr_container_to_dap4(tmp_grp,attr);
2018 d4_root->attributes()->add_attribute_nocopy(tmp_grp);
2019 }
2020 }
2021
2022 // Read the variable info.
2023 // TODO: We may need to generate the coordinate variables first for coverage support.
2024
2025 if (HDF5RequestHandler::get_add_dap4_coverage() == true) {
2026 for (const auto &cvar:cvars) {
2027 BESDEBUG("h5","variable full path= "<< cvar->getFullPath() <<endl);
2028 gen_dap_oneeos5cvar_dmr(d4_root,cvar,file_id,filename);
2029
2030 }
2031 for (const auto &var:vars) {
2032 BESDEBUG("h5","variable full path= "<< var->getFullPath() <<endl);
2033 gen_dap_onevar_dmr(d4_root,var,file_id,filename);
2034 }
2035
2036 // Handle EOS5 grid mapping info.
2037 if (f->Have_EOS5_Grids()==true)
2038 gen_dap_eos5cf_gm_dmr(d4_root,f);
2039
2040 }
2041 else {
2042 for (const auto &var:vars) {
2043 BESDEBUG("h5","variable full path= "<< var->getFullPath() <<endl);
2044 gen_dap_onevar_dmr(d4_root,var,file_id,filename);
2045 }
2046
2047 // Handle EOS5 grid mapping info.
2048 if (f->Have_EOS5_Grids()==true)
2049 gen_dap_eos5cf_gm_dmr(d4_root,f);
2050
2051 for (const auto &cvar:cvars) {
2052 BESDEBUG("h5","variable full path= "<< cvar->getFullPath() <<endl);
2053 gen_dap_oneeos5cvar_dmr(d4_root,cvar,file_id,filename);
2054 }
2055
2056 }
2057
2058 // Check all unlimited dimensions from the coordinate variables based on their names.
2059 if(f->HaveUnlimitedDim() == true) {
2060
2061 string dods_extra = "DODS_EXTRA";
2062
2063 // If DODS_EXTRA exists, we will not create the unlimited dimensions.
2064 if(d4_root->attributes() != nullptr) {
2065#if 0
2066 //if((d4_root->attributes()->find(dods_extra))==nullptr) {
2067#endif
2068 string unlimited_dim_names;
2069
2070 for (const auto &cvar:cvars) {
2071
2072 // Check unlimited dimension names.
2073 for (const auto &dim:cvar->getDimensions()) {
2074
2075 // Currently we only check one unlimited dimension, which is the most
2076 // common case. When we receive the conventions from JG, we will add
2077 // support for multiple unlimited dimensions. KY 2016-02-09
2078 if(dim->HaveUnlimitedDim() == true) {
2079
2080 if(unlimited_dim_names=="")
2081 unlimited_dim_names = dim->getNewName();
2082 else {
2083 if(unlimited_dim_names.rfind(dim->getNewName()) == string::npos) {
2084 unlimited_dim_names = unlimited_dim_names+" "+dim->getNewName();
2085 }
2086 }
2087 }
2088 }
2089 }
2090
2091 if (unlimited_dim_names != "") {
2092 auto unlimited_dim_attr_unique = make_unique<D4Attribute>("Unlimited_Dimension",attr_str_c);
2093 auto unlimited_dim_attr = unlimited_dim_attr_unique.release();
2094 unlimited_dim_attr->add_value(unlimited_dim_names);
2095 auto dods_extra_attr_unique = make_unique<D4Attribute>(dods_extra,attr_container_c);
2096 auto dods_extra_attr = dods_extra_attr_unique.release();
2097 dods_extra_attr->attributes()->add_attribute_nocopy(unlimited_dim_attr);
2098 d4_root->attributes()->add_attribute_nocopy(dods_extra_attr);
2099 }
2100 else
2101 throw InternalErr(__FILE__, __LINE__, "Unlimited dimension should exist.");
2102 //}
2103 }
2104
2105 }
2106
2107 // Add DAP4 map for coverage
2108 if (HDF5RequestHandler::get_add_dap4_coverage() == true) {
2109
2110 // Obtain the coordinate variable names; these are the DAP4 map variables.
2111 vector <string> cvar_name;
2112 for (const auto &cvar:cvars)
2113 cvar_name.emplace_back(cvar->getNewName());
2114
2115 add_dap4_coverage(d4_root,cvar_name,f->getIsCOARD());
2116 }
2117}
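The unlimited-dimension handling above records the dimension names in a DODS_EXTRA attribute container. A minimal standalone sketch of just that step, using the same libdap D4Attribute calls that appear above (the helper name is made up for illustration):

#include <string>
#include <libdap/D4Group.h>
#include <libdap/D4Attributes.h>

// Illustrative helper mirroring the DODS_EXTRA logic in gen_eos5_cfdmr():
// record the space-separated unlimited dimension names under DODS_EXTRA.
static void add_dods_extra_sketch(libdap::D4Group *root, const std::string &unlimited_dim_names) {
    auto *unlimited = new libdap::D4Attribute("Unlimited_Dimension", libdap::attr_str_c);
    unlimited->add_value(unlimited_dim_names);               // e.g. "Time" or "Time Pressure"
    auto *dods_extra = new libdap::D4Attribute("DODS_EXTRA", libdap::attr_container_c);
    dods_extra->attributes()->add_attribute_nocopy(unlimited);
    root->attributes()->add_attribute_nocopy(dods_extra);
}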
2118
2119
2120void gen_dap_oneeos5cvar_dmr(D4Group* d4_root,const EOS5CVar* cvar,const hid_t file_id,const string & filename){
2121
2122 BESDEBUG("h5","Coming to gen_dap_oneeos5cvar_dmr() "<<endl);
2123 BaseType *bt = nullptr;
2124
2125 switch(cvar->getType()) {
2126#define HANDLE_CASE(tid,type) \
2127 case tid: \
2128 bt = new (type)(cvar->getNewName(),cvar->getFullPath()); \
2129 break;
2130
2131 HANDLE_CASE(H5FLOAT32, HDF5CFFloat32)
2132 HANDLE_CASE(H5FLOAT64, HDF5CFFloat64)
2133 HANDLE_CASE(H5CHAR,HDF5CFInt8)
2134 HANDLE_CASE(H5UCHAR, HDF5CFByte)
2135 HANDLE_CASE(H5INT16, HDF5CFInt16)
2136 HANDLE_CASE(H5UINT16, HDF5CFUInt16)
2137 HANDLE_CASE(H5INT32, HDF5CFInt32)
2138 HANDLE_CASE(H5UINT32, HDF5CFUInt32)
2139 HANDLE_CASE(H5INT64, HDF5CFInt64)
2140 HANDLE_CASE(H5UINT64, HDF5CFUInt64)
2141 HANDLE_CASE(H5FSTRING, Str)
2142 HANDLE_CASE(H5VSTRING, Str)
2143 default:
2144 throw InternalErr(__FILE__,__LINE__,"unsupported data type.");
2145#undef HANDLE_CASE
2146 }
2147
2148 if (bt) {
2149
2150 const vector<HDF5CF::Dimension *>& dims = cvar->getDimensions();
2151 vector <HDF5CF::Dimension*>:: const_iterator it_d;
2152 vector <size_t> dimsizes;
2153 dimsizes.resize(cvar->getRank());
2154 for (int i = 0; i <cvar->getRank();i++)
2155 dimsizes[i] = (dims[i])->getSize();
2156
2157 if(dims.empty())
2158 throw InternalErr(__FILE__,__LINE__,"the coordinate variables cannot be scalar.");
2159 switch(cvar->getCVType()) {
2160
2161 case CV_EXIST:
2162 {
2163
2164 bool is_latlon = cvar->isLatLon();
2165 bool is_dap4 = true;
2166 auto ar_unique = make_unique<HDF5CFArray>(
2167 cvar->getRank(),
2168 file_id,
2169 filename,
2170 cvar->getType(),
2171 dimsizes,
2172 cvar->getFullPath(),
2173 cvar->getTotalElems(),
2174 CV_EXIST,
2175 is_latlon,
2176 cvar->getCompRatio(),
2177 is_dap4,
2178 cvar->getNewName(),
2179 bt);
2180 auto ar = ar_unique.get();
2181
2182 delete bt;
2183
2184
2185 for (it_d = dims.begin(); it_d != dims.end(); ++it_d) {
2186 if (""==(*it_d)->getNewName())
2187 ar->append_dim_ll((*it_d)->getSize());
2188 else
2189 ar->append_dim_ll((*it_d)->getSize(), (*it_d)->getNewName());
2190 }
2191
2192 ar->set_is_dap4(true);
2193 BaseType* d4_var=ar->h5cfdims_transform_to_dap4(d4_root);
2194 map_cfh5_var_attrs_to_dap4(cvar,d4_var);
2195 d4_root->add_var_nocopy(d4_var);
2196
2197 }
2198 break;
2199
2200 case CV_LAT_MISS:
2201 case CV_LON_MISS:
2202 {
2203 auto ar_unique = make_unique<HDFEOS5CFMissLLArray> (
2204 cvar->getRank(),
2205 filename,
2206 file_id,
2207 cvar->getFullPath(),
2208 cvar->getCVType(),
2209 cvar->getPointLower(),
2210 cvar->getPointUpper(),
2211 cvar->getPointLeft(),
2212 cvar->getPointRight(),
2213 cvar->getPixelReg(),
2214 cvar->getOrigin(),
2215 cvar->getProjCode(),
2216 cvar->getParams(),
2217 cvar->getZone(),
2218 cvar->getSphere(),
2219 cvar->getXDimSize(),
2220 cvar->getYDimSize(),
2221 cvar->getNewName(),
2222 bt);
2223 auto ar = ar_unique.get();
2224 delete bt;
2225
2226 for (it_d = dims.begin(); it_d != dims.end(); ++it_d) {
2227 if (""==(*it_d)->getNewName())
2228 ar->append_dim_ll((*it_d)->getSize());
2229 else
2230 ar->append_dim_ll((*it_d)->getSize(), (*it_d)->getNewName());
2231 }
2232
2233 ar->set_is_dap4(true);
2234 BaseType* d4_var=ar->h5cfdims_transform_to_dap4(d4_root);
2235 map_cfh5_var_attrs_to_dap4(cvar,d4_var);
2236 add_var_sp_attrs_to_dap4(d4_var,cvar);
2237 d4_root->add_var_nocopy(d4_var);
2238
2239 }
2240 break;
2241
2242 case CV_NONLATLON_MISS:
2243 {
2244
2245 if (cvar->getRank() !=1) {
2246 delete bt;
2247 throw InternalErr(__FILE__, __LINE__, "The rank of missing Z dimension field must be 1");
2248 }
2249 int nelem = (int)((cvar->getDimensions()[0])->getSize());
2250
2251 auto ar_unique = make_unique<HDFEOS5CFMissNonLLCVArray>(
2252 cvar->getRank(),
2253 nelem,
2254 cvar->getNewName(),
2255 bt);
2256 auto ar = ar_unique.get();
2257 delete bt;
2258
2259 for(it_d = dims.begin(); it_d != dims.end(); it_d++) {
2260 if (""==(*it_d)->getNewName())
2261 ar->append_dim_ll((*it_d)->getSize());
2262 else
2263 ar->append_dim_ll((*it_d)->getSize(), (*it_d)->getNewName());
2264 }
2265
2266 ar->set_is_dap4(true);
2267 BaseType* d4_var=ar->h5cfdims_transform_to_dap4(d4_root);
2268 map_cfh5_var_attrs_to_dap4(cvar,d4_var);
2269 d4_root->add_var_nocopy(d4_var);
2270
2271 }
2272 break;
2273 case CV_SPECIAL:
2274 // Currently we only support Aura TES files. May need to revise when more
2275 // special products are supported. KY 2012-2-3
2276 {
2277
2278 if (cvar->getRank() !=1) {
2279 delete bt;
2280 throw InternalErr(__FILE__, __LINE__, "The rank of missing Z dimension field must be 1");
2281 }
2282 int nelem = (int)((cvar->getDimensions()[0])->getSize());
2283 auto ar_unique = make_unique<HDFEOS5CFSpecialCVArray> (
2284 cvar->getRank(),
2285 filename,
2286 file_id,
2287 cvar->getType(),
2288 nelem,
2289 cvar->getFullPath(),
2290 cvar->getNewName(),
2291 bt);
2292 auto ar = ar_unique.get();
2293 delete bt;
2294
2295 for(it_d = dims.begin(); it_d != dims.end(); ++it_d){
2296 if (""==(*it_d)->getNewName())
2297 ar->append_dim_ll((*it_d)->getSize());
2298 else
2299 ar->append_dim_ll((*it_d)->getSize(), (*it_d)->getNewName());
2300 }
2301
2302 ar->set_is_dap4(true);
2303 BaseType* d4_var=ar->h5cfdims_transform_to_dap4(d4_root);
2304 map_cfh5_var_attrs_to_dap4(cvar,d4_var);
2305 d4_root->add_var_nocopy(d4_var);
2306
2307 }
2308 break;
2309 case CV_MODIFY:
2310 default:
2311 delete bt;
2312 throw InternalErr(__FILE__,__LINE__,"Unsupported coordinate variable type.");
2313 }
2314
2315 }
2316
2317}
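For clarity, the HANDLE_CASE macro near the top of gen_dap_oneeos5cvar_dmr() expands to an ordinary case label; for example, HANDLE_CASE(H5FLOAT32, HDF5CFFloat32) is equivalent to the following fragment (shown only as an illustration of the expansion):

case H5FLOAT32:
    bt = new HDF5CFFloat32(cvar->getNewName(), cvar->getFullPath());
    break;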
2318
2319
2320// Generate DMR info for the grid mapping (gm: grid mapping).
2321void gen_dap_eos5cf_gm_dmr(libdap::D4Group* d4_root,const HDF5CF::EOS5File*f) {
2322
2323 // grid mapping projection vars
2324 // and add grid_mapping attribute for non-cv vars
2325 gen_gm_proj_var_info(d4_root,f);
2326
2327 // special grid mapping dimension variables.
2328 gen_gm_proj_spvar_info(d4_root,f);
2329
2330}
2331
2332//(1) Add grid mapping projection vars if we have any
2333//(2) Add grid_mapping attributes for all non-cv vars
2334void gen_gm_proj_var_info(libdap::D4Group* d4_root,const HDF5CF::EOS5File* f) {
2335
2336 BESDEBUG("h5","Coming to gen_gm_proj_var_info() "<<endl);
2337 const vector<HDF5CF::EOS5CVar *>& cvars = f->getCVars();
2338
2339 // For multiple grids, multiple grid mapping variables are needed.
2340 // We use EOS5 coordinate variables to track this.
2341 unsigned short cv_lat_miss_index = 1;
2342 for (const auto &cvar:cvars) {
2343 if(cvar->getCVType() == CV_LAT_MISS) {
2344 if(cvar->getProjCode() != HE5_GCTP_GEO) {
2345 gen_gm_oneproj_var(d4_root,cvar,cv_lat_miss_index,f);
2346 cv_lat_miss_index++;
2347 }
2348 }
2349 }
2350}
2351
2352// Generate the dummy grid_mapping variable with its attributes, and add the
2353// grid_mapping attribute to all the non-cv variables.
2354void gen_gm_oneproj_var(libdap::D4Group*d4_root,
2355 const HDF5CF::EOS5CVar* cvar,
2356 const unsigned short g_suffix, const HDF5CF::EOS5File* f) {
2357
2358 BESDEBUG("h5","Coming to gen_gm_oneproj_var() "<<endl);
2359 EOS5GridPCType cv_proj_code = cvar->getProjCode();
2360 const vector<HDF5CF::Dimension *>& dims = cvar->getDimensions();
2361
2362 if(dims.size() !=2)
2363 throw InternalErr(__FILE__,__LINE__,"Currently we only support the 2-D CF coordinate projection system.");
2364
2365 // 1. Add the grid mapping dummy projection variable dmr for each grid
2366 // 2. Add the grid_mapping attribute for each variable that this projection applies
2367 // Currently we handle the sinusoidal, PS (Polar Stereographic) and LAMAZ (Lambert Azimuthal Equal Area) projections.
2368 if (HE5_GCTP_SNSOID == cv_proj_code || HE5_GCTP_PS == cv_proj_code || HE5_GCTP_LAMAZ== cv_proj_code) {
2369
2370 // Add the dummy projection variable.
2371 // The attributes of this variable can be used to store the grid mapping info.
2372 // To handle multi-grid cases, we need to add suffixes to distinguish them.
2373 string cf_projection_base = "eos_cf_projection";
2374 string cf_projection_name;
2375
2376 HDF5CFGeoCFProj * dummy_proj_cf = nullptr;
2377
2378 if(HE5_GCTP_SNSOID == cv_proj_code) {
2379
2380 // For the sinusoidal projection, only one grid_mapping variable is needed
2381 // even for multiple grids, so we just create one here.
2382 cf_projection_name = cf_projection_base;
2383 if (g_suffix == 1) {
2384 auto dummy_proj_cf_unique = make_unique<HDF5CFGeoCFProj>(cf_projection_name, cf_projection_name);
2385 dummy_proj_cf = dummy_proj_cf_unique.release();
2386 }
2387 }
2388 else {
2389 stringstream t_suffix_ss;
2390 t_suffix_ss << g_suffix;
2391 cf_projection_name = cf_projection_base + "_" + t_suffix_ss.str();
2392 auto dummy_proj_cf_unique = make_unique<HDF5CFGeoCFProj>(cf_projection_name, cf_projection_name);
2393 dummy_proj_cf = dummy_proj_cf_unique.release();
2394 }
2395
2396 if (dummy_proj_cf != nullptr) {
2397 dummy_proj_cf->set_is_dap4(true);
2398 add_gm_oneproj_var_dap4_attrs(dummy_proj_cf,cv_proj_code,cvar->getParams());
2399 d4_root->add_var_nocopy(dummy_proj_cf);
2400 }
2401
2402 // Add the grid_mapping attributes to all non-cv variables for the grid.
2403 vector<string> cvar_name;
2404 if (HDF5RequestHandler::get_add_dap4_coverage() == true) {
2405 const vector<HDF5CF::EOS5CVar *>& cvars = f->getCVars();
2406 for (const auto &gm_cvar:cvars)
2407 cvar_name.emplace_back(gm_cvar->getNewName());
2408
2409 }
2410 add_cf_grid_cv_dap4_attrs(d4_root,cf_projection_name,dims,cvar_name);
2411 }
2412
2413}
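As a summary of the naming scheme used above for the dummy grid-mapping variables, here is a hypothetical helper that mirrors the suffix logic (it assumes the handler's EOS5GridPCType enum and HE5_GCTP_* constants):

#include <sstream>
#include <string>

// Hypothetical helper mirroring the naming above: sinusoidal grids share one
// "eos_cf_projection" variable, while PS/LAMAZ grids get one per grid with a
// numeric suffix ("eos_cf_projection_1", "eos_cf_projection_2", ...).
// EOS5GridPCType and HE5_GCTP_SNSOID come from the handler's HDF-EOS5 headers.
static std::string cf_projection_name_for(EOS5GridPCType proj, unsigned short g_suffix) {
    const std::string base = "eos_cf_projection";
    if (proj == HE5_GCTP_SNSOID)
        return base;
    std::ostringstream oss;
    oss << base << "_" << g_suffix;
    return oss.str();
}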
2414
2415// Generate the DMR of the special dimension variables.
2416void gen_gm_proj_spvar_info(libdap::D4Group* d4_root,const HDF5CF::EOS5File* f){
2417
2418 BESDEBUG("h5","Coming to HDF-EOS5 products grid mapping variable generation function "<<endl);
2419 const vector<HDF5CF::EOS5CVar *>& cvars = f->getCVars();
2420
2421 for (const auto &cvar:cvars) {
2422 if(cvar->getCVType() == CV_LAT_MISS) {
2423 if(cvar->getProjCode() != HE5_GCTP_GEO)
2424 gen_gm_oneproj_spvar(d4_root,cvar);
2425 }
2426 }
2427}
2428
2429void gen_gm_oneproj_spvar(libdap::D4Group *d4_root,const HDF5CF::EOS5CVar *cvar) {
2430
2431 BESDEBUG("h5","Coming to gen_gm_oneproj_spvar() "<<endl);
2432
2433 float cv_point_lower = cvar->getPointLower();
2434 float cv_point_upper = cvar->getPointUpper();
2435 float cv_point_left = cvar->getPointLeft();
2436 float cv_point_right = cvar->getPointRight();
2437 EOS5GridPCType cv_proj_code = cvar->getProjCode();
2438 const vector<HDF5CF::Dimension *>& dims = cvar->getDimensions();
2439 if(dims.size() !=2)
2440 throw InternalErr(__FILE__,__LINE__,"Currently we only support the 2-D CF coordinate projection system.");
2441 add_gm_spcvs(d4_root,cv_proj_code,cv_point_lower,cv_point_upper,cv_point_left,cv_point_right,dims);
2442
2443}
2444
2445void add_var_sp_attrs_to_dap4(BaseType *d4_var,const EOS5CVar* cvar) {
2446
2447 if(cvar->getProjCode() == HE5_GCTP_LAMAZ) {
2448 if(cvar->getCVType() == CV_LAT_MISS) {
2449 add_var_dap4_attr(d4_var,"valid_min", attr_float64_c, "-90.0");
2450 add_var_dap4_attr(d4_var,"valid_max", attr_float64_c, "90.0");
2451 }
2452 else {
2453 add_var_dap4_attr(d4_var,"valid_min", attr_float64_c, "-180.0");
2454 add_var_dap4_attr(d4_var,"valid_max", attr_float64_c, "180.0");
2455 }
2456 }
2457
2458}
2459