heos5cfdap.cc
1 // This file is part of hdf5_handler: an HDF5 file handler for the OPeNDAP
2 // data server.
3 
4 // Copyright (c) 2011-2016 The HDF Group, Inc. and OPeNDAP, Inc.
5 //
6 // This is free software; you can redistribute it and/or modify it under the
7 // terms of the GNU Lesser General Public License as published by the Free
8 // Software Foundation; either version 2.1 of the License, or (at your
9 // option) any later version.
10 //
11 // This software is distributed in the hope that it will be useful, but
12 // WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 // or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
14 // License for more details.
15 //
16 // You should have received a copy of the GNU Lesser General Public
17 // License along with this library; if not, write to the Free Software
18 // Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19 //
20 // You can contact OPeNDAP, Inc. at PO Box 112, Saunderstown, RI. 02874-0112.
21 // You can contact The HDF Group, Inc. at 1800 South Oak Street,
22 // Suite 203, Champaign, IL 61820
23 
32 
33 #include <sys/types.h>
34 #include <sys/stat.h>
35 #include <fcntl.h>
36 #include <unistd.h>
37 #include <iostream>
38 #include <sstream>
39 
40 #include <BESLog.h>
41 #include <BESDebug.h>
42 
43 #include "parser.h"
44 #include "heos5cfdap.h"
45 #include "h5cfdaputil.h"
46 #include "HDF5CFByte.h"
47 #include "HDF5CFUInt16.h"
48 #include "HDF5CFInt16.h"
49 #include "HDF5CFUInt32.h"
50 #include "HDF5CFInt32.h"
51 #include "HDF5CFFloat32.h"
52 #include "HDF5CFFloat64.h"
53 #include "HDF5CFStr.h"
54 #include "HDF5CFArray.h"
55 #include "HDFEOS5CFMissLLArray.h"
58 #include "HDF5RequestHandler.h"
59 
60 #include "he5dds.tab.hh"
61 #include "HE5Parser.h"
62 #include "HE5Checker.h"
63 #include "he5das.tab.hh"
64 
65 struct yy_buffer_state;
66 
67 yy_buffer_state *he5dds_scan_string(const char *str);
68 int he5ddsparse(HE5Parser *he5parser);
69 int he5dasparse(libdap::parser_arg *arg);
70 int he5ddslex_destroy();
71 int he5daslex_destroy();
72 
74 yy_buffer_state *he5das_scan_string(const char *str);
75 
76 using namespace HDF5CF;
77 
78 // Map EOS5 to DAP DDS
79 void map_eos5_cfdds(DDS &dds, hid_t file_id, const string & filename) {
80 
81  BESDEBUG("h5","Coming to HDF-EOS5 products DDS mapping function map_eos5_cfdds "<<endl);
82 
83 
84  string st_str ="";
85  string core_str="";
86  string arch_str="";
87  string xml_str ="";
88  string subset_str="";
89  string product_str="";
90  string other_str ="";
91  bool st_only = true;
92 
93  // Read ECS metadata: merge them into one C++ string
94  read_ecs_metadata(file_id,st_str,core_str,arch_str,xml_str, subset_str,product_str,other_str,st_only);
95  if(""==st_str) {
96  string msg =
97  "unable to obtain the HDF-EOS5 struct metadata ";
98  throw InternalErr(__FILE__, __LINE__, msg);
99  }
100 
101  bool is_check_nameclashing = HDF5RequestHandler::get_check_name_clashing();
102 
103  EOS5File *f = NULL;
104 
105  try {
106  f = new EOS5File(filename.c_str(),file_id);
107  }
108  catch(...) {
109  throw InternalErr(__FILE__,__LINE__,"Cannot allocate the file object.");
110  }
111 
112  bool include_attr = false;
113 
114  // This first "try-catch" block will use the parsed info
115  try {
116 
117  // Parse the structmetadata
118  HE5Parser p;
119  HE5Checker c;
120  he5dds_scan_string(st_str.c_str());
121  he5ddsparse(&p);
122  he5ddslex_destroy();
123 
124  // Retrieve ProjParams from StructMetadata
125  p.add_projparams(st_str);
126 #if 0
127  //p.print();
128 #endif
129 
130  // Check if the HDF-EOS5 grid has valid parameters and projection codes.
131  if (c.check_grids_unknown_parameters(&p)) {
132  throw InternalErr("Unknown HDF-EOS5 grid parameters found in the file");
133  }
134 
135  if (c.check_grids_missing_projcode(&p)) {
136  throw InternalErr("The HDF-EOS5 file is missing the projection code ");
137  }
138 
139  // We gradually add support for different projection codes.
140  if (c.check_grids_support_projcode(&p)) {
141  throw InternalErr("The current projection code is not supported");
142  }
143 
144  // HDF-EOS5 provides default pixel registration and origin values if they are not defined.
145  c.set_grids_missing_pixreg_orig(&p);
146 
147  // Check whether this multi-grid file needs multiple (per-grid) lat/lon coordinate variables.
148  bool grids_mllcv = c.check_grids_multi_latlon_coord_vars(&p);
149 
150  // Retrieve all HDF5 info(Not the values)
151  f->Retrieve_H5_Info(filename.c_str(),file_id,include_attr);
152 
153  // Adjust EOS5 Dimension names/sizes based on the parsed results
154  f->Adjust_EOS5Dim_Info(&p);
155 
156  // Translate the parsed output to HDF-EOS5 grids/swaths/zonal averages.
157  // Several maps related to dimensions and coordinates are set up here.
158  f->Add_EOS5File_Info(&p, grids_mllcv);
159 
160  // Add the dimension names
161  f->Add_Dim_Name(&p);
162  }
163  catch (HDF5CF::Exception &e){
164  if(f!=NULL)
165  delete f;
166  throw InternalErr(e.what());
167  }
168  catch(...) {
169  if(f!=NULL)
170  delete f;
171  throw;
172  }
173 
174  // The parsed StructMetadata info is no longer used in this "try-catch" block.
175  try {
176 
177  // NASA Aura files need special handling. So first check if this file is an Aura file.
179 
180  // Adjust the variable name
182 
183  // Handle coordinate variables
184  f->Handle_CVar();
185 
186  // Adjust variable and dimension names again based on the handling of coordinate variables.
188 
189 
190  // We need to use the CV units to distinguish lat/lon from the 3rd CV when
191  // memory cache is turned on.
192  if((HDF5RequestHandler::get_lrdata_mem_cache() != NULL) ||
193  (HDF5RequestHandler::get_srdata_mem_cache() != NULL)){
194 
195  // Handle unsupported datatypes including the attributes
196  f->Handle_Unsupported_Dtype(true);
197 
198  // Handle unsupported dataspaces including the attributes
199  f->Handle_Unsupported_Dspace(true);
200 
201  // We need to retrieve coordinate variable attributes for memory cache use.
203 
204  }
205  else {
206 
207  // Handle unsupported datatypes
208  f->Handle_Unsupported_Dtype(include_attr);
209 
210  // Handle unsupported dataspaces
211  f->Handle_Unsupported_Dspace(include_attr);
212 
213  }
214 
215 
216  // Need to retrieve the units of CV when memory cache is turned on.
217  // The units of CV will be used to distinguish whether this CV is
218  // latitude/longitude or a third-dimension CV.
219  // isLatLon() will use the units value.
220  if((HDF5RequestHandler::get_lrdata_mem_cache() != NULL) ||
221  (HDF5RequestHandler::get_srdata_mem_cache() != NULL))
222  f->Adjust_Attr_Info();
223 
224  // May need to adjust the object names for special objects. Currently no operations
225  // are done in this routine.
226  f->Adjust_Obj_Name();
227 
228  // Flatten the object name
229  f->Flatten_Obj_Name(include_attr);
230 
231  // Handle name clashing
232  if(true == is_check_nameclashing)
233  f->Handle_Obj_NameClashing(include_attr);
234 
235  // Check if this file should follow COARDS; if yes, set the COARDS flag.
236  f->Set_COARDS_Status();
237 
238  // For COARDS, the dimension name needs to be changed.
239  f->Adjust_Dim_Name();
240  if(true == is_check_nameclashing)
242 
243  // We need to turn off the very long string in the TES file to avoid
244  // choking the netCDF Java tools. So this special variable routine
245  // is listed last. We may be able to remove this workaround once netCDF
246  // can handle long strings better.
247  f->Handle_SpVar();
248  }
249  catch (HDF5CF::Exception &e){
250  if(f != NULL)
251  delete f;
252  throw InternalErr(e.what());
253  }
254 
255  // Generate EOS5 DDS
256  try {
257  gen_eos5_cfdds(dds,f);
258  }
259  catch(...) {
260  if (f!=NULL)
261  delete f;
262  throw;
263  }
264 
265  if (f!=NULL)
266  delete f;
267 }
268 
269 // Map EOS5 to DAP DAS
270 void map_eos5_cfdas(DAS &das, hid_t file_id, const string &filename) {
271 
272  BESDEBUG("h5","Coming to HDF-EOS5 products DAS mapping function map_eos5_cfdas "<<endl);
273  string st_str ="";
274  string core_str="";
275  string arch_str="";
276  string xml_str ="";
277  string subset_str="";
278  string product_str="";
279  string other_str ="";
280  bool st_only = true;
281 
282  read_ecs_metadata(file_id,st_str,core_str,arch_str,xml_str, subset_str,product_str,other_str,st_only);
283  if(""==st_str) {
284  string msg =
285  "unable to obtain the HDF-EOS5 struct metadata ";
286  throw InternalErr(__FILE__, __LINE__, msg);
287  }
288 
289  bool is_check_nameclashing = HDF5RequestHandler::get_check_name_clashing();
290 
291  bool is_add_path_attrs = HDF5RequestHandler::get_add_path_attrs();
292 
293  EOS5File *f = NULL;
294  try {
295  f = new EOS5File(filename.c_str(),file_id);
296  }
297  catch(...) {
298  throw InternalErr(__FILE__,__LINE__,"Cannot allocate the file object.");
299  }
300  bool include_attr = true;
301 
302  // The first "try-catch" block will use the parsed info.
303  try {
304 
305  HE5Parser p;
306  HE5Checker c;
307  he5dds_scan_string(st_str.c_str());
308 
309  he5ddsparse(&p);
310  he5ddslex_destroy();
311  p.add_projparams(st_str);
312 #if 0
313  //p.print();
314  // cerr<<"main loop p.za_list.size() = "<<p.za_list.size() <<endl;
315 #endif
316 
317  if (c.check_grids_unknown_parameters(&p)) {
318  throw InternalErr("Unknown HDF-EOS5 grid parameters found in the file");
319  }
320 
321  if (c.check_grids_missing_projcode(&p)) {
322  throw InternalErr("The HDF-EOS5 file is missing the projection code ");
323  }
324  if (c.check_grids_support_projcode(&p)) {
325  throw InternalErr("The current projection code is not supported");
326  }
327  c.set_grids_missing_pixreg_orig(&p);
328 
329  bool grids_mllcv = c.check_grids_multi_latlon_coord_vars(&p);
330 
331  f->Retrieve_H5_Info(filename.c_str(),file_id,include_attr);
332  f->Adjust_EOS5Dim_Info(&p);
333  f->Add_EOS5File_Info(&p, grids_mllcv);
334  f->Add_Dim_Name(&p);
335  }
336  catch (HDF5CF::Exception &e){
337  if(f != NULL)
338  delete f;
339  throw InternalErr(e.what());
340  }
341  catch(...) {
342  if(f != NULL)
343  delete f;
344  throw;
345  }
346 
347  try {
350  f->Handle_CVar();
352  f->Handle_Unsupported_Dtype(include_attr);
353 
354  // Remove unsupported dataspace
355  f->Handle_Unsupported_Dspace(include_attr);
356 
357  // Need to retrieve the attribute values.
359 
360 
361  // Handle other unsupported objects;
362  // currently this mainly generates the info. for the
363  // unsupported objects other than datatypes, dataspaces, links and named datatypes.
364  // This function needs to be called after retrieving the supported attributes.
365  f->Handle_Unsupported_Others(include_attr);
366 
367  // Add/adjust CF attributes
368  f->Adjust_Attr_Info();
369  f->Adjust_Obj_Name();
370  f->Flatten_Obj_Name(include_attr);
371  if (true == is_check_nameclashing)
372  f->Handle_Obj_NameClashing(include_attr);
373  f->Set_COARDS_Status();
374 
375 #if 0
376  //f->Adjust_Dim_Name();
377  //if(true == is_check_nameclashing)
378  // f->Handle_DimNameClashing();
379 #endif
380 
381  // Add supplemental attributes
382  f->Add_Supplement_Attrs(is_add_path_attrs);
383 
384  // Handle coordinate attributes
385  f->Handle_Coor_Attr();
386  f->Handle_SpVar_Attr();
387  }
388  catch (HDF5CF::Exception &e){
389  if(f != NULL)
390  delete f;
391  throw InternalErr(e.what());
392  }
393 
394  // Generate DAS for the EOS5
395  try {
396  gen_eos5_cfdas(das,file_id,f);
397  }
398  catch(...) {
399  if (f != NULL)
400  delete f;
401  throw;
402  }
403 
404  if( f != NULL)
405  delete f;
406 
407 }
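
// A hedged usage sketch (added for illustration, not part of the original handler):
// how a caller could drive the two mapping functions above. The helper name
// example_build_eos5_dap_objects and the direct H5Fopen call are assumptions;
// in the real server the BES HDF5 request handler owns the file id and the
// DDS/DAS objects.
#if 0
static void example_build_eos5_dap_objects(const string &h5_path, DDS &dds, DAS &das)
{
    // Open the HDF-EOS5 file read-only; the caller is responsible for closing it.
    hid_t file_id = H5Fopen(h5_path.c_str(), H5F_ACC_RDONLY, H5P_DEFAULT);
    if (file_id < 0)
        throw InternalErr(__FILE__, __LINE__, "Cannot open the HDF5 file " + h5_path);

    try {
        // Structure first (DDS), then attributes (DAS); both parse StructMetadata internally.
        map_eos5_cfdds(dds, file_id, h5_path);
        map_eos5_cfdas(das, file_id, h5_path);
    }
    catch (...) {
        H5Fclose(file_id);
        throw;
    }
    H5Fclose(file_id);
}
#endif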
408 
409 // Generate DDS for the EOS5
410 void gen_eos5_cfdds(DDS &dds, HDF5CF::EOS5File *f) {
411 
412  BESDEBUG("h5","Coming to HDF-EOS5 products DDS generation function gen_eos5_cfdds "<<endl);
413  const vector<HDF5CF::Var *>& vars = f->getVars();
414  const vector<HDF5CF::EOS5CVar *>& cvars = f->getCVars();
415  const string filename = f->getPath();
416  const hid_t file_id = f->getFileID();
417 
418  // Read Variable info.
419  vector<HDF5CF::Var *>::const_iterator it_v;
420  vector<HDF5CF::EOS5CVar *>::const_iterator it_cv;
421 
422  for (it_v = vars.begin(); it_v !=vars.end();++it_v) {
423  BESDEBUG("h5","variable full path= "<< (*it_v)->getFullPath() <<endl);
424  gen_dap_onevar_dds(dds,*it_v,file_id,filename);
425  }
426 
427  for (it_cv = cvars.begin(); it_cv !=cvars.end();++it_cv) {
428  BESDEBUG("h5","variable full path= "<< (*it_cv)->getFullPath() <<endl);
429  gen_dap_oneeos5cvar_dds(dds,*it_cv,file_id,filename);
430 
431  }
432 
433  // We need to provide grid_mapping info. for multiple grids.
434  // Here cv_lat_miss_index represents the index of a missing-latitude CV (an HDF-EOS grid without the latitude field).
435  // This index is used to create the grid_mapping variable for different grids.
436  unsigned short cv_lat_miss_index = 1;
437  for (it_cv = cvars.begin(); it_cv !=cvars.end();++it_cv) {
438  if((*it_cv)->getCVType() == CV_LAT_MISS) {
439  if((*it_cv)->getProjCode() != HE5_GCTP_GEO) {
440  // Here we need to add grid_mapping variables for each grid
441  // for projections other than sinusoidal, since attribute values for LAMAZ and PS
442  // differ for each grid.
443  gen_dap_oneeos5cf_dds(dds,*it_cv);
444  add_cf_grid_mapinfo_var(dds,(*it_cv)->getProjCode(),cv_lat_miss_index);
445  cv_lat_miss_index++;
446  }
447  }
448  }
449 }
450 
451 void gen_dap_oneeos5cf_dds(DDS &dds,const HDF5CF::EOS5CVar* cvar) {
452 
453  BESDEBUG("h5","Coming to gen_dap_oneeos5cf_dds() "<<endl);
454 
455  float cv_point_lower = cvar->getPointLower();
456  float cv_point_upper = cvar->getPointUpper();
457  float cv_point_left = cvar->getPointLeft();
458  float cv_point_right = cvar->getPointRight();
459  EOS5GridPCType cv_proj_code = cvar->getProjCode();
460  const vector<HDF5CF::Dimension *>& dims = cvar->getDimensions();
461  if(dims.size() !=2)
462  throw InternalErr(__FILE__,__LINE__,"Currently we only support the 2-D CF coordinate projection system.");
463  add_cf_grid_cvs(dds,cv_proj_code,cv_point_lower,cv_point_upper,cv_point_left,cv_point_right,dims);
464 
465 }
466 
467 void gen_dap_oneeos5cf_das(DAS &das,const vector<HDF5CF::Var*>& vars, const HDF5CF::EOS5CVar* cvar,const unsigned short g_suffix) {
468 
469  BESDEBUG("h5","Coming to gen_dap_oneeos5cf_das() "<<endl);
470 #if 0
471  float cv_point_lower = cvar->getPointLower();
472  float cv_point_upper = cvar->getPointUpper();
473  float cv_point_left = cvar->getPointLeft();
474  float cv_point_right = cvar->getPointRight();
475 #endif
476  EOS5GridPCType cv_proj_code = cvar->getProjCode();
477  const vector<HDF5CF::Dimension *>& dims = cvar->getDimensions();
478 
479 #if 0
480 cerr<<"cv_point_lower is "<<cv_point_lower <<endl;
481 cerr<<"cvar name is "<<cvar->getName() <<endl;
482 for(vector<HDF5CF::Dimension*>::const_iterator it_d = dims.begin(); it_d != dims.end(); ++it_d)
483  cerr<<"dim name das is "<<(*it_d)->getNewName() <<endl;
484 #endif
485 
486  if(dims.size() !=2)
487  throw InternalErr(__FILE__,__LINE__,"Currently we only support the 2-D CF coordinate projection system.");
488 #if 0
489  add_cf_grid_cv_attrs(das,vars,cv_proj_code,cv_point_lower,cv_point_upper,cv_point_left,cv_point_right,dims,cvar->getParams(),g_suffix);
490 #endif
491  add_cf_grid_cv_attrs(das,vars,cv_proj_code,dims,cvar->getParams(),g_suffix);
492 
493 }
494 
495 //For EOS5, generate the ignored object info. for the CF option
496 void gen_eos5_cf_ignored_obj_info(DAS &das, HDF5CF::EOS5File *f) {
497 
498  BESDEBUG("h5","Coming to gen_eos5_cf_ignored_obj_info() "<<endl);
499  AttrTable *at = das.get_table("Ignored_Object_Info");
500  if (NULL == at)
501  at = das.add_table("Ignored_Object_Info", new AttrTable);
502 
503  at->append_attr("Message","String",f->Get_Ignored_Msg());
504 
505 
506 }
507 
508 // Generate DDS for EOS5 coordinate variables
509 void gen_dap_oneeos5cvar_dds(DDS &dds,const HDF5CF::EOS5CVar* cvar, const hid_t file_id, const string & filename) {
510 
511  BESDEBUG("h5","Coming to gen_dap_oneeos5cvar_dds() "<<endl);
512  BaseType *bt = NULL;
513 
514  // TODO: need to handle 64-bit integer for DAP4 CF
515  if(cvar->getType()==H5INT64 || cvar->getType() == H5UINT64)
516  return;
517  switch(cvar->getType()) {
518 #define HANDLE_CASE(tid,type) \
519  case tid: \
520  bt = new (type)(cvar->getNewName(),cvar->getFullPath()); \
521  break;
522 
523  HANDLE_CASE(H5FLOAT32, HDF5CFFloat32);
524  HANDLE_CASE(H5FLOAT64, HDF5CFFloat64);
525  HANDLE_CASE(H5CHAR,HDF5CFInt16);
526  HANDLE_CASE(H5UCHAR, HDF5CFByte);
527  HANDLE_CASE(H5INT16, HDF5CFInt16);
528  HANDLE_CASE(H5UINT16, HDF5CFUInt16);
529  HANDLE_CASE(H5INT32, HDF5CFInt32);
530  HANDLE_CASE(H5UINT32, HDF5CFUInt32);
531  HANDLE_CASE(H5FSTRING, Str);
532  HANDLE_CASE(H5VSTRING, Str);
533  default:
534  throw InternalErr(__FILE__,__LINE__,"unsupported data type.");
535 #undef HANDLE_CASE
536  }
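
// For reference (added comment, illustration only): a single HANDLE_CASE expansion,
// written out by hand for the first case above, looks like
//
//   case H5FLOAT32:
//       bt = new (HDF5CFFloat32)(cvar->getNewName(), cvar->getFullPath());
//       break;
//
// i.e. each HDF5 CF type id is mapped to the matching DAP prototype object, which the
// code below wraps in a concrete array class and then deletes.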
537 
538  if (bt) {
539 
540  const vector<HDF5CF::Dimension *>& dims = cvar->getDimensions();
541  vector <HDF5CF::Dimension*>:: const_iterator it_d;
542  vector <size_t> dimsizes;
543  dimsizes.resize(cvar->getRank());
544  for(int i = 0; i <cvar->getRank();i++)
545  dimsizes[i] = (dims[i])->getSize();
546 
547 
548  if(dims.size() == 0)
549  throw InternalErr(__FILE__,__LINE__,"the coordinate variables cannot be scalar.");
550  switch(cvar->getCVType()) {
551 
552  case CV_EXIST:
553  {
554 
555 #if 0
556 for(vector<HDF5CF::Attribute *>::const_iterator it_ra = cvar->getAttributes().begin();
557  it_ra != cvar->getAttributes().end(); ++it_ra) {
558 cerr<<"cvar attribute name is "<<(*it_ra)->getNewName() <<endl;
559 cerr<<"cvar attribute value type is "<<(*it_ra)->getType() <<endl;
560 }
561 cerr<<"cvar new name exist at he s5cfdap.cc is "<<cvar->getNewName() <<endl;
562 #endif
563  bool is_latlon = cvar->isLatLon();
564  HDF5CFArray *ar = NULL;
565  try {
566  ar = new HDF5CFArray (
567  cvar->getRank(),
568  file_id,
569  filename,
570  cvar->getType(),
571  dimsizes,
572  cvar->getFullPath(),
573  cvar->getTotalElems(),
574  CV_EXIST,
575  is_latlon,
576  cvar->getCompRatio(),
577  cvar->getNewName(),
578  bt);
579  }
580  catch (...) {
581  delete bt;
582  throw InternalErr(__FILE__,__LINE__,"unable to allocate memory for HDF5CFArray.");
583  }
584 
585  for(it_d = dims.begin(); it_d != dims.end(); ++it_d) {
586  if (""==(*it_d)->getNewName())
587  ar->append_dim((*it_d)->getSize());
588  else
589  ar->append_dim((*it_d)->getSize(), (*it_d)->getNewName());
590  }
591 
592  dds.add_var(ar);
593  delete bt;
594  delete ar;
595  }
596  break;
597 
598  case CV_LAT_MISS:
599  case CV_LON_MISS:
600  {
601 
602  HDFEOS5CFMissLLArray *ar = NULL;
603  try {
604 #if 0
605 cerr<<"cvar zone here is "<<cvar->getZone() <<endl;
606 cerr<<"cvar Sphere here is "<<cvar->getSphere() <<endl;
607 cerr<<"cvar getParams here 1 is "<<cvar->getParams()[0]<<endl;
608 #endif
609  ar = new HDFEOS5CFMissLLArray (
610  cvar->getRank(),
611  filename,
612  file_id,
613  cvar->getFullPath(),
614  cvar->getCVType(),
615  cvar->getPointLower(),
616  cvar->getPointUpper(),
617  cvar->getPointLeft(),
618  cvar->getPointRight(),
619  cvar->getPixelReg(),
620  cvar->getOrigin(),
621  cvar->getProjCode(),
622  cvar->getParams(),
623  cvar->getZone(),
624  cvar->getSphere(),
625  cvar->getXDimSize(),
626  cvar->getYDimSize(),
627  cvar->getNewName(),
628  bt);
629  }
630  catch (...) {
631  delete bt;
632  throw InternalErr(__FILE__,__LINE__,"unable to allocate memory for HDFEOS5CFMissLLArray.");
633  }
634 
635  for(it_d = dims.begin(); it_d != dims.end(); ++it_d) {
636  if (""==(*it_d)->getNewName())
637  ar->append_dim((*it_d)->getSize());
638  else
639  ar->append_dim((*it_d)->getSize(), (*it_d)->getNewName());
640  }
641 
642  dds.add_var(ar);
643  delete bt;
644  delete ar;
645  }
646  break;
647 
648  case CV_NONLATLON_MISS:
649  {
650 
651  if (cvar->getRank() !=1) {
652  delete bt;
653  throw InternalErr(__FILE__, __LINE__, "The rank of missing Z dimension field must be 1");
654  }
655  int nelem = (cvar->getDimensions()[0])->getSize();
656 
657  HDFEOS5CFMissNonLLCVArray *ar = NULL;
658  try {
659  ar = new HDFEOS5CFMissNonLLCVArray(
660  cvar->getRank(),
661  nelem,
662  cvar->getNewName(),
663  bt);
664  }
665  catch (...) {
666  delete bt;
667  throw InternalErr(__FILE__,__LINE__,"unable to allocate memory for HDFEOS5CFMissNonLLCVArray.");
668  }
669 
670 
671  for(it_d = dims.begin(); it_d != dims.end(); it_d++) {
672  if (""==(*it_d)->getNewName())
673  ar->append_dim((*it_d)->getSize());
674  else
675  ar->append_dim((*it_d)->getSize(), (*it_d)->getNewName());
676  }
677  dds.add_var(ar);
678  delete bt;
679  delete ar;
680 
681 
682  }
683  break;
684  case CV_SPECIAL:
685  // Currently only Aura TES files are supported. May need to revise when more
686  // special products show up. KY 2012-2-3
687  {
688 
689  if (cvar->getRank() !=1) {
690  delete bt;
691  throw InternalErr(__FILE__, __LINE__, "The rank of missing Z dimension field must be 1");
692  }
693  int nelem = (cvar->getDimensions()[0])->getSize();
694  HDFEOS5CFSpecialCVArray *ar = NULL;
695 
696  try {
697  ar = new HDFEOS5CFSpecialCVArray(
698  cvar->getRank(),
699  filename,
700  file_id,
701  cvar->getType(),
702  nelem,
703  cvar->getFullPath(),
704  cvar->getNewName(),
705  bt);
706  }
707  catch (...) {
708  delete bt;
709  throw InternalErr(__FILE__,__LINE__,"unable to allocate memory for HDF5CFArray.");
710  }
711 
712 
713  for(it_d = dims.begin(); it_d != dims.end(); ++it_d){
714  if (""==(*it_d)->getNewName())
715  ar->append_dim((*it_d)->getSize());
716  else
717  ar->append_dim((*it_d)->getSize(), (*it_d)->getNewName());
718  }
719  dds.add_var(ar);
720  delete bt;
721  delete ar;
722  }
723  break;
724  case CV_MODIFY:
725  default:
726  delete bt;
727  throw InternalErr(__FILE__,__LINE__,"Unsupported coordinate variable type.");
728  }
729 
730  }
731 
732 }
733 
734 // Generate EOS5 DAS
735 void gen_eos5_cfdas(DAS &das, hid_t file_id, HDF5CF::EOS5File *f) {
736 
737  BESDEBUG("h5","Coming to HDF-EOS5 products DAS generation function gen_eos5_cfdas "<<endl);
738 
739  // First check if this is for generating the ignored object info.
740  if(true == f->Get_IgnoredInfo_Flag()) {
741  gen_eos5_cf_ignored_obj_info(das, f);
742  return;
743  }
744 
745  const vector<HDF5CF::Var *>& vars = f->getVars();
746  const vector<HDF5CF::EOS5CVar *>& cvars = f->getCVars();
747  const vector<HDF5CF::Group *>& grps = f->getGroups();
748  const vector<HDF5CF::Attribute *>& root_attrs = f->getAttributes();
749 
750  vector<HDF5CF::Var *>::const_iterator it_v;
751  vector<HDF5CF::EOS5CVar *>::const_iterator it_cv;
752  vector<HDF5CF::Group *>::const_iterator it_g;
753  vector<HDF5CF::Attribute *>::const_iterator it_ra;
754 
755  // Handle the file attributes (attributes under the root group).
756  // The table name is "HDF_GLOBAL".
757  if (false == root_attrs.empty()) {
758  AttrTable *at = das.get_table(FILE_ATTR_TABLE_NAME);
759  if (NULL == at)
760  at = das.add_table(FILE_ATTR_TABLE_NAME, new AttrTable);
761 
762  for (it_ra = root_attrs.begin(); it_ra != root_attrs.end(); it_ra++) {
763  gen_dap_oneobj_das(at,*it_ra,NULL);
764  }
765  }
766 
767  if (false == grps.empty()) {
768  for (it_g = grps.begin();
769  it_g != grps.end(); ++it_g) {
770  AttrTable *at = das.get_table((*it_g)->getNewName());
771  if (NULL == at)
772  at = das.add_table((*it_g)->getNewName(), new AttrTable);
773 
774  for (it_ra = (*it_g)->getAttributes().begin();
775  it_ra != (*it_g)->getAttributes().end(); ++it_ra) {
776  gen_dap_oneobj_das(at,*it_ra,NULL);
777  }
778  }
779  }
780 
781  for (it_v = vars.begin();
782  it_v != vars.end(); ++it_v) {
783  if (false == ((*it_v)->getAttributes().empty())) {
784 
785  // TODO: Need to handle 64-bit int support for DAP4 CF.
786  if(H5INT64 == (*it_v)->getType() || H5UINT64 == (*it_v)->getType()){
787  continue;
788  }
789 
790  AttrTable *at = das.get_table((*it_v)->getNewName());
791  if (NULL == at)
792  at = das.add_table((*it_v)->getNewName(), new AttrTable);
793 
794  for (it_ra = (*it_v)->getAttributes().begin();
795  it_ra != (*it_v)->getAttributes().end(); ++it_ra) {
796  gen_dap_oneobj_das(at,*it_ra,*it_v);
797  }
798  }
799  }
800 
801  for (it_cv = cvars.begin(); it_cv !=cvars.end();it_cv++) {
802 
803  if (false == ((*it_cv)->getAttributes().empty())) {
804 
805  // TODO: Need to handle 64-bit int support for DAP4 CF.
806  if(H5INT64 == (*it_cv)->getType() || H5UINT64 == (*it_cv)->getType()){
807  continue;
808  }
809 
810  AttrTable *at = das.get_table((*it_cv)->getNewName());
811  if (NULL == at)
812  at = das.add_table((*it_cv)->getNewName(), new AttrTable);
813 
814 
815  for (it_ra = (*it_cv)->getAttributes().begin();
816  it_ra != (*it_cv)->getAttributes().end(); ++it_ra) {
817  gen_dap_oneobj_das(at,*it_ra,*it_cv);
818  }
819  }
820  }
821 
822  // Add CF 1-D projection variables
823  unsigned short cv_lat_miss_index = 1;
824  // This code block will add grid_mapping attribute info. to corresponding variables.
825  for (it_cv = cvars.begin(); it_cv !=cvars.end();++it_cv) {
826  if((*it_cv)->getCVType() == CV_LAT_MISS) {
827  if((*it_cv)->getProjCode() != HE5_GCTP_GEO) {
828  gen_dap_oneeos5cf_das(das,vars,*it_cv,cv_lat_miss_index);
829  cv_lat_miss_index++;
830  }
831  }
832  }
833 
834  for (it_cv = cvars.begin(); it_cv !=cvars.end();++it_cv) {
835  if((*it_cv)->getProjCode() == HE5_GCTP_LAMAZ) {
836  if((*it_cv)->getCVType() == CV_LAT_MISS || (*it_cv)->getCVType() == CV_LON_MISS) {
837  AttrTable *at = das.get_table((*it_cv)->getNewName());
838  if (NULL == at)
839  at = das.add_table((*it_cv)->getNewName(), new AttrTable);
840  if((*it_cv)->getCVType() == CV_LAT_MISS)
841  add_ll_valid_range(at,true);
842  else
843  add_ll_valid_range(at,false);
844  }
845  }
846  }
847 
848 
849  bool disable_ecsmetadata = HDF5RequestHandler::get_disable_ecsmeta();
850 
851  if(disable_ecsmetadata == false) {
852 
853  // To keep backward compatibility with the old handler,
854  // we parse the special ECS metadata into DAP attributes.
855 
856  string st_str ="";
857  string core_str="";
858  string arch_str="";
859  string xml_str ="";
860  string subset_str="";
861  string product_str="";
862  string other_str ="";
863  bool st_only = false;
864 
865  read_ecs_metadata(file_id, st_str, core_str, arch_str, xml_str,
866  subset_str, product_str, other_str, st_only);
867 
868 #if 0
869 if(st_str!="") BESDEBUG("h5","Final structmetadata "<<st_str <<endl);
870 if(core_str!="") BESDEBUG("h5","Final coremetadata "<<core_str <<endl);
871 if(arch_str!="") BESDEBUG("h5","Final archivedmetadata "<<arch_str <<endl);
872 if(xml_str!="") BESDEBUG("h5","Final xmlmetadata "<<xml_str <<endl);
873 if(subset_str!="") BESDEBUG("h5","Final subsetmetadata "<<subset_str <<endl);
874 if(product_str!="") BESDEBUG("h5","Final productmetadata "<<product_str <<endl);
875 if(other_str!="") BESDEBUG("h5","Final othermetadata "<<other_str <<endl);
876 
877 #endif
878  if(st_str != ""){
879 
880 #if 0
881  string check_disable_smetadata_key ="H5.DisableStructMetaAttr";
882  bool is_check_disable_smetadata = false;
883  is_check_disable_smetadata = HDF5CFDAPUtil::check_beskeys(check_disable_smetadata_key);
884 #endif
885  bool is_check_disable_smetadata = HDF5RequestHandler::get_disable_structmeta();
886 
887  if (false == is_check_disable_smetadata) {
888 
889  AttrTable *at = das.get_table("StructMetadata");
890  if (NULL == at)
891  at = das.add_table("StructMetadata", new AttrTable);
892  parser_arg arg(at);
893 
894  he5das_scan_string((const char*) st_str.c_str());
895  if (he5dasparse(&arg) != 0
896  || false == arg.status()){
897 
898  (*BESLog::TheLog())<< "HDF-EOS5 parse error while processing a "
899  << "StructMetadata " << " HDFEOS attribute" << endl;
900  }
901 
902  he5daslex_destroy();
903 
904  }
905  }
906 
907  if(core_str != ""){
908  AttrTable *at = das.get_table("CoreMetadata");
909  if (NULL == at)
910  at = das.add_table("CoreMetadata", new AttrTable);
911  parser_arg arg(at);
912  he5das_scan_string((const char*) core_str.c_str());
913  if (he5dasparse(&arg) != 0
914  || false == arg.status()){
915 
916  (*BESLog::TheLog())<< "HDF-EOS5 parse error while processing a "
917  << "CoreMetadata " << " HDFEOS attribute" << endl;
918  }
919 
920  he5daslex_destroy();
921  }
922  if(arch_str != ""){
923  AttrTable *at = das.get_table("ArchiveMetadata");
924  if (NULL == at)
925  at = das.add_table("ArchiveMetadata", new AttrTable);
926  parser_arg arg(at);
927  he5das_scan_string((const char*) arch_str.c_str());
928  if (he5dasparse(&arg) != 0
929  || false == arg.status()){
930 
931  (*BESLog::TheLog())<< "HDF-EOS5 parse error while processing a "
932  << "ArchiveMetadata " << " HDFEOS attribute" << endl;
933  }
934  he5daslex_destroy();
935  }
936 
937  // The XML attribute includes double quotes ("), which will choke the netCDF Java library.
938  // So we replace each double quote (") with &quote. This is currently the OPeNDAP way.
939  // The XML attribute cannot be parsed, so just pass the string through.
940  if(xml_str != ""){
941  AttrTable *at = das.get_table("XMLMetadata");
942  if (NULL == at)
943  at = das.add_table("XMLMetadata", new AttrTable);
944  HDF5CFDAPUtil::replace_double_quote(xml_str);
945  at->append_attr("Contents","String",xml_str);
946  }
947 
948  // SubsetMetadata and ProductMetadata exist in HDF-EOS2 files.
949  // So far we haven't found such metadata in NASA HDF-EOS5 files,
950  // but will keep an eye on it. KY 2012-3-6
951  if(subset_str != ""){
952  AttrTable *at = das.get_table("SubsetMetadata");
953  if (NULL == at)
954  at = das.add_table("SubsetMetadata", new AttrTable);
955  parser_arg arg(at);
956  he5das_scan_string((const char*) subset_str.c_str());
957  if (he5dasparse(&arg) != 0
958  || false == arg.status()) {
959 
960  (*BESLog::TheLog())<< "HDF-EOS5 parse error while processing a "
961  << "SubsetMetadata " << " HDFEOS attribute" << endl;
962  }
963  he5daslex_destroy();
964  }
965  if(product_str != ""){
966  AttrTable *at = das.get_table("ProductMetadata");
967  if (NULL == at)
968  at = das.add_table("ProductMetadata", new AttrTable);
969  parser_arg arg(at);
970  he5das_scan_string((const char*) product_str.c_str());
971  if (he5dasparse(&arg) != 0
972  || false == arg.status()){
973  (*BESLog::TheLog())<< "HDF-EOS5 parse error while processing a "
974  << "ProductMetadata " << " HDFEOS attribute" << endl;
975  }
976  he5daslex_destroy();
977  }
978 
979  // All other metadata under "HDF-EOS Information" will not be
980  // parsed since we don't know how to parse them.
981  // We will simply pass a string to the DAS.
982  if (other_str != ""){
983  AttrTable *at = das.get_table("OtherMetadata");
984  if (NULL == at)
985  at = das.add_table("OtherMetadata", new AttrTable);
986  at->append_attr("Contents","String",other_str);
987  }
988 
989  }
990  // Check all unlimited dimensions from the coordinate variables based on the names.
991  if(f->HaveUnlimitedDim() == true) {
992 
993  AttrTable *at = das.get_table("DODS_EXTRA");
994  if (NULL == at)
995  at = das.add_table("DODS_EXTRA", new AttrTable);
996  string unlimited_names;
997 
998  for (it_cv = cvars.begin();
999  it_cv != cvars.end(); it_cv++) {
1000 #if 0
1001  bool has_unlimited_dim = false;
1002 #endif
1003  // Check unlimited dimension names.
1004  for (vector<Dimension*>::const_iterator ird = (*it_cv)->getDimensions().begin();
1005  ird != (*it_cv)->getDimensions().end(); ++ird) {
1006 
1007  // Currently we only check one unlimited dimension, which is the most
1008  // common case. When receiving the conventions from JG, we will add
1009  // support for multiple unlimited dimensions. KY 2016-02-09
1010  if((*ird)->HaveUnlimitedDim() == true) {
1011 
1012  if(unlimited_names=="") {
1013  unlimited_names = (*ird)->getNewName();
1014  at->append_attr("Unlimited_Dimension","String",unlimited_names);
1015  }
1016  else {
1017  if(unlimited_names.rfind((*ird)->getNewName()) == string::npos) {
1018  unlimited_names = unlimited_names+" "+(*ird)->getNewName();
1019  at->append_attr("Unlimited_Dimension","String",(*ird)->getNewName());
1020  }
1021  }
1022  }
1023 
1024  }
1025 
1026 #if 0
1027  //if(true == has_unlimited_dim)
1028  // break;
1029 #endif
1030  }
1031 #if 0
1032  //if(unlimited_names!="")
1033  // at->append_attr("Unlimited_Dimension","String",unlimited_names);
1034 #endif
1035  }
1036 
1037 }
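
// Added note (illustration only): with a hypothetical unlimited dimension whose new
// name is "nTimes", the DODS_EXTRA handling above would produce a DAS fragment like
//
//   DODS_EXTRA {
//       String Unlimited_Dimension "nTimes";
//   }
//
// Each additional distinct unlimited dimension name is appended as another value of
// the Unlimited_Dimension attribute.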
1038 
1039 // Read ECS metadata
1040 void read_ecs_metadata(hid_t s_file_id,
1041  string &total_strmeta_value,
1042  string &total_coremeta_value,
1043  string &total_archmeta_value,
1044  string &total_xmlmeta_value,
1045  string &total_submeta_value,
1046  string &total_prometa_value,
1047  string &total_othermeta_value,
1048  bool s_st_only) {
1049 
1050  BESDEBUG("h5","Coming to read_ecs_metadata() "<<endl);
1051  string ecs_group = "/HDFEOS INFORMATION";
1052  hid_t ecs_grp_id = -1;
1053  if ((ecs_grp_id = H5Gopen(s_file_id, ecs_group.c_str(),H5P_DEFAULT))<0) {
1054  string msg =
1055  "h5_ecs_meta: unable to open the HDF5 group ";
1056  msg +=ecs_group;
1057  throw InternalErr(__FILE__, __LINE__, msg);
1058  }
1059 
1060  H5G_info_t g_info;
1061  hsize_t nelems = 0;
1062 
1063  if (H5Gget_info(ecs_grp_id,&g_info) <0) {
1064  string msg =
1065  "h5_ecs_meta: unable to obtain the HDF5 group info. for ";
1066  msg +=ecs_group;
1067  H5Gclose(ecs_grp_id);
1068  throw InternalErr(__FILE__, __LINE__, msg);
1069  }
1070 
1071  nelems = g_info.nlinks;
1072 
1073  ssize_t oname_size = 0;
1074 #if 0
1075  int cur_archmeta_suffix = 0;
1076  int cur_coremeta_suffix = 0;
1077  int cur_strmeta_suffix = 0;
1078  int cur_xmlmeta_suffix = 0;
1079 #endif
1080 
1081  int archmeta_num = -1;
1082  int coremeta_num = -1;
1083  int xmlmeta_num = -1;
1084  int strmeta_num = -1;
1085  int submeta_num = -1;
1086  int prometa_num = -1;
1087 
1088  // Initialize the total counts for the different kinds of metadata.
1089  int archmeta_num_total = 0;
1090  int coremeta_num_total = 0;
1091  int xmlmeta_num_total = 0;
1092  int strmeta_num_total = 0;
1093  int submeta_num_total = 0;
1094  int prometa_num_total = 0;
1095  int othermeta_num_total = 0;
1096 
1097  bool archmeta_no_suffix = true;
1098  bool coremeta_no_suffix = true;
1099  bool strmeta_no_suffix = true;
1100  bool xmlmeta_no_suffix = true;
1101  bool submeta_no_suffix = true;
1102  bool prometa_no_suffix = true;
1103 
1104  // Define a vector of string to hold all dataset names.
1105  vector<string> s_oname(nelems);
1106 
1107  // Define an EOS5Metadata array that describes the metadata type for each object.
1108  // We initialize the value to OtherMeta.
1109  EOS5Metadata metatype[nelems];
1110 
1111  for (unsigned int i =0; i<nelems; i++)
1112  metatype[i] = OtherMeta;
1113 
1114  for (hsize_t i = 0; i < nelems; i++) {
1115 
1116  // Query the length of the object name.
1117  oname_size =
1118  H5Lget_name_by_idx(ecs_grp_id,".",H5_INDEX_NAME,H5_ITER_NATIVE,i,NULL,
1119  0, H5P_DEFAULT);
1120  if (oname_size <= 0) {
1121  string msg = "hdf5 object name error from: ";
1122  msg += ecs_group;
1123  H5Gclose(ecs_grp_id);
1124  throw InternalErr(__FILE__, __LINE__, msg);
1125  }
1126 
1127  // Obtain the name of the object.
1128  vector<char> oname(oname_size + 1);
1129  if (H5Lget_name_by_idx(ecs_grp_id,".",H5_INDEX_NAME,H5_ITER_NATIVE,i,&oname[0],
1130  (size_t)(oname_size+1), H5P_DEFAULT)<0){
1131  string msg = "hdf5 object name error from: ";
1132  msg += ecs_group;
1133  H5Gclose(ecs_grp_id);
1134  throw InternalErr(__FILE__, __LINE__, msg);
1135  }
1136 
1137  // Check if this object is an HDF5 dataset; if not, throw an error.
1138  // First, check whether it is a hard link or a soft link.
1139  H5L_info_t linfo;
1140  if (H5Lget_info(ecs_grp_id,&oname[0],&linfo,H5P_DEFAULT)<0) {
1141  string msg = "hdf5 link name error from: ";
1142  msg += ecs_group;
1143  H5Gclose(ecs_grp_id);
1144  throw InternalErr(__FILE__, __LINE__, msg);
1145  }
1146 
1147  // Soft links are not supported; throw an error.
1148  if (linfo.type == H5L_TYPE_SOFT){
1149  string msg = "hdf5 link name error from: ";
1150  msg += ecs_group;
1151  H5Gclose(ecs_grp_id);
1152  throw InternalErr(__FILE__, __LINE__, msg);
1153  }
1154 
1155  // Obtain the object type
1156  H5O_info_t oinfo;
1157  if (H5Oget_info_by_idx(ecs_grp_id, ".", H5_INDEX_NAME, H5_ITER_NATIVE,
1158  i, &oinfo, H5P_DEFAULT)<0) {
1159  string msg = "Cannot obtain the object info ";
1160  msg += ecs_group;
1161  H5Gclose(ecs_grp_id);
1162  throw InternalErr(__FILE__, __LINE__, msg);
1163  }
1164 
1165  if(oinfo.type != H5O_TYPE_DATASET) {
1166  string msg = "hdf5 link name error from: ";
1167  msg += ecs_group;
1168  H5Gclose(ecs_grp_id);
1169  throw InternalErr(__FILE__, __LINE__, msg);
1170  }
1171 
1172  // We want to remove the last '\0' character added by C.
1173  string s_one_oname(oname.begin(),oname.end()-1);
1174  s_oname[i] = s_one_oname;
1175 
1176  // Calculate how many elements we have for each category(StructMetadata, CoreMetadata, etc.)
1177  if (((s_one_oname.find("StructMetadata"))==0) ||
1178  ((s_one_oname.find("structmetadata"))==0)){
1179 
1180  metatype[i] = StructMeta;
1181 
1182  // Do we have suffix for the metadata?
1183  // If this metadata doesn't have any suffix, it should only come to this loop once.
1184  // That's why, when checking the first time, no_suffix is always true.
1185  // If we have already found that it doesn't have any suffix,
1186  // it should not go into this loop. throw an error.
1187  if (false == strmeta_no_suffix) {
1188  string msg = "StructMetadata/structmetadata without suffix should only appear once. ";
1189  H5Gclose(ecs_grp_id);
1190  throw InternalErr(__FILE__, __LINE__, msg);
1191  }
1192 
1193  else if(strmeta_num_total >0)
1194  strmeta_num_total++;
1195  else { // either no suffix or the first time to loop the one having the suffix.
1196  if ((0 == s_one_oname.compare("StructMetadata"))||
1197  (0 == s_one_oname.compare("structmetadata")))
1198  strmeta_no_suffix = false;
1199  else strmeta_num_total++;
1200  }
1201 #if 0
1202 BESDEBUG("h5","strmeta_num_total= "<<strmeta_num_total <<endl);
1203 if(strmeta_no_suffix) BESDEBUG("h5","structmeta data doesn't have the suffix" <<endl);
1204 else BESDEBUG("h5","structmeta data has the suffix" <<endl);
1205 #endif
1206  }
1207 
1208  if(false == s_st_only) {
1209 
1210  if ((0 == (s_one_oname.find("CoreMetadata"))) ||
1211  (0 == (s_one_oname.find("coremetadata")))){
1212 
1213  metatype[i] = CoreMeta;
1214 
1215  // Do we have suffix for the metadata?
1216  // When checking the first time, no_suffix is always true.
1217  // If we have already found that it doesn't have any suffix,
1218  // it should not go into this loop anyway. throw an error.
1219  if (false == coremeta_no_suffix) {
1220  string msg = "CoreMetadata/coremetadata without suffix should only appear once. ";
1221  H5Gclose(ecs_grp_id);
1222  throw InternalErr(__FILE__, __LINE__, msg);
1223  }
1224 
1225  else if(coremeta_num_total >0)
1226  coremeta_num_total++;
1227  else { // either no suffix or the first time to loop the one having the suffix.
1228  // If no suffix is true, it should be out of the loop. In case it comes
1229  // to the loop again, we set "coremeta_no_suffix" to false so an error
1230  // can be thrown. This is counter-intuitive. Hopefully people can understand it.
1231  if ((0 == s_one_oname.compare("CoreMetadata")) ||
1232  (0 == s_one_oname.compare("coremetadata")))
1233  coremeta_no_suffix = false;
1234  else coremeta_num_total++;
1235  }
1236 #if 0
1237 BESDEBUG("h5","coremeta_num_total= "<<coremeta_num_total <<endl);
1238 if(coremeta_no_suffix) BESDEBUG("h5","coremeta data doesn't have the suffix" <<endl);
1239 else BESDEBUG("h5","coremeta data has the suffix" <<endl);
1240 #endif
1241  }
1242 
1243  // OMI has the metadata name as "ArchiveMetadata.0"
1244  else if ((0 == (s_one_oname.find("ArchivedMetadata"))) ||
1245  (0 == (s_one_oname.find("archivedmetadata"))) ||
1246  (0 == (s_one_oname.find("ArchiveMetadata"))) ||
1247  (0 == (s_one_oname.find("archivemetadata")))){
1248 
1249  metatype[i] = ArchivedMeta;
1250  // Do we have suffix for the metadata?
1251  // When checking the first time, no_suffix is always true.
1252  // If we have already found that it doesn't have any suffix,
1253  // it should not go into this loop anyway. throw an error.
1254  if (false == archmeta_no_suffix) {
1255  string msg = "archivedmetadata/ArchivedMetadata without suffix should only appear once. ";
1256  H5Gclose(ecs_grp_id);
1257  throw InternalErr(__FILE__, __LINE__, msg);
1258  }
1259 
1260  else if(archmeta_num_total >0)
1261  archmeta_num_total++;
1262  else { // either no suffix or the first time to loop the one having the suffix.
1263  if ((0 == s_one_oname.compare("ArchivedMetadata"))||
1264  (0 == s_one_oname.compare("archivedmetadata")) ||
1265  (0 == s_one_oname.compare("archivemetadata")) ||
1266  (0 == s_one_oname.compare("ArchiveMetadata")))
1267  archmeta_no_suffix = false;
1268  else
1269  archmeta_num_total++;
1270  }
1271 #if 0
1272 BESDEBUG("h5","archmeta_num_total= "<<archmeta_num_total <<endl);
1273 if(archmeta_no_suffix) BESDEBUG("h5","archmeta data doesn't have the suffix" <<endl);
1274 else BESDEBUG("h5","archmeta data has the suffix" <<endl);
1275 #endif
1276 
1277  }
1278 
1279  else if (((s_one_oname.find("SubsetMetadata"))==0) ||
1280  ((s_one_oname.find("subsetmetadata"))==0)){
1281 
1282  metatype[i] = SubsetMeta;
1283  // Do we have suffix for the metadata?
1284  // When checking the first time, no_suffix is always true.
1285  // If we have already found that it doesn't have any suffix,
1286  // it should not go into this loop anyway. throw an error.
1287  if (false == submeta_no_suffix) {
1288  H5Gclose(ecs_grp_id);
1289  string msg = "submetadata/SubMetadata without suffix should only appear once. ";
1290  throw InternalErr(__FILE__, __LINE__, msg);
1291  }
1292 
1293  else if(submeta_num_total >0)
1294  submeta_num_total++;
1295  else { // either no suffix or the first time to loop the one having the suffix.
1296  if ((0 == s_one_oname.compare("SubsetMetadata"))||
1297  (0 == s_one_oname.compare("subsetmetadata")))
1298  submeta_no_suffix = false;
1299  else submeta_num_total++;
1300  }
1301 #if 0
1302 BESDEBUG("h5","submeta_num_total= "<<submeta_num_total <<endl);
1303 if(submeta_no_suffix) BESDEBUG("h5","submeta data doesn't have the suffix" <<endl);
1304 else BESDEBUG("h5","submeta data has the suffix" <<endl);
1305 #endif
1306 
1307  }
1308 
1309  else if ((0 == (s_one_oname.find("XmlMetadata"))) ||
1310  (0 == (s_one_oname.find("xmlmetadata")))){
1311 
1312  metatype[i] = XMLMeta;
1313 
1314  // Do we have suffix for the metadata?
1315  // When checking the first time, no_suffix is always true.
1316  // If we have already found that it doesn't have any suffix,
1317  // it should not go into this loop anyway. throw an error.
1318  if (false == xmlmeta_no_suffix) {
1319  H5Gclose(ecs_grp_id);
1320  string msg = "xmlmetadata/Xmlmetadata without suffix should only appear once. ";
1321  throw InternalErr(__FILE__, __LINE__, msg);
1322  }
1323 
1324  else if(xmlmeta_num_total >0)
1325  xmlmeta_num_total++;
1326  else { // either no suffix or the first time to loop the one having the suffix.
1327  if ((0 == s_one_oname.compare("XmlMetadata"))||
1328  (0 == s_one_oname.compare("xmlmetadata")))
1329  xmlmeta_no_suffix = false;
1330  else xmlmeta_num_total++;
1331  }
1332 #if 0
1333 BESDEBUG("h5","xmlmeta_num_total= "<<xmlmeta_num_total <<endl);
1334 if(xmlmeta_no_suffix) BESDEBUG("h5","xmlmeta data doesn't have the suffix" <<endl);
1335 else BESDEBUG("h5","xmlmeta data has the suffix" <<endl);
1336 #endif
1337 
1338  }
1339 
1340  else if ((0 == (s_one_oname.find("ProductMetadata"))) ||
1341  (0 == (s_one_oname.find("productmetadata")))){
1342 
1343  metatype[i] = ProductMeta;
1344  // Do we have suffix for the metadata?
1345  // When checking the first time, no_suffix is always true.
1346  // If we have already found that it doesn't have any suffix,
1347  // it should not go into this loop anyway. throw an error.
1348  if (!prometa_no_suffix) {
1349  H5Gclose(ecs_grp_id);
1350  string msg = "productmetadata/ProductMetadata without suffix should only appear once. ";
1351  throw InternalErr(__FILE__, __LINE__, msg);
1352  }
1353 
1354  else if(prometa_num_total >0) prometa_num_total++;
1355  else { // either no suffix or the first time to loop the one having the suffix.
1356  if ((0 == s_one_oname.compare("ProductMetadata"))||
1357  (0 == s_one_oname.compare("productmetadata")))
1358  prometa_no_suffix = false;
1359  else prometa_num_total++;
1360  }
1361 
1362  }
1363 
1364  // All other metadata will be merged to one string, no need to check the name.
1365  else othermeta_num_total++;
1366  }
1367 
1368  oname.clear();
1369  s_one_oname.clear();
1370  }
1371 
1372  // Define a vector of string to hold StructMetadata.
1373  // StructMetadata must exist for a valid HDF-EOS5 file.
1374  vector<string> strmeta_value;
1375  if (strmeta_num_total <= 0) {
1376  string msg = "No StructMetadata was found in the HDF-EOS5 file.";
1377  H5Gclose(ecs_grp_id);
1378  throw InternalErr(__FILE__, __LINE__, msg);
1379  }
1380  else {
1381  strmeta_value.resize(strmeta_num_total);
1382  for (int i = 0; i < strmeta_num_total; i++)
1383  strmeta_value[i]="";
1384  }
1385 
1386  // All other metadata are optional.
1387  // Define a vector of string to hold archivedmetadata.
1388  vector<string> archmeta_value;
1389  if (archmeta_num_total >0) {
1390  archmeta_value.resize(archmeta_num_total);
1391  for (int i = 0; i < archmeta_num_total; i++)
1392  archmeta_value[i]="";
1393  }
1394 
1395  // Define a vector of string to hold coremetadata.
1396  vector<string> coremeta_value;
1397  if (coremeta_num_total >0) {
1398  coremeta_value.resize(coremeta_num_total);
1399  for (int i = 0; i < coremeta_num_total; i++)
1400  coremeta_value[i]="";
1401  }
1402 
1403  // Define a vector of string to hold xmlmetadata.
1404  vector<string> xmlmeta_value;
1405  if (xmlmeta_num_total >0) {
1406  xmlmeta_value.resize(xmlmeta_num_total);
1407  for (int i = 0; i < xmlmeta_num_total; i++)
1408  xmlmeta_value[i]="";
1409  }
1410 
1411  // Define a vector of string to hold subsetmetadata.
1412  vector<string> submeta_value;
1413  if (submeta_num_total >0) {
1414  submeta_value.resize(submeta_num_total);
1415  for (int i = 0; i < submeta_num_total; i++)
1416  submeta_value[i]="";
1417  }
1418 
1419  // Define a vector of string to hold productmetadata.
1420  vector<string> prometa_value;
1421  if (prometa_num_total >0) {
1422  prometa_value.resize(prometa_num_total);
1423  for (int i = 0; i < prometa_num_total; i++)
1424  prometa_value[i]="";
1425  }
1426 
1427  // For all other metadata, we don't need to calculate the value, just append them.
1428 
1429  // Now we want to retrieve the metadata values and combine them into one string.
1430  // Here we have to remember the location of every element of the metadata if
1431  // this metadata has a suffix.
1432  for (hsize_t i = 0; i < nelems; i++) {
1433 
1434  // The DDS parser only needs to parse the StructMetadata. So check
1435  // the st_only flag; if it is true, only the StructMetadata string will be read.
1436  // StructMetadata is generated by the HDF-EOS5 library, so the
1437  // name "StructMetadata.??" won't change for real struct metadata.
1438  // However, we still assume that somebody may not use the HDF-EOS5
1439  // library to add StructMetadata; the name may then be "structmetadata".
1440  if (true == s_st_only &&
1441  (((s_oname[i].find("StructMetadata"))!=0) &&
1442  ((s_oname[i].find("structmetadata"))!=0))){
1443  continue;
1444  }
1445 
1446  // Open the dataset, dataspace, datatype, number of elements etc. for this metadata
1447  hid_t s_dset_id = -1;
1448  hid_t s_space_id = -1;
1449  hid_t s_ty_id = -1;
1450  hssize_t s_nelms = -1;
1451  size_t dtype_size = -1;
1452 
1453  if ((s_dset_id = H5Dopen(ecs_grp_id,s_oname[i].c_str(),H5P_DEFAULT))<0){
1454  string msg = "Cannot open HDF5 dataset ";
1455  msg += s_oname[i];
1456  H5Gclose(ecs_grp_id);
1457  throw InternalErr(__FILE__, __LINE__, msg);
1458  }
1459 
1460  if ((s_space_id = H5Dget_space(s_dset_id))<0) {
1461  string msg = "Cannot open the data space of HDF5 dataset ";
1462  msg += s_oname[i];
1463  H5Dclose(s_dset_id);
1464  H5Gclose(ecs_grp_id);
1465  throw InternalErr(__FILE__, __LINE__, msg);
1466  }
1467 
1468  if ((s_ty_id = H5Dget_type(s_dset_id)) < 0) {
1469  string msg = "Cannot get the data type of HDF5 dataset ";
1470  msg += s_oname[i];
1471  H5Sclose(s_space_id);
1472  H5Dclose(s_dset_id);
1473  H5Gclose(ecs_grp_id);
1474  throw InternalErr(__FILE__, __LINE__, msg);
1475  }
1476  if ((s_nelms = H5Sget_simple_extent_npoints(s_space_id))<0) {
1477  string msg = "Cannot get the number of points of HDF5 dataset ";
1478  msg += s_oname[i];
1479  H5Tclose(s_ty_id);
1480  H5Sclose(s_space_id);
1481  H5Dclose(s_dset_id);
1482  H5Gclose(ecs_grp_id);
1483  throw InternalErr(__FILE__, __LINE__, msg);
1484  }
1485  if ((dtype_size = H5Tget_size(s_ty_id))==0) {
1486 
1487  string msg = "Cannot get the data type size of HDF5 dataset ";
1488  msg += s_oname[i];
1489  H5Tclose(s_ty_id);
1490  H5Sclose(s_space_id);
1491  H5Dclose(s_dset_id);
1492  H5Gclose(ecs_grp_id);
1493  throw InternalErr(__FILE__, __LINE__, msg);
1494  }
1495 
1496  // Obtain the real value of the metadata
1497  vector<char> s_buf(dtype_size*s_nelms +1);
1498 
1499  if ((H5Dread(s_dset_id,s_ty_id,H5S_ALL,H5S_ALL,H5P_DEFAULT,&s_buf[0]))<0) {
1500 
1501  string msg = "Cannot read HDF5 dataset ";
1502  msg += s_oname[i];
1503  H5Tclose(s_ty_id);
1504  H5Sclose(s_space_id);
1505  H5Dclose(s_dset_id);
1506  H5Gclose(ecs_grp_id);
1507  throw InternalErr(__FILE__, __LINE__, msg);
1508  }
1509 
1510  // Now we can safely close datatype, data space and dataset IDs.
1511  H5Tclose(s_ty_id);
1512  H5Sclose(s_space_id);
1513  H5Dclose(s_dset_id);
1514 
1515 
1516  // Convert from the vector<char> to a C++ string.
1517  string tempstr(s_buf.begin(),s_buf.end());
1518  s_buf.clear();
1519  size_t temp_null_pos = tempstr.find_first_of('\0');
1520 
1521  // temp_null_pos is the position of the NULL character, which is the last character of the string,
1522  // so the length of the string before the NULL is equal to
1523  // temp_null_pos since positions start at 0.
1524  string finstr = tempstr.substr(0,temp_null_pos);
1525 
1526  // For the DDS parser, only return StructMetadata
1527  if (StructMeta == metatype[i]) {
1528 
1529  // Now obtain the corresponding value in integer type for the suffix. '0' to 0 etc.
1530  try {
1531  strmeta_num = get_metadata_num(s_oname[i]);
1532  }
1533  catch(...) {
1534  H5Gclose(ecs_grp_id);
1535  throw InternalErr(__FILE__,__LINE__,"Obtain structmetadata suffix error.");
1536 
1537  }
1538  // This is probably not necessary, since StructMetadata may always have a suffix.
1539  // Leave it here in case the rules change or a file is generated by a special non-HDF-EOS5 library.
1540  // When strmeta_num is -1, it means there is no suffix for this metadata, so the total StructMetadata
1541  // is this string only.
1542  if (-1 == strmeta_num)
1543  total_strmeta_value = finstr;
1544  // strmeta_value at this point should be empty before assigning any values.
1545  else if (strmeta_value[strmeta_num]!="") {
1546  string msg = "The structmeta value array at this index should be empty string ";
1547  H5Gclose(ecs_grp_id);
1548  throw InternalErr(__FILE__, __LINE__, msg);
1549  }
1550  // assign the string vector to this value.
1551  else
1552  strmeta_value[strmeta_num] = finstr;
1553  }
1554 
1555  // DAS parser needs all metadata.
1556  if (false == s_st_only &&
1557  (metatype[i] != StructMeta)) {
1558 
1559  switch (metatype[i]) {
1560 
1561  case CoreMeta:
1562  {
1563  if (coremeta_num_total < 0) {
1564  string msg = "There may be no coremetadata or coremetadata is not counted ";
1565  H5Gclose(ecs_grp_id);
1566  throw InternalErr(__FILE__, __LINE__, msg);
1567 
1568  }
1569 
1570  try {
1571  coremeta_num = get_metadata_num(s_oname[i]);
1572  }
1573  catch(...) {
1574  H5Gclose(ecs_grp_id);
1575  throw InternalErr(__FILE__,__LINE__,"Obtain coremetadata suffix error.");
1576 
1577  }
1578 
1579  // When coremeta_num is -1, it means there is no suffix for this metadata, so the total coremetadata
1580  // is this string only. Similar cases apply for the rest of the metadata.
1581  if ( -1 == coremeta_num )
1582  total_coremeta_value = finstr;
1583  else if (coremeta_value[coremeta_num]!="") {
1584  string msg = "The coremeta value array at this index should be empty string ";
1585  H5Gclose(ecs_grp_id);
1586  throw InternalErr(__FILE__, __LINE__, msg);
1587  }
1588 
1589  // assign the string vector to this value.
1590  else
1591  coremeta_value[coremeta_num] = finstr;
1592  }
1593  break;
1594 
1595  case ArchivedMeta:
1596  {
1597  if (archmeta_num_total < 0) {
1598  string msg = "There may be no archivemetadata or archivemetadata is not counted ";
1599  H5Gclose(ecs_grp_id);
1600  throw InternalErr(__FILE__, __LINE__, msg);
1601  }
1602  try {
1603  archmeta_num = get_metadata_num(s_oname[i]);
1604  }
1605  catch(...) {
1606  H5Gclose(ecs_grp_id);
1607  throw InternalErr(__FILE__,__LINE__,"Obtain archivemetadata suffix error.");
1608  }
1609  if (-1 == archmeta_num )
1610  total_archmeta_value = finstr;
1611  else if (archmeta_value[archmeta_num]!="") {
1612  string msg = "The archivemeta value array at this index should be empty string ";
1613  H5Gclose(ecs_grp_id);
1614  throw InternalErr(__FILE__, __LINE__, msg);
1615 
1616  }
1617  // assign the string vector to this value.
1618  else
1619  archmeta_value[archmeta_num] = finstr;
1620  }
1621  break;
1622  case SubsetMeta:
1623  {
1624  if (submeta_num_total < 0) {
1625  string msg = "There may be no subsetmetadata or subsetmetadata is not counted ";
1626  H5Gclose(ecs_grp_id);
1627  throw InternalErr(__FILE__, __LINE__, msg);
1628  }
1629  try {
1630  submeta_num = get_metadata_num(s_oname[i]);
1631  }
1632  catch(...) {
1633  H5Gclose(ecs_grp_id);
1634  throw InternalErr(__FILE__,__LINE__,"Obtain subsetmetadata suffix error.");
1635  }
1636  if (-1 == submeta_num )
1637  total_submeta_value = finstr;
1638  else if (submeta_value[submeta_num]!="") {
1639  string msg = "The submeta value array at this index should be empty string ";
1640  H5Gclose(ecs_grp_id);
1641  throw InternalErr(__FILE__, __LINE__, msg);
1642  }
1643  // assign the string vector to this value.
1644  else
1645  submeta_value[submeta_num] = finstr;
1646  }
1647  break;
1648  case ProductMeta:
1649  {
1650  if (prometa_num_total < 0) {
1651  string msg = "There may be no productmetadata or productmetadata is not counted ";
1652  H5Gclose(ecs_grp_id);
1653  throw InternalErr(__FILE__, __LINE__, msg);
1654  }
1655  try {
1656  prometa_num = get_metadata_num(s_oname[i]);
1657  }
1658  catch(...) {
1659  H5Gclose(ecs_grp_id);
1660  throw InternalErr(__FILE__,__LINE__,"Obtain productmetadata suffix error.");
1661  }
1662  if (prometa_num == -1)
1663  total_prometa_value = finstr;
1664  else if (prometa_value[prometa_num]!="") {
1665  string msg = "The productmeta value array at this index should be empty string ";
1666  H5Gclose(ecs_grp_id);
1667  throw InternalErr(__FILE__, __LINE__, msg);
1668  }
1669  // assign the string vector to this value.
1670  else
1671  prometa_value[prometa_num] = finstr;
1672  }
1673  break;
1674  case XMLMeta:
1675  {
1676  if (xmlmeta_num_total < 0) {
1677  string msg = "There may be no xmlmetadata or xmlmetadata is not counted ";
1678  H5Gclose(ecs_grp_id);
1679  throw InternalErr(__FILE__, __LINE__, msg);
1680  }
1681  try {
1682  xmlmeta_num = get_metadata_num(s_oname[i]);
1683  }
1684  catch(...) {
1685  H5Gclose(ecs_grp_id);
1686  throw InternalErr(__FILE__,__LINE__,"Obtain XMLmetadata suffix error.");
1687  }
1688  if (-1 == xmlmeta_num )
1689  total_xmlmeta_value = finstr;
1690  else if (xmlmeta_value[xmlmeta_num]!="") {
1691  string msg = "The xmlmeta value array at this index should be empty string ";
1692  H5Gclose(ecs_grp_id);
1693  throw InternalErr(__FILE__, __LINE__, msg);
1694  }
1695  // assign the string vector to this value.
1696  else
1697  xmlmeta_value[xmlmeta_num] = finstr;
1698  }
1699  break;
1700  case OtherMeta:
1701  {
1702  if (othermeta_num_total < 0) {
1703  string msg = "There may be no othermetadata or other metadata is not counted ";
1704  H5Gclose(ecs_grp_id);
1705  throw InternalErr(__FILE__, __LINE__, msg);
1706  }
1707  total_othermeta_value = total_othermeta_value + finstr;
1708  }
1709  break;
1710  default :
1711  {
1712  string msg = "Unsupported metadata type ";
1713  H5Gclose(ecs_grp_id);
1714  throw InternalErr(__FILE__, __LINE__, msg);
1715  }
1716  }
1717  }
1718  tempstr.clear();
1719  finstr.clear();
1720  }
1721 
1722  // Now we need to handle the concatenation of the metadata
1723  // first StructMetadata
1724  if (strmeta_num_total > 0) {
1725  // The no suffix one has been taken care.
1726  if (strmeta_num != -1) {
1727  for (int i = 0; i <strmeta_num_total; i++)
1728  total_strmeta_value +=strmeta_value[i];
1729  }
1730  }
1731 
1732  // For the DAS handler
1733  if ( false == s_st_only) {
1734 
1735  if (coremeta_num_total >0) {
1736  if (coremeta_num != -1) {
1737  for(int i = 0; i <coremeta_num_total; i++)
1738  total_coremeta_value +=coremeta_value[i];
1739  }
1740  }
1741 
1742  if (archmeta_num_total >0) {
1743  if (archmeta_num != -1) {
1744  for(int i = 0; i <archmeta_num_total; i++)
1745  total_archmeta_value +=archmeta_value[i];
1746  }
1747  }
1748 
1749  if (submeta_num_total >0) {
1750  if (submeta_num != -1) {
1751  for(int i = 0; i <submeta_num_total; i++)
1752  total_submeta_value +=submeta_value[i];
1753  }
1754  }
1755 
1756  if (xmlmeta_num_total >0) {
1757  if (xmlmeta_num != -1) {
1758  for(int i = 0; i <xmlmeta_num_total; i++)
1759  total_xmlmeta_value +=xmlmeta_value[i];
1760  }
1761  }
1762 
1763  if (prometa_num_total >0) {
1764  if (prometa_num != -1) {
1765  for(int i = 0; i <prometa_num_total; i++)
1766  total_prometa_value +=prometa_value[i];
1767  }
1768  }
1769  }
1770  H5Gclose(ecs_grp_id);
1771 }
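 
// A minimal sketch (not part of the handler source) of the concatenation pattern
// used in read_ecs_metadata() above: pieces named <name>.0, <name>.1, ... are kept
// in a vector indexed by their numeric suffix and appended in order, while an
// object with no suffix already holds the complete metadata string. The helper
// name concat_metadata_pieces is illustrative only; it assumes <string> and
// <vector> are available.
static std::string concat_metadata_pieces(const std::string &no_suffix_value,
                                          const std::vector<std::string> &pieces)
{
    // An object without a numeric suffix already contains the whole metadata.
    if (!no_suffix_value.empty())
        return no_suffix_value;

    // Otherwise append the numbered pieces in ascending suffix order.
    std::string total;
    for (const auto &piece : pieces)
        total += piece;
    return total;
}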
1772 
1773 // Helper function for read_ecs_metadata. Obtain the numeric suffix that follows the metadata name.
1774 int get_metadata_num(const string & meta_str) {
1775 
1776  // Normal metadata names look like coremetadata.0, coremetadata.1, etc.
1777  // However, we have also seen less tidy names such as coremetadata.0 and coremetadata.0.1 in a HIRDLS-MLS-Aura-L3 file,
1778  // so those need to be handled as well. Here we assume a name contains no more than two dots. KY 2012-11-08
1779  size_t dot_pos = meta_str.find(".");
1780  if (dot_pos == string::npos) // No dot
1781  return -1;
1782  else if (meta_str.find_first_of(".") == meta_str.find_last_of(".")) { // One dot
1783  string num_str = meta_str.substr(dot_pos+1);
1784  stringstream ssnum(num_str);
1785  int num;
1786  ssnum >> num;
1787  if (ssnum.fail())
1788  throw InternalErr(__FILE__,__LINE__,"Suffix after dots is not a number.");
1789  return num;
1790  }
1791  else { // Two dots
1792  string str_after_first_dot = meta_str.substr(dot_pos+1);
1793  if (str_after_first_dot.find_first_of(".") != str_after_first_dot.find_last_of("."))
1794  throw InternalErr(__FILE__,__LINE__,"Currently don't support metadata names containing more than two dots.");
1795  // We don't check whether names such as coremetadata.0 and coremetadata.0.0 coexist; having both ".0" and ".0.0"
1796  // in the same series would make little sense.
1797  // Instead we expect data producers to generate names such as coremetadata.0, coremetadata.0.1, coremetadata.0.2.
1798  // KY 2012-11-08
1799  size_t second_dot_pos = str_after_first_dot.find(".");
1800  string num_str = str_after_first_dot.substr(second_dot_pos+1);
1801  stringstream ssnum(num_str);
1802  int num;
1803  ssnum >> num;
1804  return num;
1805  }
1806 
1807 }
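 
// Illustrative examples (not present in the source) of the values returned by
// get_metadata_num() for typical ECS metadata object names, following the logic above:
//   get_metadata_num("coremetadata")      -> -1  (no numeric suffix)
//   get_metadata_num("coremetadata.2")    ->  2  (one dot: the number after the dot)
//   get_metadata_num("coremetadata.0.3")  ->  3  (two dots: the number after the second dot)
//   get_metadata_num("coremetadata.x")    -> throws InternalErr ("Suffix after dots is not a number.")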
1808 
1809 
1810 
1811 
1812 