bes  Updated for version 3.20.6
hdfdesc.cc
Go to the documentation of this file.
1 // This file is part of the hdf4 data handler for the OPeNDAP data server.
6 // The code includes the support of HDF-EOS2 and NASA HDF4 files that follow CF.
7 // Copyright (c) 2008-2012 The HDF Group.
8 // Author: MuQun Yang <myang6@hdfgroup.org>
9 // Author: Hyo-Kyung Lee <hyoklee@hdfgroup.org>
10 //
11 // Copyright (c) 2005 OPeNDAP, Inc.
12 // Author: James Gallagher <jgallagher@opendap.org>
13 //
14 // This is free software; you can redistribute it and/or modify it under the
15 // terms of the GNU Lesser General Public License as published by the Free
16 // Software Foundation; either version 2.1 of the License, or (at your
17 // option) any later version.
18 //
19 // This software is distributed in the hope that it will be useful, but
20 // WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
21 // or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
22 // License for more details.
23 //
24 // You should have received a copy of the GNU Lesser General Public License
25 // along with this software; if not, write to the Free Software Foundation,
26 // Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
27 //
28 // You can contact OPeNDAP, Inc. at PO Box 112, Saunderstown, RI. 02874-0112.
29 
31 // Copyright 1996, by the California Institute of Technology.
32 // ALL RIGHTS RESERVED. United States Government Sponsorship
33 // acknowledged. Any commercial use must be negotiated with the
34 // Office of Technology Transfer at the California Institute of
35 // Technology. This software may be subject to U.S. export control
36 // laws and regulations. By accepting this software, the user
37 // agrees to comply with all applicable U.S. export laws and
38 // regulations. User has the responsibility to obtain export
39 // licenses, or other export authority as may be required before
40 // exporting such information to foreign countries or providing
41 // access to foreign persons.
42 
43 // Author: Todd Karakashian, NASA/Jet Propulsion Laboratory
44 // Todd.K.Karakashian@jpl.nasa.gov
45 //
46 //
48 
49 #include "config_hdf.h"
50 
51 #include <cstdio>
52 #include <cassert>
53 #include <libgen.h>
54 
55 // STL includes
56 #include <string>
57 #include <fstream>
58 #include <iostream>
59 #include <sstream>
60 #include <algorithm>
61 #include <numeric>
62 #include <functional>
63 
64 
65 // Include this on linux to suppress an annoying warning about multiple
66 // definitions of MIN and MAX.
67 #ifdef HAVE_SYS_PARAM_H
68 #include <sys/param.h>
69 #endif
70 
71 #include <unistd.h>
72 #include <sys/types.h>
73 #include <dirent.h>
74 #include <iomanip>
75 #include <cerrno>
76 
77 
78 // HDF and HDFClass includes
79 #include <mfhdf.h>
80 
81 // DODS includes
82 #include <DDS.h>
83 #include <DAS.h>
84 #include <escaping.h>
85 #include <parser.h>
86 #include <InternalErr.h>
87 #include <debug.h>
88 
89 #include <BESDebug.h>
90 #include <BESLog.h>
91 
92 #include "HDF4RequestHandler.h"
93 // DODS/HDF includes for the default option only
94 #include "hcstream.h"
95 #include "hdfclass.h"
96 #include "hcerr.h"
97 #include "dhdferr.h"
98 #include "HDFArray.h"
99 #include "HDFSequence.h"
100 #include "HDFTypeFactory.h"
101 #include "HDFGrid.h"
102 #include "dodsutil.h"
103 #include "hdf-dods.h"
104 #include "hdf-maps.h"
105 
106 // DAP2 doesn't have signed char type, the signed char will be converted to int32 with this macro.
107 #define SIGNED_BYTE_TO_INT32 1
108 
109 // HDF datatype headers for both the default and the CF options
110 #include "HDFByte.h"
111 #include "HDFInt16.h"
112 #include "HDFUInt16.h"
113 #include "HDFInt32.h"
114 #include "HDFUInt32.h"
115 #include "HDFFloat32.h"
116 #include "HDFFloat64.h"
117 #include "HDFStr.h"
118 
119 // Routines that handle SDS and Vdata attributes for the HDF-EOS2 objects in a hybrid HDF-EOS2 file for the CF option
120 #include "HE2CF.h"
121 
122 // HDF4 for the CF option(EOS2 will treat as pure HDF4 objects if the HDF-EOS2 library is not configured in)
123 #include "HDFSP.h"
124 #include "HDFSPArray_RealField.h"
125 #include "HDFSPArrayGeoField.h"
126 #include "HDFSPArrayMissField.h"
127 #include "HDFSPArrayAddCVField.h"
128 #include "HDFSPArray_VDField.h"
129 #include "HDFCFStrField.h"
130 #include "HDFCFStr.h"
131 #include "HDFCFUtil.h"
132 
133 // HDF-EOS2 (including the hybrid) will be handled as HDF-EOS2 objects if the HDF-EOS2 library is configured in
134 #ifdef USE_HDFEOS2_LIB
135 #include "HDFEOS2.h"
136 #include "HDFEOS2Array_RealField.h"
137 #include "HDFEOS2ArrayGridGeoField.h"
138 #include "HDFEOS2ArraySwathGeoField.h"
139 #include "HDFEOS2ArrayMissField.h"
140 #include "HDFEOS2ArraySwathDimMapField.h"
141 //#include "HDFEOS2ArraySwathGeoDimMapField.h"
142 #include "HDFEOS2ArraySwathGeoDimMapExtraField.h"
143 #include "HDFEOS2CFStr.h"
144 #include "HDFEOS2CFStrField.h"
145 #include "HDFEOS2HandleType.h"
146 #endif
147 
148 
149 using namespace std;
150 
151 // Added 5/7/09; This bug (#1163) was fixed in July 2008 except for this
152 // handler. jhrg
153 #define ATTR_STRING_QUOTE_FIX
154 
// Render any streamable value (integer, float, ...) as its text form.
template < class T > std::string num2string(T n)
{
    std::ostringstream converter;
    converter << n;
    return converter.str();
}
161 
162 // Glue routines declared in hdfeos.lex
163 void hdfeos_switch_to_buffer(void *new_buffer);
164 void hdfeos_delete_buffer(void * buffer);
165 void *hdfeos_string(const char *yy_str);
166 
167 struct yy_buffer_state;
168 yy_buffer_state *hdfeos_scan_string(const char *str);
169 extern int hdfeosparse(libdap::parser_arg *arg); // defined in hdfeos.tab.c
170 
171 // Functions for the default option
172 void AddHDFAttr(DAS & das, const string & varname,
173  const vector < hdf_attr > &hav);
174 void AddHDFAttr(DAS & das, const string & varname,
175  const vector < string > &anv);
176 
177 static void build_descriptions(DDS & dds, DAS & das,
178  const string & filename);
179 static void SDS_descriptions(sds_map & map, DAS & das,
180  const string & filename);
181 static void Vdata_descriptions(vd_map & map, DAS & das,
182  const string & filename);
183 static void Vgroup_descriptions(DDS & dds, DAS & das,
184  const string & filename, sds_map & sdmap,
185  vd_map & vdmap, gr_map & grmap);
186 static void GR_descriptions(gr_map & map, DAS & das,
187  const string & filename);
188 static void FileAnnot_descriptions(DAS & das, const string & filename);
189 static vector < hdf_attr > Pals2Attrs(const vector < hdf_palette > palv);
190 static vector < hdf_attr > Dims2Attrs(const hdf_dim dim);
191 
192 void read_das(DAS & das, const string & filename);
193 void read_dds(DDS & dds, const string & filename);
194 
195 // For the CF option
// read_dds for HDF4 files. Some NASA non-EOS2 HDF4 products are handled specifically to follow the CF conventions.
197 bool read_dds_hdfsp(DDS & dds, const string & filename,int32 sdfd, int32 fileid,HDFSP::File*h4file);
198 bool read_das_hdfsp(DAS & das, const string & filename,int32 sdfd, int32 fileid,HDFSP::File**h4filepptr);
199 
200 // read_dds for special NASA HDF-EOS2 hybrid(non-EOS2) objects
201 bool read_dds_hdfhybrid(DDS & dds, const string & filename,int32 sdfd, int32 fileid,HDFSP::File*h4file);
202 bool read_das_hdfhybrid(DAS & das, const string & filename,int32 sdfd, int32 fileid,HDFSP::File**h4filepptr);
203 
204 // Functions to read special 1-d HDF-EOS2 grid. This grid can be built up quickly.
205 //bool read_dds_special_1d_grid(DDS &dds, HDFSP::File *spf, const string & filename,int32 sdfd, int32 fileid);
206 bool read_dds_special_1d_grid(DDS &dds, HDFSP::File *spf, const string & filename,int32 sdfd,bool can_cache);
207 bool read_das_special_eos2(DAS &das,const string & filename,int32 sdid, int32 fileid,bool ecs_metadata,HDFSP::File**h4filepptr);
208 bool read_das_special_eos2_core(DAS &das, HDFSP::File *spf, const string & filename,bool ecs_metadata);
209 
210 void read_das_sds(DAS & das, const string & filename,int32 sdfd, bool ecs_metadata,HDFSP::File**h4fileptr);
211 void read_dds_sds(DDS &dds, const string & filename,int32 sdfd, HDFSP::File*h4file,bool dds_set_cache);
212 
213 void change_das_mod08_scale_offset(DAS & das, HDFSP::File *spf);
214 
215 // Functions to handle SDS fields for the CF option.
216 void read_dds_spfields(DDS &dds,const string& filename,const int sdfd,HDFSP::SDField *spsds, SPType sptype);
217 
218 // Functions to handle Vdata fields for the CF option.
219 void read_dds_spvdfields(DDS &dds,const string& filename, const int fileid,int32 vdref, int32 numrec,HDFSP::VDField *spvd);
220 
221 // Check if this is a special HDF-EOS2 file that can be handled by HDF4 directly. Using HDF4 only can improve performance.
222 int check_special_eosfile(const string&filename,string&grid_name,int32 sdfd,int32 fileid);
223 
224 
225 // The following blocks only handle HDF-EOS2 objects based on HDF-EOS2 libraries.
226 #ifdef USE_HDFEOS2_LIB
227 
228 // Parse HDF-EOS2's ECS metadata(coremetadata etc.)
229 void parse_ecs_metadata(DAS &das,const string & metaname, const string &metadata);
230 
231 // read_dds for HDF-EOS2
233 // We find some special HDF-EOS2(MOD08_M3) products that provides coordinate variables
234 // without using the dimension scales. We will handle this in a special way.
235 // So change the return value of read_dds_hdfeos2 to represent different cases
236 // 0: general non-EOS2 pure HDF4
237 // 1: HDF-EOS2 hybrid
238 // 2: MOD08_M3
239 // HDF-EOS2 but no need to use HDF-EOS2 lib: no real dimension scales but have CVs for every dimension, treat differently
240 // 3: AIRS version 6
241 // HDF-EOS2 but no need to use HDF-EOS2 lib:
242 // have dimension scales but don’t have CVs for every dimension, also need to condense dimensions, treat differently
243 // 4. Expected AIRS level 3 or level 2
244 // HDF-EOS2 but no need to use HDF-EOS2 lib: Have dimension scales for all dimensions
245 // 5. MERRA
246 // Special handling for MERRA file
247 int read_dds_hdfeos2(DDS & dds, const string & filename,int32 sdfd,int32 fileid, int32 gridfd, int32 swathfd,HDFSP::File*h4file,HDFEOS2::File*eosfile);
248 
// read das for HDF-EOS2
250 int read_das_hdfeos2(DAS & das, const string & filename,int32 sdfd,int32 fileid, int32 gridfd, int32 swathfd,bool ecs_metadata,HDFSP::File**h4filepptr,HDFEOS2::File**eosfilepptr);
251 
252 
253 // read_dds for one grid or swath
// Map every field of one HDF-EOS2 grid or swath dataset to a DAP variable
// and add it to the DDS.
//
// For a swath, the geolocation fields are appended to the data-field list so
// both are mapped. Special cases handled below:
//   - char arrays are mapped to DAP strings (last dimension becomes the
//     string length),
//   - MODIS swaths that use dimension maps, optionally backed by a separate
//     geolocation file,
//   - lat/lon coordinate fields (fieldtype 1/2) of grids and swaths,
//   - missing Z-dimension fields (fieldtype 4, must be rank 1).
//
// Parameters:
//   dds           - DDS being populated
//   filename      - path of the HDF-EOS2 file
//   dataset       - the grid or swath object
//   grid_or_swath - 0: grid, 1: swath (anything else throws)
//   ownll         - true when this grid provides its own lat/lon pair
//   sotype        - scale/offset rule type (for MODIS non-CF equations)
//   sdfd, fileid, gridfd, swathfd - already-opened SD, H, GD and SW handles
void read_dds_hdfeos2_grid_swath(DDS &dds, const string&filename, HDFEOS2::Dataset *dataset, int grid_or_swath,bool ownll, SOType sotype,
    int32 sdfd, int32 fileid, int32 gridfd,int32 swathfd)
{

    BESDEBUG("h4","Coming to read_dds_hdfeos2_grid_swath "<<endl);

    // grid_or_swath - 0: grid, 1: swath
    if(grid_or_swath < 0 || grid_or_swath > 1)
        throw InternalErr(__FILE__, __LINE__, "The current type should be either grid or swath");

    // Declare dim. map entries. The definition of dimmap_entry can be found at HDFCFUtil.h.
    vector<struct dimmap_entry> dimmaps;

    // The extra dim map file name (lat/lon of a swath with dim. map can be found in a separate HDF file).
    string modis_geofilename="";
    bool geofile_has_dimmap = false;

    // 1. Obtain dimension map info and store the info. to dimmaps.
    // 2. Check if a MODIS swath geo-location HDF-EOS2 file exists for the dimension map case of MODIS Swath
    if(grid_or_swath == 1)
        HDFCFUtil::obtain_dimmap_info(filename,dataset,dimmaps,modis_geofilename,geofile_has_dimmap);

    // Collect all fields of this dataset; for a swath the geolocation fields
    // are appended so that data and geo fields are mapped uniformly.
    const vector<HDFEOS2::Field*>& fields = (dataset)->getDataFields();
    vector<HDFEOS2::Field*> all_fields = fields;
    vector<HDFEOS2::Field*>::const_iterator it_f;

    if(1 == grid_or_swath) {
        HDFEOS2::SwathDataset *sw = static_cast<HDFEOS2::SwathDataset *>(dataset);
        const vector<HDFEOS2::Field*>geofields = sw->getGeoFields();
        for (it_f = geofields.begin(); it_f != geofields.end(); it_f++)
            all_fields.push_back(*it_f);
    }

    // Map each field to a DAP variable.
    for(it_f = all_fields.begin(); it_f != all_fields.end(); it_f++)
    {
        BESDEBUG("h4","New field Name " <<(*it_f)->getNewName()<<endl);

        // DAP prototype variable for this field's datatype; created by the
        // switch below and owned locally (DDS::add_var copies it).
        BaseType *bt=NULL;

        // Whether the field is a real field, lat/lon field or missing Z-dimension field
        int fieldtype = (*it_f)->getFieldType();

        // Check if the datatype needs to be changed. This is for MODIS data that needs to apply scale and offset.
        // ctype_field_namelist is assigned in the function read_das_hdfeos2.
        bool changedtype = false;
        for (vector<string>::const_iterator i = ctype_field_namelist.begin(); i != ctype_field_namelist.end(); ++i){
            if ((*i) == (*it_f)->getNewName()){
                changedtype = true;
                break;
            }
        }

        // HANDLE_CASE2 promotes a real data field (fieldtype==0) to Float32
        // when a non-CF scale/offset equation must be applied (changedtype).
        switch((*it_f)->getType())
        {

#define HANDLE_CASE2(tid, type) \
    case tid: \
        if(true == changedtype && fieldtype==0) \
            bt = new (HDFFloat32) ((*it_f)->getNewName(), (dataset)->getName()); \
        else \
            bt = new (type)((*it_f)->getNewName(), (dataset)->getName()); \
        break;

#define HANDLE_CASE(tid, type)\
    case tid: \
        bt = new (type)((*it_f)->getNewName(), (dataset)->getName()); \
        break;
            HANDLE_CASE(DFNT_FLOAT32, HDFFloat32);
            HANDLE_CASE(DFNT_FLOAT64, HDFFloat64);
            HANDLE_CASE(DFNT_CHAR8,HDFStr);
#ifndef SIGNED_BYTE_TO_INT32
            HANDLE_CASE2(DFNT_INT8, HDFByte);
#else
            // DAP2 has no signed-byte type; int8 is widened to int32.
            HANDLE_CASE2(DFNT_INT8,HDFInt32);
#endif
            HANDLE_CASE2(DFNT_UINT8, HDFByte);
            HANDLE_CASE2(DFNT_INT16, HDFInt16);
            HANDLE_CASE2(DFNT_UINT16,HDFUInt16);
            HANDLE_CASE2(DFNT_INT32, HDFInt32);
            HANDLE_CASE2(DFNT_UINT32, HDFUInt32);
            HANDLE_CASE2(DFNT_UCHAR8, HDFByte);
            default:
                throw InternalErr(__FILE__,__LINE__,"unsupported data type.");
#undef HANDLE_CASE
#undef HANDLE_CASE2
        }

        if(bt)
        {
            const vector<HDFEOS2::Dimension*>& dims= (*it_f)->getCorrectedDimensions();
            vector<HDFEOS2::Dimension*>::const_iterator it_d;

            // Char array maps to DAP string.
            if(DFNT_CHAR == (*it_f)->getType()) {

                if((*it_f)->getRank() >1) {

                    HDFEOS2CFStrField * ar = NULL;

                    try {

                        ar = new HDFEOS2CFStrField(
                            (*it_f)->getRank() -1,
                            (grid_or_swath ==0)?gridfd:swathfd,
                            filename,
                            (dataset)->getName(),
                            (*it_f)->getName(),
                            grid_or_swath,
                            (*it_f)->getNewName(),
                            bt);
                    }
                    catch(...) {
                        delete bt;
                        throw InternalErr(__FILE__,__LINE__,"Unable to allocate the HDFCFStr instance.");
                    }
                    // The last (fastest-varying) dimension becomes the string
                    // length, so only the first rank-1 dimensions are appended.
                    for(it_d = dims.begin(); it_d != dims.begin()+dims.size()-1; it_d++){
                        ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
                    }

                    // add_var copies the variable, so local copies are freed here.
                    dds.add_var(ar);
                    delete bt;
                    if(ar != NULL)
                        delete ar;

                }

                else {
                    // Rank-1 char array maps to a scalar DAP string.
                    HDFEOS2CFStr * sca_str = NULL;
                    try {

                        sca_str = new HDFEOS2CFStr(
                            (grid_or_swath ==0)?gridfd:swathfd,
                            filename,
                            (dataset)->getName(),
                            (*it_f)->getName(),
                            (*it_f)->getNewName(),
                            grid_or_swath);
                    }
                    catch(...) {
                        delete bt;
                        throw InternalErr(__FILE__,__LINE__,"Unable to allocate the HDFCFStr instance.");
                    }
                    dds.add_var(sca_str);
                    delete bt;
                    delete sca_str;
                }

            }

            // For general variables and non-lat/lon existing coordinate variables
            else if(fieldtype == 0 || fieldtype == 3 || fieldtype == 5) {

                // grid
                if(grid_or_swath==0){
                    HDFEOS2Array_RealField *ar = NULL;
                    ar = new HDFEOS2Array_RealField(
                        (*it_f)->getRank(),
                        filename,false,sdfd,gridfd,
                        (dataset)->getName(), "", (*it_f)->getName(),
                        sotype,
                        (*it_f)->getNewName(), bt);
                    for(it_d = dims.begin(); it_d != dims.end(); it_d++)
                        ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
                    dds.add_var(ar);
                    delete bt;
                    delete ar;
                }
                // swath
                else if(grid_or_swath==1){

                    string tempfieldname = (*it_f)->getName();

                    // This swath uses the dimension map
                    if((*it_f)->UseDimMap()) {
                        // We also find that a separate geolocation file exists

                        if (!modis_geofilename.empty()) {

                            // This field can be found in the geo-location file. The field name may be corrected.
                            if (true == HDFCFUtil::is_modis_dimmap_nonll_field(tempfieldname)) {

                                if(false == geofile_has_dimmap) {

                                    // Here we have to use HDFEOS2Array_RealField since the field may
                                    // need to apply scale and offset equation.
                                    // MODIS geolocation swath name is always MODIS_Swath_Type_GEO.
                                    // We can improve the handling of this by not hard-coding the swath name
                                    // in the future. KY 2012-08-16
                                    HDFEOS2Array_RealField *ar = NULL;
                                    ar = new HDFEOS2Array_RealField(
                                        (*it_f)->getRank(),
                                        modis_geofilename,
                                        true,
                                        sdfd,
                                        swathfd,
                                        "",
                                        "MODIS_Swath_Type_GEO",
                                        tempfieldname,
                                        sotype,
                                        (*it_f)->getNewName(),
                                        bt);

                                    for(it_d = dims.begin(); it_d != dims.end(); it_d++)
                                        ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
                                    dds.add_var(ar);
                                    delete bt;
                                    delete ar;
                                }
                                else {// Use dimension maps in the dimension map file

                                    HDFEOS2ArraySwathDimMapField * ar = NULL;

                                    // SET dimmaps to empty.
                                    // This is very key since we are using the geolocation file for the new information.
                                    // The dimension map info. will be obtained when the data is reading. KY 2013-03-13

                                    dimmaps.clear();
                                    ar = new HDFEOS2ArraySwathDimMapField(
                                        (*it_f)->getRank(),
                                        modis_geofilename,
                                        true,
                                        sdfd,
                                        swathfd,
                                        "",
                                        "MODIS_Swath_Type_GEO",
                                        tempfieldname,
                                        dimmaps,
                                        sotype,
                                        (*it_f)->getNewName(),
                                        bt);
                                    for(it_d = dims.begin(); it_d != dims.end(); it_d++)
                                        ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
                                    dds.add_var(ar);
                                    delete bt;
                                    delete ar;
                                }
                            }
                            else { // This field cannot be found in the dimension map file.

                                HDFEOS2ArraySwathDimMapField * ar = NULL;

                                // Even if the dimension map file exists, it only applies to some
                                // specific data fields, if this field doesn't belong to these fields,
                                // we should still apply the dimension map rule to these fields.

                                ar = new HDFEOS2ArraySwathDimMapField(
                                    (*it_f)->getRank(),
                                    filename,
                                    false,
                                    sdfd,
                                    swathfd,
                                    "",
                                    (dataset)->getName(),
                                    tempfieldname,
                                    dimmaps,
                                    sotype,
                                    (*it_f)->getNewName(),
                                    bt);
                                for(it_d = dims.begin(); it_d != dims.end(); it_d++)
                                    ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
                                dds.add_var(ar);
                                delete bt;
                                delete ar;


                            }
                        }
                        else {// No dimension map file
                            HDFEOS2ArraySwathDimMapField * ar = NULL;
                            ar = new HDFEOS2ArraySwathDimMapField(
                                (*it_f)->getRank(),
                                filename,
                                false,
                                sdfd,
                                swathfd,
                                "",
                                (dataset)->getName(),
                                tempfieldname,
                                dimmaps,
                                sotype,
                                (*it_f)->getNewName(),
                                bt);
                            for(it_d = dims.begin(); it_d != dims.end(); it_d++)
                                ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
                            dds.add_var(ar);
                            delete bt;
                            delete ar;
                        }
                    }
                    else { // No dimension map

                        HDFEOS2Array_RealField * ar = NULL;
                        ar = new HDFEOS2Array_RealField(
                            (*it_f)->getRank(),
                            filename,
                            false,
                            sdfd,
                            swathfd,
                            "",
                            (dataset)->getName(),
                            tempfieldname,
                            sotype,
                            (*it_f)->getNewName(),
                            bt);
                        for(it_d = dims.begin(); it_d != dims.end(); it_d++)
                            ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
                        dds.add_var(ar);
                        delete bt;
                        delete ar;
                    }
                }
                else {
                    delete bt;
                    throw InternalErr(__FILE__, __LINE__, "The current type should be either grid or swath");
                }
            }

            // For latitude and longitude
            else if(fieldtype == 1 || fieldtype == 2) {

                // For grid
                if(grid_or_swath==0) {

                    HDFEOS2ArrayGridGeoField *ar = NULL;
                    //int fieldtype = (*it_f)->getFieldType();
                    bool ydimmajor = (*it_f)->getYDimMajor();
                    bool condenseddim = (*it_f)->getCondensedDim();
                    bool speciallon = (*it_f)->getSpecialLon();
                    int specialformat = (*it_f)->getSpecialLLFormat();

                    ar = new HDFEOS2ArrayGridGeoField(
                        (*it_f)->getRank(),
                        fieldtype,
                        ownll,
                        ydimmajor,
                        condenseddim,
                        speciallon,
                        specialformat,
                        /*fieldcache,*/
                        filename,
                        gridfd,
                        (dataset)->getName(),
                        (*it_f)->getName(),
                        (*it_f)->getNewName(),
                        bt);

                    for(it_d = dims.begin(); it_d != dims.end(); it_d++)
                        ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
                    dds.add_var(ar);
                    delete bt;
                    delete ar;
                }

                // We encounter a very special MODIS case (MOD/MYD ATML2 files),
                // Latitude and longitude fields are located under data fields.
                // So include this case. KY 2010-7-12
                // We also encounter another special case(MOD11_L2.A2012248.2355.041.2012249083024.hdf),
                // the latitude/longitude with dimension map is under the "data fields".
                // So we have to consider this. KY 2012-09-24

                else if(grid_or_swath ==1) {

                    // Use Swath dimension map
                    if((*it_f)->UseDimMap()) {

                        // Have an extra HDF-EOS file for latitude and longitude
                        if(!modis_geofilename.empty()) {

                            if (false == geofile_has_dimmap) {
                                HDFEOS2ArraySwathGeoDimMapExtraField *ar = NULL;
                                ar = new HDFEOS2ArraySwathGeoDimMapExtraField(
                                    (*it_f)->getRank(),
                                    modis_geofilename,
                                    (*it_f)->getName(),
                                    (*it_f)->getNewName(),
                                    bt);
                                for(it_d = dims.begin(); it_d != dims.end(); it_d++)
                                    ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
                                dds.add_var(ar);
                                delete bt;
                                delete ar;
                            }
                            else {

                                HDFEOS2ArraySwathDimMapField * ar = NULL;

                                // SET dimmaps to empty.
                                // This is essential since we are using the geolocation file for the new information.
                                // The dimension map info. will be obtained when the data is read. KY 2013-03-13
                                dimmaps.clear();
                                ar = new HDFEOS2ArraySwathDimMapField(
                                    (*it_f)->getRank(),
                                    modis_geofilename,
                                    true,
                                    sdfd,
                                    swathfd,
                                    "",
                                    "MODIS_Swath_Type_GEO",
                                    (*it_f)->getName(),
                                    dimmaps,
                                    sotype,
                                    (*it_f)->getNewName(),
                                    bt);
                                for(it_d = dims.begin(); it_d != dims.end(); it_d++)
                                    ar->append_dim((*it_d)->getSize(), (*it_d)->getName());

                                dds.add_var(ar);
                                delete bt;
                                delete ar;
                            }
                        }
                        // Will interpolate by the handler
                        else {

                            HDFEOS2ArraySwathDimMapField * ar = NULL;
                            ar = new HDFEOS2ArraySwathDimMapField(
                                (*it_f)->getRank(),
                                filename,
                                false,
                                sdfd,
                                swathfd,
                                "",
                                (dataset)->getName(),
                                (*it_f)->getName(),
                                dimmaps,
                                sotype,
                                (*it_f)->getNewName(),
                                bt);
                            for(it_d = dims.begin(); it_d != dims.end(); it_d++)
                                ar->append_dim((*it_d)->getSize(), (*it_d)->getName());

                            dds.add_var(ar);
                            delete bt;
                            delete ar;
                        }
                    }
                    else {// No Dimension map

                        HDFEOS2ArraySwathGeoField * ar = NULL;
                        ar = new HDFEOS2ArraySwathGeoField(
                            (*it_f)->getRank(),
                            filename,
                            swathfd,
                            (dataset)->getName(),
                            (*it_f)->getName(),
                            (*it_f)->getNewName(),
                            bt);

                        for(it_d = dims.begin(); it_d != dims.end(); it_d++)
                            ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
                        dds.add_var(ar);
                        delete bt;
                        delete ar;
                    }
                }
                else {
                    delete bt;
                    throw InternalErr(__FILE__, __LINE__, "The current type should be either grid or swath");
                }

            }

            //Missing Z dimensional field
            else if(fieldtype == 4) {

                // A missing Z dimension is always 1-D; anything else is a handler bug.
                if((*it_f)->getRank()!=1){
                    delete bt;
                    throw InternalErr(__FILE__, __LINE__, "The rank of missing Z dimension field must be 1");
                }

                int nelem = ((*it_f)->getCorrectedDimensions()[0])->getSize();
                HDFEOS2ArrayMissGeoField *ar = NULL;
                ar = new HDFEOS2ArrayMissGeoField(
                    (*it_f)->getRank(),
                    nelem,
                    (*it_f)->getNewName(),
                    bt);

                for(it_d = dims.begin(); it_d != dims.end(); it_d++)
                    ar->append_dim((*it_d)->getSize(), (*it_d)->getName());

                dds.add_var(ar);
                delete bt;
                delete ar;
            }
            else {
                delete bt;
                throw InternalErr(__FILE__, __LINE__, "Encounter unsupported datatype or The field type should be between 0 and 5. ");
            }

        }
    }

}
754 
755 // Build DDS for HDF-EOS2 only.
756 //bool read_dds_hdfeos2(DDS & dds, const string & filename)
// Build DDS for HDF-EOS2 only.
//
// Return-code contract (the caller dispatches on this value):
//   5 - MERRA product: skip HDF-EOS2 entirely and use the HDF4 SDS path.
//   4 - AIRS-like product that uses SDS dimension scales perfectly.
//   2/3 - MOD08_M3 / AIRS version 6: DDS already built here via the special
//         1-D grid path.
//   0 - special HDF-EOS2 file without HDF-EOS2 structure (f is NULL);
//       nothing was added to the DDS.
//   1 - normal HDF-EOS2 handling: all grids and swaths mapped to the DDS.
//
// spf: the HDFSP (plain HDF4) view of the file, used for the special 1-D
//      grid path. f: the HDF-EOS2 view built by read_das_hdfeos2 (may be NULL).
//bool read_dds_hdfeos2(DDS & dds, const string & filename)
int read_dds_hdfeos2(DDS & dds, const string & filename,int32 sdfd,int32 fileid, int32 gridfd, int32 swathfd,HDFSP::File*spf,HDFEOS2::File*f)
{

    BESDEBUG("h4","Coming to read_dds_hdfeos2 "<<endl);

    // Set DDS dataset.
    dds.set_dataset_name(basename(filename));

    // There are some HDF-EOS2 files(MERRA) that should be treated
    // exactly like HDF4 SDS files. We don't need to use HDF-EOS2 APIs to
    // retrieve any information. In fact, treating them as HDF-EOS2 files
    // will cause confusions and we may get wrong information.
    // A quick fix is to check if the file name contains MERRA. KY 2011-3-4
    // Find MERRA data, return 5, then just use HDF4 SDS code.
    if((basename(filename).size() >=5) && ((basename(filename)).compare(0,5,"MERRA")==0))
        return 5;
    //return false;

#if 0
    string check_enable_spec_eos_key="H4.EnableSpecialEOS";
    bool turn_on_enable_spec_eos_key= false;
    turn_on_enable_spec_eos_key = HDFCFUtil::check_beskeys(check_enable_spec_eos_key);
#endif

    //if(true == turn_on_enable_spec_eos_key) {
    if(true == HDF4RequestHandler::get_enable_special_eos()) {

        string grid_name;
        int ret_val = check_special_eosfile(filename,grid_name,sdfd,fileid);

        // These are AIRS-like products that use HDF4 SDS dimension scale perfectly.
        // We originally thought that the AIRS version 6 products fall into this category, so we added this case.
        // However, the current AIRS version 6 products still miss some dimension scales. So currently we don't
        // find any products that support this case. Leave it for future use. KY 2015-06-03
        if(4== ret_val)
            return ret_val;


        // Case 2 or 3 are MOD08M3 or AIRS version 6
        if(2 == ret_val || 3 == ret_val) {

            try {
                read_dds_special_1d_grid(dds,spf,filename,sdfd,false);
                //read_dds_special_1d_grid(dds,spf,filename,sdfd,fileid);
            } catch (...)
            {
                //delete spf;
                throw;
            }
            return ret_val;
        }

    }

    // Special HDF-EOS2 file, doesn't use HDF-EOS2 file structure. so
    // the file pointer passed from DAS is Null. return 0.
    if( f == NULL)
        return 0;

    //Some grids have one shared lat/lon pair. For this case,"onelatlon" is true.
    // Other grids have their individual grids. We have to handle them differently.
    // ownll is the flag to distinguish "one lat/lon pair" and multiple lat/lon pairs.
    const vector<HDFEOS2::GridDataset *>& grids = f->getGrids();
    bool ownll = false;
    bool onelatlon = f->getOneLatLon();

    // Set scale and offset type to the DEFAULT one.
    SOType sotype = DEFAULT_CF_EQU;

    // Iterate all the grids of this file and map them to DAP DDS.
    vector<HDFEOS2::GridDataset *>::const_iterator it_g;
    for(it_g = grids.begin(); it_g != grids.end(); it_g++){

        // Check if this grid provides its own lat/lon.
        ownll = onelatlon?onelatlon:(*it_g)->getLatLonFlag();

        // Obtain Scale and offset type. This is for MODIS products who use non-CF scale/offset rules.
        sotype = (*it_g)->getScaleType();
        try {
            read_dds_hdfeos2_grid_swath(
                dds, filename, static_cast<HDFEOS2::Dataset*>(*it_g), 0,ownll,sotype,sdfd,fileid,gridfd,swathfd);
            // Add 1-D CF grid projection required coordinate variables.
            // Currently only supports sinusoidal projection.
            HDFCFUtil::add_cf_grid_cvs(dds,*it_g);
        }
        catch(...) {
            // delete f;
            throw;
        }
    }

    // Iterate all the swaths of this file and map them to DAP DDS.
    const vector<HDFEOS2::SwathDataset *>& swaths= f->getSwaths();
    vector<HDFEOS2::SwathDataset *>::const_iterator it_s;
    for(it_s = swaths.begin(); it_s != swaths.end(); it_s++) {

        // Obtain Scale and offset type. This is for MODIS products who use non-CF scale/offset rules.
        sotype = (*it_s)->getScaleType();
        try {
            read_dds_hdfeos2_grid_swath(
                dds, filename, static_cast<HDFEOS2::Dataset*>(*it_s), 1,false,sotype,sdfd,fileid,gridfd,swathfd);//No global lat/lon for multiple swaths
        }
        catch(...) {
            //delete f;
            throw;
        }
    }

    // Clear the field name list of which the datatype is changed. KY 2012-8-1
    // ctype_field_namelist is a global vector(see HDFEOS2HandleType.h for more description)
    // Since the handler program is a continuously running service, the values of this global vector may
    // change from one file to another. So clear this vector each time when mapping DDS is over.
    ctype_field_namelist.clear();

    return 1;
}
873 
874 
875 // The wrapper of building DDS of non-EOS fields and attributes in a hybrid HDF-EOS2 file.
876 //bool read_dds_hdfhybrid(DDS & dds, const string & filename,int32 sdfd, int32 fileid,int32 gridfd,int32 swathfd)
877 bool read_dds_hdfhybrid(DDS & dds, const string & filename,int32 sdfd, int32 fileid,HDFSP::File*f)
878 
879 {
880 
881  BESDEBUG("h4","Coming to read_dds_hdfhybrid "<<endl);
882 
883  // Set DDS dataset.
884  dds.set_dataset_name(basename(filename));
885 
886  // Obtain non-EOS SDS fields.
887  const vector<HDFSP::SDField *>& spsds = f->getSD()->getFields();
888 
889  // Read SDS
890  vector<HDFSP::SDField *>::const_iterator it_g;
891  for(it_g = spsds.begin(); it_g != spsds.end(); it_g++){
892  try {
893  read_dds_spfields(dds,filename,sdfd,(*it_g),f->getSPType());
894  }
895  catch(...) {
896  throw;
897  }
898  }
899 
900  // Read Vdata fields.
901  // To speed up the performance for CERES data, we turn off some CERES vdata fields.
902 
903  // Many MODIS and MISR products use Vdata intensively. To make the output CF compliant, we map
904  // each vdata field to a DAP array. However, this may cause the generation of many DAP fields. So
905  // we use the BES keys for users to turn on/off as they choose. By default, the key is turned on. KY 2012-6-26
906 
907 #if 0
908  string check_hybrid_vdata_key="H4.EnableHybridVdata";
909  bool turn_on_hybrid_vdata_key = false;
910  turn_on_hybrid_vdata_key = HDFCFUtil::check_beskeys(check_hybrid_vdata_key);
911 #endif
912 
913  //if( true == turn_on_hybrid_vdata_key) {
914  if( true == HDF4RequestHandler::get_enable_hybrid_vdata()) {
915  for(vector<HDFSP::VDATA *>::const_iterator i = f->getVDATAs().begin(); i!=f->getVDATAs().end();i++) {
916  if(false == (*i)->getTreatAsAttrFlag()){
917  for(vector<HDFSP::VDField *>::const_iterator j=(*i)->getFields().begin();j!=(*i)->getFields().end();j++) {
918  try {
919  read_dds_spvdfields(dds,filename,fileid, (*i)->getObjRef(),(*j)->getNumRec(),(*j));
920  }
921  catch(...) {
922  throw;
923  }
924  }
925  }
926  }
927  }
928 
929  return true;
930 }
931 
932 
933 // Build DAS for non-EOS objects in a hybrid HDF-EOS2 file.
934 bool read_das_hdfhybrid(DAS & das, const string & filename,int32 sdfd, int32 fileid,HDFSP::File**fpptr)
935 {
936 
937  BESDEBUG("h4","Coming to read_das_hdfhybrid "<<endl);
938  // Declare a non-EOS file pointer
939  HDFSP::File *f = NULL;
940  try {
941  // Read non-EOS objects in a hybrid HDF-EOS2 file.
942  f = HDFSP::File::Read_Hybrid(filename.c_str(), sdfd,fileid);
943  }
944  catch (HDFSP::Exception &e)
945  {
946  if(f!=NULL)
947  delete f;
948  throw InternalErr(e.what());
949  }
950 
951  // Remember the file pointer
952  *fpptr = f;
953 
954 #if 0
955  string check_scale_offset_type_key = "H4.EnableCheckScaleOffsetType";
956  bool turn_on_enable_check_scale_offset_key= false;
957  turn_on_enable_check_scale_offset_key = HDFCFUtil::check_beskeys(check_scale_offset_type_key);
958 #endif
959 
960  // First Added non-HDFEOS2 SDS attributes.
961  const vector<HDFSP::SDField *>& spsds = f->getSD()->getFields();
962  vector<HDFSP::SDField *>::const_iterator it_g;
963  for(it_g = spsds.begin(); it_g != spsds.end(); it_g++){
964 
965  // Use CF field name as the DAS table name.
966  AttrTable *at = das.get_table((*it_g)->getNewName());
967  if (!at)
968  at = das.add_table((*it_g)->getNewName(), new AttrTable);
969 
970  // Some fields have "long_name" attributes,so we have to use this attribute rather than creating our own "long_name"
971  bool long_name_flag = false;
972 
973  for(vector<HDFSP::Attribute *>::const_iterator i=(*it_g)->getAttributes().begin();i!=(*it_g)->getAttributes().end();i++) {
974 
975  if((*i)->getName() == "long_name") {
976  long_name_flag = true;
977  break;
978  }
979  }
980 
981  if(false == long_name_flag)
982  at->append_attr("long_name", "String", (*it_g)->getName());
983 
984  // Map all attributes to DAP DAS.
985  for(vector<HDFSP::Attribute *>::const_iterator i=(*it_g)->getAttributes().begin();i!=(*it_g)->getAttributes().end();i++) {
986 
987  // Handle string first.
988  if((*i)->getType()==DFNT_UCHAR || (*i)->getType() == DFNT_CHAR){
989 
990  // Questionable use of string. KY 2014-02-12
991  string tempstring2((*i)->getValue().begin(),(*i)->getValue().end());
992  string tempfinalstr= string(tempstring2.c_str());
993 
994  // We want to escape the possible special characters except the fullpath attribute.
995  // The fullpath is only added for some CERES and MERRA data. People use fullpath to keep their
996  // original names even their original name includes special characters. KY 2014-02-12
997  at->append_attr((*i)->getNewName(), "String" , ((*i)->getNewName()=="fullpath")?tempfinalstr:HDFCFUtil::escattr(tempfinalstr));
998  }
999  else {
1000  for (int loc=0; loc < (*i)->getCount() ; loc++) {
1001  string print_rep = HDFCFUtil::print_attr((*i)->getType(), loc, (void*) &((*i)->getValue()[0]));
1002  at->append_attr((*i)->getNewName(), HDFCFUtil::print_type((*i)->getType()), print_rep);
1003  }
1004  }
1005  }
1006 
1007  // Check if having _FillValue. If having _FillValue, compare the datatype of _FillValue
1008  // with the variable datatype. Correct the fillvalue datatype if necessary.
1009  if(at != NULL) {
1010  int32 var_type = (*it_g)->getType();
1011  try {
1012  HDFCFUtil::correct_fvalue_type(at,var_type);
1013  }
1014  catch(...) {
1015  //delete f;
1016  throw;
1017  }
1018  }
1019 
1020  // If H4.EnableCheckScaleOffsetType BES key is true,
1021  // if yes, check if having scale_factor and add_offset attributes;
1022  // if yes, check if scale_factor and add_offset attribute types are the same;
1023  // if no, make add_offset's datatype be the same as the datatype of scale_factor.
1024  // (CF requires the type of scale_factor and add_offset the same).
1025  //if (true == turn_on_enable_check_scale_offset_key && at !=NULL)
1026  if (true == HDF4RequestHandler::get_enable_check_scale_offset_type() && at !=NULL)
1028 
1029  }
1030 
1031  // Handle vdata attributes.
1032  try {
1033  HDFCFUtil::handle_vdata_attrs_with_desc_key(f,das);
1034  }
1035  catch(...) {
1036  throw;
1037  }
1038 
1039  return true;
1040 
1041 }
1042 
1045 void read_dds_use_eos2lib(DDS & dds, const string & filename,int32 sdfd,int32 fileid, int32 gridfd, int32 swathfd,HDFSP::File*h4file,HDFEOS2::File*eosfile)
1046 {
1047 
1048  BESDEBUG("h4","Coming to read_dds_use_eos2lib" <<endl);
1049 
1050  int ret_value = read_dds_hdfeos2(dds,filename,sdfd,fileid,gridfd,swathfd,h4file,eosfile);
1051 
1052  BESDEBUG("h4","ret_value of read_dds_hdfeos2 is "<<ret_value<<endl);
1053 
1054  // read_dds_hdfeos2 return value description:
1055  // 0: general non-EOS2 pure HDF4
1056  // 1: HDF-EOS2 hybrid
1057  // 2: MOD08_M3
1058  // HDF-EOS2 but no need to use HDF-EOS2 lib: no real dimension scales but have CVs for every dimension, treat differently
1059  // 3: AIRS version 6
1060  // HDF-EOS2 but no need to use HDF-EOS2 lib:
1061  // have dimension scales but don’t have CVs for every dimension, also need to condense dimensions, treat differently
1062  // 4. Ideal(Expected) AIRS version 6(No real products yet)
1063  // HDF-EOS2 but no need to use HDF-EOS2 lib: Have dimension scales for all dimensions
1064  // 5. MERRA
1065  // Special handling for MERRA file
1066 
1067 
1068  // Treat MERRA and non-HDFEOS2 HDF4 products as pure HDF4 objects
1069  // For Ideal AIRS version 6 products, we temporarily handle them in a generic HDF4 way.
1070  if (0 == ret_value || 5 == ret_value || 4 == ret_value ) {
1071  if(true == read_dds_hdfsp(dds, filename,sdfd,fileid,h4file))
1072  return;
1073  }
1074  // Special handling
1075  else if ( 1 == ret_value ) {
1076 
1077  // Map non-EOS2 objects to DDS
1078  if(true ==read_dds_hdfhybrid(dds,filename,sdfd,fileid,h4file))
1079  return;
1080  }
1081  else {// ret_value = 2 and 3 are handled already in the read_dds_hdfeos2 calls. Just return.
1082  return;
1083  }
1084 
1085 // leave this code block for performance comparison.
1086 #if 0
1087  // first map HDF-EOS2 objects to DDS
1088  if(true == read_dds_hdfeos2(dds, filename)){
1089 
1090  // Map non-EOS2 objects to DDS
1091  if(true == read_dds_hdfhybrid(dds,filename))
1092  return;
1093  }
1094 
1095  // Map HDF4 objects in pure HDF4 files to DDS
1096  if(read_dds_hdfsp(dds, filename)){
1097  return;
1098  }
1099 #endif
1100 
1101  // Call the default mapping of HDF4 to DDS. It should never reach here.
1102  // We add this line to ensure the HDF4 objects mapped to DDS even if the above routines return false.
1103  read_dds(dds, filename);
1104 
1105 }
1106 
// Map other HDF global attributes to the DAS. This routine must be called
// after all ECS metadata (coremetadata, structmetadata, ...) are handled,
// so that only the remaining, non-ECS global attributes are emitted.
void write_non_ecsmetadata_attrs(HE2CF& cf) {

    // Delegates entirely to HE2CF; presumably writes to the DAS previously
    // registered via cf.set_DAS() — see HE2CF.cc for details.
    cf.set_non_ecsmetadata_attrs();

}
1113 
1114 // Map HDF-EOS2's ECS attributes to DAS. ECS attributes include coremetadata, structmetadata etc.
1115 void write_ecsmetadata(DAS& das, HE2CF& cf, const string& _meta)
1116 {
1117 
1118  // There is a maximum length for each ECS metadata if one uses ECS toolkit to add the metadata.
1119  // For some products of which the metadata size is huge, one metadata may be stored in several
1120  // ECS attributes such as coremetadata.0, coremetadata.1 etc.
1121  // When mapping the ECS metadata, we need to merge such metadata attributes into one attribute(coremetadata)
1122  // so that end users can easily understand this metadata.
1123  // ECS toolkit recommends data producers to use the format shown in the following coremetadata example:
1124  // coremetadata.0, coremetadata.1, etc.
1125  // Most NASA HDF-EOS2 products follow this naming convention.
1126  // However, the toolkit also allows data producers to freely name its metadata.
1127  // So we also find the following slightly different format:
1128  // (1) No suffix: coremetadata
1129  // (2) only have one such ECS attribute: coremetadata.0
1130  // (3) have several ECS attributes with two dots after the name: coremetadata.0, coremetadata.0.1 etc.
1131  // (4) Have non-number suffix: productmetadata.s, productmetadata.t etc.
1132  // We handle the above case in the function set_metadata defined in HE2CF.cc. KY 2013-07-06
1133 
1134  bool suffix_is_number = true;
1135  vector<string> meta_nonum_names;
1136  vector<string> meta_nonum_data;
1137 
1138  string meta = cf.get_metadata(_meta,suffix_is_number,meta_nonum_names, meta_nonum_data);
1139 
1140  if(""==meta && true == suffix_is_number){
1141  return; // No _meta metadata exists.
1142  }
1143 
1144  BESDEBUG("h4",meta << endl);
1145 
1146  if (false == suffix_is_number) {
1147  // For the case when the suffix is like productmetadata.s, productmetadata.t,
1148  // we will not merge the metadata since we are not sure about the order.
1149  // We just parse each attribute individually.
1150  for (unsigned int i = 0; i <meta_nonum_names.size(); i++)
1151  parse_ecs_metadata(das,meta_nonum_names[i],meta_nonum_data[i]);
1152  }
1153  else
1154  parse_ecs_metadata(das,_meta,meta);
1155 
1156 }
1157 
1158 void parse_ecs_metadata(DAS &das,const string & metaname, const string &metadata) {
1159 
1160 
1161  AttrTable *at = das.get_table(metaname);
1162  if (!at)
1163  at = das.add_table(metaname, new AttrTable);
1164 
1165  // tell lexer to scan attribute string
1166  void *buf = hdfeos_string(metadata.c_str());
1167  parser_arg arg(at);
1168 
1169  if (hdfeosparse(&arg) != 0) {
1170  hdfeos_delete_buffer(buf);
1171  throw Error("HDF-EOS parse error while processing a " + metadata + " HDFEOS attribute.");
1172  }
1173 
1174  if (arg.status() == false) {
1175  (*BESLog::TheLog())<< "HDF-EOS parse error while processing a "
1176  << metadata << " HDFEOS attribute. (2) " << endl;
1177  // << arg.error()->get_error_message() << endl;
1178  }
1179 
1180  hdfeos_delete_buffer(buf);
1181 }
1182 
1183 // Build DAS for HDFEOS2 files.
1184 int read_das_hdfeos2(DAS & das, const string & filename,int32 sdfd,int32 fileid, int32 gridfd, int32 swathfd,
1185  bool ecs_metadata,HDFSP::File**spfpptr,HDFEOS2::File **fpptr)
1186 {
1187 
1188  BESDEBUG("h4","Coming to read_das_hdfeos2 " << endl);
1189 
1190  // There are some HDF-EOS2 files(MERRA) that should be treated
1191  // exactly like HDF4 SDS files. We don't need to use HDF-EOS2 APIs to
1192  // retrieve any information. In fact, treating them as HDF-EOS2 files
1193  // will cause confusions and retrieve wrong information, though may not be essential.
1194  // So far, we've only found that the MERRA product has this problem.
1195  // A quick fix is to check if the file name contains MERRA. KY 2011-3-4
1196  // Actually, AIRS version 6 and MODO8M3 also fall into this category,
1197  // they are also specially handled, check read_das_special_eos2_core. KY 2015-06-04
1198 
1199  // Find MERRA data, return 5.
1200  if((basename(filename).size() >=5) && ((basename(filename)).compare(0,5,"MERRA")==0)) {
1201  return 5;
1202  }
1203 
1204  // We will check if the handler wants to turn on the special EOS key checking
1205 #if 0
1206  string check_enable_spec_eos_key="H4.EnableSpecialEOS";
1207  bool turn_on_enable_spec_eos_key= false;
1208  turn_on_enable_spec_eos_key = HDFCFUtil::check_beskeys(check_enable_spec_eos_key);
1209 #endif
1210  if(true == HDF4RequestHandler::get_enable_special_eos()) {
1211 
1212  string grid_name;
1213  int ret_val = check_special_eosfile(filename,grid_name,sdfd,fileid);
1214 
1215  // Expected AIRS level 2 or 3
1216  if(4== ret_val)
1217  return ret_val;
1218 
1219  bool airs_l2_l3_v6 = false;
1220  bool special_1d_grid = false;
1221 
1222  // AIRS level 2,3 version 6 or MOD08_M3-like products
1223  if(2 == ret_val || 3 == ret_val) {
1224 
1225  HDFSP::File *spf = NULL;
1226  try {
1227  spf = HDFSP::File::Read(filename.c_str(),sdfd,fileid);
1228  }
1229  catch (HDFSP::Exception &e)
1230  {
1231  if (spf != NULL)
1232  delete spf;
1233  throw InternalErr(e.what());
1234  }
1235 
1236  try {
1237  if( 2 == ret_val) {
1238 
1239  // More check and build the relations if this is a special MOD08_M3-like file
1240  if(spf->Check_update_special(grid_name)== true){
1241 
1242  special_1d_grid = true;
1243 
1244  // Building the normal HDF4 DAS here.
1245  read_das_special_eos2_core(das,spf,filename,ecs_metadata);
1246 
1247  // Need to handle MOD08M3 product
1248  if(grid_name =="mod08") {
1249  change_das_mod08_scale_offset(das,spf);
1250  }
1251  }
1252  }
1253  else {
1254 
1255  airs_l2_l3_v6 = true;
1256  spf->Handle_AIRS_L23();
1257  read_das_special_eos2_core(das,spf,filename,ecs_metadata);
1258  }
1259  //delete spf;
1260 
1261  }
1262  catch (...)
1263  {
1264  delete spf;
1265  throw;
1266  }
1267 
1268  // If this is MOD08M3 or AIRS version 6,we just need to return the file pointer.
1269  if (true == special_1d_grid || true == airs_l2_l3_v6) {
1270  *spfpptr = spf;
1271  return ret_val;
1272  }
1273 
1274  }
1275  }
1276 
1277  HDFEOS2::File *f = NULL;
1278 
1279  try {
1280  // Read all the information of EOS objects from an HDF-EOS2 file
1281  f= HDFEOS2::File::Read(filename.c_str(),gridfd,swathfd);
1282  }
1283  catch (HDFEOS2::Exception &e){
1284 
1285  if(f != NULL)
1286  delete f;
1287 
1288  // If this file is not an HDF-EOS2 file, return 0.
1289  if (!e.getFileType()){
1290  //return false;
1291  return 0;
1292  }
1293  else
1294  {
1295  throw InternalErr(e.what());
1296  }
1297  }
1298 
1299  try {
1300  // Generate CF coordinate variables(including auxiliary coordinate variables) and dimensions
1301  // All the names follow CF.
1302  f->Prepare(filename.c_str());
1303  }
1304 
1305  catch (HDFEOS2:: Exception &e) {
1306  if(f!=NULL)
1307  delete f;
1308  throw InternalErr(e.what());
1309  }
1310 
1311  *fpptr = f;
1312 
1313  // HE2CF cf is used to handle hybrid SDS and SD attributes.
1314  HE2CF cf;
1315 
1316  try {
1317  cf.open(filename,sdfd,fileid);
1318  }
1319  catch(...) {
1320  throw;
1321  }
1322  cf.set_DAS(&das);
1323 
1324  SOType sotype = DEFAULT_CF_EQU;
1325 
1326  // A flag not to generate structMetadata for the MOD13C2 file.
1327  // MOD13C2's structMetadata has wrong values. It couldn't pass the parser.
1328  // So we want to turn it off. KY 2010-8-10
1329  bool tempstrflag = false;
1330 
1331  // Product name(AMSR_E) that needs to change attribute from "SCALE FACTOR" to scale_factor etc. to follow the CF conventions
1332  bool filename_change_scale = false;
1333  if (f->getSwaths().size() > 0) {
1334  string temp_fname = basename(filename);
1335  string temp_prod_prefix = "AMSR_E";
1336  if ((temp_fname.size() > temp_prod_prefix.size()) &&
1337  (0 == (temp_fname.compare(0,temp_prod_prefix.size(),temp_prod_prefix))))
1338  filename_change_scale = true;
1339  }
1340 
1341  // Obtain information to identify MEaSURES VIP. This product needs to be handled properly.
1342  bool gridname_change_valid_range = false;
1343  if(1 == f->getGrids().size()) {
1344  string gridname = f->getGrids()[0]->getName();
1345  if ("VIP_CMG_GRID" == gridname)
1346  gridname_change_valid_range = true;
1347  }
1348 
1349  // Obtain information to identify MODIS_SWATH_Type_L1B product. This product's scale and offset need to be handled properly.
1350  bool is_modis_l1b = false;
1351 
1352  // Since this is a swath product, we check swath only.
1353  for (int i = 0; i<(int) f->getSwaths().size(); i++) {
1354  HDFEOS2::SwathDataset* swath = f->getSwaths()[i];
1355  string sname = swath->getName();
1356  if("MODIS_SWATH_Type_L1B" == sname){
1357  is_modis_l1b = true;
1358  break;
1359  }
1360  }
1361 
1362 #if 0
1363  string check_disable_scale_comp_key = "H4.DisableScaleOffsetComp";
1364  bool turn_on_disable_scale_comp_key= false;
1365  turn_on_disable_scale_comp_key = HDFCFUtil::check_beskeys(check_disable_scale_comp_key);
1366 
1367  string check_scale_offset_type_key = "H4.EnableCheckScaleOffsetType";
1368  bool turn_on_enable_check_scale_offset_key= false;
1369  turn_on_enable_check_scale_offset_key = HDFCFUtil::check_beskeys(check_scale_offset_type_key);
1370 #endif
1371 
1372  try {
1373 
1374  // MAP grids to DAS.
1375  for (int i = 0; i < (int) f->getGrids().size(); i++) {
1376 
1377  HDFEOS2::GridDataset* grid = f->getGrids()[i];
1378  string gname = grid->getName();
1379  sotype = grid->getScaleType();
1380 
1381  const vector<HDFEOS2::Field*>gfields = grid->getDataFields();
1382  vector<HDFEOS2::Field*>::const_iterator it_gf;
1383 
1384  for (it_gf = gfields.begin();it_gf != gfields.end();++it_gf) {
1385 
1386  bool change_fvtype = false;
1387 
1388  // original field name
1389  string fname = (*it_gf)->getName();
1390 
1391  // new field name that follows CF
1392  string newfname = (*it_gf)->getNewName();
1393 
1394  BESDEBUG("h4","Original field name: " << fname << endl);
1395  BESDEBUG("h4","Corrected field name: " << newfname << endl);
1396 
1397  // whether coordinate variable or data variables
1398  int fieldtype = (*it_gf)->getFieldType();
1399 
1400  // 0 means that the data field is NOT a coordinate variable.
1401  if (fieldtype == 0){
1402 
1403  // If you don't find any _FillValue through generic API.
1404  if((*it_gf)->haveAddedFillValue()) {
1405  BESDEBUG("h4","Has an added fill value." << endl);
1406  float addedfillvalue =
1407  (*it_gf)->getAddedFillValue();
1408  int type =
1409  (*it_gf)->getType();
1410  BESDEBUG("h4","Added fill value = "<<addedfillvalue);
1411  cf.write_attribute_FillValue(newfname,
1412  type, addedfillvalue);
1413  }
1414  string coordinate = (*it_gf)->getCoordinate();
1415  BESDEBUG("h4","Coordinate attribute: " << coordinate <<endl);
1416  if (coordinate != "")
1417  cf.write_attribute_coordinates(newfname, coordinate);
1418  }
1419 
1420  // This will override _FillValue if it's defined on the field.
1421  cf.write_attribute(gname, fname, newfname,
1422  f->getGrids().size(), fieldtype);
1423 
1424  // For fieldtype values:
1425  // 0 is general fields
1426  // 1 is latitude.
1427  // 2 is longtitude.
1428  // 3 is the existing 3rd-dimension coordinate variable
1429  // 4 is the dimension that misses the coordinate variable,use natural number
1430  // 5 is time
1431  if(fieldtype > 0){
1432 
1433  // MOD13C2 is treated specially.
1434  if(fieldtype == 1 && ((*it_gf)->getSpecialLLFormat())==3)
1435  tempstrflag = true;
1436 
1437  // Don't change the units if the 3-rd dimension field exists.(fieldtype =3)
1438  // KY 2013-02-15
1439  if (fieldtype !=3) {
1440  string tempunits = (*it_gf)->getUnits();
1441  BESDEBUG("h4",
1442  "fieldtype " << fieldtype
1443  << " units" << tempunits
1444  << endl);
1445  cf.write_attribute_units(newfname, tempunits);
1446  }
1447  }
1448 
1449  //Rename attributes of MODIS products.
1450  AttrTable *at = das.get_table(newfname);
1451 
1452  // No need for the case that follows the CF scale and offset .
1453  if(sotype!=DEFAULT_CF_EQU && at!=NULL)
1454  {
1455  bool has_Key_attr = false;
1456  AttrTable::Attr_iter it = at->attr_begin();
1457  while (it!=at->attr_end())
1458  {
1459  if(at->get_name(it)=="Key")
1460  {
1461  has_Key_attr = true;
1462  break;
1463  }
1464  it++;
1465  }
1466 
1467  //if((false == is_modis_l1b) && (false == gridname_change_valid_range)&&(false == has_Key_attr) && (true == turn_on_disable_scale_comp_key))
1468  if((false == is_modis_l1b) && (false == gridname_change_valid_range)&&(false == has_Key_attr) &&
1469  (true == HDF4RequestHandler::get_disable_scaleoffset_comp()))
1470  HDFCFUtil::handle_modis_special_attrs_disable_scale_comp(at,basename(filename), true, newfname,sotype);
1471  else {
1472 
1473  // Check if the datatype of this field needs to be changed.
1474  bool changedtype = HDFCFUtil::change_data_type(das,sotype,newfname);
1475 
1476  // Build up the field name list if the datatype of the field needs to be changed.
1477  if (true == changedtype)
1478  ctype_field_namelist.push_back(newfname);
1479 
1480  HDFCFUtil::handle_modis_special_attrs(at,basename(filename),true, newfname,sotype,gridname_change_valid_range,changedtype,change_fvtype);
1481 
1482  }
1483  }
1484 
1485  // Handle AMSR-E attributes.
1486  HDFCFUtil::handle_amsr_attrs(at);
1487 
1488  // Check if having _FillValue. If having _FillValue, compare the datatype of _FillValue
1489  // with the variable datatype. Correct the fillvalue datatype if necessary.
1490  if((false == change_fvtype) && at != NULL) {
1491  int32 var_type = (*it_gf)->getType();
1492  HDFCFUtil::correct_fvalue_type(at,var_type);
1493  }
1494 
1495  // if h4.enablecheckscaleoffsettype bes key is true,
1496  // if yes, check if having scale_factor and add_offset attributes;
1497  // if yes, check if scale_factor and add_offset attribute types are the same;
1498  // if no, make add_offset's datatype be the same as the datatype of scale_factor.
1499  // (cf requires the type of scale_factor and add_offset the same).
1500  //if (true == turn_on_enable_check_scale_offset_key && at!=NULL)
1501  if (true == HDF4RequestHandler::get_enable_check_scale_offset_type() && at!=NULL)
1503 
1504  }
1505 
1506  // Add possible 1-D CV CF attributes to identify projection info. for CF.
1507  // Currently only the Sinusoidal projection is supported.
1508  HDFCFUtil::add_cf_grid_cv_attrs(das,grid);
1509 
1510  }
1511  }
1512  catch(...) {
1513  //delete f;
1514  throw;
1515  }
1516 
1517  try {
1518  // MAP Swath attributes to DAS.
1519  for (int i = 0; i < (int) f->getSwaths().size(); i++) {
1520 
1521  HDFEOS2::SwathDataset* swath = f->getSwaths()[i];
1522 
1523  // Swath includes two parts: "Geolocation Fields" and "Data Fields".
1524  // The all_fields vector includes both.
1525  const vector<HDFEOS2::Field*> geofields = swath->getGeoFields();
1526  vector<HDFEOS2::Field*> all_fields = geofields;
1527  vector<HDFEOS2::Field*>::const_iterator it_f;
1528 
1529  const vector<HDFEOS2::Field*> datafields = swath->getDataFields();
1530  for (it_f = datafields.begin(); it_f != datafields.end(); it_f++)
1531  all_fields.push_back(*it_f);
1532 
1533  int total_geofields = geofields.size();
1534 
1535  string gname = swath->getName();
1536  BESDEBUG("h4","Swath name: " << gname << endl);
1537 
1538  sotype = swath->getScaleType();
1539 
1540  // field_counter is only used to separate the geo field from the data field.
1541  int field_counter = 0;
1542 
1543  for(it_f = all_fields.begin(); it_f != all_fields.end(); it_f++)
1544  {
1545  bool change_fvtype = false;
1546  string fname = (*it_f)->getName();
1547  string newfname = (*it_f)->getNewName();
1548  BESDEBUG("h4","Original Field name: " << fname << endl);
1549  BESDEBUG("h4","Corrected Field name: " << newfname << endl);
1550 
1551  int fieldtype = (*it_f)->getFieldType();
1552  if (fieldtype == 0){
1553  string coordinate = (*it_f)->getCoordinate();
1554  BESDEBUG("h4","Coordinate attribute: " << coordinate <<endl);
1555  if (coordinate != "")
1556  cf.write_attribute_coordinates(newfname, coordinate);
1557  }
1558 
1559  // 1 is latitude.
1560  // 2 is longitude.
1561  // Don't change "units" if a non-latlon coordinate variable exists.
1562  //if(fieldtype >0 )
1563  if(fieldtype >0 && fieldtype !=3){
1564  string tempunits = (*it_f)->getUnits();
1565  BESDEBUG("h4",
1566  "fieldtype " << fieldtype
1567  << " units" << tempunits << endl);
1568  cf.write_attribute_units(newfname, tempunits);
1569 
1570  }
1571  BESDEBUG("h4","Field Name: " << fname << endl);
1572 
1573  // coordinate "fillvalue" attribute
1574  // This operation should only apply to data fields.
1575  if (field_counter >=total_geofields) {
1576  if((*it_f)->haveAddedFillValue()){
1577  float addedfillvalue =
1578  (*it_f)->getAddedFillValue();
1579  int type =
1580  (*it_f)->getType();
1581  BESDEBUG("h4","Added fill value = "<<addedfillvalue);
1582  cf.write_attribute_FillValue(newfname, type, addedfillvalue);
1583  }
1584  }
1585  cf.write_attribute(gname, fname, newfname,
1586  f->getSwaths().size(), fieldtype);
1587 
1588  AttrTable *at = das.get_table(newfname);
1589 
1590  // No need for CF scale and offset equation.
1591  if(sotype!=DEFAULT_CF_EQU && at!=NULL)
1592  {
1593 
1594  bool has_Key_attr = false;
1595  AttrTable::Attr_iter it = at->attr_begin();
1596  while (it!=at->attr_end())
1597  {
1598  if(at->get_name(it)=="Key")
1599  {
1600  has_Key_attr = true;
1601  break;
1602  }
1603  it++;
1604  }
1605 
1606  //if((false == is_modis_l1b) && (false == gridname_change_valid_range) &&(false == has_Key_attr) && (true == turn_on_disable_scale_comp_key))
1607  if((false == is_modis_l1b) && (false == gridname_change_valid_range) &&(false == has_Key_attr) &&
1608  (true == HDF4RequestHandler::get_disable_scaleoffset_comp()))
1609  HDFCFUtil::handle_modis_special_attrs_disable_scale_comp(at,basename(filename),false,newfname,sotype);
1610  else {
1611 
1612  // Check if the datatype of this field needs to be changed.
1613  bool changedtype = HDFCFUtil::change_data_type(das,sotype,newfname);
1614 
1615  // Build up the field name list if the datatype of the field needs to be changed.
1616  if (true == changedtype)
1617 
1618  ctype_field_namelist.push_back(newfname);
1619 
1620  // Handle MODIS special attributes such as valid_range, scale_factor and add_offset etc.
1621  // Need to catch the exception since this function calls handle_modis_vip_special_attrs that may
1622  // throw an exception.
1623  HDFCFUtil::handle_modis_special_attrs(at,basename(filename), false,newfname,sotype,gridname_change_valid_range,changedtype,change_fvtype);
1624  }
1625  }
1626 
1627  // Handle AMSR-E attributes
1628  if(at !=NULL)
1629  HDFCFUtil::handle_amsr_attrs(at);
1630 
1631  // Check if having _FillValue. If having _FillValue, compare the datatype of _FillValue
1632  // with the variable datatype. Correct the fillvalue datatype if necessary.
1633  if((false == change_fvtype) && at != NULL) {
1634  int32 var_type = (*it_f)->getType();
1635  HDFCFUtil::correct_fvalue_type(at,var_type);
1636  }
1637 
1638  // If H4.EnableCheckScaleOffsetType BES key is true,
1639  // if yes, check if having scale_factor and add_offset attributes;
1640  // if yes, check if scale_factor and add_offset attribute types are the same;
1641  // if no, make add_offset's datatype be the same as the datatype of scale_factor.
1642  // (CF requires the type of scale_factor and add_offset the same).
1643  //if (true == turn_on_enable_check_scale_offset_key && at !=NULL)
1644  if (true == HDF4RequestHandler::get_enable_check_scale_offset_type() && at !=NULL)
1646 
1647  field_counter++;
1648  }
1649  }
1650  }
1651  catch(...) {
1652  //delete f;
1653  throw;
1654  }
1655 
1656 
1657  try {
1658 
1659  if(ecs_metadata == true) {
1660 
1661  // Handle ECS metadata. The following metadata are what we found so far.
1662  write_ecsmetadata(das,cf, "CoreMetadata");
1663 
1664  write_ecsmetadata(das,cf, "coremetadata");
1665 
1666  write_ecsmetadata(das,cf,"ArchiveMetadata");
1667 
1668  write_ecsmetadata(das,cf,"archivemetadata");
1669 
1670  write_ecsmetadata(das,cf,"ProductMetadata");
1671 
1672  write_ecsmetadata(das,cf,"productmetadata");
1673  }
1674 
1675  // This cause a problem for a MOD13C2 file, So turn it off temporarily. KY 2010-6-29
1676  if(false == tempstrflag) {
1677 
1678 #if 0
1679  string check_disable_smetadata_key ="H4.DisableStructMetaAttr";
1680  bool is_check_disable_smetadata = false;
1681  is_check_disable_smetadata = HDFCFUtil::check_beskeys(check_disable_smetadata_key);
1682 #endif
1683 
1684  if (false == HDF4RequestHandler::get_disable_structmeta() ) {
1685  write_ecsmetadata(das, cf, "StructMetadata");
1686  }
1687  }
1688 
1689  // Write other HDF global attributes, this routine must be called after all ECS metadata are handled.
1690  write_non_ecsmetadata_attrs(cf);
1691 
1692  cf.close();
1693  }
1694  catch(...) {
1695  //delete f;
1696  throw;
1697  }
1698 
1699  try {
1700 
1701  // Check if swath or grid object (like vgroup) attributes should be mapped to DAP2. If yes, start mapping.
1702 #if 0
1703  string check_enable_sg_attr_key="H4.EnableSwathGridAttr";
1704  bool turn_on_enable_sg_attr_key= false;
1705  turn_on_enable_sg_attr_key = HDFCFUtil::check_beskeys(check_enable_sg_attr_key);
1706 #endif
1707 
1708  if(true == HDF4RequestHandler::get_enable_swath_grid_attr()) {
1709 
1710  // MAP grid attributes to DAS.
1711  for (int i = 0; i < (int) f->getGrids().size(); i++) {
1712 
1713 
1714  HDFEOS2::GridDataset* grid = f->getGrids()[i];
1715 
1716  string gname = HDFCFUtil::get_CF_string(grid->getName());
1717 
1718  AttrTable*at = NULL;
1719 
1720  // Create a "grid" DAS table if this grid has attributes.
1721  if(grid->getAttributes().size() != 0){
1722  at = das.get_table(gname);
1723  if (!at)
1724  at = das.add_table(gname, new AttrTable);
1725  }
1726  if(at!= NULL) {
1727 
1728  // Process grid attributes
1729  const vector<HDFEOS2::Attribute *> grid_attrs = grid->getAttributes();
1730  vector<HDFEOS2::Attribute*>::const_iterator it_a;
1731  for (it_a = grid_attrs.begin(); it_a != grid_attrs.end(); ++it_a) {
1732 
1733  int attr_type = (*it_a)->getType();
1734 
1735  // We treat string differently. DFNT_UCHAR and DFNT_CHAR are treated as strings.
1736  if(attr_type==DFNT_UCHAR || attr_type == DFNT_CHAR){
1737  string tempstring2((*it_a)->getValue().begin(),(*it_a)->getValue().end());
1738  string tempfinalstr= string(tempstring2.c_str());
1739 
1740  // Using the customized escattr function to escape special characters except
1741  // \n,\r,\t since escaping them may make the attributes hard to read. KY 2013-10-14
1742  // at->append_attr((*i)->getNewName(), "String" , escattr(tempfinalstr));
1743  at->append_attr((*it_a)->getNewName(), "String" , HDFCFUtil::escattr(tempfinalstr));
1744  }
1745 
1746 
1747  else {
1748  for (int loc=0; loc < (*it_a)->getCount() ; loc++) {
1749  string print_rep = HDFCFUtil::print_attr((*it_a)->getType(), loc, (void*) &((*it_a)->getValue()[0]));
1750  at->append_attr((*it_a)->getNewName(), HDFCFUtil::print_type((*it_a)->getType()), print_rep);
1751  }
1752  }
1753  }
1754  }
1755  }
1756 
1757  //
1758  // MAP swath attributes to DAS.
1759  for (int i = 0; i < (int) f->getSwaths().size(); i++) {
1760 
1761  HDFEOS2::SwathDataset* swath = f->getSwaths()[i];
1762  string sname = swath->getName();
1763  AttrTable*at = NULL;
1764 
1765  // Create a "swath" DAS table if this swath has attributes.
1766  if(swath->getAttributes().size() != 0) {
1767  at = das.get_table(sname);
1768  if (!at)
1769  at = das.add_table(sname, new AttrTable);
1770  }
1771 
1772  if(at != NULL) {
1773  const vector<HDFEOS2::Attribute *> swath_attrs = swath->getAttributes();
1774  vector<HDFEOS2::Attribute*>::const_iterator it_a;
1775  for (it_a = swath_attrs.begin(); it_a != swath_attrs.end(); ++it_a) {
1776 
1777  int attr_type = (*it_a)->getType();
1778 
1779  // We treat string differently. DFNT_UCHAR and DFNT_CHAR are treated as strings.
1780  if(attr_type==DFNT_UCHAR || attr_type == DFNT_CHAR){
1781  string tempstring2((*it_a)->getValue().begin(),(*it_a)->getValue().end());
1782  string tempfinalstr= string(tempstring2.c_str());
1783 
1784  // Using the customized escattr function to escape special characters except
1785  // \n,\r,\t since escaping them may make the attributes hard to read. KY 2013-10-14
1786  // at->append_attr((*i)->getNewName(), "String" , escattr(tempfinalstr));
1787  at->append_attr((*it_a)->getNewName(), "String" , HDFCFUtil::escattr(tempfinalstr));
1788  }
1789  else {
1790  for (int loc=0; loc < (*it_a)->getCount() ; loc++) {
1791  string print_rep = HDFCFUtil::print_attr((*it_a)->getType(), loc, (void*) &((*it_a)->getValue()[0]));
1792  at->append_attr((*it_a)->getNewName(), HDFCFUtil::print_type((*it_a)->getType()), print_rep);
1793  }
1794 
1795  }
1796  }
1797  }
1798  }
1799  }// end of mapping swath and grid object attributes to DAP2
1800  }
1801  catch(...) {
1802  throw;
1803  }
1804 
1805  return 1;
1806 }
1807 
1808 //The wrapper of building HDF-EOS2 and special HDF4 files.
1809 void read_das_use_eos2lib(DAS & das, const string & filename,
1810  int32 sdfd,int32 fileid, int32 gridfd, int32 swathfd,bool ecs_metadata,
1811  HDFSP::File**h4filepptr,HDFEOS2::File**eosfilepptr)
1812 {
1813 
1814  BESDEBUG("h4","Coming to read_das_use_eos2lib" << endl);
1815 
1816  int ret_value = read_das_hdfeos2(das,filename,sdfd,fileid, gridfd, swathfd,ecs_metadata,h4filepptr,eosfilepptr);
1817 
1818  BESDEBUG("h4","ret_value of read_das_hdfeos2 is "<<ret_value <<endl);
1819 
1820  // read_das_hdfeos2 return value description:
1821  // 0: general non-EOS2 pure HDF4
1822  // 1: HDF-EOS2 hybrid
1823  // 2: MOD08_M3
1824  // HDF-EOS2 but no need to use HDF-EOS2 lib: no real dimension scales but have CVs for every dimension, treat differently
1825  // 3: AIRS version 6 level 3 and level 2
1826  // HDF-EOS2 but no need to use HDF-EOS2 lib:
1827  // have dimension scales but don’t have CVs for every dimension, also need to condense dimensions, treat differently
1828  // 4. Expected AIRS version 6 level 3 and level 2
1829  // HDF-EOS2 but no need to use HDF-EOS2 lib: Have dimension scales for all dimensions
1830  // 5. MERRA
1831  // Special handling for MERRA products.
1832 
1833  // Treat as pure HDF4 objects
1834  if (ret_value == 4) {
1835  if(true == read_das_special_eos2(das, filename,sdfd,fileid,ecs_metadata,h4filepptr))
1836  return;
1837  }
1838  // Special handling, already handled
1839  else if (ret_value == 2 || ret_value == 3) {
1840  return;
1841  }
1842  else if (ret_value == 1) {
1843 
1844  // Map non-EOS2 objects to DDS
1845  if(true == read_das_hdfhybrid(das,filename,sdfd,fileid,h4filepptr))
1846  return;
1847  }
1848  else {// ret_value is 0(pure HDF4) or 5(Merra)
1849  if(true == read_das_hdfsp(das, filename,sdfd, fileid,h4filepptr))
1850  return;
1851  }
1852 
1853 
1854 // Leave the original code that don't pass the file pointers.
1855 #if 0
1856  // First map HDF-EOS2 attributes to DAS
1857  if(true == read_das_hdfeos2(das, filename)){
1858 
1859  // Map non-EOS2 attributes to DAS
1860  if (true == read_das_hdfhybrid(das,filename))
1861  return;
1862  }
1863 
1864  // Map HDF4 attributes in pure HDF4 files to DAS
1865  if(true == read_das_hdfsp(das, filename)){
1866  return;
1867  }
1868 #endif
1869 
1870  // Call the default mapping of HDF4 to DAS. It should never reach here.
1871  // We add this line to ensure the HDF4 attributes mapped to DAS even if the above routines return false.
1872  read_das(das, filename);
1873 }
1874 
1875 #endif // #ifdef USE_HDFEOS2_LIB
1876 
1877 // The wrapper of building DDS function.
1878 //bool read_dds_hdfsp(DDS & dds, const string & filename,int32 sdfd, int32 fileid,int32 gridfd, int32 swathfd)
1879 bool read_dds_hdfsp(DDS & dds, const string & filename,int32 sdfd, int32 fileid,HDFSP::File*f)
1880 {
1881 
1882  BESDEBUG("h4","Coming to read_dds_sp "<<endl);
1883  dds.set_dataset_name(basename(filename));
1884 
1885  // Obtain SDS fields
1886  const vector<HDFSP::SDField *>& spsds = f->getSD()->getFields();
1887 
1888  // Read SDS
1889  vector<HDFSP::SDField *>::const_iterator it_g;
1890  for(it_g = spsds.begin(); it_g != spsds.end(); it_g++){
1891 
1892  // Although the following line's logic needs to improve, it is right.
1893  // When Has_Dim_NoScale_Field is false, it only happens to the OTHERHDF case.
1894  // For the OTHERHDF case, we will not map the dimension_no_dim_scale (empty) field. This is equivalent to
1895  // (0 == (*it_g)->getFieldType()) || (true == (*it_g)->IsDimScale())
1896  if (false == f->Has_Dim_NoScale_Field() || (0 == (*it_g)->getFieldType()) || (true == (*it_g)->IsDimScale())){
1897  try {
1898  read_dds_spfields(dds,filename,sdfd,(*it_g),f->getSPType());
1899  }
1900  catch(...) {
1901  throw;
1902  }
1903  }
1904  }
1905 
1906  // Read Vdata fields.
1907  // To speed up the performance for handling CERES data, we turn off some CERES vdata fields, this should be resumed in the future version with BESKeys.
1908 #if 0
1909  string check_ceres_vdata_key="H4.EnableCERESVdata";
1910  bool turn_on_ceres_vdata_key= false;
1911  turn_on_ceres_vdata_key = HDFCFUtil::check_beskeys(check_ceres_vdata_key);
1912 #endif
1913 
1914  bool output_vdata_flag = true;
1915  //if (false == turn_on_ceres_vdata_key &&
1916  if (false == HDF4RequestHandler::get_enable_ceres_vdata() &&
1917  (CER_AVG == f->getSPType() ||
1918  CER_ES4 == f->getSPType() ||
1919  CER_SRB == f->getSPType() ||
1920  CER_ZAVG == f->getSPType()))
1921  output_vdata_flag = false;
1922 
1923  if(true == output_vdata_flag) {
1924  for(vector<HDFSP::VDATA *>::const_iterator i=f->getVDATAs().begin(); i!=f->getVDATAs().end();i++) {
1925  if(!(*i)->getTreatAsAttrFlag()){
1926  for(vector<HDFSP::VDField *>::const_iterator j=(*i)->getFields().begin();j!=(*i)->getFields().end();j++) {
1927  try {
1928  read_dds_spvdfields(dds,filename,fileid,(*i)->getObjRef(),(*j)->getNumRec(),(*j));
1929  }
1930  catch(...) {
1931  throw;
1932  }
1933  }
1934  }
1935  }
1936  }
1937 
1938  return true;
1939 }
1940 
1941 // Follow CF to build DAS for non-HDFEOS2 HDF4 products. This routine also applies
1942 // to all HDF4 products when HDF-EOS2 library is not configured in.
1943 //bool read_das_hdfsp(DAS & das, const string & filename, int32 sdfd, int32 fileid,int32 gridfd, int32 swathfd)
1944 bool read_das_hdfsp(DAS & das, const string & filename, int32 sdfd, int32 fileid,HDFSP::File**fpptr)
1945 {
1946 
1947  BESDEBUG("h4","Coming to read_das_sp "<<endl);
1948 
1949  // Define a file pointer
1950  HDFSP::File *f = NULL;
1951  try {
1952  // Obtain all the necesary information from HDF4 files.
1953  f = HDFSP::File::Read(filename.c_str(), sdfd,fileid);
1954  }
1955  catch (HDFSP::Exception &e)
1956  {
1957  if (f != NULL)
1958  delete f;
1959  throw InternalErr(e.what());
1960  }
1961 
1962  try {
1963  // Generate CF coordinate variables(including auxiliary coordinate variables) and dimensions
1964  // All the names follow CF.
1965  f->Prepare();
1966  }
1967  catch (HDFSP::Exception &e) {
1968  delete f;
1969  throw InternalErr(e.what());
1970  }
1971 
1972  *fpptr = f;
1973 
1974  // Check if mapping vgroup attribute key is turned on, if yes, mapping vgroup attributes.
1975 #if 0
1976  string check_enable_vg_attr_key="H4.EnableVgroupAttr";
1977  bool turn_on_enable_vg_attr_key= false;
1978  turn_on_enable_vg_attr_key = HDFCFUtil::check_beskeys(check_enable_vg_attr_key);
1979 #endif
1980 
1981 
1982  //if(true == turn_on_enable_vg_attr_key ) {
1983  if(true == HDF4RequestHandler::get_enable_vgroup_attr()) {
1984 
1985  // Obtain vgroup attributes if having vgroup attributes.
1986  vector<HDFSP::AttrContainer *>vg_container = f->getVgattrs();
1987  for(vector<HDFSP::AttrContainer *>::const_iterator i=f->getVgattrs().begin();i!=f->getVgattrs().end();i++) {
1988  AttrTable *vgattr_at = das.get_table((*i)->getName());
1989  if (!vgattr_at)
1990  vgattr_at = das.add_table((*i)->getName(), new AttrTable);
1991 
1992  for(vector<HDFSP::Attribute *>::const_iterator j=(*i)->getAttributes().begin();j!=(*i)->getAttributes().end();j++) {
1993 
1994  // Handle string first.
1995  if((*j)->getType()==DFNT_UCHAR || (*j)->getType() == DFNT_CHAR){
1996  string tempstring2((*j)->getValue().begin(),(*j)->getValue().end());
1997  string tempfinalstr= string(tempstring2.c_str());
1998 
1999  //escaping the special characters in string attributes when mapping to DAP
2000  vgattr_at->append_attr((*j)->getNewName(), "String" , HDFCFUtil::escattr(tempfinalstr));
2001  }
2002  else {
2003  for (int loc=0; loc < (*j)->getCount() ; loc++) {
2004 
2005  string print_rep = HDFCFUtil::print_attr((*j)->getType(), loc, (void*) &((*j)->getValue()[0]));
2006  vgattr_at->append_attr((*j)->getNewName(), HDFCFUtil::print_type((*j)->getType()), print_rep);
2007  }
2008  }
2009  }
2010  }
2011  }// end of mapping vgroup attributes.
2012 
2013  // Initialize ECS metadata
2014  string core_metadata = "";
2015  string archive_metadata = "";
2016  string struct_metadata = "";
2017 
2018  // Obtain SD pointer, this is used to retrieve the file attributes associated with the SD interface
2019  HDFSP::SD* spsd = f->getSD();
2020 
2021  // Except TRMM, we don't find ECS metadata in other non-EOS products. For the option to treat EOS2 as pure HDF4, we
2022  // kind of relax the support of merging metadata as we do for the EOS2 case(read_das_hdfeos2). We will see if we have the user
2023  // request to make them consistent in the future. KY 2013-07-08
2024  for(vector<HDFSP::Attribute *>::const_iterator i=spsd->getAttributes().begin();i!=spsd->getAttributes().end();i++) {
2025 
2026  // Here we try to combine ECS metadata into a string.
2027  if(((*i)->getName().compare(0, 12, "CoreMetadata" )== 0) ||
2028  ((*i)->getName().compare(0, 12, "coremetadata" )== 0)){
2029 
2030  // We assume that CoreMetadata.0, CoreMetadata.1, ..., CoreMetadata.n attribures
2031  // are processed in the right order during HDFSP::Attribute vector iteration.
2032  // Otherwise, this won't work.
2033  string tempstring((*i)->getValue().begin(),(*i)->getValue().end());
2034 
2035  // Temporarily turn off CERES data since there are so many fields in CERES. It will choke clients KY 2010-7-9
2036  if(f->getSPType() != CER_AVG &&
2037  f->getSPType() != CER_ES4 &&
2038  f->getSPType() !=CER_SRB &&
2039  f->getSPType() != CER_ZAVG)
2040  core_metadata.append(tempstring);
2041  }
2042  else if(((*i)->getName().compare(0, 15, "ArchiveMetadata" )== 0) ||
2043  ((*i)->getName().compare(0, 16, "ArchivedMetadata")==0) ||
2044  ((*i)->getName().compare(0, 15, "archivemetadata" )== 0)){
2045  string tempstring((*i)->getValue().begin(),(*i)->getValue().end());
2046  // Currently some TRMM "swath" archivemetadata includes special characters that cannot be handled by OPeNDAP
2047  // So turn it off.
2048  // Turn off CERES data since it may choke JAVA clients KY 2010-7-9
2049  if(f->getSPType() != TRMML2_V6 && f->getSPType() != CER_AVG && f->getSPType() != CER_ES4 && f->getSPType() !=CER_SRB && f->getSPType() != CER_ZAVG)
2050  archive_metadata.append(tempstring);
2051  }
2052  else if(((*i)->getName().compare(0, 14, "StructMetadata" )== 0) ||
2053  ((*i)->getName().compare(0, 14, "structmetadata" )== 0)){
2054 
2055 #if 0
2056  string check_disable_smetadata_key ="H4.DisableStructMetaAttr";
2057  bool is_check_disable_smetadata = false;
2058  is_check_disable_smetadata = HDFCFUtil::check_beskeys(check_disable_smetadata_key);
2059 #endif
2060 
2061  //if (false == is_check_disable_smetadata) {
2062  if (false == HDF4RequestHandler::get_disable_structmeta()) {
2063 
2064  string tempstring((*i)->getValue().begin(),(*i)->getValue().end());
2065 
2066  // Turn off TRMM "swath" verison 6 level 2 productsCERES data since it may choke JAVA clients KY 2010-7-9
2067  if(f->getSPType() != TRMML2_V6 &&
2068  f->getSPType() != CER_AVG &&
2069  f->getSPType() != CER_ES4 &&
2070  f->getSPType() !=CER_SRB &&
2071  f->getSPType() != CER_ZAVG)
2072  struct_metadata.append(tempstring);
2073 
2074  }
2075  }
2076  else {
2077  // Process gloabal attributes
2078  AttrTable *at = das.get_table("HDF_GLOBAL");
2079  if (!at)
2080  at = das.add_table("HDF_GLOBAL", new AttrTable);
2081 
2082  // We treat string differently. DFNT_UCHAR and DFNT_CHAR are treated as strings.
2083  if((*i)->getType()==DFNT_UCHAR || (*i)->getType() == DFNT_CHAR){
2084  string tempstring2((*i)->getValue().begin(),(*i)->getValue().end());
2085  string tempfinalstr= string(tempstring2.c_str());
2086 
2087  // Using the customized escattr function to escape special characters except
2088  // \n,\r,\t since escaping them may make the attributes hard to read. KY 2013-10-14
2089  // at->append_attr((*i)->getNewName(), "String" , escattr(tempfinalstr));
2090  at->append_attr((*i)->getNewName(), "String" , HDFCFUtil::escattr(tempfinalstr));
2091  }
2092 
2093  else {
2094  for (int loc=0; loc < (*i)->getCount() ; loc++) {
2095  string print_rep = HDFCFUtil::print_attr((*i)->getType(), loc, (void*) &((*i)->getValue()[0]));
2096  at->append_attr((*i)->getNewName(), HDFCFUtil::print_type((*i)->getType()), print_rep);
2097  }
2098 
2099  }
2100  }
2101 
2102  }
2103 
2104  // The following code may be condensed in the future. KY 2012-09-19
2105  // Coremetadata, structmetadata and archive metadata need special parsers.
2106 
2107  // Write coremetadata.
2108  if(core_metadata.size() > 0){
2109  AttrTable *at = das.get_table("CoreMetadata");
2110  if (!at)
2111  at = das.add_table("CoreMetadata", new AttrTable);
2112  // tell lexer to scan attribute string
2113  void *buf = hdfeos_string(core_metadata.c_str());
2114  parser_arg arg(at);
2115 
2116  if (hdfeosparse(&arg) != 0) {
2117  hdfeos_delete_buffer(buf);
2118  throw Error("Parse error while processing a CoreMetadata attribute.");
2119  }
2120 
2121  // Errors returned from here are ignored.
2122  if (arg.status() == false) {
2123  (*BESLog::TheLog()) << "Parse error while processing a CoreMetadata attribute. (2) " << endl;
2124  // << arg.error()->get_error_message() << endl;
2125  }
2126 
2127  hdfeos_delete_buffer(buf);
2128  }
2129 
2130  // Write archive metadata.
2131  if(archive_metadata.size() > 0){
2132  AttrTable *at = das.get_table("ArchiveMetadata");
2133  if (!at)
2134  at = das.add_table("ArchiveMetadata", new AttrTable);
2135  // tell lexer to scan attribute string
2136  void *buf = hdfeos_string(archive_metadata.c_str());
2137  parser_arg arg(at);
2138  if (hdfeosparse(&arg) != 0){
2139  // delete f;
2140  hdfeos_delete_buffer(buf);
2141  throw Error("Parse error while processing an ArchiveMetadata attribute.");
2142  }
2143 
2144  // Errors returned from here are ignored.
2145  if (arg.status() == false) {
2146  (*BESLog::TheLog())<< "Parse error while processing an ArchiveMetadata attribute. (2) " << endl;
2147  // << arg.error()->get_error_message() << endl;
2148  }
2149 
2150  hdfeos_delete_buffer(buf);
2151  }
2152 
2153  // Write struct metadata.
2154  if(struct_metadata.size() > 0){
2155  AttrTable *at = das.get_table("StructMetadata");
2156  if (!at)
2157  at = das.add_table("StructMetadata", new AttrTable);
2158  // tell lexer to scan attribute string
2159  void *buf = hdfeos_string(struct_metadata.c_str());
2160  parser_arg arg(at);
2161  if (hdfeosparse(&arg) != 0){
2162  // delete f;
2163  hdfeos_delete_buffer(buf);
2164  throw Error("Parse error while processing a StructMetadata attribute.");
2165  }
2166 
2167  if (arg.status() == false) {
2168  (*BESLog::TheLog())<< "Parse error while processing a StructMetadata attribute. (2)" << endl;
2169  }
2170 
2171 
2172  // Errors returned from here are ignored.
2173 #if 0
2174  if (arg.status() == false) {
2175  (*BESLog::TheLog())<< "Parse error while processing a StructMetadata attribute. (2)" << endl
2176  << arg.error()->get_error_message() << endl;
2177  }
2178 #endif
2179 
2180  hdfeos_delete_buffer(buf);
2181  }
2182 
2183  // The following code checks the special handling of scale and offset of the OBPG products.
2184  //Store value of "Scaling" attribute.
2185  string scaling;
2186 
2187  //Store value of "Slope" attribute.
2188  float slope = 0.;
2189  bool global_slope_flag = false;
2190  float intercept = 0.;
2191  bool global_intercept_flag = false;
2192 
2193  // Check OBPG attributes. Specifically, check if slope and intercept can be obtained from the file level.
2194  // If having global slope and intercept, obtain OBPG scaling, slope and intercept values.
2195  HDFCFUtil::check_obpg_global_attrs(f,scaling,slope,global_slope_flag,intercept,global_intercept_flag);
2196 
2197  // Handle individual fields
2198  const vector<HDFSP::SDField *>& spsds = f->getSD()->getFields();
2199  vector<HDFSP::SDField *>::const_iterator it_g;
2200  for(it_g = spsds.begin(); it_g != spsds.end(); it_g++){
2201 
2202  // The following two if-statements are double secure checks. It will
2203  // make sure no-dimension-scale dimension variables and the associated coordinate variables(if any) are ignored.
2204  // Ignore ALL coordinate variables if this is "OTHERHDF" case and some dimensions
2205  // don't have dimension scale data.
2206  if ( true == f->Has_Dim_NoScale_Field() &&
2207  ((*it_g)->getFieldType() !=0)&&
2208  ((*it_g)->IsDimScale() == false))
2209  continue;
2210 
2211  // Ignore the empty(no data) dimension variable.
2212  if (OTHERHDF == f->getSPType() && true == (*it_g)->IsDimNoScale())
2213  continue;
2214 
2215  AttrTable *at = das.get_table((*it_g)->getNewName());
2216  if (!at)
2217  at = das.add_table((*it_g)->getNewName(), new AttrTable);
2218 
2219  // Some fields have "long_name" attributes,so we have to use this attribute rather than creating our own
2220  bool long_name_flag = false;
2221 
2222  for(vector<HDFSP::Attribute *>::const_iterator i=(*it_g)->getAttributes().begin();
2223  i!=(*it_g)->getAttributes().end();i++) {
2224  if((*i)->getName() == "long_name") {
2225  long_name_flag = true;
2226  break;
2227  }
2228  }
2229 
2230  if(false == long_name_flag) {
2231  if (f->getSPType() == TRMML2_V7) {
2232  if((*it_g)->getFieldType() == 1)
2233  at->append_attr("standard_name","String","latitude");
2234  else if ((*it_g)->getFieldType() == 2) {
2235  at->append_attr("standard_name","String","longitude");
2236 
2237  }
2238 
2239  }
2240  else if (f->getSPType() == TRMML3S_V7 || f->getSPType() == TRMML3M_V7) {
2241  if((*it_g)->getFieldType() == 1) {
2242  at->append_attr("long_name","String","latitude");
2243  at->append_attr("standard_name","String","latitude");
2244 
2245  }
2246  else if ((*it_g)->getFieldType() == 2) {
2247  at->append_attr("long_name","String","longitude");
2248  at->append_attr("standard_name","String","longitude");
2249  }
2250 
2251  }
2252  else
2253  at->append_attr("long_name", "String", (*it_g)->getName());
2254  }
2255 
2256  // For some OBPG files that only provide slope and intercept at the file level,
2257  // we need to add the global slope and intercept to all fields and change their names to scale_factor and add_offset.
2258  // For OBPG files that provide slope and intercept at the field level, we need to rename those attribute names to scale_factor and add_offset.
2259  HDFCFUtil::add_obpg_special_attrs(f,das,*it_g,scaling,slope,global_slope_flag,intercept,global_intercept_flag);
2260 
2261  // MAP individual SDS field to DAP DAS
2262  for(vector<HDFSP::Attribute *>::const_iterator i=(*it_g)->getAttributes().begin();i!=(*it_g)->getAttributes().end();i++) {
2263 
2264  // Handle string first.
2265  if((*i)->getType()==DFNT_UCHAR || (*i)->getType() == DFNT_CHAR){
2266  string tempstring2((*i)->getValue().begin(),(*i)->getValue().end());
2267  string tempfinalstr= string(tempstring2.c_str());
2268 
2269  // We want to escape the possible special characters except the fullpath attribute. This may be overkilled since
2270  // fullpath is only added for some CERES and MERRA data. We think people use fullpath really mean to keep their
2271  // original names. So escaping them for the time being. KY 2013-10-14
2272 
2273  at->append_attr((*i)->getNewName(), "String" ,((*i)->getNewName()=="fullpath")?tempfinalstr:HDFCFUtil::escattr(tempfinalstr));
2274  }
2275  else {
2276  for (int loc=0; loc < (*i)->getCount() ; loc++) {
2277  string print_rep = HDFCFUtil::print_attr((*i)->getType(), loc, (void*) &((*i)->getValue()[0]));
2278  at->append_attr((*i)->getNewName(), HDFCFUtil::print_type((*i)->getType()), print_rep);
2279  }
2280  }
2281 
2282  }
2283 
2284  // MAP dimension info. to DAS(Currently this should only affect the OTHERHDF case when no dimension scale for some dimensions)
2285  // KY 2012-09-19
2286  // For the type DFNT_CHAR, one dimensional char array is mapped to a scalar DAP string,
2287  // N dimensional char array is mapped to N-1 dimensional DAP string,
2288  // So the number of dimension info stored in the attribute container should be reduced by 1.
2289  // KY 2014-04-11
2290 
2291  bool has_dim_info = true;
2292  vector<HDFSP::AttrContainer *>::const_iterator it_end = (*it_g)->getDimInfo().end();
2293  if((*it_g)->getType() == DFNT_CHAR) {
2294  if((*it_g)->getRank() >1 && (*it_g)->getDimInfo().size() >1)
2295  it_end = (*it_g)->getDimInfo().begin()+(*it_g)->getDimInfo().size() -1;
2296  else
2297  has_dim_info = false;
2298  }
2299 
2300  if( true == has_dim_info) {
2301 
2302  for(vector<HDFSP::AttrContainer *>::const_iterator i=(*it_g)->getDimInfo().begin();i!=it_end;i++) {
2303  //for(vector<HDFSP::AttrContainer *>::const_iterator i=(*it_g)->getDimInfo().begin();i!=(*it_g)->getDimInfo().end();i++) {
2304 
2305  // Here a little surgory to add the field path(including) name before dim0, dim1, etc.
2306  string attr_container_name = (*it_g)->getNewName() + (*i)->getName();
2307  AttrTable *dim_at = das.get_table(attr_container_name);
2308  if (!dim_at)
2309  dim_at = das.add_table(attr_container_name, new AttrTable);
2310 
2311  for(vector<HDFSP::Attribute *>::const_iterator j=(*i)->getAttributes().begin();j!=(*i)->getAttributes().end();j++) {
2312 
2313  // Handle string first.
2314  if((*j)->getType()==DFNT_UCHAR || (*j)->getType() == DFNT_CHAR){
2315  string tempstring2((*j)->getValue().begin(),(*j)->getValue().end());
2316  string tempfinalstr= string(tempstring2.c_str());
2317 
2318  //escaping the special characters in string attributes when mapping to DAP
2319  dim_at->append_attr((*j)->getNewName(), "String" , HDFCFUtil::escattr(tempfinalstr));
2320  }
2321  else {
2322  for (int loc=0; loc < (*j)->getCount() ; loc++) {
2323 
2324  string print_rep = HDFCFUtil::print_attr((*j)->getType(), loc, (void*) &((*j)->getValue()[0]));
2325  dim_at->append_attr((*j)->getNewName(), HDFCFUtil::print_type((*j)->getType()), print_rep);
2326  }
2327  }
2328  }
2329 
2330  }
2331  }
2332 
2333  // Handle special CF attributes such as units, valid_range and coordinates
2334  // Overwrite units if fieldtype is latitude.
2335  if((*it_g)->getFieldType() == 1){
2336 
2337  at->del_attr("units"); // Override any existing units attribute.
2338  at->append_attr("units", "String",(*it_g)->getUnits());
2339  if (f->getSPType() == CER_ES4) // Drop the valid_range attribute since the value will be interpreted wrongly by CF tools
2340  at->del_attr("valid_range");
2341 
2342 
2343  }
2344  // Overwrite units if fieldtype is longitude
2345  if((*it_g)->getFieldType() == 2){
2346  at->del_attr("units"); // Override any existing units attribute.
2347  at->append_attr("units", "String",(*it_g)->getUnits());
2348  if (f->getSPType() == CER_ES4) // Drop the valid_range attribute since the value will be interpreted wrongly by CF tools
2349  at->del_attr("valid_range");
2350 
2351  }
2352 
2353  // The following if-statement may not be necessary since fieldtype=4 is the missing CV.
2354  // This missing CV is added by the handler and the units is always level.
2355  if((*it_g)->getFieldType() == 4){
2356  at->del_attr("units"); // Override any existing units attribute.
2357  at->append_attr("units", "String",(*it_g)->getUnits());
2358  }
2359 
2360  // Overwrite coordinates if fieldtype is neither lat nor lon.
2361  if((*it_g)->getFieldType() == 0){
2362  at->del_attr("coordinates"); // Override any existing units attribute.
2363 
2364  // If no "dimension scale" dimension exists, delete the "coordinates" attributes
2365  if (false == f->Has_Dim_NoScale_Field()) {
2366  string coordinate = (*it_g)->getCoordinate();
2367  if (coordinate !="")
2368  at->append_attr("coordinates", "String", coordinate);
2369  }
2370  }
2371  }
2372 
2373 
2374  // For OTHERHDF products, add units for latitude and longitude; also change unit to units.
2375  HDFCFUtil::handle_otherhdf_special_attrs(f,das);
2376 
2377  // For NASA products, add missing CF attributes if possible
2378  HDFCFUtil::add_missing_cf_attrs(f,das);
2379 
2380 #if 0
2381  string check_scale_offset_type_key = "H4.EnableCheckScaleOffsetType";
2382  bool turn_on_enable_check_scale_offset_key= false;
2383  turn_on_enable_check_scale_offset_key = HDFCFUtil::check_beskeys(check_scale_offset_type_key);
2384 #endif
2385 
2386  // Check if having _FillValue. If having _FillValue, compare the datatype of _FillValue
2387  // with the variable datatype. Correct the fillvalue datatype if necessary.
2388  for(it_g = spsds.begin(); it_g != spsds.end(); it_g++){
2389 
2390  AttrTable *at = das.get_table((*it_g)->getNewName());
2391  if (at != NULL) {
2392  int32 var_type = (*it_g)->getType();
2393  try {
2394  HDFCFUtil::correct_fvalue_type(at,var_type);
2395  }
2396  catch(...) {
2397  throw;
2398  }
2399  }
2400 
2401  // If H4.EnableCheckScaleOffsetType BES key is true,
2402  // if yes, check if having scale_factor and add_offset attributes;
2403  // if yes, check if scale_factor and add_offset attribute types are the same;
2404  // if no, make add_offset's datatype be the same as the datatype of scale_factor.
2405  // (CF requires the type of scale_factor and add_offset the same).
2406  //if (true == turn_on_enable_check_scale_offset_key && at !=NULL)
2407  if (true == HDF4RequestHandler::get_enable_check_scale_offset_type() && at !=NULL)
2409  }
2410 
2411  // Optimization for users to tune the DAS output.
2412  HDFCFUtil::handle_merra_ceres_attrs_with_bes_keys(f,das,filename);
2413 
2414  // Check the EnableVdataDescAttr key. If this key is turned on, the handler-added attribute VDdescname and
2415  // the attributes of vdata and vdata fields will be outputed to DAS. Otherwise, these attributes will
2416  // not output to DAS. The key will be turned off by default to shorten the DAP output. KY 2012-09-18
2417  try {
2418  HDFCFUtil::handle_vdata_attrs_with_desc_key(f,das);
2419  }
2420  catch(...) {
2421  throw;
2422  }
2423 
2424  return true;
2425 }
2426 
2427 // This routine is for case 4 of the cases returned by read_das_hdfeos2.
2428 // Creating this routine is for performance reasons. Structmetadata is
2429 // turned off because the information has been retrieved and presented
2430 // by DDS and DAS.
2431 // Currently we don't have a user case for this routine and also
2432 // this code is not used. We still keep it for the future usage.
2433 // KY 2014-01-29
2434 
2435 bool read_das_special_eos2(DAS &das,const string& filename,int32 sdfd,int32 fileid,bool ecs_metadata,HDFSP::File**fpptr) {
2436 
2437  BESDEBUG("h4","Coming to read_das_special_eos2 " << endl);
2438 
2439 #if 0
2440  // HDF4 H interface ID
2441  int32 myfileid;
2442  myfileid = Hopen(const_cast<char *>(filename.c_str()), DFACC_READ,0);
2443 #endif
2444 
2445  // Define a file pointer
2446  HDFSP::File *f = NULL;
2447  try {
2448 
2449  // Obtain all the necesary information from HDF4 files.
2450  f = HDFSP::File::Read(filename.c_str(), sdfd,fileid);
2451  }
2452  catch (HDFSP::Exception &e)
2453  {
2454  if (f!= NULL)
2455  delete f;
2456  throw InternalErr(e.what());
2457  }
2458 
2459  try {
2460  // Generate CF coordinate variables(including auxiliary coordinate variables) and dimensions
2461  // All the names follow CF.
2462  f->Prepare();
2463  }
2464  catch (HDFSP::Exception &e) {
2465  delete f;
2466  throw InternalErr(e.what());
2467  }
2468 
2469  *fpptr = f;
2470 
2471  try {
2472  read_das_special_eos2_core(das, f, filename,ecs_metadata);
2473  }
2474  catch(...) {
2475  throw;
2476  }
2477 
2478  // The return value is a dummy value, not used.
2479  return true;
2480 }
2481 
2482 // This routine is for special EOS2 that can be tuned to build up DAS and DDS quickly.
2483 // We also turn off the generation of StructMetadata for the performance reason.
2484 bool read_das_special_eos2_core(DAS &das,HDFSP::File* f,const string& filename,bool ecs_metadata) {
2485 
2486  BESDEBUG("h4","Coming to read_das_special_eos2_core "<<endl);
2487  // Initialize ECS metadata
2488  string core_metadata = "";
2489  string archive_metadata = "";
2490  string struct_metadata = "";
2491 
2492  // Obtain SD pointer, this is used to retrieve the file attributes associated with the SD interface
2493  HDFSP::SD* spsd = f->getSD();
2494 
2495  //Ignore StructMetadata to improve performance
2496  for(vector<HDFSP::Attribute *>::const_iterator i=spsd->getAttributes().begin();i!=spsd->getAttributes().end();i++) {
2497 
2498  // Here we try to combine ECS metadata into a string.
2499  if(((*i)->getName().compare(0, 12, "CoreMetadata" )== 0) ||
2500  ((*i)->getName().compare(0, 12, "coremetadata" )== 0)){
2501 
2502  if(ecs_metadata == true) {
2503  // We assume that CoreMetadata.0, CoreMetadata.1, ..., CoreMetadata.n attribures
2504  // are processed in the right order during HDFSP::Attribute vector iteration.
2505  // Otherwise, this won't work.
2506  string tempstring((*i)->getValue().begin(),(*i)->getValue().end());
2507  core_metadata.append(tempstring);
2508  }
2509  }
2510  else if(((*i)->getName().compare(0, 15, "ArchiveMetadata" )== 0) ||
2511  ((*i)->getName().compare(0, 16, "ArchivedMetadata")==0) ||
2512  ((*i)->getName().compare(0, 15, "archivemetadata" )== 0)){
2513  if(ecs_metadata == true) {
2514  string tempstring((*i)->getValue().begin(),(*i)->getValue().end());
2515  archive_metadata.append(tempstring);
2516  }
2517  }
2518  else if(((*i)->getName().compare(0, 14, "StructMetadata" )== 0) ||
2519  ((*i)->getName().compare(0, 14, "structmetadata" )== 0))
2520  ; // Ignore StructMetadata for performance
2521  else {
2522  // Process gloabal attributes
2523  AttrTable *at = das.get_table("HDF_GLOBAL");
2524  if (!at)
2525  at = das.add_table("HDF_GLOBAL", new AttrTable);
2526 
2527  // We treat string differently. DFNT_UCHAR and DFNT_CHAR are treated as strings.
2528  if((*i)->getType()==DFNT_UCHAR || (*i)->getType() == DFNT_CHAR){
2529  string tempstring2((*i)->getValue().begin(),(*i)->getValue().end());
2530  string tempfinalstr= string(tempstring2.c_str());
2531 
2532  // Using the customized escattr function to escape special characters except
2533  // \n,\r,\t since escaping them may make the attributes hard to read. KY 2013-10-14
2534  // at->append_attr((*i)->getNewName(), "String" , escattr(tempfinalstr));
2535  at->append_attr((*i)->getNewName(), "String" , HDFCFUtil::escattr(tempfinalstr));
2536  }
2537 
2538  else {
2539  for (int loc=0; loc < (*i)->getCount() ; loc++) {
2540  string print_rep = HDFCFUtil::print_attr((*i)->getType(), loc, (void*) &((*i)->getValue()[0]));
2541  at->append_attr((*i)->getNewName(), HDFCFUtil::print_type((*i)->getType()), print_rep);
2542  }
2543 
2544  }
2545  }
2546 
2547  }
2548 
2549  // The following code may be condensed in the future. KY 2012-09-19
2550  // Coremetadata, structmetadata and archive metadata need special parsers.
2551 
2552  if(ecs_metadata == true) {
2553  // Write coremetadata.
2554  if(core_metadata.size() > 0){
2555  AttrTable *at = das.get_table("CoreMetadata");
2556  if (!at)
2557  at = das.add_table("CoreMetadata", new AttrTable);
2558  // tell lexer to scan attribute string
2559  void *buf = hdfeos_string(core_metadata.c_str());
2560  parser_arg arg(at);
2561 
2562  if (hdfeosparse(&arg) != 0) {
2563  hdfeos_delete_buffer(buf);
2564  throw Error("Parse error while processing a CoreMetadata attribute.");
2565  }
2566 
2567  // Errors returned from here are ignored.
2568  if (arg.status() == false) {
2569  (*BESLog::TheLog()) << "Parse error while processing a CoreMetadata attribute. (2)" << endl;
2570 // << arg.error()->get_error_message() << endl;
2571  }
2572 
2573  hdfeos_delete_buffer(buf);
2574 
2575  }
2576 
2577  // Write archive metadata.
2578  if(archive_metadata.size() > 0){
2579  AttrTable *at = das.get_table("ArchiveMetadata");
2580  if (!at)
2581  at = das.add_table("ArchiveMetadata", new AttrTable);
2582  // tell lexer to scan attribute string
2583  void *buf = hdfeos_string(archive_metadata.c_str());
2584  parser_arg arg(at);
2585  if (hdfeosparse(&arg) != 0) {
2586  hdfeos_delete_buffer(buf);
2587  throw Error("Parse error while processing an ArchiveMetadata attribute.");
2588  }
2589 
2590  // Errors returned from here are ignored.
2591  if (arg.status() == false) {
2592  (*BESLog::TheLog())<< "Parse error while processing an ArchiveMetadata attribute. (2)" << endl;
2593  // << arg.error()->get_error_message() << endl;
2594  }
2595 
2596  hdfeos_delete_buffer(buf);
2597  }
2598  }
2599 
2600  // Handle individual fields
2601  const vector<HDFSP::SDField *>& spsds = f->getSD()->getFields();
2602  vector<HDFSP::SDField *>::const_iterator it_g;
2603  for(it_g = spsds.begin(); it_g != spsds.end(); it_g++){
2604 
2605  // Add units for CV variables
2606 // if((*it_g)->getFieldType() != 0 && (*it_g)->IsDimScale() == false){
2607  if((*it_g)->getFieldType() != 0){
2608 
2609  AttrTable *at = das.get_table((*it_g)->getNewName());
2610  if (!at)
2611  at = das.add_table((*it_g)->getNewName(), new AttrTable);
2612 
2613  string tempunits = (*it_g)->getUnits();
2614  if(at->simple_find("units")== at->attr_end() && tempunits!="")
2615  at->append_attr("units", "String" ,tempunits);
2616  if((*it_g)->getFieldType() == 1){
2617  if(at->simple_find("long_name")== at->attr_end())
2618  at->append_attr("long_name","String","Latitude");
2619  }
2620  else if((*it_g)->getFieldType() == 2) {
2621  if(at->simple_find("long_name")== at->attr_end())
2622  at->append_attr("long_name","String","Longitude");
2623  }
2624  }
2625  else {// We will check if having the coordinates attribute.
2626  AttrTable *at = das.get_table((*it_g)->getNewName());
2627  if (!at)
2628  at = das.add_table((*it_g)->getNewName(), new AttrTable);
2629  string tempcoors = (*it_g)->getCoordinate();
2630  // If we add the coordinates attribute, any existing coordinates attribute will be removed.
2631  if(tempcoors!=""){
2632  at->del_attr("coordinates");
2633  at->append_attr("coordinates","String",tempcoors);
2634  }
2635 
2636  }
2637 
2638  // Ignore variables that don't have attributes.
2639  if((*it_g)->getAttributes().size() == 0)
2640  continue;
2641 
2642  AttrTable *at = das.get_table((*it_g)->getNewName());
2643  if (!at)
2644  at = das.add_table((*it_g)->getNewName(), new AttrTable);
2645 
2646  // MAP individual SDS field to DAP DAS
2647  for(vector<HDFSP::Attribute *>::const_iterator i=(*it_g)->getAttributes().begin();i!=(*it_g)->getAttributes().end();i++) {
2648 
2649  // Handle string first.
2650  if((*i)->getType()==DFNT_UCHAR || (*i)->getType() == DFNT_CHAR){
2651  string tempstring2((*i)->getValue().begin(),(*i)->getValue().end());
2652  string tempfinalstr= string(tempstring2.c_str());
2653 
2654  // We want to escape the possible special characters for attributes except the fullpath attribute. This may be overkilled since
2655  // fullpath is only added for some CERES and MERRA data. However, we think people use fullpath really mean to keep their
2656  // original names. So we don't escape the fullpath attribute. KY 2013-10-14
2657 
2658  at->append_attr((*i)->getNewName(), "String" ,((*i)->getNewName()=="fullpath")?tempfinalstr:HDFCFUtil::escattr(tempfinalstr));
2659  }
2660  else {
2661  for (int loc=0; loc < (*i)->getCount() ; loc++) {
2662  string print_rep = HDFCFUtil::print_attr((*i)->getType(), loc, (void*) &((*i)->getValue()[0]));
2663  at->append_attr((*i)->getNewName(), HDFCFUtil::print_type((*i)->getType()), print_rep);
2664  }
2665  }
2666  }
2667 
2668  }
2669 
2670 //#if 0
2671  // Handle HDF-EOS2 object attributes. These are found in AIRS version 6.
2672  HDFCFUtil::map_eos2_objects_attrs(das,filename);
2673 //#endif
2674 
2675  return true;
2676 }
2677 
2678 
2679 // MOD/MYD08M3 follows the no-CF scale/offset rulea,we need to change the add_offset value when add_offset is 0.
2680 void change_das_mod08_scale_offset(DAS &das, HDFSP::File *f) {
2681 
2682  // Handle individual fields
2683  // Check HDFCFUtil::handle_modis_special_attrs_disable_scale_comp
2684  const vector<HDFSP::SDField *>& spsds = f->getSD()->getFields();
2685  vector<HDFSP::SDField *>::const_iterator it_g;
2686  for(it_g = spsds.begin(); it_g != spsds.end(); it_g++){
2687  if((*it_g)->getFieldType() == 0){
2688  AttrTable *at = das.get_table((*it_g)->getNewName());
2689  if (!at)
2690  at = das.add_table((*it_g)->getNewName(), new AttrTable);
2691 
2692  // Declare add_offset type in string format.
2693  string add_offset_type;
2694 
2695  // add_offset values
2696  string add_offset_value="0";
2697  double orig_offset_value = 0;
2698  bool add_offset_modify = false;
2699 
2700 
2701  // Go through all attributes to find add_offset
2702  // If add_offset is 0 or add_offset is not found, we don't need
2703  // to modify the add_offset value.
2704  AttrTable::Attr_iter it = at->attr_begin();
2705  while (it!=at->attr_end())
2706  {
2707  if(at->get_name(it)=="add_offset")
2708  {
2709  add_offset_value = (*at->get_attr_vector(it)->begin());
2710  orig_offset_value = atof(add_offset_value.c_str());
2711  add_offset_type = at->get_type(it);
2712  if(add_offset_value == "0.0" || orig_offset_value == 0)
2713  add_offset_modify = false;
2714  else
2715  add_offset_modify = true;
2716  break;
2717  }
2718  it++;
2719 
2720  }
2721 
2722  // We need to modify the add_offset value if the add_offset exists.
2723  if( true == add_offset_modify) {
2724 
2725  // Declare scale_factor type in string format.
2726  string scale_factor_type;
2727 
2728  // Scale values
2729  string scale_factor_value="";
2730  double orig_scale_value = 1;
2731 
2732  it = at->attr_begin();
2733  while (it!=at->attr_end())
2734  {
2735  if(at->get_name(it)=="scale_factor")
2736  {
2737  scale_factor_value = (*at->get_attr_vector(it)->begin());
2738  orig_scale_value = atof(scale_factor_value.c_str());
2739  scale_factor_type = at->get_type(it);
2740  }
2741  it++;
2742  }
2743 
2744  if(scale_factor_value.length() !=0) {
2745  double new_offset_value = -1 * orig_scale_value*orig_offset_value;
2746  string print_rep = HDFCFUtil::print_attr(DFNT_FLOAT64,0,(void*)(&new_offset_value));
2747  at->del_attr("add_offset");
2748  at->append_attr("add_offset", HDFCFUtil::print_type(DFNT_FLOAT64), print_rep);
2749  }
2750  }
2751 
2752  }
2753 
2754  }
2755 
2756 }
2757 
// Function to build special AIRS version 6 and MOD08_M3 DDS. Doing this way is for improving performance.
//
// Maps every SDS field of 'spf' to a DAP variable and adds it to 'dds'.
// When 'check_cache' is true and HDFCFUtil::obtain_dds_cache_size() reports a
// non-zero size, the resulting DDS is also serialized to a binary cache file
// under the directory named by the BES key "H4.Cache.metadata.path".
//
// @param dds         DDS being populated.
// @param spf         Parsed HDF4 file (SDS-only view).
// @param filename    Full path of the underlying HDF4 file.
// @param sdid        SD interface identifier for the open file.
// @param check_cache Whether to attempt writing a DDS cache file.
// @return            Always true.
// @throws InternalErr on unsupported data types, allocation failures, or
//                     cache-file I/O and locking failures.
bool read_dds_special_1d_grid(DDS &dds,HDFSP::File* spf,const string& filename, int32 sdid,bool check_cache) {
//bool read_dds_special_1d_grid(DDS &dds,HDFSP::File* spf,const string& filename, int32 sdid, int32 fileid) {

    BESDEBUG("h4","Coming to read_dds_special_1d_grid "<<endl);

    // Whether a DDS cache file should be written once the DDS is built.
    bool dds_cache = false;
    size_t total_bytes_dds_cache = 0;

    // Only support AIRS version 6 level 2 or level 3 KY 2015-06-07
    if(true == check_cache) {

        // A non-zero estimated cache size means the layout is cacheable.
        total_bytes_dds_cache = HDFCFUtil::obtain_dds_cache_size(spf);
        BESDEBUG("h4","Total DDS cache file size is "<< total_bytes_dds_cache<<endl);
        if(total_bytes_dds_cache !=0)
            dds_cache = true;

    }

    SPType sptype = OTHERHDF;
    const vector<HDFSP::SDField *>& spsds = spf->getSD()->getFields();

    // Read SDS
    vector<HDFSP::SDField *>::const_iterator it_g;
    for(it_g = spsds.begin(); it_g != spsds.end(); it_g++){

        // 'bt' is a prototype BaseType matching the SDS data type. The array
        // classes below copy it internally, so it is deleted after add_var().
        BaseType *bt=NULL;
        switch((*it_g)->getType()) {
#define HANDLE_CASE(tid, type) \
    case tid: \
        bt = new (type)((*it_g)->getNewName(),filename); \
        break;
        HANDLE_CASE(DFNT_FLOAT32, HDFFloat32);
        HANDLE_CASE(DFNT_FLOAT64, HDFFloat64);
        HANDLE_CASE(DFNT_CHAR, HDFStr);
#ifndef SIGNED_BYTE_TO_INT32
        HANDLE_CASE(DFNT_INT8, HDFByte);
#else
        HANDLE_CASE(DFNT_INT8,HDFInt32);
#endif
        HANDLE_CASE(DFNT_UINT8, HDFByte);
        HANDLE_CASE(DFNT_INT16, HDFInt16);
        HANDLE_CASE(DFNT_UINT16, HDFUInt16);
        HANDLE_CASE(DFNT_INT32, HDFInt32);
        HANDLE_CASE(DFNT_UINT32, HDFUInt32);
        HANDLE_CASE(DFNT_UCHAR8, HDFByte);
        default:
            throw InternalErr(__FILE__,__LINE__,"unsupported data type.");
#undef HANDLE_CASE
        }

        if(bt)
        {

            const vector<HDFSP::Dimension*>& dims= (*it_g)->getDimensions();

            vector<HDFSP::Dimension*>::const_iterator it_d;

            // Char will be mapped to DAP string.
            if(DFNT_CHAR == (*it_g)->getType()) {
                // Rank-1 char SDS becomes a scalar DAP String.
                if(1 == (*it_g)->getRank()) {
                    HDFCFStr * sca_str = NULL;
                    try {
                        sca_str = new HDFCFStr(
                            sdid,
                            (*it_g)->getFieldRef(),
                            filename,
                            (*it_g)->getName(),
                            (*it_g)->getNewName(),
                            false
                        );
                    }
                    catch(...) {
                        delete bt;
                        throw InternalErr(__FILE__,__LINE__,"Unable to allocate the HDFCFStr instance.");
                    }
                    dds.add_var(sca_str);
                    delete bt;
                    delete sca_str;
                }

                else {
                    HDFCFStrField *ar = NULL;
                    try {

                        ar = new HDFCFStrField(
                            (*it_g)->getRank() -1 ,
                            filename,
                            false,
                            sdid,
                            (*it_g)->getFieldRef(),
                            0,
                            (*it_g)->getName(),
                            (*it_g)->getNewName(),
                            bt);

                    }
                    catch(...) {
                        delete bt;
                        throw InternalErr(__FILE__,__LINE__,"Unable to allocate the HDFCFStrField instance.");
                    }

                    // The last (fastest-varying) dimension becomes the string
                    // length, so only the first rank-1 dimensions are appended.
                    for(it_d = dims.begin(); it_d != dims.begin()+dims.size()-1; it_d++)
                        ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
                    dds.add_var(ar);
                    delete bt;
                    delete ar;
                }

            }

            else {// Other datatypes

                // Non missing fields
                if((*it_g)->getFieldType()!= 4) {
                    HDFSPArray_RealField *ar = NULL;

                    try {

                        vector<int32>dimsizes;

                        dimsizes.resize((*it_g)->getRank());
                        for(int i = 0; i <(*it_g)->getRank();i++)
                            dimsizes[i] = (int32)((dims[i])->getSize());
                        ar = new HDFSPArray_RealField(
                            (*it_g)->getRank(),
                            filename,
                            sdid,
                            (*it_g)->getFieldRef(),
                            (*it_g)->getType(),
                            sptype,
                            (*it_g)->getName(),
                            dimsizes,
                            (*it_g)->getNewName(),
                            bt);
                    }
                    catch(...) {
                        delete bt;
                        throw InternalErr(__FILE__,__LINE__,"Unable to allocate the HDFSPArray_RealField instance.");
                    }
                    for(it_d = dims.begin(); it_d != dims.end(); it_d++)
                        ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
                    dds.add_var(ar);
                    delete bt;
                    delete ar;
                }
                else {
                    // Field type 4: missing Z-dimension field; handler fabricates
                    // the values, so the field must be one-dimensional.
                    if((*it_g)->getRank()!=1){
                        delete bt;
                        throw InternalErr(__FILE__, __LINE__, "The rank of missing Z dimension field must be 1");
                    }
                    int nelem = ((*it_g)->getDimensions()[0])->getSize();

                    HDFSPArrayMissGeoField *ar = NULL;

                    try {
                        ar = new HDFSPArrayMissGeoField(
                            (*it_g)->getRank(),
                            nelem,
                            (*it_g)->getNewName(),
                            bt);
                    }
                    catch(...) {
                        delete bt;
                        throw InternalErr(__FILE__,__LINE__,
                                          "Unable to allocate the HDFSPArrayMissGeoField instance.");
                    }


                    for(it_d = dims.begin(); it_d != dims.end(); it_d++)
                        ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
                    dds.add_var(ar);
                    delete bt;
                    delete ar;

                }
            }
        }
    }

    // If we need to generate a DDS cache file,
    if(true == dds_cache) {

        // Check the file path
        string md_cache_dir;
        string key = "H4.Cache.metadata.path";
        bool found = false;
        TheBESKeys::TheKeys()->get_value(key,md_cache_dir,found);

        if(true == found) {

            // Create the DDS cache file name.
            string base_file_name = basename(filename);
            string dds_filename = md_cache_dir + "/"+base_file_name +"_dds";

            // DDS cache file is a binary file, this makes the file size smaller.
            FILE* dds_file =fopen(dds_filename.c_str(),"wb");
            if(NULL == dds_file) {
                string msg = "Cannot create the cache file. " + dds_filename + get_errno();
                throw InternalErr(__FILE__,__LINE__,msg);
            }
            // Take an exclusive POSIX advisory lock (blocking) so concurrent
            // BES processes don't write the cache file simultaneously.
            // NOTE(review): 'lock' is presumably a file-local helper returning a
            // prepared struct flock — confirm it is not a shared static buffer.
            int fd = fileno(dds_file);
            struct flock *l= lock(F_WRLCK);
            if (fcntl(fd, F_SETLKW, l) == -1) {
                fclose(dds_file);
                string msg = "Cannot hold the write lock for dds cached file "+ dds_filename;
                throw InternalErr (__FILE__, __LINE__,msg);
            }
            // TRY CATCH to close fclose.
            try {
                HDFCFUtil::write_sp_sds_dds_cache(spf,dds_file,total_bytes_dds_cache,dds_filename);
            }
            catch(...) {
                // Release the lock before reporting the failure; note this
                // replaces the original exception with a generic InternalErr.
                if (fcntl(fd, F_SETLK, lock(F_UNLCK)) == -1) {
                    fclose(dds_file);
                    string msg = "Cannot release the write lock for dds cached file "+ dds_filename;
                    throw InternalErr (__FILE__, __LINE__,msg);
                }

                fclose(dds_file);
                throw InternalErr(__FILE__,__LINE__,"Fail to generate a dds cache file.");
            }
            if (fcntl(fd, F_SETLK, lock(F_UNLCK)) == -1) {
                fclose(dds_file);
                string msg = "Cannot release the write lock for dds cached file "+ dds_filename;
                throw InternalErr (__FILE__, __LINE__,msg);
            }
            fclose(dds_file);

        }

        else {
            throw InternalErr (__FILE__, __LINE__,
                "DDS/DAS metadata cache path cannot be found when 'H4.EnableMetaDataCacheFile' key is set to be true.");
        }
    }

    return true;

}
2998 
2999 // Read SDS fields
3000 void read_dds_spfields(DDS &dds,const string& filename,const int sdfd,HDFSP::SDField *spsds, SPType sptype) {
3001 
3002  BESDEBUG("h4","Coming to read_dds_spfields "<<endl);
3003 
3004  // Ignore the dimension variable that is empty for non-special handling NASA HDF products
3005  if(OTHERHDF == sptype && (true == spsds->IsDimNoScale()))
3006  return;
3007 
3008  BaseType *bt=NULL;
3009  switch(spsds->getType()) {
3010 
3011 #define HANDLE_CASE(tid, type) \
3012  case tid: \
3013  bt = new (type)(spsds->getNewName(),filename); \
3014  break;
3015  HANDLE_CASE(DFNT_FLOAT32, HDFFloat32);
3016  HANDLE_CASE(DFNT_FLOAT64, HDFFloat64);
3017  HANDLE_CASE(DFNT_CHAR, HDFStr);
3018 #ifndef SIGNED_BYTE_TO_INT32
3019  HANDLE_CASE(DFNT_INT8, HDFByte);
3020  //HANDLE_CASE(DFNT_CHAR, HDFByte);
3021 #else
3022  HANDLE_CASE(DFNT_INT8,HDFInt32);
3023  //HANDLE_CASE(DFNT_CHAR, HDFInt32);
3024 #endif
3025  HANDLE_CASE(DFNT_UINT8, HDFByte);
3026  HANDLE_CASE(DFNT_INT16, HDFInt16);
3027  HANDLE_CASE(DFNT_UINT16, HDFUInt16);
3028  HANDLE_CASE(DFNT_INT32, HDFInt32);
3029  HANDLE_CASE(DFNT_UINT32, HDFUInt32);
3030  HANDLE_CASE(DFNT_UCHAR, HDFByte);
3031  default:
3032  throw InternalErr(__FILE__,__LINE__,"unsupported data type.");
3033 #undef HANDLE_CASE
3034  }
3035  int fieldtype = spsds->getFieldType();// Whether the field is real field,lat/lon field or missing Z-dimension field
3036 
3037  if(bt)
3038  {
3039 
3040  const vector<HDFSP::Dimension*>& dims= spsds->getCorrectedDimensions();
3041  vector<HDFSP::Dimension*>::const_iterator it_d;
3042 
3043  if(DFNT_CHAR == spsds->getType()) {
3044 
3045  if(1 == spsds->getRank()) {
3046 
3047  HDFCFStr * sca_str = NULL;
3048 
3049  try {
3050 
3051  sca_str = new HDFCFStr(
3052  sdfd,
3053  spsds->getFieldRef(),
3054  filename,
3055  spsds->getName(),
3056  spsds->getNewName(),
3057  false
3058  );
3059  }
3060  catch(...) {
3061  delete bt;
3062  throw InternalErr(__FILE__,__LINE__,"Unable to allocate the HDFCFStr instance.");
3063  }
3064  dds.add_var(sca_str);
3065  delete bt;
3066  delete sca_str;
3067  }
3068  else {
3069  HDFCFStrField *ar = NULL;
3070  try {
3071 
3072  ar = new HDFCFStrField(
3073  spsds->getRank() -1 ,
3074  filename,
3075  false,
3076  sdfd,
3077  spsds->getFieldRef(),
3078  0,
3079  spsds->getName(),
3080  spsds->getNewName(),
3081  bt);
3082 
3083  }
3084  catch(...) {
3085  delete bt;
3086  throw InternalErr(__FILE__,__LINE__,"Unable to allocate the HDFCFStrField instance.");
3087  }
3088 
3089  for(it_d = dims.begin(); it_d != dims.begin()+dims.size()-1; it_d++)
3090  ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
3091  dds.add_var(ar);
3092  delete bt;
3093  delete ar;
3094  }
3095 
3096  }
3097 
3098  // For non-CV variables and the existing non-lat/lon CV variables
3099  else if(fieldtype == 0 || fieldtype == 3 ) {
3100 
3101  HDFSPArray_RealField *ar = NULL;
3102 
3103  try {
3104  vector<int32>dimsizes;
3105  dimsizes.resize(spsds->getRank());
3106  for(int i = 0; i <spsds->getRank();i++)
3107  dimsizes[i] = (int32)((dims[i])->getSize());
3108 
3109  ar = new HDFSPArray_RealField(
3110  spsds->getRank(),
3111  filename,
3112  sdfd,
3113  spsds->getFieldRef(),
3114  spsds->getType(),
3115  sptype,
3116  spsds->getName(),
3117  dimsizes,
3118  spsds->getNewName(),
3119  bt);
3120  }
3121  catch(...) {
3122  delete bt;
3123  throw InternalErr(__FILE__,__LINE__,"Unable to allocate the HDFSPArray_RealField instance.");
3124  }
3125 
3126  for(it_d = dims.begin(); it_d != dims.end(); it_d++)
3127  ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
3128  dds.add_var(ar);
3129  delete bt;
3130  delete ar;
3131  }
3132 
3133  // For latitude and longitude
3134  else if(fieldtype == 1 || fieldtype == 2) {
3135 
3136  if(sptype == MODISARNSS || sptype == TRMML2_V7) {
3137 
3138  HDFSPArray_RealField *ar = NULL;
3139 
3140  try {
3141 
3142  vector<int32>dimsizes;
3143 
3144  dimsizes.resize(spsds->getRank());
3145  for(int i = 0; i <spsds->getRank();i++)
3146  dimsizes[i] = (dims[i])->getSize();
3147 
3148  ar = new HDFSPArray_RealField(
3149  spsds->getRank(),
3150  filename,
3151  sdfd,
3152  spsds->getFieldRef(),
3153  spsds->getType(),
3154  sptype,
3155  spsds->getName(),
3156  dimsizes,
3157  spsds->getNewName(),
3158  bt);
3159  }
3160  catch(...) {
3161  delete bt;
3162  throw InternalErr(__FILE__,__LINE__,
3163  "Unable to allocate the HDFSPArray_RealField instance.");
3164  }
3165 
3166 
3167  for(it_d = dims.begin(); it_d != dims.end(); it_d++)
3168  ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
3169  dds.add_var(ar);
3170  delete bt;
3171  delete ar;
3172 
3173  }
3174  else {
3175 
3176  HDFSPArrayGeoField *ar = NULL;
3177 
3178  try {
3179  ar = new HDFSPArrayGeoField(
3180  spsds->getRank(),
3181  filename,
3182  sdfd,
3183  spsds->getFieldRef(),
3184  spsds->getType(),
3185  sptype,
3186  fieldtype,
3187  spsds->getName(),
3188  spsds->getNewName(),
3189  bt);
3190  }
3191  catch(...) {
3192  delete bt;
3193  throw InternalErr(__FILE__,__LINE__,
3194  "Unable to allocate the HDFSPArray_RealField instance.");
3195  }
3196 
3197  for(it_d = dims.begin(); it_d != dims.end(); it_d++)
3198  ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
3199  dds.add_var(ar);
3200  delete bt;
3201  delete ar;
3202  }
3203  }
3204 
3205 
3206  else if(fieldtype == 4) { //missing Z dimensional field(or coordinate variables with missing values)
3207  if(spsds->getRank()!=1){
3208  delete bt;
3209  throw InternalErr(__FILE__, __LINE__, "The rank of missing Z dimension field must be 1");
3210  }
3211  int nelem = (spsds->getDimensions()[0])->getSize();
3212 
3213  HDFSPArrayMissGeoField *ar = NULL;
3214 
3215  try {
3216  ar = new HDFSPArrayMissGeoField(
3217  spsds->getRank(),
3218  nelem,
3219  spsds->getNewName(),
3220  bt);
3221  }
3222  catch(...) {
3223  delete bt;
3224  throw InternalErr(__FILE__,__LINE__,
3225  "Unable to allocate the HDFSPArrayMissGeoField instance.");
3226  }
3227 
3228 
3229  for(it_d = dims.begin(); it_d != dims.end(); it_d++)
3230  ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
3231  dds.add_var(ar);
3232  delete bt;
3233  delete ar;
3234  }
3235  // fieldtype =5 originally keeps for time. Still keep it for a while.
3236 
3237  else if(fieldtype == 6) { //Coordinate variables added from the product specification
3238 
3239  if(spsds->getRank()!=1){
3240  delete bt;
3241  throw InternalErr(__FILE__, __LINE__, "The rank of added coordinate variable must be 1");
3242  }
3243  int nelem = (spsds->getDimensions()[0])->getSize();
3244 
3245  HDFSPArrayAddCVField *ar = NULL;
3246  try {
3247  ar = new HDFSPArrayAddCVField(
3248  spsds->getType(),
3249  sptype,
3250  spsds->getName(),
3251  nelem,
3252  spsds->getNewName(),
3253  bt);
3254  }
3255  catch(...) {
3256  delete bt;
3257  throw InternalErr(__FILE__,__LINE__,
3258  "Unable to allocate the HDFSPArrayAddCVField instance.");
3259  }
3260 
3261 
3262  for(it_d = dims.begin(); it_d != dims.end(); it_d++)
3263  ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
3264  dds.add_var(ar);
3265  delete bt;
3266  delete ar;
3267  }
3268  else {
3269  delete bt;
3270  throw InternalErr(__FILE__, __LINE__, "The field type should be one of 0,1,2,3,4 or 6.");
3271 
3272  }
3273  }
3274 
3275 }
3276 
3277 // Read Vdata fields.
3278 void read_dds_spvdfields(DDS &dds,const string & filename, const int fileid,int32 objref,int32 numrec,HDFSP::VDField *spvd) {
3279 
3280  BESDEBUG("h4","Coming to read_dds_spvdfields "<<endl);
3281 
3282  // First map the HDF4 datatype to DAP2
3283  BaseType *bt=NULL;
3284  switch(spvd->getType()) {
3285 #define HANDLE_CASE(tid, type) \
3286  case tid: \
3287  bt = new (type)(spvd->getNewName(),filename); \
3288  break;
3289  HANDLE_CASE(DFNT_FLOAT32, HDFFloat32);
3290  HANDLE_CASE(DFNT_FLOAT64, HDFFloat64);
3291  HANDLE_CASE(DFNT_CHAR8,HDFStr);
3292 #ifndef SIGNED_BYTE_TO_INT32
3293  HANDLE_CASE(DFNT_INT8, HDFByte);
3294 #else
3295  HANDLE_CASE(DFNT_INT8,HDFInt32);
3296 #endif
3297  HANDLE_CASE(DFNT_UINT8, HDFByte);
3298  HANDLE_CASE(DFNT_INT16, HDFInt16);
3299  HANDLE_CASE(DFNT_UINT16, HDFUInt16);
3300  HANDLE_CASE(DFNT_INT32, HDFInt32);
3301  HANDLE_CASE(DFNT_UINT32, HDFUInt32);
3302  HANDLE_CASE(DFNT_UCHAR8, HDFByte);
3303  //HANDLE_CASE(DFNT_CHAR8, HDFByte);
3304  //HANDLE_CASE(DFNT_CHAR8, HDFByte);
3305  default:
3306  throw InternalErr(__FILE__,__LINE__,"unsupported data type.");
3307 #undef HANDLE_CASE
3308  }
3309 
3310  if(bt)
3311  {
3312 
3313  if(DFNT_CHAR == spvd->getType()) {
3314 
3315  // If the field order is >1, the vdata field will be 2-D array
3316  // with the number of elements along the fastest changing dimension
3317  // as the field order.
3318  int vdrank = ((spvd->getFieldOrder())>1)?2:1;
3319  if (1 == vdrank) {
3320 
3321  HDFCFStr * sca_str = NULL;
3322  try {
3323  sca_str = new HDFCFStr(
3324  fileid,
3325  objref,
3326  filename,
3327  spvd->getName(),
3328  spvd->getNewName(),
3329  true
3330  );
3331  }
3332  catch(...) {
3333  delete bt;
3334  throw InternalErr(__FILE__,__LINE__,"Unable to allocate the HDFCFStr instance.");
3335  }
3336  dds.add_var(sca_str);
3337  delete bt;
3338  delete sca_str;
3339  }
3340 
3341  else {
3342 
3343  HDFCFStrField *ar = NULL;
3344  try {
3345 
3346  ar = new HDFCFStrField(
3347  vdrank -1 ,
3348  filename,
3349  true,
3350  fileid,
3351  objref,
3352  spvd->getFieldOrder(),
3353  spvd->getName(),
3354  spvd->getNewName(),
3355  bt);
3356 
3357  }
3358  catch(...) {
3359  delete bt;
3360  throw InternalErr(__FILE__,__LINE__,"Unable to allocate the HDFCFStrField instance.");
3361  }
3362 
3363  string dimname0 = "VDFDim0_"+spvd->getNewName();
3364  ar->append_dim(numrec, dimname0);
3365  dds.add_var(ar);
3366  delete bt;
3367  delete ar;
3368 
3369  }
3370  }
3371  else {
3372  HDFSPArray_VDField *ar = NULL;
3373 
3374  // If the field order is >1, the vdata field will be 2-D array
3375  // with the number of elements along the fastest changing dimension
3376  // as the field order.
3377  int vdrank = ((spvd->getFieldOrder())>1)?2:1;
3378  ar = new HDFSPArray_VDField(
3379  vdrank,
3380  filename,
3381  fileid,
3382  objref,
3383  spvd->getType(),
3384  spvd->getFieldOrder(),
3385  spvd->getName(),
3386  spvd->getNewName(),
3387  bt);
3388 
3389  string dimname1 = "VDFDim0_"+spvd->getNewName();
3390 
3391  string dimname2 = "VDFDim1_"+spvd->getNewName();
3392  if(spvd->getFieldOrder() >1) {
3393  ar->append_dim(numrec,dimname1);
3394  ar->append_dim(spvd->getFieldOrder(),dimname2);
3395  }
3396  else
3397  ar->append_dim(numrec,dimname1);
3398 
3399  dds.add_var(ar);
3400  delete bt;
3401  delete ar;
3402  }
3403  }
3404 
3405 }
3406 
3407 // This routine will check if this is a special EOS2 file that we can improve the performance
3408 // Currently AIRS level 2 and 3 version 6 and MOD08_M3-like products are what we can serve. KY 2014-01-29
3409 int check_special_eosfile(const string & filename, string& grid_name,int32 sdfd,int32 fileid ) {
3410 
3411  int32 sds_id = 0;
3412  int32 n_sds = 0;
3413  int32 n_sd_attrs = 0;
3414  bool is_eos = false;
3415  int ret_val = 1;
3416 
3417  // Obtain number of SDS objects and number of SD(file) attributes
3418  if (SDfileinfo (sdfd, &n_sds, &n_sd_attrs) == FAIL){
3419  throw InternalErr (__FILE__,__LINE__,"SDfileinfo failed ");
3420  }
3421 
3422  char attr_name[H4_MAX_NC_NAME];
3423  int32 attr_type = -1;
3424  int32 attr_count = -1;
3425  char structmdname[] = "StructMetadata.0";
3426 
3427  // Is this an HDF-EOS2 file?
3428  for (int attr_index = 0; attr_index < n_sd_attrs;attr_index++) {
3429  if(SDattrinfo(sdfd,attr_index,attr_name,&attr_type,&attr_count) == FAIL) {
3430  throw InternalErr (__FILE__,__LINE__,"SDattrinfo failed ");
3431  }
3432 
3433  if(strcmp(attr_name,structmdname)==0) {
3434  is_eos = true;
3435  break;
3436  }
3437  }
3438 
3439  if(true == is_eos) {
3440 
3441  int sds_index = 0;
3442  int32 sds_rank = 0;
3443  int32 dim_sizes[H4_MAX_VAR_DIMS];
3444  int32 sds_dtype = 0;
3445  int32 n_sds_attrs = 0;
3446  char sds_name[H4_MAX_NC_NAME];
3447  char xdim_name[] ="XDim";
3448  char ydim_name[] ="YDim";
3449 
3450  string temp_grid_name1;
3451  string temp_grid_name2;
3452  bool xdim_is_cv_flag = false;
3453  bool ydim_is_cv_flag = false;
3454 
3455 
3456  // The following for-loop checks if this is a MOD08_M3-like HDF-EOS2 product.
3457  for (sds_index = 0; sds_index < (int)n_sds; sds_index++) {
3458 
3459  sds_id = SDselect (sdfd, sds_index);
3460  if (sds_id == FAIL) {
3461  throw InternalErr (__FILE__,__LINE__,"SDselect failed ");
3462  }
3463 
3464  // Obtain object name, rank, size, field type and number of SDS attributes
3465  int status = SDgetinfo (sds_id, sds_name, &sds_rank, dim_sizes,
3466  &sds_dtype, &n_sds_attrs);
3467  if (status == FAIL) {
3468  SDendaccess(sds_id);
3469  throw InternalErr (__FILE__,__LINE__,"SDgetinfo failed ");
3470  }
3471 
3472  if(1 == sds_rank) {
3473 
3474  // This variable "XDim" exists
3475  if(strcmp(sds_name,xdim_name) == 0) {
3476  int32 sds_dimid = SDgetdimid(sds_id,0);
3477  if(sds_dimid == FAIL) {
3478  SDendaccess(sds_id);
3479  throw InternalErr (__FILE__,__LINE__,"SDgetinfo failed ");
3480  }
3481  char dim_name[H4_MAX_NC_NAME];
3482  int32 dim_size = 0;
3483  int32 dim_type = 0;
3484  int32 num_dim_attrs = 0;
3485  if(SDdiminfo(sds_dimid,dim_name,&dim_size,&dim_type,&num_dim_attrs) == FAIL) {
3486  SDendaccess(sds_id);
3487  throw InternalErr(__FILE__,__LINE__,"SDdiminfo failed ");
3488  }
3489 
3490  // No dimension scale and XDim exists
3491  if(0 == dim_type) {
3492  string tempdimname(dim_name);
3493  if(tempdimname.size() >=5) {
3494  if(tempdimname.compare(0,5,"XDim:") == 0) {
3495 
3496  // Obtain the grid name.
3497  temp_grid_name1 = tempdimname.substr(5);
3498  xdim_is_cv_flag = true;
3499 
3500  }
3501  }
3502  else if("XDim" == tempdimname)
3503  xdim_is_cv_flag = true;
3504  }
3505  }
3506 
3507  // The variable "YDim" exists
3508  if(strcmp(sds_name,ydim_name) == 0) {
3509 
3510  int32 sds_dimid = SDgetdimid(sds_id,0);
3511  if(sds_dimid == FAIL) {
3512  SDendaccess (sds_id);
3513  throw InternalErr (__FILE__,__LINE__,"SDgetinfo failed ");
3514  }
3515  char dim_name[H4_MAX_NC_NAME];
3516  int32 dim_size = 0;
3517  int32 dim_type = 0;
3518  int32 num_dim_attrs = 0;
3519  if(SDdiminfo(sds_dimid,dim_name,&dim_size,&dim_type,&num_dim_attrs) == FAIL) {
3520  SDendaccess(sds_id);
3521  throw InternalErr(__FILE__,__LINE__,"SDdiminfo failed ");
3522  }
3523 
3524  // For this case, the dimension should not have dimension scales.
3525  if(0 == dim_type) {
3526  string tempdimname(dim_name);
3527  if(tempdimname.size() >=5) {
3528  if(tempdimname.compare(0,5,"YDim:") == 0) {
3529  // Obtain the grid name.
3530  temp_grid_name2 = tempdimname.substr(5);
3531  ydim_is_cv_flag = true;
3532  }
3533  }
3534  else if ("YDim" == tempdimname)
3535  ydim_is_cv_flag = true;
3536  }
3537  }
3538  }
3539 
3540  SDendaccess(sds_id);
3541  if((true == xdim_is_cv_flag) && (true == ydim_is_cv_flag ))
3542  break;
3543 
3544  }
3545 
3546  // If one-grid and variable XDim/YDim exist and also they don't have dim. scales,we treat this as MOD08-M3-like products
3547  if ((temp_grid_name1 == temp_grid_name2) && (true == xdim_is_cv_flag) && (true == ydim_is_cv_flag)) {
3548  grid_name = temp_grid_name1;
3549  ret_val = 2;
3550  }
3551 
3552  // Check if this is a new AIRS level 2 and 3 product. Since new AIRS level 2 and 3 version 6 products still have dimensions that don't have
3553  // dimension scales and the old way to handle level 2 and 3 dimensions makes the performance suffer. We will see if we can improve
3554  // performance by handling the data with just the HDF4 interfaces.
3555  // At least the file name should have string AIRS.L3. or AIRS.L2..
3556  else if((basename(filename).size() >8) && (basename(filename).compare(0,4,"AIRS") == 0)
3557  && ((basename(filename).find(".L3.")!=string::npos) || (basename(filename).find(".L2.")!=string::npos))){
3558 
3559  bool has_dimscale = false;
3560 
3561  // Go through the SDS object and check if this file has dimension scales.
3562  for (sds_index = 0; sds_index < n_sds; sds_index++) {
3563 
3564  sds_id = SDselect (sdfd, sds_index);
3565  if (sds_id == FAIL) {
3566  throw InternalErr (__FILE__,__LINE__,"SDselect failed ");
3567  }
3568 
3569  // Obtain object name, rank, size, field type and number of SDS attributes
3570  int status = SDgetinfo (sds_id, sds_name, &sds_rank, dim_sizes,
3571  &sds_dtype, &n_sds_attrs);
3572  if (status == FAIL) {
3573  SDendaccess(sds_id);
3574  throw InternalErr (__FILE__,__LINE__,"SDgetinfo failed ");
3575  }
3576 
3577  for (int dim_index = 0; dim_index<sds_rank; dim_index++) {
3578 
3579  int32 sds_dimid = SDgetdimid(sds_id,dim_index);
3580  if(sds_dimid == FAIL) {
3581  SDendaccess(sds_id);
3582  throw InternalErr (__FILE__,__LINE__,"SDgetinfo failed ");
3583  }
3584 
3585  char dim_name[H4_MAX_NC_NAME];
3586  int32 dim_size = 0;
3587  int32 dim_type = 0;
3588  int32 num_dim_attrs = 0;
3589  if(SDdiminfo(sds_dimid,dim_name,&dim_size,&dim_type,&num_dim_attrs) == FAIL) {
3590  SDendaccess(sds_id);
3591  throw InternalErr(__FILE__,__LINE__,"SDdiminfo failed ");
3592  }
3593 
3594  if(dim_type !=0) {
3595  has_dimscale = true;
3596  break;
3597  }
3598 
3599  }
3600  SDendaccess(sds_id);
3601  if( true == has_dimscale)
3602  break;
3603  }
3604 
3605  // If having dimension scales, this is an AIRS level 2 or 3 version 6. Treat it differently. Otherwise, this is an old AIRS level 3 product.
3606  if (true == has_dimscale)
3607  ret_val = 3;
3608  }
3609  else {// Check if this is an HDF-EOS2 file but not using HDF-EOS2 at all.
3610  // We turn off this for the time being because
3611  // 1) We need to make sure this is a grid file not swath or point file.
3612  // It will be time consuming to identify grids or swaths and hurts the performance for general case.
3613  // 2) No real NASA files exist. We will handle them later.
3614  // KY 2014-01-29
3615  ;
3616 #if 0
3617  bool has_dimscale = true;
3618  bool is_grid = false;
3619 
3620  // Go through the SDS object
3621  for (sds_index = 0; sds_index < n_sds; sds_index++) {
3622 
3623  sds_id = SDselect (sdid, sds_index);
3624  if (sds_id == FAIL) {
3625  SDend(sdid);
3626  throw InternalErr (__FILE__,__LINE__,"SDselect failed ");
3627  }
3628 
3629  // Obtain object name, rank, size, field type and number of SDS attributes
3630  int status = SDgetinfo (sds_id, sds_name, &sds_rank, dim_sizes,
3631  &sds_dtype, &n_sds_attrs);
3632  if (status == FAIL) {
3633  SDendaccess(sds_id);
3634  SDend(sdid);
3635  throw InternalErr (__FILE__,__LINE__,"SDgetinfo failed ");
3636  }
3637 
3638 
3639  for (int dim_index = 0; dim_index<sds_rank; dim_index++) {
3640 
3641  int32 sds_dimid = SDgetdimid(sds_id,dim_index);
3642  if(sds_dimid == FAIL) {
3643  SDendaccess(sds_id);
3644  SDend(sdid);
3645  throw InternalErr (__FILE__,__LINE__,"SDgetinfo failed ");
3646  }
3647  char dim_name[H4_MAX_NC_NAME];
3648  int32 dim_size = 0;
3649  int32 dim_type = 0;
3650  int32 num_dim_attrs = 0;
3651  if(SDdiminfo(sds_dimid,dim_name,&dim_size,&dim_type,&num_dim_attrs) == FAIL) {
3652  SDendaccess(sds_id);
3653  SDend(sdid);
3654  throw InternalErr(__FILE__,__LINE__,"SDdiminfo failed ");
3655  }
3656 
3657  if(0 == dim_type) {
3658  has_dimscale = false;
3659  }
3660 
3661  }
3662  SDendaccess(sds_id);
3663  }
3664  if (true == has_dimscale)
3665  ret_val = 4;
3666 #endif
3667  }
3668  }
3669 
3670  return ret_val;
3671 }
3672 
3673 // Generate DAS for the file that only use SDS APIs. Currently this routine only applies to AIRS version 6
3674 // that can take advantage of the handler's metadata cache feature.
3675 void read_das_sds(DAS & das, const string & filename,int32 sdfd, bool ecs_metadata,HDFSP::File**h4fileptr) {
3676 
3677  HDFSP::File *spf = NULL;
3678  try {
3679  spf = HDFSP::File::Read(filename.c_str(),sdfd,-1);
3680  spf->Handle_AIRS_L23();
3681  read_das_special_eos2_core(das,spf,filename,ecs_metadata);
3682  }
3683  catch (HDFSP::Exception &e)
3684  {
3685  if (spf != NULL)
3686  delete spf;
3687  throw InternalErr(e.what());
3688  }
3689 
3690  *h4fileptr = spf;
3691  return;
3692 }
3693 
3694 // Generate DDS for the file that only use SDS APIs. Currently this routine only applies to AIRS version 6
3695 // that can take advantage of the handler's metadata cache feature.
3696 void read_dds_sds(DDS &dds, const string & filename,int32 sdfd, HDFSP::File*h4file,bool dds_setcache) {
3697 
3698  // Set DDS dataset.
3699  dds.set_dataset_name(basename(filename));
3700  read_dds_special_1d_grid(dds,h4file,filename,sdfd,dds_setcache);
3701  return;
3702 
3703 }
3704 // Default option
3705 void read_dds(DDS & dds, const string & filename)
3706 {
3707  // generate DDS, DAS
3708  DAS das;
3709  dds.set_dataset_name(basename(filename));
3710  build_descriptions(dds, das, filename);
3711 
3712  if (!dds.check_semantics()) { // DDS didn't get built right
3713  THROW(dhdferr_ddssem);
3714  }
3715  return;
3716 }
3717 
3718 void read_das(DAS & das, const string & filename)
3719 {
3720  // generate DDS, DAS
3721  DDS dds(NULL);
3722  dds.set_dataset_name(basename(filename));
3723 
3724  build_descriptions(dds, das, filename);
3725 
3726  if (!dds.check_semantics()) { // DDS didn't get built right
3727  dds.print(cout);
3728  THROW(dhdferr_ddssem);
3729  }
3730  return;
3731 }
3732 
3733 // Scan the HDF file and build the DDS and DAS
3734 static void build_descriptions(DDS & dds, DAS & das,
3735  const string & filename)
3736 {
3737  sds_map sdsmap;
3738  vd_map vdatamap;
3739  gr_map grmap;
3740 
3741  // Build descriptions of SDS items
3742  // If CF option is enabled, StructMetadata will be parsed here.
3743  SDS_descriptions(sdsmap, das, filename);
3744 
3745  // Build descriptions of file annotations
3746  FileAnnot_descriptions(das, filename);
3747 
3748  // Build descriptions of Vdatas
3749  Vdata_descriptions(vdatamap, das, filename);
3750 
3751  // Build descriptions of General Rasters
3752  GR_descriptions(grmap, das, filename);
3753 
3754  // Build descriptions of Vgroups and add SDS/Vdata/GR in the correct order
3755  Vgroup_descriptions(dds, das, filename, sdsmap, vdatamap, grmap);
3756  return;
3757 }
3758 
3759 // These two Functor classes are used to look for EOS attributes with certain
3760 // base names (is_named) and to accumulate values in in different hdf_attr
3761 // objects with the same base names (accum_attr). These are used by
3762 // merge_split_eos_attributes() to do just that. Some HDF EOS attributes are
3763 // longer than HDF 4's 32,000 character limit. Those attributes are split up
3764 // in the HDF 4 files and named `StructMetadata.0', `StructMetadata.1', et
3765 // cetera. This code merges those attributes so that they can be processed
3766 // correctly by the hdf eos attribute parser (see AddHDFAttr() further down
3767 // in this file). 10/29/2001 jhrg
3768 
3769 struct accum_attr
3770  :public binary_function < hdf_genvec &, hdf_attr, hdf_genvec & > {
3771 
3772  string d_named;
3773 
3774  accum_attr(const string & named):d_named(named) {
3775  }
3776 
3777  hdf_genvec & operator() (hdf_genvec & accum, const hdf_attr & attr) {
3778  // Assume that all fields with the same base name should be combined,
3779  // and assume that they are in order.
3780  BESDEBUG("h4", "attr.name: " << attr.name << endl);
3781  if (attr.name.find(d_named) != string::npos) {
3782 #if 0
3783  string stuff;
3784  stuff.assign(attr.values.data(), attr.values.size());
3785  cerr << "Attribute chunk: " << attr.name << endl;
3786  cerr << stuff << endl;
3787 #endif
3788  accum.append(attr.values.number_type(), attr.values.data(),
3789  attr.values.size());
3790  return accum;
3791  }
3792  else {
3793  return accum;
3794  }
3795  }
3796 };
3797 
3798 struct is_named:public unary_function < hdf_attr, bool > {
3799  string d_named;
3800 
3801  is_named(const string & named):d_named(named) {
3802  }
3803 
3804  bool operator() (const hdf_attr & attr) {
3805  return (attr.name.find(d_named) != string::npos);
3806  }
3807 };
3808 
3809 static void
3810 merge_split_eos_attributes(vector < hdf_attr > &attr_vec,
3811  const string & attr_name)
3812 {
3813  // Only do this if there's more than one part.
3814  if (count_if(attr_vec.begin(), attr_vec.end(), is_named(attr_name)) > 1) {
3815  // Merge all split up parts named `attr_name.' Assume they are in
3816  // order in `attr_vec.'
3817  hdf_genvec attributes;
3818  attributes = accumulate(attr_vec.begin(), attr_vec.end(),
3819  attributes, accum_attr(attr_name));
3820 
3821  // When things go south, check out the hdf_genvec...
3822  // BEDEBUG seems not providing a way to handle the following debugging info.
3823  // I can define a vector and call attributes.print(s_m), then use
3824  // BESDEBUG to output the debugging info. The downside is that whether BESDEBUG
3825  // is called, a vector of s_m will always be generated and a chunk of memory is
3826  // always used. So don't change this for the time being. KY 2012-09-13
3827  DBG(vector < string > s_m;
3828  attributes.print(s_m);
3829  cerr << "Accum struct MD: (" << s_m.size() << ") "
3830  << s_m[0] << endl);
3831 
3832  // Remove all the parts that have been merged
3833  attr_vec.erase(remove_if(attr_vec.begin(), attr_vec.end(),
3834  is_named(attr_name)), attr_vec.end());
3835 
3836  // Make a new hdf_attr and assign it the newly merged attributes...
3837  hdf_attr merged_attr;
3838  merged_attr.name = attr_name;
3839  merged_attr.values = attributes;
3840 
3841  // And add it to the vector of attributes.
3842  attr_vec.push_back(merged_attr);
3843  }
3844 }
3845 
3846 // Read SDS's out of filename, build descriptions and put them into dds, das.
3847 static void SDS_descriptions(sds_map & map, DAS & das,
3848  const string & filename)
3849 {
3850 
3851  hdfistream_sds sdsin(filename);
3852  sdsin.setmeta(true);
3853 
3854  // Read SDS file attributes attr_iter i = ;
3855 
3856  vector < hdf_attr > fileattrs;
3857  sdsin >> fileattrs;
3858 
3859  // Read SDS's
3860  sdsin.rewind();
3861  while (!sdsin.eos()) {
3862  sds_info sdi; // add the next sds_info to map
3863  sdsin >> sdi.sds;
3864  sdi.in_vgroup = false; // assume we're not part of a vgroup
3865  map[sdi.sds.ref] = sdi; // assign to map by ref
3866  }
3867 
3868  sdsin.close();
3869 
3870  // This is the call to combine SDS attributes that have been split up
3871  // into N 32,000 character strings. 10/24/2001 jhrg
3872  merge_split_eos_attributes(fileattrs, "StructMetadata");
3873  merge_split_eos_attributes(fileattrs, "CoreMetadata");
3874  merge_split_eos_attributes(fileattrs, "ProductMetadata");
3875  merge_split_eos_attributes(fileattrs, "ArchiveMetadata");
3876  merge_split_eos_attributes(fileattrs, "coremetadata");
3877  merge_split_eos_attributes(fileattrs, "productmetadata");
3878 
3879  // Build DAS, add SDS file attributes
3880  AddHDFAttr(das, string("HDF_GLOBAL"), fileattrs);
3881  // add each SDS's attrs
3882  vector < hdf_attr > dattrs;
3883 
3884  // TODO Remove these attributes (name and dimension)? jhrg 8/17/11
3885  // ***
3886  for (SDSI s = map.begin(); s != map.end(); ++s) {
3887  const hdf_sds *sds = &s->second.sds;
3888  AddHDFAttr(das, sds->name, sds->attrs);
3889  for (int k = 0; k < (int) sds->dims.size(); ++k) {
3890  dattrs = Dims2Attrs(sds->dims[k]);
3891  AddHDFAttr(das, sds->name + "_dim_" + num2string(k), dattrs);
3892  }
3893 
3894  }
3895 
3896  return;
3897 }
3898 
3899 // Read Vdata's out of filename, build descriptions and put them into dds.
3900 static void Vdata_descriptions(vd_map & map, DAS & das,
3901  const string & filename)
3902 {
3903  hdfistream_vdata vdin(filename);
3904  vdin.setmeta(true);
3905 
3906  // Read Vdata's
3907  while (!vdin.eos()) {
3908  vd_info vdi; // add the next vd_info to map
3909  vdin >> vdi.vdata;
3910  vdi.in_vgroup = false; // assume we're not part of a vgroup
3911  map[vdi.vdata.ref] = vdi; // assign to map by ref
3912  }
3913  vdin.close();
3914 
3915  // Build DAS
3916  vector < hdf_attr > dattrs;
3917  for (VDI s = map.begin(); s != map.end(); ++s) {
3918  const hdf_vdata *vd = &s->second.vdata;
3919  AddHDFAttr(das, vd->name, vd->attrs);
3920  }
3921 
3922  return;
3923 }
3924 
// Read Vgroup's out of filename, build descriptions and put them into dds.
//
// This pass assembles the final DDS: it records vgroup attributes, decides
// which vgroups are toplevel, marks SDS/Vdata/GR objects contained inside
// vgroups, then emits (1) one Structure per toplevel vgroup and (2) one
// variable per "lone" SDS/Vdata/GR that is in no vgroup.
static void Vgroup_descriptions(DDS & dds, DAS & das,
                                const string & filename, sds_map & sdmap,
                                vd_map & vdmap, gr_map & grmap)
{

    hdfistream_vgroup vgin(filename);

    // Read Vgroup's
    vg_map vgmap;
    while (!vgin.eos()) {
        vg_info vgi;                    // add the next vg_info to map
        vgin >> vgi.vgroup;             // read vgroup itself
        vgi.toplevel = true;            // assume toplevel until we prove otherwise
        vgmap[vgi.vgroup.ref] = vgi;    // assign to map by vgroup ref
    }
    vgin.close();

    // for each Vgroup: record attributes and classify its children.
    for (VGI v = vgmap.begin(); v != vgmap.end(); ++v) {
        const hdf_vgroup *vg = &v->second.vgroup;

        // Add Vgroup attributes
        AddHDFAttr(das, vg->name, vg->attrs);

        // now, assign children.
        // NOTE(review): the map operator[] uses below default-insert an
        // entry when 'ref' is not already present — presumably every child
        // ref was loaded by the earlier description passes; confirm the
        // behavior for malformed files.
        for (uint32 i = 0; i < vg->tags.size(); i++) {
            int32 tag = vg->tags[i];
            int32 ref = vg->refs[i];
            switch (tag) {
            case DFTAG_VG:
                // Could be a GRI or a Vgroup
                if (grmap.find(ref) != grmap.end())
                    grmap[ref].in_vgroup = true;
                else
                    vgmap[ref].toplevel = false;    // child vgroup: not toplevel
                break;
            case DFTAG_VH:
                vdmap[ref].in_vgroup = true;        // Vdata lives in this vgroup
                break;
            case DFTAG_NDG:
                sdmap[ref].in_vgroup = true;        // SDS lives in this vgroup
                break;
            default:
                // Unknown tags are only logged, not fatal.
                (*BESLog::TheLog()) << "unknown tag: " << tag << " ref: " << ref << endl;
                // TODO: Make this an exception? jhrg 8/19/11
                // Don't make an exception. Possibly you will meet other valid tags. Need to know if it
                // is worth to tackle this. KY 09/13/12
                break;
            }// switch (tag)
        } // for (uint32 i = 0; i < vg->tags.size(); i++)
    } // for (VGI v = vgmap.begin(); v != vgmap.end(); ++v)

    // Build DDS for all toplevel vgroups
    BaseType *pbt = 0;
    for (VGI v = vgmap.begin(); v != vgmap.end(); ++v) {
        if (!v->second.toplevel)
            continue;           // skip over non-toplevel vgroups
        pbt = NewStructureFromVgroup(v->second.vgroup,
                                     vgmap, sdmap, vdmap,
                                     grmap, filename);
        if (pbt != 0) {
            // add_var presumably copies the variable (the local is deleted
            // right after) — consistent with the other loops below.
            dds.add_var(pbt);
            delete pbt;
        }

    } // for (VGI v = vgmap.begin(); v != vgmap.end(); ++v)

    // add lone SDS's
    for (SDSI s = sdmap.begin(); s != sdmap.end(); ++s) {
        if (s->second.in_vgroup)
            continue;           // skip over SDS's in vgroups
        if (s->second.sds.has_scale()) // make a grid
            pbt = NewGridFromSDS(s->second.sds, filename);
        else
            pbt = NewArrayFromSDS(s->second.sds, filename);
        if (pbt != 0) {
            dds.add_var(pbt);
            delete pbt;
        }
    }

    // add lone Vdata's
    for (VDI v = vdmap.begin(); v != vdmap.end(); ++v) {
        if (v->second.in_vgroup)
            continue;           // skip over Vdata in vgroups
        pbt = NewSequenceFromVdata(v->second.vdata, filename);
        if (pbt != 0) {
            dds.add_var(pbt);
            delete pbt;
        }
    }

    // add lone GR's
    for (GRI g = grmap.begin(); g != grmap.end(); ++g) {
        if (g->second.in_vgroup)
            continue;           // skip over GRs in vgroups
        pbt = NewArrayFromGR(g->second.gri, filename);
        if (pbt != 0) {
            dds.add_var(pbt);
            delete pbt ;
        }
    }
}
4027 
4028 static void GR_descriptions(gr_map & map, DAS & das,
4029  const string & filename)
4030 {
4031 
4032  hdfistream_gri grin(filename);
4033  grin.setmeta(true);
4034 
4035  // Read GR file attributes
4036  vector < hdf_attr > fileattrs;
4037  grin >> fileattrs;
4038 
4039  // Read general rasters
4040  grin.rewind();
4041  while (!grin.eos()) {
4042  gr_info gri; // add the next gr_info to map
4043  grin >> gri.gri;
4044  gri.in_vgroup = false; // assume we're not part of a vgroup
4045  map[gri.gri.ref] = gri; // assign to map by ref
4046  }
4047 
4048  grin.close();
4049 
4050  // Build DAS
4051  AddHDFAttr(das, string("HDF_GLOBAL"), fileattrs); // add GR file attributes
4052 
4053  // add each GR's attrs
4054  vector < hdf_attr > pattrs;
4055  for (GRI g = map.begin(); g != map.end(); ++g) {
4056  const hdf_gri *gri = &g->second.gri;
4057  // add GR attributes
4058  AddHDFAttr(das, gri->name, gri->attrs);
4059 
4060  // add palettes as attributes
4061  pattrs = Pals2Attrs(gri->palettes);
4062  AddHDFAttr(das, gri->name, pattrs);
4063 
4064  }
4065 
4066  return;
4067 }
4068 
4069 // Read file annotations out of filename, put in attribute structure
4070 static void FileAnnot_descriptions(DAS & das, const string & filename)
4071 {
4072 
4073  hdfistream_annot annotin(filename);
4074  vector < string > fileannots;
4075 
4076  annotin >> fileannots;
4077  AddHDFAttr(das, string("HDF_GLOBAL"), fileannots);
4078 
4079  annotin.close();
4080  return;
4081 }
4082 
// add a vector of hdf_attr to a DAS
//
// Ordinary attributes are appended to the attribute table of 'varname'.
// Attributes recognized as HDF-EOS metadata (StructMetadata, CoreMetadata,
// ... — whose split parts were merged earlier by merge_split_eos_attributes)
// are instead handed to the hdfeos lexer/parser, which fills a separate
// container named after the metadata (any trailing ".N" removed).
void AddHDFAttr(DAS & das, const string & varname,
                const vector < hdf_attr > &hav)
{
    if (hav.size() == 0)        // nothing to add
        return;
    // get pointer to the AttrTable for the variable varname (create one if
    // necessary)
    string tempname = varname;
    AttrTable *atp = das.get_table(tempname);
    if (atp == 0) {
        atp = new AttrTable;
        // NOTE(review): add_table presumably takes ownership of the new
        // table — confirm against the libdap DAS documentation.
        atp = das.add_table(tempname, atp);
    }
    // add the attributes to the DAS
    vector < string > attv;     // vector of attribute strings
    string attrtype;            // name of type of attribute
    for (int i = 0; i < (int) hav.size(); ++i) {        // for each attribute

        attrtype = DAPTypeName(hav[i].values.number_type());
        // get a vector of strings representing the values of the attribute
        attv = vector < string > ();    // clear attv
        hav[i].values.print(attv);

        // add the attribute and its values to the DAS
        for (int j = 0; j < (int) attv.size(); ++j) {
            // handle HDF-EOS metadata with separate parser
            string container_name = hav[i].name;
            if (container_name.find("StructMetadata") == 0
                || container_name.find("CoreMetadata") == 0
                || container_name.find("ProductMetadata") == 0
                || container_name.find("ArchiveMetadata") == 0
                || container_name.find("coremetadata") == 0
                || container_name.find("productmetadata") == 0) {
                // Use the base name as the container: "StructMetadata.0"
                // becomes "StructMetadata".
                string::size_type dotzero = container_name.find('.');
                if (dotzero != container_name.npos)
                    container_name.erase(dotzero);      // erase .0

                AttrTable *at = das.get_table(container_name);
                if (!at)
                    at = das.add_table(container_name, new AttrTable);

                // tell lexer to scan attribute string
                void *buf = hdfeos_string(attv[j].c_str());

                parser_arg arg(at);
                // HDF-EOS attribute parsing is complex and some errors are
                // tolerated. Thus, if the parser proper returns an error,
                // that results in an exception that is fatal. However, if
                // the status returned by an otherwise successful parse shows
                // an error was encountered but successful parsing continued,
                // that's OK, but it should be logged.
                //
                // Also, HDF-EOS files should be read using the new HDF-EOS
                // features and not this older parser. jhrg 8/18/11
                if (hdfeosparse(&arg) != 0){
                    // Release the lexer buffer before throwing so it does
                    // not leak.
                    hdfeos_delete_buffer(buf);
                    throw Error("HDF-EOS parse error while processing a " + container_name + " HDFEOS attribute.");
                }

                // We don't use the parse_error for this case since it generates memory leaking. KY 2014-02-25
                if (arg.status() == false) {
                    (*BESLog::TheLog())<< "HDF-EOS parse error while processing a "
                        << container_name << " HDFEOS attribute. (2)" << endl;
                }

                hdfeos_delete_buffer(buf);
            }
            else {
                // Plain attribute: string values are escaped and, depending
                // on how the library was configured, quoted.
                if (attrtype == "String")
#ifdef ATTR_STRING_QUOTE_FIX
                    attv[j] = escattr(attv[j]);
#else
                    attv[j] = "\"" + escattr(attv[j]) + "\"";
#endif

                if (atp->append_attr(hav[i].name, attrtype, attv[j]) == 0)
                    THROW(dhdferr_addattr);
            }
        }
    }

    return;
}
4175 
4176 // add a vector of annotations to a DAS. They are stored as attributes. They
4177 // are encoded as string values of an attribute named "HDF_ANNOT".
4178 void AddHDFAttr(DAS & das, const string & varname,
4179  const vector < string > &anv)
4180 {
4181  if (anv.size() == 0) // nothing to add
4182  return;
4183 
4184  // get pointer to the AttrTable for the variable varname (create one if
4185  // necessary)
4186  AttrTable *atp = das.get_table(varname);
4187  if (atp == 0) {
4188  atp = new AttrTable;
4189  atp = das.add_table(varname, atp);
4190  }
4191  // add the annotations to the DAS
4192  string an;
4193  for (int i = 0; i < (int) anv.size(); ++i) { // for each annotation
4194 #ifdef ATTR_STRING_QUOTE_FIX
4195  an = escattr(anv[i]); // quote strings
4196 #else
4197  an = "\"" + escattr(anv[i]) + "\""; // quote strings
4198 #endif
4199  if (atp->append_attr(string("HDF_ANNOT"), "String", an) == 0)
4200  THROW(dhdferr_addattr);
4201  }
4202 
4203  return;
4204 }
4205 
4206 // Add a vector of palettes as attributes to a GR. Each palette is added as
4207 // two attributes: the first contains the palette data; the second contains
4208 // the number of components in the palette.
4209 static vector < hdf_attr > Pals2Attrs(const vector < hdf_palette > palv)
4210 {
4211  vector < hdf_attr > pattrs;
4212 
4213  if (palv.size() != 0) {
4214  // for each palette create an attribute with the palette inside, and an
4215  // attribute containing the number of components
4216  hdf_attr pattr;
4217  string palname;
4218  for (int i = 0; i < (int) palv.size(); ++i) {
4219  palname = "hdf_palette_" + num2string(i);
4220  pattr.name = palname;
4221  pattr.values = palv[i].table;
4222  pattrs.push_back(pattr);
4223  pattr.name = palname + "_ncomps";
4224  pattr.values = hdf_genvec(DFNT_INT32,
4225  const_cast <
4226  int32 * >(&palv[i].ncomp), 1);
4227  pattrs.push_back(pattr);
4228  if (palv[i].name.length() != 0) {
4229  pattr.name = palname + "_name";
4230  pattr.values = hdf_genvec(DFNT_CHAR,
4231  const_cast <
4232  char *>(palv[i].name.c_str()),
4233  palv[i].name.length());
4234  pattrs.push_back(pattr);
4235  }
4236  }
4237  }
4238  return pattrs;
4239 }
4240 
4241 // Convert the meta information in a hdf_dim into a vector of
4242 // hdf_attr.
4243 static vector < hdf_attr > Dims2Attrs(const hdf_dim dim)
4244 {
4245  vector < hdf_attr > dattrs;
4246  hdf_attr dattr;
4247  if (dim.name.length() != 0) {
4248  dattr.name = "name";
4249  dattr.values =
4250  hdf_genvec(DFNT_CHAR, const_cast < char *>(dim.name.c_str()),
4251  dim.name.length());
4252  dattrs.push_back(dattr);
4253  }
4254  if (dim.label.length() != 0) {
4255  dattr.name = "long_name";
4256  dattr.values =
4257  hdf_genvec(DFNT_CHAR, const_cast < char *>(dim.label.c_str()),
4258  dim.label.length());
4259  dattrs.push_back(dattr);
4260  }
4261  if (dim.unit.length() != 0) {
4262  dattr.name = "units";
4263  dattr.values =
4264  hdf_genvec(DFNT_CHAR, const_cast < char *>(dim.unit.c_str()),
4265  dim.unit.length());
4266  dattrs.push_back(dattr);
4267  }
4268  if (dim.format.length() != 0) {
4269  dattr.name = "format";
4270  dattr.values =
4271  hdf_genvec(DFNT_CHAR, const_cast < char *>(dim.format.c_str()),
4272  dim.format.length());
4273  dattrs.push_back(dattr);
4274  }
4275  return dattrs;
4276 }
4277 
HDFSP::SD::getAttributes
const std::vector< Attribute * > & getAttributes() const
Public interface to obtain the SD(file) attributes.
Definition: HDFSP.h:583
HDFSP::File::getVDATAs
const std::vector< VDATA * > & getVDATAs() const
Public interface to Obtain Vdata.
Definition: HDFSP.h:777
hdfistream_gri
Definition: hcstream.h:395
HDFSP::SD::getFields
const std::vector< SDField * > & getFields() const
Redundant member function.
Definition: HDFSP.h:577
vg_info
Definition: hdf-maps.h:60
HDFSPArrayMissGeoField
Definition: HDFSPArrayMissField.h:20
HDFCFUtil::print_attr
static std::string print_attr(int32, int, void *)
Print attribute values in string.
Definition: HDFCFUtil.cc:265
hdf_vgroup
Definition: hdfclass.h:218
HE2CF::open
bool open(const std::string &filename, const int sd_id, const int file_id)
opens \a filename HDF4 file.
Definition: HE2CF.cc:955
HDFFloat64
Definition: HDFFloat64.h:50
HDFCFStrField
Definition: HDFCFStrField.h:15
HDFSP::SD
This class retrieves all SDS objects and SD file attributes.
Definition: HDFSP.h:557
HDFSP::SDField
One instance of this class represents one SDS object.
Definition: HDFSP.h:345
vd_info
Definition: hdf-maps.h:50
HDFStr
Definition: HDFStr.h:51
HDFCFStr
Definition: HDFCFStr.h:44
HDFSP::File::getSPType
SPType getSPType() const
Obtain special HDF4 product type.
Definition: HDFSP.h:749
HDFSPArrayGeoField
Definition: HDFSPArrayGeoField.h:18
HDFCFUtil::escattr
static std::string escattr(std::string s)
Definition: HDFCFUtil.cc:3275
HDFInt16
Definition: HDFInt16.h:37
HDFUInt32
Definition: HDFUInt32.h:50
HDFSP::File::getSD
SD * getSD() const
Public interface to Obtain SD.
Definition: HDFSP.h:771
HDFCFStr.h
This class provides a way to map HDF4 1-D character array to DAP Str for the CF option.
HDFEOS2CFStr.h
This class provides a way to map HDFEOS2 1-D character array to DAP Str for the CF option.
HDFSP::Field::getNewName
const std::string & getNewName() const
Get the CF name(special characters replaced by underscores) of this field.
Definition: HDFSP.h:297
HDFFloat32
Definition: HDFFloat32.h:38
HDFSP::Exception::what
virtual const char * what() const
Return exception message.
Definition: HDFSP.h:109
hdf_sds
Definition: hdfclass.h:179
HDFSPArray_RealField
Definition: HDFSPArray_RealField.h:20
hdf_dim
Definition: hdfclass.h:167
TheBESKeys::TheKeys
static TheBESKeys * TheKeys()
Definition: TheBESKeys.cc:62
HE2CF::write_attribute_FillValue
bool write_attribute_FillValue(const std::string &varname, int type, float val)
Definition: HE2CF.cc:1052
HDFCFUtil::get_CF_string
static std::string get_CF_string(std::string s)
Change special characters to "_".
Definition: HDFCFUtil.cc:161
HDFSP::Field::getType
int32 getType() const
Get the data type of this field.
Definition: HDFSP.h:309
HE2CF::set_DAS
void set_DAS(libdap::DAS *das)
sets DAS pointer so that we can bulid attribute tables.
Definition: HE2CF.cc:181
HDFCFUtil::correct_scale_offset_type
static void correct_scale_offset_type(libdap::AttrTable *at)
Definition: HDFCFUtil.cc:611
hdf_gri
Definition: hdfclass.h:243
HE2CF::write_attribute
bool write_attribute(const std::string &gname, const std::string &fname, const std::string &newfname, int n_groups, int fieldtype)
Definition: HE2CF.cc:985
hdf_vdata
Definition: hdfclass.h:204
HDFByte
Definition: HDFByte.h:50
HDFSPArrayAddCVField
Definition: HDFSPArrayAddCVField.h:24
HE2CF::write_attribute_units
bool write_attribute_units(const std::string &varname, std::string units)
Definition: HE2CF.cc:1159
HDFSP::SDField::getCorrectedDimensions
const std::vector< Dimension * > & getCorrectedDimensions() const
Get the list of the corrected dimensions.
Definition: HDFSP.h:360
HDFSP::File
Definition: HDFSP.h:726
HDFSP::File::getVgattrs
const std::vector< AttrContainer * > & getVgattrs() const
Get attributes for all vgroups.
Definition: HDFSP.h:783
hdfistream_sds
Definition: hcstream.h:84
dhdferr_ddssem
Definition: dhdferr.h:77
gr_info
Definition: hdf-maps.h:55
TheBESKeys::get_value
void get_value(const std::string &s, std::string &val, bool &found)
Retrieve the value of a given key, if set.
Definition: TheBESKeys.cc:272
dhdferr_addattr
Definition: dhdferr.h:68
HDFSP::Field::getRank
int32 getRank() const
Get the dimension rank of this field.
Definition: HDFSP.h:303
hdfistream_vdata
Definition: hcstream.h:245
HDFSP::File::Prepare
void Prepare()
Definition: HDFSP.cc:4144
hdfistream_annot
Definition: hcstream.h:185
HDFEOS2CFStrField.h
This class provides a way to map HDFEOS2 character >1D array to DAP Str array for the CF option.
HDFSP::Field::getName
const std::string & getName() const
Get the name of this field.
Definition: HDFSP.h:291
HE2CF::write_attribute_coordinates
bool write_attribute_coordinates(const std::string &varname, std::string coord)
Definition: HE2CF.cc:1146
HDFSP::File::Read
static File * Read(const char *path, int32 sdid, int32 fileid)
Retrieve SDS and Vdata information from the HDF4 file.
Definition: HDFSP.cc:202
HE2CF::close
bool close()
closes the opened file.
Definition: HE2CF.cc:932
hdf_genvec
Definition: hdfclass.h:71
HDFSPArray_VDField
Definition: HDFSPArray_VDField.h:19
Error
hdf_attr
Definition: hdfclass.h:149
sds_info
Definition: hdf-maps.h:45
HDFSP::SDField::getDimensions
const std::vector< Dimension * > & getDimensions() const
Get the list of dimensions.
Definition: HDFSP.h:414
HE2CF::get_metadata
string get_metadata(const std::string &metadataname, bool &suffix_is_num, std::vector< std::string > &non_num_names, std::vector< std::string > &non_num_data)
retrieves the merged metadata.
Definition: HE2CF.cc:948
HDFSP::File::Read_Hybrid
static File * Read_Hybrid(const char *path, int32 sdid, int32 fileid)
Definition: HDFSP.cc:257
HE2CF
Definition: HE2CF.h:53
HDFSP::Exception
Definition: HDFSP.h:93
HDFSP::SDField::IsDimNoScale
bool IsDimNoScale() const
Is this field a dimension without dimension scale(or empty[no data]dimension variable)
Definition: HDFSP.h:427
HDFSP::File::Has_Dim_NoScale_Field
bool Has_Dim_NoScale_Field() const
This file has a field that is a SDS dimension but no dimension scale.
Definition: HDFSP.h:756
HDFUInt16
Definition: HDFUInt16.h:38
hdfistream_vgroup
Definition: hcstream.h:321
HDFSP::VDField
One instance of this class represents one Vdata field.
Definition: HDFSP.h:503
HDFCFUtil::correct_fvalue_type
static void correct_fvalue_type(libdap::AttrTable *at, int32 dtype)
Definition: HDFCFUtil.cc:544
HDFSP::VDField::getFieldOrder
int32 getFieldOrder() const
Get the order of this field.
Definition: HDFSP.h:512
HDFCFUtil::print_type
static std::string print_type(int32)
Print datatype in string.
Definition: HDFCFUtil.cc:386
HDFInt32
Definition: HDFInt32.h:50