bes  Updated for version 3.20.10
hdfdesc.cc
Go to the documentation of this file.
1 // This file is part of the hdf4 data handler for the OPeNDAP data server.
6 // The code includes the support of HDF-EOS2 and NASA HDF4 files that follow CF.
7 // Copyright (c) 2008-2012 The HDF Group.
8 // Author: MuQun Yang <myang6@hdfgroup.org>
9 // Author: Hyo-Kyung Lee <hyoklee@hdfgroup.org>
10 //
11 // Copyright (c) 2005 OPeNDAP, Inc.
12 // Author: James Gallagher <jgallagher@opendap.org>
13 //
14 // This is free software; you can redistribute it and/or modify it under the
15 // terms of the GNU Lesser General Public License as published by the Free
16 // Software Foundation; either version 2.1 of the License, or (at your
17 // option) any later version.
18 //
19 // This software is distributed in the hope that it will be useful, but
20 // WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
21 // or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
22 // License for more details.
23 //
24 // You should have received a copy of the GNU Lesser General Public License
25 // along with this software; if not, write to the Free Software Foundation,
26 // Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
27 //
28 // You can contact OPeNDAP, Inc. at PO Box 112, Saunderstown, RI. 02874-0112.
29 
31 // Copyright 1996, by the California Institute of Technology.
32 // ALL RIGHTS RESERVED. United States Government Sponsorship
33 // acknowledged. Any commercial use must be negotiated with the
34 // Office of Technology Transfer at the California Institute of
35 // Technology. This software may be subject to U.S. export control
36 // laws and regulations. By accepting this software, the user
37 // agrees to comply with all applicable U.S. export laws and
38 // regulations. User has the responsibility to obtain export
39 // licenses, or other export authority as may be required before
40 // exporting such information to foreign countries or providing
41 // access to foreign persons.
42 
43 // Author: Todd Karakashian, NASA/Jet Propulsion Laboratory
44 // Todd.K.Karakashian@jpl.nasa.gov
45 //
46 //
48 
49 #include "config.h"
50 #include "config_hdf.h"
51 
52 #include <cstdio>
53 #include <cassert>
54 #include <libgen.h>
55 
56 // STL includes
57 #include <string>
58 #include <fstream>
59 #include <iostream>
60 #include <sstream>
61 #include <algorithm>
62 #include <numeric>
63 #include <functional>
64 
65 
66 // Include this on linux to suppress an annoying warning about multiple
67 // definitions of MIN and MAX.
68 #ifdef HAVE_SYS_PARAM_H
69 #include <sys/param.h>
70 #endif
71 
72 #include <unistd.h>
73 #include <sys/types.h>
74 #include <dirent.h>
75 #include <iomanip>
76 #include <cerrno>
77 
78 
79 // HDF and HDFClass includes
80 #include <mfhdf.h>
81 
82 // DODS includes
83 #include <libdap/DDS.h>
84 #include <libdap/DAS.h>
85 #include <libdap/escaping.h>
86 #include <libdap/parser.h>
87 #include <libdap/InternalErr.h>
88 #include <libdap/debug.h>
89 
90 #include <BESDebug.h>
91 #include <BESLog.h>
92 
93 #include "HDF4RequestHandler.h"
94 // DODS/HDF includes for the default option only
95 #include "hcstream.h"
96 #include "hdfclass.h"
97 #include "hcerr.h"
98 #include "dhdferr.h"
99 #include "HDFArray.h"
100 #include "HDFSequence.h"
101 #include "HDFTypeFactory.h"
102 #include "HDFGrid.h"
103 #include "dodsutil.h"
104 #include "hdf-dods.h"
105 #include "hdf-maps.h"
106 
107 // DAP2 doesn't have signed char type, the signed char will be converted to int32 with this macro.
108 #define SIGNED_BYTE_TO_INT32 1
109 
110 // HDF datatype headers for both the default and the CF options
111 #include "HDFByte.h"
112 #include "HDFInt16.h"
113 #include "HDFUInt16.h"
114 #include "HDFInt32.h"
115 #include "HDFUInt32.h"
116 #include "HDFFloat32.h"
117 #include "HDFFloat64.h"
118 #include "HDFStr.h"
119 
120 // Routines that handle SDS and Vdata attributes for the HDF-EOS2 objects in a hybrid HDF-EOS2 file for the CF option
121 #include "HE2CF.h"
122 
123 // HDF4 for the CF option(EOS2 will treat as pure HDF4 objects if the HDF-EOS2 library is not configured in)
124 #include "HDFSP.h"
125 #include "HDFSPArray_RealField.h"
126 #include "HDFSPArrayGeoField.h"
127 #include "HDFSPArrayMissField.h"
128 #include "HDFSPArrayAddCVField.h"
129 #include "HDFSPArray_VDField.h"
130 #include "HDFCFStrField.h"
131 #include "HDFCFStr.h"
132 #include "HDFCFUtil.h"
133 
134 // HDF-EOS2 (including the hybrid) will be handled as HDF-EOS2 objects if the HDF-EOS2 library is configured in
135 #ifdef USE_HDFEOS2_LIB
136 #include "HDFEOS2.h"
137 #include "HDFEOS2Array_RealField.h"
138 #include "HDFEOS2ArrayGridGeoField.h"
139 #include "HDFEOS2ArraySwathGeoField.h"
140 #include "HDFEOS2ArrayMissField.h"
141 #include "HDFEOS2ArraySwathDimMapField.h"
142 #include "HDFEOS2ArraySwathGeoMultiDimMapField.h"
143 #include "HDFEOS2ArraySwathGeoDimMapExtraField.h"
144 #include "HDFEOS2CFStr.h"
145 #include "HDFEOS2CFStrField.h"
146 #include "HDFEOS2HandleType.h"
147 #endif
148 
149 
150 using namespace std;
151 
152 // Added 5/7/09; This bug (#1163) was fixed in July 2008 except for this
153 // handler. jhrg
154 #define ATTR_STRING_QUOTE_FIX
155 
// Convert any streamable value to its textual representation.
//
// @param n  value to convert; any type with an operator<< for ostream.
// @return   the value formatted with the default stream settings.
//
// Fully qualified with std:: so the helper no longer relies on the
// file-level 'using namespace std;' directive being in scope.
template < class T > std::string num2string(T n)
{
    std::ostringstream oss;
    oss << n;
    return oss.str();
}
162 
163 // Glue routines declared in hdfeos.lex
164 void hdfeos_switch_to_buffer(void *new_buffer);
165 void hdfeos_delete_buffer(void * buffer);
166 void *hdfeos_string(const char *yy_str);
167 
168 struct yy_buffer_state;
169 yy_buffer_state *hdfeos_scan_string(const char *str);
170 extern int hdfeosparse(libdap::parser_arg *arg); // defined in hdfeos.tab.c
171 
172 // Functions for the default option
173 void AddHDFAttr(DAS & das, const string & varname,
174  const vector < hdf_attr > &hav);
175 void AddHDFAttr(DAS & das, const string & varname,
176  const vector < string > &anv);
177 
178 static void build_descriptions(DDS & dds, DAS & das,
179  const string & filename);
180 static void SDS_descriptions(sds_map & map, DAS & das,
181  const string & filename);
182 static void Vdata_descriptions(vd_map & map, DAS & das,
183  const string & filename);
184 static void Vgroup_descriptions(DDS & dds, DAS & das,
185  const string & filename, sds_map & sdmap,
186  vd_map & vdmap, gr_map & grmap);
187 static void GR_descriptions(gr_map & map, DAS & das,
188  const string & filename);
189 static void FileAnnot_descriptions(DAS & das, const string & filename);
190 static vector < hdf_attr > Pals2Attrs(const vector < hdf_palette > palv);
191 static vector < hdf_attr > Dims2Attrs(const hdf_dim dim);
192 
193 void read_das(DAS & das, const string & filename);
194 void read_dds(DDS & dds, const string & filename);
195 
196 // For the CF option
197 // read_dds for HDF4 files. Some NASA non-eos2 HDF4 products are handled specifially to follow the CF conventions.
198 bool read_dds_hdfsp(DDS & dds, const string & filename,int32 sdfd, int32 fileid,HDFSP::File*h4file);
199 bool read_das_hdfsp(DAS & das, const string & filename,int32 sdfd, int32 fileid,HDFSP::File**h4filepptr);
200 
201 // read_dds for special NASA HDF-EOS2 hybrid(non-EOS2) objects
202 bool read_dds_hdfhybrid(DDS & dds, const string & filename,int32 sdfd, int32 fileid,HDFSP::File*h4file);
203 bool read_das_hdfhybrid(DAS & das, const string & filename,int32 sdfd, int32 fileid,HDFSP::File**h4filepptr);
204 
205 // Functions to read special 1-d HDF-EOS2 grid. This grid can be built up quickly.
206 //bool read_dds_special_1d_grid(DDS &dds, HDFSP::File *spf, const string & filename,int32 sdfd, int32 fileid);
207 bool read_dds_special_1d_grid(DDS &dds, HDFSP::File *spf, const string & filename,int32 sdfd,bool can_cache);
208 bool read_das_special_eos2(DAS &das,const string & filename,int32 sdid, int32 fileid,bool ecs_metadata,HDFSP::File**h4filepptr);
209 bool read_das_special_eos2_core(DAS &das, HDFSP::File *spf, const string & filename,bool ecs_metadata);
210 
211 void read_das_sds(DAS & das, const string & filename,int32 sdfd, bool ecs_metadata,HDFSP::File**h4fileptr);
212 void read_dds_sds(DDS &dds, const string & filename,int32 sdfd, HDFSP::File*h4file,bool dds_set_cache);
213 
214 void change_das_mod08_scale_offset(DAS & das, HDFSP::File *spf);
215 
216 // Functions to handle SDS fields for the CF option.
217 void read_dds_spfields(DDS &dds,const string& filename,const int sdfd,HDFSP::SDField *spsds, SPType sptype);
218 
219 // Functions to handle Vdata fields for the CF option.
220 void read_dds_spvdfields(DDS &dds,const string& filename, const int fileid,int32 vdref, int32 numrec,HDFSP::VDField *spvd);
221 
222 // Check if this is a special HDF-EOS2 file that can be handled by HDF4 directly. Using HDF4 only can improve performance.
223 int check_special_eosfile(const string&filename,string&grid_name,int32 sdfd,int32 fileid);
224 
225 
226 // The following blocks only handle HDF-EOS2 objects based on HDF-EOS2 libraries.
227 #ifdef USE_HDFEOS2_LIB
228 
229 // Parse HDF-EOS2's ECS metadata(coremetadata etc.)
230 void parse_ecs_metadata(DAS &das,const string & metaname, const string &metadata);
231 
232 // read_dds for HDF-EOS2
234 // We find some special HDF-EOS2(MOD08_M3) products that provides coordinate variables
235 // without using the dimension scales. We will handle this in a special way.
236 // So change the return value of read_dds_hdfeos2 to represent different cases
237 // 0: general non-EOS2 pure HDF4
238 // 1: HDF-EOS2 hybrid
239 // 2: MOD08_M3
240 // HDF-EOS2 but no need to use HDF-EOS2 lib: no real dimension scales but have CVs for every dimension, treat differently
241 // 3: AIRS version 6
242 // HDF-EOS2 but no need to use HDF-EOS2 lib:
// have dimension scales but don't have CVs for every dimension, also need to condense dimensions, treat differently
244 // 4. Expected AIRS level 3 or level 2
245 // HDF-EOS2 but no need to use HDF-EOS2 lib: Have dimension scales for all dimensions
246 // 5. MERRA
247 // Special handling for MERRA file
248 int read_dds_hdfeos2(DDS & dds, const string & filename,int32 sdfd,int32 fileid, int32 gridfd, int32 swathfd,HDFSP::File*h4file,HDFEOS2::File*eosfile);
249 
// read das for HDF-EOS2
251 int read_das_hdfeos2(DAS & das, const string & filename,int32 sdfd,int32 fileid, int32 gridfd, int32 swathfd,bool ecs_metadata,HDFSP::File**h4filepptr,HDFEOS2::File**eosfilepptr);
252 
253 
254 // read_dds for one grid or swath
// Build the DDS entries for one HDF-EOS2 Grid or Swath dataset.
//
// Every data field (and, for a swath, every geolocation field) of the dataset
// is mapped to a DAP array/string variable and added to 'dds'. The concrete
// array class chosen depends on the field type (real/lat-lon/missing-Z),
// whether a swath dimension map is in use, and whether a separate MODIS
// geolocation file is available.
//
// dds           - DDS being populated
// filename      - full path of the HDF4/HDF-EOS2 file
// dataset       - the grid or swath dataset object
// grid_or_swath - 0: grid, 1: swath
// ownll         - grid supplies its own lat/lon (vs. one shared pair)
// sotype        - scale/offset rule of the product (MODIS variants etc.)
// multi_dmap    - swath uses the multi-dimension-map handling
// sdfd/gridfd/swathfd - already-opened SD, grid and swath file identifiers
// (the H-level fileid parameter is unused)
//
// Throws InternalErr on unsupported types or inconsistent field metadata.
void read_dds_hdfeos2_grid_swath(DDS &dds, const string&filename, HDFEOS2::Dataset *dataset, int grid_or_swath,bool ownll, SOType sotype,bool multi_dmap,
    int32 sdfd, int32 /*fileid //unused SBL 2/7/20 */, int32 gridfd,int32 swathfd)
{

    BESDEBUG("h4","Coming to read_dds_hdfeos2_grid_swath "<<endl);
    // grid_or_swath - 0: grid, 1: swath
    if(grid_or_swath < 0 || grid_or_swath > 1)
        throw InternalErr(__FILE__, __LINE__, "The current type should be either grid or swath");

    // Declare dim. map entries. The definition of dimmap_entry can be found at HDFCFUtil.h.
    vector<struct dimmap_entry> dimmaps;

    // The extra dim map file name (lat/lon of a swath with dim. map can be found in a separate HDF file).
    string modis_geofilename="";
    bool geofile_has_dimmap = false;

    // 1. Obtain dimension map info and store the info. to dimmaps.
    // 2. Check if a MODIS swath geo-location HDF-EOS2 file exists for the dimension map case of MODIS Swath.
    if(grid_or_swath == 1)
        HDFCFUtil::obtain_dimmap_info(filename,dataset,dimmaps,modis_geofilename,geofile_has_dimmap);

    // Gather all fields of this dataset: the data fields, plus the
    // geolocation fields when the dataset is a swath.
    const vector<HDFEOS2::Field*>& fields = (dataset)->getDataFields();
    vector<HDFEOS2::Field*> all_fields = fields;
    vector<HDFEOS2::Field*>::const_iterator it_f;

    if(1 == grid_or_swath) {
        HDFEOS2::SwathDataset *sw = static_cast<HDFEOS2::SwathDataset *>(dataset);
        const vector<HDFEOS2::Field*>geofields = sw->getGeoFields();
        for (it_f = geofields.begin(); it_f != geofields.end(); it_f++)
            all_fields.push_back(*it_f);
    }

    // Map each field to a DAP variable.
    for(it_f = all_fields.begin(); it_f != all_fields.end(); it_f++)
    {
        BESDEBUG("h4","New field Name " <<(*it_f)->getNewName()<<endl);

        BaseType *bt=NULL;

        // Whether the field is a real field, lat/lon field or missing Z-dimension field.
        int fieldtype = (*it_f)->getFieldType();

        // Check if the datatype needs to be changed. This is for MODIS data that needs to apply scale and offset.
        // ctype_field_namelist is assigned in the function read_das_hdfeos2.
        bool changedtype = false;
        for (vector<string>::const_iterator i = ctype_field_namelist.begin(); i != ctype_field_namelist.end(); ++i){
            if ((*i) == (*it_f)->getNewName()){
                changedtype = true;
                break;
            }
        }

        // Create the DAP prototype BaseType matching the HDF4 datatype.
        // HANDLE_CASE2 promotes a *data* field (fieldtype==0) to Float32 when
        // the scale/offset rule changed its type; HANDLE_CASE maps directly.
        switch((*it_f)->getType())
        {

#define HANDLE_CASE2(tid, type) \
    case tid: \
        if(true == changedtype && fieldtype==0) \
            bt = new (HDFFloat32) ((*it_f)->getNewName(), (dataset)->getName()); \
        else \
            bt = new (type)((*it_f)->getNewName(), (dataset)->getName()); \
        break;

#define HANDLE_CASE(tid, type)\
    case tid: \
        bt = new (type)((*it_f)->getNewName(), (dataset)->getName()); \
        break;
        HANDLE_CASE(DFNT_FLOAT32, HDFFloat32);
        HANDLE_CASE(DFNT_FLOAT64, HDFFloat64);
        HANDLE_CASE(DFNT_CHAR8,HDFStr);
#ifndef SIGNED_BYTE_TO_INT32
        HANDLE_CASE2(DFNT_INT8, HDFByte);
#else
        // DAP2 has no signed char; promote INT8 to Int32 (see SIGNED_BYTE_TO_INT32).
        HANDLE_CASE2(DFNT_INT8,HDFInt32);
#endif
        HANDLE_CASE2(DFNT_UINT8, HDFByte);
        HANDLE_CASE2(DFNT_INT16, HDFInt16);
        HANDLE_CASE2(DFNT_UINT16,HDFUInt16);
        HANDLE_CASE2(DFNT_INT32, HDFInt32);
        HANDLE_CASE2(DFNT_UINT32, HDFUInt32);
        HANDLE_CASE2(DFNT_UCHAR8, HDFByte);
        default:
            throw InternalErr(__FILE__,__LINE__,"unsupported data type.");
#undef HANDLE_CASE
#undef HANDLE_CASE2
        }

        if(bt)
        {
            const vector<HDFEOS2::Dimension*>& dims= (*it_f)->getCorrectedDimensions();
            vector<HDFEOS2::Dimension*>::const_iterator it_d;

            // Char array maps to DAP string.
            if(DFNT_CHAR == (*it_f)->getType()) {

                // Rank > 1: an N-dim char array becomes an (N-1)-dim string array;
                // the last (fastest-varying) dimension becomes the string length.
                if((*it_f)->getRank() >1) {

                    HDFEOS2CFStrField * ar = NULL;

                    try {

                        ar = new HDFEOS2CFStrField(
                            (*it_f)->getRank() -1,
                            (grid_or_swath ==0)?gridfd:swathfd,
                            filename,
                            (dataset)->getName(),
                            (*it_f)->getName(),
                            grid_or_swath,
                            (*it_f)->getNewName(),
                            bt);
                    }
                    catch(...) {
                        delete bt;
                        throw InternalErr(__FILE__,__LINE__,"Unable to allocate the HDFCFStr instance.");
                    }
                    // Append all dimensions except the last one (folded into the string).
                    for(it_d = dims.begin(); it_d != dims.begin()+dims.size()-1; it_d++){
                        ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
                    }

                    // add_var copies the variable, so the local objects are freed here.
                    dds.add_var(ar);
                    delete bt;
                    if(ar != NULL)
                        delete ar;

                }

                // Rank 1: the whole char array becomes one scalar DAP string.
                else {
                    HDFEOS2CFStr * sca_str = NULL;
                    try {

                        sca_str = new HDFEOS2CFStr(
                            (grid_or_swath ==0)?gridfd:swathfd,
                            filename,
                            (dataset)->getName(),
                            (*it_f)->getName(),
                            (*it_f)->getNewName(),
                            grid_or_swath);
                    }
                    catch(...) {
                        delete bt;
                        throw InternalErr(__FILE__,__LINE__,"Unable to allocate the HDFCFStr instance.");
                    }
                    dds.add_var(sca_str);
                    delete bt;
                    delete sca_str;
                }

            }

            // For general variables and non-lat/lon existing coordinate variables.
            else if(fieldtype == 0 || fieldtype == 3 || fieldtype == 5) {

                // grid
                if(grid_or_swath==0){
                    HDFEOS2Array_RealField *ar = NULL;
                    ar = new HDFEOS2Array_RealField(
                        (*it_f)->getRank(),
                        filename,false,sdfd,gridfd,
                        (dataset)->getName(), "", (*it_f)->getName(),
                        sotype,
                        (*it_f)->getNewName(), bt);
                    for(it_d = dims.begin(); it_d != dims.end(); it_d++)
                        ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
                    dds.add_var(ar);
                    delete bt;
                    delete ar;
                }
                // swath
                else if(grid_or_swath==1){

                    string tempfieldname = (*it_f)->getName();

                    // This swath uses the dimension map, but not the multi-dim. map we can handle.
                    if((*it_f)->UseDimMap() && false == multi_dmap) {

                        // We also find that a separate geolocation file exists.
                        if (!modis_geofilename.empty()) {

                            // This field can be found in the geo-location file. The field name may be corrected.
                            if (true == HDFCFUtil::is_modis_dimmap_nonll_field(tempfieldname)) {

                                if(false == geofile_has_dimmap) {

                                    // Here we have to use HDFEOS2Array_RealField since the field may
                                    // need to apply the scale and offset equation.
                                    // MODIS geolocation swath name is always MODIS_Swath_Type_GEO.
                                    // We can improve the handling of this by not hard-coding the swath name
                                    // in the future. KY 2012-08-16
                                    HDFEOS2Array_RealField *ar = NULL;
                                    ar = new HDFEOS2Array_RealField(
                                        (*it_f)->getRank(),
                                        modis_geofilename,
                                        true,
                                        sdfd,
                                        swathfd,
                                        "",
                                        "MODIS_Swath_Type_GEO",
                                        tempfieldname,
                                        sotype,
                                        (*it_f)->getNewName(),
                                        bt);

                                    for(it_d = dims.begin(); it_d != dims.end(); it_d++)
                                        ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
                                    dds.add_var(ar);
                                    delete bt;
                                    delete ar;
                                }
                                else {// Use dimension maps in the dimension map file

                                    HDFEOS2ArraySwathDimMapField * ar = NULL;

                                    // SET dimmaps to empty.
                                    // This is very key since we are using the geolocation file for the new information.
                                    // The dimension map info. will be obtained when the data is read. KY 2013-03-13

                                    dimmaps.clear();
                                    ar = new HDFEOS2ArraySwathDimMapField(
                                        (*it_f)->getRank(),
                                        modis_geofilename,
                                        true,
                                        sdfd,
                                        swathfd,
                                        "",
                                        "MODIS_Swath_Type_GEO",
                                        tempfieldname,
                                        dimmaps,
                                        sotype,
                                        (*it_f)->getNewName(),
                                        bt);
                                    for(it_d = dims.begin(); it_d != dims.end(); it_d++)
                                        ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
                                    dds.add_var(ar);
                                    delete bt;
                                    delete ar;
                                }
                            }
                            else { // This field cannot be found in the dimension map file.

                                HDFEOS2ArraySwathDimMapField * ar = NULL;

                                // Even if the dimension map file exists, it only applies to some
                                // specific data fields; if this field doesn't belong to these fields,
                                // we should still apply the dimension map rule to these fields.

                                ar = new HDFEOS2ArraySwathDimMapField(
                                    (*it_f)->getRank(),
                                    filename,
                                    false,
                                    sdfd,
                                    swathfd,
                                    "",
                                    (dataset)->getName(),
                                    tempfieldname,
                                    dimmaps,
                                    sotype,
                                    (*it_f)->getNewName(),
                                    bt);
                                for(it_d = dims.begin(); it_d != dims.end(); it_d++)
                                    ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
                                dds.add_var(ar);
                                delete bt;
                                delete ar;


                            }
                        }
                        else {// No dimension map file
                            HDFEOS2ArraySwathDimMapField * ar = NULL;
                            ar = new HDFEOS2ArraySwathDimMapField(
                                (*it_f)->getRank(),
                                filename,
                                false,
                                sdfd,
                                swathfd,
                                "",
                                (dataset)->getName(),
                                tempfieldname,
                                dimmaps,
                                sotype,
                                (*it_f)->getNewName(),
                                bt);
                            for(it_d = dims.begin(); it_d != dims.end(); it_d++)
                                ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
                            dds.add_var(ar);
                            delete bt;
                            delete ar;
                        }
                    }
                    else { // No dimension map

                        HDFEOS2Array_RealField * ar = NULL;
                        ar = new HDFEOS2Array_RealField(
                            (*it_f)->getRank(),
                            filename,
                            false,
                            sdfd,
                            swathfd,
                            "",
                            (dataset)->getName(),
                            tempfieldname,
                            sotype,
                            (*it_f)->getNewName(),
                            bt);
                        for(it_d = dims.begin(); it_d != dims.end(); it_d++)
                            ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
                        dds.add_var(ar);
                        delete bt;
                        delete ar;
                    }
                }
                else {
                    delete bt;
                    throw InternalErr(__FILE__, __LINE__, "The current type should be either grid or swath");
                }
            }

            // For latitude (fieldtype 1) and longitude (fieldtype 2).
            else if(fieldtype == 1 || fieldtype == 2) {

                // For grid
                if(grid_or_swath==0) {

                    HDFEOS2ArrayGridGeoField *ar = NULL;
                    //int fieldtype = (*it_f)->getFieldType();
                    bool ydimmajor = (*it_f)->getYDimMajor();
                    bool condenseddim = (*it_f)->getCondensedDim();
                    bool speciallon = (*it_f)->getSpecialLon();
                    int specialformat = (*it_f)->getSpecialLLFormat();

                    ar = new HDFEOS2ArrayGridGeoField(
                        (*it_f)->getRank(),
                        fieldtype,
                        ownll,
                        ydimmajor,
                        condenseddim,
                        speciallon,
                        specialformat,
                        /*fieldcache,*/
                        filename,
                        gridfd,
                        (dataset)->getName(),
                        (*it_f)->getName(),
                        (*it_f)->getNewName(),
                        bt);

                    for(it_d = dims.begin(); it_d != dims.end(); it_d++)
                        ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
                    dds.add_var(ar);
                    delete bt;
                    delete ar;
                }

                // We encounter a very special MODIS case (MOD/MYD ATML2 files):
                // latitude and longitude fields are located under data fields.
                // So include this case. KY 2010-7-12
                // We also encounter another special case (MOD11_L2.A2012248.2355.041.2012249083024.hdf):
                // the latitude/longitude with dimension map is under the "data fields".
                // So we have to consider this. KY 2012-09-24

                else if(grid_or_swath ==1) {

                    if(true == multi_dmap) {
                        if((*it_f)->getRank() !=2)
                            throw InternalErr(__FILE__, __LINE__, "For the multi-dimmap case, the field rank must be 2.");
                        int dim0size = (dims[0])->getSize();
                        int dim1size = (dims[1])->getSize();
                        int dim0offset = (*it_f)->getLLDim0Offset();
                        int dim1offset = (*it_f)->getLLDim1Offset();
                        int dim0inc = (*it_f)->getLLDim0Inc();
                        int dim1inc = (*it_f)->getLLDim1Inc();
                        string fieldname;
                        if(fieldtype == 1)
                            fieldname = "Latitude";
                        else
                            fieldname = "Longitude";
#if 0
cerr<<"hdfdesc: newfieldname is "<<(*it_f)->getNewName() <<endl;
cerr<<"hdfdesc: dim0size "<<dim0size <<endl;
cerr<<"hdfdesc: dim1size "<<dim1size <<endl;
cerr<<"hdfdesc: dim0offset "<<dim0offset <<endl;
cerr<<"hdfdesc: dim1offset "<<dim1offset <<endl;
cerr<<"hdfdesc: dim0inc "<<dim0inc <<endl;
cerr<<"hdfdesc: dim1inc "<<dim1inc <<endl;
#endif

                        HDFEOS2ArraySwathGeoMultiDimMapField * ar = NULL;

                        ar = new HDFEOS2ArraySwathGeoMultiDimMapField(
                            (*it_f)->getRank(),
                            filename,
                            swathfd,
                            (dataset)->getName(),
                            fieldname,
                            dim0size,
                            dim0offset,
                            dim0inc,
                            dim1size,
                            dim1offset,
                            dim1inc,
                            (*it_f)->getNewName(),
                            bt);

                        for(it_d = dims.begin(); it_d != dims.end(); it_d++)
                            ar->append_dim((*it_d)->getSize(), (*it_d)->getName());

                        dds.add_var(ar);
                        delete bt;
                        delete ar;
                    }
                    else {

                        // Use Swath dimension map
                        if((*it_f)->UseDimMap()) {

                            // Have an extra HDF-EOS file for latitude and longitude
                            if(!modis_geofilename.empty()) {

                                if (false == geofile_has_dimmap) {
                                    HDFEOS2ArraySwathGeoDimMapExtraField *ar = NULL;
                                    ar = new HDFEOS2ArraySwathGeoDimMapExtraField(
                                        (*it_f)->getRank(),
                                        modis_geofilename,
                                        (*it_f)->getName(),
                                        (*it_f)->getNewName(),
                                        bt);
                                    for(it_d = dims.begin(); it_d != dims.end(); it_d++)
                                        ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
                                    dds.add_var(ar);
                                    delete bt;
                                    delete ar;
                                }
                                else {

                                    HDFEOS2ArraySwathDimMapField * ar = NULL;

                                    // SET dimmaps to empty.
                                    // This is essential since we are using the geolocation file for the new information.
                                    // The dimension map info. will be obtained when the data is read. KY 2013-03-13
                                    dimmaps.clear();
                                    ar = new HDFEOS2ArraySwathDimMapField(
                                        (*it_f)->getRank(),
                                        modis_geofilename,
                                        true,
                                        sdfd,
                                        swathfd,
                                        "",
                                        "MODIS_Swath_Type_GEO",
                                        (*it_f)->getName(),
                                        dimmaps,
                                        sotype,
                                        (*it_f)->getNewName(),
                                        bt);
                                    for(it_d = dims.begin(); it_d != dims.end(); it_d++)
                                        ar->append_dim((*it_d)->getSize(), (*it_d)->getName());

                                    dds.add_var(ar);
                                    delete bt;
                                    delete ar;
                                }
                            }
                            // Will interpolate by the handler
                            else {

                                HDFEOS2ArraySwathDimMapField * ar = NULL;
                                ar = new HDFEOS2ArraySwathDimMapField(
                                    (*it_f)->getRank(),
                                    filename,
                                    false,
                                    sdfd,
                                    swathfd,
                                    "",
                                    (dataset)->getName(),
                                    (*it_f)->getName(),
                                    dimmaps,
                                    sotype,
                                    (*it_f)->getNewName(),
                                    bt);
                                for(it_d = dims.begin(); it_d != dims.end(); it_d++)
                                    ar->append_dim((*it_d)->getSize(), (*it_d)->getName());

                                dds.add_var(ar);
                                delete bt;
                                delete ar;
                            }
                        }
                        else {// No Dimension map

                            HDFEOS2ArraySwathGeoField * ar = NULL;
                            ar = new HDFEOS2ArraySwathGeoField(
                                (*it_f)->getRank(),
                                filename,
                                swathfd,
                                (dataset)->getName(),
                                (*it_f)->getName(),
                                (*it_f)->getNewName(),
                                bt);

                            for(it_d = dims.begin(); it_d != dims.end(); it_d++)
                                ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
                            dds.add_var(ar);
                            delete bt;
                            delete ar;
                        }
                    }
                }
                else {
                    delete bt;
                    throw InternalErr(__FILE__, __LINE__, "The current type should be either grid or swath");
                }

            }

            // Missing Z dimensional field: synthesize a 1-D coordinate variable.
            else if(fieldtype == 4) {

                if((*it_f)->getRank()!=1){
                    delete bt;
                    throw InternalErr(__FILE__, __LINE__, "The rank of missing Z dimension field must be 1");
                }

                int nelem = ((*it_f)->getCorrectedDimensions()[0])->getSize();
                HDFEOS2ArrayMissGeoField *ar = NULL;
                ar = new HDFEOS2ArrayMissGeoField(
                    (*it_f)->getRank(),
                    nelem,
                    (*it_f)->getNewName(),
                    bt);

                for(it_d = dims.begin(); it_d != dims.end(); it_d++)
                    ar->append_dim((*it_d)->getSize(), (*it_d)->getName());

                dds.add_var(ar);
                delete bt;
                delete ar;
            }
            else {
                delete bt;
                throw InternalErr(__FILE__, __LINE__, "Encounter unsupported datatype or The field type should be between 0 and 5. ");
            }

        }
    }

}
806 
807 // Build DDS for HDF-EOS2 only.
808 //bool read_dds_hdfeos2(DDS & dds, const string & filename)
// Build DDS for HDF-EOS2 only.
//
// Return-value protocol (consumed by the caller to decide further handling):
//   0 - special HDF-EOS2 file handled without the HDF-EOS2 structure (f is NULL)
//   1 - regular HDF-EOS2 file, DDS fully built here
//   2,3,4 - special-case values passed through from check_special_eosfile
//           (MOD08_M3, AIRS version 6, AIRS-like with full dimension scales)
//   5 - MERRA file: treat as plain HDF4 SDS, nothing built here
int read_dds_hdfeos2(DDS & dds, const string & filename,int32 sdfd,int32 fileid, int32 gridfd, int32 swathfd,HDFSP::File*spf,HDFEOS2::File*f)
{

    BESDEBUG("h4","Coming to read_dds_hdfeos2 "<<endl);

    // Set DDS dataset.
    dds.set_dataset_name(basename(filename));

    // There are some HDF-EOS2 files (MERRA) that should be treated
    // exactly like HDF4 SDS files. We don't need to use HDF-EOS2 APIs to
    // retrieve any information. In fact, treating them as HDF-EOS2 files
    // will cause confusion and we may get wrong information.
    // A quick fix is to check if the file name starts with MERRA. KY 2011-3-4
    // Find MERRA data, return 5, then just use HDF4 SDS code.
    if((basename(filename).size() >=5) && ((basename(filename)).compare(0,5,"MERRA")==0))
        return 5;
    //return false;

#if 0
    string check_enable_spec_eos_key="H4.EnableSpecialEOS";
    bool turn_on_enable_spec_eos_key= false;
    turn_on_enable_spec_eos_key = HDFCFUtil::check_beskeys(check_enable_spec_eos_key);
#endif

    // When the special-EOS shortcut is enabled, probe whether this file can be
    // served through the faster pure-HDF4 path.
    //if(true == turn_on_enable_spec_eos_key) {
    if(true == HDF4RequestHandler::get_enable_special_eos()) {

        string grid_name;
        int ret_val = check_special_eosfile(filename,grid_name,sdfd,fileid);

        // These are AIRS-like products that use HDF4 SDS dimension scales perfectly.
        // We originally thought that the AIRS version 6 products fall into this category, so we added this case.
        // However, the current AIRS version 6 products still miss some dimension scales. So currently we don't
        // find any products that support this case. Leave it for future use. KY 2015-06-03
        if(4== ret_val)
            return ret_val;


        // Case 2 or 3 are MOD08M3 or AIRS version 6.
        if(2 == ret_val || 3 == ret_val) {

            try {
                read_dds_special_1d_grid(dds,spf,filename,sdfd,false);
                //read_dds_special_1d_grid(dds,spf,filename,sdfd,fileid);
            } catch (...)
            {
                //delete spf;
                throw;
            }
            return ret_val;
        }

    }

    // Special HDF-EOS2 file that doesn't use the HDF-EOS2 file structure, so
    // the file pointer passed from DAS is NULL. Return 0.
    if( f == NULL)
        return 0;

    // Some grids have one shared lat/lon pair. For this case, "onelatlon" is true.
    // Other grids have their individual lat/lon pairs. We have to handle them differently.
    // ownll is the flag to distinguish "one lat/lon pair" and multiple lat/lon pairs.
    const vector<HDFEOS2::GridDataset *>& grids = f->getGrids();
    bool ownll = false;
    bool onelatlon = f->getOneLatLon();

    // Set scale and offset type to the DEFAULT one.
    SOType sotype = DEFAULT_CF_EQU;

    // Iterate all the grids of this file and map them to DAP DDS.
    vector<HDFEOS2::GridDataset *>::const_iterator it_g;
    for(it_g = grids.begin(); it_g != grids.end(); it_g++){

        // Check if this grid provides its own lat/lon.
        ownll = onelatlon?onelatlon:(*it_g)->getLatLonFlag();

        // Obtain scale and offset type. This is for MODIS products that use non-CF scale/offset rules.
        sotype = (*it_g)->getScaleType();
        try {
            read_dds_hdfeos2_grid_swath(
                dds, filename, static_cast<HDFEOS2::Dataset*>(*it_g), 0,ownll,sotype,false,sdfd,fileid,gridfd,swathfd);
            // Add 1-D CF grid projection required coordinate variables.
            // Currently only supports sinusoidal projection.
            HDFCFUtil::add_cf_grid_cvs(dds,*it_g);
        }
        catch(...) {
            // delete f;
            throw;
        }
    }

    // Obtain the multi dimmap flag.
    bool multi_dmap = f->getMultiDimMaps();


    // Iterate all the swaths of this file and map them to DAP DDS.
    const vector<HDFEOS2::SwathDataset *>& swaths= f->getSwaths();
    vector<HDFEOS2::SwathDataset *>::const_iterator it_s;
    for(it_s = swaths.begin(); it_s != swaths.end(); it_s++) {

        // Obtain scale and offset type. This is for MODIS products that use non-CF scale/offset rules.
        sotype = (*it_s)->getScaleType();
        try {
            // No global lat/lon for multiple swaths.
            read_dds_hdfeos2_grid_swath(
                dds, filename, static_cast<HDFEOS2::Dataset*>(*it_s), 1,false,sotype,multi_dmap,sdfd,fileid,gridfd,swathfd);
        }
        catch(...) {
            //delete f;
            throw;
        }
    }

    // Clear the field name list of which the datatype is changed. KY 2012-8-1
    // ctype_field_namelist is a global vector (see HDFEOS2HandleType.h for more description).
    // Since the handler program is a continuously running service, the values of this global vector may
    // change from one file to another. So clear this vector each time when mapping DDS is over.
    ctype_field_namelist.clear();

    return 1;
}
930 
931 
932 // The wrapper of building DDS of non-EOS fields and attributes in a hybrid HDF-EOS2 file.
933 //bool read_dds_hdfhybrid(DDS & dds, const string & filename,int32 sdfd, int32 fileid,int32 gridfd,int32 swathfd)
934 bool read_dds_hdfhybrid(DDS & dds, const string & filename,int32 sdfd, int32 fileid,HDFSP::File*f)
935 
936 {
937 
938  BESDEBUG("h4","Coming to read_dds_hdfhybrid "<<endl);
939 
940  // Set DDS dataset.
941  dds.set_dataset_name(basename(filename));
942 
943  // Obtain non-EOS SDS fields.
944  const vector<HDFSP::SDField *>& spsds = f->getSD()->getFields();
945 
946  // Read SDS
947  vector<HDFSP::SDField *>::const_iterator it_g;
948  for(it_g = spsds.begin(); it_g != spsds.end(); it_g++){
949  try {
950  read_dds_spfields(dds,filename,sdfd,(*it_g),f->getSPType());
951  }
952  catch(...) {
953  throw;
954  }
955  }
956 
957  // Read Vdata fields.
958  // To speed up the performance for CERES data, we turn off some CERES vdata fields.
959 
960  // Many MODIS and MISR products use Vdata intensively. To make the output CF compliant, we map
961  // each vdata field to a DAP array. However, this may cause the generation of many DAP fields. So
962  // we use the BES keys for users to turn on/off as they choose. By default, the key is turned on. KY 2012-6-26
963 
964 #if 0
965  string check_hybrid_vdata_key="H4.EnableHybridVdata";
966  bool turn_on_hybrid_vdata_key = false;
967  turn_on_hybrid_vdata_key = HDFCFUtil::check_beskeys(check_hybrid_vdata_key);
968 #endif
969 
970  //if( true == turn_on_hybrid_vdata_key) {
971  if( true == HDF4RequestHandler::get_enable_hybrid_vdata()) {
972  for(vector<HDFSP::VDATA *>::const_iterator i = f->getVDATAs().begin(); i!=f->getVDATAs().end();i++) {
973  if(false == (*i)->getTreatAsAttrFlag()){
974  for(vector<HDFSP::VDField *>::const_iterator j=(*i)->getFields().begin();j!=(*i)->getFields().end();j++) {
975  try {
976  read_dds_spvdfields(dds,filename,fileid, (*i)->getObjRef(),(*j)->getNumRec(),(*j));
977  }
978  catch(...) {
979  throw;
980  }
981  }
982  }
983  }
984  }
985 
986  return true;
987 }
988 
989 
990 // Build DAS for non-EOS objects in a hybrid HDF-EOS2 file.
991 bool read_das_hdfhybrid(DAS & das, const string & filename,int32 sdfd, int32 fileid,HDFSP::File**fpptr)
992 {
993 
994  BESDEBUG("h4","Coming to read_das_hdfhybrid "<<endl);
995  // Declare a non-EOS file pointer
996  HDFSP::File *f = NULL;
997  try {
998  // Read non-EOS objects in a hybrid HDF-EOS2 file.
999  f = HDFSP::File::Read_Hybrid(filename.c_str(), sdfd,fileid);
1000  }
1001  catch (HDFSP::Exception &e)
1002  {
1003  if(f!=NULL)
1004  delete f;
1005  throw InternalErr(e.what());
1006  }
1007 
1008  // Remember the file pointer
1009  *fpptr = f;
1010 
1011 #if 0
1012  string check_scale_offset_type_key = "H4.EnableCheckScaleOffsetType";
1013  bool turn_on_enable_check_scale_offset_key= false;
1014  turn_on_enable_check_scale_offset_key = HDFCFUtil::check_beskeys(check_scale_offset_type_key);
1015 #endif
1016 
1017  // First Added non-HDFEOS2 SDS attributes.
1018  const vector<HDFSP::SDField *>& spsds = f->getSD()->getFields();
1019  vector<HDFSP::SDField *>::const_iterator it_g;
1020  for(it_g = spsds.begin(); it_g != spsds.end(); it_g++){
1021 
1022  // Use CF field name as the DAS table name.
1023  AttrTable *at = das.get_table((*it_g)->getNewName());
1024  if (!at)
1025  at = das.add_table((*it_g)->getNewName(), new AttrTable);
1026 
1027  // Some fields have "long_name" attributes,so we have to use this attribute rather than creating our own "long_name"
1028  bool long_name_flag = false;
1029 
1030  for(vector<HDFSP::Attribute *>::const_iterator i=(*it_g)->getAttributes().begin();i!=(*it_g)->getAttributes().end();i++) {
1031 
1032  if((*i)->getName() == "long_name") {
1033  long_name_flag = true;
1034  break;
1035  }
1036  }
1037 
1038  if(false == long_name_flag)
1039  at->append_attr("long_name", "String", (*it_g)->getName());
1040 
1041  // Map all attributes to DAP DAS.
1042  for(vector<HDFSP::Attribute *>::const_iterator i=(*it_g)->getAttributes().begin();i!=(*it_g)->getAttributes().end();i++) {
1043 
1044  // Handle string first.
1045  if((*i)->getType()==DFNT_UCHAR || (*i)->getType() == DFNT_CHAR){
1046 
1047  // Questionable use of string. KY 2014-02-12
1048  string tempstring2((*i)->getValue().begin(),(*i)->getValue().end());
1049  string tempfinalstr= string(tempstring2.c_str());
1050 
1051  // We want to escape the possible special characters except the fullpath attribute.
1052  // The fullpath is only added for some CERES and MERRA data. People use fullpath to keep their
1053  // original names even their original name includes special characters. KY 2014-02-12
1054  at->append_attr((*i)->getNewName(), "String" , ((*i)->getNewName()=="fullpath")?tempfinalstr:HDFCFUtil::escattr(tempfinalstr));
1055  }
1056  else {
1057  for (int loc=0; loc < (*i)->getCount() ; loc++) {
1058  string print_rep = HDFCFUtil::print_attr((*i)->getType(), loc, (void*) &((*i)->getValue()[0]));
1059  at->append_attr((*i)->getNewName(), HDFCFUtil::print_type((*i)->getType()), print_rep);
1060  }
1061  }
1062  }
1063 
1064  // Check if having _FillValue. If having _FillValue, compare the datatype of _FillValue
1065  // with the variable datatype. Correct the fillvalue datatype if necessary.
1066  if(at != NULL) {
1067  int32 var_type = (*it_g)->getType();
1068  try {
1069  HDFCFUtil::correct_fvalue_type(at,var_type);
1070  }
1071  catch(...) {
1072  //delete f;
1073  throw;
1074  }
1075  }
1076 
1077  // If H4.EnableCheckScaleOffsetType BES key is true,
1078  // if yes, check if having scale_factor and add_offset attributes;
1079  // if yes, check if scale_factor and add_offset attribute types are the same;
1080  // if no, make add_offset's datatype be the same as the datatype of scale_factor.
1081  // (CF requires the type of scale_factor and add_offset the same).
1082  //if (true == turn_on_enable_check_scale_offset_key && at !=NULL)
1083  if (true == HDF4RequestHandler::get_enable_check_scale_offset_type() && at !=NULL)
1085 
1086  }
1087 
1088  // Handle vdata attributes.
1089  try {
1090  HDFCFUtil::handle_vdata_attrs_with_desc_key(f,das);
1091  }
1092  catch(...) {
1093  throw;
1094  }
1095 
1096  return true;
1097 
1098 }
1099 
1102 void read_dds_use_eos2lib(DDS & dds, const string & filename,int32 sdfd,int32 fileid, int32 gridfd, int32 swathfd,HDFSP::File*h4file,HDFEOS2::File*eosfile)
1103 {
1104 
1105  BESDEBUG("h4","Coming to read_dds_use_eos2lib" <<endl);
1106 
1107  int ret_value = read_dds_hdfeos2(dds,filename,sdfd,fileid,gridfd,swathfd,h4file,eosfile);
1108 
1109  BESDEBUG("h4","ret_value of read_dds_hdfeos2 is "<<ret_value<<endl);
1110 
1111  // read_dds_hdfeos2 return value description:
1112  // 0: general non-EOS2 pure HDF4
1113  // 1: HDF-EOS2 hybrid
1114  // 2: MOD08_M3
1115  // HDF-EOS2 but no need to use HDF-EOS2 lib: no real dimension scales but have CVs for every dimension, treat differently
1116  // 3: AIRS version 6
1117  // HDF-EOS2 but no need to use HDF-EOS2 lib:
1118  // have dimension scales but don’t have CVs for every dimension, also need to condense dimensions, treat differently
1119  // 4. Ideal(Expected) AIRS version 6(No real products yet)
1120  // HDF-EOS2 but no need to use HDF-EOS2 lib: Have dimension scales for all dimensions
1121  // 5. MERRA
1122  // Special handling for MERRA file
1123 
1124 
1125  // Treat MERRA and non-HDFEOS2 HDF4 products as pure HDF4 objects
1126  // For Ideal AIRS version 6 products, we temporarily handle them in a generic HDF4 way.
1127  if (0 == ret_value || 5 == ret_value || 4 == ret_value ) {
1128  if(true == read_dds_hdfsp(dds, filename,sdfd,fileid,h4file))
1129  return;
1130  }
1131  // Special handling
1132  else if ( 1 == ret_value ) {
1133 
1134  // Map non-EOS2 objects to DDS
1135  if(true ==read_dds_hdfhybrid(dds,filename,sdfd,fileid,h4file))
1136  return;
1137  }
1138  else {// ret_value = 2 and 3 are handled already in the read_dds_hdfeos2 calls. Just return.
1139  return;
1140  }
1141 
1142 // leave this code block for performance comparison.
1143 #if 0
1144  // first map HDF-EOS2 objects to DDS
1145  if(true == read_dds_hdfeos2(dds, filename)){
1146 
1147  // Map non-EOS2 objects to DDS
1148  if(true == read_dds_hdfhybrid(dds,filename))
1149  return;
1150  }
1151 
1152  // Map HDF4 objects in pure HDF4 files to DDS
1153  if(read_dds_hdfsp(dds, filename)){
1154  return;
1155  }
1156 #endif
1157 
1158  // Call the default mapping of HDF4 to DDS. It should never reach here.
1159  // We add this line to ensure the HDF4 objects mapped to DDS even if the above routines return false.
1160  read_dds(dds, filename);
1161 
1162 }
1163 
1164 // Map other HDF global attributes, this routine must be called after all ECS metadata are handled.
// Map the remaining (non-ECS) HDF global attributes to DAS.
// Thin delegation to HE2CF::set_non_ecsmetadata_attrs(); kept as a named step so
// read_das_hdfeos2 reads as a sequence of mapping phases. Must be called only
// after all ECS metadata have been handled.
void write_non_ecsmetadata_attrs(HE2CF& cf) {

    cf.set_non_ecsmetadata_attrs();

}
1170 
1171 // Map HDF-EOS2's ECS attributes to DAS. ECS attributes include coremetadata, structmetadata etc.
1172 void write_ecsmetadata(DAS& das, HE2CF& cf, const string& _meta)
1173 {
1174 
1175  // There is a maximum length for each ECS metadata if one uses ECS toolkit to add the metadata.
1176  // For some products of which the metadata size is huge, one metadata may be stored in several
1177  // ECS attributes such as coremetadata.0, coremetadata.1 etc.
1178  // When mapping the ECS metadata, we need to merge such metadata attributes into one attribute(coremetadata)
1179  // so that end users can easily understand this metadata.
1180  // ECS toolkit recommends data producers to use the format shown in the following coremetadata example:
1181  // coremetadata.0, coremetadata.1, etc.
1182  // Most NASA HDF-EOS2 products follow this naming convention.
1183  // However, the toolkit also allows data producers to freely name its metadata.
1184  // So we also find the following slightly different format:
1185  // (1) No suffix: coremetadata
1186  // (2) only have one such ECS attribute: coremetadata.0
1187  // (3) have several ECS attributes with two dots after the name: coremetadata.0, coremetadata.0.1 etc.
1188  // (4) Have non-number suffix: productmetadata.s, productmetadata.t etc.
1189  // We handle the above case in the function set_metadata defined in HE2CF.cc. KY 2013-07-06
1190 
1191  bool suffix_is_number = true;
1192  vector<string> meta_nonum_names;
1193  vector<string> meta_nonum_data;
1194 
1195  string meta = cf.get_metadata(_meta,suffix_is_number,meta_nonum_names, meta_nonum_data);
1196 
1197  if(""==meta && true == suffix_is_number){
1198  return; // No _meta metadata exists.
1199  }
1200 
1201  BESDEBUG("h4",meta << endl);
1202 
1203  if (false == suffix_is_number) {
1204  // For the case when the suffix is like productmetadata.s, productmetadata.t,
1205  // we will not merge the metadata since we are not sure about the order.
1206  // We just parse each attribute individually.
1207  for (unsigned int i = 0; i <meta_nonum_names.size(); i++)
1208  parse_ecs_metadata(das,meta_nonum_names[i],meta_nonum_data[i]);
1209  }
1210  else
1211  parse_ecs_metadata(das,_meta,meta);
1212 
1213 }
1214 
1215 void parse_ecs_metadata(DAS &das,const string & metaname, const string &metadata) {
1216 
1217 
1218  AttrTable *at = das.get_table(metaname);
1219  if (!at)
1220  at = das.add_table(metaname, new AttrTable);
1221 
1222  // tell lexer to scan attribute string
1223  void *buf = hdfeos_string(metadata.c_str());
1224  parser_arg arg(at);
1225 
1226  if (hdfeosparse(&arg) != 0) {
1227  hdfeos_delete_buffer(buf);
1228  throw Error("HDF-EOS parse error while processing a " + metadata + " HDFEOS attribute.");
1229  }
1230 
1231  if (arg.status() == false) {
1232  (*BESLog::TheLog())<< "HDF-EOS parse error while processing a "
1233  << metadata << " HDFEOS attribute. (2) " << endl;
1234  // << arg.error()->get_error_message() << endl;
1235  }
1236 
1237  hdfeos_delete_buffer(buf);
1238 }
1239 
1240 // Build DAS for HDFEOS2 files.
1241 int read_das_hdfeos2(DAS & das, const string & filename,int32 sdfd,int32 fileid, int32 gridfd, int32 swathfd,
1242  bool ecs_metadata,HDFSP::File**spfpptr,HDFEOS2::File **fpptr)
1243 {
1244 
1245  BESDEBUG("h4","Coming to read_das_hdfeos2 " << endl);
1246 
1247  // There are some HDF-EOS2 files(MERRA) that should be treated
1248  // exactly like HDF4 SDS files. We don't need to use HDF-EOS2 APIs to
1249  // retrieve any information. In fact, treating them as HDF-EOS2 files
1250  // will cause confusions and retrieve wrong information, though may not be essential.
1251  // So far, we've only found that the MERRA product has this problem.
1252  // A quick fix is to check if the file name contains MERRA. KY 2011-3-4
1253  // Actually, AIRS version 6 and MODO8M3 also fall into this category,
1254  // they are also specially handled, check read_das_special_eos2_core. KY 2015-06-04
1255 
1256  // Find MERRA data, return 5.
1257  if((basename(filename).size() >=5) && ((basename(filename)).compare(0,5,"MERRA")==0)) {
1258  return 5;
1259  }
1260 
1261  // We will check if the handler wants to turn on the special EOS key checking
1262 #if 0
1263  string check_enable_spec_eos_key="H4.EnableSpecialEOS";
1264  bool turn_on_enable_spec_eos_key= false;
1265  turn_on_enable_spec_eos_key = HDFCFUtil::check_beskeys(check_enable_spec_eos_key);
1266 #endif
1267  if(true == HDF4RequestHandler::get_enable_special_eos()) {
1268 
1269  string grid_name;
1270  int ret_val = check_special_eosfile(filename,grid_name,sdfd,fileid);
1271 
1272  // Expected AIRS level 2 or 3
1273  if(4== ret_val)
1274  return ret_val;
1275 
1276  bool airs_l2_l3_v6 = false;
1277  bool special_1d_grid = false;
1278 
1279  // AIRS level 2,3 version 6 or MOD08_M3-like products
1280  if(2 == ret_val || 3 == ret_val) {
1281 
1282  HDFSP::File *spf = NULL;
1283  try {
1284  spf = HDFSP::File::Read(filename.c_str(),sdfd,fileid);
1285  }
1286  catch (HDFSP::Exception &e)
1287  {
1288  if (spf != NULL)
1289  delete spf;
1290  throw InternalErr(e.what());
1291  }
1292 
1293  try {
1294  if( 2 == ret_val) {
1295 
1296  // More check and build the relations if this is a special MOD08_M3-like file
1297  if(spf->Check_update_special(grid_name)== true){
1298 
1299  special_1d_grid = true;
1300 
1301  // Building the normal HDF4 DAS here.
1302  read_das_special_eos2_core(das,spf,filename,ecs_metadata);
1303 
1304  // Need to handle MOD08M3 product
1305  if(grid_name =="mod08") {
1306  change_das_mod08_scale_offset(das,spf);
1307  }
1308  }
1309  }
1310  else {
1311 
1312  airs_l2_l3_v6 = true;
1313  spf->Handle_AIRS_L23();
1314  read_das_special_eos2_core(das,spf,filename,ecs_metadata);
1315  }
1316  //delete spf;
1317 
1318  }
1319  catch (...)
1320  {
1321  delete spf;
1322  throw;
1323  }
1324 
1325  // If this is MOD08M3 or AIRS version 6,we just need to return the file pointer.
1326  if (true == special_1d_grid || true == airs_l2_l3_v6) {
1327  *spfpptr = spf;
1328  return ret_val;
1329  }
1330 
1331  }
1332  }
1333 
1334  HDFEOS2::File *f = NULL;
1335 
1336  try {
1337  // Read all the information of EOS objects from an HDF-EOS2 file
1338  f= HDFEOS2::File::Read(filename.c_str(),gridfd,swathfd);
1339  }
1340  catch (HDFEOS2::Exception &e){
1341 
1342  if(f != NULL)
1343  delete f;
1344 
1345  // If this file is not an HDF-EOS2 file, return 0.
1346  if (!e.getFileType()){
1347  //return false;
1348  return 0;
1349  }
1350  else
1351  {
1352  throw InternalErr(e.what());
1353  }
1354  }
1355 
1356  try {
1357  // Generate CF coordinate variables(including auxiliary coordinate variables) and dimensions
1358  // All the names follow CF.
1359  f->Prepare(filename.c_str());
1360  }
1361 
1362  catch (HDFEOS2:: Exception &e) {
1363  if(f!=NULL)
1364  delete f;
1365  throw InternalErr(e.what());
1366  }
1367 
1368  *fpptr = f;
1369 
1370  // HE2CF cf is used to handle hybrid SDS and SD attributes.
1371  HE2CF cf;
1372 
1373  try {
1374  cf.open(filename,sdfd,fileid);
1375  }
1376  catch(...) {
1377  throw;
1378  }
1379  cf.set_DAS(&das);
1380 
1381  SOType sotype = DEFAULT_CF_EQU;
1382 
1383  // A flag not to generate structMetadata for the MOD13C2 file.
1384  // MOD13C2's structMetadata has wrong values. It couldn't pass the parser.
1385  // So we want to turn it off. KY 2010-8-10
1386  bool tempstrflag = false;
1387 
1388  // Product name(AMSR_E) that needs to change attribute from "SCALE FACTOR" to scale_factor etc. to follow the CF conventions
1389  //bool filename_change_scale = false; //unused variable. SBL 2/7/20
1390  if (f->getSwaths().size() > 0) {
1391  string temp_fname = basename(filename);
1392  string temp_prod_prefix = "AMSR_E";
1393  if ((temp_fname.size() > temp_prod_prefix.size()) &&
1394  (0 == (temp_fname.compare(0,temp_prod_prefix.size(),temp_prod_prefix)))) {
1395  //filename_change_scale = true; //don't see accessed again in code
1396  }
1397 
1398  }
1399 
1400  // Obtain information to identify MEaSURES VIP. This product needs to be handled properly.
1401  bool gridname_change_valid_range = false;
1402  if(1 == f->getGrids().size()) {
1403  string gridname = f->getGrids()[0]->getName();
1404  if ("VIP_CMG_GRID" == gridname)
1405  gridname_change_valid_range = true;
1406  }
1407 
1408  // Obtain information to identify MODIS_SWATH_Type_L1B product. This product's scale and offset need to be handled properly.
1409  bool is_modis_l1b = false;
1410 
1411  // Since this is a swath product, we check swath only.
1412  for (int i = 0; i<(int) f->getSwaths().size(); i++) {
1413  HDFEOS2::SwathDataset* swath = f->getSwaths()[i];
1414  string sname = swath->getName();
1415  if("MODIS_SWATH_Type_L1B" == sname){
1416  is_modis_l1b = true;
1417  break;
1418  }
1419  }
1420 
1421 #if 0
1422  string check_disable_scale_comp_key = "H4.DisableScaleOffsetComp";
1423  bool turn_on_disable_scale_comp_key= false;
1424  turn_on_disable_scale_comp_key = HDFCFUtil::check_beskeys(check_disable_scale_comp_key);
1425 
1426  string check_scale_offset_type_key = "H4.EnableCheckScaleOffsetType";
1427  bool turn_on_enable_check_scale_offset_key= false;
1428  turn_on_enable_check_scale_offset_key = HDFCFUtil::check_beskeys(check_scale_offset_type_key);
1429 #endif
1430 
1431  try {
1432 
1433  // MAP grids to DAS.
1434  for (int i = 0; i < (int) f->getGrids().size(); i++) {
1435 
1436  HDFEOS2::GridDataset* grid = f->getGrids()[i];
1437  string gname = grid->getName();
1438  sotype = grid->getScaleType();
1439 
1440  const vector<HDFEOS2::Field*>gfields = grid->getDataFields();
1441  vector<HDFEOS2::Field*>::const_iterator it_gf;
1442 
1443  for (it_gf = gfields.begin();it_gf != gfields.end();++it_gf) {
1444 
1445  bool change_fvtype = false;
1446 
1447  // original field name
1448  string fname = (*it_gf)->getName();
1449 
1450  // new field name that follows CF
1451  string newfname = (*it_gf)->getNewName();
1452 
1453  BESDEBUG("h4","Original field name: " << fname << endl);
1454  BESDEBUG("h4","Corrected field name: " << newfname << endl);
1455 
1456  // whether coordinate variable or data variables
1457  int fieldtype = (*it_gf)->getFieldType();
1458 
1459  // 0 means that the data field is NOT a coordinate variable.
1460  if (fieldtype == 0){
1461 
1462  // If you don't find any _FillValue through generic API.
1463  if((*it_gf)->haveAddedFillValue()) {
1464  BESDEBUG("h4","Has an added fill value." << endl);
1465  float addedfillvalue =
1466  (*it_gf)->getAddedFillValue();
1467  int type =
1468  (*it_gf)->getType();
1469  BESDEBUG("h4","Added fill value = "<<addedfillvalue);
1470  cf.write_attribute_FillValue(newfname,
1471  type, addedfillvalue);
1472  }
1473  string coordinate = (*it_gf)->getCoordinate();
1474  BESDEBUG("h4","Coordinate attribute: " << coordinate <<endl);
1475  if (coordinate != "")
1476  cf.write_attribute_coordinates(newfname, coordinate);
1477  }
1478 
1479  // This will override _FillValue if it's defined on the field.
1480  cf.write_attribute(gname, fname, newfname,
1481  f->getGrids().size(), fieldtype);
1482 
1483  // For fieldtype values:
1484  // 0 is general fields
1485  // 1 is latitude.
1486  // 2 is longtitude.
1487  // 3 is the existing 3rd-dimension coordinate variable
1488  // 4 is the dimension that misses the coordinate variable,use natural number
1489  // 5 is time
1490  if(fieldtype > 0){
1491 
1492  // MOD13C2 is treated specially.
1493  if(fieldtype == 1 && ((*it_gf)->getSpecialLLFormat())==3)
1494  tempstrflag = true;
1495 
1496  // Don't change the units if the 3-rd dimension field exists.(fieldtype =3)
1497  // KY 2013-02-15
1498  if (fieldtype !=3) {
1499  string tempunits = (*it_gf)->getUnits();
1500  BESDEBUG("h4",
1501  "fieldtype " << fieldtype
1502  << " units" << tempunits
1503  << endl);
1504  cf.write_attribute_units(newfname, tempunits);
1505  }
1506  }
1507 
1508  //Rename attributes of MODIS products.
1509  AttrTable *at = das.get_table(newfname);
1510 
1511  // No need for the case that follows the CF scale and offset .
1512  if(sotype!=DEFAULT_CF_EQU && at!=NULL)
1513  {
1514  bool has_Key_attr = false;
1515  AttrTable::Attr_iter it = at->attr_begin();
1516  while (it!=at->attr_end())
1517  {
1518  if(at->get_name(it)=="Key")
1519  {
1520  has_Key_attr = true;
1521  break;
1522  }
1523  it++;
1524  }
1525 
1526  //if((false == is_modis_l1b) && (false == gridname_change_valid_range)&&(false == has_Key_attr) && (true == turn_on_disable_scale_comp_key))
1527  if((false == is_modis_l1b) && (false == gridname_change_valid_range)&&(false == has_Key_attr) &&
1528  (true == HDF4RequestHandler::get_disable_scaleoffset_comp()))
1529  HDFCFUtil::handle_modis_special_attrs_disable_scale_comp(at,basename(filename), true, newfname,sotype);
1530  else {
1531 
1532  // Check if the datatype of this field needs to be changed.
1533  bool changedtype = HDFCFUtil::change_data_type(das,sotype,newfname);
1534 
1535  // Build up the field name list if the datatype of the field needs to be changed.
1536  if (true == changedtype)
1537  ctype_field_namelist.push_back(newfname);
1538 
1539  HDFCFUtil::handle_modis_special_attrs(at,basename(filename),true, newfname,sotype,gridname_change_valid_range,changedtype,change_fvtype);
1540 
1541  }
1542  }
1543 
1544  // Handle AMSR-E attributes.
1545  HDFCFUtil::handle_amsr_attrs(at);
1546 
1547  // Check if having _FillValue. If having _FillValue, compare the datatype of _FillValue
1548  // with the variable datatype. Correct the fillvalue datatype if necessary.
1549  if((false == change_fvtype) && at != NULL) {
1550  int32 var_type = (*it_gf)->getType();
1551  HDFCFUtil::correct_fvalue_type(at,var_type);
1552  }
1553 
1554  // if h4.enablecheckscaleoffsettype bes key is true,
1555  // if yes, check if having scale_factor and add_offset attributes;
1556  // if yes, check if scale_factor and add_offset attribute types are the same;
1557  // if no, make add_offset's datatype be the same as the datatype of scale_factor.
1558  // (cf requires the type of scale_factor and add_offset the same).
1559  //if (true == turn_on_enable_check_scale_offset_key && at!=NULL)
1560  if (true == HDF4RequestHandler::get_enable_check_scale_offset_type() && at!=NULL)
1562 
1563  }
1564 
1565  // Add possible 1-D CV CF attributes to identify projection info. for CF.
1566  // Currently only the Sinusoidal projection is supported.
1567  HDFCFUtil::add_cf_grid_cv_attrs(das,grid);
1568 
1569  }
1570  }
1571  catch(...) {
1572  //delete f;
1573  throw;
1574  }
1575 
1576  try {
1577  // MAP Swath attributes to DAS.
1578  for (int i = 0; i < (int) f->getSwaths().size(); i++) {
1579 
1580  HDFEOS2::SwathDataset* swath = f->getSwaths()[i];
1581 
1582  // Swath includes two parts: "Geolocation Fields" and "Data Fields".
1583  // The all_fields vector includes both.
1584  const vector<HDFEOS2::Field*> geofields = swath->getGeoFields();
1585  vector<HDFEOS2::Field*> all_fields = geofields;
1586  vector<HDFEOS2::Field*>::const_iterator it_f;
1587 
1588  const vector<HDFEOS2::Field*> datafields = swath->getDataFields();
1589  for (it_f = datafields.begin(); it_f != datafields.end(); it_f++)
1590  all_fields.push_back(*it_f);
1591 
1592  int total_geofields = geofields.size();
1593 
1594  string gname = swath->getName();
1595  BESDEBUG("h4","Swath name: " << gname << endl);
1596 
1597  sotype = swath->getScaleType();
1598 
1599  // field_counter is only used to separate the geo field from the data field.
1600  int field_counter = 0;
1601 
1602  for(it_f = all_fields.begin(); it_f != all_fields.end(); it_f++)
1603  {
1604  bool change_fvtype = false;
1605  string fname = (*it_f)->getName();
1606  string newfname = (*it_f)->getNewName();
1607  BESDEBUG("h4","Original Field name: " << fname << endl);
1608  BESDEBUG("h4","Corrected Field name: " << newfname << endl);
1609 
1610  int fieldtype = (*it_f)->getFieldType();
1611  if (fieldtype == 0){
1612  string coordinate = (*it_f)->getCoordinate();
1613  BESDEBUG("h4","Coordinate attribute: " << coordinate <<endl);
1614  if (coordinate != "")
1615  cf.write_attribute_coordinates(newfname, coordinate);
1616  }
1617 
1618  // 1 is latitude.
1619  // 2 is longitude.
1620  // Don't change "units" if a non-latlon coordinate variable exists.
1621  //if(fieldtype >0 )
1622  if(fieldtype >0 && fieldtype !=3){
1623  string tempunits = (*it_f)->getUnits();
1624  BESDEBUG("h4",
1625  "fieldtype " << fieldtype
1626  << " units" << tempunits << endl);
1627  cf.write_attribute_units(newfname, tempunits);
1628 
1629  }
1630  BESDEBUG("h4","Field Name: " << fname << endl);
1631 
1632  // coordinate "fillvalue" attribute
1633  // This operation should only apply to data fields.
1634  if (field_counter >=total_geofields) {
1635  if((*it_f)->haveAddedFillValue()){
1636  float addedfillvalue =
1637  (*it_f)->getAddedFillValue();
1638  int type =
1639  (*it_f)->getType();
1640  BESDEBUG("h4","Added fill value = "<<addedfillvalue);
1641  cf.write_attribute_FillValue(newfname, type, addedfillvalue);
1642  }
1643  }
1644  cf.write_attribute(gname, fname, newfname,
1645  f->getSwaths().size(), fieldtype);
1646 
1647  AttrTable *at = das.get_table(newfname);
1648 
1649  // No need for CF scale and offset equation.
1650  if(sotype!=DEFAULT_CF_EQU && at!=NULL)
1651  {
1652 
1653  bool has_Key_attr = false;
1654  AttrTable::Attr_iter it = at->attr_begin();
1655  while (it!=at->attr_end())
1656  {
1657  if(at->get_name(it)=="Key")
1658  {
1659  has_Key_attr = true;
1660  break;
1661  }
1662  it++;
1663  }
1664 
1665  //if((false == is_modis_l1b) && (false == gridname_change_valid_range) &&(false == has_Key_attr) && (true == turn_on_disable_scale_comp_key))
1666  if((false == is_modis_l1b) && (false == gridname_change_valid_range) &&(false == has_Key_attr) &&
1667  (true == HDF4RequestHandler::get_disable_scaleoffset_comp()))
1668  HDFCFUtil::handle_modis_special_attrs_disable_scale_comp(at,basename(filename),false,newfname,sotype);
1669  else {
1670 
1671  // Check if the datatype of this field needs to be changed.
1672  bool changedtype = HDFCFUtil::change_data_type(das,sotype,newfname);
1673 
1674  // Build up the field name list if the datatype of the field needs to be changed.
1675  if (true == changedtype)
1676 
1677  ctype_field_namelist.push_back(newfname);
1678 
1679  // Handle MODIS special attributes such as valid_range, scale_factor and add_offset etc.
1680  // Need to catch the exception since this function calls handle_modis_vip_special_attrs that may
1681  // throw an exception.
1682  HDFCFUtil::handle_modis_special_attrs(at,basename(filename), false,newfname,sotype,gridname_change_valid_range,changedtype,change_fvtype);
1683  }
1684  }
1685 
1686  // Handle AMSR-E attributes
1687  if(at !=NULL)
1688  HDFCFUtil::handle_amsr_attrs(at);
1689 
1690  // Check if having _FillValue. If having _FillValue, compare the datatype of _FillValue
1691  // with the variable datatype. Correct the fillvalue datatype if necessary.
1692  if((false == change_fvtype) && at != NULL) {
1693  int32 var_type = (*it_f)->getType();
1694  HDFCFUtil::correct_fvalue_type(at,var_type);
1695  }
1696 
1697  // If H4.EnableCheckScaleOffsetType BES key is true,
1698  // if yes, check if having scale_factor and add_offset attributes;
1699  // if yes, check if scale_factor and add_offset attribute types are the same;
1700  // if no, make add_offset's datatype be the same as the datatype of scale_factor.
1701  // (CF requires the type of scale_factor and add_offset the same).
1702  //if (true == turn_on_enable_check_scale_offset_key && at !=NULL)
1703  if (true == HDF4RequestHandler::get_enable_check_scale_offset_type() && at !=NULL)
1705 
1706  field_counter++;
1707  }
1708  }
1709  }
1710  catch(...) {
1711  //delete f;
1712  throw;
1713  }
1714 
1715 
1716  try {
1717 
1718  if(ecs_metadata == true) {
1719 
1720  // Handle ECS metadata. The following metadata are what we found so far.
1721  write_ecsmetadata(das,cf, "CoreMetadata");
1722 
1723  write_ecsmetadata(das,cf, "coremetadata");
1724 
1725  write_ecsmetadata(das,cf,"ArchiveMetadata");
1726 
1727  write_ecsmetadata(das,cf,"archivemetadata");
1728 
1729  write_ecsmetadata(das,cf,"ProductMetadata");
1730 
1731  write_ecsmetadata(das,cf,"productmetadata");
1732  }
1733 
1734  // This cause a problem for a MOD13C2 file, So turn it off temporarily. KY 2010-6-29
1735  if(false == tempstrflag) {
1736 
1737 #if 0
1738  string check_disable_smetadata_key ="H4.DisableStructMetaAttr";
1739  bool is_check_disable_smetadata = false;
1740  is_check_disable_smetadata = HDFCFUtil::check_beskeys(check_disable_smetadata_key);
1741 #endif
1742 
1743  if (false == HDF4RequestHandler::get_disable_structmeta() ) {
1744  write_ecsmetadata(das, cf, "StructMetadata");
1745  }
1746  }
1747 
1748  // Write other HDF global attributes, this routine must be called after all ECS metadata are handled.
1749  write_non_ecsmetadata_attrs(cf);
1750 
1751  cf.close();
1752  }
1753  catch(...) {
1754  //delete f;
1755  throw;
1756  }
1757 
1758  try {
1759 
1760  // Check if swath or grid object (like vgroup) attributes should be mapped to DAP2. If yes, start mapping.
1761 #if 0
1762  string check_enable_sg_attr_key="H4.EnableSwathGridAttr";
1763  bool turn_on_enable_sg_attr_key= false;
1764  turn_on_enable_sg_attr_key = HDFCFUtil::check_beskeys(check_enable_sg_attr_key);
1765 #endif
1766 
1767  if(true == HDF4RequestHandler::get_enable_swath_grid_attr()) {
1768 
1769  // MAP grid attributes to DAS.
1770  for (int i = 0; i < (int) f->getGrids().size(); i++) {
1771 
1772 
1773  HDFEOS2::GridDataset* grid = f->getGrids()[i];
1774 
1775  string gname = HDFCFUtil::get_CF_string(grid->getName());
1776 
1777  AttrTable*at = NULL;
1778 
1779  // Create a "grid" DAS table if this grid has attributes.
1780  if(grid->getAttributes().size() != 0){
1781  at = das.get_table(gname);
1782  if (!at)
1783  at = das.add_table(gname, new AttrTable);
1784  }
1785  if(at!= NULL) {
1786 
1787  // Process grid attributes
1788  const vector<HDFEOS2::Attribute *> grid_attrs = grid->getAttributes();
1789  vector<HDFEOS2::Attribute*>::const_iterator it_a;
1790  for (it_a = grid_attrs.begin(); it_a != grid_attrs.end(); ++it_a) {
1791 
1792  int attr_type = (*it_a)->getType();
1793 
1794  // We treat string differently. DFNT_UCHAR and DFNT_CHAR are treated as strings.
1795  if(attr_type==DFNT_UCHAR || attr_type == DFNT_CHAR){
1796  string tempstring2((*it_a)->getValue().begin(),(*it_a)->getValue().end());
1797  string tempfinalstr= string(tempstring2.c_str());
1798 
1799  // Using the customized escattr function to escape special characters except
1800  // \n,\r,\t since escaping them may make the attributes hard to read. KY 2013-10-14
1801  // at->append_attr((*i)->getNewName(), "String" , escattr(tempfinalstr));
1802  at->append_attr((*it_a)->getNewName(), "String" , HDFCFUtil::escattr(tempfinalstr));
1803  }
1804 
1805 
1806  else {
1807  for (int loc=0; loc < (*it_a)->getCount() ; loc++) {
1808  string print_rep = HDFCFUtil::print_attr((*it_a)->getType(), loc, (void*) &((*it_a)->getValue()[0]));
1809  at->append_attr((*it_a)->getNewName(), HDFCFUtil::print_type((*it_a)->getType()), print_rep);
1810  }
1811  }
1812  }
1813  }
1814  }
1815 
1816  //
1817  // MAP swath attributes to DAS.
1818  for (int i = 0; i < (int) f->getSwaths().size(); i++) {
1819 
1820  HDFEOS2::SwathDataset* swath = f->getSwaths()[i];
1821  string sname = swath->getName();
1822  AttrTable*at = NULL;
1823 
1824  // Create a "swath" DAS table if this swath has attributes.
1825  if(swath->getAttributes().size() != 0) {
1826  at = das.get_table(sname);
1827  if (!at)
1828  at = das.add_table(sname, new AttrTable);
1829  }
1830 
1831  if(at != NULL) {
1832  const vector<HDFEOS2::Attribute *> swath_attrs = swath->getAttributes();
1833  vector<HDFEOS2::Attribute*>::const_iterator it_a;
1834  for (it_a = swath_attrs.begin(); it_a != swath_attrs.end(); ++it_a) {
1835 
1836  int attr_type = (*it_a)->getType();
1837 
1838  // We treat string differently. DFNT_UCHAR and DFNT_CHAR are treated as strings.
1839  if(attr_type==DFNT_UCHAR || attr_type == DFNT_CHAR){
1840  string tempstring2((*it_a)->getValue().begin(),(*it_a)->getValue().end());
1841  string tempfinalstr= string(tempstring2.c_str());
1842 
1843  // Using the customized escattr function to escape special characters except
1844  // \n,\r,\t since escaping them may make the attributes hard to read. KY 2013-10-14
1845  // at->append_attr((*i)->getNewName(), "String" , escattr(tempfinalstr));
1846  at->append_attr((*it_a)->getNewName(), "String" , HDFCFUtil::escattr(tempfinalstr));
1847  }
1848  else {
1849  for (int loc=0; loc < (*it_a)->getCount() ; loc++) {
1850  string print_rep = HDFCFUtil::print_attr((*it_a)->getType(), loc, (void*) &((*it_a)->getValue()[0]));
1851  at->append_attr((*it_a)->getNewName(), HDFCFUtil::print_type((*it_a)->getType()), print_rep);
1852  }
1853 
1854  }
1855  }
1856  }
1857  }
1858  }// end of mapping swath and grid object attributes to DAP2
1859  }
1860  catch(...) {
1861  throw;
1862  }
1863 
1864  return 1;
1865 }
1866 
1867 //The wrapper of building HDF-EOS2 and special HDF4 files.
1868 void read_das_use_eos2lib(DAS & das, const string & filename,
1869  int32 sdfd,int32 fileid, int32 gridfd, int32 swathfd,bool ecs_metadata,
1870  HDFSP::File**h4filepptr,HDFEOS2::File**eosfilepptr)
1871 {
1872 
1873  BESDEBUG("h4","Coming to read_das_use_eos2lib" << endl);
1874 
1875  int ret_value = read_das_hdfeos2(das,filename,sdfd,fileid, gridfd, swathfd,ecs_metadata,h4filepptr,eosfilepptr);
1876 
1877  BESDEBUG("h4","ret_value of read_das_hdfeos2 is "<<ret_value <<endl);
1878 
1879  // read_das_hdfeos2 return value description:
1880  // 0: general non-EOS2 pure HDF4
1881  // 1: HDF-EOS2 hybrid
1882  // 2: MOD08_M3
1883  // HDF-EOS2 but no need to use HDF-EOS2 lib: no real dimension scales but have CVs for every dimension, treat differently
1884  // 3: AIRS version 6 level 3 and level 2
1885  // HDF-EOS2 but no need to use HDF-EOS2 lib:
1886  // have dimension scales but don’t have CVs for every dimension, also need to condense dimensions, treat differently
1887  // 4. Expected AIRS version 6 level 3 and level 2
1888  // HDF-EOS2 but no need to use HDF-EOS2 lib: Have dimension scales for all dimensions
1889  // 5. MERRA
1890  // Special handling for MERRA products.
1891 
1892  // Treat as pure HDF4 objects
1893  if (ret_value == 4) {
1894  if(true == read_das_special_eos2(das, filename,sdfd,fileid,ecs_metadata,h4filepptr))
1895  return;
1896  }
1897  // Special handling, already handled
1898  else if (ret_value == 2 || ret_value == 3) {
1899  return;
1900  }
1901  else if (ret_value == 1) {
1902 
1903  // Map non-EOS2 objects to DDS
1904  if(true == read_das_hdfhybrid(das,filename,sdfd,fileid,h4filepptr))
1905  return;
1906  }
1907  else {// ret_value is 0(pure HDF4) or 5(Merra)
1908  if(true == read_das_hdfsp(das, filename,sdfd, fileid,h4filepptr))
1909  return;
1910  }
1911 
1912 
1913 // Leave the original code that don't pass the file pointers.
1914 #if 0
1915  // First map HDF-EOS2 attributes to DAS
1916  if(true == read_das_hdfeos2(das, filename)){
1917 
1918  // Map non-EOS2 attributes to DAS
1919  if (true == read_das_hdfhybrid(das,filename))
1920  return;
1921  }
1922 
1923  // Map HDF4 attributes in pure HDF4 files to DAS
1924  if(true == read_das_hdfsp(das, filename)){
1925  return;
1926  }
1927 #endif
1928 
1929  // Call the default mapping of HDF4 to DAS. It should never reach here.
1930  // We add this line to ensure the HDF4 attributes mapped to DAS even if the above routines return false.
1931  read_das(das, filename);
1932 }
1933 
1934 #endif // #ifdef USE_HDFEOS2_LIB
1935 
1936 // The wrapper of building DDS function.
1937 //bool read_dds_hdfsp(DDS & dds, const string & filename,int32 sdfd, int32 fileid,int32 gridfd, int32 swathfd)
1938 bool read_dds_hdfsp(DDS & dds, const string & filename,int32 sdfd, int32 fileid,HDFSP::File*f)
1939 {
1940 
1941  BESDEBUG("h4","Coming to read_dds_sp "<<endl);
1942  dds.set_dataset_name(basename(filename));
1943 
1944  // Obtain SDS fields
1945  const vector<HDFSP::SDField *>& spsds = f->getSD()->getFields();
1946 
1947  // Read SDS
1948  vector<HDFSP::SDField *>::const_iterator it_g;
1949  for(it_g = spsds.begin(); it_g != spsds.end(); it_g++){
1950 
1951  // Although the following line's logic needs to improve, it is right.
1952  // When Has_Dim_NoScale_Field is false, it only happens to the OTHERHDF case.
1953  // For the OTHERHDF case, we will not map the dimension_no_dim_scale (empty) field. This is equivalent to
1954  // (0 == (*it_g)->getFieldType()) || (true == (*it_g)->IsDimScale())
1955  if (false == f->Has_Dim_NoScale_Field() || (0 == (*it_g)->getFieldType()) || (true == (*it_g)->IsDimScale())){
1956  try {
1957  read_dds_spfields(dds,filename,sdfd,(*it_g),f->getSPType());
1958  }
1959  catch(...) {
1960  throw;
1961  }
1962  }
1963  }
1964 
1965  // Read Vdata fields.
1966  // To speed up the performance for handling CERES data, we turn off some CERES vdata fields, this should be resumed in the future version with BESKeys.
1967 #if 0
1968  string check_ceres_vdata_key="H4.EnableCERESVdata";
1969  bool turn_on_ceres_vdata_key= false;
1970  turn_on_ceres_vdata_key = HDFCFUtil::check_beskeys(check_ceres_vdata_key);
1971 #endif
1972 
1973  bool output_vdata_flag = true;
1974  //if (false == turn_on_ceres_vdata_key &&
1975  if (false == HDF4RequestHandler::get_enable_ceres_vdata() &&
1976  (CER_AVG == f->getSPType() ||
1977  CER_ES4 == f->getSPType() ||
1978  CER_SRB == f->getSPType() ||
1979  CER_ZAVG == f->getSPType()))
1980  output_vdata_flag = false;
1981 
1982  if(true == output_vdata_flag) {
1983  for(vector<HDFSP::VDATA *>::const_iterator i=f->getVDATAs().begin(); i!=f->getVDATAs().end();i++) {
1984  if(!(*i)->getTreatAsAttrFlag()){
1985  for(vector<HDFSP::VDField *>::const_iterator j=(*i)->getFields().begin();j!=(*i)->getFields().end();j++) {
1986  try {
1987  read_dds_spvdfields(dds,filename,fileid,(*i)->getObjRef(),(*j)->getNumRec(),(*j));
1988  }
1989  catch(...) {
1990  throw;
1991  }
1992  }
1993  }
1994  }
1995  }
1996 
1997  return true;
1998 }
1999 
2000 // Follow CF to build DAS for non-HDFEOS2 HDF4 products. This routine also applies
2001 // to all HDF4 products when HDF-EOS2 library is not configured in.
2002 //bool read_das_hdfsp(DAS & das, const string & filename, int32 sdfd, int32 fileid,int32 gridfd, int32 swathfd)
2003 bool read_das_hdfsp(DAS & das, const string & filename, int32 sdfd, int32 fileid,HDFSP::File**fpptr)
2004 {
2005 
2006  BESDEBUG("h4","Coming to read_das_sp "<<endl);
2007 
2008  // Define a file pointer
2009  HDFSP::File *f = NULL;
2010  try {
2011  // Obtain all the necesary information from HDF4 files.
2012  f = HDFSP::File::Read(filename.c_str(), sdfd,fileid);
2013  }
2014  catch (HDFSP::Exception &e)
2015  {
2016  if (f != NULL)
2017  delete f;
2018  throw InternalErr(e.what());
2019  }
2020 
2021  try {
2022  // Generate CF coordinate variables(including auxiliary coordinate variables) and dimensions
2023  // All the names follow CF.
2024  f->Prepare();
2025  }
2026  catch (HDFSP::Exception &e) {
2027  delete f;
2028  throw InternalErr(e.what());
2029  }
2030 
2031  *fpptr = f;
2032 
2033  // Check if mapping vgroup attribute key is turned on, if yes, mapping vgroup attributes.
2034 #if 0
2035  string check_enable_vg_attr_key="H4.EnableVgroupAttr";
2036  bool turn_on_enable_vg_attr_key= false;
2037  turn_on_enable_vg_attr_key = HDFCFUtil::check_beskeys(check_enable_vg_attr_key);
2038 #endif
2039 
2040 
2041  //if(true == turn_on_enable_vg_attr_key ) {
2042  if(true == HDF4RequestHandler::get_enable_vgroup_attr()) {
2043 
2044  // Obtain vgroup attributes if having vgroup attributes.
2045  vector<HDFSP::AttrContainer *>vg_container = f->getVgattrs();
2046  for(vector<HDFSP::AttrContainer *>::const_iterator i=f->getVgattrs().begin();i!=f->getVgattrs().end();i++) {
2047  AttrTable *vgattr_at = das.get_table((*i)->getName());
2048  if (!vgattr_at)
2049  vgattr_at = das.add_table((*i)->getName(), new AttrTable);
2050 
2051  for(vector<HDFSP::Attribute *>::const_iterator j=(*i)->getAttributes().begin();j!=(*i)->getAttributes().end();j++) {
2052 
2053  // Handle string first.
2054  if((*j)->getType()==DFNT_UCHAR || (*j)->getType() == DFNT_CHAR){
2055  string tempstring2((*j)->getValue().begin(),(*j)->getValue().end());
2056  string tempfinalstr= string(tempstring2.c_str());
2057 
2058  //escaping the special characters in string attributes when mapping to DAP
2059  vgattr_at->append_attr((*j)->getNewName(), "String" , HDFCFUtil::escattr(tempfinalstr));
2060  }
2061  else {
2062  for (int loc=0; loc < (*j)->getCount() ; loc++) {
2063 
2064  string print_rep = HDFCFUtil::print_attr((*j)->getType(), loc, (void*) &((*j)->getValue()[0]));
2065  vgattr_at->append_attr((*j)->getNewName(), HDFCFUtil::print_type((*j)->getType()), print_rep);
2066  }
2067  }
2068  }
2069  }
2070  }// end of mapping vgroup attributes.
2071 
2072  // Initialize ECS metadata
2073  string core_metadata = "";
2074  string archive_metadata = "";
2075  string struct_metadata = "";
2076 
2077  // Obtain SD pointer, this is used to retrieve the file attributes associated with the SD interface
2078  HDFSP::SD* spsd = f->getSD();
2079 
2080  // Except TRMM, we don't find ECS metadata in other non-EOS products. For the option to treat EOS2 as pure HDF4, we
2081  // kind of relax the support of merging metadata as we do for the EOS2 case(read_das_hdfeos2). We will see if we have the user
2082  // request to make them consistent in the future. KY 2013-07-08
2083  for(vector<HDFSP::Attribute *>::const_iterator i=spsd->getAttributes().begin();i!=spsd->getAttributes().end();i++) {
2084 
2085  // Here we try to combine ECS metadata into a string.
2086  if(((*i)->getName().compare(0, 12, "CoreMetadata" )== 0) ||
2087  ((*i)->getName().compare(0, 12, "coremetadata" )== 0)){
2088 
2089  // We assume that CoreMetadata.0, CoreMetadata.1, ..., CoreMetadata.n attribures
2090  // are processed in the right order during HDFSP::Attribute vector iteration.
2091  // Otherwise, this won't work.
2092  string tempstring((*i)->getValue().begin(),(*i)->getValue().end());
2093 
2094  // Temporarily turn off CERES data since there are so many fields in CERES. It will choke clients KY 2010-7-9
2095  if(f->getSPType() != CER_AVG &&
2096  f->getSPType() != CER_ES4 &&
2097  f->getSPType() !=CER_SRB &&
2098  f->getSPType() != CER_ZAVG)
2099  core_metadata.append(tempstring);
2100  }
2101  else if(((*i)->getName().compare(0, 15, "ArchiveMetadata" )== 0) ||
2102  ((*i)->getName().compare(0, 16, "ArchivedMetadata")==0) ||
2103  ((*i)->getName().compare(0, 15, "archivemetadata" )== 0)){
2104  string tempstring((*i)->getValue().begin(),(*i)->getValue().end());
2105  // Currently some TRMM "swath" archivemetadata includes special characters that cannot be handled by OPeNDAP
2106  // So turn it off.
2107  // Turn off CERES data since it may choke JAVA clients KY 2010-7-9
2108  if(f->getSPType() != TRMML2_V6 && f->getSPType() != CER_AVG && f->getSPType() != CER_ES4 && f->getSPType() !=CER_SRB && f->getSPType() != CER_ZAVG)
2109  archive_metadata.append(tempstring);
2110  }
2111  else if(((*i)->getName().compare(0, 14, "StructMetadata" )== 0) ||
2112  ((*i)->getName().compare(0, 14, "structmetadata" )== 0)){
2113 
2114 #if 0
2115  string check_disable_smetadata_key ="H4.DisableStructMetaAttr";
2116  bool is_check_disable_smetadata = false;
2117  is_check_disable_smetadata = HDFCFUtil::check_beskeys(check_disable_smetadata_key);
2118 #endif
2119 
2120  //if (false == is_check_disable_smetadata) {
2121  if (false == HDF4RequestHandler::get_disable_structmeta()) {
2122 
2123  string tempstring((*i)->getValue().begin(),(*i)->getValue().end());
2124 
2125  // Turn off TRMM "swath" verison 6 level 2 productsCERES data since it may choke JAVA clients KY 2010-7-9
2126  if(f->getSPType() != TRMML2_V6 &&
2127  f->getSPType() != CER_AVG &&
2128  f->getSPType() != CER_ES4 &&
2129  f->getSPType() !=CER_SRB &&
2130  f->getSPType() != CER_ZAVG)
2131  struct_metadata.append(tempstring);
2132 
2133  }
2134  }
2135  else {
2136  // Process gloabal attributes
2137  AttrTable *at = das.get_table("HDF_GLOBAL");
2138  if (!at)
2139  at = das.add_table("HDF_GLOBAL", new AttrTable);
2140 
2141  // We treat string differently. DFNT_UCHAR and DFNT_CHAR are treated as strings.
2142  if((*i)->getType()==DFNT_UCHAR || (*i)->getType() == DFNT_CHAR){
2143  string tempstring2((*i)->getValue().begin(),(*i)->getValue().end());
2144  string tempfinalstr= string(tempstring2.c_str());
2145 
2146  // Using the customized escattr function to escape special characters except
2147  // \n,\r,\t since escaping them may make the attributes hard to read. KY 2013-10-14
2148  // at->append_attr((*i)->getNewName(), "String" , escattr(tempfinalstr));
2149  at->append_attr((*i)->getNewName(), "String" , HDFCFUtil::escattr(tempfinalstr));
2150  }
2151 
2152  else {
2153  for (int loc=0; loc < (*i)->getCount() ; loc++) {
2154  string print_rep = HDFCFUtil::print_attr((*i)->getType(), loc, (void*) &((*i)->getValue()[0]));
2155  at->append_attr((*i)->getNewName(), HDFCFUtil::print_type((*i)->getType()), print_rep);
2156  }
2157 
2158  }
2159  }
2160 
2161  }
2162 
2163  // The following code may be condensed in the future. KY 2012-09-19
2164  // Coremetadata, structmetadata and archive metadata need special parsers.
2165 
2166  // Write coremetadata.
2167  if(core_metadata.size() > 0){
2168  AttrTable *at = das.get_table("CoreMetadata");
2169  if (!at)
2170  at = das.add_table("CoreMetadata", new AttrTable);
2171  // tell lexer to scan attribute string
2172  void *buf = hdfeos_string(core_metadata.c_str());
2173  parser_arg arg(at);
2174 
2175  if (hdfeosparse(&arg) != 0) {
2176  hdfeos_delete_buffer(buf);
2177  throw Error("Parse error while processing a CoreMetadata attribute.");
2178  }
2179 
2180  // Errors returned from here are ignored.
2181  if (arg.status() == false) {
2182  ERROR_LOG("Parse error while processing a CoreMetadata attribute. (2) " << endl);
2183  // << arg.error()->get_error_message() << endl;
2184  }
2185 
2186  hdfeos_delete_buffer(buf);
2187  }
2188 
2189  // Write archive metadata.
2190  if(archive_metadata.size() > 0){
2191  AttrTable *at = das.get_table("ArchiveMetadata");
2192  if (!at)
2193  at = das.add_table("ArchiveMetadata", new AttrTable);
2194  // tell lexer to scan attribute string
2195  void *buf = hdfeos_string(archive_metadata.c_str());
2196  parser_arg arg(at);
2197  if (hdfeosparse(&arg) != 0){
2198  // delete f;
2199  hdfeos_delete_buffer(buf);
2200  throw Error("Parse error while processing an ArchiveMetadata attribute.");
2201  }
2202 
2203  // Errors returned from here are ignored.
2204  if (arg.status() == false) {
2205  ERROR_LOG("Parse error while processing an ArchiveMetadata attribute. (2) " << endl);
2206  // << arg.error()->get_error_message() << endl;
2207  }
2208 
2209  hdfeos_delete_buffer(buf);
2210  }
2211 
2212  // Write struct metadata.
2213  if(struct_metadata.size() > 0){
2214  AttrTable *at = das.get_table("StructMetadata");
2215  if (!at)
2216  at = das.add_table("StructMetadata", new AttrTable);
2217  // tell lexer to scan attribute string
2218  void *buf = hdfeos_string(struct_metadata.c_str());
2219  parser_arg arg(at);
2220  if (hdfeosparse(&arg) != 0){
2221  // delete f;
2222  hdfeos_delete_buffer(buf);
2223  throw Error("Parse error while processing a StructMetadata attribute.");
2224  }
2225 
2226  if (arg.status() == false) {
2227  ERROR_LOG("Parse error while processing a StructMetadata attribute. (2)" << endl);
2228  }
2229 
2230 
2231  // Errors returned from here are ignored.
2232 #if 0
2233  if (arg.status() == false) {
2234  (*BESLog::TheLog())<< "Parse error while processing a StructMetadata attribute. (2)" << endl
2235  << arg.error()->get_error_message() << endl;
2236  }
2237 #endif
2238 
2239  hdfeos_delete_buffer(buf);
2240  }
2241 
2242  // The following code checks the special handling of scale and offset of the OBPG products.
2243  //Store value of "Scaling" attribute.
2244  string scaling;
2245 
2246  //Store value of "Slope" attribute.
2247  float slope = 0.;
2248  bool global_slope_flag = false;
2249  float intercept = 0.;
2250  bool global_intercept_flag = false;
2251 
2252  // Check OBPG attributes. Specifically, check if slope and intercept can be obtained from the file level.
2253  // If having global slope and intercept, obtain OBPG scaling, slope and intercept values.
2254  HDFCFUtil::check_obpg_global_attrs(f,scaling,slope,global_slope_flag,intercept,global_intercept_flag);
2255 
2256  // Handle individual fields
2257  const vector<HDFSP::SDField *>& spsds = f->getSD()->getFields();
2258  vector<HDFSP::SDField *>::const_iterator it_g;
2259  for(it_g = spsds.begin(); it_g != spsds.end(); it_g++){
2260 
2261  // The following two if-statements are double secure checks. It will
2262  // make sure no-dimension-scale dimension variables and the associated coordinate variables(if any) are ignored.
2263  // Ignore ALL coordinate variables if this is "OTHERHDF" case and some dimensions
2264  // don't have dimension scale data.
2265  if ( true == f->Has_Dim_NoScale_Field() &&
2266  ((*it_g)->getFieldType() !=0)&&
2267  ((*it_g)->IsDimScale() == false))
2268  continue;
2269 
2270  // Ignore the empty(no data) dimension variable.
2271  if (OTHERHDF == f->getSPType() && true == (*it_g)->IsDimNoScale())
2272  continue;
2273 
2274  AttrTable *at = das.get_table((*it_g)->getNewName());
2275  if (!at)
2276  at = das.add_table((*it_g)->getNewName(), new AttrTable);
2277 
2278  // Some fields have "long_name" attributes,so we have to use this attribute rather than creating our own
2279  bool long_name_flag = false;
2280 
2281  for(vector<HDFSP::Attribute *>::const_iterator i=(*it_g)->getAttributes().begin();
2282  i!=(*it_g)->getAttributes().end();i++) {
2283  if((*i)->getName() == "long_name") {
2284  long_name_flag = true;
2285  break;
2286  }
2287  }
2288 
2289  if(false == long_name_flag) {
2290  if (f->getSPType() == TRMML2_V7) {
2291  if((*it_g)->getFieldType() == 1)
2292  at->append_attr("standard_name","String","latitude");
2293  else if ((*it_g)->getFieldType() == 2) {
2294  at->append_attr("standard_name","String","longitude");
2295 
2296  }
2297 
2298  }
2299  else if (f->getSPType() == TRMML3S_V7 || f->getSPType() == TRMML3M_V7) {
2300  if((*it_g)->getFieldType() == 1) {
2301  at->append_attr("long_name","String","latitude");
2302  at->append_attr("standard_name","String","latitude");
2303 
2304  }
2305  else if ((*it_g)->getFieldType() == 2) {
2306  at->append_attr("long_name","String","longitude");
2307  at->append_attr("standard_name","String","longitude");
2308  }
2309 
2310  }
2311  else
2312  at->append_attr("long_name", "String", (*it_g)->getName());
2313  }
2314 
2315  // For some OBPG files that only provide slope and intercept at the file level,
2316  // we need to add the global slope and intercept to all fields and change their names to scale_factor and add_offset.
2317  // For OBPG files that provide slope and intercept at the field level, we need to rename those attribute names to scale_factor and add_offset.
2318  HDFCFUtil::add_obpg_special_attrs(f,das,*it_g,scaling,slope,global_slope_flag,intercept,global_intercept_flag);
2319 
2320  // MAP individual SDS field to DAP DAS
2321  for(vector<HDFSP::Attribute *>::const_iterator i=(*it_g)->getAttributes().begin();i!=(*it_g)->getAttributes().end();i++) {
2322 
2323  // Handle string first.
2324  if((*i)->getType()==DFNT_UCHAR || (*i)->getType() == DFNT_CHAR){
2325  string tempstring2((*i)->getValue().begin(),(*i)->getValue().end());
2326  string tempfinalstr= string(tempstring2.c_str());
2327 
2328  // We want to escape the possible special characters except the fullpath attribute. This may be overkilled since
2329  // fullpath is only added for some CERES and MERRA data. We think people use fullpath really mean to keep their
2330  // original names. So escaping them for the time being. KY 2013-10-14
2331 
2332  at->append_attr((*i)->getNewName(), "String" ,((*i)->getNewName()=="fullpath")?tempfinalstr:HDFCFUtil::escattr(tempfinalstr));
2333  }
2334  else {
2335  for (int loc=0; loc < (*i)->getCount() ; loc++) {
2336  string print_rep = HDFCFUtil::print_attr((*i)->getType(), loc, (void*) &((*i)->getValue()[0]));
2337  at->append_attr((*i)->getNewName(), HDFCFUtil::print_type((*i)->getType()), print_rep);
2338  }
2339  }
2340 
2341  }
2342 
2343  // MAP dimension info. to DAS(Currently this should only affect the OTHERHDF case when no dimension scale for some dimensions)
2344  // KY 2012-09-19
2345  // For the type DFNT_CHAR, one dimensional char array is mapped to a scalar DAP string,
2346  // N dimensional char array is mapped to N-1 dimensional DAP string,
2347  // So the number of dimension info stored in the attribute container should be reduced by 1.
2348  // KY 2014-04-11
2349 
2350  bool has_dim_info = true;
2351  vector<HDFSP::AttrContainer *>::const_iterator it_end = (*it_g)->getDimInfo().end();
2352  if((*it_g)->getType() == DFNT_CHAR) {
2353  if((*it_g)->getRank() >1 && (*it_g)->getDimInfo().size() >1)
2354  it_end = (*it_g)->getDimInfo().begin()+(*it_g)->getDimInfo().size() -1;
2355  else
2356  has_dim_info = false;
2357  }
2358 
2359  if( true == has_dim_info) {
2360 
2361  for(vector<HDFSP::AttrContainer *>::const_iterator i=(*it_g)->getDimInfo().begin();i!=it_end;i++) {
2362  //for(vector<HDFSP::AttrContainer *>::const_iterator i=(*it_g)->getDimInfo().begin();i!=(*it_g)->getDimInfo().end();i++) {
2363 
2364  // Here a little surgory to add the field path(including) name before dim0, dim1, etc.
2365  string attr_container_name = (*it_g)->getNewName() + (*i)->getName();
2366  AttrTable *dim_at = das.get_table(attr_container_name);
2367  if (!dim_at)
2368  dim_at = das.add_table(attr_container_name, new AttrTable);
2369 
2370  for(vector<HDFSP::Attribute *>::const_iterator j=(*i)->getAttributes().begin();j!=(*i)->getAttributes().end();j++) {
2371 
2372  // Handle string first.
2373  if((*j)->getType()==DFNT_UCHAR || (*j)->getType() == DFNT_CHAR){
2374  string tempstring2((*j)->getValue().begin(),(*j)->getValue().end());
2375  string tempfinalstr= string(tempstring2.c_str());
2376 
2377  //escaping the special characters in string attributes when mapping to DAP
2378  dim_at->append_attr((*j)->getNewName(), "String" , HDFCFUtil::escattr(tempfinalstr));
2379  }
2380  else {
2381  for (int loc=0; loc < (*j)->getCount() ; loc++) {
2382 
2383  string print_rep = HDFCFUtil::print_attr((*j)->getType(), loc, (void*) &((*j)->getValue()[0]));
2384  dim_at->append_attr((*j)->getNewName(), HDFCFUtil::print_type((*j)->getType()), print_rep);
2385  }
2386  }
2387  }
2388 
2389  }
2390  }
2391 
2392  // Handle special CF attributes such as units, valid_range and coordinates
2393  // Overwrite units if fieldtype is latitude.
2394  if((*it_g)->getFieldType() == 1){
2395 
2396  at->del_attr("units"); // Override any existing units attribute.
2397  at->append_attr("units", "String",(*it_g)->getUnits());
2398  if (f->getSPType() == CER_ES4) // Drop the valid_range attribute since the value will be interpreted wrongly by CF tools
2399  at->del_attr("valid_range");
2400 
2401 
2402  }
2403  // Overwrite units if fieldtype is longitude
2404  if((*it_g)->getFieldType() == 2){
2405  at->del_attr("units"); // Override any existing units attribute.
2406  at->append_attr("units", "String",(*it_g)->getUnits());
2407  if (f->getSPType() == CER_ES4) // Drop the valid_range attribute since the value will be interpreted wrongly by CF tools
2408  at->del_attr("valid_range");
2409 
2410  }
2411 
2412  // The following if-statement may not be necessary since fieldtype=4 is the missing CV.
2413  // This missing CV is added by the handler and the units is always level.
2414  if((*it_g)->getFieldType() == 4){
2415  at->del_attr("units"); // Override any existing units attribute.
2416  at->append_attr("units", "String",(*it_g)->getUnits());
2417  }
2418 
2419  // Overwrite coordinates if fieldtype is neither lat nor lon.
2420  if((*it_g)->getFieldType() == 0){
2421  at->del_attr("coordinates"); // Override any existing units attribute.
2422 
2423  // If no "dimension scale" dimension exists, delete the "coordinates" attributes
2424  if (false == f->Has_Dim_NoScale_Field()) {
2425  string coordinate = (*it_g)->getCoordinate();
2426  if (coordinate !="")
2427  at->append_attr("coordinates", "String", coordinate);
2428  }
2429  }
2430  }
2431 
2432 
2433  // For OTHERHDF products, add units for latitude and longitude; also change unit to units.
2434  HDFCFUtil::handle_otherhdf_special_attrs(f,das);
2435 
2436  // For NASA products, add missing CF attributes if possible
2437  HDFCFUtil::add_missing_cf_attrs(f,das);
2438 
2439 #if 0
2440  string check_scale_offset_type_key = "H4.EnableCheckScaleOffsetType";
2441  bool turn_on_enable_check_scale_offset_key= false;
2442  turn_on_enable_check_scale_offset_key = HDFCFUtil::check_beskeys(check_scale_offset_type_key);
2443 #endif
2444 
2445  // Check if having _FillValue. If having _FillValue, compare the datatype of _FillValue
2446  // with the variable datatype. Correct the fillvalue datatype if necessary.
2447  for(it_g = spsds.begin(); it_g != spsds.end(); it_g++){
2448 
2449  AttrTable *at = das.get_table((*it_g)->getNewName());
2450  if (at != NULL) {
2451  int32 var_type = (*it_g)->getType();
2452  try {
2453  HDFCFUtil::correct_fvalue_type(at,var_type);
2454  }
2455  catch(...) {
2456  throw;
2457  }
2458  }
2459 
2460  // If H4.EnableCheckScaleOffsetType BES key is true,
2461  // if yes, check if having scale_factor and add_offset attributes;
2462  // if yes, check if scale_factor and add_offset attribute types are the same;
2463  // if no, make add_offset's datatype be the same as the datatype of scale_factor.
2464  // (CF requires the type of scale_factor and add_offset the same).
2465  //if (true == turn_on_enable_check_scale_offset_key && at !=NULL)
2466  if (true == HDF4RequestHandler::get_enable_check_scale_offset_type() && at !=NULL)
2468  }
2469 
2470  // Optimization for users to tune the DAS output.
2471  HDFCFUtil::handle_merra_ceres_attrs_with_bes_keys(f,das,filename);
2472 
2473  // Check the EnableVdataDescAttr key. If this key is turned on, the handler-added attribute VDdescname and
2474  // the attributes of vdata and vdata fields will be outputed to DAS. Otherwise, these attributes will
2475  // not output to DAS. The key will be turned off by default to shorten the DAP output. KY 2012-09-18
2476  try {
2477  HDFCFUtil::handle_vdata_attrs_with_desc_key(f,das);
2478  }
2479  catch(...) {
2480  throw;
2481  }
2482 
2483  return true;
2484 }
2485 
2486 // This routine is for case 4 of the cases returned by read_das_hdfeos2.
2487 // Creating this routine is for performance reasons. Structmetadata is
2488 // turned off because the information has been retrieved and presented
2489 // by DDS and DAS.
2490 // Currently we don't have a user case for this routine and also
2491 // this code is not used. We still keep it for the future usage.
2492 // KY 2014-01-29
2493 
2494 bool read_das_special_eos2(DAS &das,const string& filename,int32 sdfd,int32 fileid,bool ecs_metadata,HDFSP::File**fpptr) {
2495 
2496  BESDEBUG("h4","Coming to read_das_special_eos2 " << endl);
2497 
2498 #if 0
2499  // HDF4 H interface ID
2500  int32 myfileid;
2501  myfileid = Hopen(const_cast<char *>(filename.c_str()), DFACC_READ,0);
2502 #endif
2503 
2504  // Define a file pointer
2505  HDFSP::File *f = NULL;
2506  try {
2507 
2508  // Obtain all the necesary information from HDF4 files.
2509  f = HDFSP::File::Read(filename.c_str(), sdfd,fileid);
2510  }
2511  catch (HDFSP::Exception &e)
2512  {
2513  if (f!= NULL)
2514  delete f;
2515  throw InternalErr(e.what());
2516  }
2517 
2518  try {
2519  // Generate CF coordinate variables(including auxiliary coordinate variables) and dimensions
2520  // All the names follow CF.
2521  f->Prepare();
2522  }
2523  catch (HDFSP::Exception &e) {
2524  delete f;
2525  throw InternalErr(e.what());
2526  }
2527 
2528  *fpptr = f;
2529 
2530  try {
2531  read_das_special_eos2_core(das, f, filename,ecs_metadata);
2532  }
2533  catch(...) {
2534  throw;
2535  }
2536 
2537  // The return value is a dummy value, not used.
2538  return true;
2539 }
2540 
2541 // This routine is for special EOS2 that can be tuned to build up DAS and DDS quickly.
2542 // We also turn off the generation of StructMetadata for the performance reason.
2543 bool read_das_special_eos2_core(DAS &das,HDFSP::File* f,const string& filename,bool ecs_metadata) {
2544 
2545  BESDEBUG("h4","Coming to read_das_special_eos2_core "<<endl);
2546  // Initialize ECS metadata
2547  string core_metadata = "";
2548  string archive_metadata = "";
2549  string struct_metadata = "";
2550 
2551  // Obtain SD pointer, this is used to retrieve the file attributes associated with the SD interface
2552  HDFSP::SD* spsd = f->getSD();
2553 
2554  //Ignore StructMetadata to improve performance
2555  for(vector<HDFSP::Attribute *>::const_iterator i=spsd->getAttributes().begin();i!=spsd->getAttributes().end();i++) {
2556 
2557  // Here we try to combine ECS metadata into a string.
2558  if(((*i)->getName().compare(0, 12, "CoreMetadata" )== 0) ||
2559  ((*i)->getName().compare(0, 12, "coremetadata" )== 0)){
2560 
2561  if(ecs_metadata == true) {
2562  // We assume that CoreMetadata.0, CoreMetadata.1, ..., CoreMetadata.n attribures
2563  // are processed in the right order during HDFSP::Attribute vector iteration.
2564  // Otherwise, this won't work.
2565  string tempstring((*i)->getValue().begin(),(*i)->getValue().end());
2566  core_metadata.append(tempstring);
2567  }
2568  }
2569  else if(((*i)->getName().compare(0, 15, "ArchiveMetadata" )== 0) ||
2570  ((*i)->getName().compare(0, 16, "ArchivedMetadata")==0) ||
2571  ((*i)->getName().compare(0, 15, "archivemetadata" )== 0)){
2572  if(ecs_metadata == true) {
2573  string tempstring((*i)->getValue().begin(),(*i)->getValue().end());
2574  archive_metadata.append(tempstring);
2575  }
2576  }
2577  else if(((*i)->getName().compare(0, 14, "StructMetadata" )== 0) ||
2578  ((*i)->getName().compare(0, 14, "structmetadata" )== 0))
2579  ; // Ignore StructMetadata for performance
2580  else {
2581  // Process gloabal attributes
2582  AttrTable *at = das.get_table("HDF_GLOBAL");
2583  if (!at)
2584  at = das.add_table("HDF_GLOBAL", new AttrTable);
2585 
2586  // We treat string differently. DFNT_UCHAR and DFNT_CHAR are treated as strings.
2587  if((*i)->getType()==DFNT_UCHAR || (*i)->getType() == DFNT_CHAR){
2588  string tempstring2((*i)->getValue().begin(),(*i)->getValue().end());
2589  string tempfinalstr= string(tempstring2.c_str());
2590 
2591  // Using the customized escattr function to escape special characters except
2592  // \n,\r,\t since escaping them may make the attributes hard to read. KY 2013-10-14
2593  // at->append_attr((*i)->getNewName(), "String" , escattr(tempfinalstr));
2594  at->append_attr((*i)->getNewName(), "String" , HDFCFUtil::escattr(tempfinalstr));
2595  }
2596 
2597  else {
2598  for (int loc=0; loc < (*i)->getCount() ; loc++) {
2599  string print_rep = HDFCFUtil::print_attr((*i)->getType(), loc, (void*) &((*i)->getValue()[0]));
2600  at->append_attr((*i)->getNewName(), HDFCFUtil::print_type((*i)->getType()), print_rep);
2601  }
2602 
2603  }
2604  }
2605 
2606  }
2607 
2608  // The following code may be condensed in the future. KY 2012-09-19
2609  // Coremetadata, structmetadata and archive metadata need special parsers.
2610 
2611  if(ecs_metadata == true) {
2612  // Write coremetadata.
2613  if(core_metadata.size() > 0){
2614  AttrTable *at = das.get_table("CoreMetadata");
2615  if (!at)
2616  at = das.add_table("CoreMetadata", new AttrTable);
2617  // tell lexer to scan attribute string
2618  void *buf = hdfeos_string(core_metadata.c_str());
2619  parser_arg arg(at);
2620 
2621  if (hdfeosparse(&arg) != 0) {
2622  hdfeos_delete_buffer(buf);
2623  throw Error("Parse error while processing a CoreMetadata attribute.");
2624  }
2625 
2626  // Errors returned from here are ignored.
2627  if (arg.status() == false) {
2628  ERROR_LOG("Parse error while processing a CoreMetadata attribute. (2)" << endl);
2629 // << arg.error()->get_error_message() << endl;
2630  }
2631 
2632  hdfeos_delete_buffer(buf);
2633 
2634  }
2635 
2636  // Write archive metadata.
2637  if(archive_metadata.size() > 0){
2638  AttrTable *at = das.get_table("ArchiveMetadata");
2639  if (!at)
2640  at = das.add_table("ArchiveMetadata", new AttrTable);
2641  // tell lexer to scan attribute string
2642  void *buf = hdfeos_string(archive_metadata.c_str());
2643  parser_arg arg(at);
2644  if (hdfeosparse(&arg) != 0) {
2645  hdfeos_delete_buffer(buf);
2646  throw Error("Parse error while processing an ArchiveMetadata attribute.");
2647  }
2648 
2649  // Errors returned from here are ignored.
2650  if (arg.status() == false) {
2651  ERROR_LOG("Parse error while processing an ArchiveMetadata attribute. (2)" << endl);
2652  // << arg.error()->get_error_message() << endl;
2653  }
2654 
2655  hdfeos_delete_buffer(buf);
2656  }
2657  }
2658 
2659  // Handle individual fields
2660  const vector<HDFSP::SDField *>& spsds = f->getSD()->getFields();
2661  vector<HDFSP::SDField *>::const_iterator it_g;
2662  for(it_g = spsds.begin(); it_g != spsds.end(); it_g++){
2663 
2664  // Add units for CV variables
2665 // if((*it_g)->getFieldType() != 0 && (*it_g)->IsDimScale() == false){
2666  if((*it_g)->getFieldType() != 0){
2667 
2668  AttrTable *at = das.get_table((*it_g)->getNewName());
2669  if (!at)
2670  at = das.add_table((*it_g)->getNewName(), new AttrTable);
2671 
2672  string tempunits = (*it_g)->getUnits();
2673  if(at->simple_find("units")== at->attr_end() && tempunits!="")
2674  at->append_attr("units", "String" ,tempunits);
2675  if((*it_g)->getFieldType() == 1){
2676  if(at->simple_find("long_name")== at->attr_end())
2677  at->append_attr("long_name","String","Latitude");
2678  }
2679  else if((*it_g)->getFieldType() == 2) {
2680  if(at->simple_find("long_name")== at->attr_end())
2681  at->append_attr("long_name","String","Longitude");
2682  }
2683  }
2684  else {// We will check if having the coordinates attribute.
2685  AttrTable *at = das.get_table((*it_g)->getNewName());
2686  if (!at)
2687  at = das.add_table((*it_g)->getNewName(), new AttrTable);
2688  string tempcoors = (*it_g)->getCoordinate();
2689  // If we add the coordinates attribute, any existing coordinates attribute will be removed.
2690  if(tempcoors!=""){
2691  at->del_attr("coordinates");
2692  at->append_attr("coordinates","String",tempcoors);
2693  }
2694 
2695  }
2696 
2697  // Ignore variables that don't have attributes.
2698  if((*it_g)->getAttributes().size() == 0)
2699  continue;
2700 
2701  AttrTable *at = das.get_table((*it_g)->getNewName());
2702  if (!at)
2703  at = das.add_table((*it_g)->getNewName(), new AttrTable);
2704 
2705  // MAP individual SDS field to DAP DAS
2706  for(vector<HDFSP::Attribute *>::const_iterator i=(*it_g)->getAttributes().begin();i!=(*it_g)->getAttributes().end();i++) {
2707 
2708  // Handle string first.
2709  if((*i)->getType()==DFNT_UCHAR || (*i)->getType() == DFNT_CHAR){
2710  string tempstring2((*i)->getValue().begin(),(*i)->getValue().end());
2711  string tempfinalstr= string(tempstring2.c_str());
2712 
2713  // We want to escape the possible special characters for attributes except the fullpath attribute. This may be overkilled since
2714  // fullpath is only added for some CERES and MERRA data. However, we think people use fullpath really mean to keep their
2715  // original names. So we don't escape the fullpath attribute. KY 2013-10-14
2716 
2717  at->append_attr((*i)->getNewName(), "String" ,((*i)->getNewName()=="fullpath")?tempfinalstr:HDFCFUtil::escattr(tempfinalstr));
2718  }
2719  else {
2720  for (int loc=0; loc < (*i)->getCount() ; loc++) {
2721  string print_rep = HDFCFUtil::print_attr((*i)->getType(), loc, (void*) &((*i)->getValue()[0]));
2722  at->append_attr((*i)->getNewName(), HDFCFUtil::print_type((*i)->getType()), print_rep);
2723  }
2724  }
2725  }
2726 
2727  }
2728 
2729 //#if 0
2730  // Handle HDF-EOS2 object attributes. These are found in AIRS version 6.
2731  HDFCFUtil::map_eos2_objects_attrs(das,filename);
2732 //#endif
2733 
2734  return true;
2735 }
2736 
2737 
// MOD/MYD08 M3 follows a non-CF scale/offset rule; we need to change the add_offset value when add_offset is not 0.
2739 void change_das_mod08_scale_offset(DAS &das, HDFSP::File *f) {
2740 
2741  // Handle individual fields
2742  // Check HDFCFUtil::handle_modis_special_attrs_disable_scale_comp
2743  const vector<HDFSP::SDField *>& spsds = f->getSD()->getFields();
2744  vector<HDFSP::SDField *>::const_iterator it_g;
2745  for(it_g = spsds.begin(); it_g != spsds.end(); it_g++){
2746  if((*it_g)->getFieldType() == 0){
2747  AttrTable *at = das.get_table((*it_g)->getNewName());
2748  if (!at)
2749  at = das.add_table((*it_g)->getNewName(), new AttrTable);
2750 
2751  // Declare add_offset type in string format.
2752  string add_offset_type;
2753 
2754  // add_offset values
2755  string add_offset_value="0";
2756  double orig_offset_value = 0;
2757  bool add_offset_modify = false;
2758 
2759 
2760  // Go through all attributes to find add_offset
2761  // If add_offset is 0 or add_offset is not found, we don't need
2762  // to modify the add_offset value.
2763  AttrTable::Attr_iter it = at->attr_begin();
2764  while (it!=at->attr_end())
2765  {
2766  if(at->get_name(it)=="add_offset")
2767  {
2768  add_offset_value = (*at->get_attr_vector(it)->begin());
2769  orig_offset_value = atof(add_offset_value.c_str());
2770  add_offset_type = at->get_type(it);
2771  if(add_offset_value == "0.0" || orig_offset_value == 0)
2772  add_offset_modify = false;
2773  else
2774  add_offset_modify = true;
2775  break;
2776  }
2777  it++;
2778 
2779  }
2780 
2781  // We need to modify the add_offset value if the add_offset exists.
2782  if( true == add_offset_modify) {
2783 
2784  // Declare scale_factor type in string format.
2785  string scale_factor_type;
2786 
2787  // Scale values
2788  string scale_factor_value="";
2789  double orig_scale_value = 1;
2790 
2791  it = at->attr_begin();
2792  while (it!=at->attr_end())
2793  {
2794  if(at->get_name(it)=="scale_factor")
2795  {
2796  scale_factor_value = (*at->get_attr_vector(it)->begin());
2797  orig_scale_value = atof(scale_factor_value.c_str());
2798  scale_factor_type = at->get_type(it);
2799  }
2800  it++;
2801  }
2802 
2803  if(scale_factor_value.length() !=0) {
2804  double new_offset_value = -1 * orig_scale_value*orig_offset_value;
2805  string print_rep = HDFCFUtil::print_attr(DFNT_FLOAT64,0,(void*)(&new_offset_value));
2806  at->del_attr("add_offset");
2807  at->append_attr("add_offset", HDFCFUtil::print_type(DFNT_FLOAT64), print_rep);
2808  }
2809  }
2810 
2811  }
2812 
2813  }
2814 
2815 }
2816 
// Function to build the special AIRS version 6 and MOD08_M3 DDS. Handling these products this way improves performance.
// Build the DDS for the special products served through the SDS-only fast path
// (AIRS version 6 level 2/3 and MOD08_M3-like files).  When check_cache is true
// and HDFCFUtil::obtain_dds_cache_size() reports a non-zero size, the generated
// DDS is also written to a binary cache file under the directory given by the
// BES key "H4.Cache.metadata.path".
//
// @param dds         DDS to populate
// @param spf         parsed HDF4 file description
// @param filename    path of the HDF4 file
// @param sdid        HDF4 SD interface id used by the array classes at read time
// @param check_cache when true, attempt to generate the DDS cache file
// @return true on success (errors are reported via exceptions)
bool read_dds_special_1d_grid(DDS &dds,HDFSP::File* spf,const string& filename, int32 sdid,bool check_cache) {
//bool read_dds_special_1d_grid(DDS &dds,HDFSP::File* spf,const string& filename, int32 sdid, int32 fileid) {


    BESDEBUG("h4","Coming to read_dds_special_1d_grid "<<endl);
    bool dds_cache = false;
    size_t total_bytes_dds_cache = 0;

    // Only support AIRS version 6 level 2 or level 3 KY 2015-06-07
    if(true == check_cache) {

        // A non-zero cache size means the DDS cache file can be generated.
        total_bytes_dds_cache = HDFCFUtil::obtain_dds_cache_size(spf);
        BESDEBUG("h4","Total DDS cache file size is "<< total_bytes_dds_cache<<endl);
        if(total_bytes_dds_cache !=0)
            dds_cache = true;

    }

    SPType sptype = OTHERHDF;
    const vector<HDFSP::SDField *>& spsds = spf->getSD()->getFields();

    // Read SDS
    vector<HDFSP::SDField *>::const_iterator it_g;
    for(it_g = spsds.begin(); it_g != spsds.end(); it_g++){

        // Map this SDS's HDF4 data type to the matching DAP BaseType template.
        // bt is only a template object; ownership passes logically to the array
        // classes below, and the local copy is deleted after dds.add_var().
        BaseType *bt=NULL;
        switch((*it_g)->getType()) {
#define HANDLE_CASE(tid, type) \
    case tid: \
        bt = new (type)((*it_g)->getNewName(),filename); \
        break;
        HANDLE_CASE(DFNT_FLOAT32, HDFFloat32);
        HANDLE_CASE(DFNT_FLOAT64, HDFFloat64);
        HANDLE_CASE(DFNT_CHAR, HDFStr);
#ifndef SIGNED_BYTE_TO_INT32
        HANDLE_CASE(DFNT_INT8, HDFByte);
#else
        HANDLE_CASE(DFNT_INT8,HDFInt32);
#endif
        HANDLE_CASE(DFNT_UINT8, HDFByte);
        HANDLE_CASE(DFNT_INT16, HDFInt16);
        HANDLE_CASE(DFNT_UINT16, HDFUInt16);
        HANDLE_CASE(DFNT_INT32, HDFInt32);
        HANDLE_CASE(DFNT_UINT32, HDFUInt32);
        HANDLE_CASE(DFNT_UCHAR8, HDFByte);
        default:
            throw InternalErr(__FILE__,__LINE__,"unsupported data type.");
#undef HANDLE_CASE
        }

        if(bt)
        {

            const vector<HDFSP::Dimension*>& dims= (*it_g)->getDimensions();

            vector<HDFSP::Dimension*>::const_iterator it_d;

            // Char will be mapped to DAP string.
            if(DFNT_CHAR == (*it_g)->getType()) {

                // Rank-1 char arrays become DAP scalar strings.
                if(1 == (*it_g)->getRank()) {
                    HDFCFStr * sca_str = NULL;
                    try {
                        sca_str = new HDFCFStr(
                                               sdid,
                                               (*it_g)->getFieldRef(),
                                               filename,
                                               (*it_g)->getName(),
                                               (*it_g)->getNewName(),
                                               false
                                              );
                    }
                    catch(...) {
                        delete bt;
                        throw InternalErr(__FILE__,__LINE__,"Unable to allocate the HDFCFStr instance.");
                    }
                    dds.add_var(sca_str);
                    delete bt;
                    delete sca_str;
                }

                else {
                    // Rank-n char arrays become (n-1)-D arrays of DAP strings;
                    // the last (fastest-varying) dimension supplies the string length.
                    HDFCFStrField *ar = NULL;
                    try {

                        ar = new HDFCFStrField(
                                               (*it_g)->getRank() -1 ,
                                               filename,
                                               false,
                                               sdid,
                                               (*it_g)->getFieldRef(),
                                               0,
                                               (*it_g)->getName(),
                                               (*it_g)->getNewName(),
                                               bt);

                    }
                    catch(...) {
                        delete bt;
                        throw InternalErr(__FILE__,__LINE__,"Unable to allocate the HDFCFStrField instance.");
                    }

                    // Append all dimensions except the last one.
                    for(it_d = dims.begin(); it_d != dims.begin()+dims.size()-1; it_d++)
                        ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
                    dds.add_var(ar);
                    delete bt;
                    delete ar;
                }

            }

            else {// Other datatypes

                // Non missing fields
                if((*it_g)->getFieldType()!= 4) {
                    HDFSPArray_RealField *ar = NULL;

                    try {

                        vector<int32>dimsizes;

                        dimsizes.resize((*it_g)->getRank());
                        for(int i = 0; i <(*it_g)->getRank();i++)
                            dimsizes[i] = (int32)((dims[i])->getSize());
                        ar = new HDFSPArray_RealField(
                                                      (*it_g)->getRank(),
                                                      filename,
                                                      sdid,
                                                      (*it_g)->getFieldRef(),
                                                      (*it_g)->getType(),
                                                      sptype,
                                                      (*it_g)->getName(),
                                                      dimsizes,
                                                      (*it_g)->getNewName(),
                                                      bt);
                    }
                    catch(...) {
                        delete bt;
                        throw InternalErr(__FILE__,__LINE__,"Unable to allocate the HDFSPArray_RealField instance.");
                    }
                    for(it_d = dims.begin(); it_d != dims.end(); it_d++)
                        ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
                    dds.add_var(ar);
                    delete bt;
                    delete ar;
                }
                else {
                    // Field type 4: missing Z-dimension field; its values are
                    // generated by HDFSPArrayMissGeoField, not read from the file.
                    if((*it_g)->getRank()!=1){
                        delete bt;
                        throw InternalErr(__FILE__, __LINE__, "The rank of missing Z dimension field must be 1");
                    }
                    int nelem = ((*it_g)->getDimensions()[0])->getSize();

                    HDFSPArrayMissGeoField *ar = NULL;

                    try {
                        ar = new HDFSPArrayMissGeoField(
                                                        (*it_g)->getRank(),
                                                        nelem,
                                                        (*it_g)->getNewName(),
                                                        bt);
                    }
                    catch(...) {
                        delete bt;
                        throw InternalErr(__FILE__,__LINE__,
                                          "Unable to allocate the HDFSPArrayMissGeoField instance.");
                    }


                    for(it_d = dims.begin(); it_d != dims.end(); it_d++)
                        ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
                    dds.add_var(ar);
                    delete bt;
                    delete ar;

                }
            }
        }
    }

    // If we need to generate a DDS cache file,
    if(true == dds_cache) {

        // Check the file path
        string md_cache_dir;
        string key = "H4.Cache.metadata.path";
        bool found = false;
        TheBESKeys::TheKeys()->get_value(key,md_cache_dir,found);

        if(true == found) {

            // Create the DDS cache file name.
            string base_file_name = basename(filename);
            string dds_filename = md_cache_dir + "/"+base_file_name +"_dds";

            // DDS cache file is a binary file, this makes the file size smaller.
            FILE* dds_file =fopen(dds_filename.c_str(),"wb");
            if(NULL == dds_file) {
                string msg = "Cannot create the cache file. " + dds_filename + get_errno();
                throw InternalErr(__FILE__,__LINE__,msg);
            }
            int fd = fileno(dds_file);
            // NOTE(review): lock() is presumably a file-scope helper returning a
            // struct flock configured for the requested lock type -- confirm.
            struct flock *l= lock(F_WRLCK);
            // Block until the whole-file write lock is acquired.
            if (fcntl(fd, F_SETLKW, l) == -1) {
                fclose(dds_file);
                string msg = "Cannot hold the write lock for dds cached file "+ dds_filename;
                throw InternalErr (__FILE__, __LINE__,msg);
            }
            // TRY CATCH to close fclose.
            try {
                HDFCFUtil::write_sp_sds_dds_cache(spf,dds_file,total_bytes_dds_cache,dds_filename);
            }
            catch(...) {
                // Release the lock and close the stream before rethrowing.
                if (fcntl(fd, F_SETLK, lock(F_UNLCK)) == -1) {
                    fclose(dds_file);
                    string msg = "Cannot release the write lock for dds cached file "+ dds_filename;
                    throw InternalErr (__FILE__, __LINE__,msg);
                }

                fclose(dds_file);
                throw InternalErr(__FILE__,__LINE__,"Fail to generate a dds cache file.");
            }
            if (fcntl(fd, F_SETLK, lock(F_UNLCK)) == -1) {
                fclose(dds_file);
                string msg = "Cannot release the write lock for dds cached file "+ dds_filename;
                throw InternalErr (__FILE__, __LINE__,msg);
            }
            fclose(dds_file);

        }

        else {
            throw InternalErr (__FILE__, __LINE__,
                               "DDS/DAS metadata cache path cannot be found when 'H4.EnableMetaDataCacheFile' key is set to be true.");
        }
    }

    return true;

}
3057 
3058 // Read SDS fields
// Map one SDS field to a DAP variable and add it to the DDS.  The field's
// getFieldType() selects the array class: 0/3 -> HDFSPArray_RealField,
// 1/2 (lat/lon) -> HDFSPArray_RealField or HDFSPArrayGeoField depending on
// product type, 4 -> HDFSPArrayMissGeoField (generated values),
// 6 -> HDFSPArrayAddCVField (coordinate values from the product spec).
// DFNT_CHAR fields are mapped to DAP strings regardless of field type.
//
// @param dds      DDS to add the variable to
// @param filename path of the HDF4 file
// @param sdfd     HDF4 SD interface id used at read time
// @param spsds    the SDS field to map
// @param sptype   detected NASA product type
void read_dds_spfields(DDS &dds,const string& filename,const int sdfd,HDFSP::SDField *spsds, SPType sptype) {

    BESDEBUG("h4","Coming to read_dds_spfields "<<endl);

    // Ignore the dimension variable that is empty for non-special handling NASA HDF products
    if(OTHERHDF == sptype && (true == spsds->IsDimNoScale()))
        return;

    // Map the HDF4 data type to the matching DAP BaseType template.  bt is a
    // template object only; it is deleted after the array class copies it.
    BaseType *bt=NULL;
    switch(spsds->getType()) {

#define HANDLE_CASE(tid, type) \
    case tid: \
        bt = new (type)(spsds->getNewName(),filename); \
        break;
        HANDLE_CASE(DFNT_FLOAT32, HDFFloat32);
        HANDLE_CASE(DFNT_FLOAT64, HDFFloat64);
        HANDLE_CASE(DFNT_CHAR, HDFStr);
#ifndef SIGNED_BYTE_TO_INT32
        HANDLE_CASE(DFNT_INT8, HDFByte);
        //HANDLE_CASE(DFNT_CHAR, HDFByte);
#else
        HANDLE_CASE(DFNT_INT8,HDFInt32);
        //HANDLE_CASE(DFNT_CHAR, HDFInt32);
#endif
        HANDLE_CASE(DFNT_UINT8, HDFByte);
        HANDLE_CASE(DFNT_INT16, HDFInt16);
        HANDLE_CASE(DFNT_UINT16, HDFUInt16);
        HANDLE_CASE(DFNT_INT32, HDFInt32);
        HANDLE_CASE(DFNT_UINT32, HDFUInt32);
        HANDLE_CASE(DFNT_UCHAR, HDFByte);
        default:
            throw InternalErr(__FILE__,__LINE__,"unsupported data type.");
#undef HANDLE_CASE
    }
    int fieldtype = spsds->getFieldType();// Whether the field is real field,lat/lon field or missing Z-dimension field

    if(bt)
    {

        const vector<HDFSP::Dimension*>& dims= spsds->getCorrectedDimensions();
        vector<HDFSP::Dimension*>::const_iterator it_d;

        // Char fields are mapped to DAP strings.
        if(DFNT_CHAR == spsds->getType()) {

            // Rank-1 char arrays become DAP scalar strings.
            if(1 == spsds->getRank()) {

                HDFCFStr * sca_str = NULL;

                try {

                    sca_str = new HDFCFStr(
                                           sdfd,
                                           spsds->getFieldRef(),
                                           filename,
                                           spsds->getName(),
                                           spsds->getNewName(),
                                           false
                                          );
                }
                catch(...) {
                    delete bt;
                    throw InternalErr(__FILE__,__LINE__,"Unable to allocate the HDFCFStr instance.");
                }
                dds.add_var(sca_str);
                delete bt;
                delete sca_str;
            }
            else {
                // Rank-n char arrays become (n-1)-D arrays of DAP strings; the
                // last (fastest-varying) dimension supplies the string length.
                HDFCFStrField *ar = NULL;
                try {

                    ar = new HDFCFStrField(
                                           spsds->getRank() -1 ,
                                           filename,
                                           false,
                                           sdfd,
                                           spsds->getFieldRef(),
                                           0,
                                           spsds->getName(),
                                           spsds->getNewName(),
                                           bt);

                }
                catch(...) {
                    delete bt;
                    throw InternalErr(__FILE__,__LINE__,"Unable to allocate the HDFCFStrField instance.");
                }

                // Append all dimensions except the last one.
                for(it_d = dims.begin(); it_d != dims.begin()+dims.size()-1; it_d++)
                    ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
                dds.add_var(ar);
                delete bt;
                delete ar;
            }

        }

        // For non-CV variables and the existing non-lat/lon CV variables
        else if(fieldtype == 0 || fieldtype == 3 ) {

            HDFSPArray_RealField *ar = NULL;

            try {
                vector<int32>dimsizes;
                dimsizes.resize(spsds->getRank());
                for(int i = 0; i <spsds->getRank();i++)
                    dimsizes[i] = (int32)((dims[i])->getSize());

                ar = new HDFSPArray_RealField(
                                              spsds->getRank(),
                                              filename,
                                              sdfd,
                                              spsds->getFieldRef(),
                                              spsds->getType(),
                                              sptype,
                                              spsds->getName(),
                                              dimsizes,
                                              spsds->getNewName(),
                                              bt);
            }
            catch(...) {
                delete bt;
                throw InternalErr(__FILE__,__LINE__,"Unable to allocate the HDFSPArray_RealField instance.");
            }

            for(it_d = dims.begin(); it_d != dims.end(); it_d++)
                ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
            dds.add_var(ar);
            delete bt;
            delete ar;
        }

        // For latitude and longitude
        else if(fieldtype == 1 || fieldtype == 2) {

            // These products store lat/lon as ordinary SDS data, so the
            // general real-field array class is used.
            if(sptype == MODISARNSS || sptype == TRMML2_V7) {

                HDFSPArray_RealField *ar = NULL;

                try {

                    vector<int32>dimsizes;

                    dimsizes.resize(spsds->getRank());
                    for(int i = 0; i <spsds->getRank();i++)
                        dimsizes[i] = (dims[i])->getSize();

                    ar = new HDFSPArray_RealField(
                                                  spsds->getRank(),
                                                  filename,
                                                  sdfd,
                                                  spsds->getFieldRef(),
                                                  spsds->getType(),
                                                  sptype,
                                                  spsds->getName(),
                                                  dimsizes,
                                                  spsds->getNewName(),
                                                  bt);
                }
                catch(...) {
                    delete bt;
                    throw InternalErr(__FILE__,__LINE__,
                                      "Unable to allocate the HDFSPArray_RealField instance.");
                }


                for(it_d = dims.begin(); it_d != dims.end(); it_d++)
                    ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
                dds.add_var(ar);
                delete bt;
                delete ar;

            }
            else {
                // Other products need product-specific lat/lon handling.
                HDFSPArrayGeoField *ar = NULL;

                try {
                    ar = new HDFSPArrayGeoField(
                                                spsds->getRank(),
                                                filename,
                                                sdfd,
                                                spsds->getFieldRef(),
                                                spsds->getType(),
                                                sptype,
                                                fieldtype,
                                                spsds->getName(),
                                                spsds->getNewName(),
                                                bt);
                }
                catch(...) {
                    delete bt;
                    throw InternalErr(__FILE__,__LINE__,
                                      "Unable to allocate the HDFSPArray_RealField instance.");
                }

                for(it_d = dims.begin(); it_d != dims.end(); it_d++)
                    ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
                dds.add_var(ar);
                delete bt;
                delete ar;
            }
        }


        else if(fieldtype == 4) { //missing Z dimensional field(or coordinate variables with missing values)
            if(spsds->getRank()!=1){
                delete bt;
                throw InternalErr(__FILE__, __LINE__, "The rank of missing Z dimension field must be 1");
            }
            int nelem = (spsds->getDimensions()[0])->getSize();

            HDFSPArrayMissGeoField *ar = NULL;

            try {
                ar = new HDFSPArrayMissGeoField(
                                                spsds->getRank(),
                                                nelem,
                                                spsds->getNewName(),
                                                bt);
            }
            catch(...) {
                delete bt;
                throw InternalErr(__FILE__,__LINE__,
                                  "Unable to allocate the HDFSPArrayMissGeoField instance.");
            }


            for(it_d = dims.begin(); it_d != dims.end(); it_d++)
                ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
            dds.add_var(ar);
            delete bt;
            delete ar;
        }
        // fieldtype =5 originally keeps for time. Still keep it for a while.

        else if(fieldtype == 6) { //Coordinate variables added from the product specification

            if(spsds->getRank()!=1){
                delete bt;
                throw InternalErr(__FILE__, __LINE__, "The rank of added coordinate variable must be 1");
            }
            int nelem = (spsds->getDimensions()[0])->getSize();

            HDFSPArrayAddCVField *ar = NULL;
            try {
                ar = new HDFSPArrayAddCVField(
                                              spsds->getType(),
                                              sptype,
                                              spsds->getName(),
                                              nelem,
                                              spsds->getNewName(),
                                              bt);
            }
            catch(...) {
                delete bt;
                throw InternalErr(__FILE__,__LINE__,
                                  "Unable to allocate the HDFSPArrayAddCVField instance.");
            }


            for(it_d = dims.begin(); it_d != dims.end(); it_d++)
                ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
            dds.add_var(ar);
            delete bt;
            delete ar;
        }
        else {
            delete bt;
            throw InternalErr(__FILE__, __LINE__, "The field type should be one of 0,1,2,3,4 or 6.");

        }
    }

}
3335 
3336 // Read Vdata fields.
3337 void read_dds_spvdfields(DDS &dds,const string & filename, const int fileid,int32 objref,int32 numrec,HDFSP::VDField *spvd) {
3338 
3339  BESDEBUG("h4","Coming to read_dds_spvdfields "<<endl);
3340 
3341  // First map the HDF4 datatype to DAP2
3342  BaseType *bt=NULL;
3343  switch(spvd->getType()) {
3344 #define HANDLE_CASE(tid, type) \
3345  case tid: \
3346  bt = new (type)(spvd->getNewName(),filename); \
3347  break;
3348  HANDLE_CASE(DFNT_FLOAT32, HDFFloat32);
3349  HANDLE_CASE(DFNT_FLOAT64, HDFFloat64);
3350  HANDLE_CASE(DFNT_CHAR8,HDFStr);
3351 #ifndef SIGNED_BYTE_TO_INT32
3352  HANDLE_CASE(DFNT_INT8, HDFByte);
3353 #else
3354  HANDLE_CASE(DFNT_INT8,HDFInt32);
3355 #endif
3356  HANDLE_CASE(DFNT_UINT8, HDFByte);
3357  HANDLE_CASE(DFNT_INT16, HDFInt16);
3358  HANDLE_CASE(DFNT_UINT16, HDFUInt16);
3359  HANDLE_CASE(DFNT_INT32, HDFInt32);
3360  HANDLE_CASE(DFNT_UINT32, HDFUInt32);
3361  HANDLE_CASE(DFNT_UCHAR8, HDFByte);
3362  //HANDLE_CASE(DFNT_CHAR8, HDFByte);
3363  //HANDLE_CASE(DFNT_CHAR8, HDFByte);
3364  default:
3365  throw InternalErr(__FILE__,__LINE__,"unsupported data type.");
3366 #undef HANDLE_CASE
3367  }
3368 
3369  if(bt)
3370  {
3371 
3372  if(DFNT_CHAR == spvd->getType()) {
3373 
3374  // If the field order is >1, the vdata field will be 2-D array
3375  // with the number of elements along the fastest changing dimension
3376  // as the field order.
3377  int vdrank = ((spvd->getFieldOrder())>1)?2:1;
3378  if (1 == vdrank) {
3379 
3380  HDFCFStr * sca_str = NULL;
3381  try {
3382  sca_str = new HDFCFStr(
3383  fileid,
3384  objref,
3385  filename,
3386  spvd->getName(),
3387  spvd->getNewName(),
3388  true
3389  );
3390  }
3391  catch(...) {
3392  delete bt;
3393  throw InternalErr(__FILE__,__LINE__,"Unable to allocate the HDFCFStr instance.");
3394  }
3395  dds.add_var(sca_str);
3396  delete bt;
3397  delete sca_str;
3398  }
3399 
3400  else {
3401 
3402  HDFCFStrField *ar = NULL;
3403  try {
3404 
3405  ar = new HDFCFStrField(
3406  vdrank -1 ,
3407  filename,
3408  true,
3409  fileid,
3410  objref,
3411  spvd->getFieldOrder(),
3412  spvd->getName(),
3413  spvd->getNewName(),
3414  bt);
3415 
3416  }
3417  catch(...) {
3418  delete bt;
3419  throw InternalErr(__FILE__,__LINE__,"Unable to allocate the HDFCFStrField instance.");
3420  }
3421 
3422  string dimname0 = "VDFDim0_"+spvd->getNewName();
3423  ar->append_dim(numrec, dimname0);
3424  dds.add_var(ar);
3425  delete bt;
3426  delete ar;
3427 
3428  }
3429  }
3430  else {
3431  HDFSPArray_VDField *ar = NULL;
3432 
3433  // If the field order is >1, the vdata field will be 2-D array
3434  // with the number of elements along the fastest changing dimension
3435  // as the field order.
3436  int vdrank = ((spvd->getFieldOrder())>1)?2:1;
3437  ar = new HDFSPArray_VDField(
3438  vdrank,
3439  filename,
3440  fileid,
3441  objref,
3442  spvd->getType(),
3443  spvd->getFieldOrder(),
3444  spvd->getName(),
3445  spvd->getNewName(),
3446  bt);
3447 
3448  string dimname1 = "VDFDim0_"+spvd->getNewName();
3449 
3450  string dimname2 = "VDFDim1_"+spvd->getNewName();
3451  if(spvd->getFieldOrder() >1) {
3452  ar->append_dim(numrec,dimname1);
3453  ar->append_dim(spvd->getFieldOrder(),dimname2);
3454  }
3455  else
3456  ar->append_dim(numrec,dimname1);
3457 
3458  dds.add_var(ar);
3459  delete bt;
3460  delete ar;
3461  }
3462  }
3463 
3464 }
3465 
3466 // This routine will check if this is a special EOS2 file that we can improve the performance
3467 // Currently AIRS level 2 and 3 version 6 and MOD08_M3-like products are what we can serve. KY 2014-01-29
3468 int check_special_eosfile(const string & filename, string& grid_name,int32 sdfd,int32 /*fileid //unused SBL 2/7/20 */ ) {
3469 
3470  int32 sds_id = 0;
3471  int32 n_sds = 0;
3472  int32 n_sd_attrs = 0;
3473  bool is_eos = false;
3474  int ret_val = 1;
3475 
3476  // Obtain number of SDS objects and number of SD(file) attributes
3477  if (SDfileinfo (sdfd, &n_sds, &n_sd_attrs) == FAIL){
3478  throw InternalErr (__FILE__,__LINE__,"SDfileinfo failed ");
3479  }
3480 
3481  char attr_name[H4_MAX_NC_NAME];
3482  int32 attr_type = -1;
3483  int32 attr_count = -1;
3484  char structmdname[] = "StructMetadata.0";
3485 
3486  // Is this an HDF-EOS2 file?
3487  for (int attr_index = 0; attr_index < n_sd_attrs;attr_index++) {
3488  if(SDattrinfo(sdfd,attr_index,attr_name,&attr_type,&attr_count) == FAIL) {
3489  throw InternalErr (__FILE__,__LINE__,"SDattrinfo failed ");
3490  }
3491 
3492  if(strcmp(attr_name,structmdname)==0) {
3493  is_eos = true;
3494  break;
3495  }
3496  }
3497 
3498  if(true == is_eos) {
3499 
3500  int sds_index = 0;
3501  int32 sds_rank = 0;
3502  int32 dim_sizes[H4_MAX_VAR_DIMS];
3503  int32 sds_dtype = 0;
3504  int32 n_sds_attrs = 0;
3505  char sds_name[H4_MAX_NC_NAME];
3506  char xdim_name[] ="XDim";
3507  char ydim_name[] ="YDim";
3508 
3509  string temp_grid_name1;
3510  string temp_grid_name2;
3511  bool xdim_is_cv_flag = false;
3512  bool ydim_is_cv_flag = false;
3513 
3514 
3515  // The following for-loop checks if this is a MOD08_M3-like HDF-EOS2 product.
3516  for (sds_index = 0; sds_index < (int)n_sds; sds_index++) {
3517 
3518  sds_id = SDselect (sdfd, sds_index);
3519  if (sds_id == FAIL) {
3520  throw InternalErr (__FILE__,__LINE__,"SDselect failed ");
3521  }
3522 
3523  // Obtain object name, rank, size, field type and number of SDS attributes
3524  int status = SDgetinfo (sds_id, sds_name, &sds_rank, dim_sizes,
3525  &sds_dtype, &n_sds_attrs);
3526  if (status == FAIL) {
3527  SDendaccess(sds_id);
3528  throw InternalErr (__FILE__,__LINE__,"SDgetinfo failed ");
3529  }
3530 
3531  if(1 == sds_rank) {
3532 
3533  // This variable "XDim" exists
3534  if(strcmp(sds_name,xdim_name) == 0) {
3535  int32 sds_dimid = SDgetdimid(sds_id,0);
3536  if(sds_dimid == FAIL) {
3537  SDendaccess(sds_id);
3538  throw InternalErr (__FILE__,__LINE__,"SDgetinfo failed ");
3539  }
3540  char dim_name[H4_MAX_NC_NAME];
3541  int32 dim_size = 0;
3542  int32 dim_type = 0;
3543  int32 num_dim_attrs = 0;
3544  if(SDdiminfo(sds_dimid,dim_name,&dim_size,&dim_type,&num_dim_attrs) == FAIL) {
3545  SDendaccess(sds_id);
3546  throw InternalErr(__FILE__,__LINE__,"SDdiminfo failed ");
3547  }
3548 
3549  // No dimension scale and XDim exists
3550  if(0 == dim_type) {
3551  string tempdimname(dim_name);
3552  if(tempdimname.size() >=5) {
3553  if(tempdimname.compare(0,5,"XDim:") == 0) {
3554 
3555  // Obtain the grid name.
3556  temp_grid_name1 = tempdimname.substr(5);
3557  xdim_is_cv_flag = true;
3558 
3559  }
3560  }
3561  else if("XDim" == tempdimname)
3562  xdim_is_cv_flag = true;
3563  }
3564  }
3565 
3566  // The variable "YDim" exists
3567  if(strcmp(sds_name,ydim_name) == 0) {
3568 
3569  int32 sds_dimid = SDgetdimid(sds_id,0);
3570  if(sds_dimid == FAIL) {
3571  SDendaccess (sds_id);
3572  throw InternalErr (__FILE__,__LINE__,"SDgetinfo failed ");
3573  }
3574  char dim_name[H4_MAX_NC_NAME];
3575  int32 dim_size = 0;
3576  int32 dim_type = 0;
3577  int32 num_dim_attrs = 0;
3578  if(SDdiminfo(sds_dimid,dim_name,&dim_size,&dim_type,&num_dim_attrs) == FAIL) {
3579  SDendaccess(sds_id);
3580  throw InternalErr(__FILE__,__LINE__,"SDdiminfo failed ");
3581  }
3582 
3583  // For this case, the dimension should not have dimension scales.
3584  if(0 == dim_type) {
3585  string tempdimname(dim_name);
3586  if(tempdimname.size() >=5) {
3587  if(tempdimname.compare(0,5,"YDim:") == 0) {
3588  // Obtain the grid name.
3589  temp_grid_name2 = tempdimname.substr(5);
3590  ydim_is_cv_flag = true;
3591  }
3592  }
3593  else if ("YDim" == tempdimname)
3594  ydim_is_cv_flag = true;
3595  }
3596  }
3597  }
3598 
3599  SDendaccess(sds_id);
3600  if((true == xdim_is_cv_flag) && (true == ydim_is_cv_flag ))
3601  break;
3602 
3603  }
3604 
3605  // If one-grid and variable XDim/YDim exist and also they don't have dim. scales,we treat this as MOD08-M3-like products
3606  if ((temp_grid_name1 == temp_grid_name2) && (true == xdim_is_cv_flag) && (true == ydim_is_cv_flag)) {
3607  grid_name = temp_grid_name1;
3608  ret_val = 2;
3609  }
3610 
3611  // Check if this is a new AIRS level 2 and 3 product. Since new AIRS level 2 and 3 version 6 products still have dimensions that don't have
3612  // dimension scales and the old way to handle level 2 and 3 dimensions makes the performance suffer. We will see if we can improve
3613  // performance by handling the data with just the HDF4 interfaces.
3614  // At least the file name should have string AIRS.L3. or AIRS.L2..
3615  else if((basename(filename).size() >8) && (basename(filename).compare(0,4,"AIRS") == 0)
3616  && ((basename(filename).find(".L3.")!=string::npos) || (basename(filename).find(".L2.")!=string::npos))){
3617 
3618  bool has_dimscale = false;
3619 
3620  // Go through the SDS object and check if this file has dimension scales.
3621  for (sds_index = 0; sds_index < n_sds; sds_index++) {
3622 
3623  sds_id = SDselect (sdfd, sds_index);
3624  if (sds_id == FAIL) {
3625  throw InternalErr (__FILE__,__LINE__,"SDselect failed ");
3626  }
3627 
3628  // Obtain object name, rank, size, field type and number of SDS attributes
3629  int status = SDgetinfo (sds_id, sds_name, &sds_rank, dim_sizes,
3630  &sds_dtype, &n_sds_attrs);
3631  if (status == FAIL) {
3632  SDendaccess(sds_id);
3633  throw InternalErr (__FILE__,__LINE__,"SDgetinfo failed ");
3634  }
3635 
3636  for (int dim_index = 0; dim_index<sds_rank; dim_index++) {
3637 
3638  int32 sds_dimid = SDgetdimid(sds_id,dim_index);
3639  if(sds_dimid == FAIL) {
3640  SDendaccess(sds_id);
3641  throw InternalErr (__FILE__,__LINE__,"SDgetinfo failed ");
3642  }
3643 
3644  char dim_name[H4_MAX_NC_NAME];
3645  int32 dim_size = 0;
3646  int32 dim_type = 0;
3647  int32 num_dim_attrs = 0;
3648  if(SDdiminfo(sds_dimid,dim_name,&dim_size,&dim_type,&num_dim_attrs) == FAIL) {
3649  SDendaccess(sds_id);
3650  throw InternalErr(__FILE__,__LINE__,"SDdiminfo failed ");
3651  }
3652 
3653  if(dim_type !=0) {
3654  has_dimscale = true;
3655  break;
3656  }
3657 
3658  }
3659  SDendaccess(sds_id);
3660  if( true == has_dimscale)
3661  break;
3662  }
3663 
3664  // If having dimension scales, this is an AIRS level 2 or 3 version 6. Treat it differently. Otherwise, this is an old AIRS level 3 product.
3665  if (true == has_dimscale)
3666  ret_val = 3;
3667  }
3668  else {// Check if this is an HDF-EOS2 file but not using HDF-EOS2 at all.
3669  // We turn off this for the time being because
3670  // 1) We need to make sure this is a grid file not swath or point file.
3671  // It will be time consuming to identify grids or swaths and hurts the performance for general case.
3672  // 2) No real NASA files exist. We will handle them later.
3673  // KY 2014-01-29
3674  ;
3675 #if 0
3676  bool has_dimscale = true;
3677  bool is_grid = false;
3678 
3679  // Go through the SDS object
3680  for (sds_index = 0; sds_index < n_sds; sds_index++) {
3681 
3682  sds_id = SDselect (sdid, sds_index);
3683  if (sds_id == FAIL) {
3684  SDend(sdid);
3685  throw InternalErr (__FILE__,__LINE__,"SDselect failed ");
3686  }
3687 
3688  // Obtain object name, rank, size, field type and number of SDS attributes
3689  int status = SDgetinfo (sds_id, sds_name, &sds_rank, dim_sizes,
3690  &sds_dtype, &n_sds_attrs);
3691  if (status == FAIL) {
3692  SDendaccess(sds_id);
3693  SDend(sdid);
3694  throw InternalErr (__FILE__,__LINE__,"SDgetinfo failed ");
3695  }
3696 
3697 
3698  for (int dim_index = 0; dim_index<sds_rank; dim_index++) {
3699 
3700  int32 sds_dimid = SDgetdimid(sds_id,dim_index);
3701  if(sds_dimid == FAIL) {
3702  SDendaccess(sds_id);
3703  SDend(sdid);
3704  throw InternalErr (__FILE__,__LINE__,"SDgetinfo failed ");
3705  }
3706  char dim_name[H4_MAX_NC_NAME];
3707  int32 dim_size = 0;
3708  int32 dim_type = 0;
3709  int32 num_dim_attrs = 0;
3710  if(SDdiminfo(sds_dimid,dim_name,&dim_size,&dim_type,&num_dim_attrs) == FAIL) {
3711  SDendaccess(sds_id);
3712  SDend(sdid);
3713  throw InternalErr(__FILE__,__LINE__,"SDdiminfo failed ");
3714  }
3715 
3716  if(0 == dim_type) {
3717  has_dimscale = false;
3718  }
3719 
3720  }
3721  SDendaccess(sds_id);
3722  }
3723  if (true == has_dimscale)
3724  ret_val = 4;
3725 #endif
3726  }
3727  }
3728 
3729  return ret_val;
3730 }
3731 
3732 // Generate DAS for the file that only use SDS APIs. Currently this routine only applies to AIRS version 6
3733 // that can take advantage of the handler's metadata cache feature.
3734 void read_das_sds(DAS & das, const string & filename,int32 sdfd, bool ecs_metadata,HDFSP::File**h4fileptr) {
3735 
3736  HDFSP::File *spf = NULL;
3737  try {
3738  spf = HDFSP::File::Read(filename.c_str(),sdfd,-1);
3739  spf->Handle_AIRS_L23();
3740  read_das_special_eos2_core(das,spf,filename,ecs_metadata);
3741  }
3742  catch (HDFSP::Exception &e)
3743  {
3744  if (spf != NULL)
3745  delete spf;
3746  throw InternalErr(e.what());
3747  }
3748 
3749  *h4fileptr = spf;
3750  return;
3751 }
3752 
3753 // Generate DDS for the file that only use SDS APIs. Currently this routine only applies to AIRS version 6
3754 // that can take advantage of the handler's metadata cache feature.
3755 void read_dds_sds(DDS &dds, const string & filename,int32 sdfd, HDFSP::File*h4file,bool dds_setcache) {
3756 
3757  // Set DDS dataset.
3758  dds.set_dataset_name(basename(filename));
3759  read_dds_special_1d_grid(dds,h4file,filename,sdfd,dds_setcache);
3760  return;
3761 
3762 }
3763 // Default option
3764 void read_dds(DDS & dds, const string & filename)
3765 {
3766  // generate DDS, DAS
3767  DAS das;
3768  dds.set_dataset_name(basename(filename));
3769  build_descriptions(dds, das, filename);
3770 
3771  if (!dds.check_semantics()) { // DDS didn't get built right
3772  THROW(dhdferr_ddssem);
3773  }
3774  return;
3775 }
3776 
3777 void read_das(DAS & das, const string & filename)
3778 {
3779  // generate DDS, DAS
3780  DDS dds(NULL);
3781  dds.set_dataset_name(basename(filename));
3782 
3783  build_descriptions(dds, das, filename);
3784 
3785  if (!dds.check_semantics()) { // DDS didn't get built right
3786  dds.print(cout);
3787  THROW(dhdferr_ddssem);
3788  }
3789  return;
3790 }
3791 
3792 // Scan the HDF file and build the DDS and DAS
3793 static void build_descriptions(DDS & dds, DAS & das,
3794  const string & filename)
3795 {
3796  sds_map sdsmap;
3797  vd_map vdatamap;
3798  gr_map grmap;
3799 
3800  // Build descriptions of SDS items
3801  // If CF option is enabled, StructMetadata will be parsed here.
3802  SDS_descriptions(sdsmap, das, filename);
3803 
3804  // Build descriptions of file annotations
3805  FileAnnot_descriptions(das, filename);
3806 
3807  // Build descriptions of Vdatas
3808  Vdata_descriptions(vdatamap, das, filename);
3809 
3810  // Build descriptions of General Rasters
3811  GR_descriptions(grmap, das, filename);
3812 
3813  // Build descriptions of Vgroups and add SDS/Vdata/GR in the correct order
3814  Vgroup_descriptions(dds, das, filename, sdsmap, vdatamap, grmap);
3815  return;
3816 }
3817 
3818 // These two Functor classes are used to look for EOS attributes with certain
3819 // base names (is_named) and to accumulate values in different hdf_attr
3820 // objects with the same base names (accum_attr). These are used by
3821 // merge_split_eos_attributes() to do just that. Some HDF EOS attributes are
3822 // longer than HDF 4's 32,000 character limit. Those attributes are split up
3823 // in the HDF 4 files and named `StructMetadata.0', `StructMetadata.1', et
3824 // cetera. This code merges those attributes so that they can be processed
3825 // correctly by the hdf eos attribute parser (see AddHDFAttr() further down
3826 // in this file). 10/29/2001 jhrg
3827 
3828 struct accum_attr
3829  :public binary_function < hdf_genvec &, hdf_attr, hdf_genvec & > {
3830 
3831  string d_named;
3832 
3833  accum_attr(const string & named):d_named(named) {
3834  }
3835 
3836  hdf_genvec & operator() (hdf_genvec & accum, const hdf_attr & attr) {
3837  // Assume that all fields with the same base name should be combined,
3838  // and assume that they are in order.
3839  BESDEBUG("h4", "attr.name: " << attr.name << endl);
3840  if (attr.name.find(d_named) != string::npos) {
3841 #if 0
3842  string stuff;
3843  stuff.assign(attr.values.data(), attr.values.size());
3844  cerr << "Attribute chunk: " << attr.name << endl;
3845  cerr << stuff << endl;
3846 #endif
3847  accum.append(attr.values.number_type(), attr.values.data(),
3848  attr.values.size());
3849  return accum;
3850  }
3851  else {
3852  return accum;
3853  }
3854  }
3855 };
3856 
3857 struct is_named:public unary_function < hdf_attr, bool > {
3858  string d_named;
3859 
3860  is_named(const string & named):d_named(named) {
3861  }
3862 
3863  bool operator() (const hdf_attr & attr) {
3864  return (attr.name.find(d_named) != string::npos);
3865  }
3866 };
3867 
3868 static void
3869 merge_split_eos_attributes(vector < hdf_attr > &attr_vec,
3870  const string & attr_name)
3871 {
3872  // Only do this if there's more than one part.
3873  if (count_if(attr_vec.begin(), attr_vec.end(), is_named(attr_name)) > 1) {
3874  // Merge all split up parts named `attr_name.' Assume they are in
3875  // order in `attr_vec.'
3876  hdf_genvec attributes;
3877  attributes = accumulate(attr_vec.begin(), attr_vec.end(),
3878  attributes, accum_attr(attr_name));
3879 
3880  // When things go south, check out the hdf_genvec...
3881  // BEDEBUG seems not providing a way to handle the following debugging info.
3882  // I can define a vector and call attributes.print(s_m), then use
3883  // BESDEBUG to output the debugging info. The downside is that whether BESDEBUG
3884  // is called, a vector of s_m will always be generated and a chunk of memory is
3885  // always used. So don't change this for the time being. KY 2012-09-13
3886  DBG(vector < string > s_m;
3887  attributes.print(s_m);
3888  cerr << "Accum struct MD: (" << s_m.size() << ") "
3889  << s_m[0] << endl);
3890 
3891  // Remove all the parts that have been merged
3892  attr_vec.erase(remove_if(attr_vec.begin(), attr_vec.end(),
3893  is_named(attr_name)), attr_vec.end());
3894 
3895  // Make a new hdf_attr and assign it the newly merged attributes...
3896  hdf_attr merged_attr;
3897  merged_attr.name = attr_name;
3898  merged_attr.values = attributes;
3899 
3900  // And add it to the vector of attributes.
3901  attr_vec.push_back(merged_attr);
3902  }
3903 }
3904 
3905 // Read SDS's out of filename, build descriptions and put them into dds, das.
3906 static void SDS_descriptions(sds_map & map, DAS & das,
3907  const string & filename)
3908 {
3909 
3910  hdfistream_sds sdsin(filename);
3911  sdsin.setmeta(true);
3912 
3913  // Read SDS file attributes attr_iter i = ;
3914 
3915  vector < hdf_attr > fileattrs;
3916  sdsin >> fileattrs;
3917 
3918  // Read SDS's
3919  sdsin.rewind();
3920  while (!sdsin.eos()) {
3921  sds_info sdi; // add the next sds_info to map
3922  sdsin >> sdi.sds;
3923  sdi.in_vgroup = false; // assume we're not part of a vgroup
3924  map[sdi.sds.ref] = sdi; // assign to map by ref
3925  }
3926 
3927  sdsin.close();
3928 
3929  // This is the call to combine SDS attributes that have been split up
3930  // into N 32,000 character strings. 10/24/2001 jhrg
3931  merge_split_eos_attributes(fileattrs, "StructMetadata");
3932  merge_split_eos_attributes(fileattrs, "CoreMetadata");
3933  merge_split_eos_attributes(fileattrs, "ProductMetadata");
3934  merge_split_eos_attributes(fileattrs, "ArchiveMetadata");
3935  merge_split_eos_attributes(fileattrs, "coremetadata");
3936  merge_split_eos_attributes(fileattrs, "productmetadata");
3937 
3938  // Build DAS, add SDS file attributes
3939  AddHDFAttr(das, string("HDF_GLOBAL"), fileattrs);
3940  // add each SDS's attrs
3941  vector < hdf_attr > dattrs;
3942 
3943  // TODO Remove these attributes (name and dimension)? jhrg 8/17/11
3944  // ***
3945  for (SDSI s = map.begin(); s != map.end(); ++s) {
3946  const hdf_sds *sds = &s->second.sds;
3947  AddHDFAttr(das, sds->name, sds->attrs);
3948  for (int k = 0; k < (int) sds->dims.size(); ++k) {
3949  dattrs = Dims2Attrs(sds->dims[k]);
3950  AddHDFAttr(das, sds->name + "_dim_" + num2string(k), dattrs);
3951  }
3952 
3953  }
3954 
3955  return;
3956 }
3957 
3958 // Read Vdata's out of filename, build descriptions and put them into dds.
3959 static void Vdata_descriptions(vd_map & map, DAS & das,
3960  const string & filename)
3961 {
3962  hdfistream_vdata vdin(filename);
3963  vdin.setmeta(true);
3964 
3965  // Read Vdata's
3966  while (!vdin.eos()) {
3967  vd_info vdi; // add the next vd_info to map
3968  vdin >> vdi.vdata;
3969  vdi.in_vgroup = false; // assume we're not part of a vgroup
3970  map[vdi.vdata.ref] = vdi; // assign to map by ref
3971  }
3972  vdin.close();
3973 
3974  // Build DAS
3975  vector < hdf_attr > dattrs;
3976  for (VDI s = map.begin(); s != map.end(); ++s) {
3977  const hdf_vdata *vd = &s->second.vdata;
3978  AddHDFAttr(das, vd->name, vd->attrs);
3979  }
3980 
3981  return;
3982 }
3983 
// Read Vgroup's out of filename, build descriptions and put them into dds.
//
// This routine does the final assembly of the DDS: it reads all the
// Vgroups, uses each Vgroup's (tag, ref) pairs to mark which SDS/Vdata/GR
// objects (and which other Vgroups) are contained in a Vgroup, builds one
// DDS variable per top-level Vgroup, and finally adds every "lone" object
// that was not claimed by any Vgroup.
static void Vgroup_descriptions(DDS & dds, DAS & das,
                                const string & filename, sds_map & sdmap,
                                vd_map & vdmap, gr_map & grmap)
{

    hdfistream_vgroup vgin(filename);

    // Read Vgroup's
    vg_map vgmap;
    while (!vgin.eos()) {
        vg_info vgi;            // add the next vg_info to map
        vgin >> vgi.vgroup;     // read vgroup itself
        vgi.toplevel = true;    // assume toplevel until we prove otherwise
        vgmap[vgi.vgroup.ref] = vgi;    // assign to map by vgroup ref
    }
    vgin.close();
    // for each Vgroup
    for (VGI v = vgmap.begin(); v != vgmap.end(); ++v) {
        const hdf_vgroup *vg = &v->second.vgroup;

        // Add Vgroup attributes
        AddHDFAttr(das, vg->name, vg->attrs);

        // now, assign children. Each (tag, ref) pair names one object
        // contained in this Vgroup; mark it so it is not also added as a
        // "lone" object below.
        // NOTE: operator[] on these maps default-inserts an entry when
        // `ref' is not already present; the code relies on that behavior.
        for (uint32 i = 0; i < vg->tags.size(); i++) {
            int32 tag = vg->tags[i];
            int32 ref = vg->refs[i];
            switch (tag) {
            case DFTAG_VG:
                // Could be a GRI or a Vgroup
                if (grmap.find(ref) != grmap.end())
                    grmap[ref].in_vgroup = true;
                else
                    vgmap[ref].toplevel = false;
                break;
            case DFTAG_VH:
                vdmap[ref].in_vgroup = true;
                break;
            case DFTAG_NDG:
                sdmap[ref].in_vgroup = true;
                break;
            default:
                ERROR_LOG("unknown tag: " << tag << " ref: " << ref << endl);
                // TODO: Make this an exception? jhrg 8/19/11
                // Don't make an exception. Possibly you will meet other valid tags. Need to know if it
                // is worth to tackle this. KY 09/13/12
                // cerr << "unknown tag: " << tag << " ref: " << ref << endl;
                break;
            }// switch (tag)
        } // for (uint32 i = 0; i < vg->tags.size(); i++)
    } // for (VGI v = vgmap.begin(); v != vgmap.end(); ++v)
    // Build DDS for all toplevel vgroups
    BaseType *pbt = 0;
    for (VGI v = vgmap.begin(); v != vgmap.end(); ++v) {
        if (!v->second.toplevel)
            continue;           // skip over non-toplevel vgroups
        pbt = NewStructureFromVgroup(v->second.vgroup,
                                     vgmap, sdmap, vdmap,
                                     grmap, filename);
        if (pbt != 0) {
            // NOTE(review): add_var() appears to copy the variable, hence
            // the delete of the local afterwards — confirm against the
            // libdap DDS::add_var documentation.
            dds.add_var(pbt);
            delete pbt;
        }

    } // for (VGI v = vgmap.begin(); v != vgmap.end(); ++v)

    // add lone SDS's
    for (SDSI s = sdmap.begin(); s != sdmap.end(); ++s) {
        if (s->second.in_vgroup)
            continue;           // skip over SDS's in vgroups
        if (s->second.sds.has_scale())  // make a grid
            pbt = NewGridFromSDS(s->second.sds, filename);
        else
            pbt = NewArrayFromSDS(s->second.sds, filename);
        if (pbt != 0) {
            dds.add_var(pbt);
            delete pbt;
        }
    }

    // add lone Vdata's
    for (VDI v = vdmap.begin(); v != vdmap.end(); ++v) {
        if (v->second.in_vgroup)
            continue;           // skip over Vdata in vgroups
        pbt = NewSequenceFromVdata(v->second.vdata, filename);
        if (pbt != 0) {
            dds.add_var(pbt);
            delete pbt;
        }
    }
    // add lone GR's
    for (GRI g = grmap.begin(); g != grmap.end(); ++g) {
        if (g->second.in_vgroup)
            continue;           // skip over GRs in vgroups
        pbt = NewArrayFromGR(g->second.gri, filename);
        if (pbt != 0) {
            dds.add_var(pbt);
            delete pbt ;
        }
    }
}
4086 
4087 static void GR_descriptions(gr_map & map, DAS & das,
4088  const string & filename)
4089 {
4090 
4091  hdfistream_gri grin(filename);
4092  grin.setmeta(true);
4093 
4094  // Read GR file attributes
4095  vector < hdf_attr > fileattrs;
4096  grin >> fileattrs;
4097 
4098  // Read general rasters
4099  grin.rewind();
4100  while (!grin.eos()) {
4101  gr_info gri; // add the next gr_info to map
4102  grin >> gri.gri;
4103  gri.in_vgroup = false; // assume we're not part of a vgroup
4104  map[gri.gri.ref] = gri; // assign to map by ref
4105  }
4106 
4107  grin.close();
4108 
4109  // Build DAS
4110  AddHDFAttr(das, string("HDF_GLOBAL"), fileattrs); // add GR file attributes
4111 
4112  // add each GR's attrs
4113  vector < hdf_attr > pattrs;
4114  for (GRI g = map.begin(); g != map.end(); ++g) {
4115  const hdf_gri *gri = &g->second.gri;
4116  // add GR attributes
4117  AddHDFAttr(das, gri->name, gri->attrs);
4118 
4119  // add palettes as attributes
4120  pattrs = Pals2Attrs(gri->palettes);
4121  AddHDFAttr(das, gri->name, pattrs);
4122 
4123  }
4124 
4125  return;
4126 }
4127 
4128 // Read file annotations out of filename, put in attribute structure
4129 static void FileAnnot_descriptions(DAS & das, const string & filename)
4130 {
4131 
4132  hdfistream_annot annotin(filename);
4133  vector < string > fileannots;
4134 
4135  annotin >> fileannots;
4136  AddHDFAttr(das, string("HDF_GLOBAL"), fileannots);
4137 
4138  annotin.close();
4139  return;
4140 }
4141 
// add a vector of hdf_attr to a DAS
//
// das     - the DAS to add the attributes to
// varname - name of the attribute container (table) the attributes belong to
// hav     - the attributes themselves
//
// Ordinary attributes are appended to varname's AttrTable. The HDF-EOS
// metadata attributes (StructMetadata, CoreMetadata, ...) are instead run
// through the hdf-eos attribute parser, which builds a full attribute
// hierarchy in a container named after the metadata attribute (with any
// ".0"-style suffix removed).
void AddHDFAttr(DAS & das, const string & varname,
                const vector < hdf_attr > &hav)
{
    if (hav.size() == 0)        // nothing to add
        return;
    // get pointer to the AttrTable for the variable varname (create one if
    // necessary)
    string tempname = varname;
    AttrTable *atp = das.get_table(tempname);
    if (atp == 0) {
        atp = new AttrTable;
        atp = das.add_table(tempname, atp);
    }
    // add the attributes to the DAS
    vector < string > attv;     // vector of attribute strings
    string attrtype;            // name of type of attribute
    for (int i = 0; i < (int) hav.size(); ++i) {        // for each attribute

        attrtype = DAPTypeName(hav[i].values.number_type());
        // get a vector of strings representing the values of the attribute
        attv = vector < string > ();    // clear attv
        hav[i].values.print(attv);

        // add the attribute and its values to the DAS
        for (int j = 0; j < (int) attv.size(); ++j) {
            // handle HDF-EOS metadata with separate parser
            string container_name = hav[i].name;
            if (container_name.find("StructMetadata") == 0
                || container_name.find("CoreMetadata") == 0
                || container_name.find("ProductMetadata") == 0
                || container_name.find("ArchiveMetadata") == 0
                || container_name.find("coremetadata") == 0
                || container_name.find("productmetadata") == 0) {
                // Strip the part-number suffix so every part lands in the
                // same container (e.g. "StructMetadata.0" -> "StructMetadata").
                string::size_type dotzero = container_name.find('.');
                if (dotzero != container_name.npos)
                    container_name.erase(dotzero);      // erase .0

                AttrTable *at = das.get_table(container_name);
                if (!at)
                    at = das.add_table(container_name, new AttrTable);

                // tell lexer to scan attribute string
                void *buf = hdfeos_string(attv[j].c_str());

                parser_arg arg(at);
                // HDF-EOS attribute parsing is complex and some errors are
                // tolerated. Thus, if the parser proper returns an error,
                // that results in an exception that is fatal. However, if
                // the status returned by an otherwise successful parse shows
                // an error was encountered but successful parsing continued,
                // that's OK, but it should be logged.
                //
                // Also, HDF-EOS files should be read using the new HDF-EOS
                // features and not this older parser. jhrg 8/18/11
                //
                // TODO: How to log (as opposed to using BESDEBUG)?
                if (hdfeosparse(&arg) != 0){
                    // Free the lexer buffer before throwing so it does not leak.
                    hdfeos_delete_buffer(buf);
                    throw Error("HDF-EOS parse error while processing a " + container_name + " HDFEOS attribute.");
                }

                // We don't use the parse_error for this case since it generates memory leaking. KY 2014-02-25
                if (arg.status() == false) {
                    ERROR_LOG("HDF-EOS parse error while processing a "
                              << container_name << " HDFEOS attribute. (2)" << endl);
                }

                hdfeos_delete_buffer(buf);
            }
            else {
                // Ordinary attribute: quote String values (unless the
                // server-side quote fix is active) and append to the table.
                if (attrtype == "String")
#ifdef ATTR_STRING_QUOTE_FIX
                    attv[j] = escattr(attv[j]);
#else
                    attv[j] = "\"" + escattr(attv[j]) + "\"";
#endif

                if (atp->append_attr(hav[i].name, attrtype, attv[j]) == 0)
                    THROW(dhdferr_addattr);
            }
        }
    }

    return;
}
4234 
4235 // add a vector of annotations to a DAS. They are stored as attributes. They
4236 // are encoded as string values of an attribute named "HDF_ANNOT".
4237 void AddHDFAttr(DAS & das, const string & varname,
4238  const vector < string > &anv)
4239 {
4240  if (anv.size() == 0) // nothing to add
4241  return;
4242 
4243  // get pointer to the AttrTable for the variable varname (create one if
4244  // necessary)
4245  AttrTable *atp = das.get_table(varname);
4246  if (atp == 0) {
4247  atp = new AttrTable;
4248  atp = das.add_table(varname, atp);
4249  }
4250  // add the annotations to the DAS
4251  string an;
4252  for (int i = 0; i < (int) anv.size(); ++i) { // for each annotation
4253 #ifdef ATTR_STRING_QUOTE_FIX
4254  an = escattr(anv[i]); // quote strings
4255 #else
4256  an = "\"" + escattr(anv[i]) + "\""; // quote strings
4257 #endif
4258  if (atp->append_attr(string("HDF_ANNOT"), "String", an) == 0)
4259  THROW(dhdferr_addattr);
4260  }
4261 
4262  return;
4263 }
4264 
4265 // Add a vector of palettes as attributes to a GR. Each palette is added as
4266 // two attributes: the first contains the palette data; the second contains
4267 // the number of components in the palette.
4268 static vector < hdf_attr > Pals2Attrs(const vector < hdf_palette > palv)
4269 {
4270  vector < hdf_attr > pattrs;
4271 
4272  if (palv.size() != 0) {
4273  // for each palette create an attribute with the palette inside, and an
4274  // attribute containing the number of components
4275  hdf_attr pattr;
4276  string palname;
4277  for (int i = 0; i < (int) palv.size(); ++i) {
4278  palname = "hdf_palette_" + num2string(i);
4279  pattr.name = palname;
4280  pattr.values = palv[i].table;
4281  pattrs.push_back(pattr);
4282  pattr.name = palname + "_ncomps";
4283  pattr.values = hdf_genvec(DFNT_INT32,
4284  const_cast <
4285  int32 * >(&palv[i].ncomp), 1);
4286  pattrs.push_back(pattr);
4287  if (palv[i].name.length() != 0) {
4288  pattr.name = palname + "_name";
4289  pattr.values = hdf_genvec(DFNT_CHAR,
4290  const_cast <
4291  char *>(palv[i].name.c_str()),
4292  palv[i].name.length());
4293  pattrs.push_back(pattr);
4294  }
4295  }
4296  }
4297  return pattrs;
4298 }
4299 
4300 // Convert the meta information in a hdf_dim into a vector of
4301 // hdf_attr.
4302 static vector < hdf_attr > Dims2Attrs(const hdf_dim dim)
4303 {
4304  vector < hdf_attr > dattrs;
4305  hdf_attr dattr;
4306  if (dim.name.length() != 0) {
4307  dattr.name = "name";
4308  dattr.values =
4309  hdf_genvec(DFNT_CHAR, const_cast < char *>(dim.name.c_str()),
4310  dim.name.length());
4311  dattrs.push_back(dattr);
4312  }
4313  if (dim.label.length() != 0) {
4314  dattr.name = "long_name";
4315  dattr.values =
4316  hdf_genvec(DFNT_CHAR, const_cast < char *>(dim.label.c_str()),
4317  dim.label.length());
4318  dattrs.push_back(dattr);
4319  }
4320  if (dim.unit.length() != 0) {
4321  dattr.name = "units";
4322  dattr.values =
4323  hdf_genvec(DFNT_CHAR, const_cast < char *>(dim.unit.c_str()),
4324  dim.unit.length());
4325  dattrs.push_back(dattr);
4326  }
4327  if (dim.format.length() != 0) {
4328  dattr.name = "format";
4329  dattr.values =
4330  hdf_genvec(DFNT_CHAR, const_cast < char *>(dim.format.c_str()),
4331  dim.format.length());
4332  dattrs.push_back(dattr);
4333  }
4334  return dattrs;
4335 }
4336 
This class provides a way to map HDF4 1-D character array to DAP Str for the CF option.
This class provides a way to map HDFEOS2 character >1D array to DAP Str array for the CF option.
This class provides a way to map HDFEOS2 1-D character array to DAP Str for the CF option.
virtual const char * what() const
Return exception message.
Definition: HDFSP.h:109
const std::string & getName() const
Get the name of this field.
Definition: HDFSP.h:291
int32 getType() const
Get the data type of this field.
Definition: HDFSP.h:309
const std::string & getNewName() const
Get the CF name(special characters replaced by underscores) of this field.
Definition: HDFSP.h:297
int32 getRank() const
Get the dimension rank of this field.
Definition: HDFSP.h:303
static File * Read(const char *path, int32 sdid, int32 fileid)
Retrieve SDS and Vdata information from the HDF4 file.
Definition: HDFSP.cc:202
void Prepare()
Definition: HDFSP.cc:4160
bool Has_Dim_NoScale_Field() const
This file has a field that is a SDS dimension but no dimension scale.
Definition: HDFSP.h:756
SD * getSD() const
Public interface to Obtain SD.
Definition: HDFSP.h:771
const std::vector< VDATA * > & getVDATAs() const
Public interface to Obtain Vdata.
Definition: HDFSP.h:777
static File * Read_Hybrid(const char *path, int32 sdid, int32 fileid)
Definition: HDFSP.cc:257
SPType getSPType() const
Obtain special HDF4 product type.
Definition: HDFSP.h:749
const std::vector< AttrContainer * > & getVgattrs() const
Get attributes for all vgroups.
Definition: HDFSP.h:783
One instance of this class represents one SDS object.
Definition: HDFSP.h:346
const std::vector< Dimension * > & getCorrectedDimensions() const
Get the list of the corrected dimensions.
Definition: HDFSP.h:360
const std::vector< Dimension * > & getDimensions() const
Get the list of dimensions.
Definition: HDFSP.h:414
bool IsDimNoScale() const
Is this field a dimension without dimension scale(or empty[no data]dimension variable)
Definition: HDFSP.h:427
This class retrieves all SDS objects and SD file attributes.
Definition: HDFSP.h:558
const std::vector< SDField * > & getFields() const
Redundant member function.
Definition: HDFSP.h:577
const std::vector< Attribute * > & getAttributes() const
Public interface to obtain the SD(file) attributes.
Definition: HDFSP.h:583
One instance of this class represents one Vdata field.
Definition: HDFSP.h:504
int32 getFieldOrder() const
Get the order of this field.
Definition: HDFSP.h:512
Definition: HDFStr.h:51
Definition: HE2CF.h:54
bool open(const std::string &filename, const int sd_id, const int file_id)
opens the HDF4 file \a filename.
Definition: HE2CF.cc:955
string get_metadata(const std::string &metadataname, bool &suffix_is_num, std::vector< std::string > &non_num_names, std::vector< std::string > &non_num_data)
retrieves the merged metadata.
Definition: HE2CF.cc:948
bool write_attribute(const std::string &gname, const std::string &fname, const std::string &newfname, int n_groups, int fieldtype)
Definition: HE2CF.cc:985
void set_DAS(libdap::DAS *das)
sets the DAS pointer so that we can build attribute tables.
Definition: HE2CF.cc:181
bool close()
closes the opened file.
Definition: HE2CF.cc:932
bool write_attribute_FillValue(const std::string &varname, int type, float val)
Definition: HE2CF.cc:1052
bool write_attribute_coordinates(const std::string &varname, std::string coord)
Definition: HE2CF.cc:1146
bool write_attribute_units(const std::string &varname, std::string units)
Definition: HE2CF.cc:1159
void get_value(const std::string &s, std::string &val, bool &found)
Retrieve the value of a given key, if set.
Definition: TheBESKeys.cc:340
static TheBESKeys * TheKeys()
Definition: TheBESKeys.cc:71
static std::string print_attr(int32, int, void *)
Print attribute values in string.
Definition: HDFCFUtil.cc:268
static std::string print_type(int32)
Print datatype in string.
Definition: HDFCFUtil.cc:389
static void correct_scale_offset_type(libdap::AttrTable *at)
Definition: HDFCFUtil.cc:614
static std::string get_CF_string(std::string s)
Change special characters to "_".
Definition: HDFCFUtil.cc:164
static std::string escattr(std::string s)
Definition: HDFCFUtil.cc:3287
static void correct_fvalue_type(libdap::AttrTable *at, int32 dtype)
Definition: HDFCFUtil.cc:547