BESDefineCommand.cc

Go to the documentation of this file.
00001 // BESDefineCommand.cc
00002 
00003 // This file is part of bes, A C++ back-end server implementation framework
00004 // for the OPeNDAP Data Access Protocol.
00005 
00006 // Copyright (c) 2004,2005 University Corporation for Atmospheric Research
00007 // Author: Patrick West <pwest@ucar.edu> and Jose Garcia <jgarcia@ucar.edu>
00008 //
00009 // This library is free software; you can redistribute it and/or
00010 // modify it under the terms of the GNU Lesser General Public
00011 // License as published by the Free Software Foundation; either
00012 // version 2.1 of the License, or (at your option) any later version.
00013 // 
00014 // This library is distributed in the hope that it will be useful,
00015 // but WITHOUT ANY WARRANTY; without even the implied warranty of
00016 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
00017 // Lesser General Public License for more details.
00018 // 
00019 // You should have received a copy of the GNU Lesser General Public
00020 // License along with this library; if not, write to the Free Software
00021 // Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
00022 //
00023 // You can contact University Corporation for Atmospheric Research at
00024 // 3080 Center Green Drive, Boulder, CO 80301
00025  
00026 // (c) COPYRIGHT University Corporation for Atmospheric Research 2004-2005
00027 // Please read the full copyright statement in the file COPYRIGHT_UCAR.
00028 //
00029 // Authors:
00030 //      pwest       Patrick West <pwest@ucar.edu>
00031 //      jgarcia     Jose Garcia <jgarcia@ucar.edu>
00032 
00033 #include "BESDefineCommand.h"
00034 #include "BESTokenizer.h"
00035 #include "BESContainerStorageList.h"
00036 #include "BESResponseHandlerList.h"
00037 #include "BESParserException.h"
00038 #include "BESDataNames.h"
00039 
00040 string
00041 BESDefineCommand::parse_options( BESTokenizer &tokens,
00042                                   BESDataHandlerInterface &dhi )
00043 {
00044     string my_token = tokens.get_next_token() ;
00045     if( my_token == "silently" || my_token == "silent" )
00046     {
00047         dhi.data[SILENT] = "yes" ;
00048         my_token = tokens.get_next_token() ;
00049     }
00050     else
00051     {
00052         dhi.data[SILENT] = "no" ;
00053     }
00054     return my_token ;
00055 }
00056 
00092 BESResponseHandler *
00093 BESDefineCommand::parse_request( BESTokenizer &tokenizer,
00094                                      BESDataHandlerInterface &dhi )
00095 {
00096     string my_token = parse_options( tokenizer, dhi ) ;
00097 
00098     /* First we will make sure that the developer has not over-written this
00099      * command to work with a sub command. In other words, they have a new
00100      * command called "define something". Look up define.something
00101      */
00102     string newcmd = _cmd + "." + my_token ;
00103     BESCommand *cmdobj = BESCommand::find_command( newcmd ) ;
00104     if( cmdobj && cmdobj != BESCommand::TermCommand )
00105     {
00106         return cmdobj->parse_request( tokenizer, dhi ) ;
00107     }
00108 
00109     /* No sub-command to define, so the define command looks like:
00110      * define name as sym1,sym2,...,symn with ... aggregate by ...
00111      * 
00112      * no return as
00113      */
00114 
00115     /* Look for the response handler that knows how to build the response
00116      * object for a define command.
00117      */
00118     dhi.action = _cmd ;
00119     BESResponseHandler *retResponse =
00120         BESResponseHandlerList::TheList()->find_handler( _cmd ) ;
00121     if( !retResponse )
00122     {
00123         string s = (string)"No response handler for command " + _cmd ;
00124         throw BESParserException( s, __FILE__, __LINE__ ) ;
00125     }
00126 
00127     bool with_aggregation = false ;
00128 
00129     dhi.data[DEF_NAME] = my_token ;
00130 
00131     my_token = tokenizer.get_next_token() ;
00132     if( my_token == "in" )
00133     {
00134         string store_name = tokenizer.get_next_token() ;
00135         if( store_name == ";" || store_name == "" )
00136         {
00137             tokenizer.parse_error( my_token + " not expected, expecting definition store name" ) ;
00138         }
00139         dhi.data[STORE_NAME] = store_name ;
00140         my_token = tokenizer.get_next_token() ;
00141     }
00142 
00143     if( my_token != "as" )
00144     {
00145         tokenizer.parse_error( my_token + " not expected, expecting \"as\"" ) ;
00146     }
00147     else
00148     {
00149         my_token = tokenizer.get_next_token() ;
00150         bool expecting_comma = false ;
00151         bool with_proyection = false ;
00152         if( my_token == ";" )
00153             tokenizer.parse_error( my_token + " not expected, expecting list of symbolic names\n" ) ;
00154         while( ( my_token != "with" ) && ( my_token!=";" ) )
00155         {
00156             if( ( my_token == "," ) && ( !expecting_comma ) )
00157                 tokenizer.parse_error( my_token + " not expected\n" ) ;
00158             else if( ( my_token == "," ) && ( expecting_comma ) )
00159                 expecting_comma = false ;
00160             else if( ( my_token != "," ) && ( expecting_comma ) )
00161                 tokenizer.parse_error( my_token + " not expected\n" ) ;
00162             else
00163             {
00164                 BESContainer d( my_token ) ;
00165                 BESContainerStorageList::TheList()->look_for( d ) ;
00166                 dhi.containers.push_back( d ) ;
00167                 expecting_comma = true ;
00168             }
00169             my_token = tokenizer.get_next_token() ;
00170             if( my_token == "with" )
00171                 with_proyection = true ;
00172         }
00173         if( !expecting_comma )
00174             tokenizer.parse_error( my_token + " not expected\n" ) ;
00175         else
00176             expecting_comma = false ;
00177         if( with_proyection )
00178         {
00179             my_token = tokenizer.get_next_token() ;
00180             if( my_token == ";" )
00181                 tokenizer.parse_error( my_token + " not expected\n" ) ;
00182             else
00183             {
00184                 int rat = 0 ;
00185                 bool need_constraint = false ;
00186                 int where_in_list = 0 ;
00187                 bool found = false ;
00188                 unsigned int my_type = 0 ;
00189                 while( my_token != "aggregate" && my_token != ";" )
00190                 {
00191                     if( ( my_token == "," ) && ( !expecting_comma ) )
00192                         tokenizer.parse_error( my_token + " not expected\n" ) ;
00193                     else if( ( my_token == "," ) && ( expecting_comma ) )
00194                         expecting_comma = false ;
00195                     else if( ( my_token != "," ) && ( expecting_comma ) )
00196                         tokenizer.parse_error( my_token + " not expected\n" ) ;
00197                     else
00198                     {
00199                         rat++ ;
00200                         switch( rat )
00201                         {
00202                             case 1:
00203                             {
00204                                 my_type = 0 ;
00205                                 string ds = tokenizer.parse_container_name( my_token, my_type ) ;
00206                                 found = false ;
00207                                 dhi.first_container() ;
00208                                 where_in_list = 0 ;
00209                                 while( dhi.container && !found )
00210                                 { 
00211                                     if( ds == dhi.container->get_symbolic_name() )
00212                                     {
00213                                         found = true ;
00214                                     }
00215                                     dhi.next_container() ;
00216                                     where_in_list++ ;
00217                                 }
00218                                 if( !found )
00219                                     tokenizer.parse_error( "Container " + ds + " is in the proyection but is not in the selection." ) ;
00220                                 need_constraint = true ;
00221                                 break ;
00222                             }
00223                             case 2:
00224                             {
00225                                 expecting_comma = true ;
00226                                 rat = 0 ;
00227                                 need_constraint = false ;
00228                                 dhi.first_container() ;
00229                                 for( int w = 0; w < where_in_list-1 ; w++ )
00230                                 {
00231                                     dhi.next_container() ;
00232                                 }
00233                                 if( my_type == 1 )
00234                                 {
00235                                     dhi.container->set_constraint( tokenizer.remove_quotes( my_token ) ) ;
00236                                 }
00237                                 else if( my_type == 2 )
00238                                 {
00239                                     dhi.container->set_attributes( tokenizer.remove_quotes( my_token ) ) ;
00240                                 }
00241                                 else
00242                                 {
00243                                     tokenizer.parse_error( "Unknown property type for container" + dhi.container->get_symbolic_name() ) ;
00244                                 }
00245                                 break;
00246                             }
00247                         }
00248                     }
00249                     my_token = tokenizer.get_next_token() ;
00250                     if( my_token == "aggregate" )
00251                         with_aggregation = true ;
00252                 }
00253                 if( need_constraint )
00254                     tokenizer.parse_error( "; not expected" ) ;
00255             }
00256         }
00257         if( with_aggregation == true )
00258         {
00259             my_token = tokenizer.get_next_token() ;
00260             if( my_token != "using" )
00261             {
00262                 tokenizer.parse_error( my_token + " not expected" ) ;
00263             }
00264 
00265             my_token = tokenizer.get_next_token() ;
00266             if( my_token == ";" )
00267             {
00268                 tokenizer.parse_error( my_token + " not expected" ) ;
00269             }
00270             dhi.data[AGG_HANDLER] = my_token ;
00271 
00272             my_token = tokenizer.get_next_token() ;
00273             if( my_token != "by" )
00274             {
00275                 tokenizer.parse_error( my_token + " not expected" ) ;
00276             }
00277 
00278             my_token = tokenizer.get_next_token() ;
00279             if( my_token == ";" )
00280             {
00281                 tokenizer.parse_error( my_token + " not expected" ) ;
00282             }
00283             dhi.data[AGG_CMD] =
00284                 tokenizer.remove_quotes( my_token ) ;
00285 
00286             my_token = tokenizer.get_next_token() ;
00287         }
00288         if( my_token != ";" )
00289         {
00290             tokenizer.parse_error( my_token + " not expected" ) ;
00291         }
00292     }
00293 
00294     return retResponse ;
00295 }
00296 
00303 void
00304 BESDefineCommand::dump( ostream &strm ) const
00305 {
00306     strm << BESIndent::LMarg << "BESDefineCommand::dump - ("
00307                              << (void *)this << ")" << endl ;
00308     BESIndent::Indent() ;
00309     BESCommand::dump( strm ) ;
00310     BESIndent::UnIndent() ;
00311 }
00312 

Generated on Wed Aug 29 02:59:01 2007 for OPeNDAP Back End Server (BES) by  doxygen 1.5.2