BESDefineCommand.cc
#include "BESDefineCommand.h"
#include "BESTokenizer.h"
#include "BESContainerStorageList.h"
#include "BESResponseHandlerList.h"
#include "BESSyntaxUserError.h"
#include "BESDataNames.h"
#include "BESUtil.h"
#include "BESIndent.h"

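/** @brief Consume an optional "silent"/"silently" token from the request.
 *
 * Sets dhi.data[SILENT] to "yes" when the option is present, "no" otherwise,
 * and returns the next token to be parsed.
 */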
string
BESDefineCommand::parse_options( BESTokenizer &tokens,
                                 BESDataHandlerInterface &dhi )
{
    string my_token = tokens.get_next_token() ;
    if( my_token == "silently" || my_token == "silent" )
    {
        dhi.data[SILENT] = "yes" ;
        my_token = tokens.get_next_token() ;
    }
    else
    {
        dhi.data[SILENT] = "no" ;
    }
    return my_token ;
}

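/** @brief Parse a define request and build the definition information.
 *
 * As parsed below, the request has the general form:
 *
 *   define [silent|silently] <name> [in <store_name>]
 *       as <container>[,<container>]...
 *       [with <container>.constraint="<expr>"[,<container>.attributes="<list>"]...]
 *       [aggregate using <handler> by "<expr>"] ;
 *
 * If a command is registered for the first token after the options (for
 * example "define.<token>"), parsing is delegated to that command instead.
 */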
BESResponseHandler *
BESDefineCommand::parse_request( BESTokenizer &tokenizer,
                                 BESDataHandlerInterface &dhi )
{
    string my_token = parse_options( tokenizer, dhi ) ;

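    // If a command is registered for "define.<token>", hand parsing off to it.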
    string newcmd = _cmd + "." + my_token ;
    BESCommand *cmdobj = BESCommand::find_command( newcmd ) ;
    if( cmdobj && cmdobj != BESCommand::TermCommand )
    {
        return cmdobj->parse_request( tokenizer, dhi ) ;
    }

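    // No sub-command: handle the define here. Find the response handler
    // registered for the "define" command; fail if there is none.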
    dhi.action = _cmd ;
    BESResponseHandler *retResponse =
        BESResponseHandlerList::TheList()->find_handler( _cmd ) ;
    if( !retResponse )
    {
        string s = (string)"No response handler for command " + _cmd ;
        throw BESSyntaxUserError( s, __FILE__, __LINE__ ) ;
    }

    bool with_aggregation = false ;

    dhi.data[DEF_NAME] = my_token ;

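    // Optional "in <store_name>" clause: names the definition store that
    // will hold this definition.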
    my_token = tokenizer.get_next_token() ;
    if( my_token == "in" )
    {
        string store_name = tokenizer.get_next_token() ;
        if( store_name == ";" || store_name == "" )
        {
            tokenizer.parse_error( store_name + " not expected, expecting definition store name" ) ;
        }
        dhi.data[STORE_NAME] = store_name ;
        my_token = tokenizer.get_next_token() ;
    }

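    // Required "as" clause: a comma separated list of symbolic container
    // names; each name is looked up in the container storage list.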
    if( my_token != "as" )
    {
        tokenizer.parse_error( my_token + " not expected, expecting \"as\"" ) ;
    }
    else
    {
        my_token = tokenizer.get_next_token() ;
        bool expecting_comma = false ;
        bool with_projection = false ;
        if( my_token == ";" )
            tokenizer.parse_error( my_token + " not expected, expecting list of symbolic names\n" ) ;
        while( ( my_token != "with" ) && ( my_token != ";" ) )
        {
            if( ( my_token == "," ) && ( !expecting_comma ) )
                tokenizer.parse_error( my_token + " not expected\n" ) ;
            else if( ( my_token == "," ) && ( expecting_comma ) )
                expecting_comma = false ;
            else if( ( my_token != "," ) && ( expecting_comma ) )
                tokenizer.parse_error( my_token + " not expected\n" ) ;
            else
            {
                BESContainer *d =
                    BESContainerStorageList::TheList()->look_for( my_token ) ;
                dhi.containers.push_back( d ) ;
                expecting_comma = true ;
            }
            my_token = tokenizer.get_next_token() ;
            if( my_token == "with" )
                with_projection = true ;
        }
        if( !expecting_comma )
            tokenizer.parse_error( my_token + " not expected\n" ) ;
        else
            expecting_comma = false ;
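        // Optional "with" clause: pairs of <container>.constraint or
        // <container>.attributes followed by a quoted value, applied to the
        // matching container from the list above.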
        if( with_projection )
        {
            my_token = tokenizer.get_next_token() ;
            if( my_token == ";" )
                tokenizer.parse_error( my_token + " not expected\n" ) ;
            else
            {
                int rat = 0 ;
                bool need_constraint = false ;
                int where_in_list = 0 ;
                bool found = false ;
                unsigned int my_type = 0 ;
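                // rat alternates between 1 (a <container>.<property> token)
                // and 2 (the quoted value that follows it).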
                while( my_token != "aggregate" && my_token != ";" )
                {
                    if( ( my_token == "," ) && ( !expecting_comma ) )
                        tokenizer.parse_error( my_token + " not expected\n" ) ;
                    else if( ( my_token == "," ) && ( expecting_comma ) )
                        expecting_comma = false ;
                    else if( ( my_token != "," ) && ( expecting_comma ) )
                        tokenizer.parse_error( my_token + " not expected\n" ) ;
                    else
                    {
                        rat++ ;
                        switch( rat )
                        {
                            case 1:
                            {
                                my_type = 0 ;
                                string ds = tokenizer.parse_container_name( my_token, my_type ) ;
                                found = false ;
                                dhi.first_container() ;
                                where_in_list = 0 ;
                                while( dhi.container && !found )
                                {
                                    if( ds == dhi.container->get_symbolic_name() )
                                    {
                                        found = true ;
                                    }
                                    dhi.next_container() ;
                                    where_in_list++ ;
                                }
                                if( !found )
                                    tokenizer.parse_error( "Container " + ds + " is in the projection but is not in the selection." ) ;
                                need_constraint = true ;
                                break ;
                            }
                            case 2:
                            {
                                expecting_comma = true ;
                                rat = 0 ;
                                need_constraint = false ;
                                dhi.first_container() ;
                                for( int w = 0; w < where_in_list-1; w++ )
                                {
                                    dhi.next_container() ;
                                }
                                if( my_type == 1 )
                                {
                                    dhi.container->set_constraint( BESUtil::unescape( tokenizer.remove_quotes( my_token ) ) ) ;
                                }
                                else if( my_type == 2 )
                                {
                                    dhi.container->set_attributes( BESUtil::unescape( tokenizer.remove_quotes( my_token ) ) ) ;
                                }
                                else
                                {
                                    tokenizer.parse_error( "Unknown property type for container " + dhi.container->get_symbolic_name() ) ;
                                }
                                break ;
                            }
                        }
                    }
                    my_token = tokenizer.get_next_token() ;
                    if( my_token == "aggregate" )
                        with_aggregation = true ;
                }
                if( need_constraint )
                    tokenizer.parse_error( "; not expected" ) ;
            }
        }
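        // Optional aggregation clause: "aggregate using <handler> by "<expr>"".
        // The handler name and the unescaped, unquoted expression are stored
        // in the data handler interface for the aggregation step.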
        if( with_aggregation )
        {
            my_token = tokenizer.get_next_token() ;
            if( my_token != "using" )
            {
                tokenizer.parse_error( my_token + " not expected" ) ;
            }

            my_token = tokenizer.get_next_token() ;
            if( my_token == ";" )
            {
                tokenizer.parse_error( my_token + " not expected" ) ;
            }
            dhi.data[AGG_HANDLER] = my_token ;

            my_token = tokenizer.get_next_token() ;
            if( my_token != "by" )
            {
                tokenizer.parse_error( my_token + " not expected" ) ;
            }

            my_token = tokenizer.get_next_token() ;
            if( my_token == ";" )
            {
                tokenizer.parse_error( my_token + " not expected" ) ;
            }
            dhi.data[AGG_CMD] =
                BESUtil::unescape( tokenizer.remove_quotes( my_token ) ) ;

            my_token = tokenizer.get_next_token() ;
        }
        if( my_token != ";" )
        {
            tokenizer.parse_error( my_token + " not expected" ) ;
        }
    }

    return retResponse ;
}

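/** @brief Dump this object's state to the given stream.
 *
 * Prints the class name and object address, then delegates to
 * BESCommand::dump() for the inherited state.
 */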
void
BESDefineCommand::dump( ostream &strm ) const
{
    strm << BESIndent::LMarg << "BESDefineCommand::dump - ("
         << (void *)this << ")" << endl ;
    BESIndent::Indent() ;
    BESCommand::dump( strm ) ;
    BESIndent::UnIndent() ;
}