27 #ifndef _CXSC_IVECRMAT_INL_INCLUDED
28 #define _CXSC_IVECRMAT_INL_INCLUDED
// NOTE(review): this chunk is an elided numbered listing — the signatures for
// the bodies below were dropped, so all purpose claims here are hedged.
// Constructor body: delegates to the generic C-XSC helper
// _vmconstr<ivector,rmatrix,interval>(*this, sl) — presumably the converting
// constructor ivector(const rmatrix&); confirm against the full ivecrmat.inl.
38 { _vmconstr<ivector,rmatrix,interval>(*
this,sl); }
// Slice variant: same construction via _vmsconstr for an rmatrix_slice
// argument — presumably ivector(const rmatrix_slice&); TODO confirm.
45 { _vmsconstr<ivector,rmatrix_slice,interval>(*
this,sl); }
// Loop header only — the loop body was elided from this listing.
// Walks v.size elements of a strided view: j starts at v.start and advances
// by v.offset per iteration (the usual C-XSC slice-traversal pattern) while
// i counts 0..v.size-1. What is done per element is not visible here.
49 for (
int i=0, j=v.start;i<v.size;i++,j+=v.offset)
// Bodies of the SetInf/SetSup family (signatures elided from this listing).
// Naming convention observed in the calls: _v?v[u]set{inf,sup} — the leading
// letters presumably encode vector/matrix-slice operand kinds and the `u`
// presumably marks the Unchecked variant — TODO confirm against ivecrmat.inl.
// Delegates to _vmvsetinf(iv,rv) — presumably writes rv into the infimum of iv.
84 { _vmvsetinf(iv,rv); }
// Delegates to _vmvsetsup(iv,rv) — presumably writes rv into the supremum of iv.
91 { _vmvsetsup(iv,rv); }
// Dangling conditional: the CXSC_INDEX_CHECK arm and its #endif were elided.
100 #if(CXSC_INDEX_CHECK)
// Slice variant: rv is materialized as a temporary rvector before the call.
105 { _vsvsetsup(iv,
rvector(rv)); }
108 #if(CXSC_INDEX_CHECK)
// Unchecked(?) SetInf — same shape as above but via _vmvusetinf.
113 { _vmvusetinf(iv,rv); }
115 #if(CXSC_INDEX_CHECK)
// Unchecked(?) SetSup via _vmvusetsup.
120 { _vmvusetsup(iv,rv); }
122 #if(CXSC_INDEX_CHECK)
// Unchecked(?) slice SetInf; note the rvector(rv) conversion of the operand.
127 { _vsvusetinf(iv,
rvector(rv)); }
129 #if(CXSC_INDEX_CHECK)
// Unchecked(?) slice SetSup; same conversion pattern.
134 { _vsvusetsup(iv,
rvector(rv)); }
// Assignment-operator bodies (signatures elided from this listing).
// Each returns the result of a generic _..assign helper applied to *this,
// so these are presumably ivector::operator= overloads — TODO confirm.
// Dangling conditional: index-check arm and #endif were elided.
139 #if(CXSC_INDEX_CHECK)
// Assign from an rmatrix m via _vmassign<ivector,rmatrix,interval>.
144 {
return _vmassign<ivector,rmatrix,interval>(*
this,m); }
146 #if(CXSC_INDEX_CHECK)
// Same helper, but m is first converted with rmatrix(m) — presumably the
// rmatrix_slice overload routed through a temporary full matrix.
151 {
return _vmassign<ivector,rmatrix,interval>(*
this,
rmatrix(m)); }
153 #if(CXSC_INDEX_CHECK)
// Vector-slice assignment via _vsvassign, with m converted to an rvector.
158 {
return _vsvassign(*
this,
rvector(m)); }
// Product and product-assign bodies (signatures elided from this listing).
// The helper names follow C-XSC's operand-order convention: _mv* = matrix·vector,
// _msv* = matrix_slice·vector, _vm* = vector·matrix, _vms* = vector·matrix_slice;
// the template argument list ends in the result/element type. Presumably these
// are the operator* / operator*= overloads — TODO confirm against ivecrmat.inl.
// Two dangling conditionals; their index-check arms and #endifs were elided.
160 #if(CXSC_INDEX_CHECK)
168 #if(CXSC_INDEX_CHECK)
// rmatrix · ivector -> ivector.
173 {
return _mvimult<rmatrix,ivector,ivector>(m,v); }
175 #if(CXSC_INDEX_CHECK)
// rmatrix_slice · ivector -> ivector.
180 {
return _msvimult<rmatrix_slice,ivector,ivector>(ms,v); }
182 #if(CXSC_INDEX_CHECK)
// ivector · rmatrix -> ivector.
187 {
return _vmimult<ivector,rmatrix,ivector>(v,m); }
189 #if(CXSC_INDEX_CHECK)
// ivector · rmatrix_slice -> ivector.
194 {
return _vmsimult<ivector,rmatrix_slice,ivector>(v,ms); }
196 #if(CXSC_INDEX_CHECK)
// In-place product-assign: v *= m (element type interval).
201 {
return _vmimultassign<ivector,rmatrix,interval>(v,m); }
203 #if(CXSC_INDEX_CHECK)
// In-place product-assign with a matrix slice: v *= ms.
208 {
return _vmsimultassign<ivector,rmatrix_slice,interval>(v,ms); }
// Slice-operand product bodies (signatures elided from this listing).
// Dangling conditional: index-check arm and #endif were elided.
211 #if(CXSC_INDEX_CHECK)
// v is materialized as a temporary ivector before the product — presumably
// the ivector_slice · rmatrix overload reusing _vmimult; TODO confirm.
216 {
return _vmimult<ivector,rmatrix,ivector>(
ivector(v),m); }
218 #if(CXSC_INDEX_CHECK)
// In-place product-assign on a vector slice: *this *= m via _vsmimultassign.
223 {
return _vsmimultassign<ivector_slice,rmatrix,interval>(*
this,m); }