43 Real resid, alpha, beta, rho, rho_1=0, sigma=0;
46 Real tol_sq = *tol * *tol;
52 if (normb < 1.e-15) normb = Real(1);
54 if ( (resid = norm2(r) / normb) <= tol_sq) {
60 for (int i = 1; i <= *iter; i++) {
82 sigma = gsums[1] - beta * beta * sigma;
91 if ( (resid = norm2(r) / normb) <= tol_sq) {
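The fragments above (lines 43-91) come from the body of pcg(), whose prototype appears in the reference list below: a preconditioned conjugate-gradient loop with a relative-residual stopping test and a guard against a zero right-hand side. For orientation, here is a minimal textbook sketch of a Jacobi-preconditioned CG iteration with the same in/out convention for tol and max_iter; pcg_sketch, A_times, and the plain std::vector storage are illustrative assumptions, not the Nodal_data/pane machinery of the listed source.

#include <cmath>
#include <cstddef>
#include <vector>

// Illustrative Jacobi-preconditioned CG sketch (not the library's pcg()).
// A_times(x) must return A*x as a std::vector<double>; diag holds diag(A).
// On convergence, *tol receives the final relative residual and *max_iter
// the iteration count.
template <class MatVec>
int pcg_sketch(const MatVec &A_times, const std::vector<double> &diag,
               const std::vector<double> &b, std::vector<double> &x,
               double *tol, int *max_iter) {
  const std::size_t n = b.size();
  std::vector<double> r(n), z(n), p(n), q(n);
  std::vector<double> Ax = A_times(x);
  for (std::size_t j = 0; j < n; ++j) r[j] = b[j] - Ax[j];

  double normb = 0;
  for (std::size_t j = 0; j < n; ++j) normb += b[j] * b[j];
  normb = std::sqrt(normb);
  if (normb < 1.e-15) normb = 1.;                  // guard against a zero RHS

  double rho_1 = 0;
  for (int i = 1; i <= *max_iter; ++i) {
    for (std::size_t j = 0; j < n; ++j) z[j] = r[j] / diag[j];  // Jacobi step
    double rho = 0;
    for (std::size_t j = 0; j < n; ++j) rho += r[j] * z[j];
    if (i == 1) p = z;
    else {
      const double beta = rho / rho_1;
      for (std::size_t j = 0; j < n; ++j) p[j] = z[j] + beta * p[j];
    }
    q = A_times(p);
    double pq = 0;
    for (std::size_t j = 0; j < n; ++j) pq += p[j] * q[j];
    const double alpha = rho / pq;
    double rr = 0;
    for (std::size_t j = 0; j < n; ++j) {
      x[j] += alpha * p[j];
      r[j] -= alpha * q[j];
      rr += r[j] * r[j];
    }
    const double resid = std::sqrt(rr) / normb;
    if (resid <= *tol) { *tol = resid; *max_iter = i; return 0; }  // converged
    rho_1 = rho;
  }
  return 1;                                        // no convergence within *max_iter
}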
112 Real *p = (*pit)->pointer( x.id());
113 const Real *q = (*pit)->pointer( diag.id());
116 for (int i=1, size=(*pit)->size_of_nodes(); i<=size; ++i)
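Lines 112-116 belong to precondition_Jacobi(), listed below as the diagonal (Jacobi) preconditioner. Per node it is simply a component-wise division of the right-hand side by the diagonal; a minimal sketch on flat arrays (an assumed layout, not the pane/attribute accessors used in the source):

// Illustrative sketch of a diagonal (Jacobi) preconditioning step:
// x = rhs ./ diag, component by component.  Flat arrays are an assumption;
// the listed code walks pane-by-pane through Nodal_data attributes instead.
void precondition_jacobi_sketch(const double *rhs, const double *diag,
                                double *x, int n) {
  for (int i = 0; i < n; ++i)
    x[i] = rhs[i] / diag[i];   // assumes a nonzero (lumped) diagonal entry
}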
134 std::fill( p, p+(*pit)->size_of_nodes()*y.dimension(), Real(0));
145 for (int i=0, n=ene.size_of_nodes(); i<n; ++i) {
147 for (int j=0; j<n; ++j, ++emm) t += (*emm) * fx[j];
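Lines 134-147 are from multiply_mass_mat_and_x(): the target attribute is first zeroed, then each element accumulates the product of its local mass matrix with the local nodal values of x. A dense sketch of that element-level accumulation (the conn array and row-major local matrix are assumptions standing in for the ENE connectivity adaptor):

#include <vector>

// Illustrative sketch of one element's contribution to y = M*x: the local
// mass matrix (dense, row-major) times the element's nodal values of x,
// scatter-added into the global vector.  conn maps local to global node IDs.
void add_element_mass_product(const std::vector<int> &conn,
                              const std::vector<double> &M,   // n*n entries
                              const std::vector<double> &x,
                              std::vector<double> &y) {
  const int n = static_cast<int>(conn.size());
  for (int i = 0; i < n; ++i) {
    double t = 0;
    for (int j = 0; j < n; ++j)
      t += M[i * n + j] * x[conn[j]];   // row i of M times the local values
    y[conn[i]] += t;                    // accumulate into the global vector
  }
}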
160 const Real *p = (*pit)->pointer( x.id());
162 for (int i=1, size=(*pit)->size_of_nodes(); i<=size; ++i)
163 if ( (*pit)->is_primary_node( i))
176 const Real *px = (*pit)->pointer( x.id());
177 const Real *py = (*pit)->pointer( y.id());
179 for (int i=1, size=(*pit)->size_of_nodes(); i<=size; ++i)
180 if ( (*pit)->is_primary_node( i))
195 prods[0] = prods[1] = 0;
197 const Real *px1 = (*pit)->pointer( x1.id());
198 const Real *py1 = (*pit)->pointer( y1.id());
199 const Real *px2 = (*pit)->pointer( x2.id());
200 const Real *py2 = (*pit)->pointer( y2.id());
202 for (int i=1, size=(*pit)->size_of_nodes(); i<=size; ++i)
203 if ( (*pit)->is_primary_node( i)) {
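Lines 160-203 are the inner-product kernels norm2(), dot(), and dot2(); the last accumulates two inner products in a single sweep so only one later reduction is needed, and the primary-node test keeps nodes shared between panes from being counted twice. A flat-array sketch of dot2() (the is_primary mask is an assumption standing in for the pane's primary-node test):

// Illustrative sketch of dot2(): two inner products accumulated in one pass,
// skipping non-primary (shared) entries so each physical node counts once.
void dot2_sketch(const double *x1, const double *y1,
                 const double *x2, const double *y2,
                 const bool *is_primary, int n, double prods[2]) {
  prods[0] = prods[1] = 0;
  for (int i = 0; i < n; ++i)
    if (is_primary[i]) {
      prods[0] += x1[i] * y1[i];
      prods[1] += x2[i] * y2[i];
    }
}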
216 Real *px = (*pit)->pointer( x.id());
217 Real *py = (*pit)->pointer( y.id());
219 for (int i=1, size=(*pit)->size_of_nodes(); i<=size; ++i)
228 Real *px = (*pit)->pointer( x.id());
230 for (int i=1, size=(*pit)->size_of_nodes(); i<=size; ++i)
239 Real *px = (*pit)->pointer( x.id());
241 for (int i=1, size=(*pit)->size_of_nodes(); i<=size; ++i)
250 Real *px = (*pit)->pointer( x.id());
251 Real *py = (*pit)->pointer( y.id());
254 for (int i=1, size=(*pit)->size_of_nodes(); i<=size; ++i)
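Lines 216-254 are the remaining BLAS-style vector kernels (saxpy, scale, invert, copy_vec), each a straight loop over every local pane's nodes. For example, the saxpy operation y = a*x + b*y reduces to the following on flat arrays (an assumed layout for illustration):

// Illustrative sketch of the saxpy kernel over one pane's nodal values:
// y = a*x + b*y.  scale(), invert(), and copy_vec() follow the same pattern.
void saxpy_sketch(double a, const double *x, double b, double *y, int n) {
  for (int i = 0; i < n; ++i)
    y[i] = a * x[i] + b * y[i];
}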
269 std::vector<const RFC_Pane_transfer*> ps;
272 for (std::vector<const RFC_Pane_transfer*>::iterator
273      pit=ps.begin(); pit!=ps.end(); ++pit) {
274 const Real *p = (*pit)->pointer( sDF.id());
276 for (int i=1, size=(*pit)->size_of_nodes(); i<=size; ++i) {
280 std::cerr << "ERROR: ****Invalid number "
281           << t[k] << " in " << win.name() << "["
282           << i-1 << "][" << k << "] (C Convention). Aborting..." << std::endl;
286 min_v = min( min_v, t);
287 max_v = max( max_v, t);
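The two minmax() overloads at lines 269-322 scan the nodal (respectively facial) values of every local pane, report non-finite entries with an error message, and fold each component into running minimum and maximum vectors. A per-component sketch with the finiteness check (flat n-by-dim storage is an assumption):

#include <cmath>
#include <cstdlib>
#include <iostream>

// Illustrative sketch of the minmax scan: reject NaN/Inf with a diagnostic,
// as the listed code does, and track per-component minima and maxima.
void minmax_sketch(const double *vals, int n, int dim,
                   double *min_v, double *max_v) {
  for (int i = 0; i < n; ++i)
    for (int k = 0; k < dim; ++k) {
      const double t = vals[i * dim + k];
      if (!std::isfinite(t)) {
        std::cerr << "ERROR: ****Invalid number " << t
                  << " at [" << i << "][" << k << "]. Aborting..." << std::endl;
        std::abort();
      }
      if (t < min_v[k]) min_v[k] = t;
      if (t > max_v[k]) max_v[k] = t;
    }
}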
304 std::vector<const RFC_Pane_transfer*> ps;
307 for (std::vector<const RFC_Pane_transfer*>::iterator
308      pit=ps.begin(); pit!=ps.end(); ++pit) {
309 const Real *p = (*pit)->pointer( sDF.id());
311 for (int i=1, size=(*pit)->size_of_faces(); i<=size; ++i) {
315 std::cerr << "ERROR: ****Invalid number "
316           << t[k] << " in " << win.name() << "["
317           << i-1 << "][" << k << "] (C Convention). Aborting..." << std::endl;
321 min_v = min( min_v, t);
322 max_v = max( max_v, t);
339 std::vector<const RFC_Pane_transfer*> ps;
345 for (std::vector<const RFC_Pane_transfer*>::iterator
346      pit=ps.begin(); pit!=ps.end(); ++pit) {
347 ENE ene( (*pit)->base(), 1);
349 for (int i=1, size=(*pit)->size_of_faces(); i<=size; ++i, ene.next()) {
355 for (int i=0, n = e.get_num_gp(doa); i<n; ++i) {
356 e.get_gp_nat_coor( i, nc, doa);
359 Real a=e.get_gp_weight( i, doa) * e.Jacobian_det( pnts, nc);
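Lines 339-398 are the two quadrature loops of integrate(): for every face, the element iterates over its Gauss points, weights each sample by the quadrature weight times the Jacobian determinant, and accumulates the interpolated values into the running integral. A generic sketch of that inner accumulation (the three input arrays are assumptions standing in for Generic_element_2's get_gp_weight/Jacobian_det/interpolate machinery):

// Illustrative sketch of one face's contribution to a surface integral by
// Gaussian quadrature.
double integrate_element_sketch(int num_gp,
                                const double *gp_weight,
                                const double *jacobian_det,
                                const double *value_at_gp) {
  double sum = 0;
  for (int i = 0; i < num_gp; ++i) {
    const double a = gp_weight[i] * jacobian_det[i];  // area weight of sample i
    sum += a * value_at_gp[i];                        // accumulate weighted sample
  }
  return sum;
}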
378 std::vector<const RFC_Pane_transfer*> ps;
384 for (std::vector<const RFC_Pane_transfer*>::iterator
385      pit=ps.begin(); pit!=ps.end(); ++pit) {
386 ENE ene( (*pit)->base(), 1);
388 for (int i=1, size=(*pit)->size_of_faces(); i<=size; ++i, ene.next()) {
394 for (int i=0, n = e.get_num_gp(doa); i<n; ++i) {
395 e.get_gp_nat_coor( i, nc, doa);
398 Real a=e.get_gp_weight( i, doa) * e.Jacobian_det( pnts, nc);
std::string name() const
The name of the window.
void dot2(const Nodal_data_const &x1, const Nodal_data_const &y1, const Nodal_data_const &x2, const Nodal_data_const &y2, Array_n prod) const
Real square(const Array_n_const &x) const
void invert(Nodal_data &x)
An adaptor for enumerating node IDs of an element.
Element_var_const make_field(const Nodal_data_const &d, const RFC_Pane_transfer *pn, const ENE &ene)
Construct an element-wise accessor from nodal data and pointers.
std::vector< RFC_Pane_transfer * > trg_ps
Base * base()
The id of its base COM::Pane object.
Adaptor for an element-wise data container.
Vector_n max(const Array_n_const &v1, const Array_n_const &v2)
Value_const get_value(const RFC_Pane_transfer *p, int v) const
void saxpy(const Real &a, const Nodal_data_const &x, const Real &b, Nodal_data &y)
SURF::Generic_element_2 Generic_element
int size_of_faces() const
The total number of faces in the pane.
void panes(std::vector< Pane * > &ps)
Get a vector of local panes contained in the window.
#define RFC_END_NAME_SPACE
void scale(const Real &a, Nodal_data &x)
RFC_Window_transfer & trg
#define RFC_BEGIN_NAME_SPACE
void set_value(RFC_Pane_transfer *p, int v, const Value_const &vec)
Value_nonconst get_value(RFC_Pane_transfer *p, int v)
int pcg(Nodal_data &x, Nodal_data &b, Nodal_data &p, Nodal_data &q, Nodal_data &r, Nodal_data &s, Nodal_data &z, Nodal_data &di, Real *tol, int *max_iter)
void interpolate(const Generic_element &e, const Element_var_const values, const Generic_element::Nat_coor &nc, _Value &v)
Vector_n min(const Array_n_const &v1, const Array_n_const &v2)
void integrate(const RFC_Window_transfer &win, const Nodal_data_const &sDF, Array_n &intergral, const int doa)
void copy_vec(const Nodal_data_const &x, Nodal_data &y)
std::vector< RFC_Pane_transfer * >::const_iterator Pane_iterator_const
void precondition_Jacobi(const Nodal_data_const &rhs, const Nodal_data_const &diag, Nodal_data &x)
Diagonal (Jacobi) preconditioner.
Real dot(const Nodal_data_const &x, const Nodal_data_const &y) const
A const adaptor for accessing nodal coordinates of a pane.
void multiply_mass_mat_and_x(const Nodal_data_const &x, Nodal_data &y)
void minmax(const RFC_Window_transfer &win, const Facial_data_const &sDF, Array_n &min_v, Array_n &max_v)
bool need_recv(int i) const
void reduce_to_all(Nodal_data &, MPI_Op)
Real norm2(const Nodal_data_const &x) const
void allreduce(Array_n &arr, MPI_Op op) const
std::vector< RFC_Pane_transfer * >::iterator Pane_iterator
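Several of the entries above (dot, dot2, norm2, allreduce, reduce_to_all, MPI_SUM) cooperate in one pattern: each process accumulates partial sums over the primary nodes of its local panes, then a single MPI reduction makes the result global. A minimal sketch of that pattern with raw MPI calls (the flat local array is an assumption, not the Nodal_data/Array_n interface used here):

#include <mpi.h>

// Illustrative sketch of the pattern behind dot()/norm2() plus allreduce():
// a local partial sum over this process's entries, followed by one
// MPI_Allreduce with MPI_SUM to obtain the global value on every rank.
double global_dot_sketch(const double *x, const double *y, int n_local,
                         MPI_Comm comm) {
  double local = 0;
  for (int i = 0; i < n_local; ++i)
    local += x[i] * y[i];            // partial sum on this process
  double global = 0;
  MPI_Allreduce(&local, &global, 1, MPI_DOUBLE, MPI_SUM, comm);
  return global;
}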