src/comm/HPL_1rinM.c (revision 1)

/*
 * -- High Performance Computing Linpack Benchmark (HPL)
 *    HPL - 2.0 - September 10, 2008
 *    Antoine P. Petitet
 *    University of Tennessee, Knoxville
 *    Innovative Computing Laboratory
 *    (C) Copyright 2000-2008 All Rights Reserved
 *
 * -- Copyright notice and Licensing terms:
 *
 * Redistribution  and  use in  source and binary forms, with or without
 * modification, are  permitted provided  that the following  conditions
 * are met:
 *
 * 1. Redistributions  of  source  code  must retain the above copyright
 * notice, this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce  the above copyright
 * notice, this list of conditions,  and the following disclaimer in the
 * documentation and/or other materials provided with the distribution.
 *
 * 3. All  advertising  materials  mentioning  features  or  use of this
 * software must display the following acknowledgement:
 * This  product  includes  software  developed  at  the  University  of
 * Tennessee, Knoxville, Innovative Computing Laboratory.
 *
 * 4. The name of the  University,  the name of the  Laboratory,  or the
 * names  of  its  contributors  may  not  be used to endorse or promote
 * products  derived   from   this  software  without  specific  written
 * permission.
 *
 * -- Disclaimer:
 *
 * THIS  SOFTWARE  IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES,  INCLUDING,  BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE UNIVERSITY
 * OR  CONTRIBUTORS  BE  LIABLE FOR ANY  DIRECT,  INDIRECT,  INCIDENTAL,
 * SPECIAL,  EXEMPLARY,  OR  CONSEQUENTIAL DAMAGES  (INCLUDING,  BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA OR PROFITS; OR BUSINESS INTERRUPTION)  HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT,  STRICT LIABILITY,  OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 * ---------------------------------------------------------------------
 */
/*
 * Include files
 */
#include "hpl.h"

#ifdef HPL_NO_MPI_DATATYPE /* The user insists on not using MPI types */
#ifndef HPL_COPY_L      /* and also wants to avoid the copy of L ...  */
#define HPL_COPY_L    /* well, sorry, cannot do that: force the copy  */
#endif
#endif
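
/*
 * Note: the three routines below (HPL_binit_1rinM, HPL_bcast_1rinM and
 * HPL_bwait_1rinM) provide the initialization, progress and completion
 * steps of the broadcast of the current panel along the process row
 * (row_comm), using the modified increasing-ring topology.
 */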

#ifdef STDC_HEADERS
int HPL_binit_1rinM
(
   HPL_T_panel *              PANEL
)
#else
int HPL_binit_1rinM( PANEL )
   HPL_T_panel *              PANEL;
#endif
{
#ifdef HPL_USE_MPI_DATATYPE
/*
 * .. Local Variables ..
 */
   int                        ierr;
#endif
/* ..
 * .. Executable Statements ..
 */
   if( PANEL == NULL )           { return( HPL_SUCCESS ); }
   if( PANEL->grid->npcol <= 1 ) { return( HPL_SUCCESS ); }
#ifdef HPL_USE_MPI_DATATYPE
#ifdef HPL_COPY_L
/*
 * Copy the panel into a contiguous buffer
 */
   HPL_copyL( PANEL );
#endif
/*
 * Create the MPI user-defined data type
 */
   ierr = HPL_packL( PANEL, 0, PANEL->len, 0 );

   return( ( ierr == MPI_SUCCESS ? HPL_SUCCESS : HPL_FAILURE ) );
#else
/*
 * Force the copy of the panel into a contiguous buffer
 */
   HPL_copyL( PANEL );

   return( HPL_SUCCESS );
#endif
}
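
/*
 * The _M_BUFF / _M_COUNT / _M_TYPE macros below describe the message to
 * be broadcast: either the user-defined MPI data type set up by
 * HPL_packL in HPL_binit_1rinM, or, when MPI data types are not used,
 * the contiguous copy of the panel (PANEL->L2, PANEL->len doubles)
 * produced by HPL_copyL.
 */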
#ifdef HPL_USE_MPI_DATATYPE

#define   _M_BUFF     PANEL->buffers[0]
#define   _M_COUNT    PANEL->counts[0]
#define   _M_TYPE     PANEL->dtypes[0]

#else

#define   _M_BUFF     (void *)(PANEL->L2)
#define   _M_COUNT    PANEL->len
#define   _M_TYPE     MPI_DOUBLE

#endif

#ifdef STDC_HEADERS
int HPL_bcast_1rinM
(
   HPL_T_panel                * PANEL,
   int                        * IFLAG
)
#else
int HPL_bcast_1rinM( PANEL, IFLAG )
   HPL_T_panel                * PANEL;
   int                        * IFLAG;
#endif
{
/*
 * .. Local Variables ..
 */
   MPI_Comm                   comm;
   int                        ierr, go, next, msgid, partner, prev,
                              rank, root, size;
/* ..
 * .. Executable Statements ..
 */
   if( PANEL == NULL ) { *IFLAG = HPL_SUCCESS; return( HPL_SUCCESS ); }
   if( ( size = PANEL->grid->npcol ) <= 1 )
   {                     *IFLAG = HPL_SUCCESS; return( HPL_SUCCESS ); }
/*
 * Cast phase: if I am the root process, I send the message to my next
 * two neighbors.  Otherwise, I probe for the message.  If the message
 * has arrived, I receive it and, unless I am the last process of the
 * ring or the process immediately after the root, I forward it to the
 * next process.  If the message has not arrived yet, the caller is
 * informed that the panel has still not been received.
 */
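/*
 * Message path in this modified increasing ring, e.g. with size = 4 and
 * root = 0: the root sends to processes 1 and 2; process 2 (two places
 * after the root) receives directly from the root and forwards to
 * process 3; processes 1 and 3 do not forward.
 */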
   rank = PANEL->grid->mycol; comm  = PANEL->grid->row_comm;
   root = PANEL->pcol;        msgid = PANEL->msgid;
   next = MModAdd1( rank, size );

   if( rank == root )
   {
      ierr = MPI_Send( _M_BUFF, _M_COUNT, _M_TYPE, next, msgid, comm );
      if( ( ierr == MPI_SUCCESS ) && ( size > 2 ) )
      {
         ierr = MPI_Send( _M_BUFF, _M_COUNT, _M_TYPE, MModAdd1( next,
                          size ), msgid, comm );
      }
   }
   else
   {
      prev = MModSub1( rank, size );
      if( ( size > 2 ) &&
          ( MModSub1( prev, size ) == root ) ) partner = root;
      else                                     partner = prev;

      ierr = MPI_Iprobe( partner, msgid, comm, &go, &PANEL->status[0] );

      if( ierr == MPI_SUCCESS )
      {
         if( go != 0 )
         {
            ierr = MPI_Recv( _M_BUFF, _M_COUNT, _M_TYPE, partner, msgid,
                             comm, &PANEL->status[0] );
            if( ( ierr == MPI_SUCCESS ) &&
                ( prev != root ) && ( next != root ) )
            {
               ierr = MPI_Send( _M_BUFF, _M_COUNT, _M_TYPE, next, msgid,
                                comm );
            }
         }
         else { *IFLAG = HPL_KEEP_TESTING; return( *IFLAG ); }
      }
   }
/*
 * If the message has been received (and forwarded when required),
 * return HPL_SUCCESS.  If an error occurred in an MPI call, return
 * HPL_FAILURE.
 */
   *IFLAG = ( ierr == MPI_SUCCESS ? HPL_SUCCESS : HPL_FAILURE );

   return( *IFLAG );
}

#ifdef STDC_HEADERS
int HPL_bwait_1rinM
(
   HPL_T_panel *              PANEL
)
#else
int HPL_bwait_1rinM( PANEL )
   HPL_T_panel *              PANEL;
#endif
{
#ifdef HPL_USE_MPI_DATATYPE
/*
 * .. Local Variables ..
 */
   int                        ierr;
#endif
/* ..
 * .. Executable Statements ..
 */
   if( PANEL == NULL )           { return( HPL_SUCCESS ); }
   if( PANEL->grid->npcol <= 1 ) { return( HPL_SUCCESS ); }
/*
 * Release the MPI user-defined data type created in HPL_binit_1rinM
 */
#ifdef HPL_USE_MPI_DATATYPE
   ierr = MPI_Type_free( &PANEL->dtypes[0] );
   return( ( ierr == MPI_SUCCESS ? HPL_SUCCESS : HPL_FAILURE ) );
#else
   return( HPL_SUCCESS );
#endif
}