fei_test.cpp
/*
// @HEADER
// ************************************************************************
// FEI: Finite Element Interface to Linear Solvers
// Copyright (2005) Sandia Corporation.
//
// Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation, the
// U.S. Government retains certain rights in this software.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the Corporation nor the names of the
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Questions? Contact Alan Williams (william@sandia.gov)
//
// ************************************************************************
// @HEADER
*/

#include <base/fei_iostream.hpp>

#include <base/fei_utils.hpp>
#include <test_utils/fei_test_utils.hpp>

//Each of the following 'test_*.hpp' headers declares a class which
//specializes the 'tester' interface. Each tester specialization
//provides one or more unit-tests. These are used in the function
//execute_unit_tests below in this file.

#include <test_utils/test_Factory.hpp>
#include <test_utils/test_Algebraic.hpp>
#include <test_utils/test_AztecWrappers.hpp>
#include <test_utils/test_misc.hpp>
#include <test_utils/test_Utils.hpp>
#include <test_utils/test_Set.hpp>
#include <test_utils/test_Database.hpp>
#include <test_utils/test_EqnBuffer.hpp>
#include <test_utils/test_EqnCommMgr.hpp>
#include <test_utils/test_PointBlockMap.hpp>
#include <test_utils/test_Tables.hpp>
#include <test_utils/test_VectorSpace.hpp>
#include <test_utils/test_MatrixGraph.hpp>
#include <test_utils/test_Vector.hpp>
#include <test_utils/test_Matrix.hpp>
#include <test_utils/test_SNL_FEI_Structure.hpp>
#include <test_utils/test_FEI_Implementation.hpp>
#include <test_utils/test_FEI_Impl.hpp>
#include <test_utils/test_LinearSystem.hpp>
#include <test_utils/test_FEI.hpp>
#include <test_utils/test_benchmarks.hpp>

#include <test_utils/poisson_beam_mains.hpp>

#include <fei_CommUtils.hpp>
#include <snl_fei_Utils.hpp>
#include <fei_ParameterSet.hpp>

#include <Teuchos_GlobalMPISession.hpp>

#undef fei_file
#define fei_file "fei_test.cpp"

#include <fei_ErrMacros.hpp>

int test_library_plugins(MPI_Comm comm);

void execute_benchmarks(MPI_Comm comm);

int execute_named_test(const std::string& testname,
                       int argc, char** argv, MPI_Comm comm);

void read_input_and_execute_fullsystem_tests(const std::string& filename,
                                             int argc, char** argv, MPI_Comm comm);

void execute_unit_tests(const std::string& path, MPI_Comm comm);

void execute_fullsystem_tests(MPI_Comm comm, const std::string& path,
                              fei::ParameterSet& name_numproc_pairs);

#ifndef FEI_SER
int split_four_procs_into_two_groups(MPI_Comm comm,
                                     MPI_Comm& newcomm1, MPI_Group& newgroup1,
                                     MPI_Comm& newcomm2, MPI_Group& newgroup2);
#endif

//--------------------------------------------------
// main
//--------------------------------------------------
int main(int argc, char** argv) {

  double start_time = fei::utils::cpu_time();

  Teuchos::GlobalMPISession mpi_session(&argc, &argv, &std::cout);
  int localProc = mpi_session.getRank();

  if (localProc == 0) {
    FEI_COUT << "FEI version: " << fei::utils::version() << FEI_ENDL;
  }

  //Check whether the -test flag was used.

  std::string testname = fei_test_utils::get_arg_value("-test", argc, argv);

  int errcode = 0;

  if (!testname.empty()) {
    errcode = execute_named_test(testname, argc, argv, MPI_COMM_WORLD);

    if (localProc == 0 && errcode == 0) {
      FEI_COUT << localProc << ": FEI test successful" << FEI_ENDL;
    }
  }
  else {
    //...else, since testname is empty, check whether the -i flag was used
    //to specify an input file.
    //(construct_filename constructs a file name using a combination of the
    // '-i <file>' and optional '-d <path>' flags.)
    std::string filename;
    if (localProc == 0) {
      filename = fei_test_utils::construct_filename(argc, argv);
    }
    fei_test_utils::broadcast_string(MPI_COMM_WORLD, 0, filename);

    try {
      read_input_and_execute_fullsystem_tests(filename, argc, argv, MPI_COMM_WORLD);
    }
    catch(std::runtime_error& exc) {
      fei::console_out() << "caught fei test error: " << exc.what() << FEI_ENDL;
      errcode = -1;
    }
  }

  int global_err_code = 0;
  fei::GlobalSum(MPI_COMM_WORLD, errcode, global_err_code);

  if (localProc == 0) {
    double elapsedTime = fei::utils::cpu_time() - start_time;
    FEI_COUT << "Proc0 CPU time: " << elapsedTime << " seconds." << FEI_ENDL;
  }

  return(global_err_code);
}
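
//A hedged usage sketch, not part of the original source: this driver is
//normally launched under MPI with either the '-test' flag or the '-i'/'-d'
//flags handled above. The executable name and paths below are illustrative
//assumptions only:
//
//  mpirun -np 4 ./fei_test.exe -test unit_tests -d ./test_data
//  mpirun -np 2 ./fei_test.exe -i input_files.txt -d ./test_data
//
//'-test <name>' selects one of the tests dispatched in execute_named_test,
//while '-i <file>' names a list of full-system test inputs (an example of
//that file's format follows read_input_and_execute_fullsystem_tests below).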

void read_input_and_execute_fullsystem_tests(const std::string& filename,
                                             int argc, char** argv,
                                             MPI_Comm comm)
{
  //We'll run some 'full-system' FEI tests, if any are contained in the
  //specified filename.
  //First, fill an array with names that we read from filename. These names
  //are input-file names, each followed by an int specifying the number of
  //processors the test should be run on. (An illustrative example of this
  //file format appears just after this function.)

  std::vector<std::string> inputFileNames;
  if (!filename.empty()) {
    const char* filename_c_str = filename.c_str();
    fei_test_utils::read_input_file(filename_c_str, comm, inputFileNames);
    fei::ParameterSet name_numproc_pairs;
    fei::utils::parse_strings(inputFileNames, " ", name_numproc_pairs);

    std::string path = fei_test_utils::get_arg_value("-d", argc, argv);

    //make sure every processor has the path string.
    fei_test_utils::broadcast_string(comm, 0, path);

    try {
      execute_fullsystem_tests(comm, path, name_numproc_pairs);
    }
    catch(std::runtime_error& exc) {
      fei::console_out() << "caught fei test error: " << exc.what() << FEI_ENDL;
      throw;
    }

#ifndef FEI_SER
    //Next, if we're running on 4 procs, create two new communicators each
    //representing 2 procs, and run some 2-proc tests using one of those
    //communicators.
    int numProcs = fei::numProcs(comm);
    int localProc = fei::localProc(comm);
    if (numProcs == 4) {
      fei::Barrier(comm);
      if (localProc == 0) {
        FEI_COUT << FEI_ENDL
          << "*****************************************************************"
          << FEI_ENDL << " Running tests with partitioned communicators/groups"
          << FEI_ENDL
          << "*****************************************************************"
          << FEI_ENDL << FEI_ENDL;
      }

      MPI_Comm newcomm1, newcomm2;
      MPI_Group newgroup1, newgroup2;

      //newcomm1 and newgroup1 will represent procs 0 and 1, while
      //newcomm2 and newgroup2 will represent procs 2 and 3.
      split_four_procs_into_two_groups(comm,
                                       newcomm1, newgroup1,
                                       newcomm2, newgroup2);

      if (localProc < 2) {
        execute_fullsystem_tests(newcomm1, path, name_numproc_pairs);
      }

    }
#endif
  }
}
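
//An illustrative sketch, not from the original source, of the input the
//function above expects. The file named by '-i' holds one "name numprocs"
//pair per line; the file names and processor counts here are made up:
//
//  feiTest1.i 1
//  feiTest2.i 2
//  feiTest3.i 4
//
//fei::utils::parse_strings splits each line on the ' ' separator, so iterating
//the resulting fei::ParameterSet yields getName() == "feiTest1.i" and
//getIntValue() == 1 for the first entry; execute_fullsystem_tests then skips
//any entry whose processor count does not match fei::numProcs(comm).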

int test_library_plugins(MPI_Comm comm)
{
  int errcode = 0;

  //--------- factory test --------------
  {
    fei::SharedPtr<tester> tst(new test_Factory(comm));

    try {
      errcode = tst->runtests();
    }
    catch(std::exception& exc) {
      FEI_COUT << "test_library_plugins: caught exception: "
               << exc.what() << FEI_ENDL;
      return(-1);
    }
  }

  //--------- vector test --------------
  {
    fei::SharedPtr<tester> tst(new test_Vector(comm));

    try {
      errcode = tst->runtests();
    }
    catch(std::exception& exc) {
      FEI_COUT << "test_library_plugins: caught exception: "
               << exc.what() << FEI_ENDL;
      return(-1);
    }
  }

  //--------- matrix test --------------
  {
    fei::SharedPtr<tester> tst(new test_Matrix(comm));

    try {
      errcode = tst->runtests();
    }
    catch(std::exception& exc) {
      FEI_COUT << "test_library_plugins: caught exception: "
               << exc.what() << FEI_ENDL;
      return(-1);
    }
  }

  return(errcode);
}
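
//A minimal sketch, not part of the original file, of how the three identical
//try/catch blocks above could be factored into a single helper. It uses only
//names already present in this file; the helper name run_plugin_test is a
//hypothetical choice.
static int run_plugin_test(fei::SharedPtr<tester> tst)
{
  try {
    //runtests() returns 0 on success, nonzero on failure.
    return tst->runtests();
  }
  catch(std::exception& exc) {
    FEI_COUT << "test_library_plugins: caught exception: "
             << exc.what() << FEI_ENDL;
    return(-1);
  }
}
//With this helper, each plugin test above could reduce to, e.g.,
//  errcode = run_plugin_test(fei::SharedPtr<tester>(new test_Vector(comm)));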

int execute_named_test(const std::string& testname,
                       int argc, char** argv, MPI_Comm comm)
{
  int numProcs = fei::numProcs(comm);
  int localProc = fei::localProc(comm);

  std::string path = fei_test_utils::get_arg_value("-d", argc, argv);

  //make sure every processor has the path string.
  fei_test_utils::broadcast_string(comm, 0, path);

  int errcode = 0;

  fei::Barrier(comm);

  bool testname_recognized = false;

  if (testname == "unit_tests") {
    testname_recognized = true;
    try {
      execute_unit_tests(path, comm);
    }
    catch(std::runtime_error& exc) {
      fei::console_out() << "caught unit-test error: " << exc.what() << FEI_ENDL;
      return(-1);
    }
  }

  if (testname == "poisson_main") {
    errcode = poisson_main(argc, argv, comm, numProcs, localProc);
    testname_recognized = true;
  }
  if (testname == "poisson3_main") {
    errcode = poisson3_main(argc, argv, comm, numProcs, localProc);
    testname_recognized = true;
  }

  if (testname == "beam_oldfei_main") {
    errcode = beam_oldfei_main(argc, argv, comm, numProcs, localProc);
    testname_recognized = true;
  }
  if (testname == "beam_main") {
    errcode = beam_main(argc, argv, comm, numProcs, localProc);
    testname_recognized = true;
  }

  if (testname == "feiDriver_main") {
    errcode = feiDriver_main(argc, argv, comm, numProcs, localProc);
    testname_recognized = true;
  }

  if (testname == "library_plugins") {
    errcode = test_library_plugins(comm);
    testname_recognized = true;
  }

  if (testname == "benchmarks") {
    testname_recognized = true;
    if (localProc == 0) {
      try {
        execute_benchmarks(comm);
      }
      catch(std::runtime_error& exc) {
        fei::console_out() << "caught exception from benchmarks: " << exc.what() << FEI_ENDL;
        errcode = -1;
      }
    }
  }

  fei::Barrier(comm);

  if (!testname_recognized && localProc == 0) {
    FEI_COUT << "fei_test: '-test' argument used, but value (" << testname
             << ") not recognized. Valid values are:" << FEI_ENDL
             << "  unit_tests" << FEI_ENDL
             << "  benchmarks" << FEI_ENDL
             << "  library_plugins" << FEI_ENDL
             << "  poisson_main" << FEI_ENDL
             << "  poisson3_main" << FEI_ENDL
             << "  beam_oldfei_main" << FEI_ENDL
             << "  beam_main" << FEI_ENDL
             << "  feiDriver_main" << FEI_ENDL << FEI_ENDL;
    return(-1);
  }
  return(errcode);
}

void execute_benchmarks(MPI_Comm comm)
{
  test_benchmarks tst(comm);

  bool test_failed = false;
  if (tst.runtests() != 0) test_failed = true;

  if (test_failed) throw std::runtime_error("benchmark test failed");
}

void execute_unit_tests(const std::string& path,
                        MPI_Comm comm)
{
  std::vector<fei::SharedPtr<tester> > testers;

  //Since each of the following classes implements the tester interface,
  //we can simply stick instances of them into an array, then run through
  //the array running the tests on each class instance. (A minimal sketch of
  //such a tester specialization appears just after this function.)

  testers.push_back(fei::SharedPtr<tester>(new test_misc(comm)));
  testers.push_back(fei::SharedPtr<tester>(new test_Utils(comm)));
  testers.push_back(fei::SharedPtr<tester>(new test_Set(comm)));
  testers.push_back(fei::SharedPtr<tester>(new test_Database(comm)));
  testers.push_back(fei::SharedPtr<tester>(new test_EqnBuffer(comm)));
  testers.push_back(fei::SharedPtr<tester>(new test_EqnCommMgr(comm)));
  testers.push_back(fei::SharedPtr<tester>(new test_PointBlockMap(comm)));
  testers.push_back(fei::SharedPtr<tester>(new test_Algebraic(comm)));
  testers.push_back(fei::SharedPtr<tester>(new test_AztecWrappers(comm)));
  testers.push_back(fei::SharedPtr<tester>(new test_Tables(comm)));
  testers.push_back(fei::SharedPtr<tester>(new test_VectorSpace(comm)));
  testers.push_back(fei::SharedPtr<tester>(new test_MatrixGraph(comm)));
  testers.push_back(fei::SharedPtr<tester>(new test_Vector(comm)));
  testers.push_back(fei::SharedPtr<tester>(new test_Matrix(comm)));
  testers.push_back(fei::SharedPtr<tester>(new test_Factory(comm)));
  testers.push_back(fei::SharedPtr<tester>(new test_SNL_FEI_Structure(comm)));
  testers.push_back(fei::SharedPtr<tester>(new test_FEI_Implementation(comm)));
  testers.push_back(fei::SharedPtr<tester>(new test_FEI_Impl(comm)));
  testers.push_back(fei::SharedPtr<tester>(new test_LinearSystem(comm)));

  std::vector<fei::SharedPtr<tester> >::const_iterator
    testers_iter = testers.begin(),
    testers_end = testers.end();

  std::string failed_test_name;
  bool test_failed = false;
  for(; testers_iter != testers_end; ++testers_iter) {
    fei::Barrier(comm);

    fei::SharedPtr<tester> tst = *testers_iter;
    tst->setPath(path);
    if (tst->runtests() != 0) {
      failed_test_name = tst->getName();
      test_failed = true;
    }
  }

  if (test_failed) {
    std::string str1("unit-test failed: ");
    throw std::runtime_error(str1+failed_test_name);
  }
}
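
//A minimal sketch, not part of the original file, of what one more tester
//specialization might look like. The exact 'tester' base-class constructor
//and virtual signatures are assumptions inferred from how the interface is
//used above, so this is left as an illustrative comment rather than code:
//
//  class test_Example : public tester {
//   public:
//    test_Example(MPI_Comm comm) : comm_(comm) {}
//
//    const char* getName() { return "test_Example"; }
//
//    int runtests()
//    {
//      //return 0 on success, nonzero on failure, which is what
//      //execute_unit_tests above checks for.
//      return 0;
//    }
//
//   private:
//    MPI_Comm comm_;
//  };
//
//An instance would be added to the 'testers' vector above with
//  testers.push_back(fei::SharedPtr<tester>(new test_Example(comm)));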

void execute_fullsystem_tests(MPI_Comm comm,
                              const std::string& path,
                              fei::ParameterSet& name_numproc_pairs)
{
  test_FEI test_fei(comm);

  if (!path.empty()) {
    test_fei.setPath(path.c_str());
  }
  else {
    test_fei.setPath(".");
  }

  //We'll iterate name_numproc_pairs, passing filenames to
  //test_fei and having it run tests.

  fei::ParameterSet::const_iterator
    n_iter = name_numproc_pairs.begin(),
    n_end = name_numproc_pairs.end();

  for(; n_iter != n_end; ++n_iter) {
    const char* fileName = (*n_iter).getName().c_str();
    int numProcsToUse = (*n_iter).getIntValue();

    if (numProcsToUse != fei::numProcs(comm)) {
      continue;
    }

    fei::Barrier(comm);
    if (fei::localProc(comm) == 0) {
      FEI_COUT << FEI_ENDL << "*****" << FEI_ENDL
               << fileName << FEI_ENDL << "*****" << FEI_ENDL;
    }
    fei::Barrier(comm);

    test_fei.setFileName(fileName);

    int resultCode = test_fei.runtests();
    if (resultCode < 0) {
      throw std::runtime_error("nonzero resultCode from test_fei.runtests()");
    }
  }
}

#ifndef FEI_SER
int split_four_procs_into_two_groups(MPI_Comm comm,
                                     MPI_Comm& newcomm1, MPI_Group& newgroup1,
                                     MPI_Comm& newcomm2, MPI_Group& newgroup2)
{
  //This function is hardwired to operate on 4 processors. It will create two new
  //communicators and two new groups, each representing 2 processors. newcomm1
  //and newgroup1 will contain procs 0 and 1, while newcomm2 and newgroup2 will
  //contain procs 2 and 3.

  int numProcs, localProc;
  MPI_Comm_rank(comm, &localProc);
  MPI_Comm_size(comm, &numProcs);

  if (numProcs != 4) {
    return(-1);
  }

  std::vector<int> procs(numProcs);
  for(int i=0; i<numProcs; ++i) {
    procs[i] = i;
  }

  int midpoint = 2;
  int newgroup1_size = 2, newgroup2_size = 2;

  MPI_Group group;
  MPI_Comm_group(comm, &group);

  MPI_Group_incl(group, newgroup1_size, &procs[0], &newgroup1);
  MPI_Group_incl(group, newgroup2_size, &procs[0]+midpoint, &newgroup2);

  MPI_Comm_create(comm, newgroup1, &newcomm1);
  MPI_Comm_create(comm, newgroup2, &newcomm2);

  return(0);
}
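
//A hedged alternative sketch, not part of the original file: the same 2+2
//partition of a 4-proc communicator can be obtained with MPI_Comm_split,
//which avoids the explicit group bookkeeping when the groups themselves are
//not needed, and gives every rank a valid communicator for its own half.
//The function name is a hypothetical choice.
static int split_four_procs_with_comm_split(MPI_Comm comm, MPI_Comm& halfcomm)
{
  int numProcs, localProc;
  MPI_Comm_rank(comm, &localProc);
  MPI_Comm_size(comm, &numProcs);

  if (numProcs != 4) {
    return(-1);
  }

  //procs 0 and 1 get color 0, procs 2 and 3 get color 1; using the rank as
  //the key preserves the relative ordering within each half.
  int color = localProc < 2 ? 0 : 1;
  MPI_Comm_split(comm, color, localProc, &halfcomm);

  return(0);
}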
#endif
