// lsgrmHeader.h
#ifndef __LSGRM_HEADER_H
#define __LSGRM_HEADER_H
#include <cassert>
#include <cstdlib>
#include <string>
#include <sstream>
#include <fstream>
#include <algorithm>
#include <vector>
#include <iterator>
#include <stack>
#include <cmath>    // std::floor (used in ShowTime)
#include <iostream> // std::cout  (used in ShowTime)
#include <boost/algorithm/string.hpp>

#include <boost/progress.hpp>

#ifdef OTB_USE_MPI
#include "otbMPIConfig.h"
#include "mpi.h" // TODO: implement needed methods inside otbMPIConfig.h
#endif

/*
 * This function returns TRUE if the process #myrank is the one in charge
 * of the work unit #div in a pool of #nprocs processes
 */
bool MyTurn(int div = 0)
{
#ifdef OTB_USE_MPI
  otb::MPIConfig::Pointer mpiConfig = otb::MPIConfig::Instance();
  unsigned int proc = 0;
  if (mpiConfig->GetNbProcs() != 0)
    proc = div % mpiConfig->GetNbProcs();
  return (proc == mpiConfig->GetMyRank());
#endif
  return true;
}
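
/*
 * Example (a minimal sketch; nbTiles and ProcessTile are hypothetical):
 *
 *   for (unsigned int tile = 0; tile < nbTiles; tile++)
 *     if (MyTurn(tile))
 *       ProcessTile(tile); // each MPI process handles every GetNbProcs()-th work unit
 *
 * Without MPI support, MyTurn() always returns true and a single process does all the work.
 */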

/*
 * This function gathers the given value from all processes on the master, sums
 * the partial values, and sends the total back so every process holds the same result
 * TODO: MPI implementation using OTB MPI Wrapper
 */
#ifdef OTB_USE_MPI
template<typename T>
void GatherMe(T& x, MPI_Datatype dataType)
{

  if (otb::MPIConfig::Instance()->GetMyRank() == 0)
    {
    // Master process
    // Gather
    for (unsigned int p = 1 ; p < otb::MPIConfig::Instance()->GetNbProcs() ; p++)
      {
      T partial_sum;
      MPI_Recv( &partial_sum, 1, dataType, p, MPI_ANY_TAG, MPI_COMM_WORLD, MPI_STATUS_IGNORE);
      x += partial_sum;
      }
    // Dispatch
    for (unsigned int p = 1 ; p < otb::MPIConfig::Instance()->GetNbProcs() ; p++)
      MPI_Send(&x, 1, dataType, p, 0, MPI_COMM_WORLD); // MPI_ANY_TAG is only valid for receives; use tag 0 here
    }
  else
    {
    // Slave process
    MPI_Send(&x, 1, dataType, 0, 0, MPI_COMM_WORLD); // MPI_ANY_TAG is only valid for receives; use tag 0 here
    MPI_Recv(&x, 1, dataType, 0, MPI_ANY_TAG, MPI_COMM_WORLD, MPI_STATUS_IGNORE);
    }
}
#endif
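
/*
 * Note: the gather + re-dispatch above is functionally equivalent to an
 * MPI_Allreduce with MPI_SUM (a possible simplification, not what is done here).
 *
 * Example (a minimal sketch; localCount is a hypothetical per-process value):
 *
 *   long long int localCount = CountSomething();
 *   GatherMe<long long int>(localCount, MPI_LONG_LONG_INT);
 *   // localCount now holds the global sum on every process
 */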

/*
 * Gather accumulatedMemory and isFusion variables
 * TODO: MPI implementation using OTB MPI Wrapper
 */
void GatherUsefulVariables(unsigned long long int& accumulatedMemory, bool& isFusion)
{
#ifdef OTB_USE_MPI
  otb::MPIConfig::Instance()->barrier();
  int isFusionInteger = 0;
  long long int accumulatedMemoryLLI = static_cast<long long int>(accumulatedMemory);
  if (isFusion)
    isFusionInteger = 1;
  GatherMe<int>(isFusionInteger, MPI_INT);
  GatherMe<long long int>(accumulatedMemoryLLI, MPI_LONG_LONG_INT);
  accumulatedMemory = static_cast<long long unsigned int>(accumulatedMemoryLLI);
  if (isFusionInteger>0)
    isFusion = true;
#endif
}

/*
 * Print time elapsed
 */
void ShowTime(boost::timer & t) // by reference, so that restart() below affects the caller's timer
{
  std::cout << "--- Process duration : " << std::floor(t.elapsed()) << " s" << std::endl;
  t.restart();
}
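
/*
 * Example (a minimal sketch; DoWork is hypothetical):
 *
 *   boost::timer chrono; // starts counting at construction
 *   DoWork();
 *   ShowTime(chrono);    // prints the elapsed seconds and restarts the timer
 */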
#endif