MPI Coding Practice

  • Compiling a program for MPI is almost the same as compiling a regular C or C++ program

    • The C compiler is mpicc and the C++ compiler is mpic++ (also available as mpicxx).

    • For example, to compile MyProg.c you would use a command like

    • mpicc -O2 -o MyProg MyProg.c
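
For a quick test, MyProg.c might contain the minimal MPI program sketched below; the contents are purely illustrative, anything that calls MPI_Init and MPI_Finalize will do.

#include <stdio.h>
#include <mpi.h>

int main(int argc, char* argv[])
{
    MPI_Init(&argc, &argv);

    // Ask the communicator for this process' rank and the total number of processes
    int rank;
    int size;
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);
    MPI_Comm_size(MPI_COMM_WORLD, &size);

    printf("Hello from process %d of %d.\n", rank, size);

    MPI_Finalize();
    return 0;
}

The compiled program is then launched through the MPI launcher, typically mpirun -np 4 ./MyProg (or mpiexec, depending on the installation).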

Test compile C++
#include <iostream>

int main()
{
        std::cout << "Hello, world!" << std::endl;
        return 0;
}
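
Assuming the test file is saved as hello.cpp (the name is just an example), it compiles the same way with the C++ wrapper, e.g. mpic++ -O2 -o hello hello.cpp; since it makes no MPI calls it can be run directly like any ordinary executable.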

1. List of programs

Allgather
Code Allgather
#include <stdio.h>
#include <stdlib.h>
#include <mpi.h>

/**
 * @brief Illustrates how to use an allgather.
 * @details This application is meant to be run with 3 MPI processes. Every MPI
 * process begins with a value, then every MPI process collects the entirety of
 * the data gathered and prints them. It can be visualised as follows:
 *
 * +-----------+  +-----------+  +-----------+
 * | Process 0 |  | Process 1 |  | Process 2 |
 * +-+-------+-+  +-+-------+-+  +-+-------+-+
 *   | Value |      | Value |      | Value |
 *   |   0   |      |  100  |      |  200  |
 *   +-------+      +-------+      +-------+
 *       |________      |      ________|
 *                |     |     | 
 *             +-----+-----+-----+
 *             |  0  | 100 | 200 |
 *             +-----+-----+-----+
 *             |   Each process  |
 *             +-----------------+
 **/
int main(int argc, char* argv[])
{
    MPI_Init(&argc, &argv);

    // Get number of processes and check that 3 processes are used
    int size;
    MPI_Comm_size(MPI_COMM_WORLD, &size);
    if(size != 3)
    {
        printf("This application is meant to be run with 3 MPI processes.\n");
        MPI_Abort(MPI_COMM_WORLD, EXIT_FAILURE);
    }

    // Get my rank
    int my_rank;
    MPI_Comm_rank(MPI_COMM_WORLD, &my_rank);

    // Define my value
    int my_value = my_rank * 100;
    printf("Process %d, my value = %d.\n", my_rank, my_value);

    int buffer[3];
    MPI_Allgather(&my_value, 1, MPI_INT, buffer, 1, MPI_INT, MPI_COMM_WORLD);
    printf("Values collected on process %d: %d, %d, %d.\n", my_rank, buffer[0], buffer[1], buffer[2]);

    MPI_Finalize();

    return EXIT_SUCCESS;
}
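To try this listing, a possible build-and-run sequence is (the source and executable names are assumptions):

mpicc -O2 -o allgather allgather.c
mpirun -np 3 ./allgather

Each of the three processes first prints its own value and then the gathered values 0, 100 and 200; the order in which the output lines appear is not deterministic.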
Allreduce
Code Allreduce
#include <stdio.h>
#include <stdlib.h>
#include <mpi.h>

/**
 * @brief Illustrates how to use an all-reduce.
 * @details This application consists of a sum all-reduction; every MPI process
 * sends its rank for reduction before the sum of these ranks is stored in the
 * receive buffer of each MPI process. It can be visualised as follows:
 *
 * +-----------+ +-----------+ +-----------+ +-----------+
 * | Process 0 | | Process 1 | | Process 2 | | Process 3 |
 * +-+-------+-+ +-+-------+-+ +-+-------+-+ +-+-------+-+
 *   | Value |     | Value |     | Value |     | Value |
 *   |   0   |     |   1   |     |   2   |     |   3   |
 *   +-------+     +----+--+     +--+----+     +-------+
 *            \         |           |         /
 *             \        |           |        /
 *              \       |           |       /
 *               \      |           |      /
 *                +-----+-----+-----+-----+
 *                            |
 *                        +---+---+
 *                        |  SUM  |
 *                        +---+---+
 *                        |   6   |
 *                        +-------+
 *                            |
 *                +-----+-----+-----+-----+
 *               /      |           |      \
 *              /       |           |       \
 *             /        |           |        \
 *            /         |           |         \
 *   +-------+     +----+--+     +--+----+     +-------+  
 *   |   6   |     |   6   |     |   6   |     |   6   |  
 * +-+-------+-+ +-+-------+-+ +-+-------+-+ +-+-------+-+
 * | Process 0 | | Process 1 | | Process 2 | | Process 3 |
 * +-----------+ +-----------+ +-----------+ +-----------+
 **/
int main(int argc, char* argv[])
{
    MPI_Init(&argc, &argv);

    // Get the size of the communicator
    int size = 0;
    MPI_Comm_size(MPI_COMM_WORLD, &size);
    if(size != 4)
    {
        printf("This application is meant to be run with 4 MPI processes.\n");
        MPI_Abort(MPI_COMM_WORLD, EXIT_FAILURE);
    }

    // Get my rank
    int my_rank;
    MPI_Comm_rank(MPI_COMM_WORLD, &my_rank);

    // Each MPI process contributes its rank to the reduction; every MPI process receives the resulting sum
    int reduction_result = 0;
    MPI_Allreduce(&my_rank, &reduction_result, 1, MPI_INT, MPI_SUM, MPI_COMM_WORLD);

    printf("[MPI Process %d] The sum of all ranks is %d.\n", my_rank, reduction_result);

    MPI_Finalize();

    return EXIT_SUCCESS;
}
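As with the previous example, a possible build-and-run sequence is (file names are assumptions):

mpicc -O2 -o allreduce allreduce.c
mpirun -np 4 ./allreduce

Every one of the four processes should report the same sum of all ranks, 0 + 1 + 2 + 3 = 6, with the output lines appearing in no particular order.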
Code Demo
MPI_Allgather
MPI_Allreduce
MPI_Alltoall
MPI_Barrier
MPI_Bcast
MPI_Bsend
MPI_Buffer_attach
MPI_Buffer_detach
MPI_Comm_spawn
MPI_Comm_split
MPI_Exscan
MPI_File_close
MPI_File_open
MPI_File
MPI_Gather
MPI_Graph_get
MPI_Graph_neighbors_count
MPI_Graph_neighbors
MPI_Graphdims_get
MPI_Iallgather
MPI_Iallreduce
MPI_Ialltoall
MPI_Ibarrier
MPI_Ibcast
MPI_Ibsend
MPI_Ireduce
MPI_Ireduce_scatter
MPI_Ireduce_scatter_block
MPI_Irsend
MPI_Iscatterv
MPI_Issend

…