[MPI] Collective communication - MPI_Bcast
· One to all : MPI_Bcast, MPI_Scatter
· All to one : MPI_Reduce, MPI_Gather (see the sketch after this list)
· All to all : MPI_Alltoall
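- MPI_Reduce and MPI_Gather are only named above; a minimal C sketch of the all-to-one pattern is given here for illustration (not part of the original example; assumes a standard MPI installation):
#include <stdio.h>
#include <stdlib.h>
#include <mpi.h>

/* All-to-one sketch: every rank contributes its rank number;
   root 0 receives the sum (MPI_Reduce) and the full list (MPI_Gather). */
int main(int argc, char **argv)
{
    int rank, size, sum = 0;

    MPI_Init(&argc, &argv);
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);
    MPI_Comm_size(MPI_COMM_WORLD, &size);

    /* all-to-one reduction: the sum of all ranks arrives at root 0 */
    MPI_Reduce(&rank, &sum, 1, MPI_INT, MPI_SUM, 0, MPI_COMM_WORLD);

    /* all-to-one gather: root 0 collects one int from every rank */
    int *all = NULL;
    if (rank == 0)
        all = (int *)malloc(size * sizeof(int));
    MPI_Gather(&rank, 1, MPI_INT, all, 1, MPI_INT, 0, MPI_COMM_WORLD);

    if (rank == 0) {
        printf("sum of ranks = %d\n", sum);
        for (int i = 0; i < size; i++)
            printf("gathered[%d] = %d\n", i, all[i]);
        free(all);
    }

    MPI_Finalize();
    return 0;
}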
- MPI_Bcast : Broadcasts a message from the "root" process to all other processes in the same communicator.
- C/C++ : MPI_Bcast(array, 100, MPI_INT, 3, comm);
- Fortran : call MPI_Bcast(array, 100, MPI_INTEGER, 3, comm, ierr)
- Arguments : buffer, count, datatype, root rank (3 here), communicator (plus the ierr status argument in Fortran).
- Example Source Code
program hello
    use mpi
    implicit none
    integer :: f(10)
    integer :: rank, src = 0, ierr, i

    call MPI_Init(ierr)
    call MPI_Comm_rank(MPI_COMM_WORLD, rank, ierr)

    ! prepare the array on the src (root) process
    f = 0
    if (rank == src) then
        do i = 1, 10
            f(i) = f(i) + i
        end do
    end if
    ! broadcast the integer array from src to all ranks
    call MPI_Bcast(f, 10, MPI_INTEGER, src, MPI_COMM_WORLD, ierr)
    print *, f
    call MPI_Finalize(ierr)
end program hello
- Output (3 processes print concurrently; each rank's 10-element array wraps over two lines, so the lines interleave)
C>>mpiexec -n 3 mpibcast.exe
1 2 3 4 5 6
1 2 3 4 5 6
7 8 9 10
7 8 9 10
1 2 3 4 5 6
7 8 9 10
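- For reference, a C counterpart of the Fortran program above (a sketch following the same logic; not part of the original example):
#include <stdio.h>
#include <mpi.h>

/* C version of the broadcast example: root fills f with 1..10,
   MPI_Bcast copies it to every rank, and each rank prints it. */
int main(int argc, char **argv)
{
    int f[10] = {0};
    int rank, src = 0, i;

    MPI_Init(&argc, &argv);
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);

    /* prepare the array on the src (root) process */
    if (rank == src)
        for (i = 0; i < 10; i++)
            f[i] = f[i] + (i + 1);

    /* broadcast the integer array from src to all ranks */
    MPI_Bcast(f, 10, MPI_INT, src, MPI_COMM_WORLD);

    for (i = 0; i < 10; i++)
        printf("%d ", f[i]);
    printf("\n");

    MPI_Finalize();
    return 0;
}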