io.F90
! I/O routines for heat equation solver
module io
  use heat
  ! MPI interfaces for the send/receive calls below
  use mpi

contains
  ! Output routine, saves the temperature distribution as a png image
  ! Arguments:
  !   curr (type(field)): variable with the temperature data
  !   iter (integer): index of the time step
  !   parallel (type(parallel_data)): parallel decomposition information
  subroutine write_field(curr, iter, parallel)
    use pngwriter
    implicit none

    type(field), intent(in) :: curr
    integer, intent(in) :: iter
    type(parallel_data), intent(in) :: parallel

    character(len=85) :: filename
    integer :: stat
    real(dp), dimension(:,:), allocatable, target :: full_data
    integer :: p, ierr

    if (parallel%rank == 0) then
      allocate(full_data(curr%nx_full, curr%ny_full))
      ! Copy rank #0 data to the global array
      full_data(1:curr%nx, 1:curr%ny) = curr%data(1:curr%nx, 1:curr%ny)

      ! Receive data from other ranks
      do p = 1, parallel%size - 1
        call mpi_recv(full_data(1:curr%nx, p*curr%ny + 1:(p + 1) * curr%ny), &
             & curr%nx * curr%ny, MPI_DOUBLE_PRECISION, p, 22, &
             & MPI_COMM_WORLD, MPI_STATUS_IGNORE, ierr)
      end do

      write(filename, '(A5,I4.4,A4)') 'heat_', iter, '.png'
      stat = save_png(full_data, curr%nx_full, curr%ny_full, filename)
      deallocate(full_data)
    else
      ! Send data to rank #0
      call mpi_send(curr%data(1:curr%nx, 1:curr%ny), curr%nx * curr%ny, &
           & MPI_DOUBLE_PRECISION, 0, 22, MPI_COMM_WORLD, ierr)
    end if

  end subroutine write_field
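
  ! Note: both write_field above and read_field below assume a 1-D domain
  ! decomposition along the second (y) dimension: every rank holds an equal
  ! nx x ny block, and rank p owns columns p*ny+1 ... (p+1)*ny of the
  ! ny_full-wide global array.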

  ! Reads the temperature distribution from an input file
  ! Arguments:
  !   field0 (type(field)): field variable that will store the read data
  !   filename (char): name of the input file
  !   parallel (type(parallel_data)): parallel decomposition information
  ! Note that this version assumes the input data to be in C memory layout
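  ! For illustration, the read statements below imply an input layout of a
  ! one-line header followed by nx rows of ny values (the concrete numbers
  ! here are only an example):
  !   # 4 4
  !   95.0 95.0 95.0 95.0
  !   ...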
  subroutine read_field(field0, filename, parallel)
    implicit none

    type(field), intent(out) :: field0
    character(len=85), intent(in) :: filename
    type(parallel_data), intent(out) :: parallel

    integer :: nx, ny, i, p, ierr
    character(len=2) :: dummy
    real(dp), dimension(:,:), allocatable :: full_data, inner_data

    open(10, file=filename)
    ! Read the header
    read(10, *) dummy, nx, ny

    call parallel_setup(parallel, nx, ny)
    call set_field_dimensions(field0, nx, ny, parallel)

    ! The arrays for temperature field contain also a halo region
    allocate(field0%data(0:field0%nx+1, 0:field0%ny+1))
    allocate(inner_data(field0%nx, field0%ny))

    if (parallel%rank == 0) then
      allocate(full_data(nx, ny))
      ! Read the data
      do i = 1, nx
        read(10, *) full_data(i, 1:ny)
      end do
      inner_data = full_data(1:field0%nx, 1:field0%ny)

      ! Send data to other ranks
      do p = 1, parallel%size - 1
        call mpi_send(full_data(1:field0%nx, p*field0%ny + 1:(p + 1) * field0%ny), &
             & field0%nx * field0%ny, MPI_DOUBLE_PRECISION, p, 22, &
             & MPI_COMM_WORLD, ierr)
      end do
    else
      ! Receive data
      call mpi_recv(inner_data, field0%nx * field0%ny, &
           & MPI_DOUBLE_PRECISION, 0, 22, MPI_COMM_WORLD, &
           & MPI_STATUS_IGNORE, ierr)
    end if

    ! Copy to full array containing also boundaries
    field0%data(1:field0%nx, 1:field0%ny) = inner_data(:,:)

    ! Set the boundary values
    field0%data(1:field0%nx, 0) = field0%data(1:field0%nx, 1)
    field0%data(1:field0%nx, field0%ny + 1) = field0%data(1:field0%nx, field0%ny)
    field0%data(0, 0:field0%ny + 1) = field0%data(1, 0:field0%ny + 1)
    field0%data(field0%nx + 1, 0:field0%ny + 1) = field0%data(field0%nx, 0:field0%ny + 1)

    close(10)
    deallocate(inner_data)
    if (parallel%rank == 0) then
      deallocate(full_data)
    end if

  end subroutine read_field

end module io
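
! --------------------------------------------------------------------------
! Usage sketch (not part of the original solver): a minimal driver showing
! how read_field and write_field could be combined.  It only relies on the
! heat-module names already referenced above (field, parallel_data); the
! input file name 'bottle.dat' is a placeholder assumption for illustration.
program io_example
  use heat
  use io
  use mpi
  implicit none

  type(field) :: curr
  type(parallel_data) :: parallel
  character(len=85) :: input_file
  integer :: ierr

  call mpi_init(ierr)

  ! Read the initial temperature field and distribute it over the ranks
  input_file = 'bottle.dat'
  call read_field(curr, input_file, parallel)

  ! Write the field of time step 0 as heat_0000.png
  call write_field(curr, 0, parallel)

  call mpi_finalize(ierr)
end program io_example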