
Commit 2ec752e

Implement ESolnManager into ModEM MF
This commit finally integrates ESolnManager into ModEM. As of this commit, the code produces bit-for-bit identical results whether or not the electric field solutions are saved.
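In outline, the commit replaces the hand-rolled create_e_param_place_holder / Pack_e_para_vec / MPI_SEND / MPI_RECV exchange of electric-field solutions with calls into the solution manager. The Fortran fragment below is only a sketch of that pattern, assembled from the calls visible in the diff; the ESolnManager module itself and the full interfaces of EsMgr_save / EsMgr_get are not part of this commit and are assumed here.

    ! Sketch, not the committed code: worker side, after solving one period/polarization.
    ! The manager either MPI-sends the solution to rank 0 or writes it to disk.
    if (trial) then
       call EsMgr_save(e0, to=0, prefix='.trial')   ! keep trial solutions separate
    else
       call EsMgr_save(e0, to=0)
    end if

    ! Sketch, master side: allocate the multi-transmitter container through the
    ! manager, then pull each period/polarization back from the worker (or from file).
    if (.not. eAll%allocated) then
       call EsMgr_create_solnVectorMTX(eAll, d1 % nTx, grid=grid)
    end if
    call EsMgr_get(eAll_out % solns(which_per), which_pol, 1, from=who)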
1 parent 790dae6 commit 2ec752e

2 files changed

Lines changed: 62 additions & 60 deletions


f90/MPI/Main_MPI.f90

Lines changed: 55 additions & 59 deletions
@@ -651,6 +651,8 @@ Subroutine Master_job_fwdPred(sigma,d1,eAll,comm,trial)
         end if
      end if

+     modem_ctx % comm_current = comm_current
+
      if (present(trial)) then
         trial_lcl = trial
      else
@@ -659,16 +661,12 @@ Subroutine Master_job_fwdPred(sigma,d1,eAll,comm,trial)

      ! First, distribute the current model to all workers
      call Master_job_Distribute_Model(sigma)
+
      ! call Master_job_Distribute_Data(d1)
      if(.not. eAll%allocated) then
-        ! call deall(eAll)
-        ! end if
-        call create_solnVectorMTX(d1%nTx,eAll)
-        do iTx=1,nTx
-           call create_solnVector(grid,iTx,e0)
-           call copy_solnVector(eAll%solns(iTx),e0)
-        end do
+        call EsMgr_create_solnVectorMTX(eAll, d1 % nTx, grid=grid)
      end if
+
      job_name= 'FORWARD'
      call Master_job_Distribute_Taskes(job_name,nTx,sigma,eAll,comm_current, trial=trial_lcl)

@@ -681,16 +679,20 @@ Subroutine Master_job_fwdPred(sigma,d1,eAll,comm,trial)
      Call FaceArea(grid, S_F)

      ! Compute the model Responces
-     do iTx=1,nTx
-        do i = 1,d1%d(iTx)%nDt
-           d1%d(iTx)%data(i)%errorBar = .false.
-           iDt = d1%d(iTx)%data(i)%dataType
-           do j = 1,d1%d(iTx)%data(i)%nSite
-              call dataResp(eAll%solns(iTx),sigma,iDt,d1%d(iTx)%data(i)%rx(j),d1%d(iTx)%data(i)%value(:,j), &
-                            d1%d(iTx)%data(i)%orient(j))
+     if (EsMgr_save_in_file) then
+        call Master_job_DataResp(nTx, sigma, d1, trial_lcl)
+     else
+        do iTx=1,nTx
+           do i = 1,d1%d(iTx)%nDt
+              d1%d(iTx)%data(i)%errorBar = .false.
+              iDt = d1%d(iTx)%data(i)%dataType
+              do j = 1,d1%d(iTx)%data(i)%nSite
+                 call dataResp(eAll%solns(iTx),sigma,iDt,d1%d(iTx)%data(i)%rx(j),d1%d(iTx)%data(i)%value(:,j), &
+                               d1%d(iTx)%data(i)%orient(j))
+              end do
            end do
         end do
-     end do
+     end if
      ! clean up the grid elements stored in GridCalc on the master node
      call deall_rvector(l_E)
      call deall_rvector(S_F)
@@ -1067,6 +1069,8 @@ Subroutine Master_job_JmultT(sigma,d,dsigma,eAll,s_hat,comm,use_starting_guess)
         endif
      end if

+     modem_ctx % comm_current = comm_current
+
      if (present(use_starting_guess)) then
         use_starting_guess_lcl = use_starting_guess
      else
@@ -1093,7 +1097,6 @@ Subroutine Master_job_JmultT(sigma,d,dsigma,eAll,s_hat,comm,use_starting_guess)
         end do
      end if

-
      if (returne_m_vectors) then
         if (.not. associated(s_hat)) then
            allocate(s_hat(nTx))
@@ -1123,18 +1126,20 @@ Subroutine Master_job_JmultT(sigma,d,dsigma,eAll,s_hat,comm,use_starting_guess)
      file_name='e.soln'
      !call write_solnVectorMTX(20,file_name,eAll_out)

-     do iper=1,nTx
-        !e0=eAll%solns(iper)
-        !e =eAll_out%solns(iper)
-        call PmultT(eAll_temp%solns(iper),sigma,eAll_out%solns(iper) &
-           & ,dsigma_temp)
-        call QmultT(eAll_temp%solns(iper),sigma,d%d(iper),Qcomb)
-        call scMultAdd(ONE,Qcomb,dsigma_temp)
-        if (returne_m_vectors) then
-           s_hat(iper)=dsigma_temp
-        end if
-        call linComb_modelParam(ONE,dsigma,ONE,dsigma_temp,dsigma)
-     end do
+     if (EsMgr_save_in_file) then
+        call Master_job_PQMult(nTx, sigma, dsigma, use_starting_guess=use_starting_guess_lcl)
+     else
+        do iper=1,nTx
+           call PmultT(eAll_temp%solns(iper),sigma,eAll_out%solns(iper) &
+              & ,dsigma_temp)
+           call QmultT(eAll_temp%solns(iper),sigma,d%d(iper),Qcomb)
+           call scMultAdd(ONE,Qcomb,dsigma_temp)
+           if (returne_m_vectors) then
+              s_hat(iper)=dsigma_temp
+           end if
+           call linComb_modelParam(ONE,dsigma,ONE,dsigma_temp,dsigma)
+        end do
+     end if

      endtime=MPI_Wtime()
      time_used = endtime-starttime
@@ -1832,10 +1837,7 @@ subroutine Master_job_Distribute_Taskes(job_name,nTx,sigma,eAll_out, &
            which_per=per_index
            do ipol1=1,nPol_MPI
               which_pol=ipol1
-              call create_e_param_place_holder(eAll_in%solns(which_per))
-              call Pack_e_para_vec(eAll_in%solns(which_per))
-              call MPI_SEND(e_para_vec, Nbytes, MPI_PACKED, who, &
-                 & FROM_MASTER, comm_current, ierr)
+              call EsMgr_save(eAll_in % solns(which_per), to=who)
            end do
         end if
         write(ioMPI,'(a10,a16,i5,a8,i5,a11,i5)')trim(job_name), &
@@ -1861,14 +1863,8 @@ subroutine Master_job_Distribute_Taskes(job_name,nTx,sigma,eAll_out, &
         who=worker_job_task%taskid
         which_per=worker_job_task%per_index
         which_pol=worker_job_task%pol_index
-
-        call create_e_param_place_holder(eAll_out%solns(which_per))
-        call MPI_RECV(e_para_vec, Nbytes, MPI_PACKED, who,FROM_WORKER, &
-           & comm_current, STATUS, ierr)
-        ! call get_nPol_MPI(eAll_out%solns(which_per))
-        ! if (nPol_MPI==1) which_pol=1

-        call Unpack_e_para_vec(eAll_out%solns(which_per))
+        call EsMgr_get(eAll_out % solns(which_per), which_pol, 1, from=who)

         write(ioMPI,'(a10,a16,i5,a8,i5,a11,i5)')trim(job_name) , &
            & ': Receive Per # ',which_per ,' and Pol # ', which_pol ,' from ',&
@@ -1926,11 +1922,7 @@ subroutine Master_job_Distribute_Taskes(job_name,nTx,sigma,eAll_out, &
            call get_nPol_MPI(eAll_out%solns(per_index))
            do ipol1=1,nPol_MPI
               which_pol=ipol1
-              call create_e_param_place_holder(eAll_in%solns( &
-                 & which_per))
-              call Pack_e_para_vec(eAll_in%solns(which_per))
-              call MPI_SEND(e_para_vec, Nbytes, MPI_PACKED, &
-                 & who, FROM_MASTER, comm_current, ierr)
+              call EsMgr_save(eAll_in % solns(which_per), who)
            end do
         end if
         write(ioMPI,'(a10,a16,i5,a8,i5,a11,i5)')trim(job_name), &
@@ -2121,6 +2113,11 @@ Subroutine Worker_job(sigma,d)
      write(6,'(a12,a12,a30,a16,i5)') node_info,' MPI TASK [', &
         & trim(worker_job_task%what_to_do),'] received from ', &
         & STATUS(MPI_SOURCE)
+
+     modem_ctx % comm_current = comm_current
+     modem_ctx % rank_current = rank_current
+     trial = worker_job_task % trial
+
      ! for debug
      ! write(6,*) 'source = ', MPI_SOURCE
      ! write(6,*) 'tag = ', MPI_TAG
@@ -2180,13 +2177,13 @@ Subroutine Worker_job(sigma,d)
         call Pack_worker_job_task
         call MPI_SEND(worker_job_package,Nbytes, MPI_PACKED,0, &
            & FROM_WORKER, comm_current, ierr)
-        ! Create e0_temp package (one Period and one Polarization)
-        ! and send it to the master
+        ! Use EsMgr_save to send e0 back to main task or save it to disk
         which_pol=1
-        call create_e_param_place_holder(e0)
-        call Pack_e_para_vec(e0)
-        call MPI_SEND(e_para_vec, Nbytes, MPI_PACKED, 0, &
-           & FROM_WORKER, comm_current, ierr)
+        if (trial) then
+           call EsMgr_save(e0, to=0, prefix='.trial')
+        else
+           call EsMgr_save(e0, to=0)
+        end if
      end if
      ! so long!
      call reset_e_soln(e0)
@@ -2214,8 +2211,10 @@ Subroutine Worker_job(sigma,d)
      do pol_index = 1, get_nPol(per_index)

         if (worker_job_task % trial) then
+           write(0,*) "Reading the trial"
            call EsMgr_get(e0, e0 % tx, pol_index=pol_index, prefix='.trial')
         else
+           write(0,*) "Not reading the trial"
            call EsMgr_get(e0, e0 % tx, pol_index=pol_index)
         endif
      end do
@@ -2485,10 +2484,11 @@ Subroutine Worker_job(sigma,d)
         & ' Start Receiving ' , orginal_nPol, ' from Master'
      do ipol=1,nPol_MPI
         which_pol=ipol
-        call create_e_param_place_holder(e0)
-        call MPI_RECV(e_para_vec, Nbytes, MPI_PACKED, 0, &
-           & FROM_MASTER,comm_current, STATUS, ierr)
-        call Unpack_e_para_vec(e0)
+        if (worker_job_task % trial) then
+           call EsMgr_get(e0, per_index, pol_index=ipol, from=0, prefix='.trial')
+        else
+           call EsMgr_get(e0, per_index, pol_index=ipol, from=0)
+        end if
      end do
      call initSolverWithOutE0(per_index,sigma,grid,size_local,&
         e,comb)
@@ -2532,11 +2532,7 @@ Subroutine Worker_job(sigma,d)
         call MPI_SEND(worker_job_package,Nbytes, MPI_PACKED,0, &
            & FROM_WORKER, comm_current, ierr)
         which_pol=1
-        call create_e_param_place_holder(e)
-        call Pack_e_para_vec(e)
-        call MPI_SEND(e_para_vec, Nbytes, MPI_PACKED, 0, &
-           & FROM_WORKER, comm_current, ierr)
-        !deallocate(e_para_vec,worker_job_package)
+        call EsMgr_save(e, to=0, prefix=".JmultT")
      end if
      ! hasta la vista!
      now = MPI_Wtime()

f90/Mod3DMT.f90

Lines changed: 7 additions & 1 deletion
@@ -62,7 +62,13 @@ program Mod3DMT
      call initGlobalData(cUserDef)
      ! set the grid for the numerical computations
 #ifdef MPI
-     call setGrid_MPI(grid)
+     call setGrid_MPI(grid)
+
+     call EsMgr_init(grid, context=modem_ctx, &
+        save_in_file=cUserDef % storeSolnsInFile, &
+        prefix=cUserDef % prefix, &
+        ftype=FTYPE_BINARY)
+
      ! Check if a large grid file with E field is defined:
      ! NOTE: right now both grids share the same transmitters.
      ! This why, reading and setting the large grid and its E solution comes after setting the trasnmitters Dictionary.
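The manager is configured once at start-up in Mod3DMT.f90, as the hunk above shows. A hedged reading of the options, using only names that appear in the diff (the exact EsMgr_init interface and the behaviour behind save_in_file are assumptions inferred from the EsMgr_save_in_file branches in Main_MPI.f90):

    ! Sketch of the initialization shown above, with assumed semantics in comments.
    call EsMgr_init(grid, context=modem_ctx, &
       save_in_file=cUserDef % storeSolnsInFile, &  ! .true.: solutions stored on disk
       prefix=cUserDef % prefix, &                  ! file-name prefix for stored solutions
       ftype=FTYPE_BINARY)                          ! binary file format

With storeSolnsInFile disabled, the EsMgr_save / EsMgr_get calls presumably fall back to rank-to-rank MPI transfers; the commit message reports bit-for-bit identical results with either setting.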
