Fixed MUMPS with replicated matrix.

Branch: stopcriterion
Salvatore Filippone, 6 years ago
parent f4f8bcc5ff
commit 0b57535762

@@ -148,25 +148,7 @@ subroutine mld_c_smoothers_bld(a,desc_a,prec,info,amold,vmold,imold)
call psb_errpush(info,name,a_err=ch_err)
goto 9999
endif
!
! Now do the real build.
!
do i=1, iszv
!
! build the base preconditioner at level i
!
call prec%precv(i)%bld(info,amold=amold,vmold=vmold,imold=imold)
if (info /= psb_success_) then
write(ch_err,'(a,i7)') 'Error @ level',i
call psb_errpush(psb_err_internal_error_,name,&
& a_err=ch_err)
goto 9999
endif
end do
!
! Issue a warning for inconsistent changes to COARSE_SOLVE
! but only if it really is a multilevel
@@ -246,7 +228,7 @@ subroutine mld_c_smoothers_bld(a,desc_a,prec,info,amold,vmold,imold)
write(psb_err_unit,*) ' 2. the solver ', mld_fact_names(coarse_solve_id),&
& ' was not configured at MLD2P4 build time, or'
write(psb_err_unit,*) ' 3. an unsupported solver setup was specified.'
end if
end if
case(mld_sludist_)
if (prec%precv(iszv)%sm%sv%get_id() /= coarse_solve_id) then
@@ -293,6 +275,32 @@ subroutine mld_c_smoothers_bld(a,desc_a,prec,info,amold,vmold,imold)
end select
end if
! Sanity check: need to ensure that the MUMPS local/global NZ
! are handled correctly; this is controlled by local vs global solver.
! From this point of view, REPL is LOCAL because it owns everything.
! Should really find a better way of handling this.
if (prec%precv(iszv)%parms%coarse_mat == mld_repl_mat_) &
& call prec%precv(iszv)%sm%sv%set('MUMPS_LOC_GLOB', mld_local_solver_,info)
!
! Now do the real build.
!
do i=1, iszv
!
! build the base preconditioner at level i
!
call prec%precv(i)%bld(info,amold=amold,vmold=vmold,imold=imold)
if (info /= psb_success_) then
write(ch_err,'(a,i7)') 'Error @ level',i
call psb_errpush(psb_err_internal_error_,name,&
& a_err=ch_err)
goto 9999
endif
end do
if (debug_level >= psb_debug_outer_) &
& write(debug_unit,*) me,' ',trim(name),&
& 'Exiting with',iszv,' levels'
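Why the build loop moves: options pushed onto a solver object with set() only take effect at the next bld() call, so the REPL-implies-local override has to precede the per-level build instead of following it. A condensed sketch of the required ordering, with names taken from the hunk above (the "np copies" remark is my reading of the bug, not something the commit states):

! The override must come first: with a replicated coarse matrix every
! process holds the full matrix, so a MUMPS instance built on the
! global communicator would presumably see np copies of each entry.
if (prec%precv(iszv)%parms%coarse_mat == mld_repl_mat_) &
     & call prec%precv(iszv)%sm%sv%set('MUMPS_LOC_GLOB', mld_local_solver_, info)
! Only now build the preconditioner level by level.
do i=1, iszv
  call prec%precv(i)%bld(info, amold=amold, vmold=vmold, imold=imold)
  if (info /= psb_success_) exit   ! condensed; the real code goes through errpush
end do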

@@ -148,25 +148,7 @@ subroutine mld_d_smoothers_bld(a,desc_a,prec,info,amold,vmold,imold)
call psb_errpush(info,name,a_err=ch_err)
goto 9999
endif
!
! Now do the real build.
!
do i=1, iszv
!
! build the base preconditioner at level i
!
call prec%precv(i)%bld(info,amold=amold,vmold=vmold,imold=imold)
if (info /= psb_success_) then
write(ch_err,'(a,i7)') 'Error @ level',i
call psb_errpush(psb_err_internal_error_,name,&
& a_err=ch_err)
goto 9999
endif
end do
!
! Issue a warning for inconsistent changes to COARSE_SOLVE
! but only if it really is a multilevel
@@ -246,7 +228,7 @@ subroutine mld_d_smoothers_bld(a,desc_a,prec,info,amold,vmold,imold)
write(psb_err_unit,*) ' 2. the solver ', mld_fact_names(coarse_solve_id),&
& ' was not configured at MLD2P4 build time, or'
write(psb_err_unit,*) ' 3. an unsupported solver setup was specified.'
end if
end if
case(mld_sludist_)
if (prec%precv(iszv)%sm%sv%get_id() /= coarse_solve_id) then
@@ -293,6 +275,32 @@ subroutine mld_d_smoothers_bld(a,desc_a,prec,info,amold,vmold,imold)
end select
end if
! Sanity check: need to ensure that the MUMPS local/global NZ
! are handled correctly; this is controlled by local vs global solver.
! From this point of view, REPL is LOCAL because it owns everything.
! Should really find a better way of handling this.
if (prec%precv(iszv)%parms%coarse_mat == mld_repl_mat_) &
& call prec%precv(iszv)%sm%sv%set('MUMPS_LOC_GLOB', mld_local_solver_,info)
!
! Now do the real build.
!
do i=1, iszv
!
! build the base preconditioner at level i
!
call prec%precv(i)%bld(info,amold=amold,vmold=vmold,imold=imold)
if (info /= psb_success_) then
write(ch_err,'(a,i7)') 'Error @ level',i
call psb_errpush(psb_err_internal_error_,name,&
& a_err=ch_err)
goto 9999
endif
end do
if (debug_level >= psb_debug_outer_) &
& write(debug_unit,*) me,' ',trim(name),&
& 'Exiting with',iszv,' levels'

@@ -148,25 +148,7 @@ subroutine mld_s_smoothers_bld(a,desc_a,prec,info,amold,vmold,imold)
call psb_errpush(info,name,a_err=ch_err)
goto 9999
endif
!
! Now do the real build.
!
do i=1, iszv
!
! build the base preconditioner at level i
!
call prec%precv(i)%bld(info,amold=amold,vmold=vmold,imold=imold)
if (info /= psb_success_) then
write(ch_err,'(a,i7)') 'Error @ level',i
call psb_errpush(psb_err_internal_error_,name,&
& a_err=ch_err)
goto 9999
endif
end do
!
! Issue a warning for inconsistent changes to COARSE_SOLVE
! but only if it really is a multilevel
@@ -246,7 +228,7 @@ subroutine mld_s_smoothers_bld(a,desc_a,prec,info,amold,vmold,imold)
write(psb_err_unit,*) ' 2. the solver ', mld_fact_names(coarse_solve_id),&
& ' was not configured at MLD2P4 build time, or'
write(psb_err_unit,*) ' 3. an unsupported solver setup was specified.'
end if
end if
case(mld_sludist_)
if (prec%precv(iszv)%sm%sv%get_id() /= coarse_solve_id) then
@@ -293,6 +275,32 @@ subroutine mld_s_smoothers_bld(a,desc_a,prec,info,amold,vmold,imold)
end select
end if
! Sanity check: need to ensure that the MUMPS local/global NZ
! are handled correctly; this is controlled by local vs global solver.
! From this point of view, REPL is LOCAL because it owns everything.
! Should really find a better way of handling this.
if (prec%precv(iszv)%parms%coarse_mat == mld_repl_mat_) &
& call prec%precv(iszv)%sm%sv%set('MUMPS_LOC_GLOB', mld_local_solver_,info)
!
! Now do the real build.
!
do i=1, iszv
!
! build the base preconditioner at level i
!
call prec%precv(i)%bld(info,amold=amold,vmold=vmold,imold=imold)
if (info /= psb_success_) then
write(ch_err,'(a,i7)') 'Error @ level',i
call psb_errpush(psb_err_internal_error_,name,&
& a_err=ch_err)
goto 9999
endif
end do
if (debug_level >= psb_debug_outer_) &
& write(debug_unit,*) me,' ',trim(name),&
& 'Exiting with',iszv,' levels'

@@ -148,25 +148,7 @@ subroutine mld_z_smoothers_bld(a,desc_a,prec,info,amold,vmold,imold)
call psb_errpush(info,name,a_err=ch_err)
goto 9999
endif
!
! Now do the real build.
!
do i=1, iszv
!
! build the base preconditioner at level i
!
call prec%precv(i)%bld(info,amold=amold,vmold=vmold,imold=imold)
if (info /= psb_success_) then
write(ch_err,'(a,i7)') 'Error @ level',i
call psb_errpush(psb_err_internal_error_,name,&
& a_err=ch_err)
goto 9999
endif
end do
!
! Issue a warning for inconsistent changes to COARSE_SOLVE
! but only if it really is a multilevel
@@ -246,7 +228,7 @@ subroutine mld_z_smoothers_bld(a,desc_a,prec,info,amold,vmold,imold)
write(psb_err_unit,*) ' 2. the solver ', mld_fact_names(coarse_solve_id),&
& ' was not configured at MLD2P4 build time, or'
write(psb_err_unit,*) ' 3. an unsupported solver setup was specified.'
end if
end if
case(mld_sludist_)
if (prec%precv(iszv)%sm%sv%get_id() /= coarse_solve_id) then
@@ -293,6 +275,32 @@ subroutine mld_z_smoothers_bld(a,desc_a,prec,info,amold,vmold,imold)
end select
end if
! Sanity check: need to ensure that the MUMPS local/global NZ
! are handled correctly; this is controlled by local vs global solver.
! From this point of view, REPL is LOCAL because it owns everything.
! Should really find a better way of handling this.
if (prec%precv(iszv)%parms%coarse_mat == mld_repl_mat_) &
& call prec%precv(iszv)%sm%sv%set('MUMPS_LOC_GLOB', mld_local_solver_,info)
!
! Now do the real build.
!
do i=1, iszv
!
! build the base preconditioner at level i
!
call prec%precv(i)%bld(info,amold=amold,vmold=vmold,imold=imold)
if (info /= psb_success_) then
write(ch_err,'(a,i7)') 'Error @ level',i
call psb_errpush(psb_err_internal_error_,name,&
& a_err=ch_err)
goto 9999
endif
end do
if (debug_level >= psb_debug_outer_) &
& write(debug_unit,*) me,' ',trim(name),&
& 'Exiting with',iszv,' levels'
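For reference, the configuration these four hunks fix is a replicated coarsest matrix combined with a MUMPS coarse solver. A minimal driver sketch, assuming the MLD2P4 2.x user-level entry points (init/set/hierarchy_build/smoothers_build) as documented in the user guide, with matrix assembly elided:

program repl_mumps_coarse
  use psb_base_mod
  use mld_prec_mod
  implicit none
  type(psb_dspmat_type) :: a
  type(psb_desc_type)   :: desc_a
  type(mld_dprec_type)  :: prec
  integer(psb_ipk_)     :: ictxt, info

  ! Sketch only: error checking and matrix assembly omitted.
  call psb_init(ictxt)
  ! ... assemble a and desc_a here ...
  call prec%init('ML', info)
  call prec%set('COARSE_MAT',   'REPL',  info)   ! replicate the coarsest matrix
  call prec%set('COARSE_SOLVE', 'MUMPS', info)   ! MUMPS at the coarsest level
  call prec%hierarchy_build(a, desc_a, info)
  call prec%smoothers_build(a, desc_a, info)     ! invokes the routines patched above
  call psb_exit(ictxt)
end program repl_mumps_coarse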

@@ -71,18 +71,18 @@
debug_unit = psb_get_debug_unit()
debug_level = psb_get_debug_level()
ictxt = desc_a%get_context()
call psb_info(ictxt, iam, np)
if (sv%ipar(1) == mld_local_solver_ ) then
call psb_info(ictxt, iam, np)
call psb_init(ictxt1,np=1,basectxt=ictxt,ids=(/iam/))
call psb_get_mpicomm(ictxt1, icomm)
allocate(sv%local_ictxt,stat=info)
sv%local_ictxt = ictxt1
write(*,*)'mumps_bld: +++++>',icomm,sv%local_ictxt
!write(*,*)iam,'mumps_bld: local +++++>',icomm,sv%local_ictxt
call psb_info(ictxt1, me, np)
npr = np
else if (sv%ipar(1) == mld_global_solver_ ) then
call psb_get_mpicomm(ictxt,icomm)
write(*,*)'mumps_bld: +++++>',icomm,ictxt
!write(*,*)iam,'mumps_bld: global +++++>',icomm,ictxt
call psb_info(ictxt, iam, np)
me = iam
npr = np
@@ -161,11 +161,14 @@
end if
sv%id%n = nglob
! there should be a better way for this
sv%id%nz_loc = acoo%get_nzeros()
sv%id%nz = acoo%get_nzeros()
sv%id%nnz_loc = acoo%get_nzeros()
sv%id%nnz = acoo%get_nzeros()
sv%id%job = 4
if (sv%ipar(1) == mld_global_solver_ ) then
call psb_sum(ictxt,sv%id%nnz)
end if
!call psb_barrier(ictxt)
write(*,*)iam, ' calling mumps N,nz,nz_loc',sv%id%n,sv%id%nz,sv%id%nz_loc
write(*,*)iam, ' calling mumps N,nz,nz_loc',sv%id%n,sv%id%nnz,sv%id%nnz_loc
call dmumps(sv%id)
!call psb_barrier(ictxt)
info = sv%id%infog(1)
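The reorganized branch above gives a purely local MUMPS instance its own one-process PSBLAS context, so the factorization never leaves the rank that owns the (replicated) matrix. A sketch of the pattern; the final assignment is my illustration of where the communicator ends up, not a line from this hunk:

call psb_info(ictxt, iam, np)                             ! rank/size on the base context
call psb_init(ictxt1, np=1, basectxt=ictxt, ids=(/iam/))  ! one-rank child context per process
call psb_get_mpicomm(ictxt1, icomm)                       ! its MPI communicator
sv%id%comm = icomm   ! hypothetical: hand the communicator to the MUMPS instance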

@@ -60,8 +60,7 @@
type(psb_d_coo_sparse_mat), target :: acoo
integer(psb_ipk_) :: n_row,n_col, nrow_a, nztota, nglob, nglobrec, nzt, npr, npc
integer(psb_ipk_) :: ifrst, ibcheck
integer(psb_ipk_) :: ictxt, ictxt1, icomm, np, iam, me, i, &
& err_act, debug_unit, debug_level
integer(psb_ipk_) :: ictxt, ictxt1, icomm, np, iam, me, i, err_act, debug_unit, debug_level
character(len=20) :: name='d_mumps_solver_bld', ch_err
#if defined(HAVE_MUMPS_) && !defined(LPK8)
@@ -72,18 +71,18 @@
debug_unit = psb_get_debug_unit()
debug_level = psb_get_debug_level()
ictxt = desc_a%get_context()
call psb_info(ictxt, iam, np)
if (sv%ipar(1) == mld_local_solver_ ) then
call psb_info(ictxt, iam, np)
call psb_init(ictxt1,np=1,basectxt=ictxt,ids=(/iam/))
call psb_get_mpicomm(ictxt1, icomm)
allocate(sv%local_ictxt,stat=info)
sv%local_ictxt = ictxt1
write(*,*)'mumps_bld: +++++>',icomm,sv%local_ictxt
!write(*,*)iam,'mumps_bld: local +++++>',icomm,sv%local_ictxt
call psb_info(ictxt1, me, np)
npr = np
else if (sv%ipar(1) == mld_global_solver_ ) then
call psb_get_mpicomm(ictxt,icomm)
write(*,*)'mumps_bld: +++++>',icomm,ictxt
!write(*,*)iam,'mumps_bld: global +++++>',icomm,ictxt
call psb_info(ictxt, iam, np)
me = iam
npr = np
@@ -162,11 +161,14 @@
end if
sv%id%n = nglob
! there should be a better way for this
sv%id%nz_loc = acoo%get_nzeros()
sv%id%nz = acoo%get_nzeros()
sv%id%nnz_loc = acoo%get_nzeros()
sv%id%nnz = acoo%get_nzeros()
sv%id%job = 4
if (sv%ipar(1) == mld_global_solver_ ) then
call psb_sum(ictxt,sv%id%nnz)
end if
!call psb_barrier(ictxt)
write(*,*)iam, ' calling mumps N,nz,nz_loc',sv%id%n,sv%id%nz,sv%id%nz_loc
write(*,*)iam, ' calling mumps N,nz,nz_loc',sv%id%n,sv%id%nnz,sv%id%nnz_loc
call dmumps(sv%id)
!call psb_barrier(ictxt)
info = sv%id%infog(1)
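On the nnz bookkeeping in the second hunk: the fields move from nz/nz_loc to nnz/nnz_loc (matching, as far as I can tell, the 64-bit NNZ/NNZ_loc members introduced in MUMPS 5.1), and for the global solver the local counts are reduced into the true global total, since with distributed entry each rank only knows its own slice. Condensed, with explanatory comments added:

sv%id%nnz_loc = acoo%get_nzeros()  ! entries held by this rank
sv%id%nnz     = acoo%get_nzeros()  ! already the whole matrix in the local/replicated case
if (sv%ipar(1) == mld_global_solver_) then
  call psb_sum(ictxt, sv%id%nnz)   ! sum-reduce local counts into the global nnz
end if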

@@ -71,18 +71,18 @@
debug_unit = psb_get_debug_unit()
debug_level = psb_get_debug_level()
ictxt = desc_a%get_context()
call psb_info(ictxt, iam, np)
if (sv%ipar(1) == mld_local_solver_ ) then
call psb_info(ictxt, iam, np)
call psb_init(ictxt1,np=1,basectxt=ictxt,ids=(/iam/))
call psb_get_mpicomm(ictxt1, icomm)
allocate(sv%local_ictxt,stat=info)
sv%local_ictxt = ictxt1
write(*,*)'mumps_bld: +++++>',icomm,sv%local_ictxt
!write(*,*)iam,'mumps_bld: local +++++>',icomm,sv%local_ictxt
call psb_info(ictxt1, me, np)
npr = np
else if (sv%ipar(1) == mld_global_solver_ ) then
call psb_get_mpicomm(ictxt,icomm)
write(*,*)'mumps_bld: +++++>',icomm,ictxt
!write(*,*)iam,'mumps_bld: global +++++>',icomm,ictxt
call psb_info(ictxt, iam, np)
me = iam
npr = np
@@ -161,11 +161,14 @@
end if
sv%id%n = nglob
! there should be a better way for this
sv%id%nz_loc = acoo%get_nzeros()
sv%id%nz = acoo%get_nzeros()
sv%id%nnz_loc = acoo%get_nzeros()
sv%id%nnz = acoo%get_nzeros()
sv%id%job = 4
if (sv%ipar(1) == mld_global_solver_ ) then
call psb_sum(ictxt,sv%id%nnz)
end if
!call psb_barrier(ictxt)
write(*,*)iam, ' calling mumps N,nz,nz_loc',sv%id%n,sv%id%nz,sv%id%nz_loc
write(*,*)iam, ' calling mumps N,nz,nz_loc',sv%id%n,sv%id%nnz,sv%id%nnz_loc
call dmumps(sv%id)
!call psb_barrier(ictxt)
info = sv%id%infog(1)

@@ -71,18 +71,18 @@
debug_unit = psb_get_debug_unit()
debug_level = psb_get_debug_level()
ictxt = desc_a%get_context()
call psb_info(ictxt, iam, np)
if (sv%ipar(1) == mld_local_solver_ ) then
call psb_info(ictxt, iam, np)
call psb_init(ictxt1,np=1,basectxt=ictxt,ids=(/iam/))
call psb_get_mpicomm(ictxt1, icomm)
allocate(sv%local_ictxt,stat=info)
sv%local_ictxt = ictxt1
write(*,*)'mumps_bld: +++++>',icomm,sv%local_ictxt
!write(*,*)iam,'mumps_bld: local +++++>',icomm,sv%local_ictxt
call psb_info(ictxt1, me, np)
npr = np
else if (sv%ipar(1) == mld_global_solver_ ) then
call psb_get_mpicomm(ictxt,icomm)
write(*,*)'mumps_bld: +++++>',icomm,ictxt
!write(*,*)iam,'mumps_bld: global +++++>',icomm,ictxt
call psb_info(ictxt, iam, np)
me = iam
npr = np
@@ -161,11 +161,14 @@
end if
sv%id%n = nglob
! there should be a better way for this
sv%id%nz_loc = acoo%get_nzeros()
sv%id%nz = acoo%get_nzeros()
sv%id%nnz_loc = acoo%get_nzeros()
sv%id%nnz = acoo%get_nzeros()
sv%id%job = 4
if (sv%ipar(1) == mld_global_solver_ ) then
call psb_sum(ictxt,sv%id%nnz)
end if
!call psb_barrier(ictxt)
write(*,*)iam, ' calling mumps N,nz,nz_loc',sv%id%n,sv%id%nz,sv%id%nz_loc
write(*,*)iam, ' calling mumps N,nz,nz_loc',sv%id%n,sv%id%nnz,sv%id%nnz_loc
call dmumps(sv%id)
!call psb_barrier(ictxt)
info = sv%id%infog(1)
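After the factorization call the status comes back through infog(1): MUMPS reports a fatal error as a negative INFOG(1), with INFOG(2) carrying the auxiliary code, while positive values are warnings. A hedged sketch of mapping that onto the PSBLAS error machinery, in the style this file already uses (the message text is mine):

info = sv%id%infog(1)
if (info < 0) then
  ! infog(2) holds the companion code; ch_err is only len=20, keep it short
  write(ch_err,'(a,i5)') 'MUMPS err:', sv%id%infog(1)
  info = psb_err_internal_error_
  call psb_errpush(info, name, a_err=ch_err)
  goto 9999
else
  info = psb_success_
end if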
