!
! psb_mat_dist_mod.f90
!
!!$
!!$ Parallel Sparse BLAS version 2.2
!!$ (C) Copyright 2006/2007/2008
!!$ Salvatore Filippone University of Rome Tor Vergata
!!$ Alfredo Buttari University of Rome Tor Vergata
!!$
!!$ Redistribution and use in source and binary forms, with or without
!!$ modification, are permitted provided that the following conditions
!!$ are met:
!!$ 1. Redistributions of source code must retain the above copyright
!!$ notice, this list of conditions and the following disclaimer.
!!$ 2. Redistributions in binary form must reproduce the above copyright
!!$ notice, this list of conditions, and the following disclaimer in the
!!$ documentation and/or other materials provided with the distribution.
!!$ 3. The name of the PSBLAS group or the names of its contributors may
!!$ not be used to endorse or promote products derived from this
!!$ software without specific written permission.
!!$
!!$ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
!!$ ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
!!$ TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
!!$ PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE PSBLAS GROUP OR ITS CONTRIBUTORS
!!$ BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
!!$ CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
!!$ SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
!!$ INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
!!$ CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
!!$ ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
!!$ POSSIBILITY OF SUCH DAMAGE.
!!$
!!$
module psb_mat_dist_mod
interface psb_matdist
!
! Generic interface for the matrix distribution routines
! (single/double precision, real/complex versions below).
!
subroutine smatdist(a_glob, a, ictxt, desc_a,&
& b_glob, b, info, parts, v, inroot,fmt)
!
! Distribute a global sparse matrix (and a right-hand side) among
! the processes of a parallel machine, building the local pieces
! and the associated communication descriptor.
! Single precision real version.
!
! type(psb_s_sparse_mat) :: a_glob
! on entry: the global sparse matrix; significant only on the
! root process (see inroot).
! on exit : undefined.
!
! type(psb_s_sparse_mat) :: a
! on entry: fresh variable.
! on exit : the local part of the distributed sparse matrix.
!
! integer :: ictxt
! on entry: the communication context (BLACS style).
! on exit : unchanged.
!
! type(psb_desc_type) :: desc_a
! on entry: fresh variable.
! on exit : the communication descriptor for the new distribution.
!
! real(psb_spk_) :: b_glob(:)
! on entry: the global right-hand side; significant only on the
! root process.
! on exit : unspecified.
!
! real(psb_spk_), allocatable :: b(:)
! on entry: fresh variable.
! on exit : the local part of the right-hand side.
!
! integer, intent(out) :: info
! on exit : return code, 0 on success.
!
! interface parts (optional)
! ! .....user passed subroutine.....
! subroutine parts(global_indx,n,np,pv,nv)
! implicit none
! integer, intent(in) :: global_indx, n, np
! integer, intent(out) :: nv
! integer, intent(out) :: pv(*)
! end subroutine parts
! end interface
! on entry: user-supplied subroutine defining the data
! distribution: for each global_indx it returns in
! pv(1:nv) the list of all processes owning the row with
! that index. Usually nv=1; nv>1 means an overlap in
! the data distribution.
!
! integer :: v(:) (optional)
! on entry: NOTE(review): presumably v(i) gives the process
! owning global row i, as an alternative to parts --
! confirm against the implementation.
!
! integer, optional :: inroot
! on entry: index of the process holding a_glob. Default: 0.
! on exit : unchanged.
!
! character(len=*), optional :: fmt
! on entry: NOTE(review): presumably the requested storage format
! for the local matrix -- confirm against the implementation.
!
use psb_sparse_mod, only : psb_s_sparse_mat, psb_desc_type, psb_spk_
implicit none
! parameters
type(psb_s_sparse_mat) :: a_glob
real(psb_spk_) :: b_glob(:)
integer :: ictxt
type(psb_s_sparse_mat) :: a
real(psb_spk_), allocatable :: b(:)
type(psb_desc_type) :: desc_a
integer, intent(out) :: info
integer, optional :: inroot
character(len=*), optional :: fmt
integer :: v(:)
interface
subroutine parts(global_indx,n,np,pv,nv)
implicit none
integer, intent(in) :: global_indx, n, np
integer, intent(out) :: nv
integer, intent(out) :: pv(*)
end subroutine parts
end interface
optional :: parts, v
end subroutine smatdist
subroutine dmatdist(a_glob, a, ictxt, desc_a,&
& b_glob, b, info, parts, v, inroot,fmt)
!
! Distribute a global sparse matrix (and a right-hand side) among
! the processes of a parallel machine, building the local pieces
! and the associated communication descriptor.
! Double precision real version.
!
! type(psb_d_sparse_mat) :: a_glob
! on entry: the global sparse matrix; significant only on the
! root process (see inroot).
! on exit : undefined.
!
! type(psb_d_sparse_mat) :: a
! on entry: fresh variable.
! on exit : the local part of the distributed sparse matrix.
!
! integer :: ictxt
! on entry: the communication context (BLACS style).
! on exit : unchanged.
!
! type(psb_desc_type) :: desc_a
! on entry: fresh variable.
! on exit : the communication descriptor for the new distribution.
!
! real(psb_dpk_) :: b_glob(:)
! on entry: the global right-hand side; significant only on the
! root process.
! on exit : unspecified.
!
! real(psb_dpk_), allocatable :: b(:)
! on entry: fresh variable.
! on exit : the local part of the right-hand side.
!
! integer, intent(out) :: info
! on exit : return code, 0 on success.
!
! interface parts (optional)
! ! .....user passed subroutine.....
! subroutine parts(global_indx,n,np,pv,nv)
! implicit none
! integer, intent(in) :: global_indx, n, np
! integer, intent(out) :: nv
! integer, intent(out) :: pv(*)
! end subroutine parts
! end interface
! on entry: user-supplied subroutine defining the data
! distribution: for each global_indx it returns in
! pv(1:nv) the list of all processes owning the row with
! that index. Usually nv=1; nv>1 means an overlap in
! the data distribution.
!
! integer :: v(:) (optional)
! on entry: NOTE(review): presumably v(i) gives the process
! owning global row i, as an alternative to parts --
! confirm against the implementation.
!
! integer, optional :: inroot
! on entry: index of the process holding a_glob. Default: 0.
! on exit : unchanged.
!
! character(len=*), optional :: fmt
! on entry: NOTE(review): presumably the requested storage format
! for the local matrix -- confirm against the implementation.
!
use psb_sparse_mod, only : psb_d_sparse_mat, psb_dpk_, psb_desc_type
implicit none
! parameters
type(psb_d_sparse_mat) :: a_glob
real(psb_dpk_) :: b_glob(:)
integer :: ictxt
type(psb_d_sparse_mat) :: a
real(psb_dpk_), allocatable :: b(:)
type(psb_desc_type) :: desc_a
integer, intent(out) :: info
integer, optional :: inroot
character(len=*), optional :: fmt
integer :: v(:)
interface
subroutine parts(global_indx,n,np,pv,nv)
implicit none
integer, intent(in) :: global_indx, n, np
integer, intent(out) :: nv
integer, intent(out) :: pv(*)
end subroutine parts
end interface
optional :: parts, v
end subroutine dmatdist
subroutine cmatdist(a_glob, a, ictxt, desc_a,&
& b_glob, b, info, parts, v, inroot,fmt)
!
! Distribute a global sparse matrix (and a right-hand side) among
! the processes of a parallel machine, building the local pieces
! and the associated communication descriptor.
! Single precision complex version.
!
! type(psb_c_sparse_mat) :: a_glob
! on entry: the global sparse matrix; significant only on the
! root process (see inroot).
! on exit : undefined.
!
! type(psb_c_sparse_mat) :: a
! on entry: fresh variable.
! on exit : the local part of the distributed sparse matrix.
!
! integer :: ictxt
! on entry: the communication context (BLACS style).
! on exit : unchanged.
!
! type(psb_desc_type) :: desc_a
! on entry: fresh variable.
! on exit : the communication descriptor for the new distribution.
!
! complex(psb_spk_) :: b_glob(:)
! on entry: the global right-hand side; significant only on the
! root process.
! on exit : unspecified.
!
! complex(psb_spk_), allocatable :: b(:)
! on entry: fresh variable.
! on exit : the local part of the right-hand side.
!
! integer, intent(out) :: info
! on exit : return code, 0 on success.
!
! interface parts (optional)
! ! .....user passed subroutine.....
! subroutine parts(global_indx,n,np,pv,nv)
! implicit none
! integer, intent(in) :: global_indx, n, np
! integer, intent(out) :: nv
! integer, intent(out) :: pv(*)
! end subroutine parts
! end interface
! on entry: user-supplied subroutine defining the data
! distribution: for each global_indx it returns in
! pv(1:nv) the list of all processes owning the row with
! that index. Usually nv=1; nv>1 means an overlap in
! the data distribution.
!
! integer :: v(:) (optional)
! on entry: NOTE(review): presumably v(i) gives the process
! owning global row i, as an alternative to parts --
! confirm against the implementation.
!
! integer, optional :: inroot
! on entry: index of the process holding a_glob. Default: 0.
! on exit : unchanged.
!
! character(len=*), optional :: fmt
! on entry: NOTE(review): presumably the requested storage format
! for the local matrix -- confirm against the implementation.
!
use psb_sparse_mod, only : psb_c_sparse_mat, psb_spk_, psb_desc_type
implicit none
! parameters
type(psb_c_sparse_mat) :: a_glob
complex(psb_spk_) :: b_glob(:)
integer :: ictxt
type(psb_c_sparse_mat) :: a
complex(psb_spk_), allocatable :: b(:)
type(psb_desc_type) :: desc_a
integer, intent(out) :: info
integer, optional :: inroot
character(len=*), optional :: fmt
integer :: v(:)
interface
subroutine parts(global_indx,n,np,pv,nv)
implicit none
integer, intent(in) :: global_indx, n, np
integer, intent(out) :: nv
integer, intent(out) :: pv(*)
end subroutine parts
end interface
optional :: parts, v
end subroutine cmatdist
subroutine zmatdist(a_glob, a, ictxt, desc_a,&
& b_glob, b, info, parts, v, inroot,fmt)
!
! Distribute a global sparse matrix (and a right-hand side) among
! the processes of a parallel machine, building the local pieces
! and the associated communication descriptor.
! Double precision complex version.
!
! type(psb_z_sparse_mat) :: a_glob
! on entry: the global sparse matrix; significant only on the
! root process (see inroot).
! on exit : undefined.
!
! type(psb_z_sparse_mat) :: a
! on entry: fresh variable.
! on exit : the local part of the distributed sparse matrix.
!
! integer :: ictxt
! on entry: the communication context (BLACS style).
! on exit : unchanged.
!
! type(psb_desc_type) :: desc_a
! on entry: fresh variable.
! on exit : the communication descriptor for the new distribution.
!
! complex(psb_dpk_) :: b_glob(:)
! on entry: the global right-hand side; significant only on the
! root process.
! on exit : unspecified.
!
! complex(psb_dpk_), allocatable :: b(:)
! on entry: fresh variable.
! on exit : the local part of the right-hand side.
!
! integer, intent(out) :: info
! on exit : return code, 0 on success.
!
! interface parts (optional)
! ! .....user passed subroutine.....
! subroutine parts(global_indx,n,np,pv,nv)
! implicit none
! integer, intent(in) :: global_indx, n, np
! integer, intent(out) :: nv
! integer, intent(out) :: pv(*)
! end subroutine parts
! end interface
! on entry: user-supplied subroutine defining the data
! distribution: for each global_indx it returns in
! pv(1:nv) the list of all processes owning the row with
! that index. Usually nv=1; nv>1 means an overlap in
! the data distribution.
!
! integer :: v(:) (optional)
! on entry: NOTE(review): presumably v(i) gives the process
! owning global row i, as an alternative to parts --
! confirm against the implementation.
!
! integer, optional :: inroot
! on entry: index of the process holding a_glob. Default: 0.
! on exit : unchanged.
!
! character(len=*), optional :: fmt
! on entry: NOTE(review): presumably the requested storage format
! for the local matrix -- confirm against the implementation.
!
use psb_sparse_mod, only : psb_z_sparse_mat, psb_dpk_, psb_desc_type
implicit none
! parameters
type(psb_z_sparse_mat) :: a_glob
complex(psb_dpk_) :: b_glob(:)
integer :: ictxt
type(psb_z_sparse_mat) :: a
complex(psb_dpk_), allocatable :: b(:)
type(psb_desc_type) :: desc_a
integer, intent(out) :: info
integer, optional :: inroot
character(len=*), optional :: fmt
integer :: v(:)
interface
subroutine parts(global_indx,n,np,pv,nv)
implicit none
integer, intent(in) :: global_indx, n, np
integer, intent(out) :: nv
integer, intent(out) :: pv(*)
end subroutine parts
end interface
optional :: parts, v
end subroutine zmatdist
end interface
end module psb_mat_dist_mod