Merge branch 'cmake' into development

maint-3.9.0
sfilippone 6 months ago
commit 7436929b74

1
.gitignore vendored

@ -1,6 +1,7 @@
*.a
*.o
*.mod
*.mtx
*.smod
*~

@ -61,7 +61,7 @@ include(CheckOutOfSourceBuild)
#----------------------------------------------------
if ("${CMAKE_Fortran_COMPILER_ID}" MATCHES "GNU" )
set(gfortran_compiler true)
#TODO: check if it is needed an mpi compiler set(CMAKE_Fortran_COMPILER mpifort)
set(CMAKE_Fortran_COMPILER mpifort)
set ( CMAKE_C_FLAGS_CODECOVERAGE "-fprofile-arcs -ftest-coverage -O0"
CACHE STRING "Code coverage C compiler flags")
@ -132,6 +132,13 @@ include("${CMAKE_CURRENT_LIST_DIR}/cmake/CapitalizeString.cmake")
message(STATUS "Using compiler ${CMAKE_C_COMPILER};")
# Define Major and Minor Version and Patch Level, String Version
set(PSBLASMAJOR "3")
set(PSBLASMINOR "9")
set(PSBLASPATCH "0")
set(PSBLASSTRING "\"3.9.0\"")
# Set default values for IPK_SIZE and LPK_SIZE
set(DEFAULT_IPK_SIZE 4)
set(DEFAULT_LPK_SIZE 8)
@ -306,62 +313,90 @@ if(HAVE_SAME_TYPE_AS)
endif()
#----------------------------------------------------------------------------
# Find MPI and set some flags so that FC and CC can point to gfortran and gcc
# MPI detection and configuration
#----------------------------------------------------------------------------
find_package( MPI REQUIRED Fortran )
find_package(MPI REQUIRED Fortran)
if(MPI_FOUND)
message(STATUS ">>> MPI found: ${MPI_C_LIBRARIES} ${MPI_Fortran_LIBRARIES}")
#-----------------------------------------------
# Work around an issue present on fedora systems
# Fedora-specific workaround for noexecstack flag
#-----------------------------------------------
if( (MPI_C_LINK_FLAGS MATCHES "noexecstack") OR (MPI_Fortran_LINK_FLAGS MATCHES "noexecstack") )
message ( WARNING
"The `noexecstack` linker flag was found in the MPI_<lang>_LINK_FLAGS variable. This is
known to cause segmentation faults for some Fortran codes. See, e.g.,
https://gcc.gnu.org/bugzilla/show_bug.cgi?id=71729 or
https://github.com/sourceryinstitute/OpenCoarrays/issues/317.
`noexecstack` is being replaced with `execstack`"
)
string(REPLACE "noexecstack"
"execstack" MPI_C_LINK_FLAGS_FIXED ${MPI_C_LINK_FLAGS})
string(REPLACE "noexecstack"
"execstack" MPI_Fortran_LINK_FLAGS_FIXED ${MPI_Fortran_LINK_FLAGS})
set(MPI_C_LINK_FLAGS "${MPI_C_LINK_FLAGS_FIXED}" CACHE STRING
"MPI C linking flags" FORCE)
set(MPI_Fortran_LINK_FLAGS "${MPI_Fortran_LINK_FLAGS_FIXED}" CACHE STRING
"MPI Fortran linking flags" FORCE)
if((MPI_C_LINK_FLAGS MATCHES "noexecstack") OR (MPI_Fortran_LINK_FLAGS MATCHES "noexecstack"))
message(WARNING
"The `noexecstack` linker flag was found in the MPI_<lang>_LINK_FLAGS variable.
This can cause segmentation faults in Fortran codes.
Replacing `noexecstack` with `execstack`."
)
string(REPLACE "noexecstack" "execstack" MPI_C_LINK_FLAGS_FIXED ${MPI_C_LINK_FLAGS})
string(REPLACE "noexecstack" "execstack" MPI_Fortran_LINK_FLAGS_FIXED ${MPI_Fortran_LINK_FLAGS})
set(MPI_C_LINK_FLAGS "${MPI_C_LINK_FLAGS_FIXED}" CACHE STRING "MPI C linking flags" FORCE)
set(MPI_Fortran_LINK_FLAGS "${MPI_Fortran_LINK_FLAGS_FIXED}" CACHE STRING "MPI Fortran linking flags" FORCE)
endif()
message(STATUS "Found MPI: ${MPI_C_LIBRARIES} ${MPI_Fortran_LIBRARIES}")
#----------------
# Setup MPI flags
#----------------
#-----------------------------------------------
# Compiler and linker flags setup
#-----------------------------------------------
list(REMOVE_DUPLICATES MPI_Fortran_INCLUDE_PATH)
set(CMAKE_C_COMPILE_FLAGS ${CMAKE_C_COMPILE_FLAGS} ${MPI_C_COMPILE_FLAGS})
set(CMAKE_C_LINK_FLAGS ${CMAKE_C_LINK_FLAGS} ${MPI_C_LINK_FLAGS})
set(CMAKE_Fortran_COMPILE_FLAGS ${CMAKE_Fortran_COMPILE_FLAGS} ${MPI_Fortran_COMPILE_FLAGS})
set(CMAKE_Fortran_LINK_FLAGS ${CMAKE_Fortran_LINK_FLAGS} ${MPI_Fortran_LINK_FLAGS})
include_directories(BEFORE ${MPI_C_INCLUDE_PATH} ${MPI_Fortran_INCLUDE_PATH})
message(STATUS "${MPI_C_INCLUDE_PATH}; ${MPI_Fortran_INCLUDE_PATH};; ${CMAKE_Fortran_LINK_FLAGS} ;")
set(CMAKE_C_COMPILE_FLAGS "${CMAKE_C_COMPILE_FLAGS} ${MPI_C_COMPILE_FLAGS}")
set(CMAKE_C_LINK_FLAGS "${CMAKE_C_LINK_FLAGS} ${MPI_C_LINK_FLAGS}")
set(CMAKE_Fortran_COMPILE_FLAGS "${CMAKE_Fortran_COMPILE_FLAGS} ${MPI_Fortran_COMPILE_FLAGS}")
set(CMAKE_Fortran_LINK_FLAGS "${CMAKE_Fortran_LINK_FLAGS} ${MPI_Fortran_LINK_FLAGS}")
message(STATUS "MPI include paths: ${MPI_Fortran_INCLUDE_PATH}")
message(STATUS "Fortran link flags: ${CMAKE_Fortran_LINK_FLAGS}")
#-----------------------------------------------
# Ensure mpi.mod is available for CMake
#-----------------------------------------------
set(CMAKE_Fortran_MODULE_DIRECTORY ${CMAKE_BINARY_DIR}/modules)
file(MAKE_DIRECTORY ${CMAKE_Fortran_MODULE_DIRECTORY})
# Try to copy mpi.mod or MPI.mod into module directory
set(_mpi_mod_found FALSE)
foreach(_mpi_mod_name mpi.mod MPI.mod)
foreach(_mpi_inc ${MPI_Fortran_INCLUDE_PATH})
if(EXISTS "${_mpi_inc}/${_mpi_mod_name}")
file(COPY "${_mpi_inc}/${_mpi_mod_name}" DESTINATION "${CMAKE_Fortran_MODULE_DIRECTORY}")
message(STATUS "Copied ${_mpi_mod_name} from ${_mpi_inc}")
set(_mpi_mod_found TRUE)
break()
endif()
endforeach()
if(_mpi_mod_found)
break()
endif()
endforeach()
if(NOT _mpi_mod_found)
message(WARNING "mpi.mod not found in MPI include paths; assuming it is built-in to mpifort.")
endif()
#-----------------------------------------------
# Enable MPI Fortran module support
#-----------------------------------------------
if(MPI_Fortran_HAVE_F90_MODULE OR MPI_Fortran_HAVE_F08_MODULE)
add_compile_options(-DPSB_MPI_MOD)
message(STATUS "-DPSB_MPI_MOD")
#add_compile_options(-DSERIAL_MPI) # Is it right??
#message(STATUS "-DSERIAL_MPI")
message(STATUS "Defined: -DPSB_MPI_MOD")
endif()
set(PSB_SERIAL_MPI OFF)
else()
message(STATUS "MPI not found, serial ahead")
add_compile_options(-DPSB_SERIAL_MPI)
add_compile_options(-DPSB_MPI_MOD)
#-----------------------------------------------
# Fallback to serial mode (no MPI found)
#-----------------------------------------------
message(WARNING ">>> MPI not found — building in serial mode")
add_compile_options(-DPSB_SERIAL_MPI -DPSB_MPI_MOD)
set(PSB_SERIAL_MPI ON)
set(CSERIALMPI "#define PSB_SERIAL_MPI")
endif()
#-------------------------------------------------------
# Find and Use OpenCoarrays IFF gfortran AND options set
#-------------------------------------------------------
@ -663,10 +698,10 @@ endif()
if(PSB_BUILD_CUDA)
if(NOT DEFINED PSB_CUDA_PATH)
set(PSB_CUDA_PATH "/opt/cuda/12.8")
endif()
# Include the CMakeLists for the cbind library
#if(NOT DEFINED PSB_CUDA_PATH)
# set(PSB_CUDA_PATH "/opt/cuda/12.8")
#endif()
# Include the CMakeLists for the cuda library
include(${CMAKE_CURRENT_LIST_DIR}/cuda/CMakeLists.txt)
include_directories("${PSB_CUDA_PATH}/include")
message(STATUS "${PSB_CUDA_PATH}")
@ -948,7 +983,8 @@ if(WIN32)
PUBLIC psb_cbind_C)
else()
add_library(cbind_C OBJECT ${cbind_source_C_files})
add_library(cbind ${cbind_source_files})
add_library(cbind ${cbind_source_files} $<TARGET_OBJECTS:cbind_C>)
endif()
@ -1121,35 +1157,35 @@ endif()
if(MPI_FOUND)
#if(MPI_FOUND)
# Copy mpi.mod from the first available path in MPI_Fortran_INCLUDE_PATH
set(MPI_MOD_COPIED FALSE)
# set(MPI_MOD_COPIED FALSE)
foreach(path IN LISTS MPI_Fortran_INCLUDE_PATH)
# foreach(path IN LISTS MPI_Fortran_INCLUDE_PATH)
# Construct the full path to the mpi.mod file
set(mpi_mod_path "${path}/mpi.mod")
# set(mpi_mod_path "${path}/mpi.mod")
# Check if the mpi.mod file exists
if(EXISTS "${mpi_mod_path}")
# if(EXISTS "${mpi_mod_path}")
# Copy the mpi.mod file to the module directory
file(COPY "${mpi_mod_path}" DESTINATION "${CMAKE_Fortran_MODULE_DIRECTORY}")
message(STATUS "Copied mpi.mod from ${mpi_mod_path} to ${CMAKE_Fortran_MODULE_DIRECTORY}")
set(MPI_MOD_COPIED TRUE)
break() # Exit the loop once we've copied the file
endif()
endforeach()
# file(COPY "${mpi_mod_path}" DESTINATION "${CMAKE_Fortran_MODULE_DIRECTORY}")
#message(STATUS "Copied mpi.mod from ${mpi_mod_path} to ${CMAKE_Fortran_MODULE_DIRECTORY}")
#set(MPI_MOD_COPIED TRUE)
#break() # Exit the loop once we've copied the file
#endif()
#endforeach()
if(NOT MPI_MOD_COPIED)
message(WARNING "mpi.mod not found in any of the specified paths: ${MPI_Fortran_INCLUDE_PATH}")
endif()
#if(NOT MPI_MOD_COPIED)
# message(WARNING "mpi.mod not found in any of the specified paths: ${MPI_Fortran_INCLUDE_PATH}")
#endif()
foreach(lib base prec linsolve ext util cbind)
#foreach(lib base prec linsolve ext util cbind)
target_link_libraries(${lib} PUBLIC ${MPI_C_LIBRARIES} ${MPI_Fortran_LIBRARIES})
endforeach()
endif()
# target_link_libraries(${lib} PUBLIC ${MPI_C_LIBRARIES} ${MPI_Fortran_LIBRARIES})
#endforeach()
#endif()
if(OpenCoarrays_FOUND)
foreach(lib base prec linsolve ext util cbind) #TODO: check if cbind goes here!
@ -1245,9 +1281,36 @@ export(
# Set the installation directory for the test files
set(INSTALL_TEST_DIR "${CMAKE_INSTALL_PREFIX}/samples" CACHE PATH "Installation directory for sample files")
function(install_directory_recursive source_dir install_base_dir) # Function to install a directory and its subdirectories recursively
file(GLOB_RECURSE ALL_FILES RELATIVE "${CMAKE_CURRENT_SOURCE_DIR}/${source_dir}" "${source_dir}/*")
foreach(FILE_PATH IN LISTS ALL_FILES)
# Construct the full source and destination paths
set(FULL_SOURCE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/${source_dir}/${FILE_PATH}")
set(FULL_INSTALL_PATH "${install_base_dir}/${FILE_PATH}")
# Check if it's a directory
if(IS_DIRECTORY "${FULL_SOURCE_PATH}")
# Create the directory in the install destination
file(MAKE_DIRECTORY "${FULL_INSTALL_PATH}")
else()
# Install the file
install(FILES "${FULL_SOURCE_PATH}" DESTINATION "${install_base_dir}" RENAME "${FILE_PATH}")
endif()
endforeach()
endfunction()
# Install cbind/test directory
install_directory_recursive(cbind/test "${INSTALL_TEST_DIR}/cbind")
# Install test/fileread directory
install_directory_recursive(test/fileread "${INSTALL_TEST_DIR}/fileread")
# Install test/pdegen directory
install_directory_recursive(test/pdegen "${INSTALL_TEST_DIR}/pdegen")
message(STATUS "CMAKE_INSTALL_PREFIX: ${CMAKE_INSTALL_PREFIX} - ${PSB_CMAKE_INSTALL_PREFIX};")

@ -0,0 +1,44 @@
cmake_minimum_required(VERSION 3.10)
project(pdegen C Fortran)

# Require the caller to point at an installed PSBLAS tree.
if(NOT DEFINED PSBLAS_INSTALL_DIR)
  message(FATAL_ERROR "Please specify the path to the psblas installation directory using -DPSBLAS_INSTALL_DIR=<path>")
endif()

# Derive the include/module/library locations from the installation root.
set(INSTALLDIR "${PSBLAS_INSTALL_DIR}")
set(INCDIR "${INSTALLDIR}/include")
set(MODDIR "${INSTALLDIR}/modules")
set(LIBDIR "${INSTALLDIR}/lib")
#set(CMAKE_VERBOSE_MAKEFILE ON)

# Locate the psblas package; it provides the psblas:: imported targets
# used below.
find_package(psblas REQUIRED PATHS "${INSTALLDIR}")

# Executables are placed in a dedicated runs/ directory.
set(EXEDIR "${CMAKE_CURRENT_SOURCE_DIR}/runs")
file(MAKE_DIRECTORY "${EXEDIR}")

# Sources for the pdegen executable.
set(SOURCES_PDEGEN pdegen3dc.c)

add_executable(psb_pdegen ${SOURCES_PDEGEN})
# Target-scoped include paths instead of the former directory-scoped
# include_directories(); only this executable needs them.
target_include_directories(psb_pdegen PRIVATE "${INCDIR}" "${MODDIR}")
# PRIVATE: the PSBLAS libraries are needed only to build/link this
# executable (the original used the legacy keyword-less signature).
target_link_libraries(psb_pdegen PRIVATE
  psblas::cbind psblas::ext psblas::util psblas::linsolve psblas::prec psblas::base)
set_target_properties(psb_pdegen PROPERTIES
  RUNTIME_OUTPUT_DIRECTORY "${EXEDIR}"
)

@ -0,0 +1,57 @@
cmake_minimum_required(VERSION 3.10)
project(psblas_project Fortran)

# Require the caller to point at an installed PSBLAS tree.
if(NOT DEFINED PSBLAS_INSTALL_DIR)
  message(FATAL_ERROR "Please specify the path to the psblas installation directory using -DPSBLAS_INSTALL_DIR=<path>")
endif()

# Derive the include/module/library locations from the installation root.
set(INSTALLDIR "${PSBLAS_INSTALL_DIR}")
set(INCDIR "${INSTALLDIR}/include")
set(MODDIR "${INSTALLDIR}/modules")
set(LIBDIR "${INSTALLDIR}/lib")

# Locate the psblas package; it provides the psblas:: imported targets.
find_package(psblas REQUIRED PATHS "${INSTALLDIR}")

# Module/include search paths for the Fortran compiler.
include_directories(${INCDIR} ${MODDIR})

# Executables are collected in a dedicated runs/ directory.
set(EXEDIR "${CMAKE_CURRENT_SOURCE_DIR}/runs")
file(MAKE_DIRECTORY "${EXEDIR}")

# PSBLAS libraries every kernel test links against.
set(PSBLAS_LIBS psblas::util psblas::prec psblas::base)

# Each included file appends its absolute source paths to
# <kernel>_source_files.
include(${CMAKE_CURRENT_LIST_DIR}/geaxpby/CMakeLists.txt)
include(${CMAKE_CURRENT_LIST_DIR}/gedot/CMakeLists.txt)
include(${CMAKE_CURRENT_LIST_DIR}/spmm/CMakeLists.txt)

# One executable per kernel test.
add_executable(psb_geaxpby_test ${geaxpby_source_files})
add_executable(psb_gedot_test ${gedot_source_files})
add_executable(psb_spmm_test ${spmm_source_files})

# Link and place all test executables uniformly. PRIVATE: the libraries
# are needed only to build these executables (the original used the
# legacy keyword-less target_link_libraries signature and repeated the
# same set_target_properties call three times).
foreach(test_target psb_geaxpby_test psb_gedot_test psb_spmm_test)
  target_link_libraries(${test_target} PRIVATE ${PSBLAS_LIBS})
  set_target_properties(${test_target} PROPERTIES
    RUNTIME_OUTPUT_DIRECTORY "${EXEDIR}"
  )
endforeach()

# Inform the user of successful configuration
message(STATUS "CMake configuration complete. Build with 'make' in the build directory.")

@ -0,0 +1,67 @@
# Computational Routines Test
This is a directory containing all the tests done in order to analyze the correctness of the computational routines present in PSBLAS [[3]](#psblas).
## Test Environment
These tests are developed using a linux environment, in particular Rocky Linux 9.5 (Blue Onyx).
The compiler used is:
- gnu 12.2.1
The necessary dependencies are:
- mpich 4.2.2
- PSBLAS 3.9
- CUDA 12.5
## Test Approach
In order to check whether each kernel computation is correct or not, we adopt a simple approach reported in [[1]](#testing): the kernels are executed both in single $y_{s}$ and double precision $y_{d}$. The difference between the two results $\Delta y$ should not exceed the machine epsilon of the single precision floating point representation. This quantity is identified as the unit roundoff $u$. In the IEEE floating point representation we have $$u = 2^{-24} \approx 5.96 \cdot 10^{-8}$$ and therefore $$\Delta y = y_d - y_s \leq u$$ as stated by Higham in his book [[2]](#accuracy). It is also important to note that $\Delta y$ is a double precision floating point number, since it should be able to detect a higher precision with respect to a single precision representation.
The innovative approach introduced in this test suite is to have a theoretical result showing the correctness of the double precision implementation. In fact, the double precision computation is used as a validation result for the single precision one, but no assumption of correctness was made beforehand. In this work, double precision computations are validated using a heuristic approach based on the number $p$ of significant digits, which can be estimated using the $\gamma_n = \frac{nu}{1-nu}$ worst case constant known from Higham [[2]](#accuracy) in order to obtain an upper bound on the number of significant digits. Since this approach is kernel specific, see each test directory to see how this idea is applied to each routine.
## Directory description
Each directory has the name of the computational kernel routines described in the documentation of the version 3.9 of the PSBLAS library. In each directory there are different files and directories:
- parallel/
- serial/
- vectors/
- autotest.sh
- Makefile
- &lt;routine_name&gt;.f90
- psb_&lt;routine_name&gt;_test.f90
- README.md
## Routines
This test suite considers only the computational routines implemented by PSBLAS, according to version 3.9 of the documentation. The following table reports all the kernels, their implementation, and whether or not they have been tested yet.
|**Kernel**| **PSBLAS Subroutine**|**Description**|**Test**|
| ------------------------------- | :--------------------------: | ---------------------------------------------------------------------- | :---------------: |
|**General Dense Matrix Sum**| `psb_geaxpby`| This subroutine is an interface to the computational kernel for dense matrix sum:$$Y \leftarrow \alpha X + \beta Y$$|Yes ✅|
| **Dot product**|`psb_gedot`|This function computes dot product between two vectors x and y.$$dot \leftarrow x^T y$$If x and y are complex vectors it computes the dot-product as:$$dot \leftarrow x^H y$$|Work in progress :hammer_and_wrench:|
| **Generalized Dot Product** |`psb_gedots`|This subroutine computes a series of dot products among the columns of two dense matrices x and y:$$res(i) \leftarrow x(:,i)^T y(:,i)$$If the matrices are complex, then the usual convention applies, i.e. the conjugate transpose of x is used. If x and y are of rank one, then res is a scalar, else it is a rank one array.|No ❌|
|**Infinity-Norm of Vector**|`psb_normi`/`psb_geamax`|This function computes the infinity-norm of a vector x. If x is a real vector it computes infinity norm as:$$amax \leftarrow max \mid x_i \mid$$else if x is a complex vector then it computes the infinity-norm as:$$amax \leftarrow max(\mid re(x_i) \mid + \mid im(x_i) \mid)$$|No ❌|
|**Generalized Infinity Norm**|`psb_geamaxs`|This subroutine computes a series of infinity norms on the columns of a dense matrix x:$$res(i) \leftarrow max_k \mid x(k,i) \mid$$| No ❌ |
| **1-Norm of Vector**| `psb_norm1` / `psb_geasums`|This function computes the 1-norm of a vector x. If x is a real vector it computes 1-norm as:$$asum \leftarrow \mid \mid x_i \mid \mid$$else if x is a complex vector then it computes 1-norm as:$$asum \leftarrow \mid \mid re(x) \mid \mid_1 + \mid \mid im(x) \mid \mid_1$$|No ❌|
|**Generalized 1-Norm of Vector**|`psb_geasums`|This subroutine computes a series of 1-norms on the columns of a dense matrix x:$$res(i) \leftarrow max_k \mid x(k,i) \mid$$This function computes the 1-norm of a vector x. If x is a real vector it computes 1-norm as:$$res(i) \leftarrow \mid \mid x_i \mid \mid$$else if x is a complex vector then it computes 1-norm as:$$res(i) \leftarrow \mid \mid re(x) \mid \mid_1 + \mid \mid im(x) \mid \mid_1$$|No ❌|
| **2-Norm of Vector**|`psb_norm2` / `psb_genrm2`| This function computes the 2-norm of a vector x. If x is a real vector it computes 2-norm as:$$nrm2 \leftarrow \sqrt{x^T x}$$else if x is a complex vector then it computes 2-norm as:$$nrm2 \leftarrow \sqrt{x^H x}$$|No ❌|
|**Generalized 2-Norm of Vector**|`psb_genrm2s` / `psb_spnrm1` |This subroutine computes a series of 2-norms on the columns of a dense matrix x:$$res(i) \leftarrow \mid \mid x(:,i) \mid \mid_2$$|No ❌|
|**1-Norm of Sparse Matrix**|`psb_norm1`|This function computes the 1-norm of a matrix A:$$nrm1 \leftarrow \mid \mid A \mid \mid_1$$where A represents the global matrix A|No ❌|
|**Infinity Norm of Sparse Matrix**|`psb_normi` / `psb_spnrmi`|This function computes the infinity-norm of a matrix A:$$nrmi \leftarrow \mid \mid A \mid \mid_{\infty}$$where: A represents the global matrix A|No ❌|
|**Sparse Matrix by Dense Matrix Product**| `psb_spmm`|This subroutine computes the Sparse Matrix by Dense Matrix Product:$$y \leftarrow \alpha A x + \beta y$$$$y \leftarrow \alpha A^T x + \beta y$$$$y \leftarrow \alpha A^H x + \beta y$$where: <br> x is the global dense matrix x_{:,:} <br> y is the global dense matrix y_{:,:} <br> A is the global sparse matrix A|Work in progress :hammer_and_wrench:|
|**Triangular System Solve**|`psb_spsm`|This subroutine computes the Triangular System Solve:$$y \leftarrow \alpha T^{-1} x + \beta y$$$$y \leftarrow \alpha D^{-1} x + \beta y$$$$y \leftarrow \alpha T^{-1} D x + \beta y$$$$y \leftarrow \alpha T^{-T} x + \beta y$$$$y \leftarrow \alpha D T^{-T} x + \beta y$$$$y \leftarrow \alpha T^{-T} D x + \beta y$$$$y \leftarrow \alpha T^{-H} x + \beta y$$$$y \leftarrow \alpha D T^{-H} x + \beta y$$$$y \leftarrow \alpha T^{-H} D x + \beta y$$where: <br> x is the global dense matrix x_{:,:} <br> y is the global dense matrix y_{:,:} <br> T is the global sparse block triangular submatrix T <br> D is the scaling diagonal matrix|No ❌|
|**Entrywise Product**|`psb_gemlt`|This function computes the entrywise product between two vectors x and y$$dot \leftarrow x(i)y(i)$$|No ❌|
|**Entrywise Division**|`psb_gediv`|This function computes the entrywise division between two vectors x and y$$div \leftarrow \frac{x(i)}{y(i)}$$|No ❌|
|**Entrywise Inversion**|`psb_geinv`|This function computes the entrywise inverse of a vector x and puts it into y$$inv \leftarrow \frac{1}{x(i)}$$|No ❌|
## TODO
- Merge all the output logs
- Finish the directories description
- Check memory occupancy of parallel/ serial/ and vectors/ directories (Maybe not the best way for lots of routines?)
## Questions
- Is it correct to use psb_gather even for a single process running?
- Is it correct to shift in 0,xxxx type of notation to compare with the correct number of significand digits?
## References
<a id="testing">[1].</a> Higham, Nicholas J. Testing linear algebra software. Springer US, 1997
<a id="accuracy">[2].</a> Higham, Nicholas J. Accuracy and stability of numerical algorithms. Society for industrial and applied mathematics, 2002.
<a id="psblas">[3].</a> Filippone, Salvatore, and Michele Colajanni. "PSBLAS: A library for parallel linear algebra computation on sparse matrices." ACM Transactions on Mathematical Software (TOMS) 26.4 (2000): 527-550.

@ -0,0 +1,8 @@
# Source files for the psb_geaxpby test, relative to this directory.
set(PSB_geaxpby_source_files
    psb_geaxpby_test.f90
    geaxpby.f90
)
# Build geaxpby_source_files as absolute paths; this file is pulled in
# via include(), so the list is visible to the including CMakeLists.
foreach(src IN LISTS PSB_geaxpby_source_files)
  list(APPEND geaxpby_source_files "${CMAKE_CURRENT_LIST_DIR}/${src}")
endforeach()

@ -0,0 +1,42 @@
# Makefile for the psb_geaxpby test driver.
# Expects a PSBLAS installation three levels up (include/, modules/, lib/).
INSTALLDIR=../../..
INCDIR=$(INSTALLDIR)/include/
MODDIR=$(INSTALLDIR)/modules/
include $(INCDIR)/Make.inc.psblas
#
# Libraries used
#
LIBDIR = $(INSTALLDIR)/lib/
PSBLAS_LIB = -L$(LIBDIR) -lpsb_util -lpsb_base
LDLIBS = $(PSBLDLIBS)

FINCLUDES=$(FMFLAG)$(MODDIR) $(FMFLAG).

EXEDIR=./runs

# ANSI colour escape sequences for build log messages.
GREEN=\033[0;32m
RED=\033[0;31m
BLUE=\033[0;34m
YELLOW=\033[33m
END_COLOUR=\033[0m

all: runsd psb_geaxpby_test
	@printf "$(GREEN)[INFO]\t Compilation success!$(END_COLOUR)\n"

# Make sure the runs/ output directory exists before linking.
runsd:
	@(if test ! -d runs ; then mkdir runs; fi)
	@printf "$(BLUE)[INFO]\t Build directory $(EXEDIR) correctly initialized$(END_COLOUR)\n"

# Link the test driver directly from its two Fortran sources.
psb_geaxpby_test:
	@$(FLINK) $(LOPT) psb_geaxpby_test.f90 geaxpby.f90 -o psb_geaxpby_test -I$(MODDIR) -I. $(PSBLAS_LIB) $(LDLIBS)
	@mv psb_geaxpby_test $(EXEDIR)
	@printf "$(BLUE)[INFO]\t Testing files generated correctly$(END_COLOUR)\n"

# BUG FIX: the previous rule removed '*$(.mod)'. '$(.mod)' is an
# undefined make variable that expands to the empty string, so the
# pattern collapsed to a bare '*' and 'rm -f *' deleted every file in
# this directory, sources included. '*.mod' is what was intended.
clean:
	@rm -f $(OBJS) *.mod $(EXEDIR)/psb_geaxpby_test

.PHONY: all runsd clean

@ -0,0 +1,62 @@
# Introduction
This is a directory developed by Luca Pepè Sciarria and Simone Staccone from Tor Vergata University to start creating some unit tests for PSBLAS 3.9, in particular for the ```psb_geaxpby``` routine.
## Getting started
Steps to reproduce the tests:
- Compile the code using ``` make ``` (Optional)
- Launch the script with ./autotest.sh, or with source ./autotest.sh if you want to add modules to the .bashrc file permanently.
- Check the output log file psblas_geaxpby_test.log to collect results
NOTE: If the code is changed and a new compilation is needed to show the changes, the autotest.sh script isn't aware of this scenario, therefore it is necessary to manually recompile the code.
## Test Suite
### Overall Analysis
This directory tests the ```psb_geaxpby``` routine. The signature of the function is:
```fortran
call psb_geaxpby(alpha, x, beta, y, desc_a, info)
```
### Parameters Values
**x** vectors are located in the vectors/ directory. They are generated randomly using the same seed and then saved in different files based on their characteristics. The size of the vector is chosen according to the size of the matrix column space considered for the single test instance.
|Vector|File Name|Coefficients|Coefficients Description|
|:-:|:-:|:-:|:-:|
|$x_1$|x1.txt|$x_i> 0, \forall i$|Positive coefficients|
|$x_2$|x2.txt|$x_i < 0, \forall i$|Negative coefficients
|$x_3$|x3.txt|$x_i \ne 0, \forall i$|Random coefficients
|$x_4$|x4.txt|$x_i = 0, \forall i$|Null coefficients
**y** vectors are located in the vectors/ directory. They are generated randomly using the same seed and then saved in different files based on their characteristics. The size of the vector is chosen according to the size of the matrix row space considered for the single test instance.
|Vector|File Name|Coefficients|Coefficients Description|
|:-:|:-:|:-:|:-:|
|$y_1$|y1.txt|$y_i> 0, \forall i$|Positive coefficients|
|$y_2$|y2.txt|$y_i < 0, \forall i$|Negative coefficients
|$y_3$|y3.txt|$y_i \ne 0, \forall i$|Random coefficients
|$y_4$|y4.txt|$y_i = 0, \forall i$|Null coefficients
**$\alpha$**
|$\alpha$|Value|Coefficients Description|
|:-:|:-:|:-:|
|$\alpha_1$|1.0|Positive value|
|$\alpha_2$|-1.0|Negative value|
|$\alpha_3$|0.0|Null value|
**$\beta$**
|$\alpha$|Value|Coefficients Description|
|:-:|:-:|:-:|
|$\beta_1$|1.0|Positive value|
|$\beta_2$|-1.0|Negative value|
|$\beta_3$|0.0|Null value|
## Output
The output files generated by the test are automatically compared by the autotest.sh script, but if you need to run the test manually, here is the naming convention used.
The results of the computation will be saved in different files based on the instance of the test considered. In particular, the naming convention formats the output file as sol_x#_y#_a#_b#.mtx, where each # is a number chosen w.r.t. the test instance. (Ex. sol_x1_y1_a1_b1.mtx is the solution computed using the first x vector file, the first y vector file, alpha = 1.0 and beta = 1.0). Moreover, the files will be saved in the serial/ directory if the program is launched using 1 process, or in the parallel/ directory if the program is launched with more than one process.
## TODO
- Add computation with broken descriptor and catch the error result
- Test using complex data
- Try multiple distributions
- Try using a matrix instead of a vector

@ -0,0 +1,60 @@
#!/bin/bash
#
# Runs the psb_geaxpby test once with a single process and once with all
# available processes, then compares each output file in serial/ against
# its counterpart in parallel/ and appends the results to the log file.

# Variables definition
dir1="serial"
dir2="parallel"
log_file_name="psblas_geaxpby_test.log"
num_procs=$(nproc)

# Define color codes
GREEN="\033[0;32m"
RED="\033[0;31m"
BLUE="\033[0;34m"
YELLOW="\033[33m"
RESET="\033[0m"

# Check if the executable ELF file exists; build it on demand.
if [ ! -f "./runs/psb_geaxpby_test" ]; then
    echo -e "${YELLOW}[WARNING] Executable not found. Running make...${RESET}"
    make
    if [ ! -f "./runs/psb_geaxpby_test" ]; then
        echo -e "${RED}[ERROR] Failed to create executable. Check make command.${RESET}"
        # BUG FIX: previously the script only printed the error and fell
        # through, running mpirun on a binary that does not exist.
        exit 1
    fi
else
    echo -e "${BLUE}[INFO]\t The executable already exists. Skipping the make process.${RESET}"
fi

# Execute tests and save results
echo -e "${BLUE}[INFO]\t Running the PSBLAS psb_geaxpby test...${RESET}"
echo ""
echo -e "${BLUE}[INFO]\t Starting single process computation${RESET}"
mpirun -np 1 ./runs/psb_geaxpby_test
echo -e "${BLUE}[INFO]\t Single process computation terminated correctly${RESET}"
echo ""
echo -e "${BLUE}[INFO]\t Starting $num_procs processes computation${RESET}"
mpirun -np "$num_procs" ./runs/psb_geaxpby_test
echo -e "${BLUE}[INFO]\t Multiple processes computation terminated correctly${RESET}"
echo "" >> "${log_file_name}"

# Iterate through files in the first directory
for file1 in "$dir1"/*; do
    filename=$(basename "$file1")   # Extract the filename
    file2="$dir2/$filename"         # Construct the path for the second directory
    # Check if the file exists in the second directory
    if [ -f "$file2" ]; then
        diff_count=$(diff "$file1" "$file2" | wc -l)   # Compare the files
        echo "Comparison between $file1 and $file2: $diff_count differences" >> "${log_file_name}"
    else
        echo -e "${RED}[ERROR] File $filename does not exist in $dir2${RESET}"
    fi
done
echo -e "${BLUE}[INFO]\t PSBLAS psb_geaxpby test succesfully completed.${RESET}"

@ -0,0 +1,201 @@
program main
  ! Driver for the psb_geaxpby test suite (see README.md).
  !
  ! Runs the geaxpby kernel over every combination of the x/y input vector
  ! files and the alpha/beta coefficients, first generating single
  ! precision results and then validating them with a double precision
  ! check, logging every step to psblas_geaxpby_test.log.
  use psb_geaxpby_test
  use psb_base_mod
  implicit none
  ! MPI variables
  integer(psb_ipk_) :: my_rank, np
  ! Communicator variable
  type(psb_ctxt_type) :: ctxt
  ! parameters array
  character(len=64) :: x(4),y(4)
  real(psb_dpk_)    :: alpha(3), beta(3)
  integer(psb_ipk_) :: arr_size
  integer(psb_ipk_) :: tests_number, count
  ! cycle indexes variables (unused index 'l' removed)
  integer(psb_ipk_) :: i,j,k,h
  integer(psb_ipk_) :: info, ret, unit
  ! time stats variables
  character(len=8)  :: date  ! YYYYMMDD
  character(len=10) :: time  ! HHMMSS.sss
  character(len=5)  :: zones ! Time zone
  integer :: values(8)
  ! others
  character(len=:), allocatable :: output_file_name

  ! Input vector files: per README.md, x1..x4 / y1..y4 hold positive,
  ! negative, random and null coefficients respectively.
  x(1) = "vectors/x1.mtx"
  x(2) = "vectors/x2.mtx"
  x(3) = "vectors/x3.mtx"
  x(4) = "vectors/x4.mtx"
  y(1) = "vectors/y1.mtx"
  y(2) = "vectors/y2.mtx"
  y(3) = "vectors/y3.mtx"
  y(4) = "vectors/y4.mtx"
  ! Coefficient values: +1, -1 and 0 for both alpha and beta.
  alpha(1) = done
  alpha(2) = -done
  alpha(3) = dzero
  beta(1) = done
  beta(2) = -done
  beta(3) = dzero
  arr_size = 10000
  tests_number = size(x) * size(y) * size(alpha) * size(beta)
  count = 0

  call psb_init(ctxt)
  call psb_info(ctxt,my_rank,np)

  if(my_rank == psb_root_) then
    ! Setup logger output: fresh log for the serial run, append for the
    ! parallel run (the serial run is expected to happen first).
    if(np == 1) then
      open(newunit=unit, file='psblas_geaxpby_test.log', status='replace', action='write', iostat=info)
    else
      open(newunit=unit, file='psblas_geaxpby_test.log', status='old', action='write', position='append', iostat=info)
    end if
    if (info /= 0) then
      print *, 'Error opening output file.'
      print *, "I/O Status Code:", info
      stop
    end if
    psb_out_unit = unit
    write(psb_out_unit,'(A,A)') 'Welcome to PSBLAS version: ',psb_version_string_
    write(psb_out_unit,'(A)') 'This is the psb_geaxpby_test sample program'
    write(psb_out_unit,'(A,I0)') 'Number of processes used in this computation: ', np
    write(psb_out_unit,'(A)') ''
    call generate_vectors(arr_size)
  end if
  call psb_bcast(ctxt,psb_out_unit)
  call psb_barrier(ctxt)

  !-------------------------------------------------------------------
  ! Single precision pass over all parameter combinations.
  !-------------------------------------------------------------------
  if(my_rank == psb_root_) write(*,'(A)') "[INFO] Starting single precision computation..."
  do i=1,size(x)
    do j=1,size(y)
      do k=1,size(alpha)
        do h=1,size(beta)
          call psb_geaxpby_kernel(x_file=x(i), y_file=y(j), alpha = real(alpha(k),psb_spk_),&
               & beta = real(beta(h),psb_spk_), arr_size = arr_size, ctxt = ctxt, ret = ret, &
               & output_file_name = output_file_name)
          if(my_rank == psb_root_) then
            count = count + 1
            call date_and_time(date, time, zones, values)
            if(ret /= -1) then
              ! Success formatted output
              write(psb_out_unit,'("[", I4.4,"-",I2.2,"-",I2.2," ",I2.2,":",I2.2,":",I2.2,"] ",&
                   & A,A,A,I0,A,I0,T110,A)') &
                   & values(1), values(2), values(3), values(5), values(6), values(7), &
                   & "Generation geaxpby single precision result file ", &
                   & output_file_name , ' ', count , "/", tests_number, "[OK]"
            else
              ! Fail formatted output
              write(psb_out_unit,'("[", I4.4,"-",I2.2,"-",I2.2," ",I2.2,":",I2.2,":",I2.2,"] ",&
                   & A,A,A,I0,A,I0,T110,A)') &
                   & values(1), values(2), values(3), values(5), values(6), values(7), &
                   & "Generation geaxpby single precision result file ", &
                   & output_file_name , ' ', count , "/", tests_number, "[FAIL]"
              ! NOTE(review): only the root rank branches to the error
              ! handler while the other ranks keep looping into
              ! psb_barrier; with np > 1 this can deadlock. Confirm that
              ! ret is set consistently on all ranks by the kernel.
              goto 9998
            end if
          end if
          call psb_barrier(ctxt)
        end do
      end do
    end do
  end do
  if(my_rank == psb_root_) write(*,'(A)') "[INFO] Single precision computation completed succesfully!"

  if(my_rank == psb_root_) then
    write(psb_out_unit, *) ''
    count = 0
  end if

  !-------------------------------------------------------------------
  ! Double precision validation pass over the same combinations.
  !-------------------------------------------------------------------
  if(my_rank == psb_root_) write(*,'(A)') "[INFO] Starting double precision check..."
  call psb_barrier(ctxt)
  do i=1,size(x)
    do j=1,size(y)
      do k=1,size(alpha)
        do h=1,size(beta)
          call psb_geaxpby_check(x_file=x(i), y_file=y(j), alpha = alpha(k), beta = beta(h), &
               & arr_size = arr_size, ctxt = ctxt, ret = ret, output_file_name = output_file_name)
          if(my_rank == psb_root_) then
            count = count + 1
            call date_and_time(date, time, zones, values)
            if(ret == 0) then
              ! Success formatted output
              write(psb_out_unit,'("[", I4.4,"-",I2.2,"-",I2.2," ",I2.2,":",I2.2,":",I2.2,"] ",&
                   & A,A,A,I0,A,I0,T110,A)') &
                   & values(1), values(2), values(3), values(5), values(6), values(7), &
                   & "Double precision check on file ", &
                   & output_file_name , ' ', count , "/", tests_number, "[OK]"
            else
              ! Fail formatted output
              write(psb_out_unit,'("[", I4.4,"-",I2.2,"-",I2.2," ",I2.2,":",I2.2,":",I2.2,"] ",&
                   & A,A,A,I0,A,I0,T110,A)') &
                   & values(1), values(2), values(3), values(5), values(6), values(7), &
                   & "Double precision check on file ", &
                   & output_file_name , ' ', count , "/", tests_number, "[FAIL]"
              write(psb_out_unit,'(A,I0)') "[ERROR] Error at element ", abs(ret)
              write(psb_out_unit,'(A,F10.8)') "Alpha:", alpha(k)
              write(psb_out_unit,'(A,F15.8)') "Beta: ", beta(h)
              goto 9999
            end if
          end if
          call psb_barrier(ctxt)
        end do
      end do
    end do
  end do
  if(my_rank == psb_root_) then
    write(*,'(A)') "[INFO] Duble precision check completed succesfully!"
    close(unit)
  end if
  call psb_exit(ctxt)
  ! BUG FIX: this was 'return', which is not allowed in a main program;
  ! 'stop' also guarantees we never fall into the handlers below.
  stop

  ! Error handler: single precision generation failed.
9998 continue
  if(my_rank == psb_root_) then
    close(unit)
    write(*,'(A,I0,A,I0,A)') "[ERROR] Error in geaxpby single precision computation ", &
         & count, "/", tests_number, " see log file for details"
  end if
  ! BUG FIX: control previously fell through into the 9999 handler, so a
  ! single precision failure also printed the double precision error
  ! message and closed the unit a second time.
  call psb_exit(ctxt)
  stop

  ! Error handler: double precision check failed.
9999 continue
  if(my_rank == psb_root_) then
    close(unit)
    write(*,'(A,I0,A,I0,A)') "[ERROR] Error in geaxpby double precision check ", &
         & count, "/", tests_number, " see log file for details"
  end if
  call psb_exit(ctxt)
  stop
end program main

@ -0,0 +1,567 @@
!> Test program for the y = alpha * x + beta * y psb_geaxpby routine
!! Check the README.md to see all details about the tests.
!!
!! Authors: Luca Pepé Sciarria, Staccone Simone (Tor Vergata University)
!!
!! psb_geaxpby(alpha, x, beta, y, desc_a, info)
!!
!! Type: Synchronous.
!!
!! ======================================
!! | Data type | Precision |
!! ======================================
!! | psb_spk_ | Short Precision Real |
!! | psb_dpk_ | Long Precision Real |
!! | psb_cpk_ | Short Precision Complex|
!! | psb_zpk_ | Long Precision Complex |
!! ======================================
!! Table 1: Data types
!!
!! ROUTINE PARAMETERS
!!
!! Input:
!!
!! alpha Description: the scalar α.
!! Scope: global
!! Type: required
!! Intent: in
!! Specified as: a number of the data type indicated in Table 1.
!!
!! x Description: the local portion of global dense matrix x.
!! Scope: local
!! Type: required
!! Intent: in
!! Specified as: a rank one or two array or an object of type psb_T_vect_type
!! containing numbers of type specified in Table 1. The rank of x must be
!! the same of y.
!!
!! beta Description: the scalar β.
!! Scope: global
!! Type: required
!! Intent: in.
!! Specified as: a number of the data type indicated in Table 1.
!!
!! y Description: the local portion of the global dense matrix y.
!! Scope: local
!! Type: required
!! Intent: inout
!! Specified as: a rank one or two array or an object of type psb_T_vect_type
!! containing numbers of the type indicated in Table 1. The rank of y must
!! be the same of x.
!!
!! desc_a Description: contains data structures for communications.
!! Scope: local
!! Type: required
!! Intent: in
!! Specified as: an object of type psb desc type.
!!
!! Output:
!!
!! y Description: the local portion of the global dense matrix y.
!! Scope: local
!! Type: required
!! Intent: inout
!! Specified as: a rank one or two array or an object of type psb_T_vect_type
!! containing numbers of the type indicated in Table 1. The rank of y must
!! be the same of x.
!!
!! info Description: Error code.
!! Scope: local
!! Type: required
!! Intent: out.
!! Specified as: An integer value; 0 means no error has been detected.
!!
module psb_geaxpby_test
contains
!> @brief Subroutine to execute psb_geaxpby in single precision and
!! save the results on file
!!
subroutine psb_geaxpby_kernel(x_file, y_file, alpha, beta, arr_size, ctxt, ret, output_file_name)
  !! Compute y = alpha*x + beta*y in single precision on the vectors read
  !! from x_file and y_file, gather the result on the root process and
  !! save it to a MatrixMarket file whose name encodes the input files
  !! and the alpha/beta coefficients.
  !!
  !! x_file, y_file   : MatrixMarket files holding the input vectors
  !! alpha, beta      : single precision scalar coefficients
  !! arr_size         : global length of the vectors
  !! ctxt             : PSBLAS communication context
  !! ret              : 0 on success; on internal error the whole
  !!                    program is stopped (see label 9999)
  !! output_file_name : on exit, the path the result was written to
  use psb_base_mod
  use psb_util_mod
  implicit none
  ! input parameters
  character(len = *), intent(in) :: x_file, y_file
  real(psb_spk_), intent(in) :: alpha, beta
  integer(psb_ipk_), intent(in) :: arr_size
  type(psb_ctxt_type), intent(in) :: ctxt
  ! output parameters
  integer(psb_ipk_), intent(out) :: ret
  character(len=:), allocatable, intent(out) :: output_file_name
  ! distributed vectors
  type(psb_s_vect_type) :: x, y
  ! matrix descriptor data structure
  type(psb_desc_type) :: desc_a
  ! communication context info
  integer(psb_ipk_) :: my_rank, np, info, err_act
  ! replicated global copies of the vectors (allocated on root only)
  real(psb_spk_), allocatable :: x_global(:), y_global(:)
  integer(psb_ipk_) :: i
  ! others
  logical :: exists
  info = psb_success_
  ! BUG FIX: ret was left undefined on the success path; callers test
  ! ret == 0 for success, so initialize it here.
  ret = 0
  call psb_info(ctxt,my_rank,np)
  if (my_rank < 0) then
    ! This should not happen, but just in case
    call psb_error(ctxt)
  endif
  ! Read the input vectors from file on the root process only
  if(my_rank == psb_root_) then
    allocate(x_global(arr_size))
    allocate(y_global(arr_size))
    call mm_array_read(x_global,info,filename=x_file)
    call mm_array_read(y_global,info,filename=y_file)
  end if
  ! Allocate descriptor as if it was a block rows distribution
  call psb_cdall(ctxt, desc_a, info,nl=arr_size/np)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error allocating desc_a data structure"
    goto 9999
  end if
  call psb_cdasb(desc_a, info)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error assembling desc_a data structure"
    goto 9999
  end if
  call psb_geall(x,desc_a,info)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error allocating x data structure"
    goto 9999
  end if
  ! Populate x class using data from x_global vector
  call psb_scatter(x_global,x,desc_a,info,root=psb_root_)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error in psb_scatter to populate x data structure"
    goto 9999
  end if
  call psb_geall(y,desc_a,info)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error allocating y data structure"
    goto 9999
  end if
  ! Populate y class using data from y_global vector
  call psb_scatter(y_global,y,desc_a,info,root=psb_root_)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error in psb_scatter to populate y data structure"
    goto 9999
  end if
  ! y = alpha * x + beta * y
  call psb_geaxpby(alpha,x,beta,y,desc_a,info)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error in psb_geaxpby routine"
    goto 9999
  end if
  ! Make the root process be the one that saves everything on file
  if(np == 1) then
    ! Check if output directory exists
    inquire(file='serial/', exist=exists)
    if (.not.exists) then
      call system('mkdir serial/')
    end if
    output_file_name = "serial/"
  else
    ! Check if output directory exists
    inquire(file='parallel/', exist=exists)
    if (.not.exists) then
      call system('mkdir parallel/')
    end if
    output_file_name = "parallel/"
  end if
  ! NOTE(review): assumes input paths look like "vectors/x1.mtx" so that
  ! characters 9:10 are the vector tag -- confirm against callers.
  output_file_name = output_file_name // "sol_" // x_file(9:10) // "_" // y_file(9:10)
  ! Encode the alpha coefficient (1, -1 or 0) in the file name
  if(alpha == sone) then
    output_file_name = output_file_name // "_a1"
  else if(alpha == -sone) then
    output_file_name = output_file_name // "_a2"
  else if(alpha == szero) then
    output_file_name = output_file_name // "_a3"
  end if
  ! Encode the beta coefficient (1, -1 or 0) in the file name
  if(beta == sone) then
    output_file_name = output_file_name // "_b1.mtx"
  else if(beta == -sone) then
    output_file_name = output_file_name // "_b2.mtx"
  else if(beta == szero) then
    output_file_name = output_file_name // "_b3.mtx"
  end if
  ! gather the result combining all the partial ones
  call psb_gather(y_global, y, desc_a, info)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error gathering global vector x to write on file"
    goto 9999
  end if
  ! Save result to output file
  if(my_rank == psb_root_) then
    call mm_array_write(y_global,"Result vector",info,filename=output_file_name)
  end if
  ! Deallocate
  call psb_gefree(x, desc_a,info)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error in vector x free routine"
    goto 9999
  end if
  call psb_gefree(y, desc_a,info)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error in vector y free routine"
    goto 9999
  end if
  call psb_cdfree(desc_a,info)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error in matrix descriptor free routine"
    goto 9999
  end if
  ! Only root allocated the global copies (psb_root_ is the root rank)
  if(my_rank == psb_root_) then
    deallocate(x_global)
    deallocate(y_global)
  end if
  return
  ! Error handling: record failure and abort the whole run
9999 ret = -1
  stop
end subroutine
!> @brief Subroutine to execute psb_geaxpby in double precision and
!! compare the results with the ones on file
!!
subroutine psb_geaxpby_check(x_file, y_file, alpha, beta, arr_size, ctxt, ret, output_file_name)
  !! Recompute y = alpha*x + beta*y in double precision and compare the
  !! result, element by element, against the single precision result
  !! previously saved on file by psb_geaxpby_kernel.
  !!
  !! x_file, y_file   : MatrixMarket files holding the input vectors
  !! alpha, beta      : double precision scalar coefficients
  !! arr_size         : global length of the vectors
  !! ctxt             : PSBLAS communication context
  !! ret              : 0 if every element matches within tolerance,
  !!                    -i where i is the first mismatching element;
  !!                    on internal error the whole program is stopped
  !! output_file_name : on exit, the path of the file that was checked
  use psb_base_mod
  use psb_util_mod
  implicit none
  ! input parameters
  character(len = *), intent(in) :: x_file, y_file
  real(psb_dpk_), intent(in) :: alpha, beta
  integer(psb_ipk_), intent(in) :: arr_size
  type(psb_ctxt_type), intent(in) :: ctxt
  ! output parameters
  integer(psb_ipk_), intent(out) :: ret
  character(len=:), allocatable, intent(out) :: output_file_name
  ! distributed vectors
  type(psb_d_vect_type) :: x, y
  type(psb_s_vect_type) :: y_check
  ! matrix descriptor data structure
  type(psb_desc_type) :: desc_a
  ! communication context info
  integer(psb_ipk_) :: my_rank, np, info, err_act
  ! replicated global copies of the vectors (allocated on root only)
  real(psb_dpk_), allocatable :: x_global(:), y_global(:)
  integer(psb_ipk_) :: i
  ! others
  logical :: exists
  info = psb_success_
  ! BUG FIX: ret used to be assigned 0 unconditionally just before the
  ! final return, clobbering any mismatch code (-i) recorded by the
  ! comparison loop. Initialize it here instead, on every rank, so a
  ! recorded failure survives until the caller sees it.
  ret = 0
  call psb_info(ctxt,my_rank,np)
  if (my_rank < 0) then
    ! This should not happen, but just in case
    call psb_error(ctxt)
  endif
  ! Read the input vectors from file on the root process only
  if(my_rank == psb_root_) then
    allocate(x_global(arr_size))
    allocate(y_global(arr_size))
    call mm_array_read(x_global,info,filename=x_file)
    call mm_array_read(y_global,info,filename=y_file)
  end if
  ! Allocate descriptor as if it was a block rows distribution.
  ! BUG FIX: the local size was hardcoded as 10000/np; use the arr_size
  ! parameter, consistently with psb_geaxpby_kernel.
  call psb_cdall(ctxt, desc_a, info,nl=arr_size/np)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error allocating desc_a data structure"
    goto 9999
  end if
  call psb_cdasb(desc_a, info)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error assembling desc_a data structure"
    goto 9999
  end if
  call psb_geall(x,desc_a,info)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error allocating x data structure"
    goto 9999
  end if
  ! Populate x class using data from x_global vector
  call psb_scatter(x_global,x,desc_a,info,root=psb_root_)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error in psb_scatter to populate x data structure"
    goto 9999
  end if
  call psb_geall(y,desc_a,info)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error allocating y data structure"
    goto 9999
  end if
  ! Populate y class using data from y_global vector
  call psb_scatter(y_global,y,desc_a,info,root=psb_root_)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error in psb_scatter to populate y data structure"
    goto 9999
  end if
  call psb_geall(y_check,desc_a,info)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error allocating y_check data structure"
    goto 9999
  end if
  ! y = alpha * x + beta * y
  call psb_geaxpby(alpha,x,beta,y,desc_a,info)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error in psb_geaxpby routine"
    goto 9999
  end if
  ! gather the result combining all the partial ones
  call psb_gather(y_global, y, desc_a, info)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error gathering global vector y used for comparison"
    goto 9999
  end if
  if(my_rank == psb_root_) then
    ! Rebuild the file name the single precision run saved its result
    ! under, then compare against it element by element.
    if(np == 1) then
      ! Check if output directory exists
      inquire(file='serial/', exist=exists)
      if(.not.exists) then
        write(psb_out_unit,'(A)') "Error in psb_geaxpby_check routine, no single precision result is saved on file"
        goto 9999
      end if
      output_file_name = "serial/"
    else
      ! Check if output directory exists
      inquire(file='parallel/', exist=exists)
      if(.not.exists) then
        write(psb_out_unit,'(A)') "Error in psb_geaxpby_check routine, no single precision result is saved on file"
        goto 9999
      end if
      output_file_name = "parallel/"
    end if
    ! NOTE(review): assumes input paths look like "vectors/x1.mtx" so
    ! characters 9:10 are the vector tag -- confirm against callers.
    output_file_name = output_file_name // "sol_" // x_file(9:10) // "_" // y_file(9:10)
    if(alpha == done) then
      output_file_name = output_file_name // "_a1"
    else if(alpha == -done) then
      output_file_name = output_file_name // "_a2"
    else if(alpha == dzero) then
      output_file_name = output_file_name // "_a3"
    end if
    if(beta == done) then
      output_file_name = output_file_name // "_b1.mtx"
    else if(beta == -done) then
      output_file_name = output_file_name // "_b2.mtx"
    else if(beta == dzero) then
      output_file_name = output_file_name // "_b3.mtx"
    end if
    ! Read single precision result from file
    call mm_array_read(y_check,info,filename=output_file_name)
    if(info /= psb_success_) then
      write(psb_out_unit,'(A)') "Error in mm_array_read for y_check data structure"
      goto 9999
    end if
    ! 5.96e-08 is 2^-24 (Single precision unit roundoff)
    ! 1.19e-07 is 2^-23 (Single precision unit interval)
    do i=1, arr_size
      if(abs(y_global(i) - y_check%v%v(i)) > 1.19e-07) then
        ! Record the first mismatching element and stop comparing
        ret = -i
        write(psb_out_unit, '(A,F10.8)') "Y computed in double precision: ", y_global(i)
        write(psb_out_unit, '(A,F10.8)') "Y read from single precision file: ", y_check%v%v(i)
        write(psb_out_unit, '(A,F10.8)') "Diff: ", abs(y_global(i) - y_check%v%v(i))
        exit
      end if
    end do
  end if
  ! Deallocate
  call psb_gefree(x, desc_a,info)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error in vector x free routine"
    goto 9999
  end if
  call psb_gefree(y, desc_a,info)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error in vector y free routine"
    goto 9999
  end if
  call psb_gefree(y_check, desc_a,info)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error in vector y_check free routine"
    goto 9999
  end if
  call psb_cdfree(desc_a,info)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error in matrix descriptor free routine"
    goto 9999
  end if
  ! Only root allocated the global copies (psb_root_ is the root rank)
  if(my_rank == psb_root_) then
    deallocate(x_global)
    deallocate(y_global)
  end if
  return
  ! Error handling: record failure and abort the whole run
9999 ret = -1
  stop
end subroutine
!> @brief Function to randomly generate x and y vectors
!! and save them on multiple files based on their
!! coefficients values.
!!
subroutine generate_vectors(arr_size)
  !! Generate reproducible test vectors of length arr_size and write
  !! them to MatrixMarket files under vectors/:
  !!   x1/y1 : positive entries in (0,1)
  !!   x2/y2 : negated copies of x1/y1 (all negative)
  !!   x3/y3 : entries shifted into (-0.5, 0.5) (mixed sign)
  !!   x4/y4 : all zeros
  use psb_base_mod
  use psb_util_mod
  implicit none
  integer(psb_ipk_), intent(in) :: arr_size
  real(psb_dpk_), allocatable :: x(:), y(:)
  integer(psb_ipk_) :: info
  logical :: exists
  ! Create the output directory if it does not exist yet
  inquire(file='vectors/', exist=exists)
  if (.not.exists) then
    call system('mkdir vectors/')
  end if
  allocate(x(arr_size))
  allocate(y(arr_size))
  ! Repeatable seed so every run generates the same vectors
  call random_init(repeatable=.true.,image_distinct=.true.)
  call random_number(x)
  call random_number(y)
  ! x1/y1: random_number yields values in [0,1), so these are positive
  call mm_array_write(x,"Positive vector",info,filename="vectors/x1.mtx")
  call mm_array_write(y,"Positive vector",info,filename="vectors/y1.mtx")
  ! x2/y2: negate everything (whole-array assignment instead of a loop)
  x = -x
  y = -y
  call mm_array_write(x,"Negative vector",info,filename="vectors/x2.mtx")
  call mm_array_write(y,"Negative vector",info,filename="vectors/y2.mtx")
  ! x3/y3: shift by 0.5 to obtain mixed-sign values in (-0.5, 0.5)
  ! (the original comment said "subtracting 50"; the code subtracts 0.5)
  x = -x ! Make the values positive again
  x = x - 0.5
  y = -y ! Make the values positive again
  y = y - 0.5
  call mm_array_write(x,"Random vector",info,filename="vectors/x3.mtx")
  call mm_array_write(y,"Random vector",info,filename="vectors/y3.mtx")
  ! x4/y4: null vectors
  x = 0
  y = 0
  call mm_array_write(x,"Null vector",info,filename="vectors/x4.mtx")
  call mm_array_write(y,"Null vector",info,filename="vectors/y4.mtx")
  deallocate(x)
  deallocate(y)
end subroutine
end module psb_geaxpby_test

@ -0,0 +1,731 @@
Welcome to PSBLAS version: 3.9.0
This is the psb_geaxpby_test sample program
Number of processes used in this computation: 1
[2025-06-12 13:37:32] Generation geaxpby single precision result file serial/sol_x1_y1_a1_b1.mtx 1/144 [OK]
[2025-06-12 13:37:32] Generation geaxpby single precision result file serial/sol_x1_y1_a1_b2.mtx 2/144 [OK]
[2025-06-12 13:37:32] Generation geaxpby single precision result file serial/sol_x1_y1_a1_b3.mtx 3/144 [OK]
[2025-06-12 13:37:32] Generation geaxpby single precision result file serial/sol_x1_y1_a2_b1.mtx 4/144 [OK]
[2025-06-12 13:37:32] Generation geaxpby single precision result file serial/sol_x1_y1_a2_b2.mtx 5/144 [OK]
[2025-06-12 13:37:32] Generation geaxpby single precision result file serial/sol_x1_y1_a2_b3.mtx 6/144 [OK]
[2025-06-12 13:37:32] Generation geaxpby single precision result file serial/sol_x1_y1_a3_b1.mtx 7/144 [OK]
[2025-06-12 13:37:32] Generation geaxpby single precision result file serial/sol_x1_y1_a3_b2.mtx 8/144 [OK]
[2025-06-12 13:37:32] Generation geaxpby single precision result file serial/sol_x1_y1_a3_b3.mtx 9/144 [OK]
[2025-06-12 13:37:32] Generation geaxpby single precision result file serial/sol_x1_y2_a1_b1.mtx 10/144 [OK]
[2025-06-12 13:37:32] Generation geaxpby single precision result file serial/sol_x1_y2_a1_b2.mtx 11/144 [OK]
[2025-06-12 13:37:32] Generation geaxpby single precision result file serial/sol_x1_y2_a1_b3.mtx 12/144 [OK]
[2025-06-12 13:37:32] Generation geaxpby single precision result file serial/sol_x1_y2_a2_b1.mtx 13/144 [OK]
[2025-06-12 13:37:32] Generation geaxpby single precision result file serial/sol_x1_y2_a2_b2.mtx 14/144 [OK]
[2025-06-12 13:37:32] Generation geaxpby single precision result file serial/sol_x1_y2_a2_b3.mtx 15/144 [OK]
[2025-06-12 13:37:32] Generation geaxpby single precision result file serial/sol_x1_y2_a3_b1.mtx 16/144 [OK]
[2025-06-12 13:37:33] Generation geaxpby single precision result file serial/sol_x1_y2_a3_b2.mtx 17/144 [OK]
[2025-06-12 13:37:33] Generation geaxpby single precision result file serial/sol_x1_y2_a3_b3.mtx 18/144 [OK]
[2025-06-12 13:37:33] Generation geaxpby single precision result file serial/sol_x1_y3_a1_b1.mtx 19/144 [OK]
[2025-06-12 13:37:33] Generation geaxpby single precision result file serial/sol_x1_y3_a1_b2.mtx 20/144 [OK]
[2025-06-12 13:37:33] Generation geaxpby single precision result file serial/sol_x1_y3_a1_b3.mtx 21/144 [OK]
[2025-06-12 13:37:33] Generation geaxpby single precision result file serial/sol_x1_y3_a2_b1.mtx 22/144 [OK]
[2025-06-12 13:37:33] Generation geaxpby single precision result file serial/sol_x1_y3_a2_b2.mtx 23/144 [OK]
[2025-06-12 13:37:33] Generation geaxpby single precision result file serial/sol_x1_y3_a2_b3.mtx 24/144 [OK]
[2025-06-12 13:37:33] Generation geaxpby single precision result file serial/sol_x1_y3_a3_b1.mtx 25/144 [OK]
[2025-06-12 13:37:33] Generation geaxpby single precision result file serial/sol_x1_y3_a3_b2.mtx 26/144 [OK]
[2025-06-12 13:37:33] Generation geaxpby single precision result file serial/sol_x1_y3_a3_b3.mtx 27/144 [OK]
[2025-06-12 13:37:33] Generation geaxpby single precision result file serial/sol_x1_y4_a1_b1.mtx 28/144 [OK]
[2025-06-12 13:37:33] Generation geaxpby single precision result file serial/sol_x1_y4_a1_b2.mtx 29/144 [OK]
[2025-06-12 13:37:33] Generation geaxpby single precision result file serial/sol_x1_y4_a1_b3.mtx 30/144 [OK]
[2025-06-12 13:37:33] Generation geaxpby single precision result file serial/sol_x1_y4_a2_b1.mtx 31/144 [OK]
[2025-06-12 13:37:33] Generation geaxpby single precision result file serial/sol_x1_y4_a2_b2.mtx 32/144 [OK]
[2025-06-12 13:37:33] Generation geaxpby single precision result file serial/sol_x1_y4_a2_b3.mtx 33/144 [OK]
[2025-06-12 13:37:33] Generation geaxpby single precision result file serial/sol_x1_y4_a3_b1.mtx 34/144 [OK]
[2025-06-12 13:37:33] Generation geaxpby single precision result file serial/sol_x1_y4_a3_b2.mtx 35/144 [OK]
[2025-06-12 13:37:33] Generation geaxpby single precision result file serial/sol_x1_y4_a3_b3.mtx 36/144 [OK]
[2025-06-12 13:37:33] Generation geaxpby single precision result file serial/sol_x2_y1_a1_b1.mtx 37/144 [OK]
[2025-06-12 13:37:33] Generation geaxpby single precision result file serial/sol_x2_y1_a1_b2.mtx 38/144 [OK]
[2025-06-12 13:37:33] Generation geaxpby single precision result file serial/sol_x2_y1_a1_b3.mtx 39/144 [OK]
[2025-06-12 13:37:33] Generation geaxpby single precision result file serial/sol_x2_y1_a2_b1.mtx 40/144 [OK]
[2025-06-12 13:37:33] Generation geaxpby single precision result file serial/sol_x2_y1_a2_b2.mtx 41/144 [OK]
[2025-06-12 13:37:33] Generation geaxpby single precision result file serial/sol_x2_y1_a2_b3.mtx 42/144 [OK]
[2025-06-12 13:37:33] Generation geaxpby single precision result file serial/sol_x2_y1_a3_b1.mtx 43/144 [OK]
[2025-06-12 13:37:33] Generation geaxpby single precision result file serial/sol_x2_y1_a3_b2.mtx 44/144 [OK]
[2025-06-12 13:37:33] Generation geaxpby single precision result file serial/sol_x2_y1_a3_b3.mtx 45/144 [OK]
[2025-06-12 13:37:33] Generation geaxpby single precision result file serial/sol_x2_y2_a1_b1.mtx 46/144 [OK]
[2025-06-12 13:37:33] Generation geaxpby single precision result file serial/sol_x2_y2_a1_b2.mtx 47/144 [OK]
[2025-06-12 13:37:33] Generation geaxpby single precision result file serial/sol_x2_y2_a1_b3.mtx 48/144 [OK]
[2025-06-12 13:37:33] Generation geaxpby single precision result file serial/sol_x2_y2_a2_b1.mtx 49/144 [OK]
[2025-06-12 13:37:33] Generation geaxpby single precision result file serial/sol_x2_y2_a2_b2.mtx 50/144 [OK]
[2025-06-12 13:37:33] Generation geaxpby single precision result file serial/sol_x2_y2_a2_b3.mtx 51/144 [OK]
[2025-06-12 13:37:33] Generation geaxpby single precision result file serial/sol_x2_y2_a3_b1.mtx 52/144 [OK]
[2025-06-12 13:37:33] Generation geaxpby single precision result file serial/sol_x2_y2_a3_b2.mtx 53/144 [OK]
[2025-06-12 13:37:33] Generation geaxpby single precision result file serial/sol_x2_y2_a3_b3.mtx 54/144 [OK]
[2025-06-12 13:37:33] Generation geaxpby single precision result file serial/sol_x2_y3_a1_b1.mtx 55/144 [OK]
[2025-06-12 13:37:33] Generation geaxpby single precision result file serial/sol_x2_y3_a1_b2.mtx 56/144 [OK]
[2025-06-12 13:37:33] Generation geaxpby single precision result file serial/sol_x2_y3_a1_b3.mtx 57/144 [OK]
[2025-06-12 13:37:34] Generation geaxpby single precision result file serial/sol_x2_y3_a2_b1.mtx 58/144 [OK]
[2025-06-12 13:37:34] Generation geaxpby single precision result file serial/sol_x2_y3_a2_b2.mtx 59/144 [OK]
[2025-06-12 13:37:34] Generation geaxpby single precision result file serial/sol_x2_y3_a2_b3.mtx 60/144 [OK]
[2025-06-12 13:37:34] Generation geaxpby single precision result file serial/sol_x2_y3_a3_b1.mtx 61/144 [OK]
[2025-06-12 13:37:34] Generation geaxpby single precision result file serial/sol_x2_y3_a3_b2.mtx 62/144 [OK]
[2025-06-12 13:37:34] Generation geaxpby single precision result file serial/sol_x2_y3_a3_b3.mtx 63/144 [OK]
[2025-06-12 13:37:34] Generation geaxpby single precision result file serial/sol_x2_y4_a1_b1.mtx 64/144 [OK]
[2025-06-12 13:37:34] Generation geaxpby single precision result file serial/sol_x2_y4_a1_b2.mtx 65/144 [OK]
[2025-06-12 13:37:34] Generation geaxpby single precision result file serial/sol_x2_y4_a1_b3.mtx 66/144 [OK]
[2025-06-12 13:37:34] Generation geaxpby single precision result file serial/sol_x2_y4_a2_b1.mtx 67/144 [OK]
[2025-06-12 13:37:34] Generation geaxpby single precision result file serial/sol_x2_y4_a2_b2.mtx 68/144 [OK]
[2025-06-12 13:37:34] Generation geaxpby single precision result file serial/sol_x2_y4_a2_b3.mtx 69/144 [OK]
[2025-06-12 13:37:34] Generation geaxpby single precision result file serial/sol_x2_y4_a3_b1.mtx 70/144 [OK]
[2025-06-12 13:37:34] Generation geaxpby single precision result file serial/sol_x2_y4_a3_b2.mtx 71/144 [OK]
[2025-06-12 13:37:34] Generation geaxpby single precision result file serial/sol_x2_y4_a3_b3.mtx 72/144 [OK]
[2025-06-12 13:37:34] Generation geaxpby single precision result file serial/sol_x3_y1_a1_b1.mtx 73/144 [OK]
[2025-06-12 13:37:34] Generation geaxpby single precision result file serial/sol_x3_y1_a1_b2.mtx 74/144 [OK]
[2025-06-12 13:37:34] Generation geaxpby single precision result file serial/sol_x3_y1_a1_b3.mtx 75/144 [OK]
[2025-06-12 13:37:34] Generation geaxpby single precision result file serial/sol_x3_y1_a2_b1.mtx 76/144 [OK]
[2025-06-12 13:37:34] Generation geaxpby single precision result file serial/sol_x3_y1_a2_b2.mtx 77/144 [OK]
[2025-06-12 13:37:34] Generation geaxpby single precision result file serial/sol_x3_y1_a2_b3.mtx 78/144 [OK]
[2025-06-12 13:37:34] Generation geaxpby single precision result file serial/sol_x3_y1_a3_b1.mtx 79/144 [OK]
[2025-06-12 13:37:34] Generation geaxpby single precision result file serial/sol_x3_y1_a3_b2.mtx 80/144 [OK]
[2025-06-12 13:37:34] Generation geaxpby single precision result file serial/sol_x3_y1_a3_b3.mtx 81/144 [OK]
[2025-06-12 13:37:34] Generation geaxpby single precision result file serial/sol_x3_y2_a1_b1.mtx 82/144 [OK]
[2025-06-12 13:37:34] Generation geaxpby single precision result file serial/sol_x3_y2_a1_b2.mtx 83/144 [OK]
[2025-06-12 13:37:34] Generation geaxpby single precision result file serial/sol_x3_y2_a1_b3.mtx 84/144 [OK]
[2025-06-12 13:37:34] Generation geaxpby single precision result file serial/sol_x3_y2_a2_b1.mtx 85/144 [OK]
[2025-06-12 13:37:34] Generation geaxpby single precision result file serial/sol_x3_y2_a2_b2.mtx 86/144 [OK]
[2025-06-12 13:37:34] Generation geaxpby single precision result file serial/sol_x3_y2_a2_b3.mtx 87/144 [OK]
[2025-06-12 13:37:34] Generation geaxpby single precision result file serial/sol_x3_y2_a3_b1.mtx 88/144 [OK]
[2025-06-12 13:37:34] Generation geaxpby single precision result file serial/sol_x3_y2_a3_b2.mtx 89/144 [OK]
[2025-06-12 13:37:34] Generation geaxpby single precision result file serial/sol_x3_y2_a3_b3.mtx 90/144 [OK]
[2025-06-12 13:37:34] Generation geaxpby single precision result file serial/sol_x3_y3_a1_b1.mtx 91/144 [OK]
[2025-06-12 13:37:34] Generation geaxpby single precision result file serial/sol_x3_y3_a1_b2.mtx 92/144 [OK]
[2025-06-12 13:37:34] Generation geaxpby single precision result file serial/sol_x3_y3_a1_b3.mtx 93/144 [OK]
[2025-06-12 13:37:34] Generation geaxpby single precision result file serial/sol_x3_y3_a2_b1.mtx 94/144 [OK]
[2025-06-12 13:37:34] Generation geaxpby single precision result file serial/sol_x3_y3_a2_b2.mtx 95/144 [OK]
[2025-06-12 13:37:34] Generation geaxpby single precision result file serial/sol_x3_y3_a2_b3.mtx 96/144 [OK]
[2025-06-12 13:37:34] Generation geaxpby single precision result file serial/sol_x3_y3_a3_b1.mtx 97/144 [OK]
[2025-06-12 13:37:35] Generation geaxpby single precision result file serial/sol_x3_y3_a3_b2.mtx 98/144 [OK]
[2025-06-12 13:37:35] Generation geaxpby single precision result file serial/sol_x3_y3_a3_b3.mtx 99/144 [OK]
[2025-06-12 13:37:35] Generation geaxpby single precision result file serial/sol_x3_y4_a1_b1.mtx 100/144 [OK]
[2025-06-12 13:37:35] Generation geaxpby single precision result file serial/sol_x3_y4_a1_b2.mtx 101/144 [OK]
[2025-06-12 13:37:35] Generation geaxpby single precision result file serial/sol_x3_y4_a1_b3.mtx 102/144 [OK]
[2025-06-12 13:37:35] Generation geaxpby single precision result file serial/sol_x3_y4_a2_b1.mtx 103/144 [OK]
[2025-06-12 13:37:35] Generation geaxpby single precision result file serial/sol_x3_y4_a2_b2.mtx 104/144 [OK]
[2025-06-12 13:37:35] Generation geaxpby single precision result file serial/sol_x3_y4_a2_b3.mtx 105/144 [OK]
[2025-06-12 13:37:35] Generation geaxpby single precision result file serial/sol_x3_y4_a3_b1.mtx 106/144 [OK]
[2025-06-12 13:37:35] Generation geaxpby single precision result file serial/sol_x3_y4_a3_b2.mtx 107/144 [OK]
[2025-06-12 13:37:35] Generation geaxpby single precision result file serial/sol_x3_y4_a3_b3.mtx 108/144 [OK]
[2025-06-12 13:37:35] Generation geaxpby single precision result file serial/sol_x4_y1_a1_b1.mtx 109/144 [OK]
[2025-06-12 13:37:35] Generation geaxpby single precision result file serial/sol_x4_y1_a1_b2.mtx 110/144 [OK]
[2025-06-12 13:37:35] Generation geaxpby single precision result file serial/sol_x4_y1_a1_b3.mtx 111/144 [OK]
[2025-06-12 13:37:35] Generation geaxpby single precision result file serial/sol_x4_y1_a2_b1.mtx 112/144 [OK]
[2025-06-12 13:37:35] Generation geaxpby single precision result file serial/sol_x4_y1_a2_b2.mtx 113/144 [OK]
[2025-06-12 13:37:35] Generation geaxpby single precision result file serial/sol_x4_y1_a2_b3.mtx 114/144 [OK]
[2025-06-12 13:37:35] Generation geaxpby single precision result file serial/sol_x4_y1_a3_b1.mtx 115/144 [OK]
[2025-06-12 13:37:35] Generation geaxpby single precision result file serial/sol_x4_y1_a3_b2.mtx 116/144 [OK]
[2025-06-12 13:37:35] Generation geaxpby single precision result file serial/sol_x4_y1_a3_b3.mtx 117/144 [OK]
[2025-06-12 13:37:35] Generation geaxpby single precision result file serial/sol_x4_y2_a1_b1.mtx 118/144 [OK]
[2025-06-12 13:37:35] Generation geaxpby single precision result file serial/sol_x4_y2_a1_b2.mtx 119/144 [OK]
[2025-06-12 13:37:35] Generation geaxpby single precision result file serial/sol_x4_y2_a1_b3.mtx 120/144 [OK]
[2025-06-12 13:37:35] Generation geaxpby single precision result file serial/sol_x4_y2_a2_b1.mtx 121/144 [OK]
[2025-06-12 13:37:35] Generation geaxpby single precision result file serial/sol_x4_y2_a2_b2.mtx 122/144 [OK]
[2025-06-12 13:37:35] Generation geaxpby single precision result file serial/sol_x4_y2_a2_b3.mtx 123/144 [OK]
[2025-06-12 13:37:35] Generation geaxpby single precision result file serial/sol_x4_y2_a3_b1.mtx 124/144 [OK]
[2025-06-12 13:37:35] Generation geaxpby single precision result file serial/sol_x4_y2_a3_b2.mtx 125/144 [OK]
[2025-06-12 13:37:35] Generation geaxpby single precision result file serial/sol_x4_y2_a3_b3.mtx 126/144 [OK]
[2025-06-12 13:37:35] Generation geaxpby single precision result file serial/sol_x4_y3_a1_b1.mtx 127/144 [OK]
[2025-06-12 13:37:35] Generation geaxpby single precision result file serial/sol_x4_y3_a1_b2.mtx 128/144 [OK]
[2025-06-12 13:37:35] Generation geaxpby single precision result file serial/sol_x4_y3_a1_b3.mtx 129/144 [OK]
[2025-06-12 13:37:35] Generation geaxpby single precision result file serial/sol_x4_y3_a2_b1.mtx 130/144 [OK]
[2025-06-12 13:37:35] Generation geaxpby single precision result file serial/sol_x4_y3_a2_b2.mtx 131/144 [OK]
[2025-06-12 13:37:35] Generation geaxpby single precision result file serial/sol_x4_y3_a2_b3.mtx 132/144 [OK]
[2025-06-12 13:37:35] Generation geaxpby single precision result file serial/sol_x4_y3_a3_b1.mtx 133/144 [OK]
[2025-06-12 13:37:35] Generation geaxpby single precision result file serial/sol_x4_y3_a3_b2.mtx 134/144 [OK]
[2025-06-12 13:37:35] Generation geaxpby single precision result file serial/sol_x4_y3_a3_b3.mtx 135/144 [OK]
[2025-06-12 13:37:35] Generation geaxpby single precision result file serial/sol_x4_y4_a1_b1.mtx 136/144 [OK]
[2025-06-12 13:37:35] Generation geaxpby single precision result file serial/sol_x4_y4_a1_b2.mtx 137/144 [OK]
[2025-06-12 13:37:35] Generation geaxpby single precision result file serial/sol_x4_y4_a1_b3.mtx 138/144 [OK]
[2025-06-12 13:37:35] Generation geaxpby single precision result file serial/sol_x4_y4_a2_b1.mtx 139/144 [OK]
[2025-06-12 13:37:36] Generation geaxpby single precision result file serial/sol_x4_y4_a2_b2.mtx 140/144 [OK]
[2025-06-12 13:37:36] Generation geaxpby single precision result file serial/sol_x4_y4_a2_b3.mtx 141/144 [OK]
[2025-06-12 13:37:36] Generation geaxpby single precision result file serial/sol_x4_y4_a3_b1.mtx 142/144 [OK]
[2025-06-12 13:37:36] Generation geaxpby single precision result file serial/sol_x4_y4_a3_b2.mtx 143/144 [OK]
[2025-06-12 13:37:36] Generation geaxpby single precision result file serial/sol_x4_y4_a3_b3.mtx 144/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x1_y1_a1_b1.mtx 1/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x1_y1_a1_b2.mtx 2/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x1_y1_a1_b3.mtx 3/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x1_y1_a2_b1.mtx 4/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x1_y1_a2_b2.mtx 5/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x1_y1_a2_b3.mtx 6/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x1_y1_a3_b1.mtx 7/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x1_y1_a3_b2.mtx 8/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x1_y1_a3_b3.mtx 9/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x1_y2_a1_b1.mtx 10/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x1_y2_a1_b2.mtx 11/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x1_y2_a1_b3.mtx 12/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x1_y2_a2_b1.mtx 13/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x1_y2_a2_b2.mtx 14/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x1_y2_a2_b3.mtx 15/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x1_y2_a3_b1.mtx 16/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x1_y2_a3_b2.mtx 17/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x1_y2_a3_b3.mtx 18/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x1_y3_a1_b1.mtx 19/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x1_y3_a1_b2.mtx 20/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x1_y3_a1_b3.mtx 21/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x1_y3_a2_b1.mtx 22/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x1_y3_a2_b2.mtx 23/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x1_y3_a2_b3.mtx 24/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x1_y3_a3_b1.mtx 25/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x1_y3_a3_b2.mtx 26/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x1_y3_a3_b3.mtx 27/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x1_y4_a1_b1.mtx 28/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x1_y4_a1_b2.mtx 29/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x1_y4_a1_b3.mtx 30/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x1_y4_a2_b1.mtx 31/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x1_y4_a2_b2.mtx 32/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x1_y4_a2_b3.mtx 33/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x1_y4_a3_b1.mtx 34/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x1_y4_a3_b2.mtx 35/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x1_y4_a3_b3.mtx 36/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x2_y1_a1_b1.mtx 37/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x2_y1_a1_b2.mtx 38/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x2_y1_a1_b3.mtx 39/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x2_y1_a2_b1.mtx 40/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x2_y1_a2_b2.mtx 41/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x2_y1_a2_b3.mtx 42/144 [OK]
[2025-06-12 13:37:36] Double precision check on file serial/sol_x2_y1_a3_b1.mtx 43/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x2_y1_a3_b2.mtx 44/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x2_y1_a3_b3.mtx 45/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x2_y2_a1_b1.mtx 46/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x2_y2_a1_b2.mtx 47/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x2_y2_a1_b3.mtx 48/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x2_y2_a2_b1.mtx 49/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x2_y2_a2_b2.mtx 50/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x2_y2_a2_b3.mtx 51/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x2_y2_a3_b1.mtx 52/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x2_y2_a3_b2.mtx 53/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x2_y2_a3_b3.mtx 54/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x2_y3_a1_b1.mtx 55/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x2_y3_a1_b2.mtx 56/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x2_y3_a1_b3.mtx 57/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x2_y3_a2_b1.mtx 58/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x2_y3_a2_b2.mtx 59/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x2_y3_a2_b3.mtx 60/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x2_y3_a3_b1.mtx 61/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x2_y3_a3_b2.mtx 62/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x2_y3_a3_b3.mtx 63/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x2_y4_a1_b1.mtx 64/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x2_y4_a1_b2.mtx 65/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x2_y4_a1_b3.mtx 66/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x2_y4_a2_b1.mtx 67/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x2_y4_a2_b2.mtx 68/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x2_y4_a2_b3.mtx 69/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x2_y4_a3_b1.mtx 70/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x2_y4_a3_b2.mtx 71/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x2_y4_a3_b3.mtx 72/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x3_y1_a1_b1.mtx 73/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x3_y1_a1_b2.mtx 74/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x3_y1_a1_b3.mtx 75/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x3_y1_a2_b1.mtx 76/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x3_y1_a2_b2.mtx 77/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x3_y1_a2_b3.mtx 78/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x3_y1_a3_b1.mtx 79/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x3_y1_a3_b2.mtx 80/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x3_y1_a3_b3.mtx 81/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x3_y2_a1_b1.mtx 82/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x3_y2_a1_b2.mtx 83/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x3_y2_a1_b3.mtx 84/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x3_y2_a2_b1.mtx 85/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x3_y2_a2_b2.mtx 86/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x3_y2_a2_b3.mtx 87/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x3_y2_a3_b1.mtx 88/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x3_y2_a3_b2.mtx 89/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x3_y2_a3_b3.mtx 90/144 [OK]
[2025-06-12 13:37:37] Double precision check on file serial/sol_x3_y3_a1_b1.mtx 91/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x3_y3_a1_b2.mtx 92/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x3_y3_a1_b3.mtx 93/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x3_y3_a2_b1.mtx 94/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x3_y3_a2_b2.mtx 95/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x3_y3_a2_b3.mtx 96/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x3_y3_a3_b1.mtx 97/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x3_y3_a3_b2.mtx 98/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x3_y3_a3_b3.mtx 99/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x3_y4_a1_b1.mtx 100/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x3_y4_a1_b2.mtx 101/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x3_y4_a1_b3.mtx 102/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x3_y4_a2_b1.mtx 103/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x3_y4_a2_b2.mtx 104/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x3_y4_a2_b3.mtx 105/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x3_y4_a3_b1.mtx 106/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x3_y4_a3_b2.mtx 107/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x3_y4_a3_b3.mtx 108/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x4_y1_a1_b1.mtx 109/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x4_y1_a1_b2.mtx 110/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x4_y1_a1_b3.mtx 111/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x4_y1_a2_b1.mtx 112/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x4_y1_a2_b2.mtx 113/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x4_y1_a2_b3.mtx 114/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x4_y1_a3_b1.mtx 115/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x4_y1_a3_b2.mtx 116/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x4_y1_a3_b3.mtx 117/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x4_y2_a1_b1.mtx 118/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x4_y2_a1_b2.mtx 119/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x4_y2_a1_b3.mtx 120/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x4_y2_a2_b1.mtx 121/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x4_y2_a2_b2.mtx 122/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x4_y2_a2_b3.mtx 123/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x4_y2_a3_b1.mtx 124/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x4_y2_a3_b2.mtx 125/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x4_y2_a3_b3.mtx 126/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x4_y3_a1_b1.mtx 127/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x4_y3_a1_b2.mtx 128/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x4_y3_a1_b3.mtx 129/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x4_y3_a2_b1.mtx 130/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x4_y3_a2_b2.mtx 131/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x4_y3_a2_b3.mtx 132/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x4_y3_a3_b1.mtx 133/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x4_y3_a3_b2.mtx 134/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x4_y3_a3_b3.mtx 135/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x4_y4_a1_b1.mtx 136/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x4_y4_a1_b2.mtx 137/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x4_y4_a1_b3.mtx 138/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x4_y4_a2_b1.mtx 139/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x4_y4_a2_b2.mtx 140/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x4_y4_a2_b3.mtx 141/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x4_y4_a3_b1.mtx 142/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x4_y4_a3_b2.mtx 143/144 [OK]
[2025-06-12 13:37:38] Double precision check on file serial/sol_x4_y4_a3_b3.mtx 144/144 [OK]
Welcome to PSBLAS version: 3.9.0
This is the psb_geaxpby_test sample program
Number of processes used in this computation: 40
[2025-06-12 13:37:39] Generation geaxpby single precision result file parallel/sol_x1_y1_a1_b1.mtx 1/144 [OK]
[2025-06-12 13:37:39] Generation geaxpby single precision result file parallel/sol_x1_y1_a1_b2.mtx 2/144 [OK]
[2025-06-12 13:37:39] Generation geaxpby single precision result file parallel/sol_x1_y1_a1_b3.mtx 3/144 [OK]
[2025-06-12 13:37:39] Generation geaxpby single precision result file parallel/sol_x1_y1_a2_b1.mtx 4/144 [OK]
[2025-06-12 13:37:39] Generation geaxpby single precision result file parallel/sol_x1_y1_a2_b2.mtx 5/144 [OK]
[2025-06-12 13:37:39] Generation geaxpby single precision result file parallel/sol_x1_y1_a2_b3.mtx 6/144 [OK]
[2025-06-12 13:37:39] Generation geaxpby single precision result file parallel/sol_x1_y1_a3_b1.mtx 7/144 [OK]
[2025-06-12 13:37:39] Generation geaxpby single precision result file parallel/sol_x1_y1_a3_b2.mtx 8/144 [OK]
[2025-06-12 13:37:40] Generation geaxpby single precision result file parallel/sol_x1_y1_a3_b3.mtx 9/144 [OK]
[2025-06-12 13:37:40] Generation geaxpby single precision result file parallel/sol_x1_y2_a1_b1.mtx 10/144 [OK]
[2025-06-12 13:37:40] Generation geaxpby single precision result file parallel/sol_x1_y2_a1_b2.mtx 11/144 [OK]
[2025-06-12 13:37:40] Generation geaxpby single precision result file parallel/sol_x1_y2_a1_b3.mtx 12/144 [OK]
[2025-06-12 13:37:40] Generation geaxpby single precision result file parallel/sol_x1_y2_a2_b1.mtx 13/144 [OK]
[2025-06-12 13:37:40] Generation geaxpby single precision result file parallel/sol_x1_y2_a2_b2.mtx 14/144 [OK]
[2025-06-12 13:37:40] Generation geaxpby single precision result file parallel/sol_x1_y2_a2_b3.mtx 15/144 [OK]
[2025-06-12 13:37:40] Generation geaxpby single precision result file parallel/sol_x1_y2_a3_b1.mtx 16/144 [OK]
[2025-06-12 13:37:40] Generation geaxpby single precision result file parallel/sol_x1_y2_a3_b2.mtx 17/144 [OK]
[2025-06-12 13:37:40] Generation geaxpby single precision result file parallel/sol_x1_y2_a3_b3.mtx 18/144 [OK]
[2025-06-12 13:37:40] Generation geaxpby single precision result file parallel/sol_x1_y3_a1_b1.mtx 19/144 [OK]
[2025-06-12 13:37:40] Generation geaxpby single precision result file parallel/sol_x1_y3_a1_b2.mtx 20/144 [OK]
[2025-06-12 13:37:40] Generation geaxpby single precision result file parallel/sol_x1_y3_a1_b3.mtx 21/144 [OK]
[2025-06-12 13:37:40] Generation geaxpby single precision result file parallel/sol_x1_y3_a2_b1.mtx 22/144 [OK]
[2025-06-12 13:37:40] Generation geaxpby single precision result file parallel/sol_x1_y3_a2_b2.mtx 23/144 [OK]
[2025-06-12 13:37:41] Generation geaxpby single precision result file parallel/sol_x1_y3_a2_b3.mtx 24/144 [OK]
[2025-06-12 13:37:41] Generation geaxpby single precision result file parallel/sol_x1_y3_a3_b1.mtx 25/144 [OK]
[2025-06-12 13:37:41] Generation geaxpby single precision result file parallel/sol_x1_y3_a3_b2.mtx 26/144 [OK]
[2025-06-12 13:37:41] Generation geaxpby single precision result file parallel/sol_x1_y3_a3_b3.mtx 27/144 [OK]
[2025-06-12 13:37:41] Generation geaxpby single precision result file parallel/sol_x1_y4_a1_b1.mtx 28/144 [OK]
[2025-06-12 13:37:41] Generation geaxpby single precision result file parallel/sol_x1_y4_a1_b2.mtx 29/144 [OK]
[2025-06-12 13:37:41] Generation geaxpby single precision result file parallel/sol_x1_y4_a1_b3.mtx 30/144 [OK]
[2025-06-12 13:37:41] Generation geaxpby single precision result file parallel/sol_x1_y4_a2_b1.mtx 31/144 [OK]
[2025-06-12 13:37:41] Generation geaxpby single precision result file parallel/sol_x1_y4_a2_b2.mtx 32/144 [OK]
[2025-06-12 13:37:41] Generation geaxpby single precision result file parallel/sol_x1_y4_a2_b3.mtx 33/144 [OK]
[2025-06-12 13:37:41] Generation geaxpby single precision result file parallel/sol_x1_y4_a3_b1.mtx 34/144 [OK]
[2025-06-12 13:37:41] Generation geaxpby single precision result file parallel/sol_x1_y4_a3_b2.mtx 35/144 [OK]
[2025-06-12 13:37:41] Generation geaxpby single precision result file parallel/sol_x1_y4_a3_b3.mtx 36/144 [OK]
[2025-06-12 13:37:41] Generation geaxpby single precision result file parallel/sol_x2_y1_a1_b1.mtx 37/144 [OK]
[2025-06-12 13:37:41] Generation geaxpby single precision result file parallel/sol_x2_y1_a1_b2.mtx 38/144 [OK]
[2025-06-12 13:37:42] Generation geaxpby single precision result file parallel/sol_x2_y1_a1_b3.mtx 39/144 [OK]
[2025-06-12 13:37:42] Generation geaxpby single precision result file parallel/sol_x2_y1_a2_b1.mtx 40/144 [OK]
[2025-06-12 13:37:42] Generation geaxpby single precision result file parallel/sol_x2_y1_a2_b2.mtx 41/144 [OK]
[2025-06-12 13:37:42] Generation geaxpby single precision result file parallel/sol_x2_y1_a2_b3.mtx 42/144 [OK]
[2025-06-12 13:37:42] Generation geaxpby single precision result file parallel/sol_x2_y1_a3_b1.mtx 43/144 [OK]
[2025-06-12 13:37:42] Generation geaxpby single precision result file parallel/sol_x2_y1_a3_b2.mtx 44/144 [OK]
[2025-06-12 13:37:42] Generation geaxpby single precision result file parallel/sol_x2_y1_a3_b3.mtx 45/144 [OK]
[2025-06-12 13:37:42] Generation geaxpby single precision result file parallel/sol_x2_y2_a1_b1.mtx 46/144 [OK]
[2025-06-12 13:37:42] Generation geaxpby single precision result file parallel/sol_x2_y2_a1_b2.mtx 47/144 [OK]
[2025-06-12 13:37:42] Generation geaxpby single precision result file parallel/sol_x2_y2_a1_b3.mtx 48/144 [OK]
[2025-06-12 13:37:42] Generation geaxpby single precision result file parallel/sol_x2_y2_a2_b1.mtx 49/144 [OK]
[2025-06-12 13:37:42] Generation geaxpby single precision result file parallel/sol_x2_y2_a2_b2.mtx 50/144 [OK]
[2025-06-12 13:37:42] Generation geaxpby single precision result file parallel/sol_x2_y2_a2_b3.mtx 51/144 [OK]
[2025-06-12 13:37:42] Generation geaxpby single precision result file parallel/sol_x2_y2_a3_b1.mtx 52/144 [OK]
[2025-06-12 13:37:42] Generation geaxpby single precision result file parallel/sol_x2_y2_a3_b2.mtx 53/144 [OK]
[2025-06-12 13:37:43] Generation geaxpby single precision result file parallel/sol_x2_y2_a3_b3.mtx 54/144 [OK]
[2025-06-12 13:37:43] Generation geaxpby single precision result file parallel/sol_x2_y3_a1_b1.mtx 55/144 [OK]
[2025-06-12 13:37:43] Generation geaxpby single precision result file parallel/sol_x2_y3_a1_b2.mtx 56/144 [OK]
[2025-06-12 13:37:43] Generation geaxpby single precision result file parallel/sol_x2_y3_a1_b3.mtx 57/144 [OK]
[2025-06-12 13:37:43] Generation geaxpby single precision result file parallel/sol_x2_y3_a2_b1.mtx 58/144 [OK]
[2025-06-12 13:37:43] Generation geaxpby single precision result file parallel/sol_x2_y3_a2_b2.mtx 59/144 [OK]
[2025-06-12 13:37:43] Generation geaxpby single precision result file parallel/sol_x2_y3_a2_b3.mtx 60/144 [OK]
[2025-06-12 13:37:43] Generation geaxpby single precision result file parallel/sol_x2_y3_a3_b1.mtx 61/144 [OK]
[2025-06-12 13:37:43] Generation geaxpby single precision result file parallel/sol_x2_y3_a3_b2.mtx 62/144 [OK]
[2025-06-12 13:37:43] Generation geaxpby single precision result file parallel/sol_x2_y3_a3_b3.mtx 63/144 [OK]
[2025-06-12 13:37:43] Generation geaxpby single precision result file parallel/sol_x2_y4_a1_b1.mtx 64/144 [OK]
[2025-06-12 13:37:43] Generation geaxpby single precision result file parallel/sol_x2_y4_a1_b2.mtx 65/144 [OK]
[2025-06-12 13:37:43] Generation geaxpby single precision result file parallel/sol_x2_y4_a1_b3.mtx 66/144 [OK]
[2025-06-12 13:37:43] Generation geaxpby single precision result file parallel/sol_x2_y4_a2_b1.mtx 67/144 [OK]
[2025-06-12 13:37:43] Generation geaxpby single precision result file parallel/sol_x2_y4_a2_b2.mtx 68/144 [OK]
[2025-06-12 13:37:43] Generation geaxpby single precision result file parallel/sol_x2_y4_a2_b3.mtx 69/144 [OK]
[2025-06-12 13:37:44] Generation geaxpby single precision result file parallel/sol_x2_y4_a3_b1.mtx 70/144 [OK]
[2025-06-12 13:37:44] Generation geaxpby single precision result file parallel/sol_x2_y4_a3_b2.mtx 71/144 [OK]
[2025-06-12 13:37:44] Generation geaxpby single precision result file parallel/sol_x2_y4_a3_b3.mtx 72/144 [OK]
[2025-06-12 13:37:44] Generation geaxpby single precision result file parallel/sol_x3_y1_a1_b1.mtx 73/144 [OK]
[2025-06-12 13:37:44] Generation geaxpby single precision result file parallel/sol_x3_y1_a1_b2.mtx 74/144 [OK]
[2025-06-12 13:37:44] Generation geaxpby single precision result file parallel/sol_x3_y1_a1_b3.mtx 75/144 [OK]
[2025-06-12 13:37:44] Generation geaxpby single precision result file parallel/sol_x3_y1_a2_b1.mtx 76/144 [OK]
[2025-06-12 13:37:44] Generation geaxpby single precision result file parallel/sol_x3_y1_a2_b2.mtx 77/144 [OK]
[2025-06-12 13:37:44] Generation geaxpby single precision result file parallel/sol_x3_y1_a2_b3.mtx 78/144 [OK]
[2025-06-12 13:37:44] Generation geaxpby single precision result file parallel/sol_x3_y1_a3_b1.mtx 79/144 [OK]
[2025-06-12 13:37:44] Generation geaxpby single precision result file parallel/sol_x3_y1_a3_b2.mtx 80/144 [OK]
[2025-06-12 13:37:44] Generation geaxpby single precision result file parallel/sol_x3_y1_a3_b3.mtx 81/144 [OK]
[2025-06-12 13:37:44] Generation geaxpby single precision result file parallel/sol_x3_y2_a1_b1.mtx 82/144 [OK]
[2025-06-12 13:37:44] Generation geaxpby single precision result file parallel/sol_x3_y2_a1_b2.mtx 83/144 [OK]
[2025-06-12 13:37:44] Generation geaxpby single precision result file parallel/sol_x3_y2_a1_b3.mtx 84/144 [OK]
[2025-06-12 13:37:45] Generation geaxpby single precision result file parallel/sol_x3_y2_a2_b1.mtx 85/144 [OK]
[2025-06-12 13:37:45] Generation geaxpby single precision result file parallel/sol_x3_y2_a2_b2.mtx 86/144 [OK]
[2025-06-12 13:37:45] Generation geaxpby single precision result file parallel/sol_x3_y2_a2_b3.mtx 87/144 [OK]
[2025-06-12 13:37:45] Generation geaxpby single precision result file parallel/sol_x3_y2_a3_b1.mtx 88/144 [OK]
[2025-06-12 13:37:45] Generation geaxpby single precision result file parallel/sol_x3_y2_a3_b2.mtx 89/144 [OK]
[2025-06-12 13:37:45] Generation geaxpby single precision result file parallel/sol_x3_y2_a3_b3.mtx 90/144 [OK]
[2025-06-12 13:37:45] Generation geaxpby single precision result file parallel/sol_x3_y3_a1_b1.mtx 91/144 [OK]
[2025-06-12 13:37:45] Generation geaxpby single precision result file parallel/sol_x3_y3_a1_b2.mtx 92/144 [OK]
[2025-06-12 13:37:45] Generation geaxpby single precision result file parallel/sol_x3_y3_a1_b3.mtx 93/144 [OK]
[2025-06-12 13:37:45] Generation geaxpby single precision result file parallel/sol_x3_y3_a2_b1.mtx 94/144 [OK]
[2025-06-12 13:37:45] Generation geaxpby single precision result file parallel/sol_x3_y3_a2_b2.mtx 95/144 [OK]
[2025-06-12 13:37:45] Generation geaxpby single precision result file parallel/sol_x3_y3_a2_b3.mtx 96/144 [OK]
[2025-06-12 13:37:45] Generation geaxpby single precision result file parallel/sol_x3_y3_a3_b1.mtx 97/144 [OK]
[2025-06-12 13:37:45] Generation geaxpby single precision result file parallel/sol_x3_y3_a3_b2.mtx 98/144 [OK]
[2025-06-12 13:37:45] Generation geaxpby single precision result file parallel/sol_x3_y3_a3_b3.mtx 99/144 [OK]
[2025-06-12 13:37:46] Generation geaxpby single precision result file parallel/sol_x3_y4_a1_b1.mtx 100/144 [OK]
[2025-06-12 13:37:46] Generation geaxpby single precision result file parallel/sol_x3_y4_a1_b2.mtx 101/144 [OK]
[2025-06-12 13:37:46] Generation geaxpby single precision result file parallel/sol_x3_y4_a1_b3.mtx 102/144 [OK]
[2025-06-12 13:37:46] Generation geaxpby single precision result file parallel/sol_x3_y4_a2_b1.mtx 103/144 [OK]
[2025-06-12 13:37:46] Generation geaxpby single precision result file parallel/sol_x3_y4_a2_b2.mtx 104/144 [OK]
[2025-06-12 13:37:46] Generation geaxpby single precision result file parallel/sol_x3_y4_a2_b3.mtx 105/144 [OK]
[2025-06-12 13:37:46] Generation geaxpby single precision result file parallel/sol_x3_y4_a3_b1.mtx 106/144 [OK]
[2025-06-12 13:37:46] Generation geaxpby single precision result file parallel/sol_x3_y4_a3_b2.mtx 107/144 [OK]
[2025-06-12 13:37:46] Generation geaxpby single precision result file parallel/sol_x3_y4_a3_b3.mtx 108/144 [OK]
[2025-06-12 13:37:46] Generation geaxpby single precision result file parallel/sol_x4_y1_a1_b1.mtx 109/144 [OK]
[2025-06-12 13:37:46] Generation geaxpby single precision result file parallel/sol_x4_y1_a1_b2.mtx 110/144 [OK]
[2025-06-12 13:37:46] Generation geaxpby single precision result file parallel/sol_x4_y1_a1_b3.mtx 111/144 [OK]
[2025-06-12 13:37:46] Generation geaxpby single precision result file parallel/sol_x4_y1_a2_b1.mtx 112/144 [OK]
[2025-06-12 13:37:46] Generation geaxpby single precision result file parallel/sol_x4_y1_a2_b2.mtx 113/144 [OK]
[2025-06-12 13:37:46] Generation geaxpby single precision result file parallel/sol_x4_y1_a2_b3.mtx 114/144 [OK]
[2025-06-12 13:37:46] Generation geaxpby single precision result file parallel/sol_x4_y1_a3_b1.mtx 115/144 [OK]
[2025-06-12 13:37:47] Generation geaxpby single precision result file parallel/sol_x4_y1_a3_b2.mtx 116/144 [OK]
[2025-06-12 13:37:47] Generation geaxpby single precision result file parallel/sol_x4_y1_a3_b3.mtx 117/144 [OK]
[2025-06-12 13:37:47] Generation geaxpby single precision result file parallel/sol_x4_y2_a1_b1.mtx 118/144 [OK]
[2025-06-12 13:37:47] Generation geaxpby single precision result file parallel/sol_x4_y2_a1_b2.mtx 119/144 [OK]
[2025-06-12 13:37:47] Generation geaxpby single precision result file parallel/sol_x4_y2_a1_b3.mtx 120/144 [OK]
[2025-06-12 13:37:47] Generation geaxpby single precision result file parallel/sol_x4_y2_a2_b1.mtx 121/144 [OK]
[2025-06-12 13:37:47] Generation geaxpby single precision result file parallel/sol_x4_y2_a2_b2.mtx 122/144 [OK]
[2025-06-12 13:37:47] Generation geaxpby single precision result file parallel/sol_x4_y2_a2_b3.mtx 123/144 [OK]
[2025-06-12 13:37:47] Generation geaxpby single precision result file parallel/sol_x4_y2_a3_b1.mtx 124/144 [OK]
[2025-06-12 13:37:47] Generation geaxpby single precision result file parallel/sol_x4_y2_a3_b2.mtx 125/144 [OK]
[2025-06-12 13:37:47] Generation geaxpby single precision result file parallel/sol_x4_y2_a3_b3.mtx 126/144 [OK]
[2025-06-12 13:37:47] Generation geaxpby single precision result file parallel/sol_x4_y3_a1_b1.mtx 127/144 [OK]
[2025-06-12 13:37:47] Generation geaxpby single precision result file parallel/sol_x4_y3_a1_b2.mtx 128/144 [OK]
[2025-06-12 13:37:47] Generation geaxpby single precision result file parallel/sol_x4_y3_a1_b3.mtx 129/144 [OK]
[2025-06-12 13:37:47] Generation geaxpby single precision result file parallel/sol_x4_y3_a2_b1.mtx 130/144 [OK]
[2025-06-12 13:37:48] Generation geaxpby single precision result file parallel/sol_x4_y3_a2_b2.mtx 131/144 [OK]
[2025-06-12 13:37:48] Generation geaxpby single precision result file parallel/sol_x4_y3_a2_b3.mtx 132/144 [OK]
[2025-06-12 13:37:48] Generation geaxpby single precision result file parallel/sol_x4_y3_a3_b1.mtx 133/144 [OK]
[2025-06-12 13:37:48] Generation geaxpby single precision result file parallel/sol_x4_y3_a3_b2.mtx 134/144 [OK]
[2025-06-12 13:37:48] Generation geaxpby single precision result file parallel/sol_x4_y3_a3_b3.mtx 135/144 [OK]
[2025-06-12 13:37:48] Generation geaxpby single precision result file parallel/sol_x4_y4_a1_b1.mtx 136/144 [OK]
[2025-06-12 13:37:48] Generation geaxpby single precision result file parallel/sol_x4_y4_a1_b2.mtx 137/144 [OK]
[2025-06-12 13:37:48] Generation geaxpby single precision result file parallel/sol_x4_y4_a1_b3.mtx 138/144 [OK]
[2025-06-12 13:37:48] Generation geaxpby single precision result file parallel/sol_x4_y4_a2_b1.mtx 139/144 [OK]
[2025-06-12 13:37:48] Generation geaxpby single precision result file parallel/sol_x4_y4_a2_b2.mtx 140/144 [OK]
[2025-06-12 13:37:48] Generation geaxpby single precision result file parallel/sol_x4_y4_a2_b3.mtx 141/144 [OK]
[2025-06-12 13:37:48] Generation geaxpby single precision result file parallel/sol_x4_y4_a3_b1.mtx 142/144 [OK]
[2025-06-12 13:37:48] Generation geaxpby single precision result file parallel/sol_x4_y4_a3_b2.mtx 143/144 [OK]
[2025-06-12 13:37:48] Generation geaxpby single precision result file parallel/sol_x4_y4_a3_b3.mtx 144/144 [OK]
[2025-06-12 13:37:48] Double precision check on file parallel/sol_x1_y1_a1_b1.mtx 1/144 [OK]
[2025-06-12 13:37:48] Double precision check on file parallel/sol_x1_y1_a1_b2.mtx 2/144 [OK]
[2025-06-12 13:37:48] Double precision check on file parallel/sol_x1_y1_a1_b3.mtx 3/144 [OK]
[2025-06-12 13:37:49] Double precision check on file parallel/sol_x1_y1_a2_b1.mtx 4/144 [OK]
[2025-06-12 13:37:49] Double precision check on file parallel/sol_x1_y1_a2_b2.mtx 5/144 [OK]
[2025-06-12 13:37:49] Double precision check on file parallel/sol_x1_y1_a2_b3.mtx 6/144 [OK]
[2025-06-12 13:37:49] Double precision check on file parallel/sol_x1_y1_a3_b1.mtx 7/144 [OK]
[2025-06-12 13:37:49] Double precision check on file parallel/sol_x1_y1_a3_b2.mtx 8/144 [OK]
[2025-06-12 13:37:49] Double precision check on file parallel/sol_x1_y1_a3_b3.mtx 9/144 [OK]
[2025-06-12 13:37:49] Double precision check on file parallel/sol_x1_y2_a1_b1.mtx 10/144 [OK]
[2025-06-12 13:37:49] Double precision check on file parallel/sol_x1_y2_a1_b2.mtx 11/144 [OK]
[2025-06-12 13:37:49] Double precision check on file parallel/sol_x1_y2_a1_b3.mtx 12/144 [OK]
[2025-06-12 13:37:49] Double precision check on file parallel/sol_x1_y2_a2_b1.mtx 13/144 [OK]
[2025-06-12 13:37:49] Double precision check on file parallel/sol_x1_y2_a2_b2.mtx 14/144 [OK]
[2025-06-12 13:37:49] Double precision check on file parallel/sol_x1_y2_a2_b3.mtx 15/144 [OK]
[2025-06-12 13:37:49] Double precision check on file parallel/sol_x1_y2_a3_b1.mtx 16/144 [OK]
[2025-06-12 13:37:49] Double precision check on file parallel/sol_x1_y2_a3_b2.mtx 17/144 [OK]
[2025-06-12 13:37:49] Double precision check on file parallel/sol_x1_y2_a3_b3.mtx 18/144 [OK]
[2025-06-12 13:37:49] Double precision check on file parallel/sol_x1_y3_a1_b1.mtx 19/144 [OK]
[2025-06-12 13:37:49] Double precision check on file parallel/sol_x1_y3_a1_b2.mtx 20/144 [OK]
[2025-06-12 13:37:49] Double precision check on file parallel/sol_x1_y3_a1_b3.mtx 21/144 [OK]
[2025-06-12 13:37:49] Double precision check on file parallel/sol_x1_y3_a2_b1.mtx 22/144 [OK]
[2025-06-12 13:37:49] Double precision check on file parallel/sol_x1_y3_a2_b2.mtx 23/144 [OK]
[2025-06-12 13:37:49] Double precision check on file parallel/sol_x1_y3_a2_b3.mtx 24/144 [OK]
[2025-06-12 13:37:50] Double precision check on file parallel/sol_x1_y3_a3_b1.mtx 25/144 [OK]
[2025-06-12 13:37:50] Double precision check on file parallel/sol_x1_y3_a3_b2.mtx 26/144 [OK]
[2025-06-12 13:37:50] Double precision check on file parallel/sol_x1_y3_a3_b3.mtx 27/144 [OK]
[2025-06-12 13:37:50] Double precision check on file parallel/sol_x1_y4_a1_b1.mtx 28/144 [OK]
[2025-06-12 13:37:50] Double precision check on file parallel/sol_x1_y4_a1_b2.mtx 29/144 [OK]
[2025-06-12 13:37:50] Double precision check on file parallel/sol_x1_y4_a1_b3.mtx 30/144 [OK]
[2025-06-12 13:37:50] Double precision check on file parallel/sol_x1_y4_a2_b1.mtx 31/144 [OK]
[2025-06-12 13:37:50] Double precision check on file parallel/sol_x1_y4_a2_b2.mtx 32/144 [OK]
[2025-06-12 13:37:50] Double precision check on file parallel/sol_x1_y4_a2_b3.mtx 33/144 [OK]
[2025-06-12 13:37:50] Double precision check on file parallel/sol_x1_y4_a3_b1.mtx 34/144 [OK]
[2025-06-12 13:37:50] Double precision check on file parallel/sol_x1_y4_a3_b2.mtx 35/144 [OK]
[2025-06-12 13:37:50] Double precision check on file parallel/sol_x1_y4_a3_b3.mtx 36/144 [OK]
[2025-06-12 13:37:50] Double precision check on file parallel/sol_x2_y1_a1_b1.mtx 37/144 [OK]
[2025-06-12 13:37:50] Double precision check on file parallel/sol_x2_y1_a1_b2.mtx 38/144 [OK]
[2025-06-12 13:37:50] Double precision check on file parallel/sol_x2_y1_a1_b3.mtx 39/144 [OK]
[2025-06-12 13:37:50] Double precision check on file parallel/sol_x2_y1_a2_b1.mtx 40/144 [OK]
[2025-06-12 13:37:50] Double precision check on file parallel/sol_x2_y1_a2_b2.mtx 41/144 [OK]
[2025-06-12 13:37:50] Double precision check on file parallel/sol_x2_y1_a2_b3.mtx 42/144 [OK]
[2025-06-12 13:37:50] Double precision check on file parallel/sol_x2_y1_a3_b1.mtx 43/144 [OK]
[2025-06-12 13:37:50] Double precision check on file parallel/sol_x2_y1_a3_b2.mtx 44/144 [OK]
[2025-06-12 13:37:50] Double precision check on file parallel/sol_x2_y1_a3_b3.mtx 45/144 [OK]
[2025-06-12 13:37:50] Double precision check on file parallel/sol_x2_y2_a1_b1.mtx 46/144 [OK]
[2025-06-12 13:37:51] Double precision check on file parallel/sol_x2_y2_a1_b2.mtx 47/144 [OK]
[2025-06-12 13:37:51] Double precision check on file parallel/sol_x2_y2_a1_b3.mtx 48/144 [OK]
[2025-06-12 13:37:51] Double precision check on file parallel/sol_x2_y2_a2_b1.mtx 49/144 [OK]
[2025-06-12 13:37:51] Double precision check on file parallel/sol_x2_y2_a2_b2.mtx 50/144 [OK]
[2025-06-12 13:37:51] Double precision check on file parallel/sol_x2_y2_a2_b3.mtx 51/144 [OK]
[2025-06-12 13:37:51] Double precision check on file parallel/sol_x2_y2_a3_b1.mtx 52/144 [OK]
[2025-06-12 13:37:51] Double precision check on file parallel/sol_x2_y2_a3_b2.mtx 53/144 [OK]
[2025-06-12 13:37:51] Double precision check on file parallel/sol_x2_y2_a3_b3.mtx 54/144 [OK]
[2025-06-12 13:37:51] Double precision check on file parallel/sol_x2_y3_a1_b1.mtx 55/144 [OK]
[2025-06-12 13:37:51] Double precision check on file parallel/sol_x2_y3_a1_b2.mtx 56/144 [OK]
[2025-06-12 13:37:51] Double precision check on file parallel/sol_x2_y3_a1_b3.mtx 57/144 [OK]
[2025-06-12 13:37:51] Double precision check on file parallel/sol_x2_y3_a2_b1.mtx 58/144 [OK]
[2025-06-12 13:37:51] Double precision check on file parallel/sol_x2_y3_a2_b2.mtx 59/144 [OK]
[2025-06-12 13:37:51] Double precision check on file parallel/sol_x2_y3_a2_b3.mtx 60/144 [OK]
[2025-06-12 13:37:51] Double precision check on file parallel/sol_x2_y3_a3_b1.mtx 61/144 [OK]
[2025-06-12 13:37:51] Double precision check on file parallel/sol_x2_y3_a3_b2.mtx 62/144 [OK]
[2025-06-12 13:37:51] Double precision check on file parallel/sol_x2_y3_a3_b3.mtx 63/144 [OK]
[2025-06-12 13:37:51] Double precision check on file parallel/sol_x2_y4_a1_b1.mtx 64/144 [OK]
[2025-06-12 13:37:51] Double precision check on file parallel/sol_x2_y4_a1_b2.mtx 65/144 [OK]
[2025-06-12 13:37:51] Double precision check on file parallel/sol_x2_y4_a1_b3.mtx 66/144 [OK]
[2025-06-12 13:37:51] Double precision check on file parallel/sol_x2_y4_a2_b1.mtx 67/144 [OK]
[2025-06-12 13:37:51] Double precision check on file parallel/sol_x2_y4_a2_b2.mtx 68/144 [OK]
[2025-06-12 13:37:52] Double precision check on file parallel/sol_x2_y4_a2_b3.mtx 69/144 [OK]
[2025-06-12 13:37:52] Double precision check on file parallel/sol_x2_y4_a3_b1.mtx 70/144 [OK]
[2025-06-12 13:37:52] Double precision check on file parallel/sol_x2_y4_a3_b2.mtx 71/144 [OK]
[2025-06-12 13:37:52] Double precision check on file parallel/sol_x2_y4_a3_b3.mtx 72/144 [OK]
[2025-06-12 13:37:52] Double precision check on file parallel/sol_x3_y1_a1_b1.mtx 73/144 [OK]
[2025-06-12 13:37:52] Double precision check on file parallel/sol_x3_y1_a1_b2.mtx 74/144 [OK]
[2025-06-12 13:37:52] Double precision check on file parallel/sol_x3_y1_a1_b3.mtx 75/144 [OK]
[2025-06-12 13:37:52] Double precision check on file parallel/sol_x3_y1_a2_b1.mtx 76/144 [OK]
[2025-06-12 13:37:52] Double precision check on file parallel/sol_x3_y1_a2_b2.mtx 77/144 [OK]
[2025-06-12 13:37:52] Double precision check on file parallel/sol_x3_y1_a2_b3.mtx 78/144 [OK]
[2025-06-12 13:37:52] Double precision check on file parallel/sol_x3_y1_a3_b1.mtx 79/144 [OK]
[2025-06-12 13:37:52] Double precision check on file parallel/sol_x3_y1_a3_b2.mtx 80/144 [OK]
[2025-06-12 13:37:52] Double precision check on file parallel/sol_x3_y1_a3_b3.mtx 81/144 [OK]
[2025-06-12 13:37:52] Double precision check on file parallel/sol_x3_y2_a1_b1.mtx 82/144 [OK]
[2025-06-12 13:37:52] Double precision check on file parallel/sol_x3_y2_a1_b2.mtx 83/144 [OK]
[2025-06-12 13:37:52] Double precision check on file parallel/sol_x3_y2_a1_b3.mtx 84/144 [OK]
[2025-06-12 13:37:52] Double precision check on file parallel/sol_x3_y2_a2_b1.mtx 85/144 [OK]
[2025-06-12 13:37:52] Double precision check on file parallel/sol_x3_y2_a2_b2.mtx 86/144 [OK]
[2025-06-12 13:37:52] Double precision check on file parallel/sol_x3_y2_a2_b3.mtx 87/144 [OK]
[2025-06-12 13:37:52] Double precision check on file parallel/sol_x3_y2_a3_b1.mtx 88/144 [OK]
[2025-06-12 13:37:52] Double precision check on file parallel/sol_x3_y2_a3_b2.mtx 89/144 [OK]
[2025-06-12 13:37:53] Double precision check on file parallel/sol_x3_y2_a3_b3.mtx 90/144 [OK]
[2025-06-12 13:37:53] Double precision check on file parallel/sol_x3_y3_a1_b1.mtx 91/144 [OK]
[2025-06-12 13:37:53] Double precision check on file parallel/sol_x3_y3_a1_b2.mtx 92/144 [OK]
[2025-06-12 13:37:53] Double precision check on file parallel/sol_x3_y3_a1_b3.mtx 93/144 [OK]
[2025-06-12 13:37:53] Double precision check on file parallel/sol_x3_y3_a2_b1.mtx 94/144 [OK]
[2025-06-12 13:37:53] Double precision check on file parallel/sol_x3_y3_a2_b2.mtx 95/144 [OK]
[2025-06-12 13:37:53] Double precision check on file parallel/sol_x3_y3_a2_b3.mtx 96/144 [OK]
[2025-06-12 13:37:53] Double precision check on file parallel/sol_x3_y3_a3_b1.mtx 97/144 [OK]
[2025-06-12 13:37:53] Double precision check on file parallel/sol_x3_y3_a3_b2.mtx 98/144 [OK]
[2025-06-12 13:37:53] Double precision check on file parallel/sol_x3_y3_a3_b3.mtx 99/144 [OK]
[2025-06-12 13:37:53] Double precision check on file parallel/sol_x3_y4_a1_b1.mtx 100/144 [OK]
[2025-06-12 13:37:53] Double precision check on file parallel/sol_x3_y4_a1_b2.mtx 101/144 [OK]
[2025-06-12 13:37:53] Double precision check on file parallel/sol_x3_y4_a1_b3.mtx 102/144 [OK]
[2025-06-12 13:37:53] Double precision check on file parallel/sol_x3_y4_a2_b1.mtx 103/144 [OK]
[2025-06-12 13:37:53] Double precision check on file parallel/sol_x3_y4_a2_b2.mtx 104/144 [OK]
[2025-06-12 13:37:53] Double precision check on file parallel/sol_x3_y4_a2_b3.mtx 105/144 [OK]
[2025-06-12 13:37:53] Double precision check on file parallel/sol_x3_y4_a3_b1.mtx 106/144 [OK]
[2025-06-12 13:37:53] Double precision check on file parallel/sol_x3_y4_a3_b2.mtx 107/144 [OK]
[2025-06-12 13:37:53] Double precision check on file parallel/sol_x3_y4_a3_b3.mtx 108/144 [OK]
[2025-06-12 13:37:53] Double precision check on file parallel/sol_x4_y1_a1_b1.mtx 109/144 [OK]
[2025-06-12 13:37:53] Double precision check on file parallel/sol_x4_y1_a1_b2.mtx 110/144 [OK]
[2025-06-12 13:37:53] Double precision check on file parallel/sol_x4_y1_a1_b3.mtx 111/144 [OK]
[2025-06-12 13:37:54] Double precision check on file parallel/sol_x4_y1_a2_b1.mtx 112/144 [OK]
[2025-06-12 13:37:54] Double precision check on file parallel/sol_x4_y1_a2_b2.mtx 113/144 [OK]
[2025-06-12 13:37:54] Double precision check on file parallel/sol_x4_y1_a2_b3.mtx 114/144 [OK]
[2025-06-12 13:37:54] Double precision check on file parallel/sol_x4_y1_a3_b1.mtx 115/144 [OK]
[2025-06-12 13:37:54] Double precision check on file parallel/sol_x4_y1_a3_b2.mtx 116/144 [OK]
[2025-06-12 13:37:54] Double precision check on file parallel/sol_x4_y1_a3_b3.mtx 117/144 [OK]
[2025-06-12 13:37:54] Double precision check on file parallel/sol_x4_y2_a1_b1.mtx 118/144 [OK]
[2025-06-12 13:37:54] Double precision check on file parallel/sol_x4_y2_a1_b2.mtx 119/144 [OK]
[2025-06-12 13:37:54] Double precision check on file parallel/sol_x4_y2_a1_b3.mtx 120/144 [OK]
[2025-06-12 13:37:54] Double precision check on file parallel/sol_x4_y2_a2_b1.mtx 121/144 [OK]
[2025-06-12 13:37:54] Double precision check on file parallel/sol_x4_y2_a2_b2.mtx 122/144 [OK]
[2025-06-12 13:37:54] Double precision check on file parallel/sol_x4_y2_a2_b3.mtx 123/144 [OK]
[2025-06-12 13:37:54] Double precision check on file parallel/sol_x4_y2_a3_b1.mtx 124/144 [OK]
[2025-06-12 13:37:54] Double precision check on file parallel/sol_x4_y2_a3_b2.mtx 125/144 [OK]
[2025-06-12 13:37:54] Double precision check on file parallel/sol_x4_y2_a3_b3.mtx 126/144 [OK]
[2025-06-12 13:37:54] Double precision check on file parallel/sol_x4_y3_a1_b1.mtx 127/144 [OK]
[2025-06-12 13:37:54] Double precision check on file parallel/sol_x4_y3_a1_b2.mtx 128/144 [OK]
[2025-06-12 13:37:54] Double precision check on file parallel/sol_x4_y3_a1_b3.mtx 129/144 [OK]
[2025-06-12 13:37:54] Double precision check on file parallel/sol_x4_y3_a2_b1.mtx 130/144 [OK]
[2025-06-12 13:37:54] Double precision check on file parallel/sol_x4_y3_a2_b2.mtx 131/144 [OK]
[2025-06-12 13:37:54] Double precision check on file parallel/sol_x4_y3_a2_b3.mtx 132/144 [OK]
[2025-06-12 13:37:54] Double precision check on file parallel/sol_x4_y3_a3_b1.mtx 133/144 [OK]
[2025-06-12 13:37:54] Double precision check on file parallel/sol_x4_y3_a3_b2.mtx 134/144 [OK]
[2025-06-12 13:37:55] Double precision check on file parallel/sol_x4_y3_a3_b3.mtx 135/144 [OK]
[2025-06-12 13:37:55] Double precision check on file parallel/sol_x4_y4_a1_b1.mtx 136/144 [OK]
[2025-06-12 13:37:55] Double precision check on file parallel/sol_x4_y4_a1_b2.mtx 137/144 [OK]
[2025-06-12 13:37:55] Double precision check on file parallel/sol_x4_y4_a1_b3.mtx 138/144 [OK]
[2025-06-12 13:37:55] Double precision check on file parallel/sol_x4_y4_a2_b1.mtx 139/144 [OK]
[2025-06-12 13:37:55] Double precision check on file parallel/sol_x4_y4_a2_b2.mtx 140/144 [OK]
[2025-06-12 13:37:55] Double precision check on file parallel/sol_x4_y4_a2_b3.mtx 141/144 [OK]
[2025-06-12 13:37:55] Double precision check on file parallel/sol_x4_y4_a3_b1.mtx 142/144 [OK]
[2025-06-12 13:37:55] Double precision check on file parallel/sol_x4_y4_a3_b2.mtx 143/144 [OK]
[2025-06-12 13:37:55] Double precision check on file parallel/sol_x4_y4_a3_b3.mtx 144/144 [OK]
Comparison between serial/sol_x1_y1_a1_b1.mtx and parallel/sol_x1_y1_a1_b1.mtx: 0 differences
Comparison between serial/sol_x1_y1_a1_b2.mtx and parallel/sol_x1_y1_a1_b2.mtx: 0 differences
Comparison between serial/sol_x1_y1_a1_b3.mtx and parallel/sol_x1_y1_a1_b3.mtx: 0 differences
Comparison between serial/sol_x1_y1_a2_b1.mtx and parallel/sol_x1_y1_a2_b1.mtx: 0 differences
Comparison between serial/sol_x1_y1_a2_b2.mtx and parallel/sol_x1_y1_a2_b2.mtx: 0 differences
Comparison between serial/sol_x1_y1_a2_b3.mtx and parallel/sol_x1_y1_a2_b3.mtx: 0 differences
Comparison between serial/sol_x1_y1_a3_b1.mtx and parallel/sol_x1_y1_a3_b1.mtx: 0 differences
Comparison between serial/sol_x1_y1_a3_b2.mtx and parallel/sol_x1_y1_a3_b2.mtx: 0 differences
Comparison between serial/sol_x1_y1_a3_b3.mtx and parallel/sol_x1_y1_a3_b3.mtx: 0 differences
Comparison between serial/sol_x1_y2_a1_b1.mtx and parallel/sol_x1_y2_a1_b1.mtx: 0 differences
Comparison between serial/sol_x1_y2_a1_b2.mtx and parallel/sol_x1_y2_a1_b2.mtx: 0 differences
Comparison between serial/sol_x1_y2_a1_b3.mtx and parallel/sol_x1_y2_a1_b3.mtx: 0 differences
Comparison between serial/sol_x1_y2_a2_b1.mtx and parallel/sol_x1_y2_a2_b1.mtx: 0 differences
Comparison between serial/sol_x1_y2_a2_b2.mtx and parallel/sol_x1_y2_a2_b2.mtx: 0 differences
Comparison between serial/sol_x1_y2_a2_b3.mtx and parallel/sol_x1_y2_a2_b3.mtx: 0 differences
Comparison between serial/sol_x1_y2_a3_b1.mtx and parallel/sol_x1_y2_a3_b1.mtx: 0 differences
Comparison between serial/sol_x1_y2_a3_b2.mtx and parallel/sol_x1_y2_a3_b2.mtx: 0 differences
Comparison between serial/sol_x1_y2_a3_b3.mtx and parallel/sol_x1_y2_a3_b3.mtx: 0 differences
Comparison between serial/sol_x1_y3_a1_b1.mtx and parallel/sol_x1_y3_a1_b1.mtx: 0 differences
Comparison between serial/sol_x1_y3_a1_b2.mtx and parallel/sol_x1_y3_a1_b2.mtx: 0 differences
Comparison between serial/sol_x1_y3_a1_b3.mtx and parallel/sol_x1_y3_a1_b3.mtx: 0 differences
Comparison between serial/sol_x1_y3_a2_b1.mtx and parallel/sol_x1_y3_a2_b1.mtx: 0 differences
Comparison between serial/sol_x1_y3_a2_b2.mtx and parallel/sol_x1_y3_a2_b2.mtx: 0 differences
Comparison between serial/sol_x1_y3_a2_b3.mtx and parallel/sol_x1_y3_a2_b3.mtx: 0 differences
Comparison between serial/sol_x1_y3_a3_b1.mtx and parallel/sol_x1_y3_a3_b1.mtx: 0 differences
Comparison between serial/sol_x1_y3_a3_b2.mtx and parallel/sol_x1_y3_a3_b2.mtx: 0 differences
Comparison between serial/sol_x1_y3_a3_b3.mtx and parallel/sol_x1_y3_a3_b3.mtx: 0 differences
Comparison between serial/sol_x1_y4_a1_b1.mtx and parallel/sol_x1_y4_a1_b1.mtx: 0 differences
Comparison between serial/sol_x1_y4_a1_b2.mtx and parallel/sol_x1_y4_a1_b2.mtx: 0 differences
Comparison between serial/sol_x1_y4_a1_b3.mtx and parallel/sol_x1_y4_a1_b3.mtx: 0 differences
Comparison between serial/sol_x1_y4_a2_b1.mtx and parallel/sol_x1_y4_a2_b1.mtx: 0 differences
Comparison between serial/sol_x1_y4_a2_b2.mtx and parallel/sol_x1_y4_a2_b2.mtx: 0 differences
Comparison between serial/sol_x1_y4_a2_b3.mtx and parallel/sol_x1_y4_a2_b3.mtx: 0 differences
Comparison between serial/sol_x1_y4_a3_b1.mtx and parallel/sol_x1_y4_a3_b1.mtx: 0 differences
Comparison between serial/sol_x1_y4_a3_b2.mtx and parallel/sol_x1_y4_a3_b2.mtx: 0 differences
Comparison between serial/sol_x1_y4_a3_b3.mtx and parallel/sol_x1_y4_a3_b3.mtx: 0 differences
Comparison between serial/sol_x2_y1_a1_b1.mtx and parallel/sol_x2_y1_a1_b1.mtx: 0 differences
Comparison between serial/sol_x2_y1_a1_b2.mtx and parallel/sol_x2_y1_a1_b2.mtx: 0 differences
Comparison between serial/sol_x2_y1_a1_b3.mtx and parallel/sol_x2_y1_a1_b3.mtx: 0 differences
Comparison between serial/sol_x2_y1_a2_b1.mtx and parallel/sol_x2_y1_a2_b1.mtx: 0 differences
Comparison between serial/sol_x2_y1_a2_b2.mtx and parallel/sol_x2_y1_a2_b2.mtx: 0 differences
Comparison between serial/sol_x2_y1_a2_b3.mtx and parallel/sol_x2_y1_a2_b3.mtx: 0 differences
Comparison between serial/sol_x2_y1_a3_b1.mtx and parallel/sol_x2_y1_a3_b1.mtx: 0 differences
Comparison between serial/sol_x2_y1_a3_b2.mtx and parallel/sol_x2_y1_a3_b2.mtx: 0 differences
Comparison between serial/sol_x2_y1_a3_b3.mtx and parallel/sol_x2_y1_a3_b3.mtx: 0 differences
Comparison between serial/sol_x2_y2_a1_b1.mtx and parallel/sol_x2_y2_a1_b1.mtx: 0 differences
Comparison between serial/sol_x2_y2_a1_b2.mtx and parallel/sol_x2_y2_a1_b2.mtx: 0 differences
Comparison between serial/sol_x2_y2_a1_b3.mtx and parallel/sol_x2_y2_a1_b3.mtx: 0 differences
Comparison between serial/sol_x2_y2_a2_b1.mtx and parallel/sol_x2_y2_a2_b1.mtx: 0 differences
Comparison between serial/sol_x2_y2_a2_b2.mtx and parallel/sol_x2_y2_a2_b2.mtx: 0 differences
Comparison between serial/sol_x2_y2_a2_b3.mtx and parallel/sol_x2_y2_a2_b3.mtx: 0 differences
Comparison between serial/sol_x2_y2_a3_b1.mtx and parallel/sol_x2_y2_a3_b1.mtx: 0 differences
Comparison between serial/sol_x2_y2_a3_b2.mtx and parallel/sol_x2_y2_a3_b2.mtx: 0 differences
Comparison between serial/sol_x2_y2_a3_b3.mtx and parallel/sol_x2_y2_a3_b3.mtx: 0 differences
Comparison between serial/sol_x2_y3_a1_b1.mtx and parallel/sol_x2_y3_a1_b1.mtx: 0 differences
Comparison between serial/sol_x2_y3_a1_b2.mtx and parallel/sol_x2_y3_a1_b2.mtx: 0 differences
Comparison between serial/sol_x2_y3_a1_b3.mtx and parallel/sol_x2_y3_a1_b3.mtx: 0 differences
Comparison between serial/sol_x2_y3_a2_b1.mtx and parallel/sol_x2_y3_a2_b1.mtx: 0 differences
Comparison between serial/sol_x2_y3_a2_b2.mtx and parallel/sol_x2_y3_a2_b2.mtx: 0 differences
Comparison between serial/sol_x2_y3_a2_b3.mtx and parallel/sol_x2_y3_a2_b3.mtx: 0 differences
Comparison between serial/sol_x2_y3_a3_b1.mtx and parallel/sol_x2_y3_a3_b1.mtx: 0 differences
Comparison between serial/sol_x2_y3_a3_b2.mtx and parallel/sol_x2_y3_a3_b2.mtx: 0 differences
Comparison between serial/sol_x2_y3_a3_b3.mtx and parallel/sol_x2_y3_a3_b3.mtx: 0 differences
Comparison between serial/sol_x2_y4_a1_b1.mtx and parallel/sol_x2_y4_a1_b1.mtx: 0 differences
Comparison between serial/sol_x2_y4_a1_b2.mtx and parallel/sol_x2_y4_a1_b2.mtx: 0 differences
Comparison between serial/sol_x2_y4_a1_b3.mtx and parallel/sol_x2_y4_a1_b3.mtx: 0 differences
Comparison between serial/sol_x2_y4_a2_b1.mtx and parallel/sol_x2_y4_a2_b1.mtx: 0 differences
Comparison between serial/sol_x2_y4_a2_b2.mtx and parallel/sol_x2_y4_a2_b2.mtx: 0 differences
Comparison between serial/sol_x2_y4_a2_b3.mtx and parallel/sol_x2_y4_a2_b3.mtx: 0 differences
Comparison between serial/sol_x2_y4_a3_b1.mtx and parallel/sol_x2_y4_a3_b1.mtx: 0 differences
Comparison between serial/sol_x2_y4_a3_b2.mtx and parallel/sol_x2_y4_a3_b2.mtx: 0 differences
Comparison between serial/sol_x2_y4_a3_b3.mtx and parallel/sol_x2_y4_a3_b3.mtx: 0 differences
Comparison between serial/sol_x3_y1_a1_b1.mtx and parallel/sol_x3_y1_a1_b1.mtx: 0 differences
Comparison between serial/sol_x3_y1_a1_b2.mtx and parallel/sol_x3_y1_a1_b2.mtx: 0 differences
Comparison between serial/sol_x3_y1_a1_b3.mtx and parallel/sol_x3_y1_a1_b3.mtx: 0 differences
Comparison between serial/sol_x3_y1_a2_b1.mtx and parallel/sol_x3_y1_a2_b1.mtx: 0 differences
Comparison between serial/sol_x3_y1_a2_b2.mtx and parallel/sol_x3_y1_a2_b2.mtx: 0 differences
Comparison between serial/sol_x3_y1_a2_b3.mtx and parallel/sol_x3_y1_a2_b3.mtx: 0 differences
Comparison between serial/sol_x3_y1_a3_b1.mtx and parallel/sol_x3_y1_a3_b1.mtx: 0 differences
Comparison between serial/sol_x3_y1_a3_b2.mtx and parallel/sol_x3_y1_a3_b2.mtx: 0 differences
Comparison between serial/sol_x3_y1_a3_b3.mtx and parallel/sol_x3_y1_a3_b3.mtx: 0 differences
Comparison between serial/sol_x3_y2_a1_b1.mtx and parallel/sol_x3_y2_a1_b1.mtx: 0 differences
Comparison between serial/sol_x3_y2_a1_b2.mtx and parallel/sol_x3_y2_a1_b2.mtx: 0 differences
Comparison between serial/sol_x3_y2_a1_b3.mtx and parallel/sol_x3_y2_a1_b3.mtx: 0 differences
Comparison between serial/sol_x3_y2_a2_b1.mtx and parallel/sol_x3_y2_a2_b1.mtx: 0 differences
Comparison between serial/sol_x3_y2_a2_b2.mtx and parallel/sol_x3_y2_a2_b2.mtx: 0 differences
Comparison between serial/sol_x3_y2_a2_b3.mtx and parallel/sol_x3_y2_a2_b3.mtx: 0 differences
Comparison between serial/sol_x3_y2_a3_b1.mtx and parallel/sol_x3_y2_a3_b1.mtx: 0 differences
Comparison between serial/sol_x3_y2_a3_b2.mtx and parallel/sol_x3_y2_a3_b2.mtx: 0 differences
Comparison between serial/sol_x3_y2_a3_b3.mtx and parallel/sol_x3_y2_a3_b3.mtx: 0 differences
Comparison between serial/sol_x3_y3_a1_b1.mtx and parallel/sol_x3_y3_a1_b1.mtx: 0 differences
Comparison between serial/sol_x3_y3_a1_b2.mtx and parallel/sol_x3_y3_a1_b2.mtx: 0 differences
Comparison between serial/sol_x3_y3_a1_b3.mtx and parallel/sol_x3_y3_a1_b3.mtx: 0 differences
Comparison between serial/sol_x3_y3_a2_b1.mtx and parallel/sol_x3_y3_a2_b1.mtx: 0 differences
Comparison between serial/sol_x3_y3_a2_b2.mtx and parallel/sol_x3_y3_a2_b2.mtx: 0 differences
Comparison between serial/sol_x3_y3_a2_b3.mtx and parallel/sol_x3_y3_a2_b3.mtx: 0 differences
Comparison between serial/sol_x3_y3_a3_b1.mtx and parallel/sol_x3_y3_a3_b1.mtx: 0 differences
Comparison between serial/sol_x3_y3_a3_b2.mtx and parallel/sol_x3_y3_a3_b2.mtx: 0 differences
Comparison between serial/sol_x3_y3_a3_b3.mtx and parallel/sol_x3_y3_a3_b3.mtx: 0 differences
Comparison between serial/sol_x3_y4_a1_b1.mtx and parallel/sol_x3_y4_a1_b1.mtx: 0 differences
Comparison between serial/sol_x3_y4_a1_b2.mtx and parallel/sol_x3_y4_a1_b2.mtx: 0 differences
Comparison between serial/sol_x3_y4_a1_b3.mtx and parallel/sol_x3_y4_a1_b3.mtx: 0 differences
Comparison between serial/sol_x3_y4_a2_b1.mtx and parallel/sol_x3_y4_a2_b1.mtx: 0 differences
Comparison between serial/sol_x3_y4_a2_b2.mtx and parallel/sol_x3_y4_a2_b2.mtx: 0 differences
Comparison between serial/sol_x3_y4_a2_b3.mtx and parallel/sol_x3_y4_a2_b3.mtx: 0 differences
Comparison between serial/sol_x3_y4_a3_b1.mtx and parallel/sol_x3_y4_a3_b1.mtx: 0 differences
Comparison between serial/sol_x3_y4_a3_b2.mtx and parallel/sol_x3_y4_a3_b2.mtx: 0 differences
Comparison between serial/sol_x3_y4_a3_b3.mtx and parallel/sol_x3_y4_a3_b3.mtx: 0 differences
Comparison between serial/sol_x4_y1_a1_b1.mtx and parallel/sol_x4_y1_a1_b1.mtx: 0 differences
Comparison between serial/sol_x4_y1_a1_b2.mtx and parallel/sol_x4_y1_a1_b2.mtx: 0 differences
Comparison between serial/sol_x4_y1_a1_b3.mtx and parallel/sol_x4_y1_a1_b3.mtx: 0 differences
Comparison between serial/sol_x4_y1_a2_b1.mtx and parallel/sol_x4_y1_a2_b1.mtx: 0 differences
Comparison between serial/sol_x4_y1_a2_b2.mtx and parallel/sol_x4_y1_a2_b2.mtx: 0 differences
Comparison between serial/sol_x4_y1_a2_b3.mtx and parallel/sol_x4_y1_a2_b3.mtx: 0 differences
Comparison between serial/sol_x4_y1_a3_b1.mtx and parallel/sol_x4_y1_a3_b1.mtx: 0 differences
Comparison between serial/sol_x4_y1_a3_b2.mtx and parallel/sol_x4_y1_a3_b2.mtx: 0 differences
Comparison between serial/sol_x4_y1_a3_b3.mtx and parallel/sol_x4_y1_a3_b3.mtx: 0 differences
Comparison between serial/sol_x4_y2_a1_b1.mtx and parallel/sol_x4_y2_a1_b1.mtx: 0 differences
Comparison between serial/sol_x4_y2_a1_b2.mtx and parallel/sol_x4_y2_a1_b2.mtx: 0 differences
Comparison between serial/sol_x4_y2_a1_b3.mtx and parallel/sol_x4_y2_a1_b3.mtx: 0 differences
Comparison between serial/sol_x4_y2_a2_b1.mtx and parallel/sol_x4_y2_a2_b1.mtx: 0 differences
Comparison between serial/sol_x4_y2_a2_b2.mtx and parallel/sol_x4_y2_a2_b2.mtx: 0 differences
Comparison between serial/sol_x4_y2_a2_b3.mtx and parallel/sol_x4_y2_a2_b3.mtx: 0 differences
Comparison between serial/sol_x4_y2_a3_b1.mtx and parallel/sol_x4_y2_a3_b1.mtx: 0 differences
Comparison between serial/sol_x4_y2_a3_b2.mtx and parallel/sol_x4_y2_a3_b2.mtx: 0 differences
Comparison between serial/sol_x4_y2_a3_b3.mtx and parallel/sol_x4_y2_a3_b3.mtx: 0 differences
Comparison between serial/sol_x4_y3_a1_b1.mtx and parallel/sol_x4_y3_a1_b1.mtx: 0 differences
Comparison between serial/sol_x4_y3_a1_b2.mtx and parallel/sol_x4_y3_a1_b2.mtx: 0 differences
Comparison between serial/sol_x4_y3_a1_b3.mtx and parallel/sol_x4_y3_a1_b3.mtx: 0 differences
Comparison between serial/sol_x4_y3_a2_b1.mtx and parallel/sol_x4_y3_a2_b1.mtx: 0 differences
Comparison between serial/sol_x4_y3_a2_b2.mtx and parallel/sol_x4_y3_a2_b2.mtx: 0 differences
Comparison between serial/sol_x4_y3_a2_b3.mtx and parallel/sol_x4_y3_a2_b3.mtx: 0 differences
Comparison between serial/sol_x4_y3_a3_b1.mtx and parallel/sol_x4_y3_a3_b1.mtx: 0 differences
Comparison between serial/sol_x4_y3_a3_b2.mtx and parallel/sol_x4_y3_a3_b2.mtx: 0 differences
Comparison between serial/sol_x4_y3_a3_b3.mtx and parallel/sol_x4_y3_a3_b3.mtx: 0 differences
Comparison between serial/sol_x4_y4_a1_b1.mtx and parallel/sol_x4_y4_a1_b1.mtx: 0 differences
Comparison between serial/sol_x4_y4_a1_b2.mtx and parallel/sol_x4_y4_a1_b2.mtx: 0 differences
Comparison between serial/sol_x4_y4_a1_b3.mtx and parallel/sol_x4_y4_a1_b3.mtx: 0 differences
Comparison between serial/sol_x4_y4_a2_b1.mtx and parallel/sol_x4_y4_a2_b1.mtx: 0 differences
Comparison between serial/sol_x4_y4_a2_b2.mtx and parallel/sol_x4_y4_a2_b2.mtx: 0 differences
Comparison between serial/sol_x4_y4_a2_b3.mtx and parallel/sol_x4_y4_a2_b3.mtx: 0 differences
Comparison between serial/sol_x4_y4_a3_b1.mtx and parallel/sol_x4_y4_a3_b1.mtx: 0 differences
Comparison between serial/sol_x4_y4_a3_b2.mtx and parallel/sol_x4_y4_a3_b2.mtx: 0 differences
Comparison between serial/sol_x4_y4_a3_b3.mtx and parallel/sol_x4_y4_a3_b3.mtx: 0 differences

@ -0,0 +1,8 @@
# Source files of the psb_gedot unit test, listed relative to this directory.
set(PSB_gedot_source_files
    psb_gedot_test.f90
    gedot.f90
)
# Prefix each source with this directory's absolute path and accumulate the
# result in the caller-visible gedot_source_files list.
# (list(TRANSFORM ... PREPEND), CMake >= 3.12, replaces the manual foreach.)
list(TRANSFORM PSB_gedot_source_files
     PREPEND "${CMAKE_CURRENT_LIST_DIR}/"
     OUTPUT_VARIABLE _psb_gedot_abs_sources)
list(APPEND gedot_source_files ${_psb_gedot_abs_sources})
unset(_psb_gedot_abs_sources)

@ -0,0 +1,65 @@
# Introduction
This is a directory developed by Luca Pepè Sciarria and Simone Staccone from Tor Vergata University to start to create some unit tests for PSBLAS 3.9, in particular for the ```psb_gedot``` routine.
## Getting started
Steps to reproduce the tests:
- Compile the code using ``` make ``` (Optional)
- Launch the script ./autotest.sh, or with source ./autotest.sh if you want to add modules to the .bashrc file permanently.
- Check the output log file psblas_gedot_test.log to collect results
NOTE: If the code is changed and a new compilation is needed to show the changes, the autotest.sh script isn't aware of this scenario, therefore it is necessary to manually recompile the code.
## Test Suite
### Overall Analysis
The routine under test is ```psb_gedot```. Its signature is:
```fortran
psb_gedot(x, y, desc_a, info [,global])
```
In the comparison, 7 significant digits means having a representation like $0.d_1 d_2 d_3 \dots d_7 \times 10^{k}$.
### Parameters Values
**x** vectors are located in the vectors/ directory. They are generated randomly using the same seed and then saved on different files based on their characteristics. The size of the vector is chosen according to the size of the matrix column space considered for the single test instance.
|Vector|File Name|Coefficients|Coefficients Description|
|:-:|:-:|:-:|:-:|
|$x_1$|x1.txt|$x_i> 0, \forall i$|Positive coefficients|
|$x_2$|x2.txt|$x_i < 0, \forall i$|Negative coefficients
|$x_3$|x3.txt|$x_i \ne 0, \forall i$|Random coefficients
|$x_4$|x4.txt|$x_i = 0, \forall i$|Null coefficients
**y** vectors are located in the vectors/ directory. They are generated randomly using the same seed and then saved on different files based on their characteristics. The size of the vector is chosen according to the size of the matrix row space considered for the single test instance.
|Vector|File Name|Coefficients|Coefficients Description|
|:-:|:-:|:-:|:-:|
|$y_1$|y1.txt|$y_i> 0, \forall i$|Positive coefficients|
|$y_2$|y2.txt|$y_i < 0, \forall i$|Negative coefficients
|$y_3$|y3.txt|$y_i \ne 0, \forall i$|Random coefficients
|$y_4$|y4.txt|$y_i = 0, \forall i$|Null coefficients
**$\alpha$**
|$\alpha$|Value|Coefficients Description|
|:-:|:-:|:-:|
|$\alpha_1$|1.0|Positive value|
|$\alpha_2$|-1.0|Negative value|
|$\alpha_3$|0.0|Null value|
**$\beta$**
|$\beta$|Value|Coefficients Description|
|:-:|:-:|:-:|
|$\beta_1$|1.0|Positive value|
|$\beta_2$|-1.0|Negative value|
|$\beta_3$|0.0|Null value|
## Output
The output files generated by the test are automatically compared by the autotest.sh script, but if it is needed to manually run the test, here is the naming convention used.
The results of the computation will be saved on different files based on the instance of the test considered. In particular, the naming convention formats the output file as sol_x#_y#_a#_b#.mtx, where each # is a number chosen w.r.t. the test instance. (Ex. sol_x1_y1_a1_b1.mtx is the solution computed using the first x vector file, the first y vector file, alpha = 1.0 and beta = 1.0). Moreover, the files will be saved in the serial/ directory if the program is launched using 1 process, or in the parallel/ directory if the program is launched with more than one process.
## TODO
- Use also global in different ways
- Add computation with broken descriptor and catch the error result
- Test using complex data ($dot \leftarrow x^H \cdot y$)
- Try multiple distributions
- Fix result_check handling, it should not be an entire vector

@ -0,0 +1,58 @@
#!/bin/bash
#
# Driver for the psb_gedot test: (re)builds the test executable if missing,
# runs it with $(nproc) MPI processes, then diffs every serial/ result file
# against its parallel/ counterpart, appending the report to the log file.

# Variables definition
dir1="serial"
dir2="parallel"
log_file_name="psblas_gedot_test.log"
num_procs=$(nproc)

# Define color codes
GREEN="\033[0;32m"
RED="\033[0;31m"
BLUE="\033[0;34m"
YELLOW="\033[33m"
RESET="\033[0m"

# Check if the executable ELF file exists; build it if missing.
if [ ! -f "./runs/psb_gedot_test" ]; then
    echo -e "${YELLOW}[WARNING] Executable not found. Running make...${RESET}"
    make
    # Bug fix: this check previously tested ./runs/psb_geaxpby_test (a
    # copy-paste leftover from another test), so a failed build of
    # psb_gedot_test went undetected. Also bail out instead of continuing
    # without an executable.
    if [ ! -f "./runs/psb_gedot_test" ]; then
        echo -e "${RED}[ERROR] Failed to create executable. Check make command.${RESET}"
        exit 1
    fi
else
    echo -e "${BLUE}[INFO]\t The executable already exists. Skipping the make process.${RESET}"
fi

# Execute tests and save results
echo -e "${BLUE}[INFO]\t Running the PSBLAS psb_gedot test...${RESET}"
echo ""
echo -e "${BLUE}[INFO]\t Starting single process computation${RESET}"
# NOTE(review): the serial run is disabled, yet the messages around it claim
# it ran; presumably serial/ results come from an earlier run -- confirm.
#mpirun -np 1 ./runs/psb_gedot_test
echo -e "${BLUE}[INFO]\t Single process computation terminated correctly${RESET}"
echo ""
echo -e "${BLUE}[INFO]\t Starting $num_procs processes computation${RESET}"
mpirun -np "$num_procs" ./runs/psb_gedot_test
echo -e "${BLUE}[INFO]\t Multiple processes computation terminated correctly${RESET}"
echo "" >> "${log_file_name}"

# Compare every serial result file with its parallel counterpart.
for file1 in "$dir1"/*; do
    filename=$(basename "$file1")   # Extract the filename
    file2="$dir2/$filename"         # Construct the path for the second directory

    # Check if the file exists in the second directory
    if [ -f "$file2" ]; then
        diff_count=$(diff "$file1" "$file2" | wc -l)   # Compare the files
        echo "Comparison between $file1 and $file2: $diff_count differences" >> "${log_file_name}"
    else
        echo -e "${RED}[ERROR] File $filename does not exist in $dir2${RESET}"
    fi
done

echo -e "${BLUE}[INFO]\t PSBLAS psb_gedot test succesfully completed.${RESET}"

@ -0,0 +1,178 @@
!> Driver program for the psb_gedot unit tests.
!!
!! For every combination of the four x vector files and four y vector files
!! it first generates the single-precision gedot result files, then re-runs
!! the same combinations as a double-precision check.  Progress is logged by
!! the root process to psblas_gedot_test.log (fresh file when np == 1,
!! appended otherwise).
program main
  use psb_gedot_test
  use psb_base_mod
  implicit none
  ! MPI rank and number of processes
  integer(psb_ipk_) :: my_rank, np
  ! PSBLAS communication context
  type(psb_ctxt_type) :: ctxt
  ! Input vector file names and the alpha/beta coefficient sets.
  ! NOTE(review): alpha and beta are never assigned here; only their
  ! size() (3 each) contributes to tests_number (4*4*3*3 = 144) --
  ! presumably the kernel iterates over alpha/beta internally; confirm.
  character(len=64) :: x(4),y(4)
  real(psb_dpk_) :: alpha(3), beta(3)
  integer(psb_ipk_) :: arr_size
  integer(psb_ipk_) :: tests_number, count
  ! loop indices (unused k,h,l removed)
  integer(psb_ipk_) :: i,j
  integer(psb_ipk_) :: info, ret, unit
  ! date_and_time() output buffers
  character(len=8) :: date ! YYYYMMDD
  character(len=10) :: time ! HHMMSS.sss
  character(len=5) :: zones ! Time zone
  integer :: values(8)
  ! name of the result file produced by the last kernel/check call
  character(len=:), allocatable :: output_file_name

  ! Initialize parameters
  x(1) = "vectors/x1.mtx"
  x(2) = "vectors/x2.mtx"
  x(3) = "vectors/x3.mtx"
  x(4) = "vectors/x4.mtx"
  y(1) = "vectors/y1.mtx"
  y(2) = "vectors/y2.mtx"
  y(3) = "vectors/y3.mtx"
  y(4) = "vectors/y4.mtx"
  arr_size = 10000
  tests_number = size(x) * size(y) * size(alpha) * size(beta)
  count = 0

  call psb_init(ctxt)
  call psb_info(ctxt,my_rank,np)

  if(my_rank == psb_root_) then
    ! Setup logger output: a serial run starts a fresh log, a parallel run
    ! appends to the log produced by the preceding serial run.
    if(np == 1) then
      open(newunit=unit, file='psblas_gedot_test.log', status='replace', action='write', iostat=info)
    else
      open(newunit=unit, file='psblas_gedot_test.log', status='old', action='write', position='append', iostat=info)
    end if
    if (info /= 0) then
      print *, 'Error opening output file.'
      print *, "I/O Status Code:", info
      stop
    end if
    psb_out_unit = unit
    write(psb_out_unit,'(A,A)') 'Welcome to PSBLAS version: ',psb_version_string_
    write(psb_out_unit,'(A)') 'This is the psb_gedot_test sample program'
    write(psb_out_unit,'(A,I0)') 'Number of processes used in this computation: ', np
    write(psb_out_unit,'(A)') ''
    call generate_vectors(arr_size)
  end if
  call psb_bcast(ctxt,psb_out_unit)
  call psb_barrier(ctxt)

  ! --- Single precision result generation ---------------------------------
  if(my_rank == psb_root_) write(*,'(A)') "[INFO] Starting single precision computation..."
  do i=1,size(x)
    do j=1,size(y)
      call psb_gedot_kernel(x_file=x(i), y_file=y(j), arr_size = arr_size, ctxt = ctxt, &
           & ret = ret, output_file_name = output_file_name)
      if(my_rank == psb_root_) then
        count = count + 1
        call date_and_time(date, time, zones, values)
        if(ret /= -1) then
          ! Success formatted output
          write(psb_out_unit,'("[", I4.4,"-",I2.2,"-",I2.2," ",I2.2,":",I2.2,":",I2.2,"] ",&
               & A,A,A,I0,A,I0,T110,A)') &
               & values(1), values(2), values(3), values(5), values(6), values(7), &
               & "Generation gedot single precision result file ", &
               & output_file_name , ' ', count , "/", tests_number, "[OK]"
        else
          ! Fail formatted output
          write(psb_out_unit,'("[", I4.4,"-",I2.2,"-",I2.2," ",I2.2,":",I2.2,":",I2.2,"] ",&
               & A,A,A,I0,A,I0,T110,A)') &
               & values(1), values(2), values(3), values(5), values(6), values(7), &
               & "Generation gedot single precision result file ", &
               & output_file_name , ' ', count , "/", tests_number, "[FAIL]"
          ! NOTE(review): only the root process jumps to the handler; the
          ! other ranks proceed to psb_barrier below and may deadlock on
          ! failure -- TODO confirm and handle the error collectively.
          goto 9998
        end if
      end if
      call psb_barrier(ctxt)
    end do
  end do
  if(my_rank == psb_root_) write(*,'(A)') "[INFO] Single precision computation completed succesfully!"

  if(my_rank == psb_root_) then
    write(psb_out_unit, *) ''
    count = 0
  end if

  ! --- Double precision check ---------------------------------------------
  if(my_rank == psb_root_) write(*,'(A)') "[INFO] Starting double precision check..."
  call psb_barrier(ctxt)
  do i=1,size(x)
    do j=1,size(y)
      call psb_gedot_check(x_file=x(i), y_file=y(j), &
           & arr_size = arr_size, ctxt = ctxt, ret = ret, output_file_name = output_file_name)
      if(my_rank == psb_root_) then
        count = count + 1
        call date_and_time(date, time, zones, values)
        if(ret == 0) then
          ! Success formatted output
          write(psb_out_unit,'("[", I4.4,"-",I2.2,"-",I2.2," ",I2.2,":",I2.2,":",I2.2,"] ",&
               & A,A,A,I0,A,I0,T110,A)') &
               & values(1), values(2), values(3), values(5), values(6), values(7), &
               & "Double precision check on file ", &
               & output_file_name , ' ', count , "/", tests_number, "[OK]"
        else
          ! Fail formatted output
          write(psb_out_unit,'("[", I4.4,"-",I2.2,"-",I2.2," ",I2.2,":",I2.2,":",I2.2,"] ",&
               & A,A,A,I0,A,I0,T110,A)') &
               & values(1), values(2), values(3), values(5), values(6), values(7), &
               & "Double precision check on file ", &
               & output_file_name , ' ', count , "/", tests_number, "[FAIL]"
          write(psb_out_unit,'(A,I0)') "[ERROR] Error at element ", abs(ret)
          goto 9999
        end if
      end if
      call psb_barrier(ctxt)
    end do
  end do
  if(my_rank == psb_root_) then
    ! Typo fix in user-facing message: "Duble" -> "Double".
    write(*,'(A)') "[INFO] Double precision check completed succesfully!"
    close(unit)
  end if
  call psb_exit(ctxt)
  ! STOP instead of RETURN: RETURN is nonstandard in a main program.
  stop

  ! --- Error handlers ------------------------------------------------------
9998 continue
  if(my_rank == psb_root_) then
    close(unit)
    write(*,'(A,I0,A,I0,A)') "[ERROR] Error in gedot single precision computation ", &
         & count, "/", tests_number, " see log file for details"
  end if
  ! Bug fix: terminate here.  Previously control fell through into the 9999
  ! handler, which closed the (already closed) unit again and printed a
  ! spurious double-precision error message.
  call psb_exit(ctxt)
  stop

9999 continue
  if(my_rank == psb_root_) then
    close(unit)
    write(*,'(A,I0,A,I0,A)') "[ERROR] Error in gedot double precision check ", &
         & count, "/", tests_number, " see log file for details"
  end if
  call psb_exit(ctxt)
  stop
end program main

@ -0,0 +1,42 @@
INSTALLDIR=../../..
INCDIR=$(INSTALLDIR)/include/
MODDIR=$(INSTALLDIR)/modules/
include $(INCDIR)/Make.inc.psblas
#
# Libraries used
#
LIBDIR = $(INSTALLDIR)/lib/
PSBLAS_LIB = -L$(LIBDIR) -lpsb_util -lpsb_base
LDLIBS = $(PSBLDLIBS)
FINCLUDES=$(FMFLAG)$(MODDIR) $(FMFLAG).
EXEDIR=./runs
# ANSI colour escape sequences for the progress messages below
GREEN=\033[0;32m
RED=\033[0;31m
BLUE=\033[0;34m
YELLOW=\033[33m
END_COLOUR=\033[0m
all: runsd psb_gedot_test
	@printf "$(GREEN)[INFO]\t Compilation success!$(END_COLOUR)\n"
runsd:
	@(if test ! -d runs ; then mkdir runs; fi)
	@printf "$(BLUE)[INFO]\t Build directory $(EXEDIR) correctly initialized$(END_COLOUR)\n"
psb_gedot_test:
	@$(FLINK) $(LOPT) psb_gedot_test.f90 gedot.f90 -o psb_gedot_test -I$(MODDIR) -I. $(PSBLAS_LIB) $(LDLIBS)
	@mv psb_gedot_test $(EXEDIR)
	@printf "$(BLUE)[INFO]\t Testing files generated correctly$(END_COLOUR)\n"
clean:
	@rm -f $(OBJS) *.mod $(EXEDIR)/psb_gedot_test
	# BUGFIX: was `*$(.mod)`; `.mod` is not a defined variable, so it expanded
	# to an empty string and the glob became a bare `*`, deleting every file
	# in the directory on `make clean`.
.PHONY: all runsd clean

@ -0,0 +1,561 @@
!> Test program for y = x^T * y or y = x^H * y psb_gedot routine
!! Check the README.md to see all details about the tests.
!!
!! Authors: Luca Pepé Sciarria, Staccone Simone (Tor Vergata University)
!!
!! psb_gedot(x, y, desc_a, info [,global])
!!
!! Type: Synchronous.
!!
!! ======================================
!! | Data type | Precision |
!! ======================================
!! | psb_spk_ | Short Precision Real |
!! | psb_dpk_ | Long Precision Real |
!! | psb_cpk_ | Short Precision Complex|
!! | psb_zpk_ | Long Precision Complex |
!! ======================================
!! Table 1: Data types
!!
!! ROUTINE PARAMETERS
!!
!! Input:
!!
!! x Description: the local portion of global dense matrix x.
!! Scope: local
!! Type: required
!! Intent: in
!! Specified as: a rank one or two array or an object of type psb_T_vect_type
!! containing numbers of type specified in Table 1. The rank of x must be
!! the same of y.
!!
!! y Description: the local portion of the global dense matrix y.
!! Scope: local
!! Type: required
!! Intent: inout
!! Specified as: a rank one or two array or an object of type psb_T_vect_type
!! containing numbers of the type indicated in Table 1. The rank of y must
!! be the same of x.
!!
!! desc_a Description: contains data structures for communications.
!! Scope: local
!! Type: required
!! Intent: in
!! Specified as: an object of type psb desc type.
!!
!! global Description: Specifies whether the computation should include the global
!! reduction across all processes.
!! Scope: global
!! Type: optional
!! Intent: in
!! Specified as: a logical scalar.
!! Default: global=.true.
!!
!! Output:
!!
!! Function value the dot product of vectors x and y.
!! Scope: global unless the optional variable global=.false.
!! has been specified
!! Specified as: a number of the data type indicated in Table 1.
!!
!! info Description: Error code.
!! Scope: local
!! Type: required
!! Intent: out
!! Specified as: An integer value; 0 means no error has been detected.
!!
!!
!! NOTES
!!
!! 1. The computation of a global result requires a global communication, which
!! entails a significant overhead. It may be necessary and/or advisable to
!! compute multiple dot products at the same time; in this case, it is possible
!! to improve the runtime efficiency by using the following scheme:
!!
!! vres(1) = psb_gedot(x1,y1,desc_a,info,global=.false.)
!! vres(2) = psb_gedot(x2,y2,desc_a,info,global=.false.)
!! vres(3) = psb_gedot(x3,y3,desc_a,info,global=.false.)
!! call psb_sum(ctxt,vres(1:3))
!!
!! In this way the global communication, which for small sizes is a latency-
!! bound operation, is invoked only once.
!!
module psb_gedot_test
use psb_base_mod
use psb_util_mod
contains
!> @brief Execute psb_gedot in single precision and save the scalar
!! result on file: under serial/ when running on one process, under
!! parallel/ otherwise.
!!
!! @param[in]  x_file           MatrixMarket file holding vector x
!! @param[in]  y_file           MatrixMarket file holding vector y
!! @param[in]  arr_size         global length of the vectors
!! @param[in]  ctxt             PSBLAS communication context
!! @param[out] ret              0 on success, -1 on failure
!! @param[out] output_file_name file the dot-product result was written to
subroutine psb_gedot_kernel(x_file, y_file, arr_size, ctxt, ret, output_file_name)
  implicit none
  ! input parameters
  character(len = *), intent(in) :: x_file, y_file
  integer(psb_ipk_), intent(in) :: arr_size
  type(psb_ctxt_type), intent(in) :: ctxt
  ! output parameters
  integer(psb_ipk_), intent(out) :: ret
  character(len=:), allocatable, intent(out) :: output_file_name
  ! vectors
  type(psb_s_vect_type) :: x, y
  ! matrix descriptor data structure
  type(psb_desc_type) :: desc_a
  ! process identity within the communication context
  integer(psb_ipk_) :: my_rank, np, info
  ! replicated copies of the vectors, outside PSBLAS data structures
  real(psb_spk_), allocatable :: x_global(:), y_global(:)
  ! others
  logical :: exists
  real(psb_spk_) :: result(1)

  info = psb_success_
  call psb_info(ctxt,my_rank,np)
  if (my_rank < 0) then
    ! This should not happen, but just in case
    call psb_error(ctxt)
  endif
  ! Only the root process reads the input vectors from file
  if(my_rank == psb_root_) then
    allocate(x_global(arr_size))
    allocate(y_global(arr_size))
    call mm_array_read(x_global,info,filename=x_file)
    call mm_array_read(y_global,info,filename=y_file)
  end if
  ! Allocate descriptor as if it was a block rows distribution
  call psb_cdall(ctxt, desc_a, info,nl=arr_size/np)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error allocating desc_a data structure"
    goto 9999
  end if
  call psb_cdasb(desc_a, info)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error assembling desc_a data structure"
    goto 9999
  end if
  call psb_geall(x,desc_a,info)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error allocating x data structure"
    goto 9999
  end if
  ! Populate x class using data from x_global vector
  call psb_scatter(x_global,x,desc_a,info,root=psb_root_)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error in psb_scatter to populate x data structure"
    goto 9999
  end if
  call psb_geall(y,desc_a,info)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error allocating y data structure"
    goto 9999
  end if
  ! Populate y class using data from y_global vector
  call psb_scatter(y_global,y,desc_a,info,root=psb_root_)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error in psb_scatter to populate y data structure"
    goto 9999
  end if
  ! result = x^T * y (global dot product, single precision)
  result(1) = psb_gedot(x,y,desc_a,info)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error in psb_gedot routine"
    goto 9999
  end if
  ! Choose the output directory from the run configuration
  if(np == 1) then
    ! Check if output directory exists
    ! (inquire on a directory name is compiler-dependent -- TODO confirm portability)
    inquire(file='serial/', exist=exists)
    if (.not.exists) then
      call system('mkdir serial/')
    end if
    output_file_name = "serial/"
  else
    ! Check if output directory exists
    inquire(file='parallel/', exist=exists)
    if (.not.exists) then
      call system('mkdir parallel/')
    end if
    output_file_name = "parallel/"
  end if
  ! Build the result file name from the vector file names
  ! (assumes x_file/y_file look like vectors/xN.mtx -- TODO confirm)
  output_file_name = output_file_name // "sol_" // x_file(9:10) // "_" // y_file(9:10) // ".mtx"
  ! Make the root process be the one that saves everything on file
  if(my_rank == psb_root_) then
    call mm_array_write(result,"Result of the scalar product computation",info,filename=output_file_name)
  end if
  ! Deallocate
  call psb_gefree(x, desc_a,info)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error in vector x free routine"
    goto 9999
  end if
  call psb_gefree(y, desc_a,info)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error in vector y free routine"
    goto 9999
  end if
  call psb_cdfree(desc_a,info)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error in matrix descriptor free routine"
    goto 9999
  end if
  ! Use psb_root_ here too: these arrays were allocated only on the root
  if(my_rank == psb_root_) then
    deallocate(x_global)
    deallocate(y_global)
  end if
  ! BUGFIX: ret is intent(out) but was never assigned on the success
  ! path, leaving the caller with an undefined value.
  ret = 0
  return
  ! Error handling
9999 ret = -1
  stop
end subroutine
!> @brief Execute psb_gedot in double precision and compare the result
!! with the single precision one previously saved on file by
!! psb_gedot_kernel.
!!
!! @param[in]  x_file           MatrixMarket file holding vector x
!! @param[in]  y_file           MatrixMarket file holding vector y
!! @param[in]  arr_size         global length of the vectors
!! @param[in]  ctxt             PSBLAS communication context
!! @param[out] ret              0 when the two results agree within the
!!                              tolerance, -1 otherwise
!! @param[out] output_file_name file the single precision result was read from
subroutine psb_gedot_check(x_file, y_file, arr_size, ctxt, ret, output_file_name)
  implicit none
  ! input parameters
  character(len = *), intent(in) :: x_file, y_file
  integer(psb_ipk_), intent(in) :: arr_size
  type(psb_ctxt_type), intent(in) :: ctxt
  ! output parameters
  integer(psb_ipk_), intent(out) :: ret
  character(len=:), allocatable, intent(out) :: output_file_name
  ! vectors
  type(psb_d_vect_type) :: x, y
  type(psb_s_vect_type) :: result_check
  ! matrix descriptor data structure
  type(psb_desc_type) :: desc_a
  ! process identity within the communication context
  integer(psb_ipk_) :: my_rank, np, info
  ! replicated copies of the vectors, outside PSBLAS data structures
  real(psb_dpk_), allocatable :: x_global(:), y_global(:)
  ! others
  logical :: exists
  real(psb_dpk_) :: result(1)

  info = psb_success_
  call psb_info(ctxt,my_rank,np)
  if (my_rank < 0) then
    ! This should not happen, but just in case
    call psb_error(ctxt)
  endif
  ! Only the root process reads the input vectors from file
  if(my_rank == psb_root_) then
    allocate(x_global(arr_size))
    allocate(y_global(arr_size))
    call mm_array_read(x_global,info,filename=x_file)
    call mm_array_read(y_global,info,filename=y_file)
  end if
  ! Allocate descriptor as if it was a block rows distribution.
  ! BUGFIX: was hard-coded as nl=10000/np, which silently broke any
  ! run with arr_size /= 10000; use arr_size like psb_gedot_kernel does.
  call psb_cdall(ctxt, desc_a, info,nl=arr_size/np)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error allocating desc_a data structure"
    goto 9999
  end if
  call psb_cdasb(desc_a, info)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error assembling desc_a data structure"
    goto 9999
  end if
  call psb_geall(x,desc_a,info)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error allocating x data structure"
    goto 9999
  end if
  ! Populate x class using data from x_global vector
  call psb_scatter(x_global,x,desc_a,info,root=psb_root_)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error in psb_scatter to populate x data structure"
    goto 9999
  end if
  call psb_geall(y,desc_a,info)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error allocating y data structure"
    goto 9999
  end if
  ! Populate y class using data from y_global vector
  call psb_scatter(y_global,y,desc_a,info,root=psb_root_)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error in psb_scatter to populate y data structure"
    goto 9999
  end if
  call psb_geall(result_check,desc_a,info)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error allocating y_check data structure"
    goto 9999
  end if
  ! result = x^T * y (global dot product, double precision)
  result(1) = psb_gedot(x,y,desc_a,info)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error in psb_gedot routine"
    goto 9999
  end if
  if(my_rank == psb_root_) then
    ! The root process reads the reference result saved by the kernel
    if(np == 1) then
      ! Check if output directory exists
      inquire(file='serial/', exist=exists)
      if(.not.exists) then
        write(psb_out_unit,'(A)') "Error in psb_gedot_check routine, no single precision result is saved on file"
        goto 9999
      end if
      output_file_name = "serial/"
    else
      ! Check if output directory exists
      inquire(file='parallel/', exist=exists)
      if(.not.exists) then
        write(psb_out_unit,'(A)') "Error in psb_gedot_check routine, no single precision result is saved on file"
        goto 9999
      end if
      output_file_name = "parallel/"
    end if
    ! Rebuild the file name written by psb_gedot_kernel
    ! (assumes x_file/y_file look like vectors/xN.mtx -- TODO confirm)
    output_file_name = output_file_name // "sol_" // x_file(9:10) // "_" // y_file(9:10) // ".mtx"
    ! Read single precision result from file
    call mm_array_read(result_check,info,filename=output_file_name)
    if(info /= psb_success_) then
      write(psb_out_unit,'(A)') "Error in mm_array_read for y_check data structure"
      goto 9999
    end if
    ! Accept the single precision value if the difference stays within the
    ! classic inner-product forward error bound n*u / (1 - n*u), with
    ! u = 1.19e-07 = 2^-23 (single precision unit interval;
    ! 5.96e-08 = 2^-24 is the unit roundoff).
    if(abs(result(1) - result_check%v%v(1)) > (arr_size * 1.19D-07) / (done-arr_size * 1.19D-07)) then
      ! NOTE(review): only the root returns here, while the other ranks
      ! fall through to the barrier and free their structures; verify the
      ! caller's own barrier keeps the ranks consistent on this path.
      ret = -1
      return
    end if
  end if
  call psb_barrier(ctxt)
  ! Deallocate
  call psb_gefree(x, desc_a,info)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error in vector x free routine"
    goto 9999
  end if
  call psb_gefree(y, desc_a,info)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error in vector y free routine"
    goto 9999
  end if
  call psb_gefree(result_check, desc_a,info)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error in vector y_check free routine"
    goto 9999
  end if
  call psb_cdfree(desc_a,info)
  if(info /= psb_success_) then
    write(psb_out_unit,'(A)') "Error in matrix descriptor free routine"
    goto 9999
  end if
  ! Use psb_root_ here too: these arrays were allocated only on the root
  if(my_rank == psb_root_) then
    deallocate(x_global)
    deallocate(y_global)
  end if
  ret = 0
  return
  ! Error handling
9999 ret = -1
  stop
end subroutine
!> @brief Scale a double precision value into [0,1) by shifting the
!! decimal point left past every digit of its integer part
!! (e.g. 123.45 -> 0.12345). The sign is discarded.
!! @param[in,out] n value to normalise, replaced in place
subroutine shift_decimal_double(n)
  implicit none
  real(psb_dpk_),intent(inout) :: n
  integer :: n_digits
  character(len=20) :: int_str
  ! Convert the absolute value of the integer part to string
  write(int_str, '(I0)') int(abs(n))
  ! Count number of digits
  n_digits = len_trim(adjustl(int_str))
  ! Shift the decimal point.
  ! BUGFIX: the power of ten was computed as 10.0**n_digits, i.e. in
  ! single precision; 10**k is not exactly representable in single
  ! precision for k >= 8, losing accuracy in a double precision routine.
  n = abs(n) / 10.0_psb_dpk_**n_digits
end subroutine
!> @brief Scale a single precision value into [0,1) by shifting the
!! decimal point left past every digit of its integer part
!! (e.g. 123.45 -> 0.12345). The sign is discarded.
!! @param[in,out] n value to normalise, replaced in place
subroutine shift_decimal_single(n)
  implicit none
  real(psb_spk_),intent(inout) :: n
  character(len=20) :: digits_buf
  integer :: digit_count
  ! Render the integer part without padding (I0 edit descriptor),
  ! then the digit count is simply the trimmed length
  write(digits_buf, '(I0)') int(abs(n))
  digit_count = len_trim(adjustl(digits_buf))
  ! Divide away the integer digits so the magnitude lands in [0,1)
  n = abs(n) / 10.0**digit_count
end subroutine
!> @brief Randomly generate the x and y test vectors and save four
!! variants of each (all-positive, all-negative, mixed-sign, all-zero)
!! as MatrixMarket files x1..x4 / y1..y4 under vectors/.
!! @param[in] arr_size length of the generated vectors
subroutine generate_vectors(arr_size)
  implicit none
  integer(psb_ipk_), intent(in) :: arr_size
  real(psb_dpk_), allocatable :: x(:), y(:)
  integer(psb_ipk_) :: info
  logical :: exists
  ! Make sure the destination directory is there
  inquire(file='vectors/', exist=exists)
  if (.not.exists) then
    call system('mkdir vectors/')
  end if
  allocate(x(arr_size))
  allocate(y(arr_size))
  ! Repeatable seed: every run regenerates exactly the same vectors
  call random_init(repeatable=.true.,image_distinct=.true.)
  call random_number(x)
  call random_number(y)
  ! Variant 1: random_number yields values in [0,1) -- all non-negative
  call mm_array_write(x,"Positive vector",info,filename="vectors/x1.mtx")
  call mm_array_write(y,"Positive vector",info,filename="vectors/y1.mtx")
  ! Variant 2: flip every entry's sign -- all non-positive
  x = -x
  y = -y
  call mm_array_write(x,"Negative vector",info,filename="vectors/x2.mtx")
  call mm_array_write(y,"Negative vector",info,filename="vectors/y2.mtx")
  ! Variant 3: restore the positive values, then shift down by 0.5
  ! so the entries land in (-0.5, 0.5) with mixed signs
  x = -x
  x = x - 0.5
  y = -y
  y = y - 0.5
  call mm_array_write(x,"Random vector",info,filename="vectors/x3.mtx")
  call mm_array_write(y,"Random vector",info,filename="vectors/y3.mtx")
  ! Variant 4: all zeros
  x = 0
  y = 0
  call mm_array_write(x,"Null vector",info,filename="vectors/x4.mtx")
  call mm_array_write(y,"Null vector",info,filename="vectors/y4.mtx")
  deallocate(x)
  deallocate(y)
end subroutine
end module psb_gedot_test

@ -0,0 +1,415 @@
Welcome to PSBLAS version: 3.9.0
This is the psb_gedot_test sample program
Number of processes used in this computation: 1
[2025-06-12 13:27:44] Generation gedot single precision result file serial/sol_x1_y1.mtx 1/144 [OK]
[2025-06-12 13:27:44] Generation gedot single precision result file serial/sol_x1_y2.mtx 2/144 [OK]
[2025-06-12 13:27:44] Generation gedot single precision result file serial/sol_x1_y3.mtx 3/144 [OK]
[2025-06-12 13:27:44] Generation gedot single precision result file serial/sol_x1_y4.mtx 4/144 [OK]
[2025-06-12 13:27:44] Generation gedot single precision result file serial/sol_x2_y1.mtx 5/144 [OK]
[2025-06-12 13:27:44] Generation gedot single precision result file serial/sol_x2_y2.mtx 6/144 [OK]
[2025-06-12 13:27:44] Generation gedot single precision result file serial/sol_x2_y3.mtx 7/144 [OK]
[2025-06-12 13:27:44] Generation gedot single precision result file serial/sol_x2_y4.mtx 8/144 [OK]
[2025-06-12 13:27:44] Generation gedot single precision result file serial/sol_x3_y1.mtx 9/144 [OK]
[2025-06-12 13:27:44] Generation gedot single precision result file serial/sol_x3_y2.mtx 10/144 [OK]
[2025-06-12 13:27:44] Generation gedot single precision result file serial/sol_x3_y3.mtx 11/144 [OK]
[2025-06-12 13:27:44] Generation gedot single precision result file serial/sol_x3_y4.mtx 12/144 [OK]
[2025-06-12 13:27:44] Generation gedot single precision result file serial/sol_x4_y1.mtx 13/144 [OK]
[2025-06-12 13:27:44] Generation gedot single precision result file serial/sol_x4_y2.mtx 14/144 [OK]
[2025-06-12 13:27:44] Generation gedot single precision result file serial/sol_x4_y3.mtx 15/144 [OK]
[2025-06-12 13:27:44] Generation gedot single precision result file serial/sol_x4_y4.mtx 16/144 [OK]
[2025-06-12 13:27:44] Double precision check on file serial/sol_x1_y1.mtx 1/144 [OK]
[2025-06-12 13:27:44] Double precision check on file serial/sol_x1_y2.mtx 2/144 [OK]
[2025-06-12 13:27:44] Double precision check on file serial/sol_x1_y3.mtx 3/144 [OK]
[2025-06-12 13:27:44] Double precision check on file serial/sol_x1_y4.mtx 4/144 [OK]
[2025-06-12 13:27:44] Double precision check on file serial/sol_x2_y1.mtx 5/144 [OK]
[2025-06-12 13:27:44] Double precision check on file serial/sol_x2_y2.mtx 6/144 [OK]
[2025-06-12 13:27:44] Double precision check on file serial/sol_x2_y3.mtx 7/144 [OK]
[2025-06-12 13:27:44] Double precision check on file serial/sol_x2_y4.mtx 8/144 [OK]
[2025-06-12 13:27:44] Double precision check on file serial/sol_x3_y1.mtx 9/144 [OK]
[2025-06-12 13:27:44] Double precision check on file serial/sol_x3_y2.mtx 10/144 [OK]
[2025-06-12 13:27:44] Double precision check on file serial/sol_x3_y3.mtx 11/144 [OK]
[2025-06-12 13:27:44] Double precision check on file serial/sol_x3_y4.mtx 12/144 [OK]
[2025-06-12 13:27:44] Double precision check on file serial/sol_x4_y1.mtx 13/144 [OK]
[2025-06-12 13:27:44] Double precision check on file serial/sol_x4_y2.mtx 14/144 [OK]
[2025-06-12 13:27:44] Double precision check on file serial/sol_x4_y3.mtx 15/144 [OK]
[2025-06-12 13:27:44] Double precision check on file serial/sol_x4_y4.mtx 16/144 [OK]
Welcome to PSBLAS version: 3.9.0
This is the psb_gedot_test sample program
Number of processes used in this computation: 40
[2025-06-12 13:28:19] Generation gedot single precision result file parallel/sol_x1_y1.mtx 1/144 [OK]
[2025-06-12 13:28:19] Generation gedot single precision result file parallel/sol_x1_y2.mtx 2/144 [OK]
[2025-06-12 13:28:19] Generation gedot single precision result file parallel/sol_x1_y3.mtx 3/144 [OK]
[2025-06-12 13:28:19] Generation gedot single precision result file parallel/sol_x1_y4.mtx 4/144 [OK]
[2025-06-12 13:28:19] Generation gedot single precision result file parallel/sol_x2_y1.mtx 5/144 [OK]
[2025-06-12 13:28:19] Generation gedot single precision result file parallel/sol_x2_y2.mtx 6/144 [OK]
[2025-06-12 13:28:19] Generation gedot single precision result file parallel/sol_x2_y3.mtx 7/144 [OK]
[2025-06-12 13:28:19] Generation gedot single precision result file parallel/sol_x2_y4.mtx 8/144 [OK]
[2025-06-12 13:28:19] Generation gedot single precision result file parallel/sol_x3_y1.mtx 9/144 [OK]
[2025-06-12 13:28:19] Generation gedot single precision result file parallel/sol_x3_y2.mtx 10/144 [OK]
[2025-06-12 13:28:19] Generation gedot single precision result file parallel/sol_x3_y3.mtx 11/144 [OK]
[2025-06-12 13:28:19] Generation gedot single precision result file parallel/sol_x3_y4.mtx 12/144 [OK]
[2025-06-12 13:28:19] Generation gedot single precision result file parallel/sol_x4_y1.mtx 13/144 [OK]
[2025-06-12 13:28:19] Generation gedot single precision result file parallel/sol_x4_y2.mtx 14/144 [OK]
[2025-06-12 13:28:20] Generation gedot single precision result file parallel/sol_x4_y3.mtx 15/144 [OK]
[2025-06-12 13:28:20] Generation gedot single precision result file parallel/sol_x4_y4.mtx 16/144 [OK]
[2025-06-12 13:28:20] Double precision check on file parallel/sol_x1_y1.mtx 1/144 [OK]
[2025-06-12 13:28:20] Double precision check on file parallel/sol_x1_y2.mtx 2/144 [OK]
[2025-06-12 13:28:20] Double precision check on file parallel/sol_x1_y3.mtx 3/144 [OK]
[2025-06-12 13:28:20] Double precision check on file parallel/sol_x1_y4.mtx 4/144 [OK]
[2025-06-12 13:28:20] Double precision check on file parallel/sol_x2_y1.mtx 5/144 [OK]
[2025-06-12 13:28:20] Double precision check on file parallel/sol_x2_y2.mtx 6/144 [OK]
[2025-06-12 13:28:20] Double precision check on file parallel/sol_x2_y3.mtx 7/144 [OK]
[2025-06-12 13:28:20] Double precision check on file parallel/sol_x2_y4.mtx 8/144 [OK]
[2025-06-12 13:28:20] Double precision check on file parallel/sol_x3_y1.mtx 9/144 [OK]
[2025-06-12 13:28:20] Double precision check on file parallel/sol_x3_y2.mtx 10/144 [OK]
[2025-06-12 13:28:20] Double precision check on file parallel/sol_x3_y3.mtx 11/144 [OK]
[2025-06-12 13:28:20] Double precision check on file parallel/sol_x3_y4.mtx 12/144 [OK]
[2025-06-12 13:28:20] Double precision check on file parallel/sol_x4_y1.mtx 13/144 [OK]
[2025-06-12 13:28:20] Double precision check on file parallel/sol_x4_y2.mtx 14/144 [OK]
[2025-06-12 13:28:20] Double precision check on file parallel/sol_x4_y3.mtx 15/144 [OK]
[2025-06-12 13:28:20] Double precision check on file parallel/sol_x4_y4.mtx 16/144 [OK]
Comparison between serial/sol_x1_y1.mtx and parallel/sol_x1_y1.mtx: 4 differences
Comparison between serial/sol_x1_y2.mtx and parallel/sol_x1_y2.mtx: 4 differences
Comparison between serial/sol_x1_y3.mtx and parallel/sol_x1_y3.mtx: 4 differences
Comparison between serial/sol_x1_y4.mtx and parallel/sol_x1_y4.mtx: 0 differences
Comparison between serial/sol_x2_y1.mtx and parallel/sol_x2_y1.mtx: 4 differences
Comparison between serial/sol_x2_y2.mtx and parallel/sol_x2_y2.mtx: 4 differences
Comparison between serial/sol_x2_y3.mtx and parallel/sol_x2_y3.mtx: 4 differences
Comparison between serial/sol_x2_y4.mtx and parallel/sol_x2_y4.mtx: 0 differences
Comparison between serial/sol_x3_y1.mtx and parallel/sol_x3_y1.mtx: 4 differences
Comparison between serial/sol_x3_y2.mtx and parallel/sol_x3_y2.mtx: 4 differences
Comparison between serial/sol_x3_y3.mtx and parallel/sol_x3_y3.mtx: 4 differences
Comparison between serial/sol_x3_y4.mtx and parallel/sol_x3_y4.mtx: 0 differences
Comparison between serial/sol_x4_y1.mtx and parallel/sol_x4_y1.mtx: 0 differences
Comparison between serial/sol_x4_y2.mtx and parallel/sol_x4_y2.mtx: 0 differences
Comparison between serial/sol_x4_y3.mtx and parallel/sol_x4_y3.mtx: 0 differences
Comparison between serial/sol_x4_y4.mtx and parallel/sol_x4_y4.mtx: 0 differences
Welcome to PSBLAS version: 3.9.0
This is the psb_gedot_test sample program
Number of processes used in this computation: 40
[2025-06-12 13:32:44] Generation gedot single precision result file parallel/sol_x1_y1.mtx 1/144 [OK]
[2025-06-12 13:32:44] Generation gedot single precision result file parallel/sol_x1_y2.mtx 2/144 [OK]
[2025-06-12 13:32:45] Generation gedot single precision result file parallel/sol_x1_y3.mtx 3/144 [OK]
[2025-06-12 13:32:45] Generation gedot single precision result file parallel/sol_x1_y4.mtx 4/144 [OK]
[2025-06-12 13:32:45] Generation gedot single precision result file parallel/sol_x2_y1.mtx 5/144 [OK]
[2025-06-12 13:32:45] Generation gedot single precision result file parallel/sol_x2_y2.mtx 6/144 [OK]
[2025-06-12 13:32:45] Generation gedot single precision result file parallel/sol_x2_y3.mtx 7/144 [OK]
[2025-06-12 13:32:45] Generation gedot single precision result file parallel/sol_x2_y4.mtx 8/144 [OK]
[2025-06-12 13:32:45] Generation gedot single precision result file parallel/sol_x3_y1.mtx 9/144 [OK]
[2025-06-12 13:32:45] Generation gedot single precision result file parallel/sol_x3_y2.mtx 10/144 [OK]
[2025-06-12 13:32:45] Generation gedot single precision result file parallel/sol_x3_y3.mtx 11/144 [OK]
[2025-06-12 13:32:45] Generation gedot single precision result file parallel/sol_x3_y4.mtx 12/144 [OK]
[2025-06-12 13:32:45] Generation gedot single precision result file parallel/sol_x4_y1.mtx 13/144 [OK]
[2025-06-12 13:32:45] Generation gedot single precision result file parallel/sol_x4_y2.mtx 14/144 [OK]
[2025-06-12 13:32:45] Generation gedot single precision result file parallel/sol_x4_y3.mtx 15/144 [OK]
[2025-06-12 13:32:45] Generation gedot single precision result file parallel/sol_x4_y4.mtx 16/144 [OK]
[2025-06-12 13:32:45] Double precision check on file parallel/sol_x1_y1.mtx 1/144 [OK]
[2025-06-12 13:32:45] Double precision check on file parallel/sol_x1_y2.mtx 2/144 [OK]
[2025-06-12 13:32:45] Double precision check on file parallel/sol_x1_y3.mtx 3/144 [OK]
[2025-06-12 13:32:45] Double precision check on file parallel/sol_x1_y4.mtx 4/144 [OK]
[2025-06-12 13:32:45] Double precision check on file parallel/sol_x2_y1.mtx 5/144 [OK]
[2025-06-12 13:32:45] Double precision check on file parallel/sol_x2_y2.mtx 6/144 [OK]
[2025-06-12 13:32:45] Double precision check on file parallel/sol_x2_y3.mtx 7/144 [OK]
[2025-06-12 13:32:45] Double precision check on file parallel/sol_x2_y4.mtx 8/144 [OK]
[2025-06-12 13:32:45] Double precision check on file parallel/sol_x3_y1.mtx 9/144 [OK]
[2025-06-12 13:32:45] Double precision check on file parallel/sol_x3_y2.mtx 10/144 [OK]
[2025-06-12 13:32:45] Double precision check on file parallel/sol_x3_y3.mtx 11/144 [OK]
[2025-06-12 13:32:45] Double precision check on file parallel/sol_x3_y4.mtx 12/144 [OK]
[2025-06-12 13:32:45] Double precision check on file parallel/sol_x4_y1.mtx 13/144 [OK]
[2025-06-12 13:32:45] Double precision check on file parallel/sol_x4_y2.mtx 14/144 [OK]
[2025-06-12 13:32:45] Double precision check on file parallel/sol_x4_y3.mtx 15/144 [OK]
[2025-06-12 13:32:45] Double precision check on file parallel/sol_x4_y4.mtx 16/144 [OK]
Comparison between serial/sol_x1_y1.mtx and parallel/sol_x1_y1.mtx: 4 differences
Comparison between serial/sol_x1_y2.mtx and parallel/sol_x1_y2.mtx: 4 differences
Comparison between serial/sol_x1_y3.mtx and parallel/sol_x1_y3.mtx: 4 differences
Comparison between serial/sol_x1_y4.mtx and parallel/sol_x1_y4.mtx: 0 differences
Comparison between serial/sol_x2_y1.mtx and parallel/sol_x2_y1.mtx: 4 differences
Comparison between serial/sol_x2_y2.mtx and parallel/sol_x2_y2.mtx: 4 differences
Comparison between serial/sol_x2_y3.mtx and parallel/sol_x2_y3.mtx: 4 differences
Comparison between serial/sol_x2_y4.mtx and parallel/sol_x2_y4.mtx: 0 differences
Comparison between serial/sol_x3_y1.mtx and parallel/sol_x3_y1.mtx: 4 differences
Comparison between serial/sol_x3_y2.mtx and parallel/sol_x3_y2.mtx: 4 differences
Comparison between serial/sol_x3_y3.mtx and parallel/sol_x3_y3.mtx: 4 differences
Comparison between serial/sol_x3_y4.mtx and parallel/sol_x3_y4.mtx: 0 differences
Comparison between serial/sol_x4_y1.mtx and parallel/sol_x4_y1.mtx: 0 differences
Comparison between serial/sol_x4_y2.mtx and parallel/sol_x4_y2.mtx: 0 differences
Comparison between serial/sol_x4_y3.mtx and parallel/sol_x4_y3.mtx: 0 differences
Comparison between serial/sol_x4_y4.mtx and parallel/sol_x4_y4.mtx: 0 differences
Welcome to PSBLAS version: 3.9.0
This is the psb_gedot_test sample program
Number of processes used in this computation: 40
[2025-06-12 13:33:40] Generation gedot single precision result file parallel/sol_x1_y1.mtx 1/144 [OK]
[2025-06-12 13:33:40] Generation gedot single precision result file parallel/sol_x1_y2.mtx 2/144 [OK]
[2025-06-12 13:33:40] Generation gedot single precision result file parallel/sol_x1_y3.mtx 3/144 [OK]
[2025-06-12 13:33:40] Generation gedot single precision result file parallel/sol_x1_y4.mtx 4/144 [OK]
[2025-06-12 13:33:40] Generation gedot single precision result file parallel/sol_x2_y1.mtx 5/144 [OK]
[2025-06-12 13:33:40] Generation gedot single precision result file parallel/sol_x2_y2.mtx 6/144 [OK]
[2025-06-12 13:33:40] Generation gedot single precision result file parallel/sol_x2_y3.mtx 7/144 [OK]
[2025-06-12 13:33:40] Generation gedot single precision result file parallel/sol_x2_y4.mtx 8/144 [OK]
[2025-06-12 13:33:40] Generation gedot single precision result file parallel/sol_x3_y1.mtx 9/144 [OK]
[2025-06-12 13:33:40] Generation gedot single precision result file parallel/sol_x3_y2.mtx 10/144 [OK]
[2025-06-12 13:33:40] Generation gedot single precision result file parallel/sol_x3_y3.mtx 11/144 [OK]
[2025-06-12 13:33:40] Generation gedot single precision result file parallel/sol_x3_y4.mtx 12/144 [OK]
[2025-06-12 13:33:40] Generation gedot single precision result file parallel/sol_x4_y1.mtx 13/144 [OK]
[2025-06-12 13:33:40] Generation gedot single precision result file parallel/sol_x4_y2.mtx 14/144 [OK]
[2025-06-12 13:33:40] Generation gedot single precision result file parallel/sol_x4_y3.mtx 15/144 [OK]
[2025-06-12 13:33:40] Generation gedot single precision result file parallel/sol_x4_y4.mtx 16/144 [OK]
[2025-06-12 13:33:41] Double precision check on file parallel/sol_x1_y1.mtx 1/144 [OK]
[2025-06-12 13:33:41] Double precision check on file parallel/sol_x1_y2.mtx 2/144 [OK]
[2025-06-12 13:33:41] Double precision check on file parallel/sol_x1_y3.mtx 3/144 [OK]
[2025-06-12 13:33:41] Double precision check on file parallel/sol_x1_y4.mtx 4/144 [OK]
[2025-06-12 13:33:41] Double precision check on file parallel/sol_x2_y1.mtx 5/144 [OK]
[2025-06-12 13:33:41] Double precision check on file parallel/sol_x2_y2.mtx 6/144 [OK]
[2025-06-12 13:33:41] Double precision check on file parallel/sol_x2_y3.mtx 7/144 [OK]
[2025-06-12 13:33:41] Double precision check on file parallel/sol_x2_y4.mtx 8/144 [OK]
[2025-06-12 13:33:41] Double precision check on file parallel/sol_x3_y1.mtx 9/144 [OK]
[2025-06-12 13:33:41] Double precision check on file parallel/sol_x3_y2.mtx 10/144 [OK]
[2025-06-12 13:33:41] Double precision check on file parallel/sol_x3_y3.mtx 11/144 [OK]
[2025-06-12 13:33:41] Double precision check on file parallel/sol_x3_y4.mtx 12/144 [OK]
[2025-06-12 13:33:41] Double precision check on file parallel/sol_x4_y1.mtx 13/144 [OK]
[2025-06-12 13:33:41] Double precision check on file parallel/sol_x4_y2.mtx 14/144 [OK]
[2025-06-12 13:33:41] Double precision check on file parallel/sol_x4_y3.mtx 15/144 [OK]
[2025-06-12 13:33:41] Double precision check on file parallel/sol_x4_y4.mtx 16/144 [OK]
Comparison between serial/sol_x1_y1.mtx and parallel/sol_x1_y1.mtx: 4 differences
Comparison between serial/sol_x1_y2.mtx and parallel/sol_x1_y2.mtx: 4 differences
Comparison between serial/sol_x1_y3.mtx and parallel/sol_x1_y3.mtx: 4 differences
Comparison between serial/sol_x1_y4.mtx and parallel/sol_x1_y4.mtx: 0 differences
Comparison between serial/sol_x2_y1.mtx and parallel/sol_x2_y1.mtx: 4 differences
Comparison between serial/sol_x2_y2.mtx and parallel/sol_x2_y2.mtx: 4 differences
Comparison between serial/sol_x2_y3.mtx and parallel/sol_x2_y3.mtx: 4 differences
Comparison between serial/sol_x2_y4.mtx and parallel/sol_x2_y4.mtx: 0 differences
Comparison between serial/sol_x3_y1.mtx and parallel/sol_x3_y1.mtx: 4 differences
Comparison between serial/sol_x3_y2.mtx and parallel/sol_x3_y2.mtx: 4 differences
Comparison between serial/sol_x3_y3.mtx and parallel/sol_x3_y3.mtx: 4 differences
Comparison between serial/sol_x3_y4.mtx and parallel/sol_x3_y4.mtx: 0 differences
Comparison between serial/sol_x4_y1.mtx and parallel/sol_x4_y1.mtx: 0 differences
Comparison between serial/sol_x4_y2.mtx and parallel/sol_x4_y2.mtx: 0 differences
Comparison between serial/sol_x4_y3.mtx and parallel/sol_x4_y3.mtx: 0 differences
Comparison between serial/sol_x4_y4.mtx and parallel/sol_x4_y4.mtx: 0 differences
Welcome to PSBLAS version: 3.9.0
This is the psb_gedot_test sample program
Number of processes used in this computation: 40
[2025-06-12 13:34:42] Generation gedot single precision result file parallel/sol_x1_y1.mtx 1/144 [OK]
[2025-06-12 13:34:42] Generation gedot single precision result file parallel/sol_x1_y2.mtx 2/144 [OK]
[2025-06-12 13:34:42] Generation gedot single precision result file parallel/sol_x1_y3.mtx 3/144 [OK]
[2025-06-12 13:34:42] Generation gedot single precision result file parallel/sol_x1_y4.mtx 4/144 [OK]
[2025-06-12 13:34:42] Generation gedot single precision result file parallel/sol_x2_y1.mtx 5/144 [OK]
[2025-06-12 13:34:42] Generation gedot single precision result file parallel/sol_x2_y2.mtx 6/144 [OK]
[2025-06-12 13:34:42] Generation gedot single precision result file parallel/sol_x2_y3.mtx 7/144 [OK]
[2025-06-12 13:34:42] Generation gedot single precision result file parallel/sol_x2_y4.mtx 8/144 [OK]
[2025-06-12 13:34:42] Generation gedot single precision result file parallel/sol_x3_y1.mtx 9/144 [OK]
[2025-06-12 13:34:42] Generation gedot single precision result file parallel/sol_x3_y2.mtx 10/144 [OK]
[2025-06-12 13:34:42] Generation gedot single precision result file parallel/sol_x3_y3.mtx 11/144 [OK]
[2025-06-12 13:34:42] Generation gedot single precision result file parallel/sol_x3_y4.mtx 12/144 [OK]
[2025-06-12 13:34:42] Generation gedot single precision result file parallel/sol_x4_y1.mtx 13/144 [OK]
[2025-06-12 13:34:42] Generation gedot single precision result file parallel/sol_x4_y2.mtx 14/144 [OK]
[2025-06-12 13:34:42] Generation gedot single precision result file parallel/sol_x4_y3.mtx 15/144 [OK]
[2025-06-12 13:34:42] Generation gedot single precision result file parallel/sol_x4_y4.mtx 16/144 [OK]
[2025-06-12 13:34:42] Double precision check on file parallel/sol_x1_y1.mtx 1/144 [OK]
[2025-06-12 13:34:42] Double precision check on file parallel/sol_x1_y2.mtx 2/144 [OK]
[2025-06-12 13:34:42] Double precision check on file parallel/sol_x1_y3.mtx 3/144 [OK]
[2025-06-12 13:34:42] Double precision check on file parallel/sol_x1_y4.mtx 4/144 [OK]
[2025-06-12 13:34:42] Double precision check on file parallel/sol_x2_y1.mtx 5/144 [OK]
[2025-06-12 13:34:42] Double precision check on file parallel/sol_x2_y2.mtx 6/144 [OK]
[2025-06-12 13:34:42] Double precision check on file parallel/sol_x2_y3.mtx 7/144 [OK]
[2025-06-12 13:34:42] Double precision check on file parallel/sol_x2_y4.mtx 8/144 [OK]
[2025-06-12 13:34:42] Double precision check on file parallel/sol_x3_y1.mtx 9/144 [OK]
[2025-06-12 13:34:42] Double precision check on file parallel/sol_x3_y2.mtx 10/144 [OK]
[2025-06-12 13:34:42] Double precision check on file parallel/sol_x3_y3.mtx 11/144 [OK]
[2025-06-12 13:34:42] Double precision check on file parallel/sol_x3_y4.mtx 12/144 [OK]
[2025-06-12 13:34:43] Double precision check on file parallel/sol_x4_y1.mtx 13/144 [OK]
[2025-06-12 13:34:43] Double precision check on file parallel/sol_x4_y2.mtx 14/144 [OK]
[2025-06-12 13:34:43] Double precision check on file parallel/sol_x4_y3.mtx 15/144 [OK]
[2025-06-12 13:34:43] Double precision check on file parallel/sol_x4_y4.mtx 16/144 [OK]
Comparison between serial/sol_x1_y1.mtx and parallel/sol_x1_y1.mtx: 4 differences
Comparison between serial/sol_x1_y2.mtx and parallel/sol_x1_y2.mtx: 4 differences
Comparison between serial/sol_x1_y3.mtx and parallel/sol_x1_y3.mtx: 4 differences
Comparison between serial/sol_x1_y4.mtx and parallel/sol_x1_y4.mtx: 0 differences
Comparison between serial/sol_x2_y1.mtx and parallel/sol_x2_y1.mtx: 4 differences
Comparison between serial/sol_x2_y2.mtx and parallel/sol_x2_y2.mtx: 4 differences
Comparison between serial/sol_x2_y3.mtx and parallel/sol_x2_y3.mtx: 4 differences
Comparison between serial/sol_x2_y4.mtx and parallel/sol_x2_y4.mtx: 0 differences
Comparison between serial/sol_x3_y1.mtx and parallel/sol_x3_y1.mtx: 4 differences
Comparison between serial/sol_x3_y2.mtx and parallel/sol_x3_y2.mtx: 4 differences
Comparison between serial/sol_x3_y3.mtx and parallel/sol_x3_y3.mtx: 4 differences
Comparison between serial/sol_x3_y4.mtx and parallel/sol_x3_y4.mtx: 0 differences
Comparison between serial/sol_x4_y1.mtx and parallel/sol_x4_y1.mtx: 0 differences
Comparison between serial/sol_x4_y2.mtx and parallel/sol_x4_y2.mtx: 0 differences
Comparison between serial/sol_x4_y3.mtx and parallel/sol_x4_y3.mtx: 0 differences
Comparison between serial/sol_x4_y4.mtx and parallel/sol_x4_y4.mtx: 0 differences
Welcome to PSBLAS version: 3.9.0
This is the psb_gedot_test sample program
Number of processes used in this computation: 40
[2025-06-12 13:35:32] Generation gedot single precision result file parallel/sol_x1_y1.mtx 1/144 [OK]
[2025-06-12 13:35:32] Generation gedot single precision result file parallel/sol_x1_y2.mtx 2/144 [OK]
[2025-06-12 13:35:32] Generation gedot single precision result file parallel/sol_x1_y3.mtx 3/144 [OK]
[2025-06-12 13:35:32] Generation gedot single precision result file parallel/sol_x1_y4.mtx 4/144 [OK]
[2025-06-12 13:35:32] Generation gedot single precision result file parallel/sol_x2_y1.mtx 5/144 [OK]
[2025-06-12 13:35:32] Generation gedot single precision result file parallel/sol_x2_y2.mtx 6/144 [OK]
[2025-06-12 13:35:32] Generation gedot single precision result file parallel/sol_x2_y3.mtx 7/144 [OK]
[2025-06-12 13:35:32] Generation gedot single precision result file parallel/sol_x2_y4.mtx 8/144 [OK]
[2025-06-12 13:35:32] Generation gedot single precision result file parallel/sol_x3_y1.mtx 9/144 [OK]
[2025-06-12 13:35:32] Generation gedot single precision result file parallel/sol_x3_y2.mtx 10/144 [OK]
[2025-06-12 13:35:32] Generation gedot single precision result file parallel/sol_x3_y3.mtx 11/144 [OK]
[2025-06-12 13:35:32] Generation gedot single precision result file parallel/sol_x3_y4.mtx 12/144 [OK]
[2025-06-12 13:35:32] Generation gedot single precision result file parallel/sol_x4_y1.mtx 13/144 [OK]
[2025-06-12 13:35:32] Generation gedot single precision result file parallel/sol_x4_y2.mtx 14/144 [OK]
[2025-06-12 13:35:32] Generation gedot single precision result file parallel/sol_x4_y3.mtx 15/144 [OK]
[2025-06-12 13:35:32] Generation gedot single precision result file parallel/sol_x4_y4.mtx 16/144 [OK]
[2025-06-12 13:35:32] Double precision check on file parallel/sol_x1_y1.mtx 1/144 [OK]
[2025-06-12 13:35:32] Double precision check on file parallel/sol_x1_y2.mtx 2/144 [OK]
[2025-06-12 13:35:33] Double precision check on file parallel/sol_x1_y3.mtx 3/144 [OK]
[2025-06-12 13:35:33] Double precision check on file parallel/sol_x1_y4.mtx 4/144 [OK]
[2025-06-12 13:35:33] Double precision check on file parallel/sol_x2_y1.mtx 5/144 [OK]
[2025-06-12 13:35:33] Double precision check on file parallel/sol_x2_y2.mtx 6/144 [OK]
[2025-06-12 13:35:33] Double precision check on file parallel/sol_x2_y3.mtx 7/144 [OK]
[2025-06-12 13:35:33] Double precision check on file parallel/sol_x2_y4.mtx 8/144 [OK]
[2025-06-12 13:35:33] Double precision check on file parallel/sol_x3_y1.mtx 9/144 [OK]
[2025-06-12 13:35:33] Double precision check on file parallel/sol_x3_y2.mtx 10/144 [OK]
[2025-06-12 13:35:33] Double precision check on file parallel/sol_x3_y3.mtx 11/144 [OK]
[2025-06-12 13:35:33] Double precision check on file parallel/sol_x3_y4.mtx 12/144 [OK]
[2025-06-12 13:35:33] Double precision check on file parallel/sol_x4_y1.mtx 13/144 [OK]
[2025-06-12 13:35:33] Double precision check on file parallel/sol_x4_y2.mtx 14/144 [OK]
[2025-06-12 13:35:33] Double precision check on file parallel/sol_x4_y3.mtx 15/144 [OK]
[2025-06-12 13:35:33] Double precision check on file parallel/sol_x4_y4.mtx 16/144 [OK]
Comparison between serial/sol_x1_y1.mtx and parallel/sol_x1_y1.mtx: 4 differences
Comparison between serial/sol_x1_y2.mtx and parallel/sol_x1_y2.mtx: 4 differences
Comparison between serial/sol_x1_y3.mtx and parallel/sol_x1_y3.mtx: 4 differences
Comparison between serial/sol_x1_y4.mtx and parallel/sol_x1_y4.mtx: 0 differences
Comparison between serial/sol_x2_y1.mtx and parallel/sol_x2_y1.mtx: 4 differences
Comparison between serial/sol_x2_y2.mtx and parallel/sol_x2_y2.mtx: 4 differences
Comparison between serial/sol_x2_y3.mtx and parallel/sol_x2_y3.mtx: 4 differences
Comparison between serial/sol_x2_y4.mtx and parallel/sol_x2_y4.mtx: 0 differences
Comparison between serial/sol_x3_y1.mtx and parallel/sol_x3_y1.mtx: 4 differences
Comparison between serial/sol_x3_y2.mtx and parallel/sol_x3_y2.mtx: 4 differences
Comparison between serial/sol_x3_y3.mtx and parallel/sol_x3_y3.mtx: 4 differences
Comparison between serial/sol_x3_y4.mtx and parallel/sol_x3_y4.mtx: 0 differences
Comparison between serial/sol_x4_y1.mtx and parallel/sol_x4_y1.mtx: 0 differences
Comparison between serial/sol_x4_y2.mtx and parallel/sol_x4_y2.mtx: 0 differences
Comparison between serial/sol_x4_y3.mtx and parallel/sol_x4_y3.mtx: 0 differences
Comparison between serial/sol_x4_y4.mtx and parallel/sol_x4_y4.mtx: 0 differences
Welcome to PSBLAS version: 3.9.0
This is the psb_gedot_test sample program
Number of processes used in this computation: 40
[2025-06-12 13:37:17] Generation gedot single precision result file parallel/sol_x1_y1.mtx 1/144 [OK]
[2025-06-12 13:37:17] Generation gedot single precision result file parallel/sol_x1_y2.mtx 2/144 [OK]
[2025-06-12 13:37:17] Generation gedot single precision result file parallel/sol_x1_y3.mtx 3/144 [OK]
[2025-06-12 13:37:17] Generation gedot single precision result file parallel/sol_x1_y4.mtx 4/144 [OK]
[2025-06-12 13:37:17] Generation gedot single precision result file parallel/sol_x2_y1.mtx 5/144 [OK]
[2025-06-12 13:37:17] Generation gedot single precision result file parallel/sol_x2_y2.mtx 6/144 [OK]
[2025-06-12 13:37:17] Generation gedot single precision result file parallel/sol_x2_y3.mtx 7/144 [OK]
[2025-06-12 13:37:17] Generation gedot single precision result file parallel/sol_x2_y4.mtx 8/144 [OK]
[2025-06-12 13:37:17] Generation gedot single precision result file parallel/sol_x3_y1.mtx 9/144 [OK]
[2025-06-12 13:37:17] Generation gedot single precision result file parallel/sol_x3_y2.mtx 10/144 [OK]
[2025-06-12 13:37:17] Generation gedot single precision result file parallel/sol_x3_y3.mtx 11/144 [OK]
[2025-06-12 13:37:17] Generation gedot single precision result file parallel/sol_x3_y4.mtx 12/144 [OK]
[2025-06-12 13:37:17] Generation gedot single precision result file parallel/sol_x4_y1.mtx 13/144 [OK]
[2025-06-12 13:37:17] Generation gedot single precision result file parallel/sol_x4_y2.mtx 14/144 [OK]
[2025-06-12 13:37:17] Generation gedot single precision result file parallel/sol_x4_y3.mtx 15/144 [OK]
[2025-06-12 13:37:17] Generation gedot single precision result file parallel/sol_x4_y4.mtx 16/144 [OK]
[2025-06-12 13:37:17] Double precision check on file parallel/sol_x1_y1.mtx 1/144 [OK]
[2025-06-12 13:37:17] Double precision check on file parallel/sol_x1_y2.mtx 2/144 [OK]
[2025-06-12 13:37:17] Double precision check on file parallel/sol_x1_y3.mtx 3/144 [OK]
[2025-06-12 13:37:17] Double precision check on file parallel/sol_x1_y4.mtx 4/144 [OK]
[2025-06-12 13:37:17] Double precision check on file parallel/sol_x2_y1.mtx 5/144 [OK]
[2025-06-12 13:37:17] Double precision check on file parallel/sol_x2_y2.mtx 6/144 [OK]
[2025-06-12 13:37:17] Double precision check on file parallel/sol_x2_y3.mtx 7/144 [OK]
[2025-06-12 13:37:17] Double precision check on file parallel/sol_x2_y4.mtx 8/144 [OK]
[2025-06-12 13:37:17] Double precision check on file parallel/sol_x3_y1.mtx 9/144 [OK]
[2025-06-12 13:37:17] Double precision check on file parallel/sol_x3_y2.mtx 10/144 [OK]
[2025-06-12 13:37:17] Double precision check on file parallel/sol_x3_y3.mtx 11/144 [OK]
[2025-06-12 13:37:17] Double precision check on file parallel/sol_x3_y4.mtx 12/144 [OK]
[2025-06-12 13:37:17] Double precision check on file parallel/sol_x4_y1.mtx 13/144 [OK]
[2025-06-12 13:37:17] Double precision check on file parallel/sol_x4_y2.mtx 14/144 [OK]
[2025-06-12 13:37:17] Double precision check on file parallel/sol_x4_y3.mtx 15/144 [OK]
[2025-06-12 13:37:18] Double precision check on file parallel/sol_x4_y4.mtx 16/144 [OK]
Comparison between serial/sol_x1_y1.mtx and parallel/sol_x1_y1.mtx: 4 differences
Comparison between serial/sol_x1_y2.mtx and parallel/sol_x1_y2.mtx: 4 differences
Comparison between serial/sol_x1_y3.mtx and parallel/sol_x1_y3.mtx: 4 differences
Comparison between serial/sol_x1_y4.mtx and parallel/sol_x1_y4.mtx: 0 differences
Comparison between serial/sol_x2_y1.mtx and parallel/sol_x2_y1.mtx: 4 differences
Comparison between serial/sol_x2_y2.mtx and parallel/sol_x2_y2.mtx: 4 differences
Comparison between serial/sol_x2_y3.mtx and parallel/sol_x2_y3.mtx: 4 differences
Comparison between serial/sol_x2_y4.mtx and parallel/sol_x2_y4.mtx: 0 differences
Comparison between serial/sol_x3_y1.mtx and parallel/sol_x3_y1.mtx: 4 differences
Comparison between serial/sol_x3_y2.mtx and parallel/sol_x3_y2.mtx: 4 differences
Comparison between serial/sol_x3_y3.mtx and parallel/sol_x3_y3.mtx: 4 differences
Comparison between serial/sol_x3_y4.mtx and parallel/sol_x3_y4.mtx: 0 differences
Comparison between serial/sol_x4_y1.mtx and parallel/sol_x4_y1.mtx: 0 differences
Comparison between serial/sol_x4_y2.mtx and parallel/sol_x4_y2.mtx: 0 differences
Comparison between serial/sol_x4_y3.mtx and parallel/sol_x4_y3.mtx: 0 differences
Comparison between serial/sol_x4_y4.mtx and parallel/sol_x4_y4.mtx: 0 differences
Welcome to PSBLAS version: 3.9.0
This is the psb_gedot_test sample program
Number of processes used in this computation: 40
[2025-06-12 13:37:56] Generation gedot single precision result file parallel/sol_x1_y1.mtx 1/144 [OK]
[2025-06-12 13:37:56] Generation gedot single precision result file parallel/sol_x1_y2.mtx 2/144 [OK]
[2025-06-12 13:37:56] Generation gedot single precision result file parallel/sol_x1_y3.mtx 3/144 [OK]
[2025-06-12 13:37:56] Generation gedot single precision result file parallel/sol_x1_y4.mtx 4/144 [OK]
[2025-06-12 13:37:56] Generation gedot single precision result file parallel/sol_x2_y1.mtx 5/144 [OK]
[2025-06-12 13:37:56] Generation gedot single precision result file parallel/sol_x2_y2.mtx 6/144 [OK]
[2025-06-12 13:37:56] Generation gedot single precision result file parallel/sol_x2_y3.mtx 7/144 [OK]
[2025-06-12 13:37:56] Generation gedot single precision result file parallel/sol_x2_y4.mtx 8/144 [OK]
[2025-06-12 13:37:56] Generation gedot single precision result file parallel/sol_x3_y1.mtx 9/144 [OK]
[2025-06-12 13:37:56] Generation gedot single precision result file parallel/sol_x3_y2.mtx 10/144 [OK]
[2025-06-12 13:37:56] Generation gedot single precision result file parallel/sol_x3_y3.mtx 11/144 [OK]
[2025-06-12 13:37:56] Generation gedot single precision result file parallel/sol_x3_y4.mtx 12/144 [OK]
[2025-06-12 13:37:56] Generation gedot single precision result file parallel/sol_x4_y1.mtx 13/144 [OK]
[2025-06-12 13:37:56] Generation gedot single precision result file parallel/sol_x4_y2.mtx 14/144 [OK]
[2025-06-12 13:37:56] Generation gedot single precision result file parallel/sol_x4_y3.mtx 15/144 [OK]
[2025-06-12 13:37:56] Generation gedot single precision result file parallel/sol_x4_y4.mtx 16/144 [OK]
[2025-06-12 13:37:56] Double precision check on file parallel/sol_x1_y1.mtx 1/144 [OK]
[2025-06-12 13:37:56] Double precision check on file parallel/sol_x1_y2.mtx 2/144 [OK]
[2025-06-12 13:37:56] Double precision check on file parallel/sol_x1_y3.mtx 3/144 [OK]
[2025-06-12 13:37:57] Double precision check on file parallel/sol_x1_y4.mtx 4/144 [OK]
[2025-06-12 13:37:57] Double precision check on file parallel/sol_x2_y1.mtx 5/144 [OK]
[2025-06-12 13:37:57] Double precision check on file parallel/sol_x2_y2.mtx 6/144 [OK]
[2025-06-12 13:37:57] Double precision check on file parallel/sol_x2_y3.mtx 7/144 [OK]
[2025-06-12 13:37:57] Double precision check on file parallel/sol_x2_y4.mtx 8/144 [OK]
[2025-06-12 13:37:57] Double precision check on file parallel/sol_x3_y1.mtx 9/144 [OK]
[2025-06-12 13:37:57] Double precision check on file parallel/sol_x3_y2.mtx 10/144 [OK]
[2025-06-12 13:37:57] Double precision check on file parallel/sol_x3_y3.mtx 11/144 [OK]
[2025-06-12 13:37:57] Double precision check on file parallel/sol_x3_y4.mtx 12/144 [OK]
[2025-06-12 13:37:57] Double precision check on file parallel/sol_x4_y1.mtx 13/144 [OK]
[2025-06-12 13:37:57] Double precision check on file parallel/sol_x4_y2.mtx 14/144 [OK]
[2025-06-12 13:37:57] Double precision check on file parallel/sol_x4_y3.mtx 15/144 [OK]
[2025-06-12 13:37:57] Double precision check on file parallel/sol_x4_y4.mtx 16/144 [OK]
Comparison between serial/sol_x1_y1.mtx and parallel/sol_x1_y1.mtx: 4 differences
Comparison between serial/sol_x1_y2.mtx and parallel/sol_x1_y2.mtx: 4 differences
Comparison between serial/sol_x1_y3.mtx and parallel/sol_x1_y3.mtx: 4 differences
Comparison between serial/sol_x1_y4.mtx and parallel/sol_x1_y4.mtx: 0 differences
Comparison between serial/sol_x2_y1.mtx and parallel/sol_x2_y1.mtx: 4 differences
Comparison between serial/sol_x2_y2.mtx and parallel/sol_x2_y2.mtx: 4 differences
Comparison between serial/sol_x2_y3.mtx and parallel/sol_x2_y3.mtx: 4 differences
Comparison between serial/sol_x2_y4.mtx and parallel/sol_x2_y4.mtx: 0 differences
Comparison between serial/sol_x3_y1.mtx and parallel/sol_x3_y1.mtx: 4 differences
Comparison between serial/sol_x3_y2.mtx and parallel/sol_x3_y2.mtx: 4 differences
Comparison between serial/sol_x3_y3.mtx and parallel/sol_x3_y3.mtx: 4 differences
Comparison between serial/sol_x3_y4.mtx and parallel/sol_x3_y4.mtx: 0 differences
Comparison between serial/sol_x4_y1.mtx and parallel/sol_x4_y1.mtx: 0 differences
Comparison between serial/sol_x4_y2.mtx and parallel/sol_x4_y2.mtx: 0 differences
Comparison between serial/sol_x4_y3.mtx and parallel/sol_x4_y3.mtx: 0 differences
Comparison between serial/sol_x4_y4.mtx and parallel/sol_x4_y4.mtx: 0 differences

File diff suppressed because it is too large Load Diff

@ -0,0 +1,8 @@
# Sources of the psb_spmm test, listed relative to this directory.
set(PSB_spmm_source_files
  psb_spmm_test.f90
  spmm.f90
)
# Turn the relative names into absolute paths and accumulate them into the
# caller-visible spmm_source_files list (list(TRANSFORM) replaces the
# hand-written foreach/append loop; requires CMake >= 3.12).
list(TRANSFORM PSB_spmm_source_files
     PREPEND "${CMAKE_CURRENT_LIST_DIR}/"
     OUTPUT_VARIABLE _psb_spmm_abs_sources)
list(APPEND spmm_source_files ${_psb_spmm_abs_sources})
unset(_psb_spmm_abs_sources)

@ -0,0 +1,42 @@
INSTALLDIR=../../..
INCDIR=$(INSTALLDIR)/include/
MODDIR=$(INSTALLDIR)/modules/
include $(INCDIR)/Make.inc.psblas
#
# Libraries used
#
LIBDIR = $(INSTALLDIR)/lib/
PSBLAS_LIB = -L$(LIBDIR) -lpsb_util -lpsb_base
LDLIBS = $(PSBLDLIBS)
FINCLUDES=$(FMFLAG)$(MODDIR) $(FMFLAG).
EXEDIR=./runs
# ANSI colour escape sequences used in the status messages below
GREEN=\033[0;32m
RED=\033[0;31m
BLUE=\033[0;34m
YELLOW=\033[33m
END_COLOUR=\033[0m
all: runsd psb_spmm_test
	@printf "$(GREEN)[INFO]\tCompilation success!$(END_COLOUR)\n"
# Create the runs/ output directory on first build
runsd:
	@(if test ! -d runs ; then mkdir runs; fi)
	@printf "$(BLUE)[INFO]\tBuild directory $(EXEDIR) correctly initialized$(END_COLOUR)\n"
# Link the test driver and move it into the runs/ directory
psb_spmm_test:
	@$(FLINK) $(LOPT) psb_spmm_test.f90 spmm.f90 -o psb_spmm_test -I$(MODDIR) -I. $(PSBLAS_LIB) $(LDLIBS)
	@mv psb_spmm_test $(EXEDIR)
	@printf "$(BLUE)[INFO]\tTesting files generated correctly$(END_COLOUR)\n"
# BUGFIX: the previous rule removed '*$(.mod)'; since the variable '.mod'
# is undefined it expanded to a bare '*', which deleted EVERY file in this
# directory. '*.mod' (Fortran module files) is what was intended.
clean:
	@rm -f $(OBJS) *.mod $(EXEDIR)/psb_spmm_test
.PHONY: all runsd clean

@ -0,0 +1,85 @@
# Introduction
This directory was developed by Luca Pepè Sciarria and Simone Staccone from Tor Vergata University to create unit tests for PSBLAS 3.9, in particular for the ```psb_spmm``` routine.
## Environment
These tests are developed using a linux environment, in particular Rocky Linux 9.5 (Blue Onyx).
The compiler used is:
- gnu 12.2.1
The necessary dependencies are:
- mpich 4.2.2
- psblas 3.9
In order to have the exact same environment used for testing, compile the PSBLAS library using CUDA 12.5.
## Getting started
Steps to reproduce the tests:
- Compile the code using ``` make ```
- Insert the matrix files inside the matrix/ directory (or create one if it doesn't exist; psblas3/test/spmm/matrix/)
- Launch the script autotest.sh (Still not implemented)
- Check the output log file test_log.txt to collect results
## Test goal
Check the correctness of the matrix-vector multiplication $y = Ax$ using the **psb_spmm** routine, checking for all the test suite cases.
## Test Suite
### Overall Analysis
The subroutine psb_spmm materializes in three different procedures:
```psb_sspmm```, ```psb_sspmv```, ```psb_sspmv_vect```. The focus is on testing the ```psb_sspmv``` one, so as a first step let's analyze the signature of the function to study a satisfying input parameter space. The signature of the function is:
```fortran
psb_sspmv(alpha, a, x, beta, y,&
& desc_a, info, trans, work,doswap)
```
In order to have a black-box approach and to consider only the functionalities of the routine, the parameters ***work*** and ***doswap*** are not considered during these tests, since they are used in situations that arise internally to the library code. Moreover, info is an output parameter used to signal some kind of error, therefore it is not part of the input space.
There are seven parameters left. The following tables highlight the values assigned to each parameter. The tests are run composing a Cartesian product of all the possible values assigned to each parameter.
### Parameters Values
**x** vectors are located in the vectors/ directory. They are generated randomly using the same seed and then saved on different files based on their characteristics. The size of the vector is chosen according to the size of the matrix column space considered for the single test instance.
|Vector|File Name|Coefficients|Coefficients Description|
|:-:|:-:|:-:|:-:|
|$x_1$|x1.txt|$x_i> 0, \forall i$|Positive coefficients|
|$x_2$|x2.txt|$x_i < 0, \forall i$|Negative coefficients
|$x_3$|x3.txt|$x_i \ne 0, \forall i$|Random coefficients
|$x_4$|x4.txt|$x_i = 0, \forall i$|Null coefficients
**y** vectors are located in the vectors/ directory. They are generated randomly using the same seed and then saved on different files based on their characteristics. The size of the vector is chosen according to the size of the matrix row space considered for the single test instance.
|Vector|File Name|Coefficients|Coefficients Description|
|:-:|:-:|:-:|:-:|
|$y_1$|y1.txt|$y_i> 0, \forall i$|Positive coefficients|
|$y_2$|y2.txt|$y_i < 0, \forall i$|Negative coefficients
|$y_3$|y3.txt|$y_i \ne 0, \forall i$|Random coefficients
|$y_4$|y4.txt|$y_i = 0, \forall i$|Null coefficients
**$\alpha$**
|$\alpha$|Value|Coefficients Description|
|:-:|:-:|:-:|
|$\alpha_1$|1.0|Positive value|
|$\alpha_2$|-1.0|Negative value|
|$\alpha_3$|0.0|Null value|
**$\beta$**
|$\beta$|Value|Coefficients Description|
|:-:|:-:|:-:|
|$\beta_1$|1.0|Positive value|
|$\beta_2$|-1.0|Negative value|
|$\beta_3$|0.0|Null value|
**trans**
## Output
The output files generated by the test are automatically compared by the autotest.sh script, but if it is needed to manually run the test, here is the naming convention used.
The results of the computation will be saved on different files based on the instance of the test considered. In particular the naming convention formats the output file as sol_m#_x#_y#_a#_b#.mtx, where each # is a number chosen w.r.t. the test instance. (E.g. sol_m1_x1_y1_a1_b1.mtx is the solution computed using the first matrix file, the first x vector file, the first y vector file, alpha = 1.0 and beta = 1.0.) Moreover, the files will be saved in the serial/ directory if the program is launched using 1 process, or in the parallel/ directory if the program is launched with more than one process.
## Notes
For now only single-precision real multiplication is tested, and only on a single matrix
## TODO
- Update docs
- Parametrize also on trans parameter
- Create the script autotest.sh
- Write the I/O output on log file
- ...

@ -0,0 +1,292 @@
!> Test program for y = AX psb_spmm routine
!! Check the README.md to see all details about the tests.
!!
!! Author: Luca Pepé Sciarria, Staccone Simone (Tor Vergata University)
module psb_spmm_test
contains
!> @brief Run one psb_spmm test instance.
!!
!! Reads the sparse matrix A from mtx_file and the dense vectors x and y
!! from x_file / y_file, distributes them over the PSBLAS context, computes
!! y = alpha*A*x + beta*y via psb_spmm, and writes the resulting y to a
!! file whose name encodes the x/y/alpha/beta combination being tested.
!!
!! @param mtx_file  Matrix Market file holding the sparse matrix A
!! @param x_file    Matrix Market array file holding the input vector x
!! @param y_file    Matrix Market array file holding the input/output vector y
!! @param alpha     scalar multiplier of A*x
!! @param beta      scalar multiplier of y
!! @param ctxt      already-initialized PSBLAS communication context
subroutine psb_spmm_kernel(mtx_file,x_file, y_file, alpha, beta, ctxt)
use psb_base_mod
use psb_util_mod
implicit none
! input parameters
character(len = *), intent(in) :: mtx_file, x_file, y_file
real(psb_spk_), intent(in) :: alpha, beta
! name of the result file; built incrementally below
character(len=:), allocatable :: output_file_name
! sparse matrices: a is the distributed matrix, aux_a the replicated
! matrix as read from file
type(psb_sspmat_type) :: a
type(psb_lsspmat_type) :: aux_a
! vectors
type(psb_s_vect_type) :: x, y
! matrix descriptor data structure
type(psb_desc_type) :: desc_a
! communication context
type(psb_ctxt_type), intent(in) :: ctxt
integer(psb_ipk_) :: my_rank, np, info, err_act
! matrix parameters
integer(psb_ipk_) :: rows, cols, nnz
integer(psb_ipk_) :: nr, nt ! In BLOCK ROWS distribution, the number of rows
! replicated (non-distributed) copies of the vectors, used only on the root
real(psb_spk_), allocatable :: x_global(:), y_global(:)
integer(psb_ipk_) :: i
info = psb_success_
call psb_info(ctxt,my_rank,np)
if (my_rank < 0) then
! This should not happen, but just in case
call psb_error(ctxt)
endif
call mm_mat_read(aux_a,info,filename=mtx_file)
if(info /= psb_success_) then
write(psb_out_unit,*) "Error while reading matric ", mtx_file
goto 9999
end if
! part_block it's a macro defined in psb_blockpart_mod to identify BLOCK ROWS distribution
call psb_matdist(aux_a, a, ctxt,desc_a,info,fmt="COO",parts=part_block)
rows = aux_a%get_nrows()
cols = aux_a%get_ncols()
nnz = aux_a%get_nzeros()
! make the matrix sizes known on every process, not only the root
call psb_bcast(ctxt,rows)
call psb_bcast(ctxt,cols)
call psb_bcast(ctxt,nnz)
! Read the x and y input vectors on the root process only;
! x spans A's column space, y spans A's row space
if(my_rank == psb_root_) then
allocate(x_global(cols))
allocate(y_global(rows))
call mm_array_read(x_global,info,filename=x_file)
call mm_array_read(y_global,info,filename=y_file)
end if
call psb_geall(x,desc_a,info)
if(info /= psb_success_) then
write(psb_out_unit,*) "Error allocating x data structure"
goto 9999
end if
! Populate x class using data from x_global vector
call psb_scatter(x_global,x,desc_a,info,root=psb_root_)
if(info /= psb_success_) then
write(psb_out_unit,*) "Error in psb_scatter to populate x data structure"
goto 9999
end if
call psb_geall(y,desc_a,info)
if(info /= psb_success_) then
write(psb_out_unit,*) "Error allocating y data structure"
goto 9999
end if
! Populate y class using data from y_global vector
call psb_scatter(y_global,y,desc_a,info,root=psb_root_)
if(info /= psb_success_) then
write(psb_out_unit,*) "Error in psb_scatter to populate y data structure"
goto 9999
end if
! y = alpha * A * x + beta * y
call psb_spmm(alpha,a,x,beta,y,desc_a,info)
if(info /= psb_success_) then
write(psb_out_unit,*) "Error in psb_spmm routine"
goto 9999
end if
! Results from a serial run and a parallel run go to different
! directories so autotest.sh can compare them afterwards
if(np == 1) then
output_file_name = "serial/"
else
output_file_name = "parallel/"
end if
! x_file(9:10) / y_file(9:10) extract e.g. "x1" / "y2" from
! "vectors/x1.mtx" — assumes the fixed "vectors/??.mtx" layout
output_file_name = output_file_name // "sol_" // x_file(9:10) // "_" // y_file(9:10)
! encode the alpha value (1.0 / -1.0 / 0.0) in the file name
if(alpha == sone) then
output_file_name = output_file_name // "_a1"
else if(alpha == -sone) then
output_file_name = output_file_name // "_a2"
else if(alpha == szero) then
output_file_name = output_file_name // "_a3"
end if
! encode the beta value (1.0 / -1.0 / 0.0) in the file name
if(beta == sone) then
output_file_name = output_file_name // "_b1.mtx"
else if(beta == -sone) then
output_file_name = output_file_name // "_b2.mtx"
else if(beta == szero) then
output_file_name = output_file_name // "_b3.mtx"
end if
! Save result to output file
call mm_array_write(y,"Result vector",info,filename=output_file_name)
! Deallocate all distributed structures before returning
call psb_gefree(x, desc_a,info)
if(info /= psb_success_) then
write(psb_out_unit,*) "Error in vector x free routine"
goto 9999
end if
call psb_gefree(y, desc_a,info)
if(info /= psb_success_) then
write(psb_out_unit,*) "Error in vector y free routine"
goto 9999
end if
call psb_spfree(a, desc_a,info)
if(info /= psb_success_) then
write(psb_out_unit,*) "Error in matrix A free routine"
goto 9999
end if
call psb_cdfree(desc_a,info)
if(info /= psb_success_) then
write(psb_out_unit,*) "Error in matrix descriptor free routine"
goto 9999
end if
if(my_rank == 0) then
deallocate(x_global)
deallocate(y_global)
end if
return
! Error handling: abort the whole parallel run on any failure above
9999 call psb_error(ctxt)
call psb_error_handler(ctxt,err_act)
stop
end subroutine
!> @brief Generate the x and y test vectors and save them to
!! vectors/x[1-4].mtx and vectors/y[1-4].mtx: positive (1),
!! negative (2), mixed-sign (3) and all-zero (4) coefficients.
!!
!! NOTE(review): x is allocated with `rows` and y with `cols`, which is
!! the opposite of psb_spmm_kernel (x_global(cols), y_global(rows)).
!! This only works because the test matrix is square — confirm before
!! using a rectangular matrix.
subroutine generate_vectors(rows, cols)
use psb_base_mod
use psb_util_mod
implicit none
integer(psb_ipk_), intent(in) :: rows, cols
real(psb_spk_), allocatable :: x(:), y(:)
integer(psb_ipk_) :: i, info
allocate(x(rows))
allocate(y(cols))
! Fortran 2018: fixed, repeatable seed so every run generates the
! same vectors
call random_init(repeatable=.true.,image_distinct=.true.)
call random_number(x)
call random_number(y)
! random_number yields values in [0,1): write the positive case as x_1/y_1
call mm_array_write(x,"Positive vector",info,filename="vectors/x1.mtx")
call mm_array_write(y,"Positive vector",info,filename="vectors/y1.mtx")
! Negate everything for the all-negative case x_2/y_2
do i=1,rows
x(i) = -x(i)
end do
do i=1,cols
y(i) = -y(i)
end do
call mm_array_write(x,"Negative vector",info,filename="vectors/x2.mtx")
call mm_array_write(y,"Negative vector",info,filename="vectors/y2.mtx")
! Shift the values from [0,1) to [-0.5,0.5) to obtain mixed signs
do i=1,rows
x(i) = -x(i) ! Make the values positive again
x(i) = x(i) - 0.5
end do
do i=1,cols
y(i) = -y(i) ! Make the values positive again
y(i) = y(i) - 0.5
end do
! Write random in x_3
call mm_array_write(x,"Random vector",info,filename="vectors/x3.mtx")
call mm_array_write(y,"Random vector",info,filename="vectors/y3.mtx")
! Write zero in x_4
do i=1,rows
x(i) = 0
end do
do i=1,cols
y(i) = 0
end do
call mm_array_write(x,"Null vector",info,filename="vectors/x4.mtx")
call mm_array_write(y,"Null vector",info,filename="vectors/y4.mtx")
deallocate(x)
deallocate(y)
end subroutine
!> @brief Read the size header (rows, cols) of a Matrix Market file,
!! skipping the leading '%' comment lines.
!!
!! NOTE(review): the header is parsed as "rows cols nnz", i.e. the
!! coordinate format used by matrix/1138_bus.mtx; array-format files
!! have only two values on that line — confirm before reusing this on
!! vector files.
subroutine read_matrix_market_size(filename, rows, cols)
implicit none
character(len=*), intent(in) :: filename
integer, intent(out) :: rows, cols
integer :: ret, nnz
character(len=256) :: line
logical :: found
integer :: unit
! Open the file
open(newunit=unit, file=filename, status='old', action='read', iostat=ret)
if (ret /= 0) then
print *, 'Error opening file: ', filename
stop
end if
found = .false.
! Skip comment lines (starting with %)
do
read(unit, '(A)', iostat=ret) line
if (ret /= 0) exit
if (line(1:1) /= '%') then
! first non-comment line carries the sizes
read(line, *) rows, cols, nnz
found = .true.
exit
end if
end do
if (.not. found) then
print *, 'Error: header not found in Matrix Market file.'
stop
end if
close(unit)
end subroutine read_matrix_market_size
end module psb_spmm_test

@ -0,0 +1,74 @@
!> Driver program for the psb_spmm test suite.
!!
!! Reads the test matrix size, generates the input vectors on the root
!! process, then runs psb_spmm_kernel over the Cartesian product of the
!! 4 x vectors, 4 y vectors, 3 alpha values and 3 beta values.
program main
use psb_spmm_test
use psb_base_mod
implicit none
! matrix stats variables
integer(psb_ipk_) :: rows, cols
! MPI variables
integer(psb_ipk_) :: my_rank, np
! parameters array
character(len=64) :: x(4),y(4)
! BUGFIX: alpha/beta were declared real(psb_ipk_); psb_ipk_ is an INTEGER
! kind constant and only worked as a real kind by numeric coincidence.
! psb_spk_ matches the real(psb_spk_) dummies of psb_spmm_kernel.
real(psb_spk_) :: alpha(3), beta(3)
! cycle indexes variables (unused index 'l' removed)
integer(psb_ipk_) :: i,j,k,h
! Communicator variable
type(psb_ctxt_type) :: ctxt
! Initialize parameters: the four x/y vector files written by
! generate_vectors, and the three alpha/beta scalar values
x(1) = "vectors/x1.mtx"
x(2) = "vectors/x2.mtx"
x(3) = "vectors/x3.mtx"
x(4) = "vectors/x4.mtx"
y(1) = "vectors/y1.mtx"
y(2) = "vectors/y2.mtx"
y(3) = "vectors/y3.mtx"
y(4) = "vectors/y4.mtx"
alpha(1) = sone
alpha(2) = -sone
alpha(3) = szero
beta(1) = sone
beta(2) = -sone
beta(3) = szero
call psb_init(ctxt)
call psb_info(ctxt,my_rank,np)
! Only the root generates the input vectors (sized from the test matrix)
if(my_rank == psb_root_) then
write(psb_out_unit,*) 'Welcome to PSBLAS version: ',psb_version_string_
write(psb_out_unit,*) 'This is the psb_spmm_test sample program'
call read_matrix_market_size("matrix/1138_bus.mtx", rows, cols)
call generate_vectors(rows,cols)
end if
! Make sure the vector files exist before any process reads them
call psb_barrier(ctxt)
!! 1138_bus matrix (sparse): run every x/y/alpha/beta combination
do i=1,size(x)
do j=1,size(y)
do k=1,size(alpha)
do h=1,size(beta)
call psb_spmm_kernel(mtx_file="matrix/1138_bus.mtx",x_file=x(i), y_file=y(j), &
& alpha = alpha(k), beta = beta(h), ctxt = ctxt)
end do
end do
end do
end do
! BUGFIX: removed the nonstandard RETURN statement (RETURN is not valid
! in a main program); psb_exit + END PROGRAM terminate the run.
call psb_exit(ctxt)
end program main

@ -0,0 +1,59 @@
/* Test program for checking if two arrays (output of alpha*A*x + beta*y) are the same
* Check the README.md to see all details about the tests.
*
* Author: Luca Pepé Sciarria, Staccone Simone (Tor Vergata University)
*/
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
/* Compare two result-vector files element by element.
 *
 * Each file starts with the element count on its first line, followed by
 * one floating-point value per line.  Entries that differ are reported on
 * stdout; unreadable files or mismatched sizes abort the program.
 *
 * file1: path of the serial result file
 * file2: path of the parallel result file
 */
void compare_files(const char *file1, const char *file2) {
    FILE *file_s = fopen(file1, "r");
    FILE *file_p = fopen(file2, "r");
    if (!file_s || !file_p) {
        perror("Error opening files");
        exit(EXIT_FAILURE);
    }

    /* First line of each file holds the number of entries. */
    int n_s, n_p;
    if (fscanf(file_s, "%d", &n_s) != 1 || fscanf(file_p, "%d", &n_p) != 1) {
        fprintf(stderr, "Error reading element counts from %s / %s\n", file1, file2);
        exit(EXIT_FAILURE);
    }
    if (n_s != n_p) {
        /* BUGFIX: the message used "$d" (not a printf conversion), so the
         * sizes were never printed; "%d" is correct. Also fixed the typo
         * "differnet" and added the missing newline. */
        fprintf(stderr, "Error, different file sizes %d, %d\n", n_s, n_p);
        exit(EXIT_FAILURE);
    }

    double value_s, value_p;
    for (int i = 0; i < n_s; i++) {
        /* Robustness: stop on short/corrupt files instead of comparing
         * uninitialized values. */
        if (fscanf(file_s, "%lf", &value_s) != 1 ||
            fscanf(file_p, "%lf", &value_p) != 1) {
            fprintf(stderr, "Error reading entry %d\n", i);
            exit(EXIT_FAILURE);
        }
        /* Exact comparison is intentional: matching serial/parallel runs
         * are expected to produce bit-identical output. */
        if (value_s != value_p) {
            printf("Index %d: %.2lf != %.2lf\n", i, value_s, value_p);
        }
    }

    fclose(file_s);
    fclose(file_p);
}
/* Entry point: takes one argument, the common filename prefix, and
 * compares <prefix>_serial.txt against <prefix>_parallel.txt. */
int main(int argc, char *argv[]) {
    char file_s[256];
    char file_p[256];

    if (argc != 2) {
        fprintf(stderr, "Usage: %s <partial_filename>\n", argv[0]);
        return EXIT_FAILURE;
    }

    /* Derive both result-file names from the shared prefix. */
    snprintf(file_s, sizeof file_s, "%s_serial.txt", argv[1]);
    snprintf(file_p, sizeof file_p, "%s_parallel.txt", argv[1]);

    compare_files(file_s, file_p);
    return EXIT_SUCCESS;
}

@ -0,0 +1,117 @@
#!/bin/bash
# Variables definition
terminal_width=$(tput cols 2>/dev/null || echo 80)    # terminal width; fall back to 80 when stdout is not a tty
separator=$(printf "%0.s=" $(seq 1 "$terminal_width")) # separator line matching the terminal width
flag=0                                   # set to 1 when a missing module had to be loaded
log_file_name="psblas_test_results.log"  # aggregated results log written in the top-level directory
base_dir=$(pwd)                          # directory the script was launched from

# ANSI color codes used to tag log levels
GREEN="\033[0;32m"
RED="\033[0;31m"
BLUE="\033[0;34m"
YELLOW="\033[33m"
RESET="\033[0m"
# Print the given text centered in the terminal (green), using the
# globally computed terminal_width.
center_text() {
    local msg="$1"
    # left-pad so the text ends at the midpoint plus half its own length
    local pad=$(( (${#msg} + terminal_width) / 2 ))
    printf "\033[0;32m%*s\033[0m\n" "$pad" "$msg"
}
# Script start
clear

# Welcome message
echo -e "${GREEN}${separator}${RESET}"
center_text "PSBLAS Computational Routines Test Suite"
echo -e "${GREEN}${separator}${RESET}"
center_text "Welcome to the PSBLAS Computational Routines Test Suite!"
center_text "This script compares the results of serial and parallel computations"
center_text "for all the computational routines documented on the version 3.9 of PSBLAS."
echo -e "${GREEN}${separator}${RESET}"

echo -e "${BLUE}[INFO]\t Starting environment check for required modules...${RESET}"

# Check and load required modules; persist any newly loaded module in ~/.bashrc
required_modules=("gnu/12.2.1-sys" "mpich/4.2.2" "cuda/12.5")
for module in "${required_modules[@]}"; do
    if ! module list 2>&1 | grep -q "$module"; then
        echo -e "${YELLOW}[WARNING] Module not found, loading $module${RESET}"
        module load "$module"
        flag=1
        if ! grep -q "module load $module" "$HOME/.bashrc"; then
            # fix: original message interpolated an undefined $bashrc variable
            echo -e "[INFO]\t Adding 'module load $module' to $HOME/.bashrc..."
            echo "module load $module" >> "$HOME/.bashrc"
        fi
    else
        echo -e "[INFO]\t Found module $module."
    fi
done

# Update .bashrc if necessary
if [ $flag -eq 1 ]; then
    echo -e "[INFO]\t Reloading $HOME/.bashrc..."
    source "$HOME/.bashrc"
fi

# Warn when the script is executed rather than sourced: module changes made
# here will not persist in the caller's shell.  The original test
# `[ "$$" -eq "$PPID" ]` compared this process' PID with its parent's and
# was never true; BASH_SOURCE vs $0 is the standard sourced-detection idiom.
if [ "${BASH_SOURCE[0]}" = "$0" ]; then
    echo -e "${YELLOW}[WARNING] Modules loaded in this script will not persist after the script finishes.${RESET}"
    echo -e "${YELLOW}[WARNING] Run the script using 'source autotest.sh' to make the changes persist.${RESET}"
fi
echo -e "${BLUE}[INFO]\t Environment check for required modules completed.${RESET}"
echo ""
# Iterate through first-layer subdirectories and run each kernel's autotest.sh
for dir in "$base_dir"/*/; do
    # Guard against an unmatched glob (no subdirectories at all); the
    # original check `[ "$dir" = "." ]` could never match an absolute path.
    [ -d "$dir" ] || continue

    echo -e "${BLUE}${separator}${RESET}"
    echo -e "${BLUE}[INFO]\t Entering directory: $(pwd)/$(basename "$dir")${RESET}"

    ( # execute in a subshell so the cd does not affect the outer loop
        cd "$dir" || exit

        # Check if autotest.sh exists before executing it
        if [ -f autotest.sh ]; then
            chmod +x autotest.sh
            ./autotest.sh
        else
            echo -e "${YELLOW}[WARNING] autotest.sh not found in $(pwd). Skipping $(basename "$dir") kernel${RESET}"
        fi

        # Append contents of any .log file in the subdirectory to the main log file
        log_files=$(find . -maxdepth 1 -type f -name "*.log")
        if [ -n "$log_files" ]; then
            for log_file in $log_files; do
                cat "$log_file" >> "../${log_file_name}"
                echo ' ' >> "../${log_file_name}"
            done
        else
            echo -e "${YELLOW}[WARNING] No .log files found in $(pwd). Skipping log append.${RESET}"
        fi
    )

    # Back in base_dir here: the cd above happened inside the subshell
    echo -e "${BLUE}[INFO]\t Leaving directory: $(pwd)/$(basename "$dir")${RESET}"
    echo -e "${BLUE}${separator}${RESET}"
    echo ""
done

echo -e "${BLUE}[INFO]\t Finished processing all subdirectories.${RESET}"
echo -e "${GREEN}[INFO]\t All tests completed successfully. Results are logged in ${log_file_name}.${RESET}"

@ -0,0 +1,56 @@
cmake_minimum_required(VERSION 3.10)
project(pargen Fortran)

# Path to an installed PSBLAS; must be supplied by the user.
if(NOT DEFINED PSBLAS_INSTALL_DIR)
  message(FATAL_ERROR "Please specify the path to the psblas installation directory using -DPSBLAS_INSTALL_DIR=<path>")
endif()

# Layout of the PSBLAS installation tree
set(INSTALLDIR "${PSBLAS_INSTALL_DIR}")
set(INCDIR "${INSTALLDIR}/include")
set(MODDIR "${INSTALLDIR}/modules")
set(LIBDIR "${INSTALLDIR}/lib")

# Find the psblas package (provides the psblas::* imported targets)
find_package(psblas REQUIRED PATHS ${INSTALLDIR})

# Executables are collected under ./runs
set(EXEDIR "${CMAKE_CURRENT_SOURCE_DIR}/runs")
file(MAKE_DIRECTORY ${EXEDIR})

# Source file used in all the targets
set(COMMON_SOURCE getp.f90)

# Libraries every sample links against
set(PSBLAS_LIBS psblas::util psblas::linsolve psblas::prec psblas::base)

# One sample per arithmetic kind (s: real, d: double, c: complex, z: double
# complex).  Each target psb_Xf_sample is built from getp.f90 + psb_Xf_sample.f90;
# include paths are target-scoped instead of directory-wide include_directories().
foreach(target IN ITEMS psb_sf_sample psb_df_sample psb_cf_sample psb_zf_sample)
  add_executable(${target} ${COMMON_SOURCE} ${target}.f90)
  target_include_directories(${target} PRIVATE ${INCDIR} ${MODDIR})
  target_link_libraries(${target} PRIVATE ${PSBLAS_LIBS})
  set_target_properties(${target} PROPERTIES
    RUNTIME_OUTPUT_DIRECTORY ${EXEDIR}
  )
endforeach()

@ -2,10 +2,10 @@ cmake_minimum_required(VERSION 3.10)
project(HelloWorld Fortran)
# Accept a user-defined library path
set(LIBRARY_DIR "" CACHE PATH "Path to the library directory")
set(PSBLAS_INSTALL_DIR "" CACHE PATH "Path to the library directory")
# Check if the user provided a library directory
if(NOT LIBRARY_DIR)
if(NOT PSBLAS_INSTALL_DIR)
message(FATAL_ERROR "Library directory not specified! Use -DPSBLAS_INSTALL_DIR=path/to/library")
endif()
@ -13,7 +13,7 @@ endif()
include(CMakePackageConfigHelpers)
# Find the package
find_package(psblas REQUIRED PATHS ${LIBRARY_DIR}/lib/cmake/psblas NO_DEFAULT_PATH)
find_package(psblas REQUIRED PATHS ${PSBLAS_INSTALL_DIR}/lib/cmake/psblas NO_DEFAULT_PATH)
# Check if the package was found
if(NOT psblas_FOUND)
@ -21,7 +21,7 @@ if(NOT psblas_FOUND)
endif()
# Include directories for the library
include_directories(${LIBRARY_DIR}/include) # Path to header files
include_directories(${PSBLAS_INSTALL_DIR}/include) # Path to header files
include_directories(${psblas_DIR}/modules) # Path to module files
message(STATUS "Library directory: ${psblas_DIR}")

@ -22,10 +22,10 @@ contains
! 0 0
! declaration of VA,IA,JA
integer(psb_ipk_) :: nnz=2
integer(psb_ipk_) :: m=2
integer(psb_ipk_) :: nnz=2 ! non zero
integer(psb_ipk_) :: m=2 !
integer(psb_ipk_) :: k=2
integer(psb_ipk_) :: IA(2)=(/1, 1/)
integer(psb_ipk_) :: IA(2)=(/1, 1/) ! coordinate representation
integer(psb_ipk_) :: JA(2)=(/1, 2/)
real*8 :: VA(2)=(/1, 1/)
real*8 :: x(2)=(/1, 1/)! reference x
@ -41,20 +41,20 @@ contains
goto 9999
endif
call psb_barrier(ctxt)
call psb_cdall(ctxt,desc_a,info,nl=m)
call psb_cdall(ctxt,desc_a,info,nl=m) ! specify index space m. Init desc_a
if (info /= psb_success_)goto 9996
call psb_spall(a,desc_a,info,nnz=nnz)
call psb_spall(a,desc_a,info,nnz=nnz) ! Init matrix a
if (info /= psb_success_)goto 9996
call psb_barrier(ctxt)
call psb_spins(nnz,IA,JA,VA,a,desc_a,info)
call psb_spins(nnz,IA,JA,VA,a,desc_a,info) ! insert nnz values VA into matrix a in coordinates (IA, JA). Representation is given by the number of parameters: either COO or CSR. This one is COO
if (info /= psb_success_)goto 9996
call psb_cdasb(desc_a,info)
call psb_cdasb(desc_a,info) ! assemble the communication descriptor
if (info /= psb_success_)goto 9996
call psb_spasb(a,desc_a,info,afmt=afmt)
call psb_spasb(a,desc_a,info,afmt=afmt) ! "broadcast" the generated matrix. After this it can be used. It should also resolve halo issues. afmt indicates the required format
if(info.ne.0)print *,"matrix assembly failed"
if(info.ne.0)goto 9996
call psb_spmm(alpha,A,x,beta,y,desc_a,info,transa)
call psb_spmm(alpha,A,x,beta,y,desc_a,info,transa) ! Sparse Matrix Dense Vector Multiplication: alpha*A*x + beta*y.
if(info.ne.0)print *,"psb_spmm failed"
if(info.ne.0)goto 9996
do i=1,2

Loading…
Cancel
Save