LCOV - code coverage report
Current view: top level - types - types_mpimat.F90 (source / functions)
Test: combined.info          Lines:     133 hit / 215 total (61.9 %)
Date: 2019-09-08 04:53:50    Functions:  11 hit /  17 total (64.7 %)

          Line data    Source code
       1             : !--------------------------------------------------------------------------------
       2             : ! Copyright (c) 2016 Peter Grünberg Institut, Forschungszentrum Jülich, Germany
       3             : ! This file is part of FLEUR and available as free software under the conditions
       4             : ! of the MIT license as expressed in the LICENSE file in more detail.
       5             : !--------------------------------------------------------------------------------
       6             : 
       7             : MODULE m_types_mpimat
       8             :   USE m_judft
       9             :   USE m_types_rcmat
      10             :   IMPLICIT NONE
      11             :   PRIVATE
      12             :   INTEGER,PARAMETER    :: DEFAULT_BLOCKSIZE=64
      13             :   INTEGER, PARAMETER   :: dlen_=9
      14             : 
       15             :   !<This data type extends the basic t_mat for distributed matrices.
       16             :   !<
       17             :   !<It stores the additional MPI communicator and sets up a BLACS grid for the matrix.
       18             :   !<This can be used to perform ScaLAPACK calls on the matrix with little additional input.
       19             :   !<The copy procedure is overwritten from t_mat to also enable redistribution of the matrix (see the usage sketch after the type definition below).
      20             :   TYPE  t_blacsdata
       21             :      INTEGER:: no_use                          !> reference count: number of t_mpimat objects sharing this blacsdata
       22             :      INTEGER:: mpi_com                          !> MPI communicator over which the matrix is distributed
      23             :      INTEGER:: blacs_desc(dlen_)                !> blacs descriptor
       24             :      !> 1: descriptor type, always =1 for a dense block-cyclic matrix (see the descinit sketch below)
      25             :      !> 2: context
      26             :      !> 3,4: global matrix size
      27             :      !> 5,6: block sizes
       28             :      !> 7,8: row/column of the process grid holding the first row/column of the matrix
      29             :      !> 9: leading dimension of local matrix
      30             :      INTEGER:: npcol,nprow                     !> the number of columns/rows in the processor grid
      31             :      INTEGER:: mycol,myrow
      32             :   END TYPE t_blacsdata
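                      : 
                      :   ! Editor's sketch (not part of the original source): blacs_desc is the standard ScaLAPACK
                      :   ! array descriptor, normally filled by descinit; a minimal, hedged illustration with
                      :   ! hypothetical arguments:
                      :   !
                      :   !   SUBROUTINE example_make_desc(ictxt,m,n,mb,nb,lld,desc)
                      :   !     INTEGER,INTENT(IN)  :: ictxt,m,n,mb,nb,lld
                      :   !     INTEGER,INTENT(OUT) :: desc(dlen_)
                      :   !     INTEGER :: info
                      :   !     EXTERNAL descinit
                      :   !     ! afterwards: desc(1)=1, desc(2)=ictxt, desc(3)=m, desc(4)=n,
                      :   !     !             desc(5)=mb, desc(6)=nb, desc(7)=0, desc(8)=0, desc(9)=lld
                      :   !     CALL descinit(desc,m,n,mb,nb,0,0,ictxt,lld,info)
                      :   !     IF (info/=0) CALL judft_error("descinit failed")
                      :   !   END SUBROUTINE example_make_desc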
      33             :   
      34             :   
      35             :   TYPE,EXTENDS(t_mat):: t_mpimat
       36             :      INTEGER                   :: global_size1,global_size2        !> global size of the full matrix
      37             :      TYPE(t_blacsdata),POINTER :: blacsdata
      38             :    CONTAINS
       39             :      PROCEDURE   :: copy => mpimat_copy     !<overwritten from t_mat, also performs redistribution
       40             :      PROCEDURE   :: move => mpimat_move     !<overwritten from t_mat, also performs redistribution
       41             :      PROCEDURE   :: free => mpimat_free     !<overwritten from t_mat, takes care of the BLACS grids
       42             :      PROCEDURE   :: multiply => mpimat_multiply  !<overwritten from t_mat, performs the distributed (ScaLAPACK) multiplication
       43             :      PROCEDURE   :: init_details => mpimat_init
       44             :      PROCEDURE   :: init_template => mpimat_init_template     !<overwritten from t_mat, also calls alloc in t_mat
       45             :      PROCEDURE   :: add_transpose => mpimat_add_transpose !<overwritten from t_mat
       46             :      PROCEDURE   :: generate_full_matrix    ! construct the full matrix if only the upper triangle of a hermitian matrix is given
      47             :      PROCEDURE   :: print_matrix
      48             :      PROCEDURE   :: from_non_dist
      49             :      FINAL :: finalize, finalize_1d, finalize_2d, finalize_3d
      50             :   END TYPE t_mpimat
      51             :   
      52             :   PUBLIC t_mpimat
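                      : 
                      :   ! Editor's usage sketch (hypothetical driver, not part of the original source; it assumes
                      :   ! an MPI/ScaLAPACK build with CPP_SCALAPACK defined and the FLEUR type modules available):
                      :   !
                      :   !   PROGRAM t_mpimat_usage
                      :   !     USE m_types_mpimat
                      :   !     IMPLICIT NONE
                      :   !     INCLUDE 'mpif.h'
                      :   !     TYPE(t_mpimat)    :: a,b,c
                      :   !     INTEGER           :: ierr
                      :   !     INTEGER,PARAMETER :: n=1000
                      :   !     CALL MPI_INIT(ierr)
                      :   !     ! 2D block-cyclic distribution over all ranks (module default blocksize 64)
                      :   !     CALL a%init_details(l_real=.FALSE.,matsize1=n,matsize2=n,mpi_subcom=MPI_COMM_WORLD,l_2d=.TRUE.)
                      :   !     CALL b%init_details(l_real=.FALSE.,matsize1=n,matsize2=n,mpi_subcom=MPI_COMM_WORLD,l_2d=.TRUE.)
                      :   !     CALL c%init_details(l_real=.FALSE.,matsize1=n,matsize2=n,mpi_subcom=MPI_COMM_WORLD,l_2d=.TRUE.)
                      :   !     a%data_c=(1.0,0.0); b%data_c=(0.0,1.0)   ! fill the local blocks
                      :   !     CALL a%multiply(b,c)                     ! c = a*b via p?gemm; res is mandatory in the MPI case
                      :   !     CALL c%free(); CALL b%free(); CALL a%free()
                      :   !     CALL MPI_FINALIZE(ierr)
                      :   !   END PROGRAM t_mpimat_usage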
      53             : 
      54             : CONTAINS
      55             : 
      56           0 :   SUBROUTINE mpimat_multiply(mat1,mat2,res)
      57             :     CLASS(t_mpimat),INTENT(INOUT)     :: mat1
      58             :     CLASS(t_mat),INTENT(IN)           :: mat2
      59             :     CLASS(t_mat),INTENT(OUT),OPTIONAL :: res
      60             : 
      61             : #ifdef CPP_SCALAPACK
      62           0 :     TYPE(t_mpimat)::m,r
      63           0 :     IF (.NOT.PRESENT(res)) CALL judft_error("BUG: in mpicase the multiply requires the optional result argument")
      64             :     SELECT TYPE(mat2)
      65             :     TYPE IS (t_mpimat)
      66           0 :        SELECT TYPE(res)
      67             :        TYPE is (t_mpimat)
      68           0 :           CALL m%init(mat1,mat2%global_size1,mat2%global_size2)
      69           0 :           CALL m%copy(mat2,1,1)
      70           0 :           CALL r%init(mat1,res%global_size1,res%global_size2)
      71           0 :           IF (mat1%l_real) THEN
      72             :              CALL pdgemm('N','N',mat1%global_size1, m%global_size2,mat1%global_size2, 1.0, &
      73             :                   mat1%data_r, 1,1,mat1%blacsdata%blacs_desc, &
      74             :                   m%data_r, 1,1,m%blacsdata%blacs_desc,0.0, &
      75           0 :                   r%data_r, 1,1,r%blacsdata%blacs_desc )
      76             :           ELSE
      77             :              CALL pzgemm('N','N',mat1%global_size1, m%global_size2,mat1%global_size2, CMPLX(1.0,0.0), &
      78             :                   mat1%data_c, 1,1,mat1%blacsdata%blacs_desc, &
      79             :                   m%data_c, 1,1,m%blacsdata%blacs_desc,CMPLX(0.0,0.0), &
      80           0 :                   r%data_c, 1,1,r%blacsdata%blacs_desc )
      81             :           ENDIF
      82           0 :           CALL res%copy(r,1,1)
      83           0 :           CALL r%free()
      84           0 :           CALL m%free()
      85             :        CLASS default
      86           0 :           CALL judft_error("BUG in mpimat%multiply")
      87             :        END SELECT
      88             :     CLASS default
      89           0 :        CALL judft_error("BUG in mpimat%multiply")
      90             :     END SELECT
      91             : #endif    
      92           0 :   END SUBROUTINE mpimat_multiply
      93             :           
      94             : 
      95             :     
      96             : 
      97           0 :   SUBROUTINE print_matrix(mat,fileno)
      98             :     CLASS(t_mpimat),INTENT(INOUT) ::mat
      99             :     INTEGER:: fileno
     100             : 
     101             : #ifdef CPP_SCALAPACK
     102             :     INCLUDE 'mpif.h'
     103             :     INTEGER,EXTERNAL:: indxl2g
     104             :     CHARACTER(len=10)::filename
     105             :     INTEGER :: irank,isize,i,j,npr,npc,r,c,tmp,err,status(MPI_STATUS_SIZE) 
     106             : 
     107           0 :     CALL MPI_COMM_RANK(mat%blacsdata%mpi_com,irank,err)
     108           0 :     CALL MPI_COMM_SIZE(mat%blacsdata%mpi_com,isize,err)
     109             : 
     110           0 :     tmp=0
     111             : 
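                      :     ! Editor's note: the receive/send pair below passes a token along the ranks, so each rank
                      :     ! appends its local block to the file only after its predecessor has finished, serializing
                      :     ! the writes across the communicator.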
     112           0 :     IF (irank>0) CALL MPI_RECV(tmp,1,MPI_INTEGER,irank-1,0,mat%blacsdata%mpi_com,status,err) !lock
     113           0 :     WRITE(filename,"(a,i0)") "out.",fileno
     114           0 :     OPEN(fileno,file=filename,access='append')
     115             :     
     116           0 :     CALL blacs_gridinfo(mat%blacsdata%blacs_desc(2),npr,npc,r,c)
     117           0 :     DO i=1,mat%matsize1
     118           0 :        DO j=1,mat%matsize2
     119           0 :           IF (mat%l_real) THEN
     120           0 :              WRITE(fileno,"(5(i0,1x),2(f10.5,1x))") irank,i,j,indxl2g(i,mat%blacsdata%blacs_desc(5),r,0,npr),&
     121           0 :                   indxl2g(j,mat%blacsdata%blacs_desc(6),c,0,npc),mat%data_r(i,j)
     122             :           ELSE
     123           0 :              WRITE(fileno,"(5(i0,1x),2(f10.5,1x))") irank,i,j,indxl2g(i,mat%blacsdata%blacs_desc(5),r,0,npr),&
     124           0 :                   indxl2g(j,mat%blacsdata%blacs_desc(6),c,0,npc),mat%data_c(i,j)
     125             :           END IF
     126             :        ENDDO
     127             :     ENDDO
     128           0 :     CLOSE(fileno)
     129           0 :     IF (irank+1<isize) CALL MPI_SEND(tmp,1,MPI_INTEGER,irank+1,0,mat%blacsdata%mpi_com,err)
     130             :     
     131             : #endif    
     132           0 :   END SUBROUTINE print_matrix
     133             : 
     134        1768 :   SUBROUTINE generate_full_matrix(mat)
     135             :     CLASS(t_mpimat),INTENT(INOUT) ::mat
     136             :     
     137             :     INTEGER :: i,j,i_glob,j_glob,myid,err,np
     138        1768 :     COMPLEX,ALLOCATABLE:: tmp_c(:,:)
     139        1768 :     REAL,ALLOCATABLE   :: tmp_r(:,:)
     140             : #ifdef CPP_SCALAPACK
     141             :     INCLUDE 'mpif.h'
     142             :     INTEGER, EXTERNAL    :: numroc, indxl2g  !SCALAPACK functions
     143             : 
     144        1768 :     CALL MPI_COMM_RANK(mat%blacsdata%mpi_com,myid,err)
     145        1768 :     CALL MPI_COMM_SIZE(mat%blacsdata%mpi_com,np,err)
      146             :     !Set the strict lower triangle to zero and halve the diagonal
     147             :  
     148      526668 :     DO i=1,mat%matsize1
     149   108565802 :        DO j=1,mat%matsize2
      150             :           ! Map the local indices (i,j) to the global indices (i_glob,j_glob)
      151             :           ! of this element in the block-cyclic distribution
     152   108039134 :           i_glob = indxl2g(i,     mat%blacsdata%blacs_desc(5), mat%blacsdata%myrow, 0, mat%blacsdata%nprow)
     153   108039134 :           j_glob = indxl2g(j,     mat%blacsdata%blacs_desc(6), mat%blacsdata%mycol, 0, mat%blacsdata%npcol)
     154             : 
     155   108039134 :           IF (i_glob>j_glob) THEN
     156    53888342 :              IF (mat%l_real) THEN
     157    37308516 :                 mat%data_r(i,j) = 0.0
     158             :              ELSE
     159    16579826 :                 mat%data_c(i,j) = 0.0
     160             :              ENDIF
     161             :           ENDIF
     162   108564034 :           IF (i_glob==j_glob) THEN
     163      262450 :              IF (mat%l_real) THEN
     164      121198 :                 mat%data_r(i,j) =  mat%data_r(i,j)/2.0
     165             :              ELSE
     166      141252 :                 mat%data_c(i,j) =  mat%data_c(i,j)/2.0
     167             :              ENDIF
     168             :           ENDIF
     169             :        ENDDO
     170             :     ENDDO
     171             : 
     172        1768 :     IF (mat%l_real) THEN
     173         420 :        ALLOCATE(tmp_r(mat%matsize1,mat%matsize2))
     174         420 :        tmp_r=mat%data_r
     175             :     ELSE
     176        1348 :        ALLOCATE(tmp_c(mat%matsize1,mat%matsize2))
     177        1348 :        tmp_c=mat%data_c
     178             :     END IF
     179        1768 :     CALL MPI_BARRIER(mat%blacsdata%mpi_com,i)
      180        1768 :     IF (mat%l_real) THEN
     181             : #ifdef CPP_SCALAPACK          
     182             : 
     183         420 :        CALL pdgeadd('t',mat%global_size1,mat%global_size2,1.0,tmp_r,1,1,mat%blacsdata%blacs_desc,1.0,mat%data_r,1,1,mat%blacsdata%blacs_desc)
     184             :     ELSE
     185        1348 :        CALL pzgeadd('c',mat%global_size1,mat%global_size2,CMPLX(1.0,0.0),tmp_c,1,1,mat%blacsdata%blacs_desc,CMPLX(1.0,0.0),mat%data_c,1,1,mat%blacsdata%blacs_desc)
     186             : #endif
     187             :     END IF
     188             : 
     189             : 
     190             : #endif
     191        1768 :   END SUBROUTINE generate_full_matrix
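                      : 
                      :   ! Editor's note (not part of the original source): generate_full_matrix rebuilds the full
                      :   ! Hermitian matrix from its upper triangle by zeroing the strict lower triangle, halving the
                      :   ! diagonal, and then adding the conjugate transpose via p?geadd, i.e. A = T + T^H with T the
                      :   ! halved-diagonal upper triangle. A minimal serial sketch of the same idea (hypothetical,
                      :   ! plain Fortran, no ScaLAPACK):
                      :   !
                      :   !   COMPLEX :: a(n,n), t(n,n)   ! a holds valid data only in its upper triangle
                      :   !   INTEGER :: j
                      :   !   t = a
                      :   !   DO j = 1, n
                      :   !      t(j+1:n,j) = (0.0,0.0)   ! zero the strict lower triangle
                      :   !      t(j,j)     = t(j,j)/2.0  ! halve the diagonal so it is not counted twice
                      :   !   END DO
                      :   !   a = t + CONJG(TRANSPOSE(t)) ! serial analogue of pzgeadd('c',...)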
     192             : 
     193             : 
     194        1328 :   SUBROUTINE mpimat_add_transpose(mat,mat1)
     195             :     CLASS(t_mpimat),INTENT(INOUT) ::mat
     196             :     CLASS(t_mat),INTENT(INOUT) ::mat1
     197             : 
     198             :     INTEGER:: i,ii,n_size,n_rank
     199             : 
     200             :     SELECT TYPE(mat1)
     201             :     TYPE IS (t_mpimat)
     202             : #ifdef CPP_MPI    
     203        1328 :     CALL MPI_COMM_RANK(mat%blacsdata%mpi_com,n_rank,i)
     204        1328 :     CALL MPI_COMM_SIZE(mat%blacsdata%mpi_com,n_size,i)
     205             : #endif
     206             :     !Set lower part of matrix to zero...
     207        1328 :        ii=0
     208       68064 :        DO i=n_rank+1,MIN(mat%global_size1,mat%global_size2),n_size
     209       66736 :           ii=ii+1
     210       68064 :           IF (mat%l_real) THEN
     211           0 :              mat%data_r(i+1:,ii)=0.0
     212           0 :              mat1%data_r(i+1:,ii)=0.0
     213             :           ELSE
     214       66736 :              mat%data_c(i+1:,ii)=0.0
     215       66736 :              mat1%data_c(i+1:,ii)=0.0
     216             :           ENDIF
     217             :        ENDDO
     218        1328 :        IF (mat%l_real) THEN
     219             : #ifdef CPP_SCALAPACK          
     220             : 
     221           0 :        CALL pdgeadd('t',mat1%global_size1,mat1%global_size2,1.0,mat1%data_r,1,1,mat1%blacsdata%blacs_desc,1.0,mat%data_r,1,1,mat%blacsdata%blacs_desc)
     222             :     ELSE
      223        1328 :        CALL pzgeadd('c',mat1%global_size1,mat1%global_size2,CMPLX(1.0,0.0),mat1%data_c,1,1,mat1%blacsdata%blacs_desc,CMPLX(1.0,0.0),mat%data_c,1,1,mat%blacsdata%blacs_desc)
     224             : #endif
     225             :     END IF
     226             :     !Now multiply the diagonal of the matrix by 1/2
     227             : 
     228        1328 :     ii=0
     229       70720 :     DO i=n_rank+1,MIN(mat%global_size1,mat%global_size2),n_size
     230       66736 :        ii=ii+1
     231       68064 :        IF (mat%l_real) THEN
     232           0 :           mat%data_r(i,ii)=mat%data_r(i,ii)/2
     233             :        ELSE
     234       66736 :           mat%data_c(i,ii)=mat%data_c(i,ii)/2
     235             :        END IF
     236             :     ENDDO
     237             :     CLASS default
     238           0 :        CALL judft_error("Inconsistent types in t_mpimat_add_transpose")
     239             :     END SELECT
     240             :     
     241        1328 :   END SUBROUTINE mpimat_add_transpose
     242             : 
     243        5308 :   SUBROUTINE mpimat_copy(mat,mat1,n1,n2)
     244             :     IMPLICIT NONE
     245             :     CLASS(t_mpimat),INTENT(INOUT)::mat
     246             :     CLASS(t_mat),INTENT(IN)      ::mat1
     247             :     INTEGER,INTENT(IN) ::n1,n2
     248             : #ifdef CPP_SCALAPACK
     249             :     SELECT TYPE(mat1)
     250             :     TYPE IS(t_mpimat)
     251        5308 :        IF (mat%l_real) THEN
     252         630 :           CALL pdgemr2d(Mat1%global_size1,mat1%global_size2,mat1%data_r,1,1,mat1%blacsdata%blacs_desc,mat%data_r,n1,n2,mat%blacsdata%blacs_desc,mat%blacsdata%blacs_desc(2))
     253             :        ELSE
     254        4678 :           CALL pzgemr2d(mat1%global_size1,mat1%global_size2,mat1%data_c,1,1,mat1%blacsdata%blacs_desc,mat%data_c,n1,n2,mat%blacsdata%blacs_desc,mat%blacsdata%blacs_desc(2))
     255             :        END IF
     256             :     CLASS DEFAULT
     257           0 :        CALL judft_error("Wrong datatype in copy")
     258             :     END SELECT
     259             : #endif    
     260        5308 :   END SUBROUTINE mpimat_copy
     261             : 
     262           0 :   SUBROUTINE from_non_dist(mat,mat1)
     263             :     IMPLICIT NONE
     264             :     CLASS(t_mpimat),INTENT(INOUT)::mat
     265             :     TYPE(t_mat),INTENT(IN)       ::mat1
     266             : 
     267             :     INTEGER:: blacs_desc(9),irank,ierr,umap(1,1),np
     268             : #ifdef CPP_SCALAPACK
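                      :     ! Editor's note: the descriptor below describes mat1 as one single, undistributed block
                      :     ! (block size = global size, lld = matsize1). A 1x1 BLACS grid holding only the process
                      :     ! with BLACS id 0 is then created via BLACS_GET/BLACS_GRIDMAP, so that p?gemr2d can
                      :     ! scatter the serial matrix onto the distributed grid of mat.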
     269           0 :     blacs_desc=(/1,-1,mat1%matsize1,mat1%matsize2,mat1%matsize1,mat1%matsize2,0,0,mat1%matsize1/)
     270             : 
     271           0 :     CALL MPI_COMM_RANK(mat%blacsdata%mpi_com,irank,ierr)
     272           0 :     umap(1,1)=0
     273           0 :     CALL BLACS_GET(mat%blacsdata%blacs_desc(2),10,blacs_desc(2))
     274           0 :     CALL BLACS_GRIDMAP(blacs_desc(2),umap,1,1,1)
     275           0 :     IF (mat%l_real) THEN
     276           0 :        CALL pdgemr2d(Mat1%matsize1,mat1%matsize2,mat1%data_r,1,1,blacs_desc,mat%data_r,1,1,mat%blacsdata%blacs_desc,mat%blacsdata%blacs_desc(2))
     277             :     ELSE
     278           0 :        CALL pzgemr2d(mat1%matsize1,mat1%matsize2,mat1%data_c,1,1,blacs_desc,mat%data_c,1,1,mat%blacsdata%blacs_desc,mat%blacsdata%blacs_desc(2))
     279             :     END IF
     280             : #endif    
     281           0 :   END SUBROUTINE from_non_dist
     282             :     
     283             : 
     284             : 
     285         440 :   SUBROUTINE mpimat_move(mat,mat1)
     286             :     IMPLICIT NONE
     287             :     CLASS(t_mpimat),INTENT(INOUT)::mat
     288             :     CLASS(t_mat),INTENT(INOUT)   ::mat1
     289         440 :     CALL mat%copy(mat1,1,1)
     290         440 :   END SUBROUTINE mpimat_move
     291             : 
     292           0 :   SUBROUTINE finalize(mat)
     293             :     IMPLICIT NONE
     294             :     TYPE(t_mpimat),INTENT(INOUT) :: mat
     295        3536 :     CALL mpimat_free(mat)
     296           0 :   END SUBROUTINE finalize
     297             : 
     298           0 :   SUBROUTINE finalize_1d(mat)
     299             :     IMPLICIT NONE
     300             :     
     301             :     TYPE(t_mpimat),INTENT(INOUT) :: mat(:)
     302             :     INTEGER                      :: i
     303           0 :     DO i = 1,size(mat)
     304           0 :        CALL mpimat_free(mat(i))
     305             :     ENDDO
     306           0 :   END SUBROUTINE finalize_1d
     307             : 
     308        1768 :   SUBROUTINE finalize_2d(mat)
     309             :     IMPLICIT NONE
     310             :     
     311             :     TYPE(t_mpimat),INTENT(INOUT) :: mat(:,:)
     312             :     INTEGER                      :: i,j
     313             : 
     314        4864 :     DO i = 1,size(mat, dim=1)
     315       10616 :        DO j = 1,size(mat, dim=2)
     316        8848 :           CALL mpimat_free(mat(i,j))
     317             :       ENDDO
     318             :     ENDDO
     319        1768 :   END SUBROUTINE finalize_2d
     320             :   
     321           0 :   SUBROUTINE finalize_3d(mat)
     322             :     IMPLICIT NONE
     323             :     
     324             :     TYPE(t_mpimat),INTENT(INOUT) :: mat(:,:,:)
     325             :     INTEGER                      :: i,j,k
     326             : 
     327           0 :     DO i = 1,size(mat, dim=1)
     328           0 :        DO j = 1,size(mat, dim=2)
     329           0 :          DO k = 1,size(mat, dim=3)
     330           0 :              CALL mpimat_free(mat(i,j,k))
     331             :          ENDDO
     332             :       ENDDO
     333             :     ENDDO
     334           0 :   END SUBROUTINE finalize_3d
     335             : 
     336       17692 :   SUBROUTINE mpimat_free(mat)
     337             :     IMPLICIT NONE
     338             :     CLASS(t_mpimat),INTENT(INOUT) :: mat
     339             :     INTEGER :: ierr
     340       17692 :     IF (ALLOCATED(mat%data_r)) DEALLOCATE(mat%data_r)
     341       17692 :     IF (ALLOCATED(mat%data_c)) DEALLOCATE(mat%data_c)
     342       17692 :     IF (ASSOCIATED(mat%blacsdata)) THEN
     343        9288 :        IF (mat%blacsdata%no_use>1) THEN
     344        4644 :           mat%blacsdata%no_use=mat%blacsdata%no_use-1
     345        4644 :           mat%blacsdata=>null()
     346             :        ELSE
     347             : #ifdef CPP_SCALAPACK    
     348        4644 :           CALL BLACS_GRIDEXIT(mat%blacsdata%blacs_desc(2),ierr)
     349        4644 :           DEALLOCATE(mat%blacsdata)
     350             : #endif
     351             :        END IF
     352             :     ENDIF
     353       17692 :   END SUBROUTINE mpimat_free
     354             : 
     355             :   !>Initialization of the distributed matrix.
     356             :   !!
     357             :   !! The argument l_2d controls the kind of distribution used:
      358             :   !!  - TRUE: the matrix uses a two-dimensional ScaLAPACK block-cyclic distribution
      359             :   !!  - FALSE: the matrix is distributed in a one-dimensional column-cyclic distribution with blocksize 1,
      360             :   !!    as used in the parallel matrix setup of FLEUR (see the sketch after this routine)
     361        4644 :   SUBROUTINE mpimat_init(mat,l_real,matsize1,matsize2,mpi_subcom,l_2d,nb_x,nb_y)
     362             :     IMPLICIT NONE
     363             :     CLASS(t_mpimat)             :: mat
     364             :     INTEGER,INTENT(IN),OPTIONAL :: matsize1,matsize2,mpi_subcom
     365             :     LOGICAL,INTENT(IN),OPTIONAL :: l_real,l_2d
     366             :     INTEGER,INTENT(IN),OPTIONAL :: nb_y,nb_x
     367             : #ifdef CPP_SCALAPACK    
     368             :     INTEGER::nbx,nby,irank,ierr
     369             :     include 'mpif.h'
     370        4644 :     nbx=DEFAULT_BLOCKSIZE; nby=DEFAULT_BLOCKSIZE
     371        4644 :     IF (PRESENT(nb_x)) nbx=nb_x
     372        4644 :     IF (PRESENT(nb_y)) nby=nb_y
     373        4644 :     IF (.NOT.(PRESENT(matsize1).AND.PRESENT(matsize2).AND.PRESENT(mpi_subcom).AND.PRESENT(l_real).AND.PRESENT(l_2d)))&
     374           0 :          CALL judft_error("Optional arguments must be present in mpimat_init")
     375        4644 :     mat%global_size1=matsize1
     376        4644 :     mat%global_size2=matsize2
     377        4644 :     ALLOCATE(mat%blacsdata)
     378        4644 :     mat%blacsdata%no_use=1
     379             :     CALL priv_create_blacsgrid(mpi_subcom,l_2d,matsize1,matsize2,nbx,nby,&
     380        4644 :          mat%blacsdata,mat%matsize1,mat%matsize2)
     381        4644 :     mat%blacsdata%mpi_com=mpi_subcom
     382        4644 :     CALL mat%alloc(l_real) !Attention,sizes determined in call to priv_create_blacsgrid
     383             :     !check if this matrix is actually distributed over MPI_COMM_SELF
     384        4644 :     IF (mpi_subcom==MPI_COMM_SELF) THEN
     385           0 :        CALL MPI_COMM_RANK(mpi_subcom,irank,ierr)
     386           0 :        IF (irank>0) mat%blacsdata%blacs_desc(2)=-1
     387             :     END IF
     388             : #endif    
     389        4644 :   END SUBROUTINE mpimat_init
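                      : 
                      :   ! Editor's sketch (hypothetical calls, not part of the original source), illustrating the two
                      :   ! distributions selected by l_2d as documented above:
                      :   !
                      :   !   ! 2D ScaLAPACK block-cyclic distribution, 64x64 blocks on an (almost) square process grid
                      :   !   CALL mat%init_details(l_real=.TRUE.,matsize1=n,matsize2=n,mpi_subcom=comm,l_2d=.TRUE.)
                      :   !   ! 1D column-cyclic distribution with blocksize 1 on a 1 x np grid, as used in the
                      :   !   ! parallel matrix setup of FLEUR
                      :   !   CALL mat%init_details(l_real=.TRUE.,matsize1=n,matsize2=n,mpi_subcom=comm,l_2d=.FALSE.)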
     390             : 
     391        4644 :   SUBROUTINE mpimat_init_template(mat,templ,global_size1,global_size2)
     392             :     IMPLICIT NONE
     393             :     CLASS(t_mpimat),INTENT(INOUT)  :: mat
     394             :     CLASS(t_mat),INTENT(IN)        :: templ
     395             :     INTEGER,INTENT(IN),OPTIONAL    :: global_size1,global_size2
     396             : 
     397             :     INTEGER::numroc
     398             :     EXTERNAL::numroc
     399             :     
     400             :     SELECT TYPE(templ)
     401             :     TYPE IS (t_mpimat)
     402        4644 :        mat%l_real=templ%l_real
     403        4644 :        IF (PRESENT(global_size1).AND.PRESENT(global_size2)) THEN
     404           0 :           ALLOCATE(mat%blacsdata)
     405           0 :           mat%blacsdata=templ%blacsdata
     406           0 :           mat%blacsdata%no_use=1
     407           0 :           mat%blacsdata%blacs_desc(3)=global_size1
     408           0 :           mat%blacsdata%blacs_desc(4)=global_size2
     409           0 :           mat%global_size1=global_size1
     410           0 :           mat%global_size2=global_size2
     411             : #ifdef CPP_SCALAPACK    
     412           0 :           mat%matsize1=NUMROC( global_size1,mat%blacsdata%blacs_desc(5), mat%blacsdata%myrow, mat%blacsdata%blacs_desc(7), mat%blacsdata%nprow )
      413           0 :           mat%matsize2=NUMROC( global_size2,mat%blacsdata%blacs_desc(6), mat%blacsdata%mycol, mat%blacsdata%blacs_desc(8), mat%blacsdata%npcol )
     414             : #endif    
     415             :        ELSE
     416        4644 :           mat%matsize1=templ%matsize1
     417        4644 :           mat%matsize2=templ%matsize2
     418        4644 :           mat%global_size1=templ%global_size1
     419        4644 :           mat%global_size2=templ%global_size2
     420        4644 :           mat%blacsdata=>templ%blacsdata
     421        4644 :           mat%blacsdata%no_use=mat%blacsdata%no_use+1
     422             :        ENDIF
     423        9288 :        CALL mat%alloc()
     424             :       
     425             :        CLASS default
     426           0 :           CALL judft_error("Mixed initialization in t_mpimat not possible(BUG)")
     427             :     END SELECT
     428        4644 :   END SUBROUTINE mpimat_init_template
     429             : 
     430             :     
     431        4644 :   SUBROUTINE priv_create_blacsgrid(mpi_subcom,l_2d,m1,m2,nbc,nbr,blacsdata,local_size1,local_size2)
     432             :     IMPLICIT NONE
     433             :     INTEGER,INTENT(IN) :: mpi_subcom
     434             :     INTEGER,INTENT(IN) :: m1,m2
     435             :     INTEGER,INTENT(INOUT)::nbc,nbr
     436             :     LOGICAL,INTENT(IN) :: l_2d
     437             :     type(t_blacsdata),INTENT(OUT)::blacsdata
     438             :     INTEGER,INTENT(OUT):: local_size1,local_size2
     439             :    
     440             : #ifdef CPP_SCALAPACK
     441             :     INCLUDE 'mpif.h'
     442             :     INTEGER     :: myrowssca,mycolssca
     443             :     INTEGER     :: iamblacs,npblacs,np,myid,mycol,myrow
     444             :     INTEGER     :: nprow2,npcol2
     445             :     INTEGER     :: k,i,j,ictextblacs
     446             :     INTEGER     :: ierr
     447             : 
     448        4644 :     INTEGER,ALLOCATABLE :: iblacsnums(:),ihelp(:),iusermap(:,:)
     449             : 
     450             :     EXTERNAL descinit, blacs_get
     451             :     EXTERNAL blacs_pinfo, blacs_gridinit
     452             :  
     453             :     !Determine rank and no of processors
     454        4644 :     CALL MPI_COMM_RANK(mpi_subcom,myid,ierr)
     455        4644 :     CALL MPI_COMM_SIZE(mpi_subcom,np,ierr)
     456             : 
     457             : 
     458             :     ! compute processor grid, as square as possible
      459             :     ! If it cannot be made square, the grid gets more columns than rows
     460        4644 :     IF (l_2d) THEN
     461         884 :        distloop: DO j=INT(SQRT(REAL(np))),1,-1
     462           0 :           IF ( (np/j) * j == np) THEN
     463         884 :              blacsdata%npcol = np/j
     464         884 :              blacsdata%nprow = j
     465         884 :              EXIT distloop
     466             :           ENDIF
     467             :        ENDDO distloop
     468             :     ELSE
     469        3760 :        nbc=1
     470        3760 :        nbr=MAX(m1,m2)
     471        3760 :        blacsdata%npcol=np
     472        3760 :        blacsdata%nprow=1
     473             :     ENDIF
     474        4644 :     ALLOCATE(iblacsnums(np),ihelp(np),iusermap(blacsdata%nprow,blacsdata%npcol))
     475             : 
     476             :     !   A blacsdata%nprow*blacsdata%npcol processor grid will be created
     477             :     !   Row and column index myrow, mycol of this processor in the grid
     478             :     !   and distribution of A and B in ScaLAPACK
     479             :     !   The local processor will get myrowssca rows and mycolssca columns
     480             :     !   of A and B
     481             :     !
     482             : 
     483        4644 :     myrow = myid/blacsdata%npcol  ! my row number in the BLACS blacsdata%nprow*blacsdata%npcol grid
     484        4644 :     mycol = myid -(myid/blacsdata%npcol)*blacsdata%npcol  ! my column number in the BLACS blacsdata%nprow*blacsdata%npcol grid
     485             :     !
     486             :     !  Now allocate Asca to put the elements of Achi or receivebuffer to
     487             :     !
     488        4644 :     myrowssca=(m1-1)/(nbr*blacsdata%nprow)*nbr+ MIN(MAX(m1-(m1-1)/(nbr*blacsdata%nprow)*nbr*blacsdata%nprow-nbr*myrow,0),nbr)
     489             :     !     Number of rows the local process gets in ScaLAPACK distribution
     490        4644 :     mycolssca=(m2-1)/(nbc*blacsdata%npcol)*nbc+ MIN(MAX(m2-(m2-1)/(nbc*blacsdata%npcol)*nbc*blacsdata%npcol-nbc*mycol,0),nbc)
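                      :     ! Editor's note: the two expressions above are the expanded block-cyclic local-extent
                      :     ! formula for source process (0,0); they should be equivalent to the ScaLAPACK calls
                      :     ! NUMROC(m1,nbr,myrow,0,blacsdata%nprow) and NUMROC(m2,nbc,mycol,0,blacsdata%npcol).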
     491             :   
     492             : 
     493             :     !Get BLACS ranks for all MPI ranks
     494        4644 :     CALL BLACS_PINFO(iamblacs,npblacs)  ! iamblacs = local process rank (e.g. myid)
     495             :     ! npblacs  = number of available processes
     496       13932 :     iblacsnums=-2
     497       13932 :     ihelp=-2
     498        4644 :     ihelp(myid+1)=iamblacs ! Get the Blacs id corresponding to the MPI id
     499        4644 :     CALL MPI_ALLREDUCE(ihelp, iblacsnums, np,MPI_INTEGER,MPI_MAX,mpi_subcom,ierr)
     500        4644 :     IF (ierr.NE.0) STOP 'Error in allreduce for BLACS nums' 
     501             : 
     502             :     !     iblacsnums(i) is the BLACS-process number of MPI-process i-1
     503        4644 :     k = 1
     504        9288 :     DO i = 1, blacsdata%nprow 
     505       18576 :        DO j = 1, blacsdata%npcol  
     506        9288 :           iusermap(i,j) = iblacsnums(k)
     507       13932 :           k = k + 1
     508             :        ENDDO
     509             :     ENDDO
     510             : !#ifdef CPP_BLACSDEFAULT    
     511             :     !Get the Blacs default context
     512        4644 :     CALL BLACS_GET(0,0,ictextblacs)
     513             : !#else
     514             : !    ictextblacs=mpi_subcom
     515             : !#endif    
     516             :     ! Create the Grid
     517        4644 :     CALL BLACS_GRIDMAP(ictextblacs,iusermap,size(iusermap,1),blacsdata%nprow,blacsdata%npcol)
      518             :     !     Now check whether the BLACS grid is the one we wanted
     519        4644 :     CALL BLACS_GRIDINFO(ictextblacs, nprow2,npcol2,blacsdata%myrow,blacsdata%mycol)
     520        4644 :     IF (nprow2 /= blacsdata%nprow) THEN
     521           0 :        WRITE(6,*) 'Wrong number of rows in BLACS grid'
     522           0 :        WRITE(6,*) 'nprow=',blacsdata%nprow,' nprow2=',nprow2
     523           0 :        call judft_error('Wrong number of rows in BLACS grid')
     524             :     ENDIF
     525        4644 :     IF (npcol2 /= blacsdata%npcol) THEN
     526           0 :        WRITE(6,*) 'Wrong number of columns in BLACS grid'
     527           0 :        WRITE(6,*) 'npcol=',blacsdata%npcol,' npcol2=',npcol2
     528           0 :        call judft_error('Wrong number of columns in BLACS grid')
     529             : 
     530             :     ENDIF
     531             : 
     532             :     !Create the descriptors
     533        4644 :     CALL descinit(blacsdata%blacs_desc,m1,m2,nbr,nbc,0,0,ictextblacs,myrowssca,ierr)
     534        4644 :     IF (ierr /=0 ) call judft_error('Creation of BLACS descriptor failed')
     535        4644 :     local_size1=myrowssca
     536        4644 :     local_size2=mycolssca
     537             : #endif
     538        4644 :   END SUBROUTINE priv_create_blacsgrid
     539       33168 : END MODULE m_types_mpimat

Generated by: LCOV version 1.13