Index: main_mpi.F
===================================================================
--- main_mpi.F  (revision)
+++ main_mpi.F  (working copy)
@@ -55,7 +55,35 @@
       LOGICAL     LVCAIMAGES
       REAL(q)     VCAIMAGES
 
+#ifdef VASP_MPMD
+      LOGICAL LMPMD
+      INTEGER mpmd_group_number
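+! LMPMD (read from INCAR) switches on MPMD mode: this VASP instance then
+! runs as a potential server for an external client (see mpi.F)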
+      LMPMD=.FALSE.
+      CALL RDATAB(.TRUE.,'INCAR',IO%IU5,'LMPMD','=','#',';','L', &
+     &            IDUM,RDUM,CDUM,LMPMD,CHARAC,N,1,IERR)
+      IF (((IERR/=0).AND.(IERR/=3)).OR.((IERR==0).AND.(N<1))) THEN
+        IF (IO%IU0>=0) WRITE(IO%IU0,*)'Error reading item ''LMPMD'' from file INCAR.'
+        STOP
+      ENDIF
+
+      IF (LMPMD) THEN
+        mpmd_group_number = 0
+      ELSE
+        mpmd_group_number = -1
+      ENDIF
+
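+! a group number of -1 makes M_init fall back to MPI_COMM_WORLD;
+! otherwise mpmd_mpi_init overwrites it with the index of this group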
+      CALL M_init(COMM_WORLD, mpmd_group_number)
+      IF (LMPMD) THEN
+        WRITE (DIR_APP, "('vasp',I3.3,'/')") mpmd_group_number
+        DIR_LEN=LEN_TRIM(DIR_APP)
+      ENDIF
+#else
       CALL M_init(COMM_WORLD)
+#endif
 
       VCAIMAGES=-1
       CALL RDATAB(IO%LOPEN,INCAR,IO%IU5,'VCAIMAGES','=','#',';','F', &
@@ -290,9 +318,20 @@
          STOP
       ENDIF
 
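+! in MPMD mode DIR_APP was already derived from the group number above;
+! otherwise it is determined by the position in the image chain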
+#ifdef VASP_MPMD
+      IF (LMPMD) THEN
+        IF (IO%IU0>=0) WRITE (IO%IU0,'(A,A)') "MPMD: output directory ", DIR_APP(1:DIR_LEN)
+      ELSE
+        node=COMM_CHAIN%NODE_ME
+        CALL MAKE_DIR_APP(node)
+      ENDIF
+#else
       node=COMM_CHAIN%NODE_ME
 
       CALL MAKE_DIR_APP(node)
+#endif
 
 ! if all nodes should write (giving complete mess) do not use the
 ! following line
Index: mpi.F
===================================================================
--- mpi.F   (revision)
+++ mpi.F   (working copy)
@@ -87,6 +87,9 @@
         INTEGER NODE_ME           ! node id starting from 1 ... NCPU
         INTEGER IONODE            ! node which has to do IO (set to 0 for no IO)
         INTEGER NCPU              ! total number of proc in this communicator
+#ifdef VASP_MPMD
+        INTEGER mpmd_client_rank  ! world rank of the client driving this group
+#endif
       END TYPE
 
 ! Standard MPI include file.
@@ -116,6 +119,179 @@
 #endif
 
       CONTAINS
+
+#ifdef VASP_MPMD
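+!----------------------------------------------------------------------
+!
+! mpmd_mpi_init: split MPI_COMM_WORLD into one communicator per client
+! so that each block of potential (server) ranks serves a single client
+!
+!----------------------------------------------------------------------
+!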
+      SUBROUTINE mpmd_mpi_init(mpmd_mpi_comm, mpmd_group_number, mpmd_client_rank)
+      IMPLICIT NONE
+      INTEGER, INTENT(OUT) :: mpmd_mpi_comm, mpmd_group_number, mpmd_client_rank
+      INTEGER :: i, j, ierr, process_type, irank, isize
+      INTEGER :: npotentials, nclients
+      INTEGER :: potential_group_size
+      INTEGER :: orig_group, new_group, new_comm, my_comm
+      INTEGER, ALLOCATABLE, DIMENSION(:) :: process_types
+      INTEGER, ALLOCATABLE, DIMENSION(:) :: potential_ranks
+      CHARACTER (LEN=16) nclients_str
+#if defined(CLIENT_MPI)
+      INTEGER, ALLOCATABLE, DIMENSION(:) :: client_ranks
+#endif
+
+      CALLMPI(MPI_Barrier(MPI_COMM_WORLD, ierr))
+
+      CALLMPI(MPI_Comm_rank(MPI_COMM_WORLD, irank, ierr))
+      CALLMPI(MPI_Comm_size(MPI_COMM_WORLD, isize, ierr))
+
+      ALLOCATE(process_types(isize))
+      ALLOCATE(potential_ranks(0:isize-1))
+#if defined(CLIENT_MPI)
+      ALLOCATE(client_ranks(0:isize-1))
+      WRITE(*,'(A,2I4)') 'mpi started: iproc_world,nproc_world ',irank,isize
+#endif
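+! every rank announces its process type to all others
+! (1 = client, 2 = potential server); VASP ranks are always servers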
+      process_type = 2
+      CALLMPI(MPI_Allgather(process_type, 1, MPI_INTEGER, process_types, 1, MPI_INTEGER, MPI_COMM_WORLD, ierr))
+#ifndef CLIENT_MPI
+      CALL GETENV('EON_NUMBER_OF_CLIENTS', nclients_str)
+      IF (LEN_TRIM(nclients_str) .GT. 0) THEN
+        READ(nclients_str,*) nclients
+      ELSE
+        nclients = 1
+      ENDIF
+#else
+      nclients = 0
+      j = 0
+      DO i=1,isize
+        IF (process_types(i) .EQ. 1) THEN
+          client_ranks(j) = i-1
+          j = j + 1
+          nclients = nclients + 1
+        ENDIF
+      ENDDO
+      WRITE(*,'(A,I4)') 'nclients ',nclients
+      CALLMPI(MPI_Comm_group(MPI_COMM_WORLD, orig_group, ierr))
+      CALLMPI(MPI_Group_incl(orig_group, nclients, client_ranks, new_group, ierr))
+      CALLMPI(MPI_Comm_create(MPI_COMM_WORLD, new_group, new_comm, ierr))
+#endif
+
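+! collect the world ranks of all potential (server) processes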
+      npotentials = 0
+      j = 0
+      DO i=1,isize
+        IF (process_types(i) .EQ. 2) THEN
+          potential_ranks(j) = i-1
+          j = j + 1
+          npotentials = npotentials + 1 
+        ENDIF
+      ENDDO
+
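+! divide the servers evenly among the clients (this assumes that
+! npotentials is an integer multiple of nclients)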
+      potential_group_size = npotentials/nclients
+
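+! build one communicator per client from consecutive blocks of server
+! ranks; each rank keeps the communicator of the block it falls in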
+      DO i=1,nclients
+        CALLMPI(MPI_Comm_group(MPI_COMM_WORLD, orig_group, ierr))
+        CALLMPI(MPI_Group_incl(orig_group, potential_group_size, potential_ranks((i-1)*potential_group_size:), new_group, ierr))
+        CALLMPI(MPI_Comm_create(MPI_COMM_WORLD, new_group, new_comm, ierr))
+        IF (new_comm /= MPI_COMM_NULL) THEN
+          my_comm = new_comm
+          mpmd_group_number = i-1
+        ENDIF
+      ENDDO
+
+      mpmd_mpi_comm = my_comm
+
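+! only the group root talks to the client and writes the POSCAR file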
+      CALLMPI(MPI_Comm_rank(my_comm, irank, ierr))
+      IF (irank==0) THEN
+        CALL mpmd_recv_poscar(mpmd_group_number, mpmd_client_rank)
+        WRITE(*,*) "finished mpmd_mpi_init"
+      ENDIF
+      CALLMPI(MPI_Barrier(my_comm, ierr))
+
+      END SUBROUTINE mpmd_mpi_init
+
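+!----------------------------------------------------------------------
+!
+! mpmd_recv_poscar: receive a structure from the client and write it
+! to vaspNNN/POSCAR in Cartesian coordinates with selective dynamics
+!
+!----------------------------------------------------------------------
+!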
+      SUBROUTINE mpmd_recv_poscar(mpmd_group_number, mpmd_client_rank)
+      IMPLICIT NONE
+      INTEGER, INTENT(IN) :: mpmd_group_number
+      INTEGER, INTENT(OUT) :: mpmd_client_rank
+      INTEGER :: N, pbc, i, j, ierr, prev_type
+      INTEGER,ALLOCATABLE,DIMENSION(:) :: atomicNrs, ntype
+      INTEGER,DIMENSION(1024) :: icwd
+      REAL(q),ALLOCATABLE,DIMENSION(:) :: R
+      REAL(q),DIMENSION(9) :: box
+      CHARACTER(LEN=20) :: poscar_file 
+
+      INTEGER stat(MPI_STATUS_SIZE)
+
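+! wait for the first message from any client; its source rank
+! identifies the client this group will serve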
+      CALLMPI(MPI_Probe(MPI_ANY_SOURCE, MPI_ANY_TAG, MPI_COMM_WORLD, stat, ierr))
+      mpmd_client_rank = stat(MPI_SOURCE)
+
+      CALLMPI(MPI_Recv(N, 1, MPI_INTEGER, mpmd_client_rank, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE, ierr))
+
+      ALLOCATE(atomicNrs(N))
+      ALLOCATE(R(3*N))
+      ALLOCATE(ntype(N))
+      
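+! message sequence from the client: atomic numbers, coordinates, box
+! vectors, periodicity flag, and its working directory (as yet unused)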
+      CALLMPI(MPI_Recv(atomicNrs, N, MPI_INTEGER, mpmd_client_rank, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE, ierr))
+      CALLMPI(MPI_Recv(R, 3*N, MPI_DOUBLE_PRECISION, mpmd_client_rank, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE, ierr))
+      CALLMPI(MPI_Recv(box, 9, MPI_DOUBLE_PRECISION, mpmd_client_rank, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE, ierr))
+      CALLMPI(MPI_Recv(pbc, 1, MPI_INTEGER, mpmd_client_rank, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE, ierr))
+      CALLMPI(MPI_Recv(icwd, 1024, MPI_INTEGER, mpmd_client_rank, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE, ierr))
+
+      WRITE(poscar_file, "('vasp',I3.3,'/POSCAR')") mpmd_group_number
+      OPEN(UNIT=30,FILE=poscar_file, ACCESS='SEQUENTIAL', STATUS='REPLACE')
+
+      WRITE(30,*) ' '
+      WRITE(30,'(F18.10)') 1.0
+      WRITE(30,'(3F18.10)') box(1), box(2), box(3)
+      WRITE(30,'(3F18.10)') box(4), box(5), box(6)
+      WRITE(30,'(3F18.10)') box(7), box(8), box(9)
+
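+! count the atoms of each species; this assumes the client sends the
+! atoms grouped by atomic number, as the POSCAR format requires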
+      ntype=0
+      prev_type=atomicNrs(1)
+      j=1
+      DO i=1,N
+        IF (atomicNrs(i) /= prev_type) THEN
+          j = j+1 
+        ENDIF
+        ntype(j) = ntype(j) + 1 
+        prev_type = atomicNrs(i)
+      ENDDO
+
+      WRITE(30, *) ntype(1:j)
+      WRITE(30, "('Selective dynamics')")
+      WRITE(30, "('Cartesian')")
+
+      DO i=1,N
+        WRITE(30,"(3F18.10,' T T T')") R(3*(i-1)+1), R(3*(i-1)+2), R(3*(i-1)+3)
+      ENDDO
+
+      CLOSE(30)
+
+      END SUBROUTINE mpmd_recv_poscar
+#endif
 !----------------------------------------------------------------------
 !
 ! M_init: initialise the basic communications
@@ -123,12 +299,20 @@
 !
 !----------------------------------------------------------------------
 !
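+! in MPMD mode an additional argument carries the group number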
-      SUBROUTINE M_init( COMM)
+#ifdef VASP_MPMD
+      SUBROUTINE M_init(COMM, mpmd_group_number)
+#else
+      SUBROUTINE M_init(COMM)
+#endif
       IMPLICIT NONE
       INCLUDE "pm.inc"
 
       TYPE(communic) COMM
       INTEGER i, ierror
+#ifdef VASP_MPMD
+      INTEGER mpmd_mpi_comm, mpmd_group_number, mpmd_client_rank
+#endif
 
       call MPI_init( ierror )
       IF ( ierror /= MPI_success ) THEN
@@ -140,7 +324,19 @@
 ! set only NCPU, NODE_ME and IONODE
 ! no internal setup done at this point
 !
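+! in MPMD mode a group number of -1 means "run as an ordinary job";
+! otherwise carve this group's communicator out of MPI_COMM_WORLD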
+#ifdef VASP_MPMD
+      IF (mpmd_group_number .EQ. -1) THEN
+        COMM%MPI_COMM= MPI_comm_world
+      ELSE
+        CALL mpmd_mpi_init(mpmd_mpi_comm, mpmd_group_number, mpmd_client_rank)
+        COMM%MPI_COMM=  mpmd_mpi_comm
+        COMM%mpmd_client_rank = mpmd_client_rank
+      ENDIF
+#else
       COMM%MPI_COMM= MPI_comm_world
+#endif
 
       call MPI_comm_rank( COMM%MPI_COMM, COMM%NODE_ME, ierror )
       IF ( ierror /= MPI_success ) &
