Alert.png The wiki is deprecated and due to be decommissioned by the end of September 2022.
The content is being migrated to other platforms; new updates will be ignored and lost.
If needed, you can get in touch with the EGI SDIS team at operations@egi.eu.

Difference between revisions of "MPI Certification Job template"

From EGIWiki
Jump to navigation Jump to search
Line 17: Line 17:
  #Requirements = other.GlueCEInfoLRMSType == "PBS" || other.GlueCEInfoLRMSType == "LSF";
  #Requirements = other.GlueCEInfoLRMSType == "PBS" || other.GlueCEInfoLRMSType == "LSF";
  Requirements  = other.GlueCEInfoHostName == "unipa-ce-01.pa.pi2s2.it";
  Requirements  = other.GlueCEInfoHostName == "unipa-ce-01.pa.pi2s2.it";
$ less hello.c
#include <stdio.h>
#include <mpi.h>
/*
 * MPI "hello world" certification test: every rank reports its rank,
 * the name of the processor it runs on, and the total number of ranks
 * in MPI_COMM_WORLD.
 */
int main(int argc, char *argv[]) {
  int numprocs, rank, namelen;
  char processor_name[MPI_MAX_PROCESSOR_NAME];
  MPI_Init(&argc, &argv);                       /* must precede all other MPI calls */
  MPI_Comm_size(MPI_COMM_WORLD, &numprocs);     /* total number of processes */
  MPI_Comm_rank(MPI_COMM_WORLD, &rank);         /* this process's rank */
  MPI_Get_processor_name(processor_name, &namelen);
  printf("Process %d on %s out of %d\n", rank, processor_name, numprocs);
  MPI_Finalize();
  return 0;  /* explicit success status; implicit return from main is C99+ only */
}

Revision as of 10:49, 16 December 2010

MPI Certification Job

Examples of the required files:

$ less newmpi.jdl 
# JDL for the MPI certification job: runs "hello" (built from hello.c)
# through the mpi-start wrapper with the MPICH flavour on 2 CPUs.
JobType = "Normal";
# Number of MPI processes requested for the job.
CPUNumber = 2;
Executable = "mpi-start-wrapper.sh";
# Wrapper arguments: <executable name> <MPI flavour>.
Arguments = "hello MPICH";
StdOutput = "mpi-test.out";
StdError = "mpi-test.err";
# Files shipped with the job; the wrapper compiles hello.c on the worker node.
InputSandbox = {"mpi-start-wrapper.sh","mpi-hooks.sh","hello.c"};
OutputSandbox = {"mpi-test.err","mpi-test.out"};
# Alternative requirement expressions, kept commented out for reference:
#Requirements =
#  Member("MPICH", other.GlueHostApplicationSoftwareRunTimeEnvironment)
 # && Member("OPENMPI", other.GlueHostApplicationSoftwareRunTimeEnvironment);
#Requirements = other.GlueCEInfoLRMSType == "PBS" || other.GlueCEInfoLRMSType == "LSF";
# Pin the job to a specific CE host for this certification run.
Requirements  = other.GlueCEInfoHostName == "unipa-ce-01.pa.pi2s2.it";


$ less hello.c
#include <stdio.h>
#include <mpi.h>

/*
 * MPI "hello world" used for site certification. Each process prints
 * its rank, the processor name it executes on, and the size of
 * MPI_COMM_WORLD, then shuts MPI down cleanly.
 */
int main(int argc, char *argv[]) {
  int numprocs, rank, namelen;
  char processor_name[MPI_MAX_PROCESSOR_NAME];

  MPI_Init(&argc, &argv);                    /* initialize the MPI runtime first */
  MPI_Comm_size(MPI_COMM_WORLD, &numprocs);  /* how many ranks in total */
  MPI_Comm_rank(MPI_COMM_WORLD, &rank);      /* which rank am I */
  MPI_Get_processor_name(processor_name, &namelen);

  printf("Process %d on %s out of %d\n", rank, processor_name, numprocs);

  MPI_Finalize();
  return 0;  /* pre-C99 main without a return has an indeterminate exit status */
}