├── .gitignore ├── Makefile-java.include ├── Makefile.am ├── PROBLEMS.TXT ├── README ├── autogen.sh ├── collective ├── AllToAllW.java ├── Allgather.java ├── AllgatherInPlace.java ├── Allgatherv.java ├── AllgathervInPlace.java ├── Allreduce.java ├── AllreduceInPlace.java ├── Alltoall.java ├── Alltoallv.java ├── Barrier.java ├── Bcast.java ├── Exscan.java ├── ExscanInPlace.java ├── Gather.java ├── GatherInPlace.java ├── Gatherv.java ├── GathervInPlace.java ├── IAllToAllW.java ├── Iallgather.java ├── IallgatherInPlace.java ├── Iallgatherv.java ├── IallgathervInPlace.java ├── Iallreduce.java ├── IallreduceInPlace.java ├── Ialltoall.java ├── Ialltoallv.java ├── Ibarrier.java ├── Ibcast.java ├── Iexscan.java ├── IexscanInPlace.java ├── Igather.java ├── IgatherInPlace.java ├── Igatherv.java ├── IgathervInPlace.java ├── Ireduce.java ├── IreduceBig.java ├── IreduceComplexC.java ├── IreduceInPlace.java ├── IreduceLoc.java ├── IreduceScatter.java ├── IreduceScatterBlock.java ├── IreduceScatterBlockInPlace.java ├── IreduceScatterInPlace.java ├── Iscan.java ├── IscanInPlace.java ├── Iscatter.java ├── IscatterInPlace.java ├── Iscatterv.java ├── IscattervInPlace.java ├── Makefile ├── OpTest.java ├── Reduce.java ├── ReduceBig.java ├── ReduceComplexC.java ├── ReduceInPlace.java ├── ReduceLoc.java ├── ReduceScatter.java ├── ReduceScatterBlock.java ├── ReduceScatterBlockInPlace.java ├── ReduceScatterInPlace.java ├── Scan.java ├── ScanInPlace.java ├── Scatter.java ├── ScatterInPlace.java ├── Scatterv.java ├── ScattervInPlace.java └── todo │ ├── Alltoallw.java │ ├── BcastStruct.java │ ├── IbcastStruct.java │ ├── IstructGatherv.java │ ├── OmpitestError.java │ ├── OmpitestProgress.java │ └── StructGatherv.java ├── collective_intercomm ├── AllgatherInter.java ├── AllreduceInter.java ├── AlltoallInter.java ├── AlltoallvInter.java ├── BarrierInter.java ├── BcastInter.java ├── GatherInter.java ├── OmpitestError.java ├── OmpitestProgress.java ├── ReduceInter.java ├── ReduceScatterInter.java ├── ReduceScatterInter2.java ├── ScatterInter.java ├── ScattervInter.java ├── make_collective_intercomm └── todo │ ├── AlltoallwInter.java │ ├── OmpitestError.java │ └── OmpitestProgress.java ├── communicator ├── Attr.java ├── CommCreateGroup.java ├── CommDupWithInfo.java ├── Commdup.java ├── Commfree.java ├── Compare.java ├── InterComm.java ├── Mpisplit.java ├── OmpitestError.java ├── OmpitestProgress.java ├── make_communicator └── todo │ ├── OmpitestError.java │ ├── OmpitestProgress.java │ └── SelfAtexit.java ├── configure.ac ├── datatype ├── Bakstr.java ├── Getel.java ├── Loop.java ├── OmpitestError.java ├── OmpitestProgress.java ├── Paktest.java ├── Pptransp.java ├── Structsr.java ├── Structsr2.java ├── Transp.java ├── Transp2.java ├── Transp3.java ├── make_datatype └── todo │ ├── Bottom.java │ ├── Lbub.java │ ├── Lbub2.java │ ├── OmpitestError.java │ ├── OmpitestProgress.java │ ├── Strangest1.java │ ├── Transpa.java │ ├── Zero1.java │ ├── Zero2.java │ ├── Zero3.java │ ├── Zero5.java │ └── Zero6.java ├── dynamic ├── ClientServer.java ├── LoopChild.java ├── LoopSpawn.java ├── NiceMsgs.java ├── NoDisconnect.java ├── OmpitestError.java ├── OmpitestProgress.java ├── Spawn.java ├── SpawnMultiple.java ├── make_dynamic └── todo │ ├── CommJoin.java │ ├── OmpitestError.java │ └── OmpitestProgress.java ├── environment ├── Abort.java ├── Attrs.java ├── AttrsPrintValue.java ├── CommErrhandler.java ├── FileErrhandler.java ├── Final.java ├── Finalized.java ├── GetLibVersion.java ├── GetVersion.java ├── InitThread.java ├── 
InitThreadFunneled.java ├── InitThreadMultiple.java ├── InitThreadSerialized.java ├── Initialized.java ├── IsThrMain.java ├── OmpitestConfig.java ├── OmpitestError.java ├── OmpitestProgress.java ├── Pcontrol.java ├── Procname.java ├── QueryThread.java ├── WinErrhandler.java ├── Wtime.java ├── make_environment └── todo │ ├── Err.java │ ├── OmpitestConfig.java │ ├── OmpitestError.java │ └── OmpitestProgress.java ├── group ├── Compare.java ├── GroupTest.java ├── Groupfree.java ├── OmpitestConfig.java ├── OmpitestError.java ├── OmpitestProgress.java ├── Range.java └── make_group ├── info ├── Create00.java ├── Delete20.java ├── Get30.java ├── GetValuelen40.java ├── Getnkeys50.java ├── InfoEnv60.java ├── OmpitestError.java ├── OmpitestProgress.java ├── Set10.java └── make_info ├── io ├── FileAll.java ├── FileAtAll.java ├── FileAtomicity.java ├── FileStatusGetCount.java ├── OmpitestError.java ├── OmpitestProgress.java └── make_io ├── onesided ├── CAccumulate.java ├── CAccumulateAtomic.java ├── CCreate.java ├── CCreateDisp.java ├── CCreateInfo.java ├── CCreateInfoHalf.java ├── CCreateNoFree.java ├── CCreateSize.java ├── CFenceAsserts.java ├── CFencePut1.java ├── CFenceSimple.java ├── CFetchAndOp.java ├── CFlush.java ├── CGet.java ├── CGetBig.java ├── CLockIllegal.java ├── CPut.java ├── CPutBig.java ├── CRGet.java ├── CRPut.java ├── CReqops.java ├── CWinAttr.java ├── CWinErrhandler.java ├── OmpitestConfig.java ├── OmpitestError.java ├── OmpitestProgress.java ├── TestMpiRmaCompareAndSwap.java ├── WinAllocate.java ├── WinName.java └── make_onesided ├── performance ├── BcastPerformanceMsgSize.java ├── BcastPerformanceNumProc.java ├── Fedora │ ├── c_Fedora_net_100000_intValues.dat │ ├── c_Fedora_net_10000_intValues.dat │ ├── c_Fedora_net_2_tasks.dat │ ├── c_Fedora_net_4_tasks.dat │ ├── c_Fedora_net_50000_intValues.dat │ ├── c_Fedora_net_8_tasks.dat │ ├── c_Fedora_shm_100000_intValues.dat │ ├── c_Fedora_shm_10000_intValues.dat │ ├── c_Fedora_shm_2_tasks.dat │ ├── c_Fedora_shm_4_tasks.dat │ ├── c_Fedora_shm_50000_intValues.dat │ ├── c_Fedora_shm_8_tasks.dat │ ├── c_Fedora_x86_64_net_100000_intValues_cputime.dat │ ├── c_Fedora_x86_64_net_100000_intValues_elapsedtime.dat │ ├── c_Fedora_x86_64_net_10000_intValues_cputime.dat │ ├── c_Fedora_x86_64_net_10000_intValues_elapsedtime.dat │ ├── c_Fedora_x86_64_net_2_tasks_cputime.dat │ ├── c_Fedora_x86_64_net_2_tasks_elapsedtime.dat │ ├── c_Fedora_x86_64_net_4_tasks_cputime.dat │ ├── c_Fedora_x86_64_net_4_tasks_elapsedtime.dat │ ├── c_Fedora_x86_64_net_50000_intValues_cputime.dat │ ├── c_Fedora_x86_64_net_50000_intValues_elapsedtime.dat │ ├── c_Fedora_x86_64_net_8_tasks_cputime.dat │ ├── c_Fedora_x86_64_net_8_tasks_elapsedtime.dat │ ├── c_Fedora_x86_64_shm_100000_intValues_cputime.dat │ ├── c_Fedora_x86_64_shm_100000_intValues_elapsedtime.dat │ ├── c_Fedora_x86_64_shm_10000_intValues_cputime.dat │ ├── c_Fedora_x86_64_shm_10000_intValues_elapsedtime.dat │ ├── c_Fedora_x86_64_shm_2_tasks_cputime.dat │ ├── c_Fedora_x86_64_shm_2_tasks_elapsedtime.dat │ ├── c_Fedora_x86_64_shm_4_tasks_cputime.dat │ ├── c_Fedora_x86_64_shm_4_tasks_elapsedtime.dat │ ├── c_Fedora_x86_64_shm_50000_intValues_cputime.dat │ ├── c_Fedora_x86_64_shm_50000_intValues_elapsedtime.dat │ ├── c_Fedora_x86_64_shm_8_tasks_cputime.dat │ ├── c_Fedora_x86_64_shm_8_tasks_elapsedtime.dat │ ├── create_cputime_curves.sh │ ├── create_elapsedtime_curves.sh │ ├── fedora_msgSize_c_java_cputime.pdf │ ├── fedora_msgSize_c_java_cputime.plt │ ├── fedora_msgSize_c_java_elapsedtime.pdf │ ├── 
fedora_msgSize_c_java_elapsedtime.plt │ ├── fedora_numProc_c_java_cputime.pdf │ ├── fedora_numProc_c_java_cputime.plt │ ├── fedora_numProc_c_java_elapsedtime.pdf │ ├── fedora_numProc_c_java_elapsedtime.plt │ ├── java_Fedora_net_100000_intValues.dat │ ├── java_Fedora_net_10000_intValues.dat │ ├── java_Fedora_net_2_tasks.dat │ ├── java_Fedora_net_4_tasks.dat │ ├── java_Fedora_net_50000_intValues.dat │ ├── java_Fedora_net_8_tasks.dat │ ├── java_Fedora_shm_100000_intValues.dat │ ├── java_Fedora_shm_10000_intValues.dat │ ├── java_Fedora_shm_2_tasks.dat │ ├── java_Fedora_shm_4_tasks.dat │ ├── java_Fedora_shm_50000_intValues.dat │ ├── java_Fedora_shm_8_tasks.dat │ ├── java_Fedora_x86_64_net_100000_intValues_cputime.dat │ ├── java_Fedora_x86_64_net_100000_intValues_elapsedtime.dat │ ├── java_Fedora_x86_64_net_10000_intValues_cputime.dat │ ├── java_Fedora_x86_64_net_10000_intValues_elapsedtime.dat │ ├── java_Fedora_x86_64_net_2_tasks_cputime.dat │ ├── java_Fedora_x86_64_net_2_tasks_elapsedtime.dat │ ├── java_Fedora_x86_64_net_4_tasks_cputime.dat │ ├── java_Fedora_x86_64_net_4_tasks_elapsedtime.dat │ ├── java_Fedora_x86_64_net_50000_intValues_cputime.dat │ ├── java_Fedora_x86_64_net_50000_intValues_elapsedtime.dat │ ├── java_Fedora_x86_64_net_8_tasks_cputime.dat │ ├── java_Fedora_x86_64_net_8_tasks_elapsedtime.dat │ ├── java_Fedora_x86_64_shm_100000_intValues_cputime.dat │ ├── java_Fedora_x86_64_shm_100000_intValues_elapsedtime.dat │ ├── java_Fedora_x86_64_shm_10000_intValues_cputime.dat │ ├── java_Fedora_x86_64_shm_10000_intValues_elapsedtime.dat │ ├── java_Fedora_x86_64_shm_2_tasks_cputime.dat │ ├── java_Fedora_x86_64_shm_2_tasks_elapsedtime.dat │ ├── java_Fedora_x86_64_shm_4_tasks_cputime.dat │ ├── java_Fedora_x86_64_shm_4_tasks_elapsedtime.dat │ ├── java_Fedora_x86_64_shm_50000_intValues_cputime.dat │ ├── java_Fedora_x86_64_shm_50000_intValues_elapsedtime.dat │ ├── java_Fedora_x86_64_shm_8_tasks_cputime.dat │ └── java_Fedora_x86_64_shm_8_tasks_elapsedtime.dat ├── OSUBIBW.java ├── OSUBW.java ├── OSULatency.java ├── OSUMbwMr.java ├── bcastMsgSizeC.sh ├── bcastMsgSizeJava.sh ├── bcastNumProcC.sh ├── bcastNumProcJava.sh ├── bcastPerformanceMsgSize.c ├── bcastPerformanceNumProc.c ├── fedora_bcastMsgSizeC.sh ├── fedora_bcastMsgSizeJava.sh ├── fedora_bcastNumProcC.sh ├── fedora_bcastNumProcJava.sh └── gnuplot │ ├── c_Linux_100000_intValues.dat │ ├── c_Linux_10000_intValues.dat │ ├── c_Linux_2_tasks.dat │ ├── c_Linux_4_tasks.dat │ ├── c_Linux_50000_intValues.dat │ ├── c_Linux_8_tasks.dat │ ├── java_Linux_100000_intValues.dat │ ├── java_Linux_10000_intValues.dat │ ├── java_Linux_2_tasks.dat │ ├── java_Linux_4_tasks.dat │ ├── java_Linux_50000_intValues.dat │ ├── java_Linux_8_tasks.dat │ ├── linpc1_msgSize_c.plt │ ├── linpc1_msgSize_c_java.plt │ ├── linpc1_msgSize_java.plt │ ├── linpc1_numProc_c.plt │ ├── linpc1_numProc_c_java.plt │ ├── linpc1_numProc_java.plt │ ├── unix │ ├── linpc1_msgSize_c.plt │ ├── linpc1_msgSize_c_java.plt │ ├── linpc1_msgSize_java.plt │ ├── linpc1_numProc_c.plt │ ├── linpc1_numProc_c_java.plt │ └── linpc1_numProc_java.plt │ └── windows │ ├── linpc1_msgSize_c.plt │ ├── linpc1_msgSize_c_java.plt │ ├── linpc1_msgSize_java.plt │ ├── linpc1_numProc_c.plt │ ├── linpc1_numProc_c_java.plt │ └── linpc1_numProc_java.plt ├── pt2pt ├── Bsend.java ├── BsendFree.java ├── Free.java ├── Getcount.java ├── Improbe.java ├── Interf.java ├── Iprobe.java ├── Isend.java ├── Makefile.am ├── Mprobe.java ├── MprobeMpich.java ├── OmpitestError.java ├── OmpitestProgress.java ├── Probe.java ├── Rsend.java ├── 
Rsend2.java ├── Send.java ├── Send2.java ├── Sendrecv.java ├── SendrecvRep.java ├── Seq.java ├── Ssend.java ├── Start.java ├── Startall.java ├── Test1.java ├── Test2.java ├── Test3.java ├── Testall.java ├── Testany.java ├── Testsome.java ├── Waitall.java ├── Waitany.java ├── Waitnull.java ├── Waitsome.java ├── Wildcard.java ├── make_pt2pt └── todo │ ├── Allocmem.java │ ├── Buffer.java │ ├── OmpitestError.java │ └── OmpitestProgress.java ├── random ├── AttrErrorCode.java ├── OmpitestConfig.java ├── OmpitestError.java ├── OmpitestProgress.java ├── OpCommutative.java ├── ReduceLocal.java ├── Ticket_1944_BcastLoop.java ├── Ticket_1944_Test4.java ├── Ticket_1984_Littlehang.java ├── Ticket_2014_BasicSendRecv.java ├── make_random └── todo │ ├── AllocMem.java │ ├── OmpiAffinityStr.java │ ├── OmpitestConfig.java │ ├── OmpitestError.java │ ├── OmpitestProgress.java │ └── RingMmap.java ├── reporting ├── Makefile.am ├── OmpitestConfig.java.in ├── OmpitestError.java └── OmpitestProgress.java ├── request ├── GetStatus.java └── OmpitestError.java ├── status ├── OmpitestError.java ├── SetCancelled.java ├── SetElements.java └── SetElementsX.java ├── test ├── Makefile ├── OmpitestError.java ├── OmpitestProgress.java ├── TestCheckSize1.java ├── TestCheckSize2.java ├── TestCheckSize3.java ├── TestCheckSize4.java ├── TestError1.java ├── TestError2.java ├── TestError3.java ├── TestError4.java ├── TestNeedEven1.java ├── TestNeedEven2.java ├── TestNeedEven3.java ├── TestProgress.java ├── TestWarning1.java ├── TestWarning2.java ├── TestWarning3.java └── TestWarning4.java └── topology ├── Cart.java ├── Dimscreate.java ├── Distgraph1.java ├── Graph.java ├── OmpitestConfig.java ├── OmpitestError.java ├── OmpitestProgress.java ├── Sub.java ├── Sub2.java └── make_topology /.gitignore: -------------------------------------------------------------------------------- 1 | .libs 2 | .deps 3 | .libs 4 | .svn 5 | *.la 6 | *.lo 7 | *.o 8 | *.so 9 | *.a 10 | .dirstamp 11 | *.dSYM 12 | *.S 13 | *.loT 14 | *.orig 15 | *.rej 16 | *.class 17 | *.xcscheme 18 | *.plist 19 | *~ 20 | *\\# 21 | 22 | *.log 23 | *.trs 24 | *.mod 25 | 26 | Makefile 27 | Makefile.in 28 | 29 | configure 30 | config.log 31 | config.status 32 | autom4te.cache 33 | config 34 | aclocal.m4 35 | reporting/OmpitestConfig.java 36 | -------------------------------------------------------------------------------- /Makefile-java.include: -------------------------------------------------------------------------------- 1 | # -*- makefile -*- 2 | # 3 | # Copyright (c) 2013 Cisco Systems, Inc. All rights reserved. 4 | # $COPYRIGHT$ 5 | # 6 | # Additional copyrights may follow 7 | # 8 | # $HEADER$ 9 | # 10 | 11 | # A little verbosity magic; "make" will show the terse output. "make 12 | # V=1" will show the actual commands used (just like the other 13 | # Automake-generated compilation/linker rules). 
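# A sketch of how the variables below are typically consumed
# (illustrative only -- the rule and the JAVAC/JAVACFLAGS names are
# assumptions, not defined in this file): a compilation recipe is
# prefixed with $(OMPI_V_JAVAC).  With V=0 that prefix expands to the
# terse `@echo "  JAVAC " `basename $@`;` -- the leading "@" silences
# the whole recipe line, so only the short tag is printed.  With V=1
# it expands to nothing and make echoes the full javac command line.
# An empty V falls back to $(AM_DEFAULT_VERBOSITY).
#
#   %.class: %.java
#   	$(OMPI_V_JAVAC)$(JAVAC) $(JAVACFLAGS) $<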
14 | V=0 15 | 16 | OMPI_V_JAVAC = $(ompi__v_JAVAC_$V) 17 | ompi__v_JAVAC_ = $(ompi__v_JAVAC_$(AM_DEFAULT_VERBOSITY)) 18 | ompi__v_JAVAC_0 = @echo " JAVAC " `basename $@`; 19 | 20 | OMPI_V_JAVAH = $(ompi__v_JAVAH_$V) 21 | ompi__v_JAVAH_ = $(ompi__v_JAVAH_$(AM_DEFAULT_VERBOSITY)) 22 | ompi__v_JAVAH_0 = @echo " JAVAH " `basename $@`; 23 | 24 | OMPI_V_JAR = $(ompi__v_JAR_$V) 25 | ompi__v_JAR_ = $(ompi__v_JAR_$(AM_DEFAULT_VERBOSITY)) 26 | ompi__v_JAR_0 = @echo " JAR " `basename $@`; 27 | 28 | OMPI_V_MKDIR = $(ompi__v_MKDIR_$V) 29 | ompi__v_MKDIR_ = $(ompi__v_MKDIR_$(AM_DEFAULT_VERBOSITY)) 30 | ompi__v_MKDIR_0 = @echo " MKDIR " $@; 31 | 32 | -------------------------------------------------------------------------------- /Makefile.am: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2013 Cisco Systems, Inc. All rights reserved. 3 | # $COPYRIGHT$ 4 | # 5 | # Additional copyrights may follow 6 | # 7 | # $HEADER$ 8 | # 9 | 10 | SUBDIRS = test 11 | -------------------------------------------------------------------------------- /autogen.sh: -------------------------------------------------------------------------------- 1 | : 2 | autoreconf -ivf 3 | -------------------------------------------------------------------------------- /collective/Allgather.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "allgather.c" from the "ompi-ibm-10.0" 4 | * regression test package. The formatting of the code is 5 | * mainly the same as in the original file. 6 | * 7 | * 8 | * File: Allgather.java Author: S. Gross 9 | * 10 | */ 11 | 12 | import mpi.*; 13 | 14 | public class Allgather 15 | { 16 | private final static int MAXLEN = 1000; 17 | 18 | public static void main (String args[]) throws MPIException 19 | { 20 | int myself, tasks; 21 | int out[], in[]; 22 | 23 | MPI.Init(args); 24 | myself = MPI.COMM_WORLD.getRank(); 25 | tasks = MPI.COMM_WORLD.getSize(); 26 | 27 | in = new int[MAXLEN * tasks]; 28 | out = new int[MAXLEN]; 29 | for (int j = 1; j <= MAXLEN; j *= 10) { 30 | for (int i = 0; i < j; i++) { 31 | out[i] = myself; 32 | } 33 | MPI.COMM_WORLD.allGather(out, j, MPI.INT, in, j, MPI.INT); 34 | 35 | for (int i = 0; i < tasks; i++) { 36 | for (int k = 0; k < j; k++) { 37 | if (in[k + i * j] != i) { 38 | OmpitestError.ompitestError(OmpitestError.getFileName(), 39 | OmpitestError.getLineNumber(), 40 | "bad answer (" + in[k + i * j] + 41 | ") at index " + (k + i * j) + 42 | " of " + (j * tasks) + 43 | " (should be " + i + ")\n"); 44 | break; 45 | } 46 | } 47 | } 48 | } 49 | MPI.COMM_WORLD.barrier(); 50 | MPI.Finalize(); 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /collective/AllgatherInPlace.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "allgather_in_place.c" from the 4 | * "ompi-ibm-10.0" regression test package. The formatting of 5 | * the code is mainly the same as in the original file. 6 | * 7 | * 8 | * File: AllgatherInPlace.java Author: S. 
Gross 9 | * 10 | */ 11 | 12 | import mpi.*; 13 | 14 | public class AllgatherInPlace 15 | { 16 | private final static int MAXLEN = 1000; 17 | 18 | public static void main (String args[]) throws MPIException 19 | { 20 | int myself, tasks; 21 | int in[]; 22 | 23 | MPI.Init(args); 24 | myself = MPI.COMM_WORLD.getRank(); 25 | tasks = MPI.COMM_WORLD.getSize(); 26 | 27 | in = new int[MAXLEN * tasks]; 28 | for (int j = 1; j <= MAXLEN; j *= 10) { 29 | for (int i = 0; i < j; i++) { 30 | in[i + j * myself] = myself; 31 | } 32 | 33 | MPI.COMM_WORLD.allGather(in, j, MPI.INT); 34 | 35 | for (int i = 0; i < tasks; i++) { 36 | for (int k = 0; k < j; k++) { 37 | if (in[k + i * j] != i) { 38 | OmpitestError.ompitestError(OmpitestError.getFileName(), 39 | OmpitestError.getLineNumber(), 40 | "rank " + myself + 41 | " bad answer (" + in[k + i * j] + 42 | ") at index " + (k + i * j) + 43 | " of " + (j * tasks) + 44 | " (should be " + i + ")\n"); 45 | break; 46 | } 47 | } 48 | } 49 | } 50 | 51 | MPI.COMM_WORLD.barrier(); 52 | MPI.Finalize(); 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /collective/Allreduce.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "allreduce.c" from the "ompi-ibm-10.0" 4 | * regression test package. The formatting of the code is 5 | * mainly the same as in the original file. 6 | * 7 | * 8 | * File: Allreduce.java Author: S. Gross 9 | * 10 | */ 11 | 12 | import mpi.*; 13 | 14 | public class Allreduce 15 | { 16 | private final static int MAXLEN = 100000; 17 | 18 | public static void main (String args[]) throws MPIException 19 | { 20 | int myself, tasks; 21 | int out[] = new int[MAXLEN], 22 | in[] = new int[MAXLEN]; 23 | 24 | MPI.Init(args); 25 | myself = MPI.COMM_WORLD.getRank(); 26 | tasks = MPI.COMM_WORLD.getSize(); 27 | 28 | for (int j = 1; j <= MAXLEN; j *= 10) { 29 | for (int i = 0; i < j; i++) { 30 | out[i] = i; 31 | } 32 | 33 | MPI.COMM_WORLD.allReduce(out,in,j,MPI.INT,MPI.SUM); 34 | 35 | for (int k = 0; k < j; k++) { 36 | if (in[k] != k * tasks) { 37 | OmpitestError.ompitestError(OmpitestError.getFileName(), 38 | OmpitestError.getLineNumber(), 39 | "bad answer (" + in[k] + 40 | ") at index " + k + 41 | " of " + j + " (should be " + 42 | (k * tasks) + ")\n"); 43 | break; 44 | } 45 | } 46 | } 47 | 48 | MPI.COMM_WORLD.barrier(); 49 | MPI.Finalize(); 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /collective/AllreduceInPlace.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "allreduce_in_place.c" from the 4 | * "ompi-ibm-10.0" regression test package. The formatting of 5 | * the code is mainly the same as in the original file. 6 | * 7 | * 8 | * File: AllreduceInPlace.java Author: S. 
Gross 9 | * 10 | */ 11 | 12 | import mpi.*; 13 | 14 | public class AllreduceInPlace 15 | { 16 | private final static int MAXLEN = 100000; 17 | 18 | public static void main (String args[]) throws MPIException 19 | { 20 | int myself, tasks; 21 | int in[] = new int[MAXLEN]; 22 | 23 | MPI.Init(args); 24 | myself = MPI.COMM_WORLD.getRank(); 25 | tasks = MPI.COMM_WORLD.getSize(); 26 | 27 | for (int j = 1; j <= MAXLEN; j *= 10) { 28 | for (int i = 0; i < j; i++) { 29 | in[i] = i; 30 | } 31 | 32 | MPI.COMM_WORLD.allReduce(in, j, MPI.INT, MPI.SUM); 33 | 34 | for (int k = 0; k < j; k++) { 35 | if (in[k] != k * tasks) { 36 | OmpitestError.ompitestError(OmpitestError.getFileName(), 37 | OmpitestError.getLineNumber(), 38 | "bad answer (" + in[k] + 39 | ") at index " + k + 40 | " of " + j + " (should be " + 41 | (k * tasks) + ")\n"); 42 | break; 43 | } 44 | } 45 | } 46 | 47 | MPI.COMM_WORLD.barrier(); 48 | MPI.Finalize(); 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /collective/Alltoall.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "alltoall.c" from the "ompi-ibm-10.0" 4 | * regression test package. The formatting of the code is 5 | * mainly the same as in the original file. 6 | * 7 | * 8 | * File: Alltoall.java Author: S. Gross 9 | * 10 | */ 11 | 12 | import mpi.*; 13 | 14 | public class Alltoall 15 | { 16 | private final static int MAXLEN = 10000; 17 | 18 | public static void main (String args[]) throws MPIException 19 | { 20 | int myself,tasks; 21 | int out[], in[]; 22 | 23 | MPI.Init(args); 24 | myself = MPI.COMM_WORLD.getRank(); 25 | tasks = MPI.COMM_WORLD.getSize(); 26 | 27 | in = new int[MAXLEN * tasks]; 28 | out = new int[MAXLEN * tasks]; 29 | for (int i = 0; i < MAXLEN * tasks; ++i) { 30 | out[i] = myself; 31 | } 32 | for (int j = 1; j <= MAXLEN; j *= 10) { 33 | 34 | MPI.COMM_WORLD.allToAll(out, j, MPI.INT, in, j, MPI.INT); 35 | 36 | for (int i = 0; i < tasks; ++i) { 37 | for (int k = 0; k < j; ++k) { 38 | if (in[k + i * j] != i) { 39 | OmpitestError.ompitestError(OmpitestError.getFileName(), 40 | OmpitestError.getLineNumber(), 41 | " bad answer (" + in[k + i * j] + 42 | ") at index " + (k + i * j) + 43 | " of " + (j * tasks) + 44 | " (should be " + i + ")\n"); 45 | break; 46 | } 47 | } 48 | } 49 | } 50 | MPI.COMM_WORLD.barrier(); 51 | MPI.Finalize(); 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /collective/Alltoallv.java: -------------------------------------------------------------------------------- 1 | import mpi.*; 2 | 3 | public class Alltoallv 4 | { 5 | private final static int MAXLEN = 10000; 6 | 7 | public static void main (String args[]) throws MPIException 8 | { 9 | MPI.Init(args); 10 | 11 | int myself = MPI.COMM_WORLD.getRank(), 12 | tasks = MPI.COMM_WORLD.getSize(); 13 | 14 | int[] sdispls = new int[tasks], 15 | scounts = new int[tasks], 16 | rdispls = new int[tasks], 17 | rcounts = new int[tasks]; 18 | 19 | int[] in = new int[MAXLEN * tasks], 20 | out = new int[MAXLEN * tasks]; 21 | 22 | for(int i = 0; i < MAXLEN * tasks; ++i) { 23 | out[i] = myself; 24 | } 25 | 26 | for(int j = 1; j <= MAXLEN; j *= 10) { 27 | for(int i = 0; i < tasks; i++) { 28 | scounts[i] = rcounts[i] = j; 29 | sdispls[i] = rdispls[i] = i * j; 30 | } 31 | 32 | MPI.COMM_WORLD.allToAllv(out, scounts, sdispls, MPI.INT, 33 | in, rcounts, rdispls, MPI.INT); 34 | 35 | for(int i = 0; i < tasks; ++i) { 36 | for(int k = 0; k < 
j; ++k) { 37 | if(in[k + i * j] != i) { 38 | OmpitestError.ompitestError(OmpitestError.getFileName(), 39 | OmpitestError.getLineNumber(), 40 | " bad answer (" + in[k + i * j] + 41 | ") at index " + (k + i * j) + 42 | " of " + (j * tasks) + 43 | " (should be " + i + ")\n"); 44 | break; 45 | } 46 | } 47 | } 48 | } 49 | 50 | MPI.COMM_WORLD.barrier(); 51 | MPI.Finalize(); 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /collective/Barrier.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "barrier.c" from the "ompi-ibm-10.0" 4 | * regression test package. The formatting of the code is 5 | * mainly the same as in the original file. 6 | * 7 | * 8 | * File: Barrier.java Author: S. Gross 9 | * 10 | */ 11 | 12 | import mpi.*; 13 | 14 | public class Barrier 15 | { 16 | public static void main (String args[]) throws MPIException, 17 | InterruptedException 18 | { 19 | int me, tasks; 20 | double t1, t2; 21 | 22 | MPI.Init(args); 23 | me = MPI.COMM_WORLD.getRank(); 24 | tasks = MPI.COMM_WORLD.getSize(); 25 | 26 | /* We need at least 2 to run */ 27 | OmpitestError.ompitestCheckSize(OmpitestError.getFileName(), 28 | OmpitestError.getLineNumber(), 29 | 2, true); 30 | 31 | Thread.sleep(1000); 32 | 33 | t1 = MPI.wtime(); 34 | MPI.COMM_WORLD.barrier(); 35 | t2 = MPI.wtime(); 36 | 37 | if (t2 < t1) { 38 | OmpitestError.ompitestError(OmpitestError.getFileName(), 39 | OmpitestError.getLineNumber(), 40 | "MPI_Wtime reports that we got " + 41 | "out of the barrier before we " + 42 | "got in!\n" + 43 | "We entered the barrier at: " + 44 | t1 + "\n" + 45 | "We exited the barrier at: " + 46 | t2 + "\n"); 47 | } 48 | 49 | MPI.Finalize(); 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /collective/Bcast.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "bcast.c" from the "ompi-ibm-10.0" 4 | * regression test package. The formatting of the code is 5 | * mainly the same as in the original file. 6 | * 7 | * 8 | * File: Bcast.java Author: S. Gross 9 | * 10 | */ 11 | 12 | import mpi.*; 13 | 14 | public class Bcast 15 | { 16 | private final static int MAXLEN = 100000; 17 | 18 | public static void main (String args[]) throws MPIException 19 | { 20 | int root, myself, tasks; 21 | int out[] = new int[MAXLEN]; 22 | 23 | MPI.Init(args); 24 | myself = MPI.COMM_WORLD.getRank(); 25 | tasks = MPI.COMM_WORLD.getSize(); 26 | 27 | root = tasks - 1; 28 | for (int j = 1; j <= MAXLEN; j *= 10) { 29 | if (myself == root) { 30 | for (int i = 0; i < j; i++) { 31 | out[i] = i; 32 | } 33 | } 34 | 35 | MPI.COMM_WORLD.bcast(out, j, MPI.INT, root); 36 | 37 | for (int k = 0; k < j; k++) { 38 | if (out[k] != k) { 39 | OmpitestError.ompitestError(OmpitestError.getFileName(), 40 | OmpitestError.getLineNumber(), 41 | "bad answer (" + out[k] + 42 | ") at index " + k + 43 | " of " + j + 44 | " (should be " + k + ")\n"); 45 | break; 46 | } 47 | } 48 | } 49 | MPI.COMM_WORLD.barrier(); 50 | MPI.Finalize(); 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /collective/Exscan.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "exscan.c" from the "ompi-ibm-10.0" 4 | * regression test package. The formatting of the code is 5 | * mainly the same as in the original file. 
6 | * 7 | * 8 | * File: Exscan.java Author: S. Gross 9 | * 10 | */ 11 | 12 | import mpi.*; 13 | 14 | public class Exscan 15 | { 16 | private final static int MAXLEN = 10000; 17 | 18 | public static void main (String args[]) throws MPIException 19 | { 20 | int myself,tasks; 21 | int out[] = new int[MAXLEN], 22 | in[] = new int[MAXLEN]; 23 | 24 | MPI.Init(args); 25 | myself = MPI.COMM_WORLD.getRank(); 26 | tasks = MPI.COMM_WORLD.getSize(); 27 | 28 | for(int j=1;j<=MAXLEN;j*=10) { 29 | for(int i=0;i MPI.THREAD_MULTIPLE) 37 | OmpitestError.ompitestError(OmpitestError.getFileName(), 38 | OmpitestError.getLineNumber(), 39 | "ERROR: MPI_Init_thread returned " + 40 | "an illegal value\n"); 41 | rank = MPI.COMM_WORLD.getRank(); 42 | if (0 == rank) { 43 | System.out.printf("PASS: MPI_Init_thread with " + 44 | "MPI_THREAD_MULTIPLE returned " + 45 | "MPI_THREAD_%s\n", 46 | (provided == MPI.THREAD_SINGLE) ? "SINGLE" : 47 | (provided == MPI.THREAD_FUNNELED) ? "FUNNELED" : 48 | (provided == MPI.THREAD_SERIALIZED) ? "SERIALIZED" : "MULTIPLE"); 49 | } 50 | MPI.Finalize(); 51 | System.exit(0); 52 | } 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /environment/InitThreadSerialized.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "init_thread_serialized.c" from the 4 | * "ompi-ibm-10.0" regression test package. The formatting of 5 | * the code is mainly the same as in the original file. 6 | * 7 | * 8 | * File: InitThreadSerialized.java Author: S. Gross 9 | * 10 | */ 11 | 12 | import mpi.*; 13 | 14 | public class InitThreadSerialized 15 | { 16 | public static void main (String args[]) throws MPIException 17 | { 18 | if (OmpitestConfig.OMPITEST_HAVE_MPI_THREADS == 0) { 19 | int rank; 20 | 21 | MPI.Init(args); 22 | 23 | rank = MPI.COMM_WORLD.getRank(); 24 | if (0 == rank) { 25 | System.out.printf("Skipping test because this test was " + 26 | "compiled without MPI thread support\n"); 27 | } 28 | MPI.Finalize(); 29 | System.exit(77); 30 | } else { 31 | int provided = MPI.InitThread(args, MPI.THREAD_SERIALIZED); 32 | 33 | if (provided < MPI.THREAD_SERIALIZED) 34 | OmpitestError.ompitestError(OmpitestError.getFileName(), 35 | OmpitestError.getLineNumber(), 36 | "ERROR: MPI_Init_thread returned " + 37 | "less than MPI_THREAD_SERIALIZED\n"); 38 | MPI.Finalize(); 39 | System.exit(0); 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /environment/Initialized.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "initialized.c" from the "ompi-ibm-10.0" 4 | * regression test package. The formatting of the code is 5 | * mainly the same as in the original file. 6 | * 7 | * 8 | * File: Initialized.java Author: S. 
Gross 9 | * 10 | */ 11 | 12 | import mpi.*; 13 | 14 | public class Initialized 15 | { 16 | public static void main (String args[]) throws MPIException 17 | { 18 | if (MPI.isInitialized()) { 19 | OmpitestError.ompitestError(OmpitestError.getFileName(), 20 | OmpitestError.getLineNumber(), 21 | "ERROR: MPI_Initialized returned " + 22 | "TRUE before initialization\n"); 23 | } 24 | 25 | MPI.Init(args); 26 | if (!MPI.isInitialized()) { 27 | OmpitestError.ompitestError(OmpitestError.getFileName(), 28 | OmpitestError.getLineNumber(), 29 | "ERROR: MPI_Initialized returned " + 30 | "FALSE after initialization\n"); 31 | } 32 | 33 | MPI.Finalize(); 34 | if (!MPI.isInitialized()) { 35 | OmpitestError.ompitestError(OmpitestError.getFileName(), 36 | OmpitestError.getLineNumber(), 37 | "ERROR: MPI_Initialized returned " + 38 | "FALSE after finalization\n"); 39 | } 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /environment/IsThrMain.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "is_thr_main.c" from the "ompi-ibm-10.0" 4 | * regression test package. The formatting of the code is 5 | * mainly the same as in the original file. 6 | * 7 | * 8 | * File: IsThrMain.java Author: S. Gross 9 | * 10 | */ 11 | 12 | import mpi.*; 13 | 14 | public class IsThrMain 15 | { 16 | public static void main (String args[]) throws MPIException 17 | { 18 | int provided = MPI.InitThread(args, MPI.THREAD_SINGLE); 19 | 20 | if (provided < MPI.THREAD_SINGLE) 21 | OmpitestError.ompitestError(OmpitestError.getFileName(), 22 | OmpitestError.getLineNumber(), 23 | "ERROR: MPI_Init_thread returned " + 24 | "less than MPI_THREAD_SINGLE\n"); 25 | 26 | if(!MPI.isThreadMain()) 27 | OmpitestError.ompitestError(OmpitestError.getFileName(), 28 | OmpitestError.getLineNumber(), 29 | "ERROR: MPI_Is_thread_main did not " + 30 | "return \"true\"\n"); 31 | MPI.Finalize(); 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /environment/OmpitestConfig.java: -------------------------------------------------------------------------------- 1 | ../reporting/OmpitestConfig.java -------------------------------------------------------------------------------- /environment/OmpitestError.java: -------------------------------------------------------------------------------- 1 | ../reporting/OmpitestError.java -------------------------------------------------------------------------------- /environment/OmpitestProgress.java: -------------------------------------------------------------------------------- 1 | ../reporting/OmpitestProgress.java -------------------------------------------------------------------------------- /environment/Pcontrol.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "pcontrol.c" from the "ompi-ibm-10.0" 4 | * regression test package. The formatting of the code is 5 | * mainly the same as in the original file. 6 | * 7 | * 8 | * File: Pcontrol.java Author: S. Gross 9 | * 10 | */
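// A sketch of how several values can be passed through pControl()'s
// single Object parameter (the array contents below are illustrative
// assumptions, not part of the original test): pack the arguments
// into an Object[] and hand the array over as the second parameter.
//
//   Object levelArgs = new Object[] { "phase-1", 42 };
//   MPI.pControl(1, levelArgs);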
11 | 12 | import mpi.*; 13 | 14 | public class Pcontrol 15 | { 16 | public static void main (String args[]) throws MPIException 17 | { 18 | int me,tasks; 19 | Object obj = null; 20 | 21 | MPI.Init(args); 22 | me = MPI.COMM_WORLD.getRank(); 23 | tasks = MPI.COMM_WORLD.getSize(); 24 | 25 | /* The Java binding of pControl() doesn't take a variable number 26 | * of arguments, so you have to store all arguments in a single 27 | * object that you hand over to pControl(). 28 | */ 29 | MPI.pControl(1, obj); 30 | 31 | MPI.COMM_WORLD.barrier(); 32 | MPI.Finalize(); 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /environment/Procname.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "procname.c" from the "ompi-ibm-10.0" 4 | * regression test package. The formatting of the code is 5 | * mainly the same as in the original file. 6 | * 7 | * 8 | * File: Procname.java Author: S. Gross 9 | * 10 | */ 11 | 12 | import java.net.*; 13 | import mpi.*; 14 | 15 | public class Procname 16 | { 17 | public static void main (String args[]) throws MPIException, 18 | UnknownHostException 19 | { 20 | int me, len; 21 | String name, tmp; 22 | 23 | MPI.Init(args); 24 | me = MPI.COMM_WORLD.getRank(); 25 | 26 | name = MPI.getProcessorName (); 27 | tmp = InetAddress.getLocalHost().getHostName(); 28 | if(!name.equals(tmp)) 29 | OmpitestError.ompitestError(OmpitestError.getFileName(), 30 | OmpitestError.getLineNumber(), 31 | "ERROR, processor name = " + 32 | name + ", should be " + tmp + "\n"); 33 | MPI.COMM_WORLD.barrier(); 34 | MPI.Finalize(); 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /environment/QueryThread.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "query_thread.c" from the "ompi-ibm-10.0" 4 | * regression test package. The formatting of the code is 5 | * mainly the same as in the original file. 6 | * 7 | * 8 | * File: QueryThread.java Author: S. Gross 9 | * 10 | */ 11 | 12 | import mpi.*; 13 | 14 | public class QueryThread 15 | { 16 | public static void main (String args[]) throws MPIException 17 | { 18 | int provided = MPI.InitThread(args, MPI.THREAD_SINGLE); 19 | 20 | if (provided < MPI.THREAD_SINGLE) 21 | OmpitestError.ompitestError(OmpitestError.getFileName(), 22 | OmpitestError.getLineNumber(), 23 | "ERROR: MPI_Init_thread returned " + 24 | "less than MPI_THREAD_SINGLE\n"); 25 | 26 | if (provided != MPI.queryThread()) 27 | OmpitestError.ompitestError(OmpitestError.getFileName(), 28 | OmpitestError.getLineNumber(), 29 | "ERROR: MPI_Query_thread returned " + 30 | "a different value than " + 31 | "MPI_Init_thread\n"); 32 | MPI.Finalize(); 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /environment/Wtime.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "wtime.c" from the "ompi-ibm-10.0" 4 | * regression test package. The formatting of the code is 5 | * mainly the same as in the original file. 6 | * 7 | * 8 | * File: Wtime.java Author: S. 
Gross 9 | * 10 | */ 11 | 12 | import mpi.*; 13 | 14 | public class Wtime 15 | { 16 | public static void main (String args[]) throws MPIException, 17 | InterruptedException 18 | { 19 | double time, delta, min; 20 | double tick1, tick2; 21 | int rank; 22 | 23 | MPI.Init(args); 24 | rank = MPI.COMM_WORLD.getRank(); 25 | 26 | if (rank == 0) { 27 | Thread.sleep(1); 28 | 29 | tick1 = MPI.wtick(); 30 | 31 | for (int i = 0; i < 100; ++i) { 32 | tick2 = MPI.wtick(); 33 | if ((tick2 - tick1) > 1e-06) { 34 | System.out.printf("wtick variation: %10.10f, %10.10f\n", 35 | tick1, tick2); 36 | break; 37 | } 38 | } 39 | 40 | min = -1; 41 | 42 | for (int i = 0; i < 100; ++i) { 43 | time = MPI.wtime(); 44 | while ((delta = MPI.wtime() - time) <= 0); 45 | 46 | if ((min < 0) || (min > delta)) 47 | min = delta; 48 | } 49 | 50 | System.out.printf("resolution = %10.10f, wtick = %10.10f\n", 51 | min, MPI.wtick()); 52 | } 53 | 54 | MPI.Finalize(); 55 | } 56 | } 57 | -------------------------------------------------------------------------------- /environment/todo/OmpitestConfig.java: -------------------------------------------------------------------------------- 1 | ../../reporting/OmpitestConfig.java -------------------------------------------------------------------------------- /environment/todo/OmpitestError.java: -------------------------------------------------------------------------------- 1 | ../../reporting/OmpitestError.java -------------------------------------------------------------------------------- /environment/todo/OmpitestProgress.java: -------------------------------------------------------------------------------- 1 | ../../reporting/OmpitestProgress.java -------------------------------------------------------------------------------- /group/Compare.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "compare.c" from the "ompi-ibm-10.0" 4 | * regression test package. The formatting of the code is 5 | * mainly the same as in the original file. 6 | * 7 | * 8 | * File: Compare.java Author: S. Gross 9 | * 10 | */ 11 | 12 | import mpi.*; 13 | 14 | public class Compare 15 | { 16 | public static void main (String args[]) throws MPIException 17 | { 18 | int cmp; 19 | int r1[] = {0, 1}; 20 | int r2[] = {1, 2}; 21 | Group group; 22 | Group g1, g2; 23 | 24 | MPI.Init(args); 25 | 26 | /* We need at least 3 to run */ 27 | OmpitestError.ompitestCheckSize(OmpitestError.getFileName(), 28 | OmpitestError.getLineNumber(), 29 | 3, true); 30 | 31 | group = MPI.COMM_WORLD.getGroup(); 32 | g1 = group.incl(r1); 33 | g2 = group.incl(r2); 34 | 35 | cmp = Group.compare(g1, g2); 36 | if (MPI.IDENT == cmp) { 37 | OmpitestError.ompitestError(OmpitestError.getFileName(), 38 | OmpitestError.getLineNumber(), 39 | "ERROR in MPI_Group_compare, " + 40 | "should not be MPI_IDENT\n"); 41 | } 42 | g1.free(); 43 | g2.free(); 44 | group.free(); 45 | 46 | MPI.Finalize(); 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /group/Groupfree.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "groupfree.c" from the "ompi-ibm-10.0" 4 | * regression test package. The formatting of the code is 5 | * mainly the same as in the original file. 6 | * 7 | * 8 | * File: Groupfree.java Author: S. 
Gross 9 | * 10 | */ 11 | 12 | import mpi.*; 13 | 14 | public class Groupfree 15 | { 16 | public static void main (String args[]) throws MPIException 17 | { 18 | int me, tasks; 19 | Group group, newgroup; 20 | 21 | MPI.Init(args); 22 | me = MPI.COMM_WORLD.getRank(); 23 | tasks = MPI.COMM_WORLD.getSize(); 24 | 25 | group = MPI.COMM_WORLD.getGroup(); 26 | 27 | for(int i = 0; i < 100; i++) { 28 | newgroup = Group.union(group,group); 29 | newgroup.free(); 30 | } 31 | 32 | MPI.COMM_WORLD.barrier(); 33 | group.free(); 34 | MPI.Finalize(); 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /group/OmpitestConfig.java: -------------------------------------------------------------------------------- 1 | ../reporting/OmpitestConfig.java -------------------------------------------------------------------------------- /group/OmpitestError.java: -------------------------------------------------------------------------------- 1 | ../reporting/OmpitestError.java -------------------------------------------------------------------------------- /group/OmpitestProgress.java: -------------------------------------------------------------------------------- 1 | ../reporting/OmpitestProgress.java -------------------------------------------------------------------------------- /group/make_group: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # 3 | # Compile and run all group programs. 4 | # 5 | # By default this script runs first with two processes and then 6 | # with eight processes on the local machine. You can add more options 7 | # to "mpiexec" on the command line, e.g.: 8 | # 9 | # make_group -np 6 -host sunpc1,linpc1,tyr 10 | # 11 | # to create six processes running on three machines. 12 | # 13 | # 14 | # File: make_group Author: S. Gross 15 | # 16 | 17 | TWO_PROC="Compare \ 18 | GroupTest \ 19 | Groupfree \ 20 | Range" 21 | 22 | NUM_PROC=$TWO_PROC 23 | 24 | # number of processes 25 | NP=8 26 | 27 | 28 | echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++" 29 | echo "++++++++++++++++ ++++++++++++++++" 30 | echo "++++++++++++++++ mpiexec -np 2 java ... ++++++++++++++++" 31 | echo "++++++++++++++++ ++++++++++++++++" 32 | echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++" 33 | for i in $TWO_PROC; do 34 | echo " " 35 | echo " " 36 | echo " " 37 | echo =========================== $i =========================== 38 | mpijavac $i.java 39 | mpiexec -np 2 java $i 40 | done 41 | 42 | 43 | echo " " 44 | echo " " 45 | echo " " 46 | echo " " 47 | echo " " 48 | echo " " 49 | echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++" 50 | echo "++++" 51 | echo "++++ mpiexec -np $NP $* java ..." 52 | echo "++++" 53 | echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++" 54 | for i in $NUM_PROC; do 55 | echo " " 56 | echo " " 57 | echo " " 58 | echo =========================== $i =========================== 59 | mpijavac $i.java 60 | mpiexec -np $NP $* java $i 61 | done 62 | 63 | rm *.class 64 | -------------------------------------------------------------------------------- /info/Create00.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "00_create.c" from the "ompi-ibm-10.0" 4 | * regression test package. The formatting of the code is 5 | * mainly the same as in the original file. 6 | * 7 | * 8 | * File: Create00.java Author: S. 
Gross 9 | * 10 | */ 11 | 12 | import mpi.*; 13 | 14 | public class Create00 15 | { 16 | public static void main (String args[]) throws MPIException 17 | { 18 | MPI.Init(args); 19 | 20 | /* Pretty simple test -- call MPI_Info_create and ensure that it 21 | * doesn't return an error 22 | */ 23 | Info info1 = new Info(), 24 | info2 = new Info(), 25 | info3 = new Info(); 26 | 27 | /* Free them so that we are bcheck clean */ 28 | info1.free(); 29 | info2.free(); 30 | info3.free(); 31 | 32 | MPI.Finalize(); 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /info/Delete20.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "20_delete.c" from the "ompi-ibm-10.0" 4 | * regression test package. The formatting of the code is 5 | * mainly the same as in the original file. 6 | * 7 | * 8 | * File: Delete20.java Author: S. Gross 9 | * 10 | */ 11 | 12 | import mpi.*; 13 | 14 | public class Delete20 15 | { 16 | public static void main (String args[]) throws MPIException 17 | { 18 | String key1 = "key1", 19 | key2 = "key2key2", 20 | key3 = "key3key3key3", 21 | value1 = "value1", 22 | value2 = "value2 value2", 23 | value3 = "value3 value3 value3"; 24 | 25 | MPI.Init(args); 26 | 27 | /* Pretty simple test -- call MPI_Info_delete and ensure that it 28 | * doesn't return an error 29 | */ 30 | Info info1 = new Info(), 31 | info2 = new Info(), 32 | info3 = new Info(); 33 | 34 | info1.set(key1, value1); 35 | info2.set(key2, value2); 36 | info3.set(key3, value3); 37 | 38 | info1.delete(key1); 39 | info2.delete(key2); 40 | info3.delete(key3); 41 | 42 | /* Free them so that we are bcheck clean */ 43 | info1.free(); 44 | info2.free(); 45 | info3.free(); 46 | 47 | MPI.Finalize(); 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /info/Getnkeys50.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "50_getnkeys.c" from the "ompi-ibm-10.0" 4 | * regression test package. The formatting of the code is 5 | * mainly the same as in the original file. 6 | * 7 | * 8 | * File: Getnkeys50.java Author: S. 
Gross 9 | * 10 | */ 11 | 12 | import mpi.*; 13 | 14 | public class Getnkeys50 15 | { 16 | public static void main (String args[]) throws MPIException 17 | { 18 | String key1 = "key1", 19 | key2 = "key2key2", 20 | key3 = "key3key3key3", 21 | value1 = "value1", 22 | value2 = "value2 value2", 23 | value3 = "value3 value3 value3"; 24 | 25 | MPI.Init(args); 26 | 27 | /* Pretty simple test -- call MPI_Info_get_nkeys and ensure that it 28 | * returns the right number 29 | */ 30 | Info info = new Info(); 31 | 32 | int nkeys = info.size(); 33 | if (nkeys != 0) 34 | OmpitestError.ompitestError(OmpitestError.getFileName(), 35 | OmpitestError.getLineNumber(), 36 | "Info_get_nkeys reported " + nkeys + 37 | " on an empty MPI_Info handle\n"); 38 | 39 | info.set(key1, value2); 40 | info.set(key1, value1); 41 | info.set(key2, value2); 42 | info.set(key3, value3); 43 | 44 | nkeys = info.size(); 45 | if (nkeys != 3) 46 | OmpitestError.ompitestError(OmpitestError.getFileName(), 47 | OmpitestError.getLineNumber(), 48 | "Info_get_nkeys reported " + nkeys + 49 | "; expected 3\n"); 50 | 51 | /* Free it so that we are bcheck clean */ 52 | info.free(); 53 | 54 | MPI.Finalize(); 55 | } 56 | } 57 | -------------------------------------------------------------------------------- /info/OmpitestError.java: -------------------------------------------------------------------------------- 1 | ../reporting/OmpitestError.java -------------------------------------------------------------------------------- /info/OmpitestProgress.java: -------------------------------------------------------------------------------- 1 | ../reporting/OmpitestProgress.java -------------------------------------------------------------------------------- /info/Set10.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "10_set.c" from the "ompi-ibm-10.0" 4 | * regression test package. The formatting of the code is 5 | * mainly the same as in the original file. 6 | * 7 | * 8 | * File: Set10.java Author: S. Gross 9 | * 10 | */ 11 | 12 | import mpi.*; 13 | 14 | public class Set10 15 | { 16 | public static void main (String args[]) throws MPIException 17 | { 18 | String key1 = "key1", 19 | key2 = "key2key2", 20 | key3 = "key3key3key3", 21 | value1 = "value1", 22 | value2 = "value2 value2", 23 | value3 = "value3 value3 value3"; 24 | 25 | MPI.Init(args); 26 | 27 | /* Pretty simple test -- call MPI_Info_set a few times and ensure 28 | * that they don't return an error 29 | */ 30 | Info info1 = new Info(), 31 | info2 = new Info(), 32 | info3 = new Info(); 33 | 34 | info1.set(key1, value2); 35 | info1.set(key1, value1); 36 | info2.set(key2, value2); 37 | info3.set(key3, value3); 38 | 39 | /* Free them so that we are bcheck clean */ 40 | info1.free(); 41 | info2.free(); 42 | info3.free(); 43 | 44 | MPI.Finalize(); 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /info/make_info: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # 3 | # Compile and run all info programs. 4 | # 5 | # By default this script runs first with two processes and then 6 | # with eight processes on the local machine. You can add more options 7 | # to "mpiexec" on the command line, e.g.: 8 | # 9 | # make_info -np 6 -host sunpc1,linpc1,tyr 10 | # 11 | # to create six processes running on three machines. 12 | # 13 | # 14 | # File: make_info Author: S. 
Gross 15 | # 16 | 17 | TWO_PROC="Create00 \ 18 | Delete20 \ 19 | Get30 \ 20 | GetValuelen40 \ 21 | Getnkeys50 \ 22 | InfoEnv60 \ 23 | Set10" 24 | 25 | NUM_PROC=$TWO_PROC 26 | 27 | # number of processes 28 | NP=8 29 | 30 | 31 | echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++" 32 | echo "++++++++++++++++ ++++++++++++++++" 33 | echo "++++++++++++++++ mpiexec -np 2 java ... ++++++++++++++++" 34 | echo "++++++++++++++++ ++++++++++++++++" 35 | echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++" 36 | for i in $TWO_PROC; do 37 | echo " " 38 | echo " " 39 | echo " " 40 | echo =========================== $i =========================== 41 | mpijavac $i.java 42 | mpiexec -np 2 java $i 43 | done 44 | 45 | 46 | echo " " 47 | echo " " 48 | echo " " 49 | echo " " 50 | echo " " 51 | echo " " 52 | echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++" 53 | echo "++++" 54 | echo "++++ mpiexec -np $NP $* java ..." 55 | echo "++++" 56 | echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++" 57 | for i in $NUM_PROC; do 58 | echo " " 59 | echo " " 60 | echo " " 61 | echo =========================== $i =========================== 62 | mpijavac $i.java 63 | mpiexec -np $NP $* java $i 64 | done 65 | 66 | rm *.class 67 | -------------------------------------------------------------------------------- /io/OmpitestError.java: -------------------------------------------------------------------------------- 1 | ../reporting/OmpitestError.java -------------------------------------------------------------------------------- /io/OmpitestProgress.java: -------------------------------------------------------------------------------- 1 | ../reporting/OmpitestProgress.java -------------------------------------------------------------------------------- /io/make_io: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # 3 | # Compile and run all io programs. 4 | # 5 | # By default this script runs first with two processes and then 6 | # with eight processes on the local machine. You can add more options 7 | # to "mpiexec" on the command line, e.g.: 8 | # 9 | # make_io -np 6 -host sunpc1,linpc1,tyr 10 | # 11 | # to create six processes running on three machines. 12 | # 13 | # 14 | # File: make_io Author: S. Gross 15 | # 16 | 17 | TWO_PROC="FileStatusGetCount \ 18 | FileAll \ 19 | FileAtAll \ 20 | FileAtomicity" 21 | 22 | NUM_PROC=$TWO_PROC 23 | 24 | # number of processes 25 | NP=8 26 | 27 | 28 | echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++" 29 | echo "++++++++++++++++ ++++++++++++++++" 30 | echo "++++++++++++++++ mpiexec -np 2 java ... ++++++++++++++++" 31 | echo "++++++++++++++++ ++++++++++++++++" 32 | echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++" 33 | for i in $TWO_PROC; do 34 | echo " " 35 | echo " " 36 | echo " " 37 | echo =========================== $i =========================== 38 | mpijavac $i.java 39 | mpiexec -np 2 java $i 40 | done 41 | 42 | 43 | echo " " 44 | echo " " 45 | echo " " 46 | echo " " 47 | echo " " 48 | echo " " 49 | echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++" 50 | echo "++++" 51 | echo "++++ mpiexec -np $NP $* java ..." 
52 | echo "++++" 53 | echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++" 54 | for i in $NUM_PROC; do 55 | echo " " 56 | echo " " 57 | echo " " 58 | echo =========================== $i =========================== 59 | mpijavac $i.java 60 | mpiexec -np $NP $* java $i 61 | done 62 | 63 | rm *.class 64 | -------------------------------------------------------------------------------- /onesided/CAccumulate.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "c_accumulate.c" from the "ompi-ibm-10.0" 4 | * regression test package. The formatting of the code is 5 | * mainly the same as in the original file. 6 | * 7 | * 8 | * File: CAccumulate.java Author: S. Gross 9 | * 10 | */ 11 | 12 | import java.nio.*; 13 | import mpi.*; 14 | 15 | public class CAccumulate 16 | { 17 | public static void main (String args[]) throws MPIException 18 | { 19 | int rank, size, expected, i; 20 | 21 | MPI.Init(args); 22 | rank = MPI.COMM_WORLD.getRank(); 23 | size = MPI.COMM_WORLD.getSize(); 24 | 25 | IntBuffer sendBuf = MPI.newIntBuffer(1), 26 | recvBuf = MPI.newIntBuffer(1); 27 | 28 | Win win = new Win(recvBuf, 1, 1, MPI.INFO_NULL, MPI.COMM_WORLD); 29 | sendBuf.put(0, rank + 100); 30 | recvBuf.put(0, 0); 31 | 32 | /* Accumulate to everyone, just for the heck of it */ 33 | win.fence(MPI.MODE_NOPRECEDE); 34 | for (i = 0; i < size; ++i) 35 | win.accumulate(sendBuf, 1, MPI.INT, i, 0, 1, MPI.INT, MPI.SUM); 36 | win.fence(MPI.MODE_NOPUT | MPI.MODE_NOSUCCEED); 37 | 38 | for (expected = 0, i = 0; i < size; i++) 39 | expected += (i + 100); 40 | if (recvBuf.get(0) != expected) 41 | OmpitestError.ompitestError(OmpitestError.getFileName(), 42 | OmpitestError.getLineNumber(), 43 | "Rank " + rank + " got " + recvBuf.get(0) + 44 | " when it expected " + expected + "\n"); 45 | 46 | win.free(); 47 | MPI.Finalize(); 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /onesided/CCreate.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "c_create.c" from the "ompi-ibm-10.0" 4 | * regression test package. The formatting of the code is 5 | * mainly the same as in the original file. 6 | * 7 | * 8 | * File: CCreate.java Author: S. Gross 9 | * 10 | */ 11 | 12 | import java.nio.*; 13 | import mpi.*; 14 | 15 | public class CCreate 16 | { 17 | public static void main (String args[]) throws MPIException 18 | { 19 | MPI.Init(args); 20 | int rank = MPI.COMM_WORLD.getRank(); 21 | int size = MPI.COMM_WORLD.getSize(); 22 | 23 | //MPI_Alloc_mem(sizeof(int), MPI.INFO_NULL, &buffer); 24 | IntBuffer buffer = MPI.newIntBuffer(1); 25 | 26 | Win win = new Win(buffer, 1, 1, MPI.INFO_NULL, MPI.COMM_WORLD); 27 | 28 | win.free(); 29 | //MPI_Free_mem(buffer); 30 | MPI.Finalize(); 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /onesided/CCreateDisp.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "c_create_disp.c" from the "ompi-ibm-10.0" 4 | * regression test package. The formatting of the code is 5 | * mainly the same as in the original file. 6 | * 7 | * 8 | * File: CCreateDisp.java Author: S. 
Gross 9 | * 10 | */ 11 | 12 | import java.nio.*; 13 | import mpi.*; 14 | 15 | public class CCreateDisp 16 | { 17 | public static void main (String args[]) throws MPIException 18 | { 19 | MPI.Init(args); 20 | int rank = MPI.COMM_WORLD.getRank(); 21 | int size = MPI.COMM_WORLD.getSize(); 22 | 23 | //MPI_Alloc_mem(sizeof(int), MPI.INFO_NULL, &buffer); 24 | IntBuffer buffer = MPI.newIntBuffer(1); 25 | 26 | Win win = new Win(buffer, 1, (rank%4)+1, MPI.INFO_NULL, MPI.COMM_WORLD); 27 | 28 | win.free(); 29 | //MPI_Free_mem(buffer); 30 | MPI.Finalize(); 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /onesided/CCreateInfo.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "c_create_info.c" from the "ompi-ibm-10.0" 4 | * regression test package. The formatting of the code is 5 | * mainly the same as in the original file. 6 | * 7 | * 8 | * File: CCreateInfo.java Author: S. Gross 9 | * 10 | */ 11 | 12 | import java.nio.*; 13 | import mpi.*; 14 | 15 | public class CCreateInfo 16 | { 17 | public static void main (String args[]) throws MPIException 18 | { 19 | MPI.Init(args); 20 | int rank = MPI.COMM_WORLD.getRank(); 21 | int size = MPI.COMM_WORLD.getSize(); 22 | 23 | IntBuffer buffer = MPI.newIntBuffer(1); 24 | Info info = new Info(); 25 | info.set("no_locks", "true"); 26 | Win win = new Win(buffer, 1, 1, info, MPI.COMM_WORLD); 27 | win.free(); 28 | info.free(); 29 | MPI.Finalize(); 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /onesided/CCreateInfoHalf.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "c_create_info_half.c" from the 4 | * "ompi-ibm-10.0" regression test package. The formatting of 5 | * the code is mainly the same as in the original file. 6 | * 7 | * 8 | * File: CCreateInfoHalf.java Author: S. Gross 9 | * 10 | */ 11 | 12 | import java.nio.*; 13 | import mpi.*; 14 | 15 | public class CCreateInfoHalf 16 | { 17 | public static void main (String args[]) throws MPIException 18 | { 19 | MPI.Init(args); 20 | int rank = MPI.COMM_WORLD.getRank(); 21 | int size = MPI.COMM_WORLD.getSize(); 22 | 23 | IntBuffer buffer = MPI.newIntBuffer(1); 24 | 25 | Info info = new Info(); 26 | if (rank % 2 != 0) { 27 | info.set("no_locks", "false"); 28 | } 29 | Win win = new Win(buffer, 1, 1, info, MPI.COMM_WORLD); 30 | win.free(); 31 | info.free(); 32 | MPI.Finalize(); 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /onesided/CCreateNoFree.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "c_create_no_free.c" from the "ompi-ibm-10.0" 4 | * regression test package. The formatting of the code is 5 | * mainly the same as in the original file. 6 | * 7 | * 8 | * File: CCreateNoFree.java Author: S. 
Gross 9 | * 10 | */ 11 | 12 | import java.nio.*; 13 | import mpi.*; 14 | 15 | public class CCreateNoFree 16 | { 17 | public static void main (String args[]) throws MPIException 18 | { 19 | MPI.Init(args); 20 | int rank = MPI.COMM_WORLD.getRank(); 21 | int size = MPI.COMM_WORLD.getSize(); 22 | 23 | IntBuffer buffer = MPI.newIntBuffer(1); 24 | Win win = new Win(buffer, 1, 1, MPI.INFO_NULL, MPI.COMM_WORLD); 25 | MPI.Finalize(); 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /onesided/CCreateSize.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "c_create_size.c" from the "ompi-ibm-10.0" 4 | * regression test package. The formatting of the code is 5 | * mainly the same as in the original file. 6 | * 7 | * 8 | * File: CCreateSize.java Author: S. Gross 9 | * 10 | */ 11 | 12 | import java.nio.*; 13 | import mpi.*; 14 | 15 | public class CCreateSize 16 | { 17 | public static void main (String args[]) throws MPIException 18 | { 19 | MPI.Init(args); 20 | int rank = MPI.COMM_WORLD.getRank(); 21 | int size = MPI.COMM_WORLD.getSize(); 22 | 23 | /* 24 | if (rank == 0) { 25 | buffer = new int[0]; 26 | } else { 27 | MPI_Alloc_mem(sizeof(int) * rank, MPI.INFO_NULL, &buffer); 28 | } 29 | */ 30 | 31 | IntBuffer buffer = MPI.newIntBuffer(rank); 32 | Win win = new Win(buffer, rank, 1, MPI.INFO_NULL, MPI.COMM_WORLD); 33 | win.free(); 34 | 35 | /* 36 | if (rank != 0) 37 | MPI_Free_mem(buffer); 38 | */ 39 | 40 | MPI.Finalize(); 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /onesided/CFenceAsserts.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "c_fence_asserts.c" from the "ompi-ibm-10.0" 4 | * regression test package. The formatting of the code is 5 | * mainly the same as in the original file. 6 | * 7 | * 8 | * File: CFenceAsserts.java Author: S. Gross 9 | * 10 | */ 11 | 12 | import java.nio.*; 13 | import mpi.*; 14 | 15 | public class CFenceAsserts 16 | { 17 | public static void main (String args[]) throws MPIException 18 | { 19 | MPI.Init(args); 20 | IntBuffer buffer = MPI.newIntBuffer(1); 21 | 22 | Win win = new Win(buffer, 1, 1, MPI.INFO_NULL, MPI.COMM_WORLD); 23 | win.fence(MPI.MODE_NOPRECEDE|MPI.MODE_NOSTORE); 24 | win.fence(MPI.MODE_NOSUCCEED|MPI.MODE_NOPUT); 25 | win.free(); 26 | 27 | MPI.Finalize(); 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /onesided/CFencePut1.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "c_fence_put_1.c" from the "ompi-ibm-10.0" 4 | * regression test package. The formatting of the code is 5 | * mainly the same as in the original file. 6 | * 7 | * 8 | * File: CFencePut1.java Author: S. 
Gross 9 | * 10 | */ 11 | 12 | import java.nio.*; 13 | import mpi.*; 14 | 15 | public class CFencePut1 16 | { 17 | public static void main (String args[]) throws MPIException 18 | { 19 | MPI.Init(args); 20 | int rank = MPI.COMM_WORLD.getRank(); 21 | int size = MPI.COMM_WORLD.getSize(); 22 | IntBuffer buffer = MPI.newIntBuffer(2); 23 | 24 | buffer.put(0, rank); 25 | buffer.put(1, 0); 26 | 27 | Win win = new Win(buffer, 2, 1, MPI.INFO_NULL, MPI.COMM_WORLD); 28 | win.fence(MPI.MODE_NOPRECEDE|MPI.MODE_NOSTORE); 29 | win.put(buffer, 1, MPI.INT, (rank + 1) % size, 1, 1, MPI.INT); 30 | win.fence(MPI.MODE_NOSUCCEED|MPI.MODE_NOPUT); 31 | 32 | if (buffer.get(1) != (rank + size - 1) % size) { 33 | OmpitestError.ompitestError(OmpitestError.getFileName(), 34 | OmpitestError.getLineNumber(), 35 | "Put appears to have failed. " + 36 | "Found " + buffer.get(1) + 37 | ", expected " + 38 | ((rank + size - 1) % size) + "."); 39 | } 40 | 41 | win.free(); 42 | MPI.Finalize(); 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /onesided/CFenceSimple.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "c_fence_simple.c" from the "ompi-ibm-10.0" 4 | * regression test package. The formatting of the code is 5 | * mainly the same as in the original file. 6 | * 7 | * 8 | * File: CFenceSimple.java Author: S. Gross 9 | * 10 | */ 11 | 12 | import java.nio.*; 13 | import mpi.*; 14 | 15 | public class CFenceSimple 16 | { 17 | public static void main (String args[]) throws MPIException 18 | { 19 | MPI.Init(args); 20 | IntBuffer buffer = MPI.newIntBuffer(1); 21 | Win win = new Win(buffer, 1, 1, MPI.INFO_NULL, MPI.COMM_WORLD); 22 | win.fence(0); 23 | win.free(); 24 | MPI.Finalize(); 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /onesided/CWinErrhandler.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "c_win_errhandler.c" from the "ompi-ibm-10.0" 4 | * regression test package. The formatting of the code is 5 | * mainly the same as in the original file. 6 | * 7 | * 8 | * File: CWinErrhandler.java Author: S. 
Gross 9 | * 10 | */ 11 | 12 | import java.nio.*; 13 | import mpi.*; 14 | 15 | public class CWinErrhandler 16 | { 17 | public static void main (String args[]) throws MPIException 18 | { 19 | MPI.Init(args); 20 | 21 | IntBuffer buffer = MPI.newIntBuffer(1); 22 | Win win = new Win(buffer, 1, 1, MPI.INFO_NULL, MPI.COMM_WORLD); 23 | win.setErrhandler(MPI.ERRORS_RETURN); 24 | win.callErrhandler(MPI.ERR_OTHER); 25 | /* success is not aborting ;) */ 26 | win.free(); 27 | 28 | MPI.Finalize(); 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /onesided/OmpitestConfig.java: -------------------------------------------------------------------------------- 1 | ../reporting/OmpitestConfig.java -------------------------------------------------------------------------------- /onesided/OmpitestError.java: -------------------------------------------------------------------------------- 1 | ../reporting/OmpitestError.java -------------------------------------------------------------------------------- /onesided/OmpitestProgress.java: -------------------------------------------------------------------------------- 1 | ../reporting/OmpitestProgress.java -------------------------------------------------------------------------------- /onesided/WinAllocate.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "win_allocate.c" from the ibm 4 | * regression test package found in the ompi-tests repository. 5 | * The formatting of the code is similar to the original file. 6 | * 7 | * 8 | * File: WinAllocate.java Author: N. Graham 9 | * 10 | */ 11 | 12 | import java.nio.*; 13 | import mpi.*; 14 | 15 | public class WinAllocate { 16 | 17 | private static final int BASE_SIZE = 8192; 18 | 19 | public static void main(String[] args) throws MPIException 20 | { 21 | CharBuffer myPtr = MPI.newCharBuffer(0); 22 | Win win, sharedWin; 23 | int rank, size, shmRank, shmNproc, peer; 24 | int mySize, peer_size; 25 | int peer_disp; 26 | CharBuffer peerPtr = MPI.newCharBuffer(0); 27 | Comm shmComm; 28 | 29 | MPI.Init(args); 30 | MPI.COMM_WORLD.setErrhandler(MPI.ERRORS_RETURN); 31 | rank = MPI.COMM_WORLD.getRank(); 32 | size = MPI.COMM_WORLD.getSize(); 33 | 34 | shmComm = MPI.COMM_WORLD.splitType(Comm.TYPE_SHARED, rank, MPI.INFO_NULL); 35 | 36 | shmRank = shmComm.getRank(); /* rank within the node-local communicator */ 37 | shmNproc = shmComm.getSize(); 38 | 39 | mySize = BASE_SIZE + (shmRank + 1); 40 | 41 | win = new Win(mySize, 1, MPI.INFO_NULL, shmComm, myPtr, Win.FLAVOR_PRIVATE); 42 | sharedWin = new Win(mySize, 1, MPI.INFO_NULL, shmComm, myPtr, Win.FLAVOR_SHARED); 43 | 44 | win.free(); 45 | sharedWin.free(); 46 | shmComm.free(); 47 | 48 | MPI.Finalize(); 49 | if(rank == 0) 50 | System.out.println("Test Completed"); 51 | } 52 | } -------------------------------------------------------------------------------- /onesided/WinName.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a test for the getName and setName methods in Win.java. 4 | * 5 | * 6 | * File: WinName.java Author: N.
Graham 7 | * 8 | */ 9 | 10 | import java.nio.IntBuffer; 11 | import mpi.*; 12 | 13 | public class WinName { 14 | 15 | public static void main(String[] args) throws MPIException { 16 | String testName = "testName"; 17 | MPI.Init(args); 18 | 19 | int rank = MPI.COMM_WORLD.getRank(); 20 | IntBuffer winArea = MPI.newIntBuffer(1); 21 | 22 | Win win = new Win(winArea, 1, 1, MPI.INFO_NULL, MPI.COMM_WORLD); 23 | 24 | win.setName(testName); 25 | 26 | if(win.getName().equals(testName)) { 27 | if(rank == 0) { 28 | System.out.println("Test Passed"); 29 | } 30 | } else { 31 | if(rank == 0) { 32 | System.out.println("Test Failed"); 33 | } 34 | } 35 | win.free(); 36 | MPI.Finalize(); 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /performance/Fedora/c_Fedora_net_100000_intValues.dat: -------------------------------------------------------------------------------- 1 | 2 3.425426 2 | 4 6.091408 3 | 6 9.022457 4 | 8 11.699096 5 | 10 14.540115 6 | 12 17.266935 7 | 14 22.503482 8 | 16 27.304082 9 | 18 29.629157 10 | 20 34.279728 11 | 22 37.263542 12 | 24 41.105222 13 | -------------------------------------------------------------------------------- /performance/Fedora/c_Fedora_net_10000_intValues.dat: -------------------------------------------------------------------------------- 1 | 2 0.346598 2 | 4 0.377005 3 | 6 0.408750 4 | 8 0.444147 5 | 10 0.472349 6 | 12 0.506978 7 | 14 0.541480 8 | 16 0.574874 9 | 18 0.607435 10 | 20 0.635886 11 | 22 0.680807 12 | 24 0.710314 13 | -------------------------------------------------------------------------------- /performance/Fedora/c_Fedora_net_2_tasks.dat: -------------------------------------------------------------------------------- 1 | 10000 0.346228 2 | 20000 0.693736 3 | 30000 1.040018 4 | 40000 1.386190 5 | 50000 1.732207 6 | 60000 2.079005 7 | 70000 2.425119 8 | 80000 2.771927 9 | 90000 3.119562 10 | 100000 3.425493 11 | -------------------------------------------------------------------------------- /performance/Fedora/c_Fedora_net_4_tasks.dat: -------------------------------------------------------------------------------- 1 | 10000 0.375219 2 | 20000 0.806949 3 | 30000 1.292373 4 | 40000 1.834370 5 | 50000 2.407874 6 | 60000 2.997212 7 | 70000 3.313360 8 | 80000 3.878931 9 | 90000 4.432388 10 | 100000 6.281699 11 | -------------------------------------------------------------------------------- /performance/Fedora/c_Fedora_net_50000_intValues.dat: -------------------------------------------------------------------------------- 1 | 2 1.733472 2 | 4 2.407088 3 | 6 3.014573 4 | 8 3.776649 5 | 10 4.325564 6 | 12 5.033238 7 | 14 5.730390 8 | 16 5.175450 9 | 18 7.020250 10 | 20 7.552657 11 | 22 8.208078 12 | 24 6.478840 13 | -------------------------------------------------------------------------------- /performance/Fedora/c_Fedora_net_8_tasks.dat: -------------------------------------------------------------------------------- 1 | 10000 0.441582 2 | 20000 1.043294 3 | 30000 1.809458 4 | 40000 2.733968 5 | 50000 3.647928 6 | 60000 4.946564 7 | 70000 5.131967 8 | 80000 6.133828 9 | 90000 7.268685 10 | 100000 11.648377 11 | -------------------------------------------------------------------------------- /performance/Fedora/c_Fedora_shm_100000_intValues.dat: -------------------------------------------------------------------------------- 1 | 2 0.069012 2 | 4 0.263896 3 | 6 0.319504 4 | 8 0.444875 5 | 10 0.474750 6 | 12 0.551540 7 | -------------------------------------------------------------------------------- 
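Each row in the ".dat" files of this directory pairs either a process count (the "*_intValues" files) or a buffer size in "int" values (the "*_tasks" files) with a measured time in seconds; the .plt titles further below state that the measured operation is sending a buffer of "int" values 1000 times. A minimal Java sketch of a loop that could emit one such row -- a hypothetical reconstruction assuming a broadcast-style benchmark, not the repository's actual benchmark source:

import java.nio.IntBuffer;
import mpi.*;

public class BcastTimingSketch
{
  public static void main (String args[]) throws MPIException
  {
    MPI.Init(args);
    int size = MPI.COMM_WORLD.getSize();
    /* 100000 "int" values per message, as in the 100000_intValues files */
    IntBuffer buf = MPI.newIntBuffer(100000);
    double start = MPI.wtime();
    for (int i = 0; i < 1000; i++)        /* send the buffer 1000 times */
      MPI.COMM_WORLD.bcast(buf, buf.capacity(), MPI.INT, 0);
    double elapsed = MPI.wtime() - start;
    if (MPI.COMM_WORLD.getRank() == 0)    /* one ".dat" row: tasks vs. seconds */
      System.out.println(size + " " + elapsed);
    MPI.Finalize();
  }
}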
/performance/Fedora/c_Fedora_shm_10000_intValues.dat: -------------------------------------------------------------------------------- 1 | 2 0.022203 2 | 4 0.044018 3 | 6 0.059354 4 | 8 0.071551 5 | 10 0.083057 6 | 12 0.087374 7 | -------------------------------------------------------------------------------- /performance/Fedora/c_Fedora_shm_2_tasks.dat: -------------------------------------------------------------------------------- 1 | 10000 0.024818 2 | 20000 0.040725 3 | 30000 0.061209 4 | 40000 0.075925 5 | 50000 0.095310 6 | 60000 0.106973 7 | 70000 0.127477 8 | 80000 0.140946 9 | 90000 0.156208 10 | 100000 0.068282 11 | -------------------------------------------------------------------------------- /performance/Fedora/c_Fedora_shm_4_tasks.dat: -------------------------------------------------------------------------------- 1 | 10000 0.045277 2 | 20000 0.084083 3 | 30000 0.120180 4 | 40000 0.160023 5 | 50000 0.194644 6 | 60000 0.236483 7 | 70000 0.273530 8 | 80000 0.311209 9 | 90000 0.350506 10 | 100000 0.262711 11 | -------------------------------------------------------------------------------- /performance/Fedora/c_Fedora_shm_50000_intValues.dat: -------------------------------------------------------------------------------- 1 | 2 0.091514 2 | 4 0.193901 3 | 6 0.273482 4 | 8 0.348399 5 | 10 0.361460 6 | 12 0.399060 7 | -------------------------------------------------------------------------------- /performance/Fedora/c_Fedora_shm_8_tasks.dat: -------------------------------------------------------------------------------- 1 | 10000 0.072186 2 | 20000 0.143547 3 | 30000 0.208494 4 | 40000 0.281269 5 | 50000 0.350209 6 | 60000 0.437997 7 | 70000 0.512310 8 | 80000 0.527975 9 | 90000 0.584605 10 | 100000 0.441669 11 | -------------------------------------------------------------------------------- /performance/Fedora/c_Fedora_x86_64_net_100000_intValues_cputime.dat: -------------------------------------------------------------------------------- 1 | 2 6.470000 2 | 4 23.590000 3 | 6 52.850000 4 | 8 91.890000 5 | 10 143.060000 6 | 12 204.620000 7 | 14 328.440000 8 | 16 433.570000 9 | 18 548.820000 10 | 20 681.580000 11 | 22 820.440000 12 | 24 980.020000 13 | -------------------------------------------------------------------------------- /performance/Fedora/c_Fedora_x86_64_net_100000_intValues_elapsedtime.dat: -------------------------------------------------------------------------------- 1 | 2 3.425426 2 | 4 6.091408 3 | 6 9.022457 4 | 8 11.699096 5 | 10 14.540115 6 | 12 17.266935 7 | 14 22.503482 8 | 16 27.304082 9 | 18 29.629157 10 | 20 34.279728 11 | 22 37.263542 12 | 24 41.105222 13 | -------------------------------------------------------------------------------- /performance/Fedora/c_Fedora_x86_64_net_10000_intValues_cputime.dat: -------------------------------------------------------------------------------- 1 | 2 0.640000 2 | 4 2.340000 3 | 6 4.090000 4 | 8 7.670000 5 | 10 10.260000 6 | 12 13.780000 7 | 14 18.400000 8 | 16 23.740000 9 | 18 29.600000 10 | 20 36.280000 11 | 22 43.660000 12 | 24 51.680000 13 | -------------------------------------------------------------------------------- /performance/Fedora/c_Fedora_x86_64_net_10000_intValues_elapsedtime.dat: -------------------------------------------------------------------------------- 1 | 2 0.346598 2 | 4 0.377005 3 | 6 0.408750 4 | 8 0.444147 5 | 10 0.472349 6 | 12 0.506978 7 | 14 0.541480 8 | 16 0.574874 9 | 18 0.607435 10 | 20 0.635886 11 | 22 0.680807 12 | 24 0.710314 13 | 
-------------------------------------------------------------------------------- /performance/Fedora/c_Fedora_x86_64_net_2_tasks_cputime.dat: -------------------------------------------------------------------------------- 1 | 10000 0.650000 2 | 20000 1.300000 3 | 30000 1.940000 4 | 40000 2.590000 5 | 50000 3.240000 6 | 60000 3.920000 7 | 70000 4.570000 8 | 80000 5.210000 9 | 90000 5.880000 10 | 100000 6.470000 11 | -------------------------------------------------------------------------------- /performance/Fedora/c_Fedora_x86_64_net_2_tasks_elapsedtime.dat: -------------------------------------------------------------------------------- 1 | 10000 0.346228 2 | 20000 0.693736 3 | 30000 1.040018 4 | 40000 1.386190 5 | 50000 1.732207 6 | 60000 2.079005 7 | 70000 2.425119 8 | 80000 2.771927 9 | 90000 3.119562 10 | 100000 3.425493 11 | -------------------------------------------------------------------------------- /performance/Fedora/c_Fedora_x86_64_net_4_tasks_cputime.dat: -------------------------------------------------------------------------------- 1 | 10000 2.490000 2 | 20000 4.340000 3 | 30000 6.120000 4 | 40000 8.100000 5 | 50000 10.020000 6 | 60000 12.030000 7 | 70000 14.140000 8 | 80000 16.120000 9 | 90000 18.060000 10 | 100000 24.170000 11 | -------------------------------------------------------------------------------- /performance/Fedora/c_Fedora_x86_64_net_4_tasks_elapsedtime.dat: -------------------------------------------------------------------------------- 1 | 10000 0.375219 2 | 20000 0.806949 3 | 30000 1.292373 4 | 40000 1.834370 5 | 50000 2.407874 6 | 60000 2.997212 7 | 70000 3.313360 8 | 80000 3.878931 9 | 90000 4.432388 10 | 100000 6.281699 11 | -------------------------------------------------------------------------------- /performance/Fedora/c_Fedora_x86_64_net_50000_intValues_cputime.dat: -------------------------------------------------------------------------------- 1 | 2 3.260000 2 | 4 10.030000 3 | 6 20.120000 4 | 8 34.070000 5 | 10 50.170000 6 | 12 71.210000 7 | 14 95.350000 8 | 16 121.960000 9 | 18 152.970000 10 | 20 185.390000 11 | 22 223.990000 12 | 24 266.640000 13 | -------------------------------------------------------------------------------- /performance/Fedora/c_Fedora_x86_64_net_50000_intValues_elapsedtime.dat: -------------------------------------------------------------------------------- 1 | 2 1.733472 2 | 4 2.407088 3 | 6 3.014573 4 | 8 3.776649 5 | 10 4.325564 6 | 12 5.033238 7 | 14 5.730390 8 | 16 5.175450 9 | 18 7.020250 10 | 20 7.552657 11 | 22 8.208078 12 | 24 6.478840 13 | -------------------------------------------------------------------------------- /performance/Fedora/c_Fedora_x86_64_net_8_tasks_cputime.dat: -------------------------------------------------------------------------------- 1 | 10000 7.690000 2 | 20000 13.050000 3 | 30000 20.020000 4 | 40000 26.340000 5 | 50000 33.420000 6 | 60000 40.960000 7 | 70000 47.010000 8 | 80000 53.520000 9 | 90000 60.890000 10 | 100000 91.580000 11 | -------------------------------------------------------------------------------- /performance/Fedora/c_Fedora_x86_64_net_8_tasks_elapsedtime.dat: -------------------------------------------------------------------------------- 1 | 10000 0.441582 2 | 20000 1.043294 3 | 30000 1.809458 4 | 40000 2.733968 5 | 50000 3.647928 6 | 60000 4.946564 7 | 70000 5.131967 8 | 80000 6.133828 9 | 90000 7.268685 10 | 100000 11.648377 11 | -------------------------------------------------------------------------------- 
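The "*_cputime.dat" files in this directory record consumed CPU seconds, while the matching "*_elapsedtime.dat" files record wall-clock seconds for the same runs (the plain "*_tasks.dat" and "*_intValues.dat" files duplicate the elapsed-time numbers). This listing does not show how the benchmarks sampled CPU time; one possible way to do it from Java -- an illustrative sketch using the standard ThreadMXBean API, not necessarily the method used for these files -- is:

import java.lang.management.ManagementFactory;
import java.lang.management.ThreadMXBean;

public class CpuTimeSketch
{
  public static void main (String args[])
  {
    ThreadMXBean bean = ManagementFactory.getThreadMXBean();
    long start = bean.getCurrentThreadCpuTime();   /* CPU time used so far, in ns */
    /* ... communication loop under test ... */
    long stop = bean.getCurrentThreadCpuTime();
    /* caveat: counts only the calling thread, so CPU time consumed
       by MPI progress threads would not be included */
    System.out.println((stop - start) / 1.0e9 + " CPU seconds");
  }
}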
/performance/Fedora/c_Fedora_x86_64_shm_100000_intValues_cputime.dat: -------------------------------------------------------------------------------- 1 | 2 0.140000 2 | 4 1.070000 3 | 6 1.900000 4 | 8 3.540000 5 | 10 4.750000 6 | 12 6.590000 7 | -------------------------------------------------------------------------------- /performance/Fedora/c_Fedora_x86_64_shm_100000_intValues_elapsedtime.dat: -------------------------------------------------------------------------------- 1 | 2 0.069012 2 | 4 0.263896 3 | 6 0.319504 4 | 8 0.444875 5 | 10 0.474750 6 | 12 0.551540 7 | -------------------------------------------------------------------------------- /performance/Fedora/c_Fedora_x86_64_shm_10000_intValues_cputime.dat: -------------------------------------------------------------------------------- 1 | 2 0.040000 2 | 4 0.190000 3 | 6 0.370000 4 | 8 0.570000 5 | 10 0.830000 6 | 12 1.060000 7 | -------------------------------------------------------------------------------- /performance/Fedora/c_Fedora_x86_64_shm_10000_intValues_elapsedtime.dat: -------------------------------------------------------------------------------- 1 | 2 0.022203 2 | 4 0.044018 3 | 6 0.059354 4 | 8 0.071551 5 | 10 0.083057 6 | 12 0.087374 7 | -------------------------------------------------------------------------------- /performance/Fedora/c_Fedora_x86_64_shm_2_tasks_cputime.dat: -------------------------------------------------------------------------------- 1 | 10000 0.060000 2 | 20000 0.080000 3 | 30000 0.110000 4 | 40000 0.140000 5 | 50000 0.190000 6 | 60000 0.210000 7 | 70000 0.250000 8 | 80000 0.300000 9 | 90000 0.320000 10 | 100000 0.140000 11 | -------------------------------------------------------------------------------- /performance/Fedora/c_Fedora_x86_64_shm_2_tasks_elapsedtime.dat: -------------------------------------------------------------------------------- 1 | 10000 0.024818 2 | 20000 0.040725 3 | 30000 0.061209 4 | 40000 0.075925 5 | 50000 0.095310 6 | 60000 0.106973 7 | 70000 0.127477 8 | 80000 0.140946 9 | 90000 0.156208 10 | 100000 0.068282 11 | -------------------------------------------------------------------------------- /performance/Fedora/c_Fedora_x86_64_shm_4_tasks_cputime.dat: -------------------------------------------------------------------------------- 1 | 10000 0.180000 2 | 20000 0.330000 3 | 30000 0.470000 4 | 40000 0.650000 5 | 50000 0.770000 6 | 60000 0.940000 7 | 70000 1.100000 8 | 80000 1.260000 9 | 90000 1.360000 10 | 100000 1.060000 11 | -------------------------------------------------------------------------------- /performance/Fedora/c_Fedora_x86_64_shm_4_tasks_elapsedtime.dat: -------------------------------------------------------------------------------- 1 | 10000 0.045277 2 | 20000 0.084083 3 | 30000 0.120180 4 | 40000 0.160023 5 | 50000 0.194644 6 | 60000 0.236483 7 | 70000 0.273530 8 | 80000 0.311209 9 | 90000 0.350506 10 | 100000 0.262711 11 | -------------------------------------------------------------------------------- /performance/Fedora/c_Fedora_x86_64_shm_50000_intValues_cputime.dat: -------------------------------------------------------------------------------- 1 | 2 0.200000 2 | 4 0.770000 3 | 6 1.650000 4 | 8 2.760000 5 | 10 3.610000 6 | 12 4.770000 7 | -------------------------------------------------------------------------------- /performance/Fedora/c_Fedora_x86_64_shm_50000_intValues_elapsedtime.dat: -------------------------------------------------------------------------------- 1 | 2 0.091514 2 | 4 0.193901 3 | 6 0.273482 4 | 8 0.348399 5 | 10 
0.361460 6 | 12 0.399060 7 | -------------------------------------------------------------------------------- /performance/Fedora/c_Fedora_x86_64_shm_8_tasks_cputime.dat: -------------------------------------------------------------------------------- 1 | 10000 0.580000 2 | 20000 1.140000 3 | 30000 1.640000 4 | 40000 2.250000 5 | 50000 2.770000 6 | 60000 3.490000 7 | 70000 4.080000 8 | 80000 4.210000 9 | 90000 4.660000 10 | 100000 3.530000 11 | -------------------------------------------------------------------------------- /performance/Fedora/c_Fedora_x86_64_shm_8_tasks_elapsedtime.dat: -------------------------------------------------------------------------------- 1 | 10000 0.072186 2 | 20000 0.143547 3 | 30000 0.208494 4 | 40000 0.281269 5 | 50000 0.350209 6 | 60000 0.437997 7 | 70000 0.512310 8 | 80000 0.527975 9 | 90000 0.584605 10 | 100000 0.441669 11 | -------------------------------------------------------------------------------- /performance/Fedora/fedora_msgSize_c_java_cputime.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-mpi/ompi-java-test/1a3295a7cf8c8ea30366b2f9e50d48726354b47f/performance/Fedora/fedora_msgSize_c_java_cputime.pdf -------------------------------------------------------------------------------- /performance/Fedora/fedora_msgSize_c_java_cputime.plt: -------------------------------------------------------------------------------- 1 | reset 2 | set terminal x11 3 | #set terminal windows 4 | set key left top Left reverse font ",7" samplen 2 spacing 0.8 5 | set nolabel 6 | set border 3 7 | set lmargin 12 8 | set rmargin 12 9 | #set xtics 10000 10 | #set ytics 10 11 | set logscale y 12 | set title "CPU time to send 1000 times a buffer of \"int\" values" 13 | set xlabel "Number of \"int\" values in buffer" 14 | set ylabel "CPU time in seconds" 15 | set style data linespoints 16 | set pointsize 0.8 17 | set xrange [0:100000] 18 | plot "c_Fedora_x86_64_shm_4_tasks_cputime.dat",\ 19 | "c_Fedora_x86_64_shm_8_tasks_cputime.dat",\ 20 | "java_Fedora_x86_64_shm_4_tasks_cputime.dat",\ 21 | "java_Fedora_x86_64_shm_8_tasks_cputime.dat", \ 22 | "c_Fedora_x86_64_net_4_tasks_cputime.dat",\ 23 | "c_Fedora_x86_64_net_8_tasks_cputime.dat",\ 24 | "java_Fedora_x86_64_net_4_tasks_cputime.dat",\ 25 | "java_Fedora_x86_64_net_8_tasks_cputime.dat" 26 | replot 27 | set terminal pdf 28 | set output "fedora_msgSize_c_java_cputime.pdf" 29 | replot 30 | pause 5 31 | -------------------------------------------------------------------------------- /performance/Fedora/fedora_msgSize_c_java_elapsedtime.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-mpi/ompi-java-test/1a3295a7cf8c8ea30366b2f9e50d48726354b47f/performance/Fedora/fedora_msgSize_c_java_elapsedtime.pdf -------------------------------------------------------------------------------- /performance/Fedora/fedora_msgSize_c_java_elapsedtime.plt: -------------------------------------------------------------------------------- 1 | reset 2 | set terminal x11 3 | #set terminal windows 4 | set key left top Left reverse font ",7" samplen 2 spacing 0.8 5 | set nolabel 6 | set border 3 7 | set lmargin 12 8 | set rmargin 12 9 | #set xtics 10000 10 | #set ytics 10 11 | set logscale y 12 | set title "Elapsed time to send 1000 times a buffer of \"int\" values" 13 | set xlabel "Number of \"int\" values in buffer" 14 | set ylabel "Elapsed time in seconds" 15 | set style data linespoints 16 | set pointsize 0.8 17 | set xrange [0:100000] 18 | plot "c_Fedora_shm_4_tasks.dat",\ 19 | "c_Fedora_shm_8_tasks.dat",\ 20 |
"java_Fedora_shm_4_tasks.dat",\ 21 | "java_Fedora_shm_8_tasks.dat", \ 22 | "c_Fedora_net_4_tasks.dat",\ 23 | "c_Fedora_net_8_tasks.dat",\ 24 | "java_Fedora_net_4_tasks.dat",\ 25 | "java_Fedora_net_8_tasks.dat" 26 | replot 27 | set terminal pdf 28 | set output "fedora_msgSize_c_java_elapsedtime.pdf" 29 | replot 30 | pause 5 31 | -------------------------------------------------------------------------------- /performance/Fedora/fedora_numProc_c_java_cputime.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-mpi/ompi-java-test/1a3295a7cf8c8ea30366b2f9e50d48726354b47f/performance/Fedora/fedora_numProc_c_java_cputime.pdf -------------------------------------------------------------------------------- /performance/Fedora/fedora_numProc_c_java_cputime.plt: -------------------------------------------------------------------------------- 1 | reset 2 | set terminal x11 3 | #set terminal windows 4 | set key left top Left reverse font ",7" samplen 2 spacing 0.8 5 | set nolabel 6 | set border 3 7 | set lmargin 12 8 | set rmargin 12 9 | set xtics 2 10 | #set ytics 10 11 | set logscale y 12 | set title "CPU time to send 1000 times a buffer of \"int\" values" 13 | set xlabel "Number of processes in MPI_COMM_WORLD" 14 | set ylabel "CPU time in seconds" 15 | set style data linespoints 16 | set pointsize 0.8 17 | set xrange [0:24] 18 | plot "c_Fedora_x86_64_shm_10000_intValues_cputime.dat",\ 19 | "c_Fedora_x86_64_shm_50000_intValues_cputime.dat",\ 20 | "c_Fedora_x86_64_shm_100000_intValues_cputime.dat",\ 21 | "java_Fedora_x86_64_shm_10000_intValues_cputime.dat",\ 22 | "java_Fedora_x86_64_shm_50000_intValues_cputime.dat",\ 23 | "java_Fedora_x86_64_shm_100000_intValues_cputime.dat", \ 24 | "c_Fedora_x86_64_net_10000_intValues_cputime.dat",\ 25 | "c_Fedora_x86_64_net_50000_intValues_cputime.dat",\ 26 | "c_Fedora_x86_64_net_100000_intValues_cputime.dat",\ 27 | "java_Fedora_x86_64_net_10000_intValues_cputime.dat",\ 28 | "java_Fedora_x86_64_net_50000_intValues_cputime.dat",\ 29 | "java_Fedora_x86_64_net_100000_intValues_cputime.dat" 30 | replot 31 | set terminal pdf 32 | set output "fedora_numProc_c_java_cputime.pdf" 33 | replot 34 | pause 5 35 | -------------------------------------------------------------------------------- /performance/Fedora/fedora_numProc_c_java_elapsedtime.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-mpi/ompi-java-test/1a3295a7cf8c8ea30366b2f9e50d48726354b47f/performance/Fedora/fedora_numProc_c_java_elapsedtime.pdf -------------------------------------------------------------------------------- /performance/Fedora/fedora_numProc_c_java_elapsedtime.plt: -------------------------------------------------------------------------------- 1 | reset 2 | set terminal x11 3 | #set terminal windows 4 | set key left top Left reverse font ",7" samplen 2 spacing 0.8 5 | set nolabel 6 | set border 3 7 | set lmargin 12 8 | set rmargin 12 9 | set xtics 2 10 | #set ytics 10 11 | set logscale y 12 | set title "Elapsed time to send 1000 times a buffer of \"int\" values" 13 | set xlabel "Number of processes in MPI_COMM_WORLD" 14 | set ylabel "Elapsed time in seconds" 15 | set style data linespoints 16 | set pointsize 0.8 17 | set xrange [0:24] 18 | plot "c_Fedora_shm_10000_intValues.dat",\ 19 | "c_Fedora_shm_50000_intValues.dat",\ 20 | "c_Fedora_shm_100000_intValues.dat",\ 21 | "java_Fedora_shm_10000_intValues.dat",\ 22 | "java_Fedora_shm_50000_intValues.dat",\ 23 | "java_Fedora_shm_100000_intValues.dat", \ 24 | "c_Fedora_net_10000_intValues.dat",\ 25 | "c_Fedora_net_50000_intValues.dat",\ 26 | "c_Fedora_net_100000_intValues.dat",\ 27 |
"java_Fedora_net_10000_intValues.dat",\ 28 | "java_Fedora_net_50000_intValues.dat",\ 29 | "java_Fedora_net_100000_intValues.dat" 30 | replot 31 | set terminal pdf 32 | set output "fedora_numProc_c_java_elapsedtime.pdf" 33 | replot 34 | pause 5 35 | -------------------------------------------------------------------------------- /performance/Fedora/java_Fedora_net_100000_intValues.dat: -------------------------------------------------------------------------------- 1 | 2 3.4284629821777344 2 | 4 6.167755842208862 3 | 6 8.843456983566284 4 | 8 11.718394041061401 5 | 10 14.5244460105896 6 | 12 17.562458038330078 7 | 14 23.984012126922607 8 | 16 24.67383909225464 9 | 18 30.652122020721436 10 | 20 34.26218008995056 11 | 22 37.45812487602234 12 | 24 39.77241396903992 13 | -------------------------------------------------------------------------------- /performance/Fedora/java_Fedora_net_10000_intValues.dat: -------------------------------------------------------------------------------- 1 | 2 0.3494880199432373 2 | 4 0.3769519329071045 3 | 6 0.4110128879547119 4 | 8 0.44219207763671875 5 | 10 0.47729015350341797 6 | 12 0.507843017578125 7 | 14 0.5432138442993164 8 | 16 0.5668509006500244 9 | 18 0.604910135269165 10 | 20 0.6562719345092773 11 | 22 0.674821138381958 12 | 24 0.7098760604858398 13 | -------------------------------------------------------------------------------- /performance/Fedora/java_Fedora_net_2_tasks.dat: -------------------------------------------------------------------------------- 1 | 10000 0.34982991218566895 2 | 20000 0.6966109275817871 3 | 30000 1.041551113128662 4 | 40000 1.3887770175933838 5 | 50000 1.8289449214935303 6 | 60000 2.080904006958008 7 | 70000 2.4276950359344482 8 | 80000 2.7751240730285645 9 | 90000 3.1208181381225586 10 | 100000 3.4273829460144043 11 | -------------------------------------------------------------------------------- /performance/Fedora/java_Fedora_net_4_tasks.dat: -------------------------------------------------------------------------------- 1 | 10000 0.3789651393890381 2 | 20000 0.8085839748382568 3 | 30000 1.2951910495758057 4 | 40000 1.8322839736938477 5 | 50000 2.410648822784424 6 | 60000 3.000051975250244 7 | 70000 3.315732955932617 8 | 80000 3.872434139251709 9 | 90000 4.4279561042785645 10 | 100000 6.180783033370972 11 | -------------------------------------------------------------------------------- /performance/Fedora/java_Fedora_net_50000_intValues.dat: -------------------------------------------------------------------------------- 1 | 2 1.7340540885925293 2 | 4 2.403304100036621 3 | 6 3.037099838256836 4 | 8 3.711580991744995 5 | 10 4.324384927749634 6 | 12 4.968112945556641 7 | 14 5.64357590675354 8 | 16 6.262057065963745 9 | 18 6.9323890209198 10 | 20 7.573770999908447 11 | 22 8.276149988174438 12 | 24 8.85390591621399 13 | -------------------------------------------------------------------------------- /performance/Fedora/java_Fedora_net_8_tasks.dat: -------------------------------------------------------------------------------- 1 | 10000 0.44213294982910156 2 | 20000 1.0556972026824951 3 | 30000 1.8110599517822266 4 | 40000 2.7771739959716797 5 | 50000 3.7235450744628906 6 | 60000 4.806872844696045 7 | 70000 5.118767976760864 8 | 80000 6.08653712272644 9 | 90000 7.136720895767212 10 | 100000 11.790457010269165 11 | -------------------------------------------------------------------------------- /performance/Fedora/java_Fedora_shm_100000_intValues.dat: 
-------------------------------------------------------------------------------- 1 | 2 0.13406705856323242 2 | 4 0.3376638889312744 3 | 6 0.39705896377563477 4 | 8 0.5065779685974121 5 | 10 0.5416660308837891 6 | 12 0.6196930408477783 7 | -------------------------------------------------------------------------------- /performance/Fedora/java_Fedora_shm_10000_intValues.dat: -------------------------------------------------------------------------------- 1 | 2 0.031599998474121094 2 | 4 0.05603504180908203 3 | 6 0.07024598121643066 4 | 8 0.09001922607421875 5 | 10 0.08814001083374023 6 | 12 0.10253095626831055 7 | -------------------------------------------------------------------------------- /performance/Fedora/java_Fedora_shm_2_tasks.dat: -------------------------------------------------------------------------------- 1 | 10000 0.03253984451293945 2 | 20000 0.05949282646179199 3 | 30000 0.08158087730407715 4 | 40000 0.10639119148254395 5 | 50000 0.1253519058227539 6 | 60000 0.14873409271240234 7 | 70000 0.17026400566101074 8 | 80000 0.1961650848388672 9 | 90000 0.2128751277923584 10 | 100000 0.13370704650878906 11 | -------------------------------------------------------------------------------- /performance/Fedora/java_Fedora_shm_4_tasks.dat: -------------------------------------------------------------------------------- 1 | 10000 0.05668497085571289 2 | 20000 0.1028449535369873 3 | 30000 0.14390802383422852 4 | 40000 0.19015812873840332 5 | 50000 0.227647066116333 6 | 60000 0.2781529426574707 7 | 70000 0.31453800201416016 8 | 80000 0.3578500747680664 9 | 90000 0.40828609466552734 10 | 100000 0.3342289924621582 11 | -------------------------------------------------------------------------------- /performance/Fedora/java_Fedora_shm_50000_intValues.dat: -------------------------------------------------------------------------------- 1 | 2 0.12837004661560059 2 | 4 0.22838521003723145 3 | 6 0.3141610622406006 4 | 8 0.39736390113830566 5 | 10 0.41719985008239746 6 | 12 0.4620239734649658 7 | -------------------------------------------------------------------------------- /performance/Fedora/java_Fedora_shm_8_tasks.dat: -------------------------------------------------------------------------------- 1 | 10000 0.08737516403198242 2 | 20000 0.15694093704223633 3 | 30000 0.24606108665466309 4 | 40000 0.30585694313049316 5 | 50000 0.3777651786804199 6 | 60000 0.47005701065063477 7 | 70000 0.5340149402618408 8 | 80000 0.6031930446624756 9 | 90000 0.6473889350891113 10 | 100000 0.507357120513916 11 | -------------------------------------------------------------------------------- /performance/Fedora/java_Fedora_x86_64_net_100000_intValues_cputime.dat: -------------------------------------------------------------------------------- 1 | 2 6.461540626 2 | 4 23.89194592 3 | 6 51.846523901 4 | 8 92.114260074 5 | 10 142.968583493 6 | 12 207.854521374 7 | 14 334.111885844 8 | 16 422.235468384 9 | 18 554.205523931 10 | 20 682.329419902 11 | 22 827.853275112 12 | 24 972.420518155 13 | -------------------------------------------------------------------------------- /performance/Fedora/java_Fedora_x86_64_net_100000_intValues_elapsedtime.dat: -------------------------------------------------------------------------------- 1 | 2 3.4284629821777344 2 | 4 6.167755842208862 3 | 6 8.843456983566284 4 | 8 11.718394041061401 5 | 10 14.5244460105896 6 | 12 17.562458038330078 7 | 14 23.984012126922607 8 | 16 24.67383909225464 9 | 18 30.652122020721436 10 | 20 34.26218008995056 11 | 22 37.45812487602234 12 | 24 
39.77241396903992 13 | -------------------------------------------------------------------------------- /performance/Fedora/java_Fedora_x86_64_net_10000_intValues_cputime.dat: -------------------------------------------------------------------------------- 1 | 2 0.64281543 2 | 4 2.373287841 3 | 6 4.157824022 4 | 8 7.705681603 5 | 10 10.144443382 6 | 12 13.978116289 7 | 14 18.447926739 8 | 16 23.832736895 9 | 18 29.617104758 10 | 20 36.418296983 11 | 22 43.664781375 12 | 24 51.705416415 13 | -------------------------------------------------------------------------------- /performance/Fedora/java_Fedora_x86_64_net_10000_intValues_elapsedtime.dat: -------------------------------------------------------------------------------- 1 | 2 0.3494880199432373 2 | 4 0.3769519329071045 3 | 6 0.4110128879547119 4 | 8 0.44219207763671875 5 | 10 0.47729015350341797 6 | 12 0.507843017578125 7 | 14 0.5432138442993164 8 | 16 0.5668509006500244 9 | 18 0.604910135269165 10 | 20 0.6562719345092773 11 | 22 0.674821138381958 12 | 24 0.7098760604858398 13 | -------------------------------------------------------------------------------- /performance/Fedora/java_Fedora_x86_64_net_2_tasks_cputime.dat: -------------------------------------------------------------------------------- 1 | 10000 0.647452388 2 | 20000 1.297756897 3 | 30000 1.952486721 4 | 40000 2.603506473 5 | 50000 3.358194655 6 | 60000 3.912346765 7 | 70000 4.55659339 8 | 80000 5.219641258 9 | 90000 5.862344491 10 | 100000 6.453902718 11 | -------------------------------------------------------------------------------- /performance/Fedora/java_Fedora_x86_64_net_2_tasks_elapsedtime.dat: -------------------------------------------------------------------------------- 1 | 10000 0.34982991218566895 2 | 20000 0.6966109275817871 3 | 30000 1.041551113128662 4 | 40000 1.3887770175933838 5 | 50000 1.8289449214935303 6 | 60000 2.080904006958008 7 | 70000 2.4276950359344482 8 | 80000 2.7751240730285645 9 | 90000 3.1208181381225586 10 | 100000 3.4273829460144043 11 | -------------------------------------------------------------------------------- /performance/Fedora/java_Fedora_x86_64_net_4_tasks_cputime.dat: -------------------------------------------------------------------------------- 1 | 10000 2.361076862 2 | 20000 4.346038729 3 | 30000 6.14805353 4 | 40000 8.091704156 5 | 50000 10.036156132 6 | 60000 12.02626665 7 | 70000 14.183328224 8 | 80000 16.129757696 9 | 90000 18.367694208 10 | 100000 23.949377924 11 | -------------------------------------------------------------------------------- /performance/Fedora/java_Fedora_x86_64_net_4_tasks_elapsedtime.dat: -------------------------------------------------------------------------------- 1 | 10000 0.3789651393890381 2 | 20000 0.8085839748382568 3 | 30000 1.2951910495758057 4 | 40000 1.8322839736938477 5 | 50000 2.410648822784424 6 | 60000 3.000051975250244 7 | 70000 3.315732955932617 8 | 80000 3.872434139251709 9 | 90000 4.4279561042785645 10 | 100000 6.180783033370972 11 | -------------------------------------------------------------------------------- /performance/Fedora/java_Fedora_x86_64_net_50000_intValues_cputime.dat: -------------------------------------------------------------------------------- 1 | 2 3.243714213 2 | 4 10.050054184 3 | 6 20.220828852 4 | 8 33.446926613 5 | 10 50.523919979 6 | 12 70.196894453 7 | 14 93.856501479 8 | 16 121.67905409 9 | 18 151.623564557 10 | 20 185.149394249 11 | 22 222.453440253 12 | 24 263.227645887 13 | 
-------------------------------------------------------------------------------- /performance/Fedora/java_Fedora_x86_64_net_50000_intValues_elapsedtime.dat: -------------------------------------------------------------------------------- 1 | 2 1.7340540885925293 2 | 4 2.403304100036621 3 | 6 3.037099838256836 4 | 8 3.711580991744995 5 | 10 4.324384927749634 6 | 12 4.968112945556641 7 | 14 5.64357590675354 8 | 16 6.262057065963745 9 | 18 6.9323890209198 10 | 20 7.573770999908447 11 | 22 8.276149988174438 12 | 24 8.85390591621399 13 | -------------------------------------------------------------------------------- /performance/Fedora/java_Fedora_x86_64_net_8_tasks_cputime.dat: -------------------------------------------------------------------------------- 1 | 10000 7.630312074 2 | 20000 13.394609829 3 | 30000 19.718882055 4 | 40000 26.706353068 5 | 50000 33.764916168 6 | 60000 40.219097267 7 | 70000 47.4953702 8 | 80000 54.254652435 9 | 90000 60.254029614 10 | 100000 92.589245362 11 | -------------------------------------------------------------------------------- /performance/Fedora/java_Fedora_x86_64_net_8_tasks_elapsedtime.dat: -------------------------------------------------------------------------------- 1 | 10000 0.44213294982910156 2 | 20000 1.0556972026824951 3 | 30000 1.8110599517822266 4 | 40000 2.7771739959716797 5 | 50000 3.7235450744628906 6 | 60000 4.806872844696045 7 | 70000 5.118767976760864 8 | 80000 6.08653712272644 9 | 90000 7.136720895767212 10 | 100000 11.790457010269165 11 | -------------------------------------------------------------------------------- /performance/Fedora/java_Fedora_x86_64_shm_100000_intValues_cputime.dat: -------------------------------------------------------------------------------- 1 | 2 0.267342865 2 | 4 1.347090414 3 | 6 2.372514242 4 | 8 4.039154175 5 | 10 5.43706048 6 | 12 7.40761486 7 | -------------------------------------------------------------------------------- /performance/Fedora/java_Fedora_x86_64_shm_100000_intValues_elapsedtime.dat: -------------------------------------------------------------------------------- 1 | 2 0.13406705856323242 2 | 4 0.3376638889312744 3 | 6 0.39705896377563477 4 | 8 0.5065779685974121 5 | 10 0.5416660308837891 6 | 12 0.6196930408477783 7 | -------------------------------------------------------------------------------- /performance/Fedora/java_Fedora_x86_64_shm_10000_intValues_cputime.dat: -------------------------------------------------------------------------------- 1 | 2 0.059020112 2 | 4 0.214954214 3 | 6 0.407174799 4 | 8 0.697926993 5 | 10 0.856191589 6 | 12 1.198999002 7 | -------------------------------------------------------------------------------- /performance/Fedora/java_Fedora_x86_64_shm_10000_intValues_elapsedtime.dat: -------------------------------------------------------------------------------- 1 | 2 0.031599998474121094 2 | 4 0.05603504180908203 3 | 6 0.07024598121643066 4 | 8 0.09001922607421875 5 | 10 0.08814001083374023 6 | 12 0.10253095626831055 7 | -------------------------------------------------------------------------------- /performance/Fedora/java_Fedora_x86_64_shm_2_tasks_cputime.dat: -------------------------------------------------------------------------------- 1 | 10000 0.060592243 2 | 20000 0.112703508 3 | 30000 0.15767706 4 | 40000 0.20640132 5 | 50000 0.248837618 6 | 60000 0.294151308 7 | 70000 0.340047645 8 | 80000 0.389132578 9 | 90000 0.427269143 10 | 100000 0.269818053 11 | -------------------------------------------------------------------------------- 
/performance/Fedora/java_Fedora_x86_64_shm_2_tasks_elapsedtime.dat: -------------------------------------------------------------------------------- 1 | 10000 0.03253984451293945 2 | 20000 0.05949282646179199 3 | 30000 0.08158087730407715 4 | 40000 0.10639119148254395 5 | 50000 0.1253519058227539 6 | 60000 0.14873409271240234 7 | 70000 0.17026400566101074 8 | 80000 0.1961650848388672 9 | 90000 0.2128751277923584 10 | 100000 0.13370704650878906 11 | -------------------------------------------------------------------------------- /performance/Fedora/java_Fedora_x86_64_shm_4_tasks_cputime.dat: -------------------------------------------------------------------------------- 1 | 10000 0.218013176 2 | 20000 0.402699182 3 | 30000 0.56733102 4 | 40000 0.745212823 5 | 50000 0.91846898 6 | 60000 1.105376121 7 | 70000 1.266810834 8 | 80000 1.440435612 9 | 90000 1.626849138 10 | 100000 1.346845028 11 | -------------------------------------------------------------------------------- /performance/Fedora/java_Fedora_x86_64_shm_4_tasks_elapsedtime.dat: -------------------------------------------------------------------------------- 1 | 10000 0.05668497085571289 2 | 20000 0.1028449535369873 3 | 30000 0.14390802383422852 4 | 40000 0.19015812873840332 5 | 50000 0.227647066116333 6 | 60000 0.2781529426574707 7 | 70000 0.31453800201416016 8 | 80000 0.3578500747680664 9 | 90000 0.40828609466552734 10 | 100000 0.3342289924621582 11 | -------------------------------------------------------------------------------- /performance/Fedora/java_Fedora_x86_64_shm_50000_intValues_cputime.dat: -------------------------------------------------------------------------------- 1 | 2 0.253100877 2 | 4 0.921349254 3 | 6 1.872408573 4 | 8 3.160900494 5 | 10 4.146307399 6 | 12 5.509528709 7 | -------------------------------------------------------------------------------- /performance/Fedora/java_Fedora_x86_64_shm_50000_intValues_elapsedtime.dat: -------------------------------------------------------------------------------- 1 | 2 0.12837004661560059 2 | 4 0.22838521003723145 3 | 6 0.3141610622406006 4 | 8 0.39736390113830566 5 | 10 0.41719985008239746 6 | 12 0.4620239734649658 7 | -------------------------------------------------------------------------------- /performance/Fedora/java_Fedora_x86_64_shm_8_tasks_cputime.dat: -------------------------------------------------------------------------------- 1 | 10000 0.68006492 2 | 20000 1.236481924 3 | 30000 1.949449484 4 | 40000 2.429214104 5 | 50000 3.041126653 6 | 60000 3.741882264 7 | 70000 4.254361146 8 | 80000 4.84483357 9 | 90000 5.157229079 10 | 100000 4.04618998 11 | -------------------------------------------------------------------------------- /performance/Fedora/java_Fedora_x86_64_shm_8_tasks_elapsedtime.dat: -------------------------------------------------------------------------------- 1 | 10000 0.08737516403198242 2 | 20000 0.15694093704223633 3 | 30000 0.24606108665466309 4 | 40000 0.30585694313049316 5 | 50000 0.3777651786804199 6 | 60000 0.47005701065063477 7 | 70000 0.5340149402618408 8 | 80000 0.6031930446624756 9 | 90000 0.6473889350891113 10 | 100000 0.507357120513916 11 | -------------------------------------------------------------------------------- /performance/gnuplot/c_Linux_100000_intValues.dat: -------------------------------------------------------------------------------- 1 | 2 0.550000 2 | 4 7.000000 3 | 6 19.140000 4 | 8 28.290000 5 | 10 38.670000 6 | 12 44.100000 7 | 14 69.340000 8 | 16 80.320000 9 | 
-------------------------------------------------------------------------------- /performance/gnuplot/c_Linux_10000_intValues.dat: -------------------------------------------------------------------------------- 1 | 2 0.120000 2 | 4 0.790000 3 | 6 1.090000 4 | 8 1.970000 5 | 10 2.060000 6 | 12 2.330000 7 | 14 3.440000 8 | 16 5.250000 9 | -------------------------------------------------------------------------------- /performance/gnuplot/c_Linux_2_tasks.dat: -------------------------------------------------------------------------------- 1 | 10000 0.130000 2 | 20000 0.240000 3 | 30000 0.310000 4 | 40000 0.420000 5 | 50000 0.540000 6 | 60000 0.600000 7 | 70000 0.690000 8 | 80000 0.790000 9 | 90000 0.950000 10 | 100000 0.590000 11 | -------------------------------------------------------------------------------- /performance/gnuplot/c_Linux_4_tasks.dat: -------------------------------------------------------------------------------- 1 | 10000 1.000000 2 | 20000 1.080000 3 | 30000 1.600000 4 | 40000 1.980000 5 | 50000 3.990000 6 | 60000 3.610000 7 | 70000 5.430000 8 | 80000 5.360000 9 | 90000 6.170000 10 | 100000 7.070000 11 | -------------------------------------------------------------------------------- /performance/gnuplot/c_Linux_50000_intValues.dat: -------------------------------------------------------------------------------- 1 | 2 0.510000 2 | 4 3.850000 3 | 6 6.600000 4 | 8 9.800000 5 | 10 12.560000 6 | 12 17.030000 7 | 14 20.860000 8 | 16 24.830000 9 | -------------------------------------------------------------------------------- /performance/gnuplot/c_Linux_8_tasks.dat: -------------------------------------------------------------------------------- 1 | 10000 1.620000 2 | 20000 2.400000 3 | 30000 5.300000 4 | 40000 8.310000 5 | 50000 9.700000 6 | 60000 11.390000 7 | 70000 16.580000 8 | 80000 17.350000 9 | 90000 18.780000 10 | 100000 31.460000 11 | -------------------------------------------------------------------------------- /performance/gnuplot/java_Linux_100000_intValues.dat: -------------------------------------------------------------------------------- 1 | 2 1.041406905 2 | 4 11.129938599 3 | 6 25.037227342 4 | 8 36.668985396 5 | 10 49.563278107 6 | 12 59.782575868 7 | 14 86.479609278 8 | 16 91.230597387 9 | -------------------------------------------------------------------------------- /performance/gnuplot/java_Linux_10000_intValues.dat: -------------------------------------------------------------------------------- 1 | 2 0.158763953 2 | 4 1.124451255 3 | 6 1.227253139 4 | 8 2.440357105 5 | 10 3.412236048 6 | 12 4.476970394 7 | 14 5.00888658 8 | 16 6.096390464 9 | -------------------------------------------------------------------------------- /performance/gnuplot/java_Linux_2_tasks.dat: -------------------------------------------------------------------------------- 1 | 10000 0.169071143 2 | 20000 0.346641417 3 | 30000 0.450040493 4 | 40000 0.603211847 5 | 50000 0.699989494 6 | 60000 0.901930327 7 | 70000 1.05529108 8 | 80000 1.267582198 9 | 90000 1.636963633 10 | 100000 1.070368222 11 | -------------------------------------------------------------------------------- /performance/gnuplot/java_Linux_4_tasks.dat: -------------------------------------------------------------------------------- 1 | 10000 0.914610182 2 | 20000 1.742676618 3 | 30000 2.536045701 4 | 40000 3.588925678 5 | 50000 4.369841909 6 | 60000 6.268852629 7 | 70000 6.936231059 8 | 80000 8.350494066 9 | 90000 9.184344594 10 | 100000 11.769954889 11 | 
-------------------------------------------------------------------------------- /performance/gnuplot/java_Linux_50000_intValues.dat: -------------------------------------------------------------------------------- 1 | 2 0.700681385 2 | 4 4.211860537 3 | 6 11.038060581 4 | 8 13.375469091 5 | 10 16.09692621 6 | 12 22.417128897 7 | 14 27.618367917 8 | 16 27.733347936 9 | -------------------------------------------------------------------------------- /performance/gnuplot/java_Linux_8_tasks.dat: -------------------------------------------------------------------------------- 1 | 10000 1.958841558 2 | 20000 4.784058815 3 | 30000 7.863270435 4 | 40000 11.367219043 5 | 50000 13.41175019 6 | 60000 17.896041348 7 | 70000 20.089032245 8 | 80000 24.085360835 9 | 90000 26.205421196 10 | 100000 32.138840738 11 | -------------------------------------------------------------------------------- /performance/gnuplot/linpc1_msgSize_c.plt: -------------------------------------------------------------------------------- 1 | reset 2 | set terminal x11 3 | #set terminal windows 4 | set key left top Left reverse 5 | set nolabel 6 | set border 3 7 | set lmargin 12 8 | set rmargin 12 9 | set xtics 10000 10 | set ytics 10 11 | set title "Time to send 1000 times a buffer of \"int\" values" 12 | set xlabel "Number of \"int\" values in buffer" 13 | set ylabel "CPU time in seconds" 14 | set style data linespoints 15 | set xrange [0:100000] 16 | plot "c_Linux_2_tasks.dat",\ 17 | "c_Linux_4_tasks.dat",\ 18 | "c_Linux_8_tasks.dat" 19 | replot 20 | set terminal pdf 21 | set output "linpc1_msgSize_c.pdf" 22 | replot 23 | pause 5 24 | -------------------------------------------------------------------------------- /performance/gnuplot/linpc1_msgSize_c_java.plt: -------------------------------------------------------------------------------- 1 | reset 2 | set terminal x11 3 | #set terminal windows 4 | set key left top Left reverse 5 | set nolabel 6 | set border 3 7 | set lmargin 12 8 | set rmargin 12 9 | set xtics 10000 10 | set ytics 10 11 | set title "Time to send 1000 times a buffer of \"int\" values" 12 | set xlabel "Number of \"int\" values in buffer" 13 | set ylabel "CPU time in seconds" 14 | set style data linespoints 15 | set xrange [0:100000] 16 | plot "c_Linux_4_tasks.dat",\ 17 | "c_Linux_8_tasks.dat",\ 18 | "java_Linux_4_tasks.dat",\ 19 | "java_Linux_8_tasks.dat" 20 | replot 21 | set terminal pdf 22 | set output "linpc1_msgSize_c_java.pdf" 23 | replot 24 | pause 5 25 | -------------------------------------------------------------------------------- /performance/gnuplot/linpc1_msgSize_java.plt: -------------------------------------------------------------------------------- 1 | reset 2 | set terminal x11 3 | #set terminal windows 4 | set key left top Left reverse 5 | set nolabel 6 | set border 3 7 | set lmargin 12 8 | set rmargin 12 9 | set xtics 10000 10 | set ytics 10 11 | set title "Time to send 1000 times a buffer of \"int\" values" 12 | set xlabel "Number of \"int\" values in buffer" 13 | set ylabel "CPU time in seconds" 14 | set style data linespoints 15 | set xrange [0:100000] 16 | plot "java_Linux_2_tasks.dat",\ 17 | "java_Linux_4_tasks.dat",\ 18 | "java_Linux_8_tasks.dat" 19 | replot 20 | set terminal pdf 21 | set output "linpc1_msgSize_java.pdf" 22 | replot 23 | pause 5 24 | -------------------------------------------------------------------------------- /performance/gnuplot/linpc1_numProc_c.plt: -------------------------------------------------------------------------------- 1 | reset 2 | set terminal 
x11 3 | #set terminal windows 4 | set key left top Left reverse 5 | set nolabel 6 | set border 3 7 | set lmargin 12 8 | set rmargin 12 9 | set xtics 2 10 | set ytics 10 11 | set title "Time to send 1000 times a buffer of \"int\" values" 12 | set xlabel "Number of processes in MPI_COMM_WORLD" 13 | set ylabel "CPU time in seconds" 14 | set style data linespoints 15 | set xrange [0:16] 16 | plot "c_Linux_10000_intValues.dat",\ 17 | "c_Linux_50000_intValues.dat",\ 18 | "c_Linux_100000_intValues.dat" 19 | replot 20 | set terminal pdf 21 | set output "linpc1_numProc_c.pdf" 22 | replot 23 | pause 5 24 | -------------------------------------------------------------------------------- /performance/gnuplot/linpc1_numProc_c_java.plt: -------------------------------------------------------------------------------- 1 | reset 2 | set terminal x11 3 | #set terminal windows 4 | set key left top Left reverse 5 | set nolabel 6 | set border 3 7 | set lmargin 12 8 | set rmargin 12 9 | set xtics 2 10 | set ytics 10 11 | set title "Time to send 1000 times a buffer of \"int\" values" 12 | set xlabel "Number of processes in MPI_COMM_WORLD" 13 | set ylabel "CPU time in seconds" 14 | set style data linespoints 15 | set xrange [0:16] 16 | plot "c_Linux_50000_intValues.dat" with lines,\ 17 | "c_Linux_100000_intValues.dat" with lines,\ 18 | "java_Linux_50000_intValues.dat" with lines,\ 19 | "java_Linux_100000_intValues.dat" with lines 20 | replot 21 | set terminal pdf 22 | set output "linpc1_numProc_c_java.pdf" 23 | replot 24 | pause 5 25 | -------------------------------------------------------------------------------- /performance/gnuplot/linpc1_numProc_java.plt: -------------------------------------------------------------------------------- 1 | reset 2 | set terminal x11 3 | #set terminal windows 4 | set key left top Left reverse 5 | set nolabel 6 | set border 3 7 | set lmargin 12 8 | set rmargin 12 9 | set xtics 2 10 | set ytics 10 11 | set title "Time to send 1000 times a buffer of \"int\" values" 12 | set xlabel "Number of processes in MPI_COMM_WORLD" 13 | set ylabel "CPU time in seconds" 14 | set style data linespoints 15 | set xrange [0:16] 16 | plot "java_Linux_10000_intValues.dat",\ 17 | "java_Linux_50000_intValues.dat",\ 18 | "java_Linux_100000_intValues.dat" 19 | replot 20 | set terminal pdf 21 | set output "linpc1_numProc_java.pdf" 22 | replot 23 | pause 5 24 | -------------------------------------------------------------------------------- /performance/gnuplot/unix/linpc1_msgSize_c.plt: -------------------------------------------------------------------------------- 1 | reset 2 | set terminal x11 3 | #set terminal windows 4 | set key left top Left reverse 5 | set nolabel 6 | set border 3 7 | set lmargin 12 8 | set rmargin 12 9 | set xtics 10000 10 | set ytics 10 11 | set title "Time to send 1000 times a buffer of \"int\" values" 12 | set xlabel "Number of \"int\" values in buffer" 13 | set ylabel "CPU time in seconds" 14 | set style data linespoints 15 | set xrange [0:100000] 16 | plot "c_Linux_2_tasks.dat",\ 17 | "c_Linux_4_tasks.dat",\ 18 | "c_Linux_8_tasks.dat" 19 | replot 20 | set terminal pdf 21 | set output "linpc1_msgSize_c.pdf" 22 | replot 23 | pause 5 24 | -------------------------------------------------------------------------------- /performance/gnuplot/unix/linpc1_msgSize_c_java.plt: -------------------------------------------------------------------------------- 1 | reset 2 | set terminal x11 3 | #set terminal windows 4 | set key left top Left reverse 5 | set nolabel 6 | set border 3 7 | 
set lmargin 12 8 | set rmargin 12 9 | set xtics 10000 10 | set ytics 10 11 | set title "Time to send 1000 times a buffer of \"int\" values" 12 | set xlabel "Number of \"int\" values in buffer" 13 | set ylabel "CPU time in seconds" 14 | set style data linespoints 15 | set xrange [0:100000] 16 | plot "c_Linux_4_tasks.dat",\ 17 | "c_Linux_8_tasks.dat",\ 18 | "java_Linux_4_tasks.dat",\ 19 | "java_Linux_8_tasks.dat" 20 | replot 21 | set terminal pdf 22 | set output "linpc1_msgSize_c_java.pdf" 23 | replot 24 | pause 5 25 | -------------------------------------------------------------------------------- /performance/gnuplot/unix/linpc1_msgSize_java.plt: -------------------------------------------------------------------------------- 1 | reset 2 | set terminal x11 3 | #set terminal windows 4 | set key left top Left reverse 5 | set nolabel 6 | set border 3 7 | set lmargin 12 8 | set rmargin 12 9 | set xtics 10000 10 | set ytics 10 11 | set title "Time to send 1000 times a buffer of \"int\" values" 12 | set xlabel "Number of \"int\" values in buffer" 13 | set ylabel "CPU time in seconds" 14 | set style data linespoints 15 | set xrange [0:100000] 16 | plot "java_Linux_2_tasks.dat",\ 17 | "java_Linux_4_tasks.dat",\ 18 | "java_Linux_8_tasks.dat" 19 | replot 20 | set terminal pdf 21 | set output "linpc1_msgSize_java.pdf" 22 | replot 23 | pause 5 24 | -------------------------------------------------------------------------------- /performance/gnuplot/unix/linpc1_numProc_c.plt: -------------------------------------------------------------------------------- 1 | reset 2 | set terminal x11 3 | #set terminal windows 4 | set key left top Left reverse 5 | set nolabel 6 | set border 3 7 | set lmargin 12 8 | set rmargin 12 9 | set xtics 2 10 | set ytics 10 11 | set title "Time to send 1000 times a buffer of \"int\" values" 12 | set xlabel "Number of processes in MPI_COMM_WORLD" 13 | set ylabel "CPU time in seconds" 14 | set style data linespoints 15 | set xrange [0:16] 16 | plot "c_Linux_10000_intValues.dat",\ 17 | "c_Linux_50000_intValues.dat",\ 18 | "c_Linux_100000_intValues.dat" 19 | replot 20 | set terminal pdf 21 | set output "linpc1_numProc_c.pdf" 22 | replot 23 | pause 5 24 | -------------------------------------------------------------------------------- /performance/gnuplot/unix/linpc1_numProc_c_java.plt: -------------------------------------------------------------------------------- 1 | reset 2 | set terminal x11 3 | #set terminal windows 4 | set key left top Left reverse 5 | set nolabel 6 | set border 3 7 | set lmargin 12 8 | set rmargin 12 9 | set xtics 2 10 | set ytics 10 11 | set title "Time to send 1000 times a buffer of \"int\" values" 12 | set xlabel "Number of processes in MPI_COMM_WORLD" 13 | set ylabel "CPU time in seconds" 14 | set style data linespoints 15 | set xrange [0:16] 16 | plot "c_Linux_50000_intValues.dat" with lines,\ 17 | "c_Linux_100000_intValues.dat" with lines,\ 18 | "java_Linux_50000_intValues.dat" with lines,\ 19 | "java_Linux_100000_intValues.dat" with lines 20 | replot 21 | set terminal pdf 22 | set output "linpc1_numProc_c_java.pdf" 23 | replot 24 | pause 5 25 | -------------------------------------------------------------------------------- /performance/gnuplot/unix/linpc1_numProc_java.plt: -------------------------------------------------------------------------------- 1 | reset 2 | set terminal x11 3 | #set terminal windows 4 | set key left top Left reverse 5 | set nolabel 6 | set border 3 7 | set lmargin 12 8 | set rmargin 12 9 | set xtics 2 10 | set ytics 10 11 | 
set title "Time to send 1000 times a buffer of \"int\" values" 12 | set xlabel "Number of processes in MPI_COMM_WORLD" 13 | set ylabel "CPU time in seconds" 14 | set style data linespoints 15 | set xrange [0:16] 16 | plot "java_Linux_10000_intValues.dat",\ 17 | "java_Linux_50000_intValues.dat",\ 18 | "java_Linux_100000_intValues.dat" 19 | replot 20 | set terminal pdf 21 | set output "linpc1_numProc_java.pdf" 22 | replot 23 | pause 5 24 | -------------------------------------------------------------------------------- /performance/gnuplot/windows/linpc1_msgSize_c.plt: -------------------------------------------------------------------------------- 1 | reset 2 | #set terminal x11 3 | set terminal windows 4 | set key left top Left reverse 5 | set nolabel 6 | set border 3 7 | set lmargin 12 8 | set rmargin 12 9 | set xtics 10000 10 | set ytics 10 11 | set title "Time to send 1000 times a buffer of \"int\" values" 12 | set xlabel "Number of \"int\" values in buffer" 13 | set ylabel "CPU time in seconds" 14 | set xrange [0:100000] 15 | plot "c_Linux_2_tasks.dat" with lines,\ 16 | "c_Linux_4_tasks.dat" with lines,\ 17 | "c_Linux_8_tasks.dat" with lines 18 | replot 19 | set terminal latex 20 | set output "linpc1_msgSize_c.tex" 21 | replot 22 | set terminal png 23 | set output "linpc1_msgSize_c.png" 24 | replot 25 | pause 5 26 | -------------------------------------------------------------------------------- /performance/gnuplot/windows/linpc1_msgSize_c_java.plt: -------------------------------------------------------------------------------- 1 | reset 2 | #set terminal x11 3 | set terminal windows 4 | set key left top Left reverse 5 | set nolabel 6 | set border 3 7 | set lmargin 12 8 | set rmargin 12 9 | set xtics 10000 10 | set ytics 10 11 | set title "Time to send 1000 times a buffer of \"int\" values" 12 | set xlabel "Number of \"int\" values in buffer" 13 | set ylabel "CPU time in seconds" 14 | set xrange [0:100000] 15 | plot "c_Linux_4_tasks.dat" with lines,\ 16 | "c_Linux_8_tasks.dat" with lines,\ 17 | "java_Linux_4_tasks.dat" with lines,\ 18 | "java_Linux_8_tasks.dat" with lines 19 | replot 20 | set terminal latex 21 | set output "linpc1_msgSize_c_java.tex" 22 | replot 23 | set terminal png 24 | set output "linpc1_msgSize_c_java.png" 25 | replot 26 | pause 5 27 | -------------------------------------------------------------------------------- /performance/gnuplot/windows/linpc1_msgSize_java.plt: -------------------------------------------------------------------------------- 1 | reset 2 | #set terminal x11 3 | set terminal windows 4 | set key left top Left reverse 5 | set nolabel 6 | set border 3 7 | set lmargin 12 8 | set rmargin 12 9 | set xtics 10000 10 | set ytics 10 11 | set title "Time to send 1000 times a buffer of \"int\" values" 12 | set xlabel "Number of \"int\" values in buffer" 13 | set ylabel "CPU time in seconds" 14 | set xrange [0:100000] 15 | plot "java_Linux_2_tasks.dat" with lines,\ 16 | "java_Linux_4_tasks.dat" with lines,\ 17 | "java_Linux_8_tasks.dat" with lines 18 | replot 19 | set terminal latex 20 | set output "linpc1_msgSize_java.tex" 21 | replot 22 | set terminal png 23 | set output "linpc1_msgSize_java.png" 24 | replot 25 | pause 5 26 | -------------------------------------------------------------------------------- /performance/gnuplot/windows/linpc1_numProc_c.plt: -------------------------------------------------------------------------------- 1 | reset 2 | #set terminal x11 3 | set terminal windows 4 | set key left top Left reverse 5 | set nolabel 6 | set 
border 3 7 | set lmargin 12 8 | set rmargin 12 9 | set xtics 2 10 | set ytics 10 11 | set title "Time to send 1000 times a buffer of \"int\" values" 12 | set xlabel "Number of processes in MPI_COMM_WORLD" 13 | set ylabel "CPU time in seconds" 14 | set xrange [0:16] 15 | plot "c_Linux_10000_intValues.dat" with lines,\ 16 | "c_Linux_50000_intValues.dat" with lines,\ 17 | "c_Linux_100000_intValues.dat" with lines 18 | replot 19 | set terminal latex 20 | set output "linpc1_numProc_c.tex" 21 | replot 22 | set terminal png 23 | set output "linpc1_numProc_c.png" 24 | replot 25 | pause 5 26 | -------------------------------------------------------------------------------- /performance/gnuplot/windows/linpc1_numProc_c_java.plt: -------------------------------------------------------------------------------- 1 | reset 2 | #set terminal x11 3 | set terminal windows 4 | set key left top Left reverse 5 | set nolabel 6 | set border 3 7 | set lmargin 12 8 | set rmargin 12 9 | set xtics 2 10 | set ytics 10 11 | set title "Time to send 1000 times a buffer of \"int\" values" 12 | set xlabel "Number of processes in MPI_COMM_WORLD" 13 | set ylabel "CPU time in seconds" 14 | set xrange [0:16] 15 | plot "c_Linux_50000_intValues.dat" with lines,\ 16 | "c_Linux_100000_intValues.dat" with lines,\ 17 | "java_Linux_50000_intValues.dat" with lines,\ 18 | "java_Linux_100000_intValues.dat" with lines 19 | replot 20 | set terminal latex 21 | set output "linpc1_numProc_c_java.tex" 22 | replot 23 | set terminal png 24 | set output "linpc1_numProc_c_java.png" 25 | replot 26 | pause 5 27 | -------------------------------------------------------------------------------- /performance/gnuplot/windows/linpc1_numProc_java.plt: -------------------------------------------------------------------------------- 1 | reset 2 | #set terminal x11 3 | set terminal windows 4 | set key left top Left reverse 5 | set nolabel 6 | set border 3 7 | set lmargin 12 8 | set rmargin 12 9 | set xtics 2 10 | set ytics 10 11 | set title "Time to send 1000 times a buffer of \"int\" values" 12 | set xlabel "Number of processes in MPI_COMM_WORLD" 13 | set ylabel "CPU time in seconds" 14 | set xrange [0:16] 15 | plot "java_Linux_10000_intValues.dat" with lines,\ 16 | "java_Linux_50000_intValues.dat" with lines,\ 17 | "java_Linux_100000_intValues.dat" with lines 18 | replot 19 | set terminal latex 20 | set output "linpc1_numProc_java.tex" 21 | replot 22 | set terminal png 23 | set output "linpc1_numProc_java.png" 24 | replot 25 | pause 5 26 | -------------------------------------------------------------------------------- /pt2pt/Makefile.am: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-mpi/ompi-java-test/1a3295a7cf8c8ea30366b2f9e50d48726354b47f/pt2pt/Makefile.am -------------------------------------------------------------------------------- /pt2pt/OmpitestError.java: -------------------------------------------------------------------------------- 1 | ../reporting/OmpitestError.java -------------------------------------------------------------------------------- /pt2pt/OmpitestProgress.java: -------------------------------------------------------------------------------- 1 | ../reporting/OmpitestProgress.java -------------------------------------------------------------------------------- /pt2pt/Rsend.java: -------------------------------------------------------------------------------- 1 | /* Function: - tests synchronicity of MPI.Rsend between two ranks 2 | * 3 | * This file is a
port from "rsend.c" from the "ompi-ibm-10.0" 4 | * regression test package. The formatting of the code is 5 | * mainly the same as in the original file. 6 | * 7 | * 8 | * File: Rsend.java Author: S. Gross 9 | * 10 | */ 11 | 12 | import mpi.*; 13 | 14 | public class Rsend 15 | { 16 | public static void main (String args[]) throws MPIException 17 | { 18 | int buf[], len, me; 19 | Status status; 20 | 21 | buf = new int[10]; 22 | len = buf.length; 23 | MPI.Init(args); 24 | me = MPI.COMM_WORLD.getRank(); 25 | 26 | /* We need at least 2 to run */ 27 | 28 | OmpitestError.ompitestCheckSize(OmpitestError.getFileName(), 29 | OmpitestError.getLineNumber(), 30 | 2, true); 31 | 32 | /* Clear out so that we can be bcheck clean */ 33 | 34 | for (int i = 0; i < len; ++i) { 35 | buf[i] = 0; 36 | } 37 | 38 | MPI.COMM_WORLD.barrier (); 39 | if (me == 0) { 40 | for (int i = 0; i < 1000000; ++i) { 41 | ; 42 | } 43 | MPI.COMM_WORLD.rSend (buf, len, MPI.CHAR, 1, 1); 44 | } else if (me == 1) { 45 | status = MPI.COMM_WORLD.recv (buf, len, MPI.CHAR, 0, 1); 46 | } 47 | 48 | MPI.COMM_WORLD.barrier (); 49 | MPI.Finalize(); 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /pt2pt/Rsend2.java: -------------------------------------------------------------------------------- 1 | /* Function: - tests synchonicity of MPI.Rsend between two ranks 2 | * 3 | * This file is a port from "rsend2.c" from the "ompi-ibm-10.0" 4 | * regression test package. The formatting of the code is 5 | * mainly the same as in the original file. 6 | * 7 | * 8 | * File: Rsend2.java Author: S. Gross 9 | * 10 | */ 11 | 12 | import java.nio.*; 13 | import java.util.*; 14 | import mpi.*; 15 | import static mpi.MPI.slice; 16 | 17 | public class Rsend2 18 | { 19 | public static void main (String args[]) throws MPIException 20 | { 21 | int tasks, bytes; 22 | Request req[]; 23 | 24 | IntBuffer me = MPI.newIntBuffer(1), 25 | data = MPI.newIntBuffer(1000); 26 | 27 | MPI.Init(args); 28 | me.put(0, MPI.COMM_WORLD.getRank()); 29 | tasks = MPI.COMM_WORLD.getSize(); 30 | req = new Request[1000]; 31 | 32 | for(int i = 0; i < tasks; i++) 33 | req[2*i+1] = MPI.COMM_WORLD.iRecv(slice(data, i), 1, MPI.INT, i, 1); 34 | MPI.COMM_WORLD.barrier (); 35 | 36 | for(int i = 0; i < tasks; i++) 37 | req[2*i] = MPI.COMM_WORLD.irSend(me, 1, MPI.INT, i, 1); 38 | Request.waitAll(Arrays.copyOf(req, 2 * tasks)); 39 | 40 | MPI.COMM_WORLD.barrier (); 41 | MPI.Finalize(); 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /pt2pt/Seq.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "seq.c" from the "ompi-ibm-10.0" 4 | * regression test package. The formatting of the code is 5 | * mainly the same as in the original file. 6 | * 7 | * 8 | * File: Seq.java Author: S. 
Gross 9 | * 10 | */ 11 | 12 | import mpi.*; 13 | 14 | public class Seq 15 | { 16 | private final static int ITER = 30; 17 | 18 | public static void main (String args[]) throws MPIException 19 | { 20 | int me, tasks; 21 | int data[] = new int[1]; 22 | Status status; 23 | 24 | MPI.Init(args); 25 | me = MPI.COMM_WORLD.getRank(); 26 | tasks = MPI.COMM_WORLD.getSize(); 27 | data[0] = -1; 28 | if(me == 0) { 29 | for(int i = 0; i < (tasks - 1) * ITER; i++) 30 | status = MPI.COMM_WORLD.recv(data, 1, MPI.INT, MPI.ANY_SOURCE, 1); 31 | } else { 32 | data[0] = me; 33 | for(int i = 0; i < ITER; i++) 34 | MPI.COMM_WORLD.send (data, 1, MPI.INT, 0, 1); 35 | } 36 | MPI.COMM_WORLD.barrier (); 37 | MPI.Finalize(); 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /pt2pt/Test1.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "test1.c" from the "ompi-ibm-10.0" 4 | * regression test package. The formatting of the code is 5 | * mainly the same as in the original file. 6 | * 7 | * 8 | * File: Test1.java Author: S. Gross 9 | * 10 | */ 11 | 12 | import java.nio.*; 13 | import mpi.*; 14 | 15 | public class Test1 16 | { 17 | public static void main (String args[]) throws MPIException 18 | { 19 | int me; 20 | int outmsg[] = new int[1]; 21 | IntBuffer inmsg = MPI.newIntBuffer(1); 22 | boolean flag = false; 23 | Request msgid; 24 | MPI.Init(args); 25 | 26 | me = MPI.COMM_WORLD.getRank(); 27 | inmsg.put(0, -1); 28 | 29 | /* We need at least 2 to run */ 30 | OmpitestError.ompitestCheckSize(OmpitestError.getFileName(), 31 | OmpitestError.getLineNumber(), 32 | 2, true); 33 | 34 | if (me == 1) { 35 | outmsg[0] = 5; 36 | MPI.COMM_WORLD.send (outmsg, 1, MPI.INT, 0, 1); 37 | } 38 | if (me == 0) { 39 | msgid = MPI.COMM_WORLD.iRecv(inmsg, 1, MPI.INT, 40 | MPI.ANY_SOURCE, MPI.ANY_TAG); 41 | Status status = null; 42 | 43 | while(status == null) 44 | status = msgid.testStatus(); 45 | 46 | if(inmsg.get(0) != 5 || status.getSource() != 1 || status.getTag() != 1) 47 | OmpitestError.ompitestError(OmpitestError.getFileName(), 48 | OmpitestError.getLineNumber(), 49 | "flag, inmsg, src, tag = \"" + 50 | flag + ", " + inmsg.get(0) + ", " + 51 | status.getSource() + ", " + 52 | status.getTag() + 53 | "\", should be \"true, 5, 1, 1\"\n"); 54 | } 55 | MPI.COMM_WORLD.barrier (); 56 | MPI.Finalize(); 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /pt2pt/Test3.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "test3.c" from the "ompi-ibm-10.0" 4 | * regression test package. The formatting of the code is 5 | * mainly the same as in the original file. 6 | * 7 | * 8 | * File: Test3.java Author: S. 
Gross 9 | * 10 | */ 11 | 12 | import java.nio.*; 13 | import mpi.*; 14 | 15 | public class Test3 16 | { 17 | public static void main (String args[]) throws MPIException 18 | { 19 | IntBuffer out = MPI.newIntBuffer(1), 20 | in = MPI.newIntBuffer(1); 21 | 22 | int myself, tasks; 23 | Request req1, req2; 24 | 25 | MPI.Init(args); 26 | myself = MPI.COMM_WORLD.getRank(); 27 | tasks = MPI.COMM_WORLD.getSize(); 28 | 29 | /* We need at least 2 to run */ 30 | OmpitestError.ompitestCheckSize(OmpitestError.getFileName(), 31 | OmpitestError.getLineNumber(), 32 | 2, true); 33 | in.put(0, -1); 34 | out.put(0, 1); 35 | 36 | if (myself < 2) { 37 | if (myself == 0) { 38 | req1 = MPI.COMM_WORLD.iSend(out, 1, MPI.INT, 1, 1); 39 | req2 = MPI.COMM_WORLD.iRecv(in, 1, MPI.INT, 1, 2); 40 | for (;;) { 41 | if(req1.test()) 42 | break; 43 | } 44 | for (;;) { 45 | if(req2.test()) 46 | break; 47 | } 48 | } else if (myself == 1) { 49 | MPI.COMM_WORLD.send (out, 1, MPI.INT, 0, 2); 50 | MPI.COMM_WORLD.recv(in, 1, MPI.INT, 0, 1); 51 | } 52 | if (in.get(0) != 1) 53 | OmpitestError.ompitestError(OmpitestError.getFileName(), 54 | OmpitestError.getLineNumber(), 55 | "ERROR IN TASK " + myself + 56 | " (" + in.get(0) + ")\n"); 57 | } 58 | MPI.COMM_WORLD.barrier (); 59 | MPI.Finalize(); 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /pt2pt/Waitall.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "waitall.c" from the "ompi-ibm-10.0" 4 | * regression test package. The formatting of the code is 5 | * mainly the same as in the original file. 6 | * 7 | * 8 | * File: Waitall.java Author: S. Gross 9 | * 10 | */ 11 | 12 | import java.nio.*; 13 | import java.util.*; 14 | import mpi.*; 15 | import static mpi.MPI.slice; 16 | 17 | public class Waitall 18 | { 19 | public static void main (String args[]) throws MPIException 20 | { 21 | int tasks, bytes; 22 | IntBuffer me = MPI.newIntBuffer(1), 23 | data = MPI.newIntBuffer(1000); 24 | Request req[]; 25 | 26 | MPI.Init(args); 27 | me.put(0, MPI.COMM_WORLD.getRank()); 28 | tasks = MPI.COMM_WORLD.getSize(); 29 | req = new Request[1000]; 30 | 31 | for (int i = 0; i < tasks; i++) { 32 | req[2*i] = MPI.COMM_WORLD.iSend(me, 1, MPI.INT, i, 1); 33 | req[2*i+1] = MPI.COMM_WORLD.iRecv(slice(data, i), 1, MPI.INT, i, 1); 34 | } 35 | Request.waitAll(Arrays.copyOf(req, 2 * tasks)); 36 | 37 | for (int i = 0; i < tasks; i++) { 38 | req[2*i] = MPI.COMM_WORLD.iSend(me, 1, MPI.INT, i, 1); 39 | req[2*i+1] = MPI.COMM_WORLD.iRecv(slice(data, i), 1, MPI.INT, i, 1); 40 | } 41 | Request.waitAll(Arrays.copyOf(req, 2 * tasks)); 42 | 43 | /* Also try giving a 0 count and ensure everything is ok */ 44 | Request.waitAll(new Request[0]); 45 | 46 | MPI.COMM_WORLD.barrier (); 47 | MPI.Finalize(); 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /pt2pt/Waitnull.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "waitnull.c" from the "ompi-ibm-10.0" 4 | * regression test package. The formatting of the code is 5 | * mainly the same as in the original file. 6 | * 7 | * 8 | * File: Waitnull.java Author: S. 
Gross 9 | * 10 | */ 11 | 12 | import mpi.*; 13 | 14 | public class Waitnull 15 | { 16 | public static void main (String args[]) throws MPIException 17 | { 18 | int me, tasks; 19 | Request request; 20 | 21 | MPI.Init(args); 22 | me = MPI.COMM_WORLD.getRank(); 23 | tasks = MPI.COMM_WORLD.getSize(); 24 | 25 | request = MPI.REQUEST_NULL; 26 | request.waitFor(); 27 | 28 | MPI.COMM_WORLD.barrier (); 29 | MPI.Finalize(); 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /pt2pt/Wildcard.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "wildcard.c" from the "ompi-ibm-10.0" 4 | * regression test package. The formatting of the code is 5 | * mainly the same as in the original file. 6 | * 7 | * 8 | * File: Wildcard.java Author: S. Gross 9 | * 10 | */ 11 | 12 | import mpi.*; 13 | 14 | public class Wildcard 15 | { 16 | private final static int ITER = 10; 17 | 18 | public static void main (String args[]) throws MPIException 19 | { 20 | int me, tasks, tag, expected; 21 | int val[] = new int[1]; 22 | Status status; 23 | 24 | MPI.Init(args); 25 | me = MPI.COMM_WORLD.getRank(); 26 | tasks = MPI.COMM_WORLD.getSize(); 27 | val[0] = -1; 28 | 29 | /* We need at least 2 to run */ 30 | OmpitestError.ompitestCheckSize(OmpitestError.getFileName(), 31 | OmpitestError.getLineNumber(), 32 | 2, true); 33 | 34 | if (me == 0) { 35 | for (int i = 0; i < (tasks - 1) * ITER; i++) { 36 | status = MPI.COMM_WORLD.recv (val, 1, MPI.INT, 37 | MPI.ANY_SOURCE, i / (tasks - 1)); 38 | expected = status.getSource() * 1000 + status.getTag(); 39 | if (val[0] != expected) 40 | OmpitestError.ompitestError(OmpitestError.getFileName(), 41 | OmpitestError.getLineNumber(), 42 | "ERROR, val = " + val[0] + 43 | ", should be " + expected + "\n"); 44 | } 45 | } else { 46 | for (int i = 0; i < ITER; i++) { 47 | tag = i; 48 | val[0] = me * 1000 + tag; 49 | MPI.COMM_WORLD.send (val, 1, MPI.INT, 0, tag); 50 | } 51 | } 52 | MPI.COMM_WORLD.barrier (); 53 | MPI.Finalize(); 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /pt2pt/todo/OmpitestError.java: -------------------------------------------------------------------------------- 1 | ../../reporting/OmpitestError.java -------------------------------------------------------------------------------- /pt2pt/todo/OmpitestProgress.java: -------------------------------------------------------------------------------- 1 | ../../reporting/OmpitestProgress.java -------------------------------------------------------------------------------- /random/OmpitestConfig.java: -------------------------------------------------------------------------------- 1 | ../reporting/OmpitestConfig.java -------------------------------------------------------------------------------- /random/OmpitestError.java: -------------------------------------------------------------------------------- 1 | ../reporting/OmpitestError.java -------------------------------------------------------------------------------- /random/OmpitestProgress.java: -------------------------------------------------------------------------------- 1 | ../reporting/OmpitestProgress.java -------------------------------------------------------------------------------- /random/OpCommutative.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "op_commutative.c" from the "ompi-ibm-10.0" 4 | * regression test 
package. The formatting of the code is 5 | * mainly the same as in the original file. 6 | * 7 | * 8 | * File: OpCommutative.java Author: S. Gross 9 | * 10 | */ 11 | 12 | import mpi.*; 13 | 14 | public class OpCommutative 15 | { 16 | public static void main (String args[]) throws MPIException 17 | { 18 | boolean commute = false; 19 | Op op; 20 | 21 | MPI.Init(args); 22 | 23 | op = new Op(func, commute); 24 | if (commute) { 25 | OmpitestError.ompitestError(OmpitestError.getFileName(), 26 | OmpitestError.getLineNumber(), 27 | "op should not be commutative"); 28 | } 29 | op.free(); 30 | 31 | commute = true; 32 | op = new Op(func, commute); 33 | if (!commute) { 34 | OmpitestError.ompitestError(OmpitestError.getFileName(), 35 | OmpitestError.getLineNumber(), 36 | "op should be commutative"); 37 | } 38 | op.free(); 39 | 40 | MPI.Finalize(); 41 | } 42 | 43 | 44 | private static final UserFunction func = new UserFunction() 45 | {@Override public void call(Object inVec, Object inOutVec, 46 | int count, Datatype dt) throws MPIException 47 | { 48 | /* Just for compilation purposes */ 49 | }}; 50 | } 51 | -------------------------------------------------------------------------------- /random/ReduceLocal.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "reduce_local.c" from the "ompi-ibm-10.0" 4 | * regression test package. The formatting of the code is 5 | * mainly the same as in the original file. 6 | * 7 | * 8 | * File: ReduceLocal.java Author: S. Gross 9 | * 10 | */ 11 | 12 | import mpi.*; 13 | 14 | public class ReduceLocal 15 | { 16 | public static void main (String args[]) throws MPIException 17 | { 18 | int src[] = new int [] { 1, 2, 3 }; 19 | int dest[] = new int [] { 0, 0, 0 }; 20 | 21 | MPI.Init(args); 22 | 23 | MPI.COMM_WORLD.reduceLocal(src, dest, src.length, MPI.INT, MPI.SUM); 24 | for (int i = 0; i < src.length; ++i) { 25 | if (dest[i] != src[i]) { 26 | OmpitestError.ompitestError(OmpitestError.getFileName(), 27 | OmpitestError.getLineNumber(), 28 | "REDUCE_LOCAL MPI_SUM failed; " + 29 | "dest[" + i + "] = " + dest[i] + 30 | ", expected " + src[i] + "\n"); 31 | } 32 | } 33 | 34 | MPI.Finalize(); 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /random/Ticket_1944_BcastLoop.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "ticket-1944-bcast-loop.c" from the 4 | * "ompi-ibm-10.0" regression test package. The formatting of 5 | * the code is mainly the same as in the original file. 6 | * 7 | * 8 | * File: Ticket_1944_BcastLoop.java Author: S.
Gross 9 | * 10 | */ 11 | 12 | import mpi.*; 13 | 14 | public class Ticket_1944_BcastLoop 15 | { 16 | private final static int numm = 100; 17 | private final static int numt = 142; 18 | 19 | public static void main (String args[]) throws MPIException 20 | { 21 | int myid, iter_mod; 22 | double workarray1[] = new double[561], 23 | workarray2[] = new double[561]; 24 | 25 | MPI.Init(args); 26 | myid = MPI.COMM_WORLD.getRank(); 27 | 28 | if (0 == args.length) { 29 | iter_mod = 1000; 30 | if (0 == myid) { 31 | System.out.printf("Defaulting to show only every %dth\n" + 32 | "iteration; run with any value as\n" + 33 | "args[0] to show every iteration\n\n", 34 | iter_mod); 35 | } 36 | } else { 37 | iter_mod = 1; 38 | if (0 == myid) { 39 | System.out.printf("Showing every iteration\n"); 40 | } 41 | } 42 | 43 | for (int m = 0; m < numm; ++m) { 44 | if (m % iter_mod == 0) { 45 | System.out.printf("rank %d, m = %d\n", myid, m); 46 | System.out.flush(); 47 | } 48 | 49 | for (int nt = 0; nt <= numt; ++nt) { 50 | if (0 == myid) { 51 | for (int i = 0; i < 561; ++i) { 52 | workarray1[i] = numm * numt * i; 53 | workarray2[i] = numm * numt * (i + 1); 54 | } 55 | } 56 | MPI.COMM_WORLD.bcast(workarray1, 561, MPI.DOUBLE, 0); 57 | MPI.COMM_WORLD.bcast(workarray2, 561, MPI.DOUBLE, 0); 58 | } 59 | } 60 | MPI.Finalize(); 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /random/Ticket_2014_BasicSendRecv.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "ticket-2014-basic-send-recv.c" from the 4 | * "ompi-ibm-10.0" regression test package. The formatting of 5 | * the code is mainly the same as in the original file. 6 | * 7 | * 8 | * File: Ticket_2014_BasicSendRecv.java Author: S. Gross 9 | * 10 | */ 11 | 12 | import mpi.*; 13 | 14 | public class Ticket_2014_BasicSendRecv 15 | { 16 | private final static int TAG = 1234; 17 | 18 | public static void main (String args[]) throws MPIException 19 | { 20 | int rank, size, peer; 21 | int resp[] = new int[1]; 22 | Status status; 23 | Datatype type; 24 | 25 | MPI.Init(args); 26 | rank = MPI.COMM_WORLD.getRank(); 27 | size = MPI.COMM_WORLD.getSize(); 28 | 29 | if(size != 2) { 30 | System.out.printf("Please run with 2 processes\n"); 31 | MPI.Finalize(); 32 | System.exit(0); 33 | } 34 | 35 | type = MPI.INT.clone(); 36 | 37 | if( rank == 0 ) { 38 | peer = 1; 39 | status = MPI.COMM_WORLD.recv(resp, 1, type, peer, TAG); 40 | System.out.printf("Manager: Received (%d) from Rank %d with " + 41 | "Tag %d\n", 42 | resp[0], status.getSource(), status.getTag()); 43 | } 44 | else { 45 | peer = 0; 46 | resp[0] = 12345; 47 | MPI.COMM_WORLD.send (resp, 1, type, 0, TAG * rank); 48 | } 49 | 50 | MPI.COMM_WORLD.barrier(); 51 | 52 | type.free(); 53 | MPI.Finalize(); 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /random/todo/OmpiAffinityStr.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a port from "ompi-affinity-str.c" from the "ompi-ibm-10.0" 4 | * regression test package. The formatting of the code is 5 | * mainly the same as in the original file. 6 | * 7 | * 8 | * File: OmpiAffinityStr.java Author: S.
Gross 9 | * NOTE: the body below still keeps the C-style declarations and the 10 | */ 11 | 12 | import mpi.*; 13 | 14 | public class OmpiAffinityStr 15 | { 16 | public static void main (String args[]) throws MPIException 17 | { 18 | if (OmpitestConfig.OMPI_HAVE_MPI_EXT_AFFINITY == 1) { 19 | int rank; 20 | char ompi_bound[OMPI_AFFINITY_STRING_MAX]; /* C syntax: the "affinity" extension has no Java binding yet, so this todo file does not compile as Java */ 21 | char current_binding[OMPI_AFFINITY_STRING_MAX]; 22 | char exists[OMPI_AFFINITY_STRING_MAX]; 23 | 24 | MPI.Init(args); 25 | rank = MPI.COMM_WORLD.getRank(); 26 | 27 | OMPI_Affinity_str(OMPI_AFFINITY_RSRC_STRING_FMT, 28 | ompi_bound, current_binding, exists); 29 | System.out.printf("rank %d (resource string): \n" + 30 | " ompi_bound: %s\n" + 31 | " current_binding: %s\n" + 32 | " exists: %s\n", 33 | rank, ompi_bound, current_binding, exists); 34 | 35 | OMPI_Affinity_str(OMPI_AFFINITY_LAYOUT_FMT, 36 | ompi_bound, current_binding, exists); 37 | System.out.printf("rank %d (layout): \n" + 38 | " ompi_bound: %s\n" + 39 | " current_binding: %s\n" + 40 | " exists: %s\n", 41 | rank, ompi_bound, current_binding, exists); 42 | MPI.Finalize(); 43 | System.exit(0); 44 | } else { 45 | /* The "affinity" extension is not available */ 46 | System.exit(77); 47 | } 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /random/todo/OmpitestConfig.java: -------------------------------------------------------------------------------- 1 | ../../reporting/OmpitestConfig.java -------------------------------------------------------------------------------- /random/todo/OmpitestError.java: -------------------------------------------------------------------------------- 1 | ../../reporting/OmpitestError.java -------------------------------------------------------------------------------- /random/todo/OmpitestProgress.java: -------------------------------------------------------------------------------- 1 | ../../reporting/OmpitestProgress.java -------------------------------------------------------------------------------- /reporting/Makefile.am: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-mpi/ompi-java-test/1a3295a7cf8c8ea30366b2f9e50d48726354b47f/reporting/Makefile.am -------------------------------------------------------------------------------- /reporting/OmpitestConfig.java.in: -------------------------------------------------------------------------------- 1 | /* 2 | * Results from configure script that are needed in Java code. 3 | */ 4 | 5 | public class OmpitestConfig 6 | { 7 | final static int OMPITEST_CHECKING_MPI_API_PARAMS = 8 | @OMPITEST_CHECKING_MPI_API_PARAMS@; 9 | final static int OMPI_PARAM_CHECK = 10 | @OMPI_PARAM_CHECK@; 11 | final static int OMPITEST_HAVE_MPI_THREADS = 12 | @OMPITEST_HAVE_MPI_THREADS@; 13 | final static int HAVE_DIST_GRAPH = 14 | @HAVE_DIST_GRAPH@; 15 | final static int OMPI_HAVE_MPI_EXT_AFFINITY = 16 | @OMPI_HAVE_MPI_EXT_AFFINITY@; 17 | } 18 | -------------------------------------------------------------------------------- /request/GetStatus.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a modified version of Test1.java which has been 4 | * repurposed to test the Java binding of MPI_REQUEST_GET_STATUS. 5 | * 6 | * 7 | * File: GetStatus.java Author: N.
Graham 8 | * 9 | */ 10 | 11 | import java.nio.*; 12 | import mpi.*; 13 | 14 | public class GetStatus 15 | { 16 | public static void main (String args[]) throws MPIException 17 | { 18 | int me; 19 | int outmsg[] = new int[1]; 20 | IntBuffer inmsg = MPI.newIntBuffer(1); 21 | boolean flag = false; 22 | Request msgid; 23 | MPI.Init(args); 24 | 25 | me = MPI.COMM_WORLD.getRank(); 26 | inmsg.put(0, -1); 27 | 28 | /* We need at least 2 to run */ 29 | OmpitestError.ompitestCheckSize(OmpitestError.getFileName(), 30 | OmpitestError.getLineNumber(), 31 | 2, true); 32 | 33 | if (me == 1) { 34 | outmsg[0] = 5; 35 | MPI.COMM_WORLD.send (outmsg, 1, MPI.INT, 0, 1); 36 | } 37 | if (me == 0) { 38 | msgid = MPI.COMM_WORLD.iRecv(inmsg, 1, MPI.INT, 39 | MPI.ANY_SOURCE, MPI.ANY_TAG); 40 | Status status = null; 41 | 42 | while(status == null) 43 | status = msgid.getStatus(); 44 | 45 | msgid.free(); 46 | 47 | if(inmsg.get(0) != 5 || status.getSource() != 1 || status.getTag() != 1) 48 | OmpitestError.ompitestError(OmpitestError.getFileName(), 49 | OmpitestError.getLineNumber(), 50 | "flag, inmsg, src, tag = \"" + 51 | flag + ", " + inmsg.get(0) + ", " + 52 | status.getSource() + ", " + 53 | status.getTag() + 54 | "\", should be \"true, 5, 1, 1\"\n"); 55 | } 56 | MPI.COMM_WORLD.barrier(); 57 | 58 | if(me == 0) { 59 | System.out.println("Test Passed"); 60 | } 61 | 62 | MPI.Finalize(); 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /request/OmpitestError.java: -------------------------------------------------------------------------------- 1 | ../reporting/OmpitestError.java -------------------------------------------------------------------------------- /status/OmpitestError.java: -------------------------------------------------------------------------------- 1 | ../reporting/OmpitestError.java -------------------------------------------------------------------------------- /status/SetCancelled.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a modified version of Rsend.java that has been repurposed 4 | * to test for the setCancelled method in Status.java. 5 | * 6 | * 7 | * File: SetCancelled.java Author: N. Graham 8 | * 9 | */ 10 | 11 | import mpi.*; 12 | 13 | public class SetCancelled { 14 | 15 | public static void main(String[] args) throws MPIException { 16 | int buf[], len, me; 17 | Status status; 18 | 19 | buf = new int[10]; 20 | len = buf.length; 21 | MPI.Init(args); 22 | me = MPI.COMM_WORLD.getRank(); 23 | 24 | /* We need at least 2 to run */ 25 | 26 | OmpitestError.ompitestCheckSize(OmpitestError.getFileName(), 27 | OmpitestError.getLineNumber(), 28 | 2, true); 29 | 30 | if(me == 0) { 31 | MPI.COMM_WORLD.rSend (buf, len, MPI.CHAR, 1, 1); 32 | } else if (me == 1) { 33 | status = MPI.COMM_WORLD.recv (buf, len, MPI.CHAR, 0, 1); 34 | 35 | status.setCancelled(true); 36 | 37 | if(status.isCancelled()) { 38 | System.out.println("Test Passed"); 39 | } else { 40 | System.out.println("Test Failed"); 41 | } 42 | } 43 | 44 | 45 | MPI.COMM_WORLD.barrier (); 46 | MPI.Finalize(); 47 | } 48 | 49 | } 50 | -------------------------------------------------------------------------------- /status/SetElements.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a modified version of Rsend.java that has been repurposed 4 | * to test for the setElements method in Status.java. 5 | * 6 | * 7 | * File: SetElements.java Author: N. 
Graham 8 | * 9 | */ 10 | 11 | import mpi.*; 12 | 13 | public class SetElements { 14 | 15 | public static void main(String[] args) throws MPIException { 16 | int buf[], len, me; 17 | Status status; 18 | int numElements = 10; 19 | 20 | buf = new int[10]; 21 | len = buf.length; 22 | MPI.Init(args); 23 | me = MPI.COMM_WORLD.getRank(); 24 | 25 | /* We need at least 2 to run */ 26 | 27 | OmpitestError.ompitestCheckSize(OmpitestError.getFileName(), 28 | OmpitestError.getLineNumber(), 29 | 2, true); 30 | 31 | if(me == 0) { 32 | MPI.COMM_WORLD.rSend (buf, len, MPI.CHAR, 1, 1); 33 | } else if (me == 1) { 34 | status = MPI.COMM_WORLD.recv (buf, len, MPI.CHAR, 0, 1); 35 | 36 | status.setElements(MPI.INT, numElements); 37 | 38 | if(status.getElements(MPI.INT) == numElements) { 39 | System.out.println("Test Passed"); 40 | } else { 41 | System.out.println("Test Failed"); 42 | } 43 | } 44 | 45 | MPI.COMM_WORLD.barrier (); 46 | MPI.Finalize(); 47 | } 48 | 49 | } 50 | -------------------------------------------------------------------------------- /status/SetElementsX.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is a modified version of Rsend.java that has been repurposed 4 | * to test for the setElementsX and getElementsX methods in Status.java. 5 | * 6 | * 7 | * File: SetElementsX.java Author: N. Graham 8 | * 9 | */ 10 | 11 | import mpi.*; 12 | 13 | public class SetElementsX { 14 | 15 | public static void main(String[] args) throws MPIException { 16 | int buf[], len, me; 17 | Status status; 18 | Count numElements = new Count(10); 19 | 20 | buf = new int[10]; 21 | len = buf.length; 22 | MPI.Init(args); 23 | me = MPI.COMM_WORLD.getRank(); 24 | 25 | /* We need at least 2 to run */ 26 | 27 | OmpitestError.ompitestCheckSize(OmpitestError.getFileName(), 28 | OmpitestError.getLineNumber(), 29 | 2, true); 30 | 31 | if(me == 0) { 32 | MPI.COMM_WORLD.rSend (buf, len, MPI.CHAR, 1, 1); 33 | } else if (me == 1) { 34 | status = MPI.COMM_WORLD.recv (buf, len, MPI.CHAR, 0, 1); 35 | 36 | status.setElementsX(MPI.INT, numElements); 37 | 38 | if(status.getElementsX(MPI.INT).equals(numElements)) { 39 | System.out.println("Test Passed"); 40 | } else { 41 | System.out.println("Test Failed"); 42 | } 43 | } 44 | 45 | MPI.COMM_WORLD.barrier (); 46 | MPI.Finalize(); 47 | } 48 | 49 | } 50 | -------------------------------------------------------------------------------- /test/OmpitestError.java: -------------------------------------------------------------------------------- 1 | ../reporting/OmpitestError.java -------------------------------------------------------------------------------- /test/OmpitestProgress.java: -------------------------------------------------------------------------------- 1 | ../reporting/OmpitestProgress.java -------------------------------------------------------------------------------- /test/TestCheckSize1.java: -------------------------------------------------------------------------------- 1 | /* Test program for ompitestCheckSize. 2 | * 3 | * mpijavac TestCheckSize1.java 4 | * mpiexec -np 1 java TestCheckSize1 5 | * 6 | * File: TestCheckSize1.java Author: S. 
Gross 7 | * 8 | */ 9 | 10 | import mpi.*; 11 | 12 | public class TestCheckSize1 13 | { 14 | public static void main (String args[]) throws MPIException 15 | { 16 | OmpitestError.ompitestCheckSize(OmpitestError.getFileName(), 17 | OmpitestError.getLineNumber(), 18 | 2, true); 19 | MPI.Init (args); 20 | MPI.Finalize(); 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /test/TestCheckSize2.java: -------------------------------------------------------------------------------- 1 | /* Test program for ompitestCheckSize. 2 | * 3 | * mpijavac TestCheckSize2.java 4 | * mpiexec -np 1 java TestCheckSize2 5 | * mpiexec -np 2 java TestCheckSize2 6 | * 7 | * File: TestCheckSize2.java Author: S. Gross 8 | * 9 | */ 10 | 11 | import mpi.*; 12 | 13 | public class TestCheckSize2 14 | { 15 | public static void main (String args[]) throws MPIException 16 | { 17 | MPI.Init (args); 18 | OmpitestError.ompitestCheckSize(OmpitestError.getFileName(), 19 | OmpitestError.getLineNumber(), 20 | 2, true); 21 | MPI.Finalize(); 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /test/TestCheckSize3.java: -------------------------------------------------------------------------------- 1 | /* Test program for ompitestCheckSize. 2 | * 3 | * mpijavac TestCheckSize3.java 4 | * mpiexec -np 1 java TestCheckSize3 5 | * mpiexec -np 2 java TestCheckSize3 6 | * 7 | * File: TestCheckSize3.java Author: S. Gross 8 | * 9 | */ 10 | 11 | import mpi.*; 12 | 13 | public class TestCheckSize3 14 | { 15 | public static void main (String args[]) throws MPIException 16 | { 17 | MPI.Init (args); 18 | OmpitestError.ompitestCheckSize(OmpitestError.getFileName(), 19 | OmpitestError.getLineNumber(), 20 | 2, false); 21 | MPI.Finalize(); 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /test/TestCheckSize4.java: -------------------------------------------------------------------------------- 1 | /* Test program for ompitestCheckSize. 2 | * 3 | * mpijavac TestCheckSize4.java 4 | * mpiexec -np 1 java TestCheckSize4 5 | * 6 | * File: TestCheckSize4.java Author: S. Gross 7 | * 8 | */ 9 | 10 | import mpi.*; 11 | 12 | public class TestCheckSize4 13 | { 14 | public static void main (String args[]) throws MPIException 15 | { 16 | MPI.Init (args); 17 | MPI.Finalize(); 18 | OmpitestError.ompitestCheckSize(OmpitestError.getFileName(), 19 | OmpitestError.getLineNumber(), 20 | 2, false); 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /test/TestError1.java: -------------------------------------------------------------------------------- 1 | /* Test program for ompitestError. 2 | * 3 | * mpijavac TestError1.java 4 | * mpiexec -np 1 java TestError1 5 | * 6 | * File: TestError1.java Author: S. Gross 7 | * 8 | */ 9 | 10 | import mpi.*; 11 | 12 | public class TestError1 13 | { 14 | public static void main (String args[]) throws MPIException 15 | { 16 | OmpitestError.ompitestError(OmpitestError.getFileName(), 17 | OmpitestError.getLineNumber(), 18 | "Error message before MPI.Init()"); 19 | MPI.Init (args); 20 | MPI.Finalize(); 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /test/TestError2.java: -------------------------------------------------------------------------------- 1 | /* Test program for ompitestError. 
2 | * 3 | * mpijavac TestError2.java 4 | * mpiexec -np 1 java TestError2 5 | * 6 | * File: TestError2.java Author: S. Gross 7 | * 8 | */ 9 | 10 | import mpi.*; 11 | 12 | public class TestError2 13 | { 14 | public static void main (String args[]) throws MPIException 15 | { 16 | MPI.Init (args); 17 | OmpitestError.ompitestError(OmpitestError.getFileName(), 18 | OmpitestError.getLineNumber(), 19 | "Error message between MPI.Init() " + 20 | "and MPI.Finalize()"); 21 | MPI.Finalize(); 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /test/TestError3.java: -------------------------------------------------------------------------------- 1 | /* Test program for ompitestError. 2 | * 3 | * mpijavac TestError3.java 4 | * mpiexec -np 1 java TestError3 5 | * 6 | * File: TestError3.java Author: S. Gross 7 | * 8 | */ 9 | 10 | import mpi.*; 11 | 12 | public class TestError3 13 | { 14 | public static void main (String args[]) throws MPIException 15 | { 16 | MPI.Init (args); 17 | MPI.Finalize(); 18 | OmpitestError.ompitestError(OmpitestError.getFileName(), 19 | OmpitestError.getLineNumber(), 20 | "Error message after MPI.Finalize()"); 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /test/TestError4.java: -------------------------------------------------------------------------------- 1 | /* Test program for ompitestError. 2 | * 3 | * mpijavac TestError4.java 4 | * mpiexec -np 1 java TestError4 5 | * 6 | * File: TestError4.java Author: S. Gross 7 | * 8 | */ 9 | 10 | import mpi.*; 11 | 12 | public class TestError4 13 | { 14 | public static void main (String args[]) throws MPIException 15 | { 16 | MPI.Init (args); 17 | OmpitestError.ompitestError(MPI.COMM_WORLD, "MPI.COMM_WORLD", 18 | OmpitestError.getFileName(), 19 | OmpitestError.getLineNumber(), 20 | "Error message between MPI.Init() " + 21 | "and MPI.Finalize()"); 22 | MPI.Finalize(); 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /test/TestNeedEven1.java: -------------------------------------------------------------------------------- 1 | /* Test program for ompitestNeedEven. 2 | * 3 | * mpijavac TestNeedEven1.java 4 | * mpiexec -np 1 java TestNeedEven1 5 | * 6 | * File: TestNeedEven1.java Author: S. Gross 7 | * 8 | */ 9 | 10 | import mpi.*; 11 | 12 | public class TestNeedEven1 13 | { 14 | public static void main (String args[]) throws MPIException 15 | { 16 | OmpitestError.ompitestNeedEven(OmpitestError.getFileName(), 17 | OmpitestError.getLineNumber()); 18 | MPI.Init (args); 19 | MPI.Finalize(); 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /test/TestNeedEven2.java: -------------------------------------------------------------------------------- 1 | /* Test program for ompitestNeedEven. 2 | * 3 | * mpijavac TestNeedEven2.java 4 | * mpiexec -np 1 java TestNeedEven2 5 | * mpiexec -np 2 java TestNeedEven2 6 | * 7 | * File: TestNeedEven2.java Author: S.
Gross 8 | * 9 | */ 10 | 11 | import mpi.*; 12 | 13 | public class TestNeedEven2 14 | { 15 | public static void main (String args[]) throws MPIException 16 | { 17 | MPI.Init (args); 18 | OmpitestError.ompitestNeedEven(OmpitestError.getFileName(), 19 | OmpitestError.getLineNumber()); 20 | MPI.Finalize(); 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /test/TestNeedEven3.java: -------------------------------------------------------------------------------- 1 | /* Test program for ompitestNeedEven. 2 | * 3 | * mpijavac TestNeedEven3.java 4 | * mpiexec -np 1 java TestNeedEven3 5 | * 6 | * File: TestNeedEven3.java Author: S. Gross 7 | * 8 | */ 9 | 10 | import mpi.*; 11 | 12 | public class TestNeedEven3 13 | { 14 | public static void main (String args[]) throws MPIException 15 | { 16 | MPI.Init (args); 17 | OmpitestError.ompitestNeedEven(OmpitestError.getFileName(), 18 | OmpitestError.getLineNumber()); 19 | MPI.Finalize(); 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /test/TestProgress.java: -------------------------------------------------------------------------------- 1 | /* Test program for ompitestProgress. 2 | * 3 | * mpijavac TestProgress.java 4 | * mpiexec -np 1 java TestProgress 5 | * 6 | * File: TestProgress.java Author: S. Gross 7 | * 8 | */ 9 | 10 | import mpi.*; 11 | 12 | public class TestProgress 13 | { 14 | final static int TOTAL = 40; /* total number of steps */ 15 | final static int SLEEP = 200; /* 200 ms */ 16 | 17 | public static void main (String args[]) throws MPIException, 18 | InterruptedException 19 | { 20 | MPI.Init (args); 21 | OmpitestProgress.ompitestProgressStart(TOTAL); 22 | for (int i = 0; i < TOTAL; ++i) { 23 | OmpitestProgress.ompitestProgress(i); 24 | Thread.sleep(SLEEP); 25 | } 26 | OmpitestProgress.ompitestProgressEnd(); 27 | MPI.Finalize(); 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /test/TestWarning1.java: -------------------------------------------------------------------------------- 1 | /* Test program for ompitestWarning. 2 | * 3 | * mpijavac TestWarning1.java 4 | * mpiexec -np 1 java TestWarning1 5 | * 6 | * File: TestWarning1.java Author: S. Gross 7 | * 8 | */ 9 | 10 | import mpi.*; 11 | 12 | public class TestWarning1 13 | { 14 | public static void main (String args[]) throws MPIException 15 | { 16 | OmpitestError.ompitestWarning(OmpitestError.getFileName(), 17 | OmpitestError.getLineNumber(), 18 | "Warning message before MPI.Init()"); 19 | MPI.Init (args); 20 | MPI.Finalize(); 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /test/TestWarning2.java: -------------------------------------------------------------------------------- 1 | /* Test program for ompitestWarning. 2 | * 3 | * mpijavac TestWarning2.java 4 | * mpiexec -np 1 java TestWarning2 5 | * 6 | * File: TestWarning2.java Author: S. 
Gross 7 | * 8 | */ 9 | 10 | import mpi.*; 11 | 12 | public class TestWarning2 13 | { 14 | public static void main (String args[]) throws MPIException 15 | { 16 | MPI.Init (args); 17 | OmpitestError.ompitestWarning(OmpitestError.getFileName(), 18 | OmpitestError.getLineNumber(), 19 | "Warning message between MPI.Init() " + 20 | "and MPI.Finalize()"); 21 | MPI.Finalize(); 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /test/TestWarning3.java: -------------------------------------------------------------------------------- 1 | /* Test program for ompitestWarning. 2 | * 3 | * mpijavac TestWarning3.java 4 | * mpiexec -np 1 java TestWarning3 5 | * 6 | * File: TestWarning3.java Author: S. Gross 7 | * 8 | */ 9 | 10 | import mpi.*; 11 | 12 | public class TestWarning3 13 | { 14 | public static void main (String args[]) throws MPIException 15 | { 16 | MPI.Init (args); 17 | MPI.Finalize(); 18 | OmpitestError.ompitestWarning(OmpitestError.getFileName(), 19 | OmpitestError.getLineNumber(), 20 | "Warning message after MPI.Finalize()"); 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /test/TestWarning4.java: -------------------------------------------------------------------------------- 1 | /* Test program for ompitestWarning. 2 | * 3 | * mpijavac TestWarning4.java 4 | * mpiexec -np 1 java TestWarning4 5 | * 6 | * File: TestWarning4.java Author: S. Gross 7 | * 8 | */ 9 | 10 | import mpi.*; 11 | 12 | public class TestWarning4 13 | { 14 | public static void main (String args[]) throws MPIException 15 | { 16 | MPI.Init (args); 17 | OmpitestError.ompitestWarning(MPI.COMM_WORLD, "MPI.COMM_WORLD", 18 | OmpitestError.getFileName(), 19 | OmpitestError.getLineNumber(), 20 | "Warning message between MPI.Init() " + 21 | "and MPI.Finalize()"); 22 | MPI.Finalize(); 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /topology/OmpitestConfig.java: -------------------------------------------------------------------------------- 1 | ../reporting/OmpitestConfig.java -------------------------------------------------------------------------------- /topology/OmpitestError.java: -------------------------------------------------------------------------------- 1 | ../reporting/OmpitestError.java -------------------------------------------------------------------------------- /topology/OmpitestProgress.java: -------------------------------------------------------------------------------- 1 | ../reporting/OmpitestProgress.java -------------------------------------------------------------------------------- /topology/make_topology: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # 3 | # Compile and run all topology programs. 4 | # 5 | # By default this script runs first with two processes and second 6 | # with eight processes on the local machine. You can add more options 7 | # to "mpiexec" on the command line, e.g.: 8 | # 9 | # make_topology -np 6 -host sunpc1,linpc1,tyr 10 | # 11 | # to create six processes running on three machines. 12 | # 13 | # 14 | # File: make_topology Author: S.
Gross 15 | # 16 | 17 | TWO_PROC="Cart \ 18 | Dimscreate \ 19 | Distgraph1 \ 20 | Graph \ 21 | Sub \ 22 | Sub2" 23 | 24 | NUM_PROC=$TWO_PROC 25 | 26 | # number of processes 27 | NP=8 28 | 29 | 30 | echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++" 31 | echo "++++++++++++++++ ++++++++++++++++" 32 | echo "++++++++++++++++ mpiexec -np 2 java ... ++++++++++++++++" 33 | echo "++++++++++++++++ ++++++++++++++++" 34 | echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++" 35 | for i in $TWO_PROC; do 36 | echo " " 37 | echo " " 38 | echo " " 39 | echo =========================== $i =========================== 40 | mpijavac $i.java 41 | mpiexec -np 2 java $i 42 | done 43 | 44 | 45 | echo " " 46 | echo " " 47 | echo " " 48 | echo " " 49 | echo " " 50 | echo " " 51 | echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++" 52 | echo "++++" 53 | echo "++++ mpiexec -np $NP $* java ..." 54 | echo "++++" 55 | echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++" 56 | for i in $NUM_PROC; do 57 | echo " " 58 | echo " " 59 | echo " " 60 | echo =========================== $i =========================== 61 | mpijavac $i.java 62 | mpiexec -np $NP $* java $i 63 | done 64 | 65 | rm *.class 66 | --------------------------------------------------------------------------------
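For reference, the pt2pt and test programs above all follow one skeleton: MPI.Init, an optional size check through the OmpitestError helper from the reporting directory, the communication under test, a barrier, and MPI.Finalize. Below is a minimal sketch of that pattern, using only calls that already appear in the files above; the class name MinimalPt2pt is hypothetical and the file is not part of the suite.

/* Minimal sketch of the common test skeleton (not part of the suite).
 *
 * mpijavac MinimalPt2pt.java
 * mpiexec -np 2 java MinimalPt2pt
 */

import mpi.*;

public class MinimalPt2pt
{
  public static void main (String args[]) throws MPIException
  {
    int data[] = new int[1];

    MPI.Init(args);
    int me = MPI.COMM_WORLD.getRank();

    /* We need at least 2 to run */
    OmpitestError.ompitestCheckSize(OmpitestError.getFileName(),
                                    OmpitestError.getLineNumber(),
                                    2, true);

    if (me == 0) {                       /* rank 0 sends one int */
      data[0] = 42;
      MPI.COMM_WORLD.send(data, 1, MPI.INT, 1, 1);
    } else if (me == 1) {                /* rank 1 receives and verifies */
      Status status = MPI.COMM_WORLD.recv(data, 1, MPI.INT, 0, 1);
      if (data[0] != 42)
        OmpitestError.ompitestError(OmpitestError.getFileName(),
                                    OmpitestError.getLineNumber(),
                                    "data = " + data[0] +
                                    ", should be 42\n");
    }

    MPI.COMM_WORLD.barrier();
    MPI.Finalize();
  }
}

Like the tests above, this sketch expects OmpitestError.java from the reporting directory to sit next to it at compile time, which the suite arranges with per-directory symlinks.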