Posted
Filed under Computer/HPC
Requirements:
   An MPI program and environment installed on the system.
   The attached file is based on the MPI (MVAPICH/OpenMPI) shipped with OFED.

download hpl-2.0 & atlas 3.8.3
http://sourceforge.net/projects/math-atlas/files/Stable/3.8.3/


make a temporary directory for compile/setup
$ mkdir -p /kage
copy hpl & atlas to /kage

extract ATLAS
$ tar zxvf atlas-xxxx.tar.gz
$ cd ATLAS

create configuration file
$ vi opt.conf
--------------------------------------------------------------------------------------------------------------
# Build and install ATLAS for use by HPL.
# Reference: http://math-atlas.sourceforge.net/atlas_install/
# Run from the top of the ATLAS source tree:  sh opt.conf
set -eu   # abort on the first failed step instead of blindly continuing

arch=Linux_Xeon_SSE2

# ATLAS requires an out-of-tree build; use a per-arch build directory.
mkdir -p "$arch"
cd "$arch"

# -b 64             : 64-bit build
# -D -c -DPentiumCPS=240 : CPU clock hint (MHz) used for cycle-accurate timing
# --prefix          : must match LAdir in Make.Linux_ATHLON_CBLAS
../configure -b 64 -D -c -DPentiumCPS=240 --prefix=/kage/hpl/atlas

# 32-bit alternative (e.g. the BCCD only supports 32-bit):
#../configure -b 32 \
#   -t -1 \                        # -1 tells ATLAS to autodetect thread count
#   -Si cputhrchk 0 \              # do not check for CPU throttling
#   --prefix=$HOME/hpl/atlas \     # could be anywhere, but note this path
#   --nof77 \                      # don't worry about FORTRAN
#   --cc=/usr/bin/gcc \            # use gcc
#   -C ic /usr/bin/gcc             # really, use gcc (see ATLAS docs)

# Make sure the install destinations exist before "make install".
mkdir -p /kage/hpl/atlas
mkdir -p /kage/hpl/lib/atlas

make build     # compile ATLAS (this can take a long time)
make check     # run sanity tests
make time      # run timing tests
make install   # install into --prefix
--------------------------------------------------------------------------------------------------------------

compile ATLAS
$ sh opt.conf

extract hpl-2.0
$ tar zxvf hpl-2.0.xxx.tar.gz
$ cd hpl-2.0

create configuration file
$ vi setup/Make.Linux_ATHLON_CBLAS
--------------------------------------------------------------------------------------------------------------
# HPL 2.0 build configuration for a CBLAS (ATLAS) + OpenMPI setup.
# Used as:  make arch=Linux_ATHLON_CBLAS
# NOTE(review): the ARCH name says ATHLON but ATLAS above was configured
# for Linux_Xeon_SSE2 — the name is only a label, but confirm it matches
# the directory layout you expect under bin/ and lib/.

# ----------------------------------------------------------------------
# - Shell and basic file utilities used by the HPL makefiles ----------
# ----------------------------------------------------------------------
SHELL        = /bin/sh
CD             = cd
CP              = cp
LN_S          = ln -s
MKDIR        = mkdir
RM             = /bin/rm -f
TOUCH       = touch

# ----------------------------------------------------------------------
# - HPL directory layout (HOME is the temporary build root, /kage) -----
# ----------------------------------------------------------------------
HOME        = /kage
ARCH         = Linux_ATHLON_CBLAS
TOPdir       = $(HOME)/hpl-2.0
INCdir       = $(TOPdir)/include
BINdir       = $(TOPdir)/bin/$(ARCH)
LIBdir       = $(TOPdir)/lib/$(ARCH)
HPLlib       = $(LIBdir)/libhpl.a

# ----------------------------------------------------------------------
# - MPI: pick ONE MPdir/MPlib pair for the installed MPI stack ---------
#   (mvapich uses the static libmpich.a; openmpi uses libmpi.so)
# ----------------------------------------------------------------------
#MPdir        = /usr/local/mpi
#MPdir        = /usr/mpi/gcc/mvapich-1.2.0
MPdir        = /usr/mpi/gcc/openmpi-1.4.1
MPinc        = -I$(MPdir)/include
#MPlib        = $(MPdir)/lib/libmpich.a
MPlib        = $(MPdir)/lib64/libmpi.so
#MPlib        = $(MPdir)/lib64/libvt.mpi.a
# ----------------------------------------------------------------------
# - Linear algebra library: the ATLAS build installed by opt.conf ------
#   (LAdir must match the --prefix given to ATLAS configure)
# ----------------------------------------------------------------------
#LAdir        = $(HOME)/netlib/ARCHIVES/Linux_ATHLON
LAdir        = $(HOME)/hpl/atlas
LAinc        =
LAlib        = $(LAdir)/lib/libcblas.a $(LAdir)/lib/libatlas.a
# Empty: no Fortran-to-C name-mangling defines needed (CBLAS interface).
F2CDEFS      =

# ----------------------------------------------------------------------
# - HPL includes / libraries / specifics
# ----------------------------------------------------------------------
HPL_INCLUDES = -I$(INCdir) -I$(INCdir)/$(ARCH) $(LAinc) $(MPinc)
HPL_LIBS     = $(HPLlib) $(LAlib) $(MPlib)
# Tell HPL to call BLAS through the C (CBLAS) interface, not Fortran.
HPL_OPTS     = -DHPL_CALL_CBLAS
HPL_DEFS     = $(F2CDEFS) $(HPL_OPTS) $(HPL_INCLUDES)

# ----------------------------------------------------------------------
# - Compilers / linkers - Optimization flags
# ----------------------------------------------------------------------
CC           = /usr/bin/gcc
CCNOOPT      = $(HPL_DEFS)
CCFLAGS      = $(HPL_DEFS) -fomit-frame-pointer -O3 -funroll-loops -W -W
LINKER       = /usr/bin/gcc
LINKFLAGS    = $(CCFLAGS)
ARCHIVER     = ar
ARFLAGS      = r
# RANLIB is a no-op ("echo") — modern ar indexes the archive itself.
RANLIB       = echo
--------------------------------------------------------------------------------------------------------------

$ ln -s setup/Make.Linux_ATHLON_CBLAS .
$ vi opt.conf
--------------------------------------------------------------------------------------------------------------
# Build HPL using the Make.Linux_ATHLON_CBLAS configuration linked above.
# Uncomment the first line to clean a previous build before rebuilding.
#make arch=Linux_ATHLON_CBLAS clean
make arch=Linux_ATHLON_CBLAS
--------------------------------------------------------------------------------------------------------------

compile
$ sh opt.conf

test run hpl with 4 processes on localhost
$ cd /kage/hpl-2.0/bin/Linux_ATHLON_CBLAS
$ vi hostlist
--------------------------------------------------------------------------------------------------------------
localhost
localhost
localhost
localhost
--------------------------------------------------------------------------------------------------------------
$ /usr/mpi/gcc/openmpi-1.4.1/bin/mpirun -np 4 -machinefile ./hostlist ./xhpl >& hpl.out


check output
$ tail -f hpl.out

~~~~~~~~
================================================================================
T/V                N    NB     P     Q               Time                 Gflops
--------------------------------------------------------------------------------
WR00R2R4          35     4     4     1               0.00              2.112e-01
--------------------------------------------------------------------------------
||Ax-b||_oo/(eps*(||A||_oo*||x||_oo+||b||_oo)*N)=        0.0217524 ...... PASSED
================================================================================

Finished    864 tests with the following results:
            864 tests completed and passed residual checks,
              0 tests completed and failed residual checks,
              0 tests skipped because of illegal input values.
--------------------------------------------------------------------------------

End of Tests.
================================================================================

it is about 0.21 GFlops for unoptimized hardware and HPL input data





If you want a full test for hardware stress:
$ vi stress.sh
--------------------------------------------------------------------------------------------------------------
#!/bin/sh
# stress.sh — run one HPL (xhpl) MPI rank per CPU core on this host.
set -eu

MPI_BIN=/usr/mpi/gcc/openmpi-1.4.1/bin

# Count cores directly with grep -c (no cat | grep | wc -l pipeline).
ncpu=$(grep -c '^processor' /proc/cpuinfo)

# Rebuild the machinefile: one line with this hostname per core.
# Plain "rm -f" is safe whether or not the file exists (the original
# "[ -f hostlist ] && rm" form returns non-zero when the file is absent).
rm -f hostlist
i=1
while [ "$i" -le "$ncpu" ]; do
    hostname >> hostlist
    i=$((i + 1))
done

# Launch HPL with one MPI rank per hostlist entry.
"${MPI_BIN}/mpirun" -np "$(wc -l < hostlist)" -machinefile hostlist ./xhpl
--------------------------------------------------------------------------------------------------------------


If you want an install script, download the attached file below,
and modify its first few lines to set the PATH.
Run the file like "sh hpl_install.sh" and it will install HPL automatically.


simple progress)
1. download hpl-2.0.tar.gz
2. download atlas3.8.3.tar.gz
3. download above kage_hpl-2.0.tgz
4. make a directory for temporary
5. copy 3 files to that directory.
6. modify first few line for path in hpl_install.sh
7. run hpl_install.sh
8. go to hpl/bin directory.
9. create input data like as "./configure.sh"
10. run HPL like as "./run.sh"
11. you can see result.


*)
check CPU number when it has a problem of number of CPU.
 1. configure.sh
    CPU=
 2. run.sh
    NP=

If "CPU=" is set to a number, then "NP=" must be set to the same number.
If "CPU=" is left without a number, then "NP=" must also be left without one.
You can choose an even number for "CPU=".

2011/04/05 05:45 2011/04/05 05:45

CEP's Blog -by Kage Park :: (basic)installation for hpl-2.0 with ATLAS, gcc and mpich

CEP's Blog -by Kage Park :: (basic)installation for hpl-2.0 with ATLAS, gcc and mpich

CEP's Blog -by Kage Park :: (basic)installation for hpl-2.0 with ATLAS, gcc and mpich

CEP's Blog -by Kage Park :: (basic)installation for hpl-2.0 with ATLAS, gcc and mpich

CEP's Blog -by Kage Park :: (basic)installation for hpl-2.0 with ATLAS, gcc and mpich

CEP's Blog -by Kage Park :: (basic)installation for hpl-2.0 with ATLAS, gcc and mpich

CEP's Blog -by Kage Park :: (basic)installation for hpl-2.0 with ATLAS, gcc and mpich

CEP's Blog -by Kage Park :: (basic)installation for hpl-2.0 with ATLAS, gcc and mpich

CEP's Blog -by Kage Park :: (basic)installation for hpl-2.0 with ATLAS, gcc and mpich

CEP's Blog -by Kage Park :: (basic)installation for hpl-2.0 with ATLAS, gcc and mpich

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park :: (basic)installation for hpl-2.0 with ATLAS, gcc and mpich

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park :: (basic)installation for hpl-2.0 with ATLAS, gcc and mpich

CEP's Blog -by Kage Park :: (basic)installation for hpl-2.0 with ATLAS, gcc and mpich

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park :: (basic)installation for hpl-2.0 with ATLAS, gcc and mpich

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park :: (basic)installation for hpl-2.0 with ATLAS, gcc and mpich

CEP's Blog -by Kage Park :: (basic)installation for hpl-2.0 with ATLAS, gcc and mpich

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park :: (basic)installation for hpl-2.0 with ATLAS, gcc and mpich

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

CEP's Blog -by Kage Park

[로그인][오픈아이디란?]