
Add an allocator tool for finding HDFS file locations and obtaining allocations for those nodes (supports both Hadoop 1 and 2). Split the Java support into two parts: detection of Java support (now handled in ORTE) and the request for the Java MPI bindings (handled in OMPI).
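
For context, a rough end-to-end sketch of how the new pieces are meant to be used together (install paths, file names, and hostnames below are hypothetical, not taken from this commit):

    # Hadoop support is detected from the environment at configure time
    export HADOOP_HOME=/opt/hadoop-1.0.2
    ./configure --enable-hadoop --enable-mpi-java
    make install

    # Find the nodes holding the blocks of an HDFS file and request a
    # SLURM allocation on those nodes via the new wrapper
    hdfsalloc --slurm -f /data/input.txt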

This commit was SVN r26414.
This commit is contained in:
Ralph Castain 2012-05-09 01:13:49 +00:00
parent 02aa36f2e5
commit fd796cce0a
14 changed files: 751 additions and 186 deletions

View file

@@ -563,10 +563,21 @@ OPAL_CHECK_COMPILER_VERSION_ID
##################################
# Java compiler characteristics
##################################
# Needed for ORTE - OMPI will AC_REQUIRE where needed
m4_ifdef([project_orte], [ORTE_SETUP_JAVA])
# We don't need Java unless we're building Open MPI; ORTE and OPAL do
# not use Java at all
m4_ifdef([project_ompi], [OMPI_SETUP_JAVA])
##################################
# Java MPI Binding request
##################################
# Only needed for OMPI
m4_ifdef([project_ompi], [OMPI_SETUP_JAVA_BINDINGS])
##################################
# Hadoop support
##################################
# We can't use Hadoop support unless we're building ORTE
m4_ifdef([project_orte], [ORTE_SETUP_HADOOP])
##################################

View file

@@ -1,3 +1,4 @@
enable_hadoop=yes
enable_opal_multi_threads=no
enable_ft_thread=no
enable_mem_debug=no

View file

@@ -1,3 +1,4 @@
enable_hadoop=yes
enable_mem_debug=yes
enable_mem_profile=no
enable_debug_symbols=yes

View file

@@ -22,21 +22,24 @@ dnl $HEADER$
dnl
# This macro is necessary to get the title to be displayed first. :-)
AC_DEFUN([OMPI_SETUP_JAVA_BANNER],[
ompi_show_subtitle "Java compiler"
AC_DEFUN([OMPI_SETUP_JAVA_BINDINGS_BANNER],[
ompi_show_subtitle "Java MPI bindings"
])
# OMPI_SETUP_JAVA()
# OMPI_SETUP_JAVA_BINDINGS()
# ----------------
# Do everything required to setup the Java compiler. Safe to AC_REQUIRE
# Do everything required to setup the Java MPI bindings. Safe to AC_REQUIRE
# this macro.
AC_DEFUN([OMPI_SETUP_JAVA],[
AC_REQUIRE([OMPI_SETUP_JAVA_BANNER])
AC_DEFUN([OMPI_SETUP_JAVA_BINDINGS],[
# must have Java setup
AC_REQUIRE([ORTE_SETUP_JAVA])
AC_REQUIRE([OMPI_SETUP_JAVA_BINDINGS_BANNER])
AC_MSG_CHECKING([if want Java bindings])
AC_ARG_ENABLE(mpi-java,
AC_HELP_STRING([--enable-mpi-java],
[enable Java MPI bindings (default: enabled)]))
[enable Java MPI bindings (default: disabled)]))
# Only build the Java bindings if requested
if test "$enable_mpi_java" = "yes"; then
@@ -45,12 +48,18 @@ AC_DEFUN([OMPI_SETUP_JAVA],[
AC_MSG_CHECKING([if shared libraries are enabled])
AS_IF([test "$enable_shared" != "yes"],
[AC_MSG_RESULT([no])
AS_IF([test "$enable_mpi_java" = "yes"],
[AC_MSG_WARN([Java bindings cannot be built without shared libraries])
AC_MSG_ERROR([Cannot continue])],
[AC_MSG_WARN([Java bindings will not build as they require --enable-shared])
WANT_MPI_JAVA_SUPPORT=0])],
AC_MSG_WARN([Java bindings cannot be built without shared libraries])
AC_MSG_WARN([Please reconfigure with --enable-shared])
AC_MSG_ERROR([Cannot continue])],
[AC_MSG_RESULT([yes])])
# must have Java support
AC_MSG_CHECKING([if Java support was found])
AS_IF([test "$orte_java_happy" = "yes"],
[AC_MSG_RESULT([yes])],
[AC_MSG_WARN([Java MPI bindings requested, but Java support was not found])
AC_MSG_WARN([Please reconfigure the --with-jdk options to where Java])
AC_MSG_WARN([support can be found])
AC_MSG_ERROR([Cannot continue])])
else
AC_MSG_RESULT([no])
WANT_MPI_JAVA_SUPPORT=0
@@ -59,148 +68,14 @@ AC_DEFUN([OMPI_SETUP_JAVA],[
[do we want java mpi bindings])
AM_CONDITIONAL(OMPI_WANT_JAVA_BINDINGS, test "$WANT_MPI_JAVA_SUPPORT" = "1")
AC_ARG_WITH(jdk-dir,
AC_HELP_STRING([--with-jdk-dir(=DIR)],
[Location of the JDK header directory. If you use this option, do not specify --with-jdk-bindir or --with-jdk-headers.]))
AC_ARG_WITH(jdk-bindir,
AC_HELP_STRING([--with-jdk-bindir(=DIR)],
[Location of the JDK bin directory. If you use this option, you must also use --with-jdk-headers (and you must NOT use --with-jdk-dir)]))
AC_ARG_WITH(jdk-headers,
AC_HELP_STRING([--with-jdk-headers(=DIR)],
[Location of the JDK header directory. If you use this option, you must also use --with-jdk-bindir (and you must NOT use --with-jdk-dir)]))
# Check for bozo case: ensure a directory was specified
AS_IF([test "$with_jdk_dir" = "yes" -o "$with_jdk_dir" = "no"],
[AC_MSG_WARN([Must specify a directory name for --with-jdk-dir])
AC_MSG_ERROR([Cannot continue])])
AS_IF([test "$with_jdk_bindir" = "yes" -o "$with_jdk_bindir" = "no"],
[AC_MSG_WARN([Must specify a directory name for --with-jdk-bindir])
AC_MSG_ERROR([Cannot continue])])
AS_IF([test "$with_jdk_headers" = "yes" -o "$with_jdk_headers" = "no"],
[AC_MSG_WARN([Must specify a directory name for --with-jdk-headers])
AC_MSG_ERROR([Cannot continue])])
# Check for bozo case: either specify --with-jdk-dir or
# (--with-jdk-bindir, --with-jdk-headers) -- not both.
bad=0
AS_IF([test -n "$with_jdk_dir" -a -n "$with_jdk_bindir" -o \
-n "$with_jdk_dir" -a -n "$with_jdk_headers"],[bad=1])
AS_IF([test -z "$with_jdk_bindir" -a -n "$with_jdk_headers" -o \
-n "$with_jdk_bindir" -a -z "$with_jdk_headers"],[bad=1])
AS_IF([test "$bad" = "1"],
[AC_MSG_WARN([Either specify --with-jdk-dir or both of (--with-jdk-bindir, --with-jdk-headers) -- not both.])
AC_MSG_ERROR([Cannot continue])])
AS_IF([test -n "$with_jdk_dir"],
[with_jdk_bindir=$with_jdk_dir/bin
with_jdk_headers=$with_jdk_dir/include])
##################################################################
# with_jdk_dir can now be ignored; with_jdk_bindir and
# with_jdk_headers will be either empty or have valid values.
##################################################################
# Some java installations are in obscure places. So let's
# hard-code a few of the common ones so that users don't have to
# specify --with-java-<foo>=LONG_ANNOYING_DIRECTORY.
AS_IF([test "$WANT_MPI_JAVA_SUPPORT" = "1" -a -z "$with_jdk_dir" \
-a -z "$with_jdk_dir" -a -z "$with_jdk_bindir"],
[ # OS X Snow Leopard and Lion (10.6 and 10.7 -- did not
# check prior versions)
found=0
dir=/System/Library/Frameworks/JavaVM.framework/Versions/Current/Headers
AS_IF([test -d $dir], [found=1
with_jdk_headers=$dir
with_jdk_bindir=/usr/bin])
# Various Linux
dir='/usr/lib/jvm/java-*-openjdk-*/include/'
jnih=`ls $dir/jni.h 2>/dev/null | head -n 1`
AS_IF([test -r "$jnih"],
[with_jdk_headers=`dirname $jnih`
OPAL_WHICH([javac], [with_jdk_bindir])
AS_IF([test -n "$with_jdk_bindir"],
[found=1
with_jdk_bindir=`dirname $with_jdk_bindir`],
[with_jdk_headers=])],
[dir='/usr/lib/jvm/default-java/include/'
jnih=`ls $dir/jni.h 2>/dev/null | head -n 1`
AS_IF([test -r "$jnih"],
[with_jdk_headers=`dirname $jnih`
OPAL_WHICH([javac], [with_jdk_bindir])
AS_IF([test -n "$with_jdk_bindir"],
[found=1
with_jdk_bindir=`dirname $with_jdk_bindir`],
[with_jdk_headers=])])])
# Solaris
dir=/usr/java
AS_IF([test "$found" -eq 0 -a -d $dir],
[with_jdk_headers=$dir/include
with_jdk_bindir=$dir/bin])
# If we think we found them, announce
AS_IF([test -n "$with_jdk_headers" -a "$with_jdk_bindir"],
[AC_MSG_NOTICE([guessing that JDK headers are in $with_jdk_headers])
AC_MSG_NOTICE([guessing that JDK javac is in $with_jdk_bindir])])
])
# Find javac and jni.h
AS_IF([test "$WANT_MPI_JAVA_SUPPORT" = "1"],
[OMPI_CHECK_WITHDIR([jdk-bindir], [$with_jdk_bindir], [javac])
OMPI_CHECK_WITHDIR([jdk-headers], [$with_jdk_headers], [jni.h])])
# Look for various Java-related programs
ompi_java_happy=no
AS_IF([test "$WANT_MPI_JAVA_SUPPORT" = "1"],
[PATH_save=$PATH
AS_IF([test -n "$with_jdk_bindir" -a "$with_jdk_bindir" != "yes" -a "$with_jdk_bindir" != "no"],
[PATH="$PATH:$with_jdk_bindir"])
AC_PATH_PROG(JAVAC, javac)
AC_PATH_PROG(JAVAH, javah)
AC_PATH_PROG(JAR, jar)
PATH=$PATH_save
# Check to see if we have all 3 programs.
AS_IF([test -z "$JAVAC" -o -z "$JAVAH" -o -z "$JAR"],
[ompi_java_happy=no],
[ompi_java_happy=yes])
])
# Look for jni.h
AS_IF([test "$WANT_MPI_JAVA_SUPPORT" = "1" -a "$ompi_java_happy" = "yes"],
[CPPFLAGS_save=$CPPFLAGS
AS_IF([test -n "$with_jdk_headers" -a "$with_jdk_headers" != "yes" -a "$with_jdk_headers" != "no"],
[OMPI_JDK_CPPFLAGS="-I$with_jdk_headers"
# Some flavors of JDK also require -I<blah>/linux.
# See if that's there, and if so, add a -I for that,
# too. Ugh.
AS_IF([test -d "$with_jdk_headers/linux"],
[OMPI_JDK_CPPFLAGS="$OMPI_JDK_CPPFLAGS -I$with_jdk_headers/linux"])
# Solaris JDK also require -I<blah>/solaris.
# See if that's there, and if so, add a -I for that,
# too. Ugh.
AS_IF([test -d "$with_jdk_headers/solaris"],
[OMPI_JDK_CPPFLAGS="$OMPI_JDK_CPPFLAGS -I$with_jdk_headers/solaris"])
CPPFLAGS="$CPPFLAGS $OMPI_JDK_CPPFLAGS"])
AC_CHECK_HEADER([jni.h], [],
[ompi_java_happy=no])
CPPFLAGS=$CPPFLAGS_save
])
AC_SUBST(OMPI_JDK_CPPFLAGS)
# Check for pinning support
# Uncomment when ready (or delete if we don't want it)
AS_IF([test "$WANT_MPI_JAVA_SUPPORT" = "1" -a "$ompi_java_happy" = "yes"],
AS_IF([test "$WANT_MPI_JAVA_SUPPORT" = "1"],
[dnl OMPI_JAVA_CHECK_PINNING
echo ======we should check for java pinning support here...
])
# Are we happy?
AS_IF([test "$WANT_MPI_JAVA_SUPPORT" = "1" -a "$ompi_java_happy" = "no"],
[AC_MSG_WARN([Java MPI bindings requested, but unable to find proper support])
AC_MSG_ERROR([Cannot continue])])
AS_IF([test "$WANT_MPI_JAVA_SUPPORT" = "1"],
[AC_MSG_WARN([******************************************************])
AC_MSG_WARN([*** Java MPI bindings are provided on a provisional])

View file

@@ -12,7 +12,7 @@
if OMPI_WANT_JAVA_BINDINGS
# Get the include files that were generated from the .java source files
AM_CPPFLAGS = -I$(top_builddir)/ompi/mpi/java/java $(OMPI_JDK_CPPFLAGS) $(LTDLINCL)
AM_CPPFLAGS = -I$(top_builddir)/ompi/mpi/java/java $(ORTE_JDK_CPPFLAGS) $(LTDLINCL)
headers = \
mpiJava.h

View file

@@ -33,5 +33,7 @@ AC_DEFUN([ORTE_CONFIG_FILES],[
orte/tools/orte-migrate/Makefile
orte/tools/orte-info/Makefile
orte/tools/mapreduce/Makefile
orte/tools/allocators/Makefile
orte/tools/allocators/hdfsalloc.pl
])
])

orte/config/orte_setup_hadoop.m4 (new file, 69 lines)
View file

@@ -0,0 +1,69 @@
dnl -*- shell-script -*-
dnl
dnl Copyright (c) 2012 Los Alamos National Security, Inc. All rights reserved.
dnl $COPYRIGHT$
dnl
dnl Additional copyrights may follow
dnl
dnl $HEADER$
dnl
# This macro is necessary to get the title to be displayed first. :-)
AC_DEFUN([ORTE_SETUP_HADOOP_BANNER],[
ompi_show_subtitle "HADOOP class libraries"
])
# ORTE_SETUP_HADOOP()
# ----------------
# Do everything required to setup the HADOOP libraries. Safe to AC_REQUIRE
# this macro.
AC_DEFUN([ORTE_SETUP_HADOOP],[
AC_REQUIRE([ORTE_SETUP_HADOOP_BANNER])
AC_MSG_CHECKING([if want Hadoop support])
AC_ARG_ENABLE(hadoop,
AC_HELP_STRING([--enable-hadoop],
[Enable Hadoop support - path to Hadoop taken from environment]))
# Collect the jars
ORTE_HAVE_HADOOP_SERIES_1=0
ORTE_HAVE_HADOOP_SERIES_2=0
# Only build the Hadoop support if requested
if test "$enable_hadoop" == "yes"; then
AC_MSG_RESULT([yes])
WANT_HADOOP_SUPPORT=1
# if this is Hadoop 2.x, we will find a share/hadoop/common
# directory under the location given in the environ
AC_MSG_CHECKING([for Hadoop 2.0 commons directory])
AS_IF([test "x$HADOOP_COMMON_HOME" != "x" -a -d "$HADOOP_COMMON_HOME/share/hadoop/common"],
[AC_MSG_RESULT([found])
ORTE_HAVE_HADOOP_SERIES_2=1],
[AC_MSG_RESULT([not found])
# check instead for Hadoop 1.0.2
AC_MSG_CHECKING([for Hadoop 1.0.2])
AS_IF([test "x$HADOOP_HOME" != "x" -a -f "$HADOOP_HOME/hadoop-core-1.0.2.jar"],
[AC_MSG_RESULT([found])
ORTE_HAVE_HADOOP_SERIES_1=1],
[AC_MSG_RESULT([not found])
AC_MSG_WARN([HADOOP support requested but supported version not found])
AC_MSG_ERROR([Cannot continue])])])
else
AC_MSG_RESULT([no])
WANT_HADOOP_SUPPORT=0
fi
AC_SUBST([ORTE_HAVE_HADOOP_SERIES_1])
AC_DEFINE_UNQUOTED([ORTE_WANT_HADOOP_SUPPORT], [$WANT_HADOOP_SUPPORT],
[do we want hadoop support])
AM_CONDITIONAL(ORTE_WANT_HADOOP_SUPPORT, test "$WANT_HADOOP_SUPPORT" = "1")
AM_CONDITIONAL(ORTE_HAVE_HADOOP_SERIES1, test "$ORTE_HAVE_HADOOP_SERIES_1" = "1")
AC_DEFINE_UNQUOTED([ORTE_HAVE_HADOOP_SERIES1], [$ORTE_HAVE_HADOOP_SERIES_1],
[do we have MRV1])
AM_CONDITIONAL(ORTE_HAVE_HADOOP_SERIES2, test "$ORTE_HAVE_HADOOP_SERIES_2" = "1")
AC_DEFINE_UNQUOTED([ORTE_HAVE_HADOOP_SERIES2], [$ORTE_HAVE_HADOOP_SERIES_2],
[do we have MRV2])
])
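
In short, the macro keys the Hadoop series entirely off the environment; a minimal sketch of the two layouts it probes (install paths hypothetical):

    # Hadoop 2.x: detected under HADOOP_COMMON_HOME, e.g. /opt/hadoop-2.0
    test -d "$HADOOP_COMMON_HOME/share/hadoop/common"
    # Hadoop 1.0.2: detected under HADOOP_HOME, e.g. /opt/hadoop-1.0.2
    test -f "$HADOOP_HOME/hadoop-core-1.0.2.jar"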

orte/config/orte_setup_java.m4 (new file, 174 lines)
View file

@@ -0,0 +1,174 @@
dnl -*- shell-script -*-
dnl
dnl Copyright (c) 2004-2005 The Trustees of Indiana University and Indiana
dnl University Research and Technology
dnl Corporation. All rights reserved.
dnl Copyright (c) 2004-2006 The University of Tennessee and The University
dnl of Tennessee Research Foundation. All rights
dnl reserved.
dnl Copyright (c) 2004-2008 High Performance Computing Center Stuttgart,
dnl University of Stuttgart. All rights reserved.
dnl Copyright (c) 2004-2006 The Regents of the University of California.
dnl All rights reserved.
dnl Copyright (c) 2006-2012 Los Alamos National Security, LLC. All rights
dnl reserved.
dnl Copyright (c) 2007-2012 Oracle and/or its affiliates. All rights reserved.
dnl Copyright (c) 2008-2012 Cisco Systems, Inc. All rights reserved.
dnl $COPYRIGHT$
dnl
dnl Additional copyrights may follow
dnl
dnl $HEADER$
dnl
# This macro is necessary to get the title to be displayed first. :-)
AC_DEFUN([ORTE_SETUP_JAVA_BANNER],[
ompi_show_subtitle "Java compiler"
])
# ORTE_SETUP_JAVA()
# ----------------
# Do everything required to setup the Java compiler. Safe to AC_REQUIRE
# this macro.
AC_DEFUN([ORTE_SETUP_JAVA],[
AC_REQUIRE([ORTE_SETUP_JAVA_BANNER])
AC_ARG_WITH(jdk-dir,
AC_HELP_STRING([--with-jdk-dir(=DIR)],
[Location of the JDK header directory. If you use this option, do not specify --with-jdk-bindir or --with-jdk-headers.]))
AC_ARG_WITH(jdk-bindir,
AC_HELP_STRING([--with-jdk-bindir(=DIR)],
[Location of the JDK bin directory. If you use this option, you must also use --with-jdk-headers (and you must NOT use --with-jdk-dir)]))
AC_ARG_WITH(jdk-headers,
AC_HELP_STRING([--with-jdk-headers(=DIR)],
[Location of the JDK header directory. If you use this option, you must also use --with-jdk-bindir (and you must NOT use --with-jdk-dir)]))
# Check for bozo case: ensure a directory was specified
AS_IF([test "$with_jdk_dir" = "yes" -o "$with_jdk_dir" = "no"],
[AC_MSG_WARN([Must specify a directory name for --with-jdk-dir])
AC_MSG_ERROR([Cannot continue])])
AS_IF([test "$with_jdk_bindir" = "yes" -o "$with_jdk_bindir" = "no"],
[AC_MSG_WARN([Must specify a directory name for --with-jdk-bindir])
AC_MSG_ERROR([Cannot continue])])
AS_IF([test "$with_jdk_headers" = "yes" -o "$with_jdk_headers" = "no"],
[AC_MSG_WARN([Must specify a directory name for --with-jdk-headers])
AC_MSG_ERROR([Cannot continue])])
# Check for bozo case: either specify --with-jdk-dir or
# (--with-jdk-bindir, --with-jdk-headers) -- not both.
bad=0
AS_IF([test -n "$with_jdk_dir" -a -n "$with_jdk_bindir" -o \
-n "$with_jdk_dir" -a -n "$with_jdk_headers"],[bad=1])
AS_IF([test -z "$with_jdk_bindir" -a -n "$with_jdk_headers" -o \
-n "$with_jdk_bindir" -a -z "$with_jdk_headers"],[bad=1])
AS_IF([test "$bad" = "1"],
[AC_MSG_WARN([Either specify --with-jdk-dir or both of (--with-jdk-bindir, --with-jdk-headers) -- not both.])
AC_MSG_ERROR([Cannot continue])])
AS_IF([test -n "$with_jdk_dir"],
[with_jdk_bindir=$with_jdk_dir/bin
with_jdk_headers=$with_jdk_dir/include])
##################################################################
# with_jdk_dir can now be ignored; with_jdk_bindir and
# with_jdk_headers will be either empty or have valid values.
##################################################################
# Some java installations are in obscure places. So let's
# hard-code a few of the common ones so that users don't have to
# specify --with-java-<foo>=LONG_ANNOYING_DIRECTORY.
AS_IF([test -z "$with_jdk_dir" -a -z "$with_jdk_dir" -a -z "$with_jdk_bindir"],
[ # OS X Snow Leopard and Lion (10.6 and 10.7 -- did not
# check prior versions)
found=0
dir=/System/Library/Frameworks/JavaVM.framework/Versions/Current/Headers
AS_IF([test -d $dir], [found=1
with_jdk_headers=$dir
with_jdk_bindir=/usr/bin])
# Various Linux
dir='/usr/lib/jvm/java-*-openjdk-*/include/'
jnih=`ls $dir/jni.h 2>/dev/null | head -n 1`
AS_IF([test -r "$jnih"],
[with_jdk_headers=`dirname $jnih`
OPAL_WHICH([javac], [with_jdk_bindir])
AS_IF([test -n "$with_jdk_bindir"],
[found=1
with_jdk_bindir=`dirname $with_jdk_bindir`],
[with_jdk_headers=])],
[dir='/usr/lib/jvm/default-java/include/'
jnih=`ls $dir/jni.h 2>/dev/null | head -n 1`
AS_IF([test -r "$jnih"],
[with_jdk_headers=`dirname $jnih`
OPAL_WHICH([javac], [with_jdk_bindir])
AS_IF([test -n "$with_jdk_bindir"],
[found=1
with_jdk_bindir=`dirname $with_jdk_bindir`],
[with_jdk_headers=])])])
# Solaris
dir=/usr/java
AS_IF([test "$found" -eq 0 -a -d $dir],
[with_jdk_headers=$dir/include
with_jdk_bindir=$dir/bin])
# If we think we found them, announce
AS_IF([test -n "$with_jdk_headers" -a "$with_jdk_bindir"],
[AC_MSG_NOTICE([guessing that JDK headers are in $with_jdk_headers])
AC_MSG_NOTICE([guessing that JDK javac is in $with_jdk_bindir])])
])
# Find javac and jni.h
OMPI_CHECK_WITHDIR([jdk-bindir], [$with_jdk_bindir], [javac])
OMPI_CHECK_WITHDIR([jdk-headers], [$with_jdk_headers], [jni.h])
# Look for various Java-related programs
orte_java_happy=no
PATH_save=$PATH
AS_IF([test -n "$with_jdk_bindir" -a "$with_jdk_bindir" != "yes" -a "$with_jdk_bindir" != "no"],
[PATH="$PATH:$with_jdk_bindir"])
AC_PATH_PROG(JAVAC, javac)
AC_PATH_PROG(JAVAH, javah)
AC_PATH_PROG(JAR, jar)
PATH=$PATH_save
# Check to see if we have all 3 programs.
AS_IF([test -z "$JAVAC" -o -z "$JAVAH" -o -z "$JAR"],
[orte_java_happy=no
HAVE_JAVA_SUPPORT=0],
[orte_java_happy=yes
HAVE_JAVA_SUPPORT=1])
# Look for jni.h
AS_IF([test "$orte_java_happy" = "yes"],
[CPPFLAGS_save=$CPPFLAGS
AS_IF([test -n "$with_jdk_headers" -a "$with_jdk_headers" != "yes" -a "$with_jdk_headers" != "no"],
[ORTE_JDK_CPPFLAGS="-I$with_jdk_headers"
# Some flavors of JDK also require -I<blah>/linux.
# See if that's there, and if so, add a -I for that,
# too. Ugh.
AS_IF([test -d "$with_jdk_headers/linux"],
[ORTE_JDK_CPPFLAGS="$ORTE_JDK_CPPFLAGS -I$with_jdk_headers/linux"])
# Solaris JDK also require -I<blah>/solaris.
# See if that's there, and if so, add a -I for that,
# too. Ugh.
AS_IF([test -d "$with_jdk_headers/solaris"],
[ORTE_JDK_CPPFLAGS="$ORTE_JDK_CPPFLAGS -I$with_jdk_headers/solaris"])
CPPFLAGS="$CPPFLAGS $ORTE_JDK_CPPFLAGS"])
AC_CHECK_HEADER([jni.h], [],
[orte_java_happy=no])
CPPFLAGS=$CPPFLAGS_save
])
AC_SUBST(ORTE_JDK_CPPFLAGS)
# Are we happy?
AC_MSG_CHECKING([Java support available])
AS_IF([test "$orte_java_happy" = "no"],
[AC_MSG_RESULT([no])],
[AC_MSG_RESULT([yes])])
AC_DEFINE_UNQUOTED([ORTE_HAVE_JAVA_SUPPORT], [$HAVE_JAVA_SUPPORT], [do we have Java support])
AM_CONDITIONAL(ORTE_HAVE_JAVA_SUPPORT, test "$orte_java_happy" = "yes")
])
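
The JDK location can be given to this macro in one of two mutually exclusive forms; a usage sketch (the JDK path is hypothetical):

    # Either point configure at the whole JDK tree...
    ./configure --with-jdk-dir=/usr/lib/jvm/java-6-openjdk
    # ...or name the bin and header directories separately (never both styles)
    ./configure --with-jdk-bindir=/usr/lib/jvm/java-6-openjdk/bin \
                --with-jdk-headers=/usr/lib/jvm/java-6-openjdk/include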

View file

@@ -11,7 +11,7 @@
# Copyright (c) 2004-2005 The Regents of the University of California.
# All rights reserved.
# Copyright (c) 2006-2008 Cisco Systems, Inc. All rights reserved.
# Copyright (c) 2011 Los Alamos National Security, LLC. All rights
# Copyright (c) 2011-2012 Los Alamos National Security, LLC. All rights
# reserved.
# $COPYRIGHT$
#
@@ -38,6 +38,10 @@ SUBDIRS += \
tools/orte-migrate \
tools/mapreduce
#if ORTE_WANT_HADOOP_SUPPORT
SUBDIRS += tools/allocators
#endif
DIST_SUBDIRS += \
tools/orte-checkpoint \
tools/orte-clean \
@@ -49,5 +53,6 @@ DIST_SUBDIRS += \
tools/orte-top \
tools/orte-info \
tools/orte-migrate \
tools/mapreduce
tools/mapreduce \
tools/allocators

orte/tools/allocators/HDFSFileFinder.java (new file, 153 lines)
View file

@@ -0,0 +1,153 @@
/*
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/*
* Derived from work by Simon Fortelny
*/
import java.io.PrintWriter;
import java.io.IOException;
// import java.net.URI;
import java.net.InetAddress;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class HDFSFileFinder {
private final static String name = "Hadoop File Finder";
final static String NEWLINE="\n";
private static String fsName="hdfs://localhost:9000" ;
private static String userAndGroup="superuser,supergroup";
private static String fileName;
private static PrintWriter writer;
private static FileStatus status;
private static BlockLocation[] bLocations;
private static boolean verbose = false;
public static void main(String [ ] args){
try {
writer = new PrintWriter(System.out);
parseCommandLineOptions(args);
} catch (ParseException e) {
System.out.println("There was an exception processing the supplied options");
printUsage(writer);
e.printStackTrace();
System.exit(0);
}
getBlockLocationsFromHdfs();
}
private static Options createCommandLineOptions() {
Options options = new Options();
Option host = OptionBuilder.withArgName( "fs.default.name" )
.hasArg()
.withDescription( "fs.default.name of hadoop namenode e.g. hdfs://localhost:9000" )
.create( "h" );
options.addOption(host);
Option filename = OptionBuilder.withArgName( "filename" )
.hasArg()
.withDescription( "The file to show node locations for" )
.create( "f" );
options.addOption(filename);
Option debug = OptionBuilder.withArgName( "verbose" )
.withDescription( "Provide debug output" )
.create( "v" );
options.addOption(debug);
return options;
}
private static void printUsage(PrintWriter writer){
final HelpFormatter usageFormatter = new HelpFormatter();
usageFormatter.printUsage(writer, 80, name, createCommandLineOptions());
}
private static void parseCommandLineOptions(String [] args) throws ParseException {
StringBuilder sb = new StringBuilder();
Options options = createCommandLineOptions();
CommandLineParser parser = new PosixParser();
CommandLine cmd=null;
cmd = parser.parse(options, args);
//parse cmd line args
if (cmd.hasOption("h")) {
fsName = cmd.getOptionValue("h");
}
if (cmd.hasOption("f")) {
fileName = cmd.getOptionValue("f");
}
if (cmd.hasOption("v")) {
verbose = true;
sb.append("DEBUG: File being located: ").append(fileName).append(NEWLINE);
writer.print(sb.toString());
writer.flush();
}
}
private static void getBlockLocationsFromHdfs(){
StringBuilder sb = new StringBuilder();
Configuration conf = new Configuration();
boolean first = true;
// make connection to hdfs
try {
if (verbose) {
writer.println("DEBUG: Trying to connect to "+ fsName);
}
FileSystem fs = FileSystem.get(conf);
Path file = new Path(fileName);
FileStatus fStatus = fs.getFileStatus(file);
status=fStatus;
bLocations= fs.getFileBlockLocations(status, 0, status.getLen());
//print out all block locations
for (BlockLocation aLocation : bLocations){
String[] names = aLocation.getHosts();
for (String name : names) {
InetAddress addr = InetAddress.getByName(name);
String host = addr.getHostName();
int idx = host.indexOf('.');
String hostname;
if (0 < idx) {
hostname = host.substring(0, host.indexOf('.'));
} else {
hostname = host;
}
if (first) {
sb.append(hostname);
first = false;
} else {
sb.append(",").append(hostname);
}
}
}
sb.append(NEWLINE);
} catch (IOException e) {
writer.println("Error getting block location data from namenode");
e.printStackTrace();
}
writer.print(sb.toString());
writer.flush();
}
}
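
A hypothetical direct invocation of the finder (it is normally launched through the hdfsalloc wrapper, which assembles this classpath automatically; the HDFS file name is made up):

    # Classpath layout for a Hadoop 1.x install, as used by the wrapper
    java -cp .:$HADOOP_HOME/*:$HADOOP_HOME/lib/* \
         HDFSFileFinder -h hdfs://localhost:9000 -f /data/input.txt
    # Output: a comma-separated list of short hostnames holding the
    # file's blocks, e.g. node01,node07,node12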

orte/tools/allocators/Makefile.am (new file, 83 lines)
View file

@@ -0,0 +1,83 @@
#
# Copyright (c) 2012 Los Alamos National Security, Inc.
# All rights reserved.
# $COPYRIGHT$
#
# Additional copyrights may follow
#
# $HEADER$
#
include $(top_srcdir)/Makefile.man-page-rules
man_pages = hdfsalloc.1
EXTRA_DIST = $(man_pages:.1=.1in)
bin_PROGRAMS =
# Automake doesn't directly know about Java source files, and we compile
# them via *.java below (ick!). So we just list them here in EXTRA_DIST
# so that they get picked up by "make dist".
EXTRA_DIST += HDFSFileFinder.java
if OMPI_INSTALL_BINARIES
if !ORTE_DISABLE_FULL_SUPPORT
# Only do this stuff if we want Hadoop support
if ORTE_WANT_HADOOP_SUPPORT
nodist_man_MANS = $(man_pages)
# Ensure that the man pages are rebuilt if the opal_config.h file
# changes; a "good enough" way to know if configure was run again (and
# therefore the release date or version may have changed)
$(nodist_man_MANS): $(top_builddir)/opal/include/opal_config.h
# A little verbosity magic; "make" will show the terse output. "make
# V=1" will show the actual commands used (just like the other
# Automake-generated compilation/linker rules).
ORTE_V_JAVAC = $(orte__v_JAVAC_$(V))
orte__v_JAVAC_ = $(orte__v_JAVAC_$(AM_DEFAULT_VERBOSITY))
orte__v_JAVAC_0 = @echo " JAVAC " `basename $@`;
ORTE_V_JAR = $(orte__v_JAR_$(V))
orte__v_JAR_ = $(orte__v_JAR_$(AM_DEFAULT_VERBOSITY))
orte__v_JAR_0 = @echo " JAR " `basename $@`;
bin_PROGRAMS += HDFSFileFinder.class
if ORTE_HAVE_HADOOP_SERIES1
hadoop_jars = .:$(HADOOP_HOME)/*:$(HADOOP_HOME)/lib/*
else
hadoop_jars = .:$(HADOOP_COMMON_HOME)/share/hadoop/common/*:$(HADOOP_COMMON_HOME)/share/hadoop/common/lib/*:$(HADOOP_HDFS_HOME)/share/hadoop/hdfs/*:$(HADOOP_HDFS_HOME)/share/hadoop/hdfs/lib/*:$(HADOOP_MAPRED_HOME)/share/hadoop/mapreduce/*:$(HADOOP_MAPRED_HOME)/share/hadoop/mapreduce/lib/*:$(YARN_HOME)/share/hadoop/httpfs/tomcat/lib/*:$(YARN_HOME)/share/hadoop/httpfs/tomcat/webapps/webhdfs/WEB-INF/lib/*
endif
HDFSFileFinder.class: HDFSFileFinder.java
$(JAVAC) -d . -classpath $(hadoop_jars) $(top_srcdir)/orte/tools/allocators/HDFSFileFinder.java
test -z "$(bindir)" || $(mkdir_p) "$(DESTDIR)$(bindir)"
(cp hdfsalloc.pl $(DESTDIR)$(bindir))
(cd $(DESTDIR)$(bindir); chmod +x hdfsalloc.pl; rm -f hdfsalloc; $(LN_S) hdfsalloc.pl hdfsalloc)
# Generate the .jar file
hdfsalloc.jar: HDFSFileFinder.class
$(ORTE_V_JAR) $(JAR) cf hdfsalloc.jar HDFSFileFinder.class
# Install the jar file into libdir. Use the DATA Automake primary,
# because Automake will complain if you try to use LIBRARIES with a
# filename that doesn't fit the lib<foo>.* format. Also use an
# indirection to get to the libdir -- Automake does not allow putting
# libdir for the DATA primary.
javadir = $(libdir)
java_DATA = hdfsalloc.jar
# Clean up all the things that this Makefile.am generates.
CLEANFILES += HDFSFileFinder.class hdfsalloc.jar
endif # ORTE_WANT_HADOOP_SUPPORT
endif # !ORTE_DISABLE_FULL_SUPPORT
endif # OMPI_INSTALL_BINARIES
distclean-local:
rm -f $(man_pages)

orte/tools/allocators/hdfsalloc.1in (new file, 72 lines)
View file

@@ -0,0 +1,72 @@
.\"
.\" Copyright (c) 2007 Los Alamos National Security, LLC
.\" All rights reserved.
.\" Copyright (c) 2008-2009 Sun Microsystems, Inc. All rights reserved.
.\"
.\" Man page for OMPI's ompi-server command
.\"
.\" .TH name section center-footer left-footer center-header
.TH OMPI-SERVER 1 "#OMPI_DATE#" "#PACKAGE_VERSION#" "#PACKAGE_NAME#"
.\" **************************
.\" Name Section
.\" **************************
.SH NAME
.
ompi-server \- Server for supporting name publish/lookup operations.
.
.PP
.
.\" **************************
.\" Synopsis Section
.\" **************************
.SH SYNOPSIS
.
.BR ompi-server " [ options ]"
.
.\" **************************
.\" Options Section
.\" **************************
.SH Options
.
\fIompi-server\fR acts as a data server for Open MPI jobs to exchange
contact information in support of MPI-2's Publish_name and Lookup_name
functions.
.
.TP 10
.B -h | --help
Display help for this command
.
.
.TP
.B -d | --debug
Enable verbose output for debugging
.
.
.TP
.B -r | --report-uri \fR<value>\fP
Report the Open MPI contact information for the server. This information is
required for MPI jobs to use the data server. Three parameter values are supported:
(a) '-', indicating that the uri is to be printed to stdout; (b) '+', indicating that
the uri is to be printed to stderr; and (c) "file:path-to-file", indicating that
the uri is to be printed to the specified file. The "path-to-file" can be either
absolute or relative, but must be in a location where the user has write
permissions. Please note that the resulting file must be read-accessible to
expected users of the server.
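For example, the three forms look like this in practice (the file path is hypothetical):

    ompi-server -r -                  # print the URI to stdout
    ompi-server -r +                  # print the URI to stderr
    ompi-server -r file:/tmp/uri.txt  # write the URI to the named file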
.
.
.\" **************************
.\" Description Section
.\" **************************
.SH DESCRIPTION
.
.PP
\fIompi-server\fR acts as a data server for Open MPI jobs to exchange
contact information in support of MPI-2's Publish_name and Lookup_name
functions.
.
.\" **************************
.\" See Also Section
.\" **************************
.
.SH SEE ALSO
.

orte/tools/allocators/hdfsalloc.pl.in (new file, 139 lines)
View file

@@ -0,0 +1,139 @@
#!/usr/bin/env perl
# WARNING: DO NOT EDIT THE hdfsalloc.pl FILE AS IT IS GENERATED!
# MAKE ALL CHANGES IN hdfsalloc.pl.in
# Copyright (c) 2012 Los Alamos National Security, Inc.
# All rights reserved.
use strict;
# The main purpose of this wrapper is to add the proper
# class and execution paths to match the Hamster command
# to the library built to support it
# Let the build system provide us with some critical values
my $hadoop_jars;
if (1 == @ORTE_HAVE_HADOOP_SERIES_1@) {
my $hadoop_home = $ENV{'HADOOP_HOME'};
$hadoop_jars = ".:$hadoop_home/*:$hadoop_home/lib/*";
} else {
my $conf_dir = $ENV{'HADOOP_CONF_DIR'};
my $common_dir = $ENV{'HADOOP_COMMON_HOME'};
my $hdfs = $ENV{'HADOOP_HDFS_HOME'};
my $mr = $ENV{'HADOOP_MAPRED_HOME'};
my $yarn = $ENV{'YARN_HOME'};
$hadoop_jars = ".:$conf_dir:$common_dir/share/hadoop/common/*:$common_dir/share/hadoop/common/lib/*:$hdfs/share/hadoop/hdfs/*:$hdfs/share/hadoop/hdfs/lib/*:$mr/share/hadoop/mapreduce/*:$mr/share/hadoop/mapreduce/lib/*:$yarn/share/hadoop/httpfs/tomcat/lib/*:$yarn/share/hadoop/httpfs/tomcat/webapps/webhdfs/WEB-INF/lib/*";
}
my $hadoop_classpath = "$hadoop_jars:@OMPI_WRAPPER_LIBDIR@/hdfsalloc.jar";
my $ompi_libpath = "@OMPI_WRAPPER_LIBDIR@";
# globals
my $showme_arg = 0;
my $debug = 0;
my $my_arg;
my @save_args;
my $slurm = 0;
my $shell;
# Cannot use the usual GetOpts library as the user might
# be passing -options to us! So have to
# parse the options ourselves to look for help and showme
foreach $my_arg (@ARGV) {
if ($my_arg eq "-h" ||
$my_arg eq "--h" ||
$my_arg eq "-help" ||
$my_arg eq "--help") {
print "Options:
--showme Show the actual command without executing it
--debug | -d Print debug without performing allocation
--slurm Use SLURM allocator
--help | -h This help list\n";
exit;
} elsif ($my_arg eq "-showme" ||
$my_arg eq "--showme") {
$showme_arg = 1;
} elsif ($my_arg eq "-debug" ||
$my_arg eq "-d" ||
$my_arg eq "--debug") {
$debug = 1;
} elsif ($my_arg eq "-slurm" ||
$my_arg eq "--slurm") {
$slurm = 1;
} else {
# param to be passed to HDFSFileFinder
push(@save_args, $my_arg);
}
}
# update the CLASSPATH environmental variable to include our path
$ENV{'CLASSPATH'} = "$hadoop_classpath:$ENV{'CLASSPATH'}";
# get our shell
$shell = $ENV{'SHELL'};
# Create a place to save our argv array so we can edit any
# provided class path option
my @arguments = ();
# add the library path
my $where = "-Djava.library.path=.:" . $ompi_libpath;
push(@arguments, $where);
# put the hdfsalloc command at the beginning of
# the user-supplied args
push(@arguments, "HDFSFileFinder");
# put the verbose flag if requested
if ($debug) {
push(@arguments, "-v");
}
# push all user-supplied args
foreach $my_arg (@save_args) {
push(@arguments, $my_arg);
}
# Execute the command
my @output;
my $host;
my $out;
if ($showme_arg) {
print "CLASSPATH= " . $ENV{'CLASSPATH'} . "\n";
print "java @arguments\n";
} else {
if ($debug) {
print "CLASSPATH= " . $ENV{'CLASSPATH'} . "\n";
print "java @arguments\n\n";
}
open(my $fh, "-|", "java", @arguments)
or die "Cannot run program $!\n";
while (<$fh>) {
push(@output, $_);
}
close $fh;
if ($debug) {
for $out (@output) {
if (index($out, "DEBUG") != -1) {
print $out . "\n";
}
}
if ($slurm) {
for $out (@output) {
if (index($out, "DEBUG") == -1) {
chomp $out;
print "CMD: salloc --nodelist=$out $shell\n";
}
}
}
} else {
# execute the allocation request
if ($slurm) {
for $out (@output) {
chomp $out;
system("salloc --nodelist=$out $shell");
}
}
}
}
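
With --slurm, each non-DEBUG output line of HDFSFileFinder (a comma-separated hostname list) becomes one allocation request; with hypothetical hostnames, the executed command looks like:

    salloc --nodelist=node01,node07,node12 $SHELL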

View file

@@ -1,21 +1,17 @@
. -*- nroff -*-
.\" Copyright (c) 2009-2010 Cisco Systems, Inc. All rights reserved.
.\" Copyright (c) 2008-2009 Sun Microsystems, Inc. All rights reserved.
.\" Copyright (c) 2012 Los Alamos National Security, LLC. All rights reserved.
.\"
.\" Man page for ORTE's orterun command
.\" Man page for ORTE's mapreduce command
.\"
.\" .TH name section center-footer left-footer center-header
.TH MPIRUN 1 "#OMPI_DATE#" "#PACKAGE_VERSION#" "#PACKAGE_NAME#"
.TH MAPREDUCE 1 "#OMPI_DATE#" "#PACKAGE_VERSION#" "#PACKAGE_NAME#"
.\" **************************
.\" Name Section
.\" **************************
.SH NAME
.
orterun, mpirun, mpiexec \- Execute serial and parallel jobs in Open MPI.
mapreduce \- Execute mapreduce jobs in Open MPI.
.B Note:
\fImpirun\fP, \fImpiexec\fP, and \fIorterun\fP are all synonyms for each
other. Using any of the names will produce the same behavior.
.
.\" **************************
.\" Synopsis Section
@@ -23,40 +19,24 @@ other. Using any of the names will produce the same behavior.
.SH SYNOPSIS
.
.PP
Single Process Multiple Data (SPMD) Model:
.B mpirun
.B mapreduce
[ options ]
.B <program>
.B <mapper>
[ <args> : options ]
.B <reducer>
[ <args> ]
.P
Multiple Instruction Multiple Data (MIMD) Model:
.B mpirun
[ global_options ]
[ local_options1 ]
.B <program1>
[ <args1> ] :
[ local_options2 ]
.B <program2>
[ <args2> ] :
... :
[ local_optionsN ]
.B <programN>
[ <argsN> ]
.P
Note that in both models, invoking \fImpirun\fP via an absolute path
Note that invoking \fImapreduce\fP via an absolute path
name is equivalent to specifying the \fI--prefix\fP option with a
\fI<dir>\fR value equivalent to the directory where \fImpirun\fR
\fI<dir>\fR value equivalent to the directory where \fImapreduce\fR
resides, minus its last subdirectory. For example:
\fB%\fP /usr/local/bin/mpirun ...
\fB%\fP /usr/local/bin/mapreduce ...
is equivalent to
\fB%\fP mpirun --prefix /usr/local
\fB%\fP mapreduce --prefix /usr/local
.
.\" **************************
@@ -82,7 +62,7 @@ CPU slot. See the rest of this page for more details.
.\" **************************
.SH OPTIONS
.
.I mpirun
.I mapreduce
will send the name of the directory where it was invoked on the local
node to each of the remote nodes, and attempt to change to that
directory. See the "Current Working Directory" section below for further