{ stdenv, fetchurl, jre, bash, simpleBuildTool, python27Packages }:

stdenv.mkDerivation rec {
  name    = "spark-${version}";
  version = "0.9.0";

  src = fetchurl {
    url    = "http://d3kbcqa49mib13.cloudfront.net/${name}-incubating-bin-cdh4.tgz";
    sha256 = "0dgirq2ws25accijijanqij6d1mwxkrcqkmq1xsslfpz26svs1w1";
  };

  unpackPhase = ''tar zxf $src'';

  untarDir = "spark-${version}-incubating-bin-cdh4";

  installPhase = ''
    set -x
    mkdir -p $out/lib $out/bin
    mv ${untarDir} $out/lib

    # Generic launcher: sets SPARK_MEM and the JVM options, computes the
    # classpath from the unpacked distribution and runs the requested class
    # on the bundled JRE.
    cat > $out/bin/spark-class <<EOF
    #!${bash}/bin/bash
    if [ -z "\$1" ]; then
      echo "Usage: spark-class <class> [<args>]" >&2
      exit 1
    fi

    export SPARK_MEM=\''${SPARK_MEM:-1024m}

    JAVA_OPTS=""
    JAVA_OPTS="\$JAVA_OPTS -Djava.library.path=\"\$SPARK_LIBRARY_PATH\""
    JAVA_OPTS="\$JAVA_OPTS -Xms\$SPARK_MEM -Xmx\$SPARK_MEM"
    export JAVA_OPTS

    CLASSPATH=\`$out/lib/${untarDir}/bin/compute-classpath.sh\`
    export CLASSPATH

    exec ${jre}/bin/java -cp "\$CLASSPATH" \$JAVA_OPTS "\$@"
    EOF
    chmod +x $out/bin/spark-class

    # Interactive Scala REPL: saves the terminal settings, runs the REPL via
    # spark-class and restores the terminal on exit or interrupt.
    cat > $out/bin/spark-shell <<EOF
    #!${bash}/bin/bash
    exit_status=127
    saved_stty=""

    function restoreSttySettings() {
      stty \$saved_stty
      saved_stty=""
    }

    function onExit() {
      if [[ "\$saved_stty" != "" ]]; then
        restoreSttySettings
      fi
      exit \$exit_status
    }

    # re-enable echo if we are interrupted before the REPL completes
    trap onExit INT

    saved_stty=\$(stty -g 2>/dev/null)
    if [[ ! \$? ]]; then
      saved_stty=""
    fi

    $out/bin/spark-class \$OPTIONS org.apache.spark.repl.Main "\$@"
    exit_status=\$?
    onExit
    EOF
    chmod +x $out/bin/spark-shell

    # Python REPL: point Python at the bundled PySpark sources and start it
    # with the PySpark interactive startup file.
    cat > $out/bin/pyspark <<EOF
    #!${bash}/bin/bash
    export SPARK_HOME=$out/lib/${untarDir}
    export PYTHONPATH=$out/lib/${untarDir}/python:\$PYTHONPATH
    export OLD_PYTHONSTARTUP=\$PYTHONSTARTUP
    export PYTHONSTARTUP=$out/lib/${untarDir}/python/pyspark/shell.py
    exec ${python27Packages.python}/bin/python "\$@"
    EOF
    chmod +x $out/bin/pyspark

    # The exact bodies of the two upload helpers are uncertain; the wrappers
    # below are a minimal sketch that uses the otherwise-unused
    # simpleBuildTool (sbt) for Scala jobs and the pyspark wrapper for
    # Python jobs.
    cat > $out/bin/spark-upload-scala <<EOF
    #!${bash}/bin/bash
    export SPARK_HOME=$out/lib/${untarDir}
    exec ${simpleBuildTool}/bin/sbt run "\$@"
    EOF
    chmod +x $out/bin/spark-upload-scala

    cat > $out/bin/spark-upload-python <<EOF
    #!${bash}/bin/bash
    export SPARK_HOME=$out/lib/${untarDir}
    exec $out/bin/pyspark "\$@"
    EOF
    chmod +x $out/bin/spark-upload-python
  '';
}
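# Usage sketch (the file path and attribute name below are assumptions, not
# taken from the original expression): with this file saved as
# pkgs/applications/networking/cluster/spark/default.nix it can be exposed
# from all-packages.nix as
#
#   spark = callPackage ../applications/networking/cluster/spark { };
#
# and then built and tried out with
#
#   $ nix-build -A spark
#   $ SPARK_MEM=2048m ./result/bin/spark-shell
#   $ ./result/bin/pyspark
#
# SPARK_MEM defaults to 1024m in the generated spark-class wrapper.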