# Python bindings for Apache Spark, built from the sdist published on PyPI.
# Intended to be instantiated via callPackage from python-packages.nix.
{ buildPythonPackage, fetchPypi, stdenv, py4j }:

buildPythonPackage rec {
  pname = "pyspark";
  version = "2.4.0";

  src = fetchPypi {
    inherit pname version;
    sha256 = "1p7z5f1a20l7xkjkh88q9cvjw2x8jbrlydkycn5lh4qvx72vgmy9";
  };

  # pypandoc is broken with pandoc2, so we just lose docs.
  postPatch = ''
    sed -i "s/'pypandoc'//" setup.py

    # Current release works fine with py4j 0.10.8.1, so relax the
    # upstream exact pin to a compatible range.
    substituteInPlace setup.py --replace py4j==0.10.7 'py4j>=0.10.7,<0.11'
  '';

  propagatedBuildInputs = [ py4j ];

  # Tests assume running spark...
  doCheck = false;

  meta = with stdenv.lib; {
    description = "Apache Spark";
    # NOTE: URL is quoted; bare URL literals are deprecated (Nix RFC 45)
    # and rejected by newer Nix versions.
    homepage = "https://github.com/apache/spark/tree/master/python";
    license = licenses.asl20;
    maintainers = [ maintainers.shlevy ];
  };
}