3 changes: 2 additions & 1 deletion dev/run-pip-tests
@@ -76,7 +76,7 @@ for python in "${PYTHON_EXECS[@]}"; do
     VIRTUALENV_PATH="$VIRTUALENV_BASE"/$python
     rm -rf "$VIRTUALENV_PATH"
     if [ -n "$USE_CONDA" ]; then
-      conda create -y -p "$VIRTUALENV_PATH" python=$python numpy pandas pip setuptools
+      conda create -y -p "$VIRTUALENV_PATH" python=$python numpy pandas pip setuptools=46.0.0
       source activate "$VIRTUALENV_PATH"
     else
       mkdir -p "$VIRTUALENV_PATH"
@@ -110,6 +110,7 @@ for python in "${PYTHON_EXECS[@]}"; do
     cd /

     echo "Run basic sanity check on pip installed version with spark-submit"
+    # ls -al $(dirname $(dirname $(which spark-submit)))/lib/python3.6/site-packages/pyspark/bin/spark-class
     spark-submit "$FWDIR"/dev/pip-sanity-check.py
     echo "Run basic sanity check with import based"
     python3 "$FWDIR"/dev/pip-sanity-check.py
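The only functional change above is pinning setuptools=46.0.0 in the conda environment instead of letting conda resolve the latest release; the other addition is a commented-out debugging line. A minimal sketch of how the pin could be verified inside the activated environment; the script name and check are illustrative, not part of the PR:

    # check_setuptools_pin.py -- hypothetical helper, not included in the PR
    import setuptools

    # run-pip-tests creates the env with setuptools pinned to 46.0.0;
    # any other version here means conda resolved a different build.
    expected = "46.0.0"
    actual = setuptools.__version__
    assert actual == expected, f"expected setuptools {expected}, got {actual}"
    print(f"setuptools pin OK: {actual}")
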
23 changes: 14 additions & 9 deletions dev/run-tests.py
@@ -618,13 +618,14 @@ def main():
     test_modules = determine_modules_to_test(changed_modules)

     # license checks
-    run_apache_rat_checks()
+    # run_apache_rat_checks()

     # style checks
     if not changed_files or any(f.endswith(".scala")
                                 or f.endswith("scalastyle-config.xml")
                                 for f in changed_files):
-        run_scala_style_checks(extra_profiles)
+        # run_scala_style_checks(extra_profiles)
+        pass
     should_run_java_style_checks = False
     if not changed_files or any(f.endswith(".java")
                                 or f.endswith("checkstyle.xml")
@@ -636,20 +637,23 @@ def main():
                                 or f.endswith("tox.ini")
                                 or f.endswith(".py")
                                 for f in changed_files):
-        run_python_style_checks()
+        # run_python_style_checks()
+        pass
     if not changed_files or any(f.endswith(".R")
                                 or f.endswith("lint-r")
                                 or f.endswith(".lintr")
                                 for f in changed_files):
-        run_sparkr_style_checks()
+        # run_sparkr_style_checks()
+        pass

     # determine if docs were changed and if we're inside the amplab environment
     # note - the below commented out until *all* Jenkins workers can get `jekyll` installed
     # if "DOCS" in changed_modules and test_env == "amplab_jenkins":
     #    build_spark_documentation()

     if any(m.should_run_build_tests for m in test_modules):
-        run_build_tests()
+        # run_build_tests()
+        pass

     # spark build
     build_apache_spark(build_tool, extra_profiles)
@@ -663,18 +667,19 @@
     build_spark_assembly_sbt(extra_profiles, should_run_java_style_checks)

     # run the test suites
-    run_scala_tests(build_tool, extra_profiles, test_modules, excluded_tags)
+    # run_scala_tests(build_tool, extra_profiles, test_modules, excluded_tags)

     modules_with_python_tests = [m for m in test_modules if m.python_test_goals]
     if modules_with_python_tests:
         # We only run PySpark tests with coverage report in one specific job with
         # Spark master with SBT in Jenkins.
         is_sbt_master_job = "SPARK_MASTER_SBT_HADOOP_2_7" in os.environ
-        run_python_tests(
-            modules_with_python_tests, opts.parallelism, with_coverage=is_sbt_master_job)
+        # run_python_tests(
+        #     modules_with_python_tests, opts.parallelism, with_coverage=is_sbt_master_job)
         run_python_packaging_tests()
     if any(m.should_run_r_tests for m in test_modules):
-        run_sparkr_tests()
+        # run_sparkr_tests()
+        pass


 def _test():
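
Each disabled call in run-tests.py is commented out rather than deleted, with a bare pass added wherever the call was the only statement in an if-body, since Python does not allow an empty block. A minimal sketch of the pattern; the names below are illustrative stand-ins, not code from the PR:

    # Hypothetical reduction of the comment-out-and-pass pattern used above.
    changed_files = []  # stand-in for the real changed-file list

    if not changed_files:
        # run_scala_style_checks()  # step temporarily disabled
        pass  # keeps the if-body non-empty; without it, parsing fails

    print("style checks skipped")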