diff --git a/python/pyspark/java_gateway.py b/python/pyspark/java_gateway.py
index fd85e07bf3a50..af6109dc2ab69 100644
--- a/python/pyspark/java_gateway.py
+++ b/python/pyspark/java_gateway.py
@@ -102,7 +102,10 @@ def run(self):
     EchoOutputThread(proc.stdout).start()
 
     # Connect to the gateway
-    gateway = JavaGateway(GatewayClient(port=gateway_port), auto_convert=False, start_callback_server=True)
+    # If start_callback_server is True, it looks like the callback server is not killed;
+    # the process hangs and the test case does not move forward.
+    # gateway = JavaGateway(GatewayClient(port=gateway_port), auto_convert=False, start_callback_server=True)
+    gateway = JavaGateway(GatewayClient(port=gateway_port), auto_convert=False, start_callback_server=False)
 
     # Import the classes used by PySpark
     java_import(gateway.jvm, "org.apache.spark.SparkConf")
diff --git a/python/pyspark/streaming/context.py b/python/pyspark/streaming/context.py
index 3f455a3e06072..0e32b80cbee31 100644
--- a/python/pyspark/streaming/context.py
+++ b/python/pyspark/streaming/context.py
@@ -18,6 +18,11 @@
 import sys
 from signal import signal, SIGTERM, SIGINT
+import time
+
+from pyspark.conf import SparkConf
+from pyspark.files import SparkFiles
+from pyspark.java_gateway import launch_gateway
 
 from pyspark.serializers import PickleSerializer, BatchedSerializer, UTF8Deserializer
 from pyspark.context import SparkContext
 from pyspark.streaming.dstream import DStream
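
For reference, a minimal sketch of the gateway setup this patch ends up with, i.e. connecting to an already-running GatewayServer with the Py4J callback server left off so the Python side can exit cleanly. This is not part of the patch: the `connect_gateway` helper and the fixed port 25333 are illustrative assumptions; the only calls used are the ones already in the diff plus Py4J's `gateway.shutdown()`.

```python
# Sketch only, not part of the patch. Assumes the Py4J 0.8-style API used in
# this diff; connect_gateway() and the port number are hypothetical.
from py4j.java_gateway import GatewayClient, JavaGateway, java_import


def connect_gateway(gateway_port):
    # Mirror the new call in java_gateway.py: no callback server, so the
    # Python process (and the test run) can finish without hanging.
    gateway = JavaGateway(GatewayClient(port=gateway_port),
                          auto_convert=False,
                          start_callback_server=False)
    java_import(gateway.jvm, "org.apache.spark.SparkConf")
    return gateway


if __name__ == "__main__":
    gateway = connect_gateway(25333)  # hypothetical port, for illustration
    try:
        conf = gateway.jvm.org.apache.spark.SparkConf()  # use the imported class
    finally:
        gateway.shutdown()  # close client connections; no callback server is left running
```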