#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os

from pyspark import SparkContext
from pyspark.streaming import StreamingContext
from pyspark.testing.utils import search_jar

# Must be same as the variable and condition defined in KinesisTestUtils.scala and modules.py
kinesis_test_environ_var = "ENABLE_KINESIS_TESTS"
should_skip_kinesis_tests = not os.environ.get(kinesis_test_environ_var) == "1"

if should_skip_kinesis_tests:
    kinesis_requirement_message = (
        "Skipping all Kinesis Python tests as environmental variable 'ENABLE_KINESIS_TESTS' "
        "was not set."
    )
else:
    kinesis_asl_assembly_jar = search_jar(
        "external/kinesis-asl-assembly",
        "spark-streaming-kinesis-asl-assembly-",
        "spark-streaming-kinesis-asl-assembly_",
    )
    if kinesis_asl_assembly_jar is None:
        kinesis_requirement_message = (  # type: ignore
            "Skipping all Kinesis Python tests as the optional Kinesis project was "
            "not compiled into a JAR. To run these tests, "
            "you need to build Spark with 'build/sbt -Pkinesis-asl assembly/package "
            "streaming-kinesis-asl-assembly/assembly' or "
            "'build/mvn -Pkinesis-asl package' before running this test."
        )
    else:
        existing_args = os.environ.get("PYSPARK_SUBMIT_ARGS", "pyspark-shell")
        jars_args = "--jars %s" % kinesis_asl_assembly_jar
        os.environ["PYSPARK_SUBMIT_ARGS"] = " ".join([jars_args, existing_args])
        kinesis_requirement_message = None  # type: ignore
@classmethod
def setUpClass(cls):
@classmethod
def tearDownClass(cls):
    # Clean up in the JVM just in case there have been some issues in the Python API
    jSparkContextOption = SparkContext._jvm.SparkContext.get()
    if jSparkContextOption.nonEmpty():
        jSparkContextOption.get().stop()
# Clean up in the JVM just in case there have been some issues in the Python API
jStreamingContextOption = StreamingContext._jvm.StreamingContext.getActive()
if jStreamingContextOption.nonEmpty():
    jStreamingContextOption.get().stop(False)
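# Note: stop(False) stops only the StreamingContext; the shared SparkContext
# created in setUpClass stays alive for the next test.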
print("timeout after", self.timeout)
""" Return the first `n` elements in the stream (will start and stop). """
""" Collect each RDDs into the returned list.
Returns ------- list which will have the collected items. """
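# Hedged sketch, an assumption consistent with the docstring above: a
# collect-style helper can append each RDD's contents to a shared list via
# foreachRDD and return that list to the caller:
#
#     def _collect(self, dstream, n, block=True):
#         result = []
#
#         def get_output(_, rdd):
#             if rdd and len(result) < n:
#                 result.append(rdd.collect())
#
#         dstream.foreachRDD(get_output)
#         if block:
#             self.ssc.start()
#             self.wait_for(result, n)
#         return result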
""" Parameters ---------- input : list dataset for the test. This should be list of lists. func : function wrapped function. This function should return PythonDStream object. expected expected output for this testcase. """
# Apply test function to stream.
if input2:
    stream = func(input_stream, input_stream2)
else:
    stream = func(input_stream)
"""Sort the list based on first value.""" |