#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import tempfile
import unittest

from pyspark.install import install_spark, DEFAULT_HADOOP, DEFAULT_HIVE, \
    UNSUPPORTED_COMBINATIONS, checked_versions, checked_package_name


class SparkInstallationTestCase(unittest.TestCase):

    def test_install_spark(self):
        # Test only one case. Testing this is expensive because it needs to download
        # the Spark distribution.
        spark_version, hadoop_version, hive_version = checked_versions(
            "3.0.1", DEFAULT_HADOOP, DEFAULT_HIVE)
        with tempfile.TemporaryDirectory() as tmp_dir:
            install_spark(
                dest=tmp_dir, spark_version=spark_version,
                hadoop_version=hadoop_version, hive_version=hive_version)
"spark-3.0.0-bin-hadoop3.2", checked_package_name("spark-3.0.0", "hadoop3.2", "hive2.3"))

    def test_checked_versions(self):
        test_version = "3.0.1"  # Any valid Spark version works for the negative cases below.

        # Positive test cases
        self.assertEqual(("spark-3.0.0", "hadoop2.7", "hive2.3"),
                         checked_versions("spark-3.0.0", "hadoop2.7", "hive2.3"))
        self.assertEqual(("spark-3.0.0", "hadoop2.7", "hive2.3"),
                         checked_versions("3.0.0", "2.7", "2.3"))
        self.assertEqual(("spark-2.4.1", "without-hadoop", "hive2.3"),
                         checked_versions("2.4.1", "without", "2.3"))
        self.assertEqual(("spark-3.0.1", "without-hadoop", "hive2.3"),
                         checked_versions("spark-3.0.1", "without-hadoop", "hive2.3"))

        # Negative test cases
        for hadoop_version, hive_version in UNSUPPORTED_COMBINATIONS:
            with self.assertRaisesRegex(RuntimeError, 'Hive.*should.*Hadoop'):
                checked_versions(
                    spark_version=test_version, hadoop_version=hadoop_version,
                    hive_version=hive_version)
spark_version="malformed", hadoop_version=DEFAULT_HADOOP, hive_version=DEFAULT_HIVE)
spark_version=test_version, hadoop_version="malformed", hive_version=DEFAULT_HIVE)
spark_version=test_version, hadoop_version=DEFAULT_HADOOP, hive_version="malformed")
spark_version=test_version, hadoop_version="hadoop3.2", hive_version="hive1.2")
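

# A minimal sketch of how the helpers exercised above fit together, kept outside
# the test class for illustration only; it assumes the pyspark.install import at
# the top of this file and uses only input/output pairs asserted in the tests.
# The function name below is illustrative, not part of the test suite's API.
def _demo_version_resolution():
    # Loosely specified versions are normalized into canonical component names.
    spark_version, hadoop_version, hive_version = checked_versions("3.0.0", "2.7", "2.3")
    assert (spark_version, hadoop_version, hive_version) == \
        ("spark-3.0.0", "hadoop2.7", "hive2.3")

    # The canonical names determine the name of the distribution package to download.
    assert checked_package_name("spark-3.0.0", "hadoop3.2", "hive2.3") == \
        "spark-3.0.0-bin-hadoop3.2"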