#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

 

import py4j

from pyspark import SparkContext

 

 

class CapturedException(Exception):
    def __init__(self, desc, stackTrace, cause=None):
        self.desc = desc
        self.stackTrace = stackTrace
        self.cause = convert_exception(cause) if cause is not None else None

    def __str__(self):
        sql_conf = SparkContext._jvm.org.apache.spark.sql.internal.SQLConf.get()
        debug_enabled = sql_conf.pysparkJVMStacktraceEnabled()
        desc = self.desc
        if debug_enabled:
            desc = desc + "\n\nJVM stacktrace:\n%s" % self.stackTrace
        return str(desc)
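# Note (not part of the original module): whether the JVM stack trace is
# appended to the message is controlled by a SQL conf; the key is assumed
# here to be "spark.sql.pyspark.jvmStacktrace.enabled". A minimal sketch,
# given an active SparkSession `spark`:
#
#     spark.conf.set("spark.sql.pyspark.jvmStacktrace.enabled", "true")
#     # str() of subsequent CapturedExceptions now includes the JVM stacktrace.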

 

 

class AnalysisException(CapturedException):
    """
    Failed to analyze a SQL query plan.
    """


class ParseException(CapturedException):
    """
    Failed to parse a SQL command.
    """


class IllegalArgumentException(CapturedException):
    """
    Passed an illegal or inappropriate argument.
    """


class StreamingQueryException(CapturedException):
    """
    Exception that stopped a :class:`StreamingQuery`.
    """


class QueryExecutionException(CapturedException):
    """
    Failed to execute a query.
    """


class PythonException(CapturedException):
    """
    Exceptions thrown from Python workers.
    """


class UnknownException(CapturedException):
    """
    None of the above exceptions.
    """

 

 

def convert_exception(e):
    s = e.toString()
    c = e.getCause()
    stacktrace = SparkContext._jvm.org.apache.spark.util.Utils.exceptionString(e)

    if s.startswith('org.apache.spark.sql.AnalysisException: '):
        return AnalysisException(s.split(': ', 1)[1], stacktrace, c)
    if s.startswith('org.apache.spark.sql.catalyst.analysis'):
        return AnalysisException(s.split(': ', 1)[1], stacktrace, c)
    if s.startswith('org.apache.spark.sql.catalyst.parser.ParseException: '):
        return ParseException(s.split(': ', 1)[1], stacktrace, c)
    if s.startswith('org.apache.spark.sql.streaming.StreamingQueryException: '):
        return StreamingQueryException(s.split(': ', 1)[1], stacktrace, c)
    if s.startswith('org.apache.spark.sql.execution.QueryExecutionException: '):
        return QueryExecutionException(s.split(': ', 1)[1], stacktrace, c)
    if s.startswith('java.lang.IllegalArgumentException: '):
        return IllegalArgumentException(s.split(': ', 1)[1], stacktrace, c)
    if c is not None and (
            c.toString().startswith('org.apache.spark.api.python.PythonException: ')
            # To make sure this only catches Python UDFs.
            and any(map(lambda v: "org.apache.spark.sql.execution.python" in v.toString(),
                        c.getStackTrace()))):
        msg = ("\n An exception was thrown from the Python worker. "
               "Please see the stack trace below.\n%s" % c.getMessage())
        return PythonException(msg, stacktrace)
    return UnknownException(s, stacktrace, c)
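# Usage sketch (not part of the original module; assumes an active
# SparkSession `spark` with the handler from install_exception_handler(),
# defined below, already installed, as it is in a normal PySpark session):
#
#     try:
#         spark.sql("SELEC 1")      # malformed SQL
#     except ParseException as e:
#         print(e.desc)             # parser message, without the Java class prefix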

 

 

def capture_sql_exception(f):
    def deco(*a, **kw):
        try:
            return f(*a, **kw)
        except py4j.protocol.Py4JJavaError as e:
            converted = convert_exception(e.java_exception)
            if not isinstance(converted, UnknownException):
                # Hide where the exception came from that shows a non-Pythonic
                # JVM exception message.
                raise converted from None
            else:
                raise
    return deco
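# Decorator sketch (not part of the original module): capture_sql_exception
# can wrap any callable that may raise py4j.protocol.Py4JJavaError;
# install_exception_handler() below applies it to py4j's get_return_value:
#
#     safe_fn = capture_sql_exception(some_fn)   # `some_fn` is hypothetical
#     safe_fn()   # JVM SQL errors now surface as the Python classes above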

 

 

def install_exception_handler():
    """
    Hook an exception handler into Py4J so that certain SQL exceptions raised
    in Java can be captured.

    When the Java API is called, Py4J uses `get_return_value` to parse the
    returned object. If an exception occurred in the JVM, the result is a Java
    exception object and `get_return_value` raises py4j.protocol.Py4JJavaError.
    We replace the original `get_return_value` with one that captures the Java
    exception and raises a Python one (with the same error message).

    It is idempotent and can be called multiple times.
    """
    original = py4j.protocol.get_return_value
    # The original `get_return_value` is not patched, it's idempotent.
    patched = capture_sql_exception(original)
    # only patch the one used in py4j.java_gateway (call Java API)
    py4j.java_gateway.get_return_value = patched
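# Usage sketch (not part of the original module; assumes an active
# SparkSession `spark`): once the handler is installed, JVM-side SQL errors
# surface as the Python exception classes defined above instead of a raw
# py4j.protocol.Py4JJavaError:
#
#     install_exception_handler()
#     try:
#         spark.sql("SELECT * FROM table_that_does_not_exist")
#     except AnalysisException as e:
#         print(e.desc)   # analyzer message, e.g. "Table or view not found: ..."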

 

 

def toJArray(gateway, jtype, arr):
    """
    Convert a Python list to a Java typed array.

    Parameters
    ----------
    gateway :
        Py4J gateway object
    jtype :
        Java type of the elements in the array
    arr :
        Python list to convert
    """
    jarray = gateway.new_array(jtype, len(arr))
    for i in range(0, len(arr)):
        jarray[i] = arr[i]
    return jarray
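# Usage sketch (not part of the original module; assumes an active
# SparkContext `sc`, whose internal `_gateway` attribute is the Py4J gateway):
#
#     gateway = sc._gateway
#     jarr = toJArray(gateway, gateway.jvm.java.lang.String, ["a", "b", "c"])
#     # jarr is a java.lang.String[] usable with JVM-side APIs.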

 

 

def require_test_compiled():
    """ Raise Exception if test classes are not compiled
    """
    import os
    import glob
    try:
        spark_home = os.environ['SPARK_HOME']
    except KeyError:
        raise RuntimeError('SPARK_HOME is not defined in environment')

    test_class_path = os.path.join(
        spark_home, 'sql', 'core', 'target', '*', 'test-classes')
    paths = glob.glob(test_class_path)

    if len(paths) == 0:
        raise RuntimeError(
            "%s doesn't exist. Spark sql test classes are not compiled." % test_class_path)

 

 

class ForeachBatchFunction(object):
    """
    This is the Python implementation of the Java interface 'ForeachBatchFunction'.
    It wraps the user-defined 'foreachBatch' function such that it can be called
    from the JVM when the query is active.
    """

    def __init__(self, sql_ctx, func):
        self.sql_ctx = sql_ctx
        self.func = func

    def call(self, jdf, batch_id):
        from pyspark.sql.dataframe import DataFrame
        try:
            self.func(DataFrame(jdf, self.sql_ctx), batch_id)
        except Exception as e:
            self.error = e
            raise e

    class Java:
        implements = ['org.apache.spark.sql.execution.streaming.sources.PythonForeachBatchFunction']
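# Usage sketch (not part of the original module; assumes an active
# SparkSession `spark` and a streaming DataFrame `df`). Users do not
# construct ForeachBatchFunction directly; DataStreamWriter.foreachBatch
# wraps the Python callback in it:
#
#     def handle_batch(batch_df, batch_id):
#         # "/tmp/out" is a hypothetical sink path
#         batch_df.write.mode("append").parquet("/tmp/out")
#
#     query = df.writeStream.foreachBatch(handle_batch).start()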

 

 

def to_str(value):
    """
    A wrapper over str(), but converts bool values to lower case strings.
    If None is given, just returns None, instead of converting it to string "None".
    """
    if isinstance(value, bool):
        return str(value).lower()
    elif value is None:
        return value
    else:
        return str(value)
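# Examples (not part of the original module):
#
#     to_str(True)    # 'true' (lower-cased, matching JVM boolean literals)
#     to_str(None)    # None (passed through, not the string "None")
#     to_str(42)      # '42'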