#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import py4j.protocol
from py4j.protocol import Py4JJavaError
from py4j.java_gateway import JavaObject
from py4j.java_collections import JavaArray, JavaList

from pyspark import RDD, SparkContext
from pyspark.serializers import PickleSerializer, AutoBatchedSerializer
from pyspark.sql import DataFrame, SQLContext

# Hack to support float('inf') in Py4j
_old_smart_decode = py4j.protocol.smart_decode

_float_str_mapping = {
    'nan': 'NaN',
    'inf': 'Infinity',
    '-inf': '-Infinity',
}
 

def _new_smart_decode(obj):
    if isinstance(obj, float):
        s = str(obj)
        return _float_str_mapping.get(s, s)
    return _old_smart_decode(obj)

py4j.protocol.smart_decode = _new_smart_decode
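
# Illustrative sketch (not part of the original module): with the patch in
# place, Python's special float values decode to the spellings that Java's
# Double.parseDouble() accepts, while ordinary floats pass through unchanged:
#
#   >>> _new_smart_decode(float('inf'))
#   'Infinity'
#   >>> _new_smart_decode(float('-inf'))
#   '-Infinity'
#   >>> _new_smart_decode(1.5)
#   '1.5'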

 

 

_picklable_classes = [
    'LinkedList',
    'SparseVector',
    'DenseVector',
    'DenseMatrix',
    'Rating',
    'LabeledPoint',
]
 

# this will call the MLlib version of pythonToJava()
def _to_java_object_rdd(rdd):
    """ Return a JavaRDD of Object by unpickling

    It will convert each Python object into a Java object by Pyrolite,
    whether or not the RDD is serialized in batch.
    """
    rdd = rdd._reserialize(AutoBatchedSerializer(PickleSerializer()))
    return rdd.ctx._jvm.org.apache.spark.mllib.api.python.SerDe.pythonToJava(rdd._jrdd, True)
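
# Illustrative sketch (assumes a live SparkContext named `sc` with Spark's
# MLlib classes on the JVM classpath): the returned py4j handle wraps a
# JavaRDD of JVM objects, so Java-side MLlib code can consume it directly.
#
#   python_rdd = sc.parallelize([(1.0, 2.0), (3.0, 4.0)])
#   java_rdd = _to_java_object_rdd(python_rdd)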

 

 

def _py2java(sc, obj):
    """ Convert a Python object into a Java object """
    if isinstance(obj, RDD):
        obj = _to_java_object_rdd(obj)
    elif isinstance(obj, DataFrame):
        obj = obj._jdf
    elif isinstance(obj, SparkContext):
        obj = obj._jsc
    elif isinstance(obj, list):
        obj = [_py2java(sc, x) for x in obj]
    elif isinstance(obj, JavaObject):
        pass
    elif isinstance(obj, (int, float, bool, bytes, str)):
        pass
    else:
        data = bytearray(PickleSerializer().dumps(obj))
        obj = sc._jvm.org.apache.spark.mllib.api.python.SerDe.loads(data)
    return obj
 

def _java2py(sc, r, encoding="bytes"):
    if isinstance(r, JavaObject):
        clsName = r.getClass().getSimpleName()
        # convert RDD into JavaRDD
        if clsName != 'JavaRDD' and clsName.endswith("RDD"):
            r = r.toJavaRDD()
            clsName = 'JavaRDD'

        if clsName == 'JavaRDD':
            jrdd = sc._jvm.org.apache.spark.mllib.api.python.SerDe.javaToPython(r)
            return RDD(jrdd, sc)

        if clsName == 'Dataset':
            return DataFrame(r, SQLContext.getOrCreate(sc))

        if clsName in _picklable_classes:
            r = sc._jvm.org.apache.spark.mllib.api.python.SerDe.dumps(r)
        elif isinstance(r, (JavaArray, JavaList)):
            try:
                r = sc._jvm.org.apache.spark.mllib.api.python.SerDe.dumps(r)
            except Py4JJavaError:
                pass  # not picklable

    if isinstance(r, (bytearray, bytes)):
        r = PickleSerializer().loads(bytes(r), encoding=encoding)
    return r
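
# Illustrative sketch (assumes a live SparkContext `sc`): _py2java and
# _java2py are intended to be inverses for supported types, so a round trip
# through the JVM preserves the value.
#
#   from pyspark.mllib.linalg import DenseVector
#   v = DenseVector([1.0, 2.0])
#   jv = _py2java(sc, v)          # pickled on the Python side, loaded by SerDe
#   assert _java2py(sc, jv) == v  # dumped by SerDe, unpickled back in Python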

 

 

def callJavaFunc(sc, func, *args):
    """ Call Java Function """
    args = [_py2java(sc, a) for a in args]
    return _java2py(sc, func(*args))
 

def callMLlibFunc(name, *args):
    """ Call API in PythonMLLibAPI """
    sc = SparkContext.getOrCreate()
    api = getattr(sc._jvm.PythonMLLibAPI(), name)
    return callJavaFunc(sc, api, *args)
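
# Illustrative sketch ("someApiMethod" is a hypothetical method name; assumes
# a live SparkContext): callMLlibFunc looks up the named method on the
# JVM-side PythonMLLibAPI helper, converts every argument with _py2java,
# invokes it, and converts the result back with _java2py:
#
#   result = callMLlibFunc("someApiMethod", some_rdd, 10, 0.01)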

 

 

class JavaModelWrapper(object):
    """
    Wrapper for a model in the JVM
    """
    def __init__(self, java_model):
        self._sc = SparkContext.getOrCreate()
        self._java_model = java_model

    def __del__(self):
        self._sc._gateway.detach(self._java_model)

    def call(self, name, *a):
        """Call method of java_model"""
        return callJavaFunc(self._sc, getattr(self._java_model, name), *a)
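
# Illustrative sketch (hypothetical subclass; assumes the wrapped JVM model
# exposes a `predict` method): concrete wrappers forward to Java model
# methods through self.call, which handles the Python<->Java conversions.
#
#   class MyModel(JavaModelWrapper):
#       def predict(self, x):
#           return self.call("predict", x)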

 

 

def inherit_doc(cls):
    """
    A decorator that makes a class inherit documentation from its parents.
    """
    for name, func in vars(cls).items():
        # only inherit docstring for public functions
        if name.startswith("_"):
            continue
        if not func.__doc__:
            for parent in cls.__bases__:
                parent_func = getattr(parent, name, None)
                if parent_func and getattr(parent_func, "__doc__", None):
                    func.__doc__ = parent_func.__doc__
                    break
    return cls
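
# Illustrative sketch (not part of the original module): the decorator copies
# a parent's docstring onto an undocumented public override.
#
#   class Base(object):
#       def fit(self):
#           """Fit the model."""
#
#   @inherit_doc
#   class Child(Base):
#       def fit(self):
#           pass
#
#   assert Child.fit.__doc__ == "Fit the model."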