[PySpark] Fix tests with Python 2.6 in 1.0 branch
[SPARK-2951] [PySpark] Support unpickling array.array for Python 2.6

Pyrolite cannot unpickle array.array objects that were pickled by Python 2.6; this patch fixes that by extending Pyrolite. There is also a bug in Pyrolite when unpickling arrays of float/double, which this patch works around by reversing the endianness for float/double. The workaround should be removed once Pyrolite publishes a release that fixes the issue.

[PySpark] [SPARK-2954] [SPARK-2948] [SPARK-2910] [SPARK-2101] Python 2.6 fixes

- Modify python/run-tests to test with Python 2.6.
- Use unittest2 when running on Python 2.6.
- Fix issue with namedtuple.
- Skip TestOutputFormat.test_newhadoop on Python 2.6 until SPARK-2951 is fixed.
- Fix MLlib _deserialize_double on Python 2.6.

[SPARK-3867][PySpark] ./python/run-tests fails when run with Python 2.6 and unittest2 is not installed

./python/run-tests searches for a Python 2.6 executable on PATH and uses it if available. When running under Python 2.6, it imports the unittest2 module, which is not part of the Python 2.6 standard library, so the run fails with ImportError.

Author: cocoatomo <cocoatomo77@gmail.com>
Author: Josh Rosen <joshrosen@apache.org>
Author: Davies Liu <davies.liu@gmail.com>
Author: Davies Liu <davies@databricks.com>

Closes #3668 from davies/port_2365 and squashes the following commits:

b32583d [Davies Liu] rollback _common.py
bda1c72 [cocoatomo] [SPARK-3867][PySpark] ./python/run-tests failed when it run with Python 2.6 and unittest2 is not installed
14ad3d9 [Josh Rosen] [PySpark] [SPARK-2954] [SPARK-2948] [SPARK-2910] [SPARK-2101] Python 2.6 Fixes
7c55cff [Davies Liu] [SPARK-2951] [PySpark] support unpickle array.array for Python 2.6
Showing 5 changed files with 115 additions and 4 deletions.
core/src/main/scala/org/apache/spark/api/python/SerDeUtil.scala (83 additions, 0 deletions)
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.api.python

import java.nio.ByteOrder

import scala.collection.JavaConversions._
import scala.util.Failure
import scala.util.Try

import net.razorvine.pickle.{Unpickler, Pickler}

import org.apache.spark.{Logging, SparkException}
import org.apache.spark.rdd.RDD

/** Utilities for serialization / deserialization between Python and Java, using Pickle. */
private[python] object SerDeUtil extends Logging {
  // Unpickle array.array generated by Python 2.6
  class ArrayConstructor extends net.razorvine.pickle.objects.ArrayConstructor {
    //  /* Description of types */
    //  static struct arraydescr descriptors[] = {
    //    {'c', sizeof(char), c_getitem, c_setitem},
    //    {'b', sizeof(char), b_getitem, b_setitem},
    //    {'B', sizeof(char), BB_getitem, BB_setitem},
    //  #ifdef Py_USING_UNICODE
    //    {'u', sizeof(Py_UNICODE), u_getitem, u_setitem},
    //  #endif
    //    {'h', sizeof(short), h_getitem, h_setitem},
    //    {'H', sizeof(short), HH_getitem, HH_setitem},
    //    {'i', sizeof(int), i_getitem, i_setitem},
    //    {'I', sizeof(int), II_getitem, II_setitem},
    //    {'l', sizeof(long), l_getitem, l_setitem},
    //    {'L', sizeof(long), LL_getitem, LL_setitem},
    //    {'f', sizeof(float), f_getitem, f_setitem},
    //    {'d', sizeof(double), d_getitem, d_setitem},
    //    {'\0', 0, 0, 0} /* Sentinel */
    //  };
    // TODO: support Py_UNICODE with 2 bytes
    // FIXME: unpickle array of float is wrong in Pyrolite, so we reverse the
    // machine code for float/double here to workaround it.
    // we should fix this after Pyrolite fix them
    val machineCodes: Map[Char, Int] = if (ByteOrder.nativeOrder().equals(ByteOrder.BIG_ENDIAN)) {
      Map('c' -> 1, 'B' -> 0, 'b' -> 1, 'H' -> 3, 'h' -> 5, 'I' -> 7, 'i' -> 9,
        'L' -> 11, 'l' -> 13, 'f' -> 14, 'd' -> 16, 'u' -> 21
      )
    } else {
      Map('c' -> 1, 'B' -> 0, 'b' -> 1, 'H' -> 2, 'h' -> 4, 'I' -> 6, 'i' -> 8,
        'L' -> 10, 'l' -> 12, 'f' -> 15, 'd' -> 17, 'u' -> 20
      )
    }

    override def construct(args: Array[Object]): Object = {
      if (args.length == 1) {
        construct(args ++ Array(""))
      } else if (args.length == 2 && args(1).isInstanceOf[String]) {
        val typecode = args(0).asInstanceOf[String].charAt(0)
        val data: String = args(1).asInstanceOf[String]
        construct(typecode, machineCodes(typecode), data.getBytes("ISO-8859-1"))
      } else {
        super.construct(args)
      }
    }
  }

  def initialize() = {
    Unpickler.registerConstructor("array", "array", new ArrayConstructor())
  }
}
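For context, here is a minimal sketch (not part of the commit) of how the registered constructor is meant to be exercised. It assumes a pickle stream produced by Python 2.6, for example by pickle.dumps(array.array('d', [1.0, 2.0])), and it places the example inside the org.apache.spark.api.python package because SerDeUtil is private[python]; SerDeUtilArrayExample and loadPickledBytes are made up for the illustration.

package org.apache.spark.api.python

import net.razorvine.pickle.Unpickler

// Illustrative only; not part of the commit above.
object SerDeUtilArrayExample {
  def main(args: Array[String]): Unit = {
    // Register the custom array.array constructor defined in SerDeUtil.
    SerDeUtil.initialize()

    // pickledBytes is assumed to hold a stream produced by Python 2.6;
    // how it reaches the JVM (worker socket, file, ...) is outside this sketch.
    val pickledBytes: Array[Byte] = loadPickledBytes()

    // With the constructor registered, Pyrolite can rebuild the Python array;
    // for typecode 'd' the result is expected to be a JVM double[] (an
    // assumption of this sketch, not something the commit states).
    val unpickled = new Unpickler().loads(pickledBytes)
    println(unpickled)
  }

  // Hypothetical helper standing in for whatever transport delivers the bytes.
  private def loadPickledBytes(): Array[Byte] = ???
}

In real PySpark this registration happens once on the JVM side before pickled data from Python workers is deserialized; the sketch only shows the registration plus a single Unpickler.loads call.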