Fix concurrency issue due to static variable in LazyBinaryUtils
cengle committed May 4, 2012
1 parent 31be6d8 commit 8920f1d
Showing 4 changed files with 8 additions and 7 deletions.
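Why this change is needed: LazyBinaryUtils.checkObjectByteInfo decoded variable-length integers into a single static VInt scratch object, so two threads deserializing LazyBinary data at the same time could overwrite each other's intermediate values and come away with wrong element offsets and sizes. The diff removes the static field and instead has every caller pass in its own VInt. The sketch below only illustrates that pattern, with made-up names (VIntScratch, decodeUnsafe, decodeSafe) standing in for the Hive classes; it is not the Hive code itself.

  // Hypothetical stand-ins, not Hive classes: VIntScratch plays the role of
  // LazyBinaryUtils.VInt, a mutable holder that a decoder fills in so it can
  // "return" two values (value and encoded length) without allocating.
  public class StaticScratchRace {

    static final class VIntScratch {
      int value;
      int length;
    }

    // Old pattern: one static scratch object shared by every caller.
    static final VIntScratch SHARED = new VIntScratch();

    static int decodeUnsafe(int input) {
      SHARED.value = input;   // thread A stores its result here...
      Thread.yield();         // ...thread B may overwrite it...
      return SHARED.value;    // ...so A can return B's result.
    }

    // New pattern: the caller owns the scratch object, so concurrent callers
    // never share mutable state.
    static int decodeSafe(int input, VIntScratch scratch) {
      scratch.value = input;
      Thread.yield();
      return scratch.value;
    }

    public static void main(String[] args) throws InterruptedException {
      Runnable worker = () -> {
        VIntScratch own = new VIntScratch();
        for (int i = 0; i < 1_000_000; i++) {
          if (decodeUnsafe(i) != i) {
            System.out.println("shared static scratch corrupted a read at i=" + i);
            return;
          }
          if (decodeSafe(i, own) != i) {  // never fires: 'own' is thread-confined
            System.out.println("per-caller scratch corrupted a read (unexpected)");
            return;
          }
        }
        System.out.println("the unsafe path happened not to collide in this run");
      };
      Thread a = new Thread(worker);
      Thread b = new Thread(worker);
      a.start(); b.start();
      a.join(); b.join();
    }
  }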
LazyBinaryArray.java
@@ -149,7 +149,7 @@ private void parse() {
if ((bytes[nullByteCur] & (1 << (i % 8))) != 0) {
elementIsNull[i] = false;
LazyBinaryUtils.checkObjectByteInfo(listEleObjectInspector, bytes,
- lastElementByteEnd, recordInfo);
+ lastElementByteEnd, recordInfo, vInt);
elementStart[i] = lastElementByteEnd + recordInfo.elementOffset;
elementLength[i] = recordInfo.elementSize;
lastElementByteEnd = elementStart[i] + elementLength[i];
LazyBinaryMap.java
@@ -163,7 +163,7 @@ private void parse() {
if ((bytes[nullByteCur] & (1 << ((i * 2) % 8))) != 0) {
keyIsNull[i] = false;
LazyBinaryUtils.checkObjectByteInfo(((MapObjectInspector) oi)
- .getMapKeyObjectInspector(), bytes, lastElementByteEnd, recordInfo);
+ .getMapKeyObjectInspector(), bytes, lastElementByteEnd, recordInfo, vInt);
keyStart[i] = lastElementByteEnd + recordInfo.elementOffset;
keyLength[i] = recordInfo.elementSize;
lastElementByteEnd = keyStart[i] + keyLength[i];
@@ -178,7 +178,7 @@ private void parse() {
valueIsNull[i] = false;
LazyBinaryUtils.checkObjectByteInfo(((MapObjectInspector) oi)
.getMapValueObjectInspector(), bytes, lastElementByteEnd,
- recordInfo);
+ recordInfo, vInt);
valueStart[i] = lastElementByteEnd + recordInfo.elementOffset;
valueLength[i] = recordInfo.elementSize;
lastElementByteEnd = valueStart[i] + valueLength[i];
LazyBinaryStruct.java
@@ -25,6 +25,7 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils.RecordInfo;
+ import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils.VInt;
import org.apache.hadoop.hive.serde2.lazybinary.objectinspector.LazyBinaryStructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
@@ -46,6 +47,8 @@ public class LazyBinaryStruct extends

private static Log LOG = LogFactory.getLog(LazyBinaryStruct.class.getName());

+ private VInt vInt = new VInt();

/**
* Whether the data is already parsed or not.
*/
@@ -129,7 +132,7 @@ private void parse() {
if ((nullByte & (1 << (i % 8))) != 0) {
fieldIsNull[i] = false;
LazyBinaryUtils.checkObjectByteInfo(fieldRefs.get(i)
- .getFieldObjectInspector(), bytes, lastFieldByteEnd, recordInfo);
+ .getFieldObjectInspector(), bytes, lastFieldByteEnd, recordInfo, vInt);
fieldStart[i] = lastFieldByteEnd + recordInfo.elementOffset;
fieldLength[i] = recordInfo.elementSize;
lastFieldByteEnd = fieldStart[i] + fieldLength[i];
LazyBinaryUtils.java
@@ -122,8 +122,6 @@ public String toString() {
}
}

- static VInt vInt = new LazyBinaryUtils.VInt();

/**
* Check a particular field and set its size and offset in bytes based on the
* field type and the bytes arrays.
@@ -144,7 +142,7 @@ public String toString() {
* modify this byteinfo object and return it
*/
public static void checkObjectByteInfo(ObjectInspector objectInspector,
- byte[] bytes, int offset, RecordInfo recordInfo) {
+ byte[] bytes, int offset, RecordInfo recordInfo, VInt vInt) {
Category category = objectInspector.getCategory();
switch (category) {
case PRIMITIVE:
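A note on the design choice here, which is my reading of the diff rather than anything stated in the commit: allocating a fresh VInt inside checkObjectByteInfo on each call would create garbage in the innermost deserialization loop, and a ThreadLocal lookup would add per-field overhead, so the scratch object is supplied by the caller instead. LazyBinaryStruct gains a private per-instance VInt above, and the array and map call sites pass a vInt they evidently already hold, since no new field or import was needed in those files. This stays thread-safe as long as a single LazyBinary container instance is not parsed by two threads at once, which appears to be the existing usage contract.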
