Skip to content

Commit

Permalink
Merge pull request #285 from v-afrafi/TVPExceptions
Browse files Browse the repository at this point in the history
Throw SQLServer exception in case of invalid value for TVP
  • Loading branch information
AfsanehR-zz authored May 10, 2017
2 parents 10f0104 + 891a84a commit c706d55
Show file tree
Hide file tree
Showing 3 changed files with 145 additions and 143 deletions.
282 changes: 142 additions & 140 deletions src/main/java/com/microsoft/sqlserver/jdbc/IOBuffer.java
Original file line number Diff line number Diff line change
Expand Up @@ -4606,151 +4606,150 @@ void writeTVPRows(TVP value) throws SQLServerException {
}
}
}
switch (jdbcType) {
case BIGINT:
if (null == currentColumnStringValue)
writeByte((byte) 0);
else {
writeByte((byte) 8);
writeLong(Long.valueOf(currentColumnStringValue).longValue());
}
break;
try {
switch (jdbcType) {
case BIGINT:
if (null == currentColumnStringValue)
writeByte((byte) 0);
else {
writeByte((byte) 8);
writeLong(Long.valueOf(currentColumnStringValue).longValue());
}
break;

case BIT:
if (null == currentColumnStringValue)
writeByte((byte) 0);
else {
writeByte((byte) 1);
writeByte((byte) (Boolean.valueOf(currentColumnStringValue).booleanValue() ? 1 : 0));
}
break;
case BIT:
if (null == currentColumnStringValue)
writeByte((byte) 0);
else {
writeByte((byte) 1);
writeByte((byte) (Boolean.valueOf(currentColumnStringValue).booleanValue() ? 1 : 0));
}
break;

case INTEGER:
if (null == currentColumnStringValue)
writeByte((byte) 0);
else {
writeByte((byte) 4);
writeInt(Integer.valueOf(currentColumnStringValue).intValue());
}
break;
case INTEGER:
if (null == currentColumnStringValue)
writeByte((byte) 0);
else {
writeByte((byte) 4);
writeInt(Integer.valueOf(currentColumnStringValue).intValue());
}
break;

case SMALLINT:
case TINYINT:
if (null == currentColumnStringValue)
writeByte((byte) 0);
else {
writeByte((byte) 2); // length of datatype
writeShort(Short.valueOf(currentColumnStringValue).shortValue());
}
break;
case SMALLINT:
case TINYINT:
if (null == currentColumnStringValue)
writeByte((byte) 0);
else {
writeByte((byte) 2); // length of datatype
writeShort(Short.valueOf(currentColumnStringValue).shortValue());
}
break;

case DECIMAL:
case NUMERIC:
if (null == currentColumnStringValue)
writeByte((byte) 0);
else {
writeByte((byte) TDSWriter.BIGDECIMAL_MAX_LENGTH); // maximum length
BigDecimal bdValue = new BigDecimal(currentColumnStringValue);


/*
* Set the scale of every BigDecimal value based on the column metadata, as scale is not sent separately for each
* individual value. Use the rounding mode used by the server. For example, for BigDecimal("0.1"), if the scale in the
* metadata is 0, an ArithmeticException would be thrown if no RoundingMode were set.
*/
bdValue = bdValue.setScale(columnPair.getValue().scale, RoundingMode.HALF_UP);

byte[] valueBytes = DDC.convertBigDecimalToBytes(bdValue, bdValue.scale());

// 1-byte for sign and 16-byte for integer
byte[] byteValue = new byte[17];

// removing the precision and scale information from the valueBytes array
System.arraycopy(valueBytes, 2, byteValue, 0, valueBytes.length - 2);
writeBytes(byteValue);
}
break;
case DECIMAL:
case NUMERIC:
if (null == currentColumnStringValue)
writeByte((byte) 0);
else {
writeByte((byte) TDSWriter.BIGDECIMAL_MAX_LENGTH); // maximum length
BigDecimal bdValue = new BigDecimal(currentColumnStringValue);

/*
* Set the scale of every BigDecimal value based on the column metadata, as scale is not sent separately for each
* individual value. Use the rounding mode used by the server. For example, for BigDecimal("0.1"), if the scale in
* the metadata is 0, an ArithmeticException would be thrown if no RoundingMode were set.
*/
bdValue = bdValue.setScale(columnPair.getValue().scale, RoundingMode.HALF_UP);

byte[] valueBytes = DDC.convertBigDecimalToBytes(bdValue, bdValue.scale());

// 1-byte for sign and 16-byte for integer
byte[] byteValue = new byte[17];

case DOUBLE:
if (null == currentColumnStringValue)
writeByte((byte) 0); // len of data bytes
else {
writeByte((byte) 8); // len of data bytes
long bits = Double.doubleToLongBits(Double.valueOf(currentColumnStringValue).doubleValue());
long mask = 0xFF;
int nShift = 0;
for (int i = 0; i < 8; i++) {
writeByte((byte) ((bits & mask) >> nShift));
nShift += 8;
mask = mask << 8;
// removing the precision and scale information from the valueBytes array
System.arraycopy(valueBytes, 2, byteValue, 0, valueBytes.length - 2);
writeBytes(byteValue);
}
}
break;
break;

case FLOAT:
case REAL:
if (null == currentColumnStringValue)
writeByte((byte) 0); // actual length (0 == null)
else {
writeByte((byte) 4); // actual length
writeInt(Float.floatToRawIntBits(Float.valueOf(currentColumnStringValue).floatValue()));
}
break;
case DOUBLE:
if (null == currentColumnStringValue)
writeByte((byte) 0); // len of data bytes
else {
writeByte((byte) 8); // len of data bytes
long bits = Double.doubleToLongBits(Double.valueOf(currentColumnStringValue).doubleValue());
long mask = 0xFF;
int nShift = 0;
for (int i = 0; i < 8; i++) {
writeByte((byte) ((bits & mask) >> nShift));
nShift += 8;
mask = mask << 8;
}
}
break;

case DATE:
case TIME:
case TIMESTAMP:
case DATETIMEOFFSET:
case TIMESTAMP_WITH_TIMEZONE:
case TIME_WITH_TIMEZONE:
case CHAR:
case VARCHAR:
case NCHAR:
case NVARCHAR:
case LONGVARCHAR:
case LONGNVARCHAR:
case SQLXML:
isShortValue = (2L * columnPair.getValue().precision) <= DataTypes.SHORT_VARTYPE_MAX_BYTES;
isNull = (null == currentColumnStringValue);
dataLength = isNull ? 0 : currentColumnStringValue.length() * 2;
if (!isShortValue) {
// check null
if (isNull)
// Null header for v*max types is 0xFFFFFFFFFFFFFFFF.
writeLong(0xFFFFFFFFFFFFFFFFL);
else if (DataTypes.UNKNOWN_STREAM_LENGTH == dataLength)
// Append v*max length.
// UNKNOWN_PLP_LEN is 0xFFFFFFFFFFFFFFFE
writeLong(0xFFFFFFFFFFFFFFFEL);
else
// For v*max types with known length, length is <totallength8><chunklength4>
writeLong(dataLength);
if (!isNull) {
if (dataLength > 0) {
writeInt(dataLength);
writeString(currentColumnStringValue);
case FLOAT:
case REAL:
if (null == currentColumnStringValue)
writeByte((byte) 0); // actual length (0 == null)
else {
writeByte((byte) 4); // actual length
writeInt(Float.floatToRawIntBits(Float.valueOf(currentColumnStringValue).floatValue()));
}
break;

case DATE:
case TIME:
case TIMESTAMP:
case DATETIMEOFFSET:
case TIMESTAMP_WITH_TIMEZONE:
case TIME_WITH_TIMEZONE:
case CHAR:
case VARCHAR:
case NCHAR:
case NVARCHAR:
case LONGVARCHAR:
case LONGNVARCHAR:
case SQLXML:
isShortValue = (2L * columnPair.getValue().precision) <= DataTypes.SHORT_VARTYPE_MAX_BYTES;
isNull = (null == currentColumnStringValue);
dataLength = isNull ? 0 : currentColumnStringValue.length() * 2;
if (!isShortValue) {
// check null
if (isNull)
// Null header for v*max types is 0xFFFFFFFFFFFFFFFF.
writeLong(0xFFFFFFFFFFFFFFFFL);
else if (DataTypes.UNKNOWN_STREAM_LENGTH == dataLength)
// Append v*max length.
// UNKNOWN_PLP_LEN is 0xFFFFFFFFFFFFFFFE
writeLong(0xFFFFFFFFFFFFFFFEL);
else
// For v*max types with known length, length is <totallength8><chunklength4>
writeLong(dataLength);
if (!isNull) {
if (dataLength > 0) {
writeInt(dataLength);
writeString(currentColumnStringValue);
}
// Send the terminator PLP chunk.
writeInt(0);
}
// Send the terminator PLP chunk.
writeInt(0);
}
}
else {
if (isNull)
writeShort((short) -1); // actual len
else {
writeShort((short) dataLength);
writeString(currentColumnStringValue);
if (isNull)
writeShort((short) -1); // actual len
else {
writeShort((short) dataLength);
writeString(currentColumnStringValue);
}
}
}
break;

case BINARY:
case VARBINARY:
case LONGVARBINARY:
// Handle conversions as done in other types.
isShortValue = columnPair.getValue().precision <= DataTypes.SHORT_VARTYPE_MAX_BYTES;
isNull = (null == currentObject);
try {
break;

case BINARY:
case VARBINARY:
case LONGVARBINARY:
// Handle conversions as done in other types.
isShortValue = columnPair.getValue().precision <= DataTypes.SHORT_VARTYPE_MAX_BYTES;
isNull = (null == currentObject);
if (currentObject instanceof String)
dataLength = isNull ? 0 : (toByteArray(currentObject.toString())).length;
else
Expand Down Expand Up @@ -4790,14 +4789,17 @@ else if (DataTypes.UNKNOWN_STREAM_LENGTH == dataLength)
writeBytes((byte[]) currentObject);
}
}
}
catch (IllegalArgumentException e) {
throw new SQLServerException(SQLServerException.getErrString("R_TVPInvalidColumnValue"), e);
}
break;
break;

default:
assert false : "Unexpected JDBC type " + jdbcType.toString();
default:
assert false : "Unexpected JDBC type " + jdbcType.toString();
}
}
catch (IllegalArgumentException e) {
throw new SQLServerException(SQLServerException.getErrString("R_errorConvertingValue"), e);
}
catch (ArrayIndexOutOfBoundsException e) {
throw new SQLServerException(SQLServerException.getErrString("R_CSVDataSchemaMismatch"), e);
}
currentColumn++;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -482,7 +482,7 @@ public Object[] getRowData() throws SQLServerException {

// Source header has more columns than current line read
if (columnNames != null && (columnNames.length > data.length)) {
MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_BulkCSVDataSchemaMismatch"));
MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_CSVDataSchemaMismatch"));
Object[] msgArgs = {};
throw new SQLServerException(form.format(msgArgs), SQLState.COL_NOT_FOUND, DriverError.NOT_SET, null);
}
Expand Down Expand Up @@ -651,7 +651,7 @@ else if (dateTimeFormatter != null)
throw new SQLServerException(form.format(new Object[] {value, JDBCType.of(cm.columnType)}), null, 0, e);
}
catch (ArrayIndexOutOfBoundsException e) {
throw new SQLServerException(SQLServerException.getErrString("R_BulkCSVDataSchemaMismatch"), e);
throw new SQLServerException(SQLServerException.getErrString("R_CSVDataSchemaMismatch"), e);
}

}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -224,7 +224,7 @@ protected Object[][] getContents() {
{"R_invalidTransactionOption", "UseInternalTransaction option can not be set to TRUE when used with a Connection object."},
{"R_invalidNegativeArg", "The {0} argument cannot be negative."},
{"R_BulkColumnMappingsIsEmpty", "Cannot perform bulk copy operation if the only mapping is an identity column and KeepIdentity is set to false."},
{"R_BulkCSVDataSchemaMismatch", "Source data does not match source schema."},
{"R_CSVDataSchemaMismatch", "Source data does not match source schema."},
{"R_BulkCSVDataDuplicateColumn", "Duplicate column names are not allowed."},
{"R_invalidColumnOrdinal", "Column {0} is invalid. Column number should be greater than zero."},
{"R_unsupportedEncoding", "The encoding {0} is not supported."},
Expand Down

0 comments on commit c706d55

Please sign in to comment.