From 9a071720b19305cb7e9125a1fd824701c70a7540 Mon Sep 17 00:00:00 2001
From: luxu1-ms <68044595+luxu1-ms@users.noreply.github.com>
Date: Mon, 20 Jun 2022 14:36:07 -0700
Subject: [PATCH] Set appname and add prefix for SQL Server session (#159)

* Set the JDBC application name to "Spark MSSQL Connector" by default

The change also respects the case when the user passes in applicationName
in the `options` Map.

* add prefix for sql server session app name

* modify the form of app name

Co-authored-by: Arvind Shyamsundar
---
 .../com/microsoft/sqlserver/jdbc/spark/DefaultSource.scala | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/src/main/scala/com/microsoft/sqlserver/jdbc/spark/DefaultSource.scala b/src/main/scala/com/microsoft/sqlserver/jdbc/spark/DefaultSource.scala
index 7e8cdd09..ed29d1a8 100644
--- a/src/main/scala/com/microsoft/sqlserver/jdbc/spark/DefaultSource.scala
+++ b/src/main/scala/com/microsoft/sqlserver/jdbc/spark/DefaultSource.scala
@@ -52,7 +52,10 @@ class DefaultSource extends JdbcRelationProvider with Logging {
       mode: SaveMode,
       parameters: Map[String, String],
       rawDf: DataFrame): BaseRelation = {
-    val options = new SQLServerBulkJdbcOptions(parameters)
+    // Set the SQL Server session application name to "SparkMSSQLConnector:<user-supplied name>".
+    // If the user does not provide an application name, "SparkMSSQLConnector:NotSpecified" is used.
+    val applicationName = s"SparkMSSQLConnector:${parameters.getOrElse("applicationname", "NotSpecified")}"
+    val options = new SQLServerBulkJdbcOptions(parameters + ("applicationname" -> applicationName))
     val conn = createConnectionFactory(options)()
     val df = repartitionDataFrame(rawDf, options)
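
A minimal usage sketch of the behavior above, assuming the connector's usual format name `com.microsoft.sqlserver.jdbc.spark`, a running SQL Server, and placeholder connection values; "MyEtlJob" is a hypothetical name, and the option key `applicationname` is the one read in the patch:

    import org.apache.spark.sql.SparkSession

    // Placeholder session and DataFrame; the connector jar must be on the classpath.
    val spark = SparkSession.builder().appName("appname-example").getOrCreate()
    val df = spark.range(10).toDF("id")

    df.write
      .format("com.microsoft.sqlserver.jdbc.spark")
      .mode("overwrite")
      .option("url", "jdbc:sqlserver://<server>:1433;databaseName=<database>")
      .option("dbtable", "dbo.MyTable")
      .option("user", "<user>")
      .option("password", "<password>")
      .option("applicationname", "MyEtlJob") // session appears as "SparkMSSQLConnector:MyEtlJob"
      .save()

    // Omitting "applicationname" makes the session appear as "SparkMSSQLConnector:NotSpecified".

Keeping the prefix on both paths means DBAs can filter connector traffic in sys.dm_exec_sessions by a single program_name prefix while still distinguishing individual jobs by the user-supplied suffix.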