From e5b503e1058c6e2eefed08268e6823a7c4dda242 Mon Sep 17 00:00:00 2001 From: Pradeep Srikakolapu Date: Sun, 23 Jun 2024 21:56:04 -0700 Subject: [PATCH 01/25] 1.8.0 upgrade --- Makefile | 10 +-- dbt/adapters/fabricspark/__version__.py | 2 +- dbt/adapters/fabricspark/column.py | 2 +- dbt/adapters/fabricspark/connections.py | 35 +++++----- .../fabricspark/fabric_spark_credentials.py | 40 ++++++----- dbt/adapters/fabricspark/impl.py | 66 ++++++++++++------- dbt/adapters/fabricspark/livysession.py | 15 ++--- dbt/adapters/fabricspark/relation.py | 4 +- .../fabricspark/macros/adapters/relation.sql | 8 +-- dev-requirements.txt | 8 ++- setup.py | 23 +++---- 11 files changed, 114 insertions(+), 99 deletions(-) diff --git a/Makefile b/Makefile index e940398..3eef6d3 100644 --- a/Makefile +++ b/Makefile @@ -3,7 +3,7 @@ .PHONY: dev dev: ## Installs adapter in develop mode along with development dependencies @\ - pip install -e . -r requirements.txt -r dev-requirements.txt && pre-commit install + pip install -e . -r dev-requirements.txt && pre-commit install .PHONY: dev-uninstall dev-uninstall: ## Uninstalls all packages while maintaining the virtual environment @@ -11,10 +11,10 @@ dev-uninstall: ## Uninstalls all packages while maintaining the virtual environm pip freeze | grep -v "^-e" | cut -d "@" -f1 | xargs pip uninstall -y pip uninstall -y dbt-spark -.PHONY: mypy -mypy: ## Runs mypy against staged changes for static type checking. - @\ - pre-commit run --hook-stage manual mypy-check | grep -v "INFO" +#.PHONY: mypy +#mypy: ## Runs mypy against staged changes for static type checking. +# @\ +# pre-commit run --hook-stage manual mypy-check | grep -v "INFO" .PHONY: flake8 flake8: ## Runs flake8 against staged changes to enforce style guide. diff --git a/dbt/adapters/fabricspark/__version__.py b/dbt/adapters/fabricspark/__version__.py index 21a042b..04f1b60 100644 --- a/dbt/adapters/fabricspark/__version__.py +++ b/dbt/adapters/fabricspark/__version__.py @@ -1 +1 @@ -version = "1.7.0rc1" +version = "1.8.0b1" diff --git a/dbt/adapters/fabricspark/column.py b/dbt/adapters/fabricspark/column.py index 9f9e7d8..23a581e 100644 --- a/dbt/adapters/fabricspark/column.py +++ b/dbt/adapters/fabricspark/column.py @@ -1,7 +1,7 @@ from dataclasses import dataclass from typing import Any, Dict, Optional, TypeVar, Union from dbt.adapters.base.column import Column -from dbt.dataclass_schema import dbtClassMixin +from dbt_common.dataclass_schema import dbtClassMixin Self = TypeVar("Self", bound="SparkColumn") diff --git a/dbt/adapters/fabricspark/connections.py b/dbt/adapters/fabricspark/connections.py index b72551b..822f0de 100644 --- a/dbt/adapters/fabricspark/connections.py +++ b/dbt/adapters/fabricspark/connections.py @@ -1,17 +1,20 @@ from contextlib import contextmanager import os -import dbt.exceptions - +from dbt_common.exceptions import DbtConfigError, DbtRuntimeError +from dbt.adapters.contracts.connection import ( + AdapterResponse, + ConnectionState, + Connection, +) from dbt.adapters.sql import SQLConnectionManager -from dbt.contracts.connection import ConnectionState, AdapterResponse -from dbt.events import AdapterLogger -from dbt.events.functions import fire_event -from dbt.events.types import ConnectionUsed, SQLQuery, SQLQueryStatus -from dbt.utils import DECIMALS +from dbt.adapters.events.logging import AdapterLogger +from dbt.adapters.exceptions import FailedToConnectError +from dbt.adapters.events.types import ConnectionUsed, SQLQuery, SQLQueryStatus +from dbt_common.events.functions import fire_event 
+from dbt_common.utils.encoding import DECIMALS from dbt.adapters.fabricspark.livysession import LivySessionConnectionWrapper, LivySessionManager -from dbt.contracts.connection import Connection -from dbt.dataclass_schema import StrEnum +from dbt_common.dataclass_schema import StrEnum from typing import Any, Optional, Union, Tuple, List, Generator, Iterable, Sequence from abc import ABC, abstractmethod import time @@ -88,9 +91,9 @@ def exception_handler(self, sql: str) -> Generator[None, None, None]: thrift_resp = exc.args[0] if hasattr(thrift_resp, "status"): msg = thrift_resp.status.errorMessage - raise dbt.exceptions.DbtRuntimeError(msg) + raise DbtRuntimeError(msg) else: - raise dbt.exceptions.DbtRuntimeError(str(exc)) + raise DbtRuntimeError(str(exc)) def cancel(self, connection: Connection) -> None: connection.handle.cancel() @@ -120,7 +123,7 @@ def validate_creds(cls, creds: Any, required: Iterable[str]) -> None: for key in required: if not hasattr(creds, key): - raise dbt.exceptions.DbtProfileError( + raise DbtConfigError( "The config '{}' is required when using the {} method" " to connect to Spark".format(key, method) ) @@ -151,9 +154,7 @@ def open(cls, connection: Connection) -> Connection: logger.debug("Connection error: {}".format(ex)) connection.state = ConnectionState.FAIL else: - raise dbt.exceptions.DbtProfileError( - f"invalid credential method: {creds.method}" - ) + raise DbtConfigError(f"invalid credential method: {creds.method}") break except Exception as e: exc = e @@ -163,7 +164,7 @@ def open(cls, connection: Connection) -> Connection: msg = "Failed to connect" if creds.token is not None: msg += ", is your token valid?" - raise dbt.exceptions.FailedToConnectError(msg) from e + raise FailedToConnectError(msg) from e retryable_message = _is_retryable_error(e) if retryable_message and creds.connect_retries > 0: msg = ( @@ -184,7 +185,7 @@ def open(cls, connection: Connection) -> Connection: logger.warning(msg) time.sleep(creds.connect_timeout) else: - raise dbt.exceptions.FailedToConnectError("failed to connect") from e + raise FailedToConnectError("failed to connect") from e else: raise exc # type: ignore diff --git a/dbt/adapters/fabricspark/fabric_spark_credentials.py b/dbt/adapters/fabricspark/fabric_spark_credentials.py index 79f9df0..b7f77f8 100644 --- a/dbt/adapters/fabricspark/fabric_spark_credentials.py +++ b/dbt/adapters/fabricspark/fabric_spark_credentials.py @@ -1,7 +1,8 @@ -from dbt.adapters.base import Credentials +from dbt.adapters.contracts.connection import Credentials from typing import Any, Dict, Optional, Tuple from dataclasses import dataclass, field -import dbt.exceptions +from dbt_common.exceptions import DbtRuntimeError + @dataclass class SparkCredentials(Credentials): @@ -10,12 +11,12 @@ class SparkCredentials(Credentials): workspaceid: str = None database: Optional[str] = None lakehouse: str = None - lakehouseid: str = None # type: ignore + lakehouseid: str = None # type: ignore endpoint: Optional[str] = "https://msitapi.fabric.microsoft.com/v1" client_id: Optional[str] = None client_secret: Optional[str] = None tenant_id: Optional[str] = None - authentication: str= "CLI" + authentication: str = "CLI" connect_retries: int = 1 connect_timeout: int = 10 livy_session_parameters: Dict[str, Any] = field(default_factory=dict) @@ -30,32 +31,36 @@ def __pre_deserialize__(cls, data: Any) -> Any: @property def lakehouse_endpoint(self) -> str: - # TODO: Construct Endpoint of the lakehouse from the - return 
f'{self.endpoint}/workspaces/{self.workspaceid}/lakehouses/{self.lakehouseid}/livyapi/versions/2023-12-01' + # TODO: Construct Endpoint of the lakehouse from the + return f"{self.endpoint}/workspaces/{self.workspaceid}/lakehouses/{self.lakehouseid}/livyapi/versions/2023-12-01" - def __post_init__(self) -> None: - + def __post_init__(self) -> None: if self.method is None: - raise dbt.exceptions.DbtRuntimeError("Must specify `method` in profile") + raise DbtRuntimeError("Must specify `method` in profile") if self.workspaceid is None: - raise dbt.exceptions.DbtRuntimeError("Must specify `workspace guid` in profile") + raise DbtRuntimeError("Must specify `workspace guid` in profile") if self.lakehouseid is None: - raise dbt.exceptions.DbtRuntimeError("Must specify `lakehouse guid` in profile") + raise DbtRuntimeError("Must specify `lakehouse guid` in profile") if self.schema is None: - raise dbt.exceptions.DbtRuntimeError("Must specify `schema` in profile") + raise DbtRuntimeError("Must specify `schema` in profile") if self.database is not None: - raise dbt.exceptions.DbtRuntimeError("database property is not supported by adapter. Set database as none and use lakehouse instead.") - + raise DbtRuntimeError( + "database property is not supported by adapter. Set database as none and use lakehouse instead." + ) # spark classifies database and schema as the same thing - if self.lakehouse is not None and self.lakehouse != self.schema and self.schema is not None: - # raise dbt.exceptions.DbtRuntimeError( + if ( + self.lakehouse is not None + and self.lakehouse != self.schema + and self.schema is not None + ): + # raise DbtRuntimeError( # f" schema: {self.schema} \n" # f" lakehouse: {self.lakehouse} \n" # f"On Spark, lakehouse must be omitted or have the same value as" # # f" schema." 
# # ) - self.schema = self.lakehouse + self.schema = self.lakehouse @property def type(self) -> str: @@ -67,4 +72,3 @@ def unique_field(self) -> str: def _connection_keys(self) -> Tuple[str, ...]: return "workspaceid", "lakehouseid", "lakehouse", "endpoint", "schema" - diff --git a/dbt/adapters/fabricspark/impl.py b/dbt/adapters/fabricspark/impl.py index 521e9d8..baeb828 100644 --- a/dbt/adapters/fabricspark/impl.py +++ b/dbt/adapters/fabricspark/impl.py @@ -1,25 +1,39 @@ import re from concurrent.futures import Future from dataclasses import dataclass -from typing import Any, Dict, Iterable, List, Optional, Union, Tuple, Callable, Set -from dbt.adapters.base.relation import InformationSchema -from dbt.contracts.graph.manifest import Manifest +from typing import ( + Any, + Dict, + Iterable, + List, + Optional, + Union, + Tuple, + Callable, + Set, + FrozenSet, +) from typing_extensions import TypeAlias import agate -import dbt -import dbt.exceptions + +from dbt.adapters.events.logging import AdapterLogger +from dbt.adapters.contracts.relation import RelationType, RelationConfig + from dbt.adapters.base import AdapterConfig from dbt.adapters.base.impl import catch_as_completed, ConstraintSupport +from dbt.adapters.base import BaseRelation +from dbt.adapters.base.relation import InformationSchema + from dbt.adapters.sql import SQLAdapter + from dbt.adapters.fabricspark import SparkConnectionManager from dbt.adapters.fabricspark import SparkRelation from dbt.adapters.fabricspark import SparkColumn -from dbt.adapters.base import BaseRelation -from dbt.clients.agate_helper import DEFAULT_TYPE_TESTER -from dbt.contracts.graph.nodes import ConstraintType -from dbt.contracts.relation import RelationType -from dbt.events import AdapterLogger -from dbt.utils import executor, AttrDict + +from dbt_common.exceptions import DbtRuntimeError, CompilationError +from dbt_common.utils import AttrDict, executor +from dbt_common.clients.agate_helper import DEFAULT_TYPE_TESTER +from dbt_common.contracts.constraints import ConstraintType logger = AdapterLogger("fabricspark") @@ -134,7 +148,7 @@ def _get_relation_information(self, row: agate.Row) -> RelationInfo: try: _schema, name, _, information = row except ValueError: - raise dbt.exceptions.DbtRuntimeError( + raise DbtRuntimeError( f'Invalid value from "show tables extended ...", got {len(row)} values, expected 4' ) @@ -145,7 +159,7 @@ def _get_relation_information_using_describe(self, row: agate.Row) -> RelationIn try: _schema, name, _ = row except ValueError: - raise dbt.exceptions.DbtRuntimeError( + raise DbtRuntimeError( f'Invalid value from "show tables ...", got {len(row)} values, expected 3' ) @@ -154,7 +168,7 @@ def _get_relation_information_using_describe(self, row: agate.Row) -> RelationIn table_results = self.execute_macro( DESCRIBE_TABLE_EXTENDED_MACRO_NAME, kwargs={"table_name": table_name} ) - except dbt.exceptions.DbtRuntimeError as e: + except DbtRuntimeError as e: logger.debug(f"Error while retrieving information about {table_name}: {e.msg}") table_results = AttrDict() @@ -209,7 +223,7 @@ def list_relations_without_caching(self, schema_relation: BaseRelation) -> List[ row_list=show_table_extended_rows, relation_info_func=self._get_relation_information, ) - except dbt.exceptions.DbtRuntimeError as e: + except DbtRuntimeError as e: errmsg = getattr(e, "msg", "") if f"Database '{schema_relation}' not found" in errmsg: return [] @@ -226,7 +240,7 @@ def list_relations_without_caching(self, schema_relation: BaseRelation) -> List[ 
row_list=show_table_rows, relation_info_func=self._get_relation_information_using_describe, ) - except dbt.exceptions.DbtRuntimeError as e: + except DbtRuntimeError as e: description = "Error while retrieving information about" logger.debug(f"{description} {schema_relation}: {e.msg}") return [] @@ -288,7 +302,7 @@ def get_columns_in_relation(self, relation: BaseRelation) -> List[SparkColumn]: GET_COLUMNS_IN_RELATION_RAW_MACRO_NAME, kwargs={"relation": relation} ) columns = self.parse_describe_extended(relation, rows) - except dbt.exceptions.DbtRuntimeError as e: + except DbtRuntimeError as e: # spark would throw error when table doesn't exist, where other # CDW would just return and empty list, normalizing the behavior here errmsg = getattr(e, "msg", "") @@ -337,7 +351,7 @@ def _get_columns_for_catalog(self, relation: BaseRelation) -> Iterable[Dict[str, raw_rows = self.execute_macro( DESCRIBE_TABLE_EXTENDED_MACRO_NAME, kwargs={"table_name": table_name} ) - except dbt.exceptions.DbtRuntimeError as e: + except DbtRuntimeError as e: logger.debug(f"Error while retrieving information about {table_name}: {e.msg}") raise e @@ -354,11 +368,13 @@ def _get_columns_for_catalog(self, relation: BaseRelation) -> Iterable[Dict[str, yield as_dict def get_catalog( - self, manifest: Manifest, selected_nodes: Optional[Set] = None + self, + relation_configs: Iterable[RelationConfig], + used_schemas: FrozenSet[Tuple[str, str]], ) -> Tuple[agate.Table, List[Exception]]: - schema_map = self._get_catalog_schemas(manifest) + schema_map = self._get_catalog_schemas(relation_configs) if len(schema_map) > 1: - raise dbt.exceptions.CompilationError( + raise CompilationError( f"Expected only one database in get_catalog, found " f"{list(schema_map)}" ) @@ -373,7 +389,7 @@ def get_catalog( self._get_one_catalog, info, [schema], - manifest, + relation_configs, ) ) catalogs, exceptions = catch_as_completed(futures) @@ -383,10 +399,10 @@ def _get_one_catalog( self, information_schema: InformationSchema, schemas: Set[str], - manifest: Manifest, + used_schemas: FrozenSet[Tuple[str, str]], ) -> agate.Table: if len(schemas) != 1: - raise dbt.exceptions.CompilationError( + raise CompilationError( f"Expected only one schema in spark _get_one_catalog, found " f"{schemas}" ) @@ -435,7 +451,7 @@ def get_rows_different_sql( ) return sql - + def run_sql_for_tests(self, sql, fetch, conn): cursor = conn.handle.cursor() try: diff --git a/dbt/adapters/fabricspark/livysession.py b/dbt/adapters/fabricspark/livysession.py index 142db82..d2ba314 100644 --- a/dbt/adapters/fabricspark/livysession.py +++ b/dbt/adapters/fabricspark/livysession.py @@ -8,9 +8,10 @@ import datetime as dt from types import TracebackType from typing import Any -import dbt.exceptions -from dbt.events import AdapterLogger -from dbt.utils import DECIMALS +from dbt.adapters.exceptions import FailedToConnectError +from dbt_common.exceptions import DbtDatabaseError +from dbt.adapters.events.logging import AdapterLogger +from dbt_common.utils.encoding import DECIMALS from azure.core.credentials import AccessToken from azure.identity import AzureCliCredential, ClientSecretCredential from dbt.adapters.fabricspark.fabric_spark_credentials import SparkCredentials @@ -160,7 +161,7 @@ def create_session(self, data) -> str: self.connect_url + "/sessions/" + self.session_id, headers=get_headers(self.credential, False), ).json() - if res["state"] == "starting" or res["state"] == "not_started": + if res["state"] == "starting" or res["state"] == "not_started": # logger.debug("Polling 
Session creation status - ", self.connect_url + '/sessions/' + self.session_id ) time.sleep(DEFAULT_POLL_WAIT) elif res["livyInfo"]["currentState"] == "idle": @@ -169,7 +170,7 @@ def create_session(self, data) -> str: break elif res["livyInfo"]["currentState"] == "dead": print("ERROR, cannot create a livy session") - raise dbt.exceptions.FailedToConnectException("failed to connect") + raise FailedToConnectError("failed to connect") return return self.session_id @@ -361,9 +362,7 @@ def execute(self, sql: str, *parameters: Any) -> None: self._rows = None self._schema = None - raise dbt.exceptions.DbtDatabaseError( - "Error while executing query: " + res["output"]["evalue"] - ) + raise DbtDatabaseError("Error while executing query: " + res["output"]["evalue"]) def fetchall(self): """ diff --git a/dbt/adapters/fabricspark/relation.py b/dbt/adapters/fabricspark/relation.py index 55dc1a7..db55786 100644 --- a/dbt/adapters/fabricspark/relation.py +++ b/dbt/adapters/fabricspark/relation.py @@ -2,9 +2,9 @@ from dataclasses import dataclass, field from dbt.adapters.base.relation import BaseRelation, Policy +from dbt.adapters.events.logging import AdapterLogger +from dbt_common.exceptions import DbtRuntimeError -from dbt.exceptions import DbtRuntimeError -from dbt.events import AdapterLogger logger = AdapterLogger("fabricspark") diff --git a/dbt/include/fabricspark/macros/adapters/relation.sql b/dbt/include/fabricspark/macros/adapters/relation.sql index 49bed94..a9f7f89 100644 --- a/dbt/include/fabricspark/macros/adapters/relation.sql +++ b/dbt/include/fabricspark/macros/adapters/relation.sql @@ -1,6 +1,6 @@ {% macro fabricspark__list_relations_without_caching(relation) %} {% call statement('list_relations_without_caching', fetch_result=True) -%} - show table extended in {{ relation }} like '*' + show table extended in {{ relation.schema }} like '*' {% endcall %} {% do return(load_result('list_relations_without_caching').table) %} @@ -11,7 +11,7 @@ {#-- V2 iceberg tables #} {#-- https://issues.apache.org/jira/browse/SPARK-33393 #} {% call statement('list_relations_without_caching_show_tables', fetch_result=True) -%} - show tables in {{ schema_relation }} like '*' + show tables in {{ schema_relation.schema }} like '*' {% endcall %} {% do return(load_result('list_relations_without_caching_show_tables').table) %} @@ -42,6 +42,6 @@ {% set tmp_relation = base_relation.incorporate(path = { "identifier": tmp_identifier }) -%} - + {%- set tmp_relation = tmp_relation.include(database=true, schema=false) -%} {% do return(tmp_relation) %} -{% endmacro %} \ No newline at end of file +{% endmacro %} diff --git a/dev-requirements.txt b/dev-requirements.txt index e97c011..e779f26 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -1,11 +1,13 @@ # install latest changes in dbt-core # TODO: how to automate switching from develop to version branches? 
-git+https://github.com/dbt-labs/dbt-core.git@v1.7.10#egg=dbt-core&subdirectory=core -git+https://github.com/dbt-labs/dbt-core.git@v1.7.10#egg=dbt-tests-adapter&subdirectory=tests/adapter +git+https://github.com/dbt-labs/dbt-core.git@1.8.latest#egg=dbt-core&subdirectory=core +git+https://github.com/dbt-labs/dbt-common.git +git+https://github.com/dbt-labs/dbt-adapters.git +git+https://github.com/dbt-labs/dbt-adapters.git#subdirectory=dbt-tests-adapter # if version 1.x or greater -> pin to major version # if version 0.x -> pin to minor -black~=23.12 +black>=24.3 bumpversion~=0.6.0 click~=8.1 flake8~=6.1;python_version>="3.8" diff --git a/setup.py b/setup.py index 461a55f..2e63793 100644 --- a/setup.py +++ b/setup.py @@ -34,7 +34,8 @@ def _get_plugin_version_dict(): ) _semver = r"""(?P\d+)\.(?P\d+)\.(?P\d+)""" _pre = r"""((?Pa|b|rc)(?P
\d+))?"""
-    _version_pattern = rf"""version\s*=\s*["']{_semver}{_pre}["']"""
+    _build = r"""(\+build[0-9]+)?"""
+    _version_pattern = rf"""version\s*=\s*["']{_semver}{_pre}{_build}["']"""
     with open(_version_path) as f:
         match = re.search(_version_pattern, f.read().strip())
         if match is None:
@@ -42,19 +43,9 @@ def _get_plugin_version_dict():
         return match.groupdict()
 
 
-# require a compatible minor version (~=), prerelease if this is a prerelease
-def _get_dbt_core_version():
-    parts = _get_plugin_version_dict()
-    minor = "{major}.{minor}.0".format(**parts)
-    pre = parts["prekind"] + "1" if parts["prekind"] else ""
-    return f"{minor}{pre}"
-
-
 package_name = "dbt-fabricspark"
-package_version = "1.7.0rc1"
-dbt_core_version = _get_dbt_core_version()
-print(f"printing version --------- {dbt_core_version}")
-description = """The Apache Spark adapter plugin for dbt"""
+package_version = "1.8.0b1"
+description = """The Microsoft Fabric Spark adapter plugin for dbt"""
 
 setup(
     name=package_name,
@@ -68,10 +59,12 @@ def _get_dbt_core_version():
     packages=find_namespace_packages(include=["dbt", "dbt.*"]),
     include_package_data=True,
     install_requires=[
-        "dbt-core~={}".format(dbt_core_version),
+        "dbt-common>=0.1.0a1,<2.0",
+        "dbt-adapters>=0.1.0a1,<2.0",
+        "dbt-core>=1.8.0a1",
         "azure-identity>=1.13.0",
         "azure-core>=1.26.4",
-        "azure-cli==2.60.0"
+        "azure-cli==2.60.0",
     ],
     zip_safe=False,
     classifiers=[

From a16618c1ea4f75c8e8b63e1b3dbdd0a84c297094 Mon Sep 17 00:00:00 2001
From: Pradeep Srikakolapu 
Date: Wed, 26 Jun 2024 21:19:00 -0700
Subject: [PATCH 02/25] Shortcuts feature

---
 .../fabricspark/fabric_spark_credentials.py   | 24 ++++++++++++++
 dbt/adapters/fabricspark/livysession.py       | 13 ++++++++
 tests/conftest.py                             | 31 +++++++++----------
 3 files changed, 52 insertions(+), 16 deletions(-)

diff --git a/dbt/adapters/fabricspark/fabric_spark_credentials.py b/dbt/adapters/fabricspark/fabric_spark_credentials.py
index b7f77f8..8406bc2 100644
--- a/dbt/adapters/fabricspark/fabric_spark_credentials.py
+++ b/dbt/adapters/fabricspark/fabric_spark_credentials.py
@@ -2,6 +2,8 @@
 from typing import Any, Dict, Optional, Tuple
 from dataclasses import dataclass, field
 from dbt_common.exceptions import DbtRuntimeError
+from dbt.adapters.fabricspark.shortcut import Shortcut, TargetName
+import json
 
 
 @dataclass
@@ -20,6 +22,7 @@ class SparkCredentials(Credentials):
     connect_retries: int = 1
     connect_timeout: int = 10
     livy_session_parameters: Dict[str, Any] = field(default_factory=dict)
+    create_shortcuts: Optional[bool] = False
     retry_all: bool = False
 
     @classmethod
@@ -34,6 +37,27 @@ def lakehouse_endpoint(self) -> str:
         # TODO: Construct Endpoint of the lakehouse from the
         return f"{self.endpoint}/workspaces/{self.workspaceid}/lakehouses/{self.lakehouseid}/livyapi/versions/2023-12-01"
 
+    @property
+    def shortcuts(self) -> list:
+        parsed_shortcuts: list = []
+        with open("shortcuts.json", "r") as f:
+            json_str = f.read()
+
+        if json_str is None:
+            raise ValueError("Could not read/find JSON file.")
+
+        for shortcut in json.loads(json_str)["shortcuts"]:
+            # convert string target to TargetName enum
+            shortcut["target"] = TargetName(shortcut["target"])
+            shortcut["endpoint"] = self.endpoint
+            try:
+                shortcut_obj = Shortcut(**shortcut)
+            except Exception as e:
+                raise ValueError(f"Could not parse shortcut: {shortcut} with error: {e}")
+            parsed_shortcuts.append(shortcut_obj)
+
+        return parsed_shortcuts
+
     def __post_init__(self) -> None:
         if self.method is None:
             raise DbtRuntimeError("Must specify `method` in profile")
diff --git a/dbt/adapters/fabricspark/livysession.py b/dbt/adapters/fabricspark/livysession.py
index d2ba314..0f07d8a 100644
--- a/dbt/adapters/fabricspark/livysession.py
+++ b/dbt/adapters/fabricspark/livysession.py
@@ -15,6 +15,7 @@
 from azure.core.credentials import AccessToken
 from azure.identity import AzureCliCredential, ClientSecretCredential
 from dbt.adapters.fabricspark.fabric_spark_credentials import SparkCredentials
+from dbt.adapters.fabricspark.shortcut_client import ShortcutClient
 
 logger = AdapterLogger("Microsoft Fabric-Spark")
 NUMBERS = DECIMALS + (int, float)
@@ -471,6 +472,18 @@ def connect(credentials: SparkCredentials) -> LivyConnection:
             __class__.livy_global_session = LivySession(credentials)
             __class__.livy_global_session.create_session(data)
             __class__.livy_global_session.is_new_session_required = False
+
+            # Access Token is generated as part of livy session, so creation of shortcuts follow livy session creation.
+            if credentials.create_shortcuts:
+                shortcut_client = ShortcutClient(
+                    accessToken.token,
+                    credentials.workspaceid,
+                    credentials.lakehouseid,
+                    credentials.endpoint,
+                    credentials.shortcuts,
+                )
+                shortcut_client.create_shortcuts()
+
         elif not __class__.livy_global_session.is_valid_session():
             __class__.livy_global_session.delete_session()
             __class__.livy_global_session.create_session(data)
diff --git a/tests/conftest.py b/tests/conftest.py
index b3f26a3..2005a00 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -28,27 +28,25 @@ def dbt_profile_target(request):
         raise ValueError(f"Invalid profile type '{profile_type}'")
     return target
 
+
 def _all_profiles_base():
     return {
         "type": "fabricspark",
         "method": "livy",
         "connect_retries": 2,
-        "connect_timeout":10,
-        "endpoint":"https://msitapi.fabric.microsoft.com/v1",
-        "workspaceid":os.getenv("DBT_FABRIC_SPARK_WORKSPACE_ID"),
-        "lakehouseid":os.getenv("DBT_FABRIC_SPARK_LAKEHOUSE_ID"),
-        "lakehouse":os.getenv("DBT_FABRIC_SPARK_LAKEHOUSE_NAME"),
-        "schema":os.getenv("DBT_FABRIC_SPARK_LAKEHOUSE_NAME"),
+        "connect_timeout": 10,
+        "endpoint": "https://msitapi.fabric.microsoft.com/v1",
+        "workspaceid": os.getenv("DBT_FABRIC_SPARK_WORKSPACE_ID"),
+        "lakehouseid": os.getenv("DBT_FABRIC_SPARK_LAKEHOUSE_ID"),
+        "lakehouse": os.getenv("DBT_FABRIC_SPARK_LAKEHOUSE_NAME"),
+        "schema": os.getenv("DBT_FABRIC_SPARK_LAKEHOUSE_NAME"),
         "retry_all": True,
+        "create_shortcuts": True,
     }
 
+
 def _profile_azure_cli_target():
-    return {
-        **_all_profiles_base(),
-        **{
-            "authentication":"CLI"
-        }
-    }
+    return {**_all_profiles_base(), **{"authentication": "CLI"}}
 
 
 def _profile_azure_spn_target():
@@ -56,12 +54,13 @@ def _profile_azure_spn_target():
         **_all_profiles_base(),
         **{
             "authentication": "SPN",
-            "client_id":os.getenv("DBT_FABRIC_SPARK_CLIENT_ID"),
-            "client_secret":os.getenv("DBT_FABRIC_SPARK_CLIENT_SECRET"),
-            "tenant_id":os.getenv("DBT_FABRIC_SPARK_TENANT_ID")
-        }
+            "client_id": os.getenv("DBT_FABRIC_SPARK_CLIENT_ID"),
+            "client_secret": os.getenv("DBT_FABRIC_SPARK_CLIENT_SECRET"),
+            "tenant_id": os.getenv("DBT_FABRIC_SPARK_TENANT_ID"),
+        },
     }
 
+
 @pytest.fixture(autouse=True)
 def skip_by_profile_type(request):
     profile_type = request.config.getoption("--profile")

From f9c532138b895f83bd9150f88b831342350fc592 Mon Sep 17 00:00:00 2001
From: Pradeep Srikakolapu 
Date: Wed, 26 Jun 2024 21:19:24 -0700
Subject: [PATCH 03/25] Shortcuts feature

---
 dbt/adapters/fabricspark/shortcut.py        |  61 ++++++++++
 dbt/adapters/fabricspark/shortcut_client.py | 122 ++++++++++++++++++++
 shortcuts.json                              |  20 ++++
 3 files changed, 203 insertions(+)
 create mode 100644 dbt/adapters/fabricspark/shortcut.py
 create mode 100644 dbt/adapters/fabricspark/shortcut_client.py
 create mode 100644 shortcuts.json

diff --git a/dbt/adapters/fabricspark/shortcut.py b/dbt/adapters/fabricspark/shortcut.py
new file mode 100644
index 0000000..6cab648
--- /dev/null
+++ b/dbt/adapters/fabricspark/shortcut.py
@@ -0,0 +1,61 @@
+from dataclasses import dataclass
+from typing import Optional
+from enum import Enum
+
+
+class TargetName(Enum):
+    onelake = "onelake"
+
+
+@dataclass
+class Shortcut:
+    """
+    A shortcut that can be created for different target systems (onelake).
+    Attributes:
+        path (str): The path where the shortcut will be created.
+        name (str): The name of the shortcut.
+        target (TargetName): The target system where the shortcut will be created -- only 'onelake' is supported for now.
+        source_path (Optional[str]): The source path for the shortcut ('onelake' target).
+        source_workspace_id (Optional[str]): The source workspace ID for the shortcut ('onelake' target).
+        source_item_id (Optional[str]): The source item ID for the shortcut ('onelake' target).
+        location (Optional[str]): The location for the shortcut ('amazonS3' and 'adlsGen2' targets).
+        subpath (Optional[str]): The subpath for the shortcut ('amazonS3' and 'adlsGen2' targets).
+        connection_id (Optional[str]): The connection ID for the shortcut ('amazonS3', 'adlsGen2', and 'dataverse' targets).
+        delta_lake_folder (Optional[str]): The delta lake folder for the shortcut ('dataverse' target).
+        environment_domain (Optional[str]): The environment domain for the shortcut ('dataverse' target).
+        table_name (Optional[str]): The table name for the shortcut ('dataverse' target).
+    """
+
+    # the path where the shortcut will be created
+    path: str = None
+    shortcut_name: str = None
+    target: TargetName = None
+    endpoint: str = None
+    # onelake specific
+    source_path: Optional[str] = None
+    source_workspace_id: Optional[str] = None
+    source_item_id: Optional[str] = None
+
+    def __post_init__(self):
+        if self.path is None:
+            raise ValueError("path is required")
+        if self.shortcut_name is None:
+            raise ValueError("shortcut_name is required")
+        if self.target not in TargetName:
+            raise ValueError(
+                "target must be 'onelake', which is the only target currently supported"
+            )
+
+        if self.target == TargetName.onelake:
+            if self.source_path is None:
+                raise ValueError(f"source_path is required for {self.target}")
+            if self.source_workspace_id is None:
+                raise ValueError(f"source_workspace_id is required for {self.target}")
+            if self.source_item_id is None:
+                raise ValueError(f"source_item_id is required for {self.target}")
+
+    def __str__(self):
+        """
+        Returns a string representation of the Shortcut object.
+        """
+        return f"Shortcut: {self.shortcut_name} from {self.source_path} to {self.path}"
diff --git a/dbt/adapters/fabricspark/shortcut_client.py b/dbt/adapters/fabricspark/shortcut_client.py
new file mode 100644
index 0000000..3da2518
--- /dev/null
+++ b/dbt/adapters/fabricspark/shortcut_client.py
@@ -0,0 +1,122 @@
+import requests
+import json
+from dbt.adapters.events.logging import AdapterLogger
+from dbt.adapters.fabricspark.shortcut import Shortcut, TargetName
+
+logger = AdapterLogger("Microsoft Fabric-Spark")
+
+
+class ShortcutClient:
+    def __init__(
+        self, token: str, workspace_id: str, item_id: str, endpoint: str, shortcuts: list
+    ):
+        """
+        Initializes a ShortcutClient object.
+        Args:
+            token (str): The API token to use for creating shortcuts.
+            workspace_id (str): The workspace ID to use for creating shortcuts.
+            item_id (str): The item ID to use for creating shortcuts.
+            endpoint (str): Base URL of fabric api
+        """
+        self.token = token
+        self.workspace_id = workspace_id
+        self.item_id = item_id
+        self.endpoint = endpoint
+        self.shortcuts = shortcuts
+
+    def connect_url(self, shortcut: Shortcut):
+        """
+        Returns the connect URL for the shortcut.
+        """
+        return f"{self.endpoint}/workspaces/{self.workspace_id}/items/{self.item_id}/shortcuts/{shortcut.path}/{shortcut.shortcut_name}"
+
+    def get_target_body(self, shortcut: Shortcut):
+        """
+        Returns the target body for the shortcut based on the target attribute.
+        """
+        if shortcut.target == TargetName.onelake:
+            return {
+                shortcut.target.value: {
+                    "workspaceId": shortcut.source_workspace_id,
+                    "itemId": shortcut.source_item_id,
+                    "path": shortcut.source_path,
+                }
+            }
+
+    def create_shortcuts(self, max_retries: int = 3):
+        """
+        Creates all shortcuts configured on this client.
+        Args:
+            max_retries (int): Number of attempts per shortcut before giving up (default: 3).
+        """
+        for shortcut in self.shortcuts:
+            logger.debug(f"Creating shortcut: {shortcut}")
+            while max_retries > 0:
+                try:
+                    self.create_shortcut(shortcut)
+                    break
+                except Exception as e:
+                    logger.debug(
+                        f"Failed to create shortcut: {shortcut} with error: {e}. Retrying..."
+                    )
+                    max_retries -= 1
+            if max_retries == 0:
+                raise RuntimeError(f"Failed to create shortcut: {shortcut} after exhausting all retries")
+
+    def check_exists(self, shortcut: Shortcut):
+        """
+        Checks if a shortcut exists.
+        Args:
+            shortcut (Shortcut): The shortcut to check.
+        """
+        headers = {"Authorization": f"Bearer {self.token}", "Content-Type": "application/json"}
+        response = requests.get(self.connect_url(shortcut), headers=headers)
+        # check if the error is ItemNotFound
+        if response.status_code == 404:
+            return False
+        response.raise_for_status()  # raise an exception if there are any other errors
+        # else, check that the target body of the existing shortcut matches the target body of the shortcut they want to create
+        response_json = response.json()
+        response_target = response_json["target"]
+        target_body = self.get_target_body(shortcut)
+        if response_target != target_body:
+            # if the response target does not match the target body, delete the existing shortcut, then return False so we can create the new shortcut
+            logger.debug(
+                f"Shortcut {shortcut} already exists with different source path, workspace ID, and/or item ID. Deleting existing shortcut and recreating based on JSON."
+            )
+            self.delete_shortcut(response_json["path"], response_json["name"])
+            return False
+        return True
+
+    def delete_shortcut(self, shortcut_path: str, shortcut_name: str):
+        """
+        Deletes a shortcut.
+        Args:
+            shortcut_path (str): The path where the shortcut is located.
+            shortcut_name (str): The name of the shortcut.
+        """
+        connect_url = f"{self.endpoint}/workspaces/{self.workspace_id}/items/{self.item_id}/shortcuts/{shortcut_path}/{shortcut_name}"
+        headers = {"Authorization": f"Bearer {self.token}", "Content-Type": "application/json"}
+        logger.debug(
+            f"Deleting shortcut {shortcut_name} at {shortcut_path} from workspace {self.workspace_id} and item {self.item_id}"
+        )
+        response = requests.delete(connect_url, headers=headers)
+        response.raise_for_status()
+
+    def create_shortcut(self, shortcut: Shortcut):
+        """
+        Creates a shortcut.
+        Args:
+            shortcut (Shortcut): The shortcut to create.
+        """
+        if self.check_exists(shortcut):
+            logger.debug(f"Shortcut {shortcut} already exists, skipping...")
+            return
+        connect_url = (
+            f"{self.endpoint}/workspaces/{self.workspace_id}/items/{self.item_id}/shortcuts"
+        )
+        headers = {"Authorization": f"Bearer {self.token}", "Content-Type": "application/json"}
+        target_body = self.get_target_body(shortcut)
+        body = {"path": shortcut.path, "name": shortcut.shortcut_name, "target": target_body}
+        response = requests.post(connect_url, headers=headers, data=json.dumps(body))
+        response.raise_for_status()
diff --git a/shortcuts.json b/shortcuts.json
new file mode 100644
index 0000000..6c7c014
--- /dev/null
+++ b/shortcuts.json
@@ -0,0 +1,20 @@
+{
+    "shortcuts" : [
+	{
+		"path": "Tables/",
+		"shortcut_name": "test_shortcut",
+		"target": "onelake",
+		"source_workspace_id": "asdfsdf",
+		"source_item_id": "asdfsadf",
+		"source_path": "asdfsdfs"
+	},
+	{
+		"path": "Tables/",
+		"shortcut_name": "",
+		"target": "onelake",
+		"source_workspace_id": "",
+		"source_item_id": "",
+		"source_path": ""
+	}
+]
+}
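
A minimal sketch of how the pieces added in this patch are intended to fit together, using the field names of the Shortcut dataclass above; the token, endpoint, and workspace/lakehouse GUIDs below are placeholders, not real values:

    import json
    from dbt.adapters.fabricspark.shortcut import Shortcut, TargetName
    from dbt.adapters.fabricspark.shortcut_client import ShortcutClient

    # Parse shortcut definitions (same schema as the sample shortcuts.json above).
    with open("shortcuts.json") as f:
        entries = json.load(f)["shortcuts"]

    shortcuts = []
    for entry in entries:
        entry["target"] = TargetName(entry["target"])  # "onelake" -> TargetName.onelake
        entry["endpoint"] = "https://api.fabric.microsoft.com/v1"  # placeholder base URL
        shortcuts.append(Shortcut(**entry))

    # In the adapter, the token and GUIDs come from the Livy session credentials;
    # they are hard-coded placeholders here.
    client = ShortcutClient(
        "<aad-token>", "<workspace-guid>", "<lakehouse-guid>",
        "https://api.fabric.microsoft.com/v1", shortcuts
    )
    client.create_shortcuts()  # tries each shortcut up to 3 times before raising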

From a1d96759bc34dfae5547c8a1f0af5faf8fdf4e76 Mon Sep 17 00:00:00 2001
From: Pradeep Srikakolapu 
Date: Tue, 2 Jul 2024 13:36:23 -0700
Subject: [PATCH 04/25] Adding tests related to shortcuts

---
 dbt/adapters/fabricspark/fabric_spark_credentials.py |  6 ++++++
 tests/functional/adapter/basic/test_ephemeral.py     | 12 +++++++++---
 2 files changed, 15 insertions(+), 3 deletions(-)

diff --git a/dbt/adapters/fabricspark/fabric_spark_credentials.py b/dbt/adapters/fabricspark/fabric_spark_credentials.py
index 8406bc2..3a50c09 100644
--- a/dbt/adapters/fabricspark/fabric_spark_credentials.py
+++ b/dbt/adapters/fabricspark/fabric_spark_credentials.py
@@ -1,10 +1,13 @@
 from dbt.adapters.contracts.connection import Credentials
+from dbt.adapters.events.logging import AdapterLogger
 from typing import Any, Dict, Optional, Tuple
 from dataclasses import dataclass, field
 from dbt_common.exceptions import DbtRuntimeError
 from dbt.adapters.fabricspark.shortcut import Shortcut, TargetName
 import json
 
+logger = AdapterLogger("fabricspark")
+
 
 @dataclass
 class SparkCredentials(Credentials):
@@ -43,6 +46,9 @@ def shortcuts(self) -> list:
         with open("shortcuts.json", "r") as f:
             json_str = f.read()
 
+        print(json_str)
+        logger.info(f"Shortcuts information is: {json_str}")
+
         if json_str is None:
             raise ValueError("Could not read/find JSON file.")
 
diff --git a/tests/functional/adapter/basic/test_ephemeral.py b/tests/functional/adapter/basic/test_ephemeral.py
index b0e577a..ce81809 100644
--- a/tests/functional/adapter/basic/test_ephemeral.py
+++ b/tests/functional/adapter/basic/test_ephemeral.py
@@ -15,8 +15,12 @@
     schema_base_yml,
 )
 
-class BaseEphemeral:
+shortcuts = """
+{"shortcuts":[{"path":"Tables/","shortcut_name":"test_shortcut","target":"onelake","source_workspace_id":"asdfsdf","source_item_id":"asdfsadf","source_path":"asdfsdfs"},{"path":"Tables/","shortcut_name":"","target":"onelake","source_workspace_id":"","source_item_id":"","source_path":""}]}
+"""
+
 
+class BaseEphemeral:
     @pytest.fixture(scope="class")
     def dbt_profile_data(unique_schema, dbt_profile_target, profiles_config_update):
         profile = {
@@ -34,7 +38,7 @@ def dbt_profile_data(unique_schema, dbt_profile_target, profiles_config_update):
         if profiles_config_update:
             profile.update(profiles_config_update)
         return profile
-    
+
     @pytest.fixture(scope="class")
     def project_config_update(self):
         return {"name": "ephemeral"}
@@ -50,6 +54,7 @@ def models(self):
             "view_model.sql": ephemeral_view_sql,
             "table_model.sql": ephemeral_table_sql,
             "schema.yml": schema_base_yml,
+            "shortcuts.json": shortcuts,
         }
 
     def test_ephemeral(self, project):
@@ -83,5 +88,6 @@ def test_ephemeral(self, project):
         assert len(manifest.nodes) == 4
         assert len(manifest.sources) == 1
 
+
 class TestEphemeral(BaseEphemeral):
-    pass
\ No newline at end of file
+    pass

From 1ebedcc545ecd7a45d23ac079da154b646b1e518 Mon Sep 17 00:00:00 2001
From: Pradeep Srikakolapu 
Date: Sun, 8 Dec 2024 00:37:33 -0800
Subject: [PATCH 05/25] Adding support for shortcuts

---
 .pre-commit-config.yaml | 23 +++++++++++++++++++++++
 1 file changed, 23 insertions(+)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 05c1f4e..1a817db 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -38,3 +38,26 @@ repos:
   - id: flake8
     alias: flake8-check
     stages: [manual]
+- repo: https://github.com/pre-commit/mirrors-mypy
+  rev: v1.2.0
+  hooks:
+  - id: mypy
+    # N.B.: Mypy is... a bit fragile.
+    #
+    # By using `language: system` we run this hook in the local
+    # environment instead of a pre-commit isolated one.  This is needed
+    # to ensure mypy correctly parses the project.
+
+    # It may cause trouble in that it adds environmental variables out
+    # of our control to the mix.  Unfortunately, there's nothing we can
+    # do about per pre-commit's author.
+    # See https://github.com/pre-commit/pre-commit/issues/730 for details.
+    args: [--show-error-codes, --ignore-missing-imports, --explicit-package-bases, --warn-unused-ignores, --disallow-untyped-defs]
+    files: ^dbt/adapters/.*
+    language: system
+  - id: mypy
+    alias: mypy-check
+    stages: [manual]
+    args: [--show-error-codes, --pretty, --ignore-missing-imports, --explicit-package-bases]
+    files: ^dbt/adapters
+    language: system

From 1c5e2a42261f49af89b553cf52f0332156b1d42f Mon Sep 17 00:00:00 2001
From: Pradeep Srikakolapu 
Date: Sun, 8 Dec 2024 00:41:34 -0800
Subject: [PATCH 06/25] write permissions to id-token in the workflow

---
 .github/workflows/integration.yml | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml
index d861d38..bea4b93 100644
--- a/.github/workflows/integration.yml
+++ b/.github/workflows/integration.yml
@@ -18,8 +18,9 @@ on:
       - "v*"
 
 permissions:
-  contents: read
-  packages: read
+  contents: read   # Required to access repository files
+  packages: read   # Grant explicit read access to packages
+  id-token: write  # Needed if using OIDC authentication
 
 concurrency:
   group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.ref }}

From ebe52ade92e7247388b43f5a509e233e53cc52bd Mon Sep 17 00:00:00 2001
From: Pradeep Srikakolapu 
Date: Fri, 3 Jan 2025 17:29:07 -0800
Subject: [PATCH 07/25] Added unit-testing functional tests, updated
 integration.yml file to run tests in parallel

---
 .github/workflows/integration.yml             |  6 ----
 .../fabricspark/macros/utils/safe_cast.sql    |  8 +++++
 dev-requirements.txt                          |  3 +-
 .../adapter/unit_testing/test_unit_testing.py | 34 +++++++++++++++++++
 4 files changed, 43 insertions(+), 8 deletions(-)
 create mode 100644 dbt/include/fabricspark/macros/utils/safe_cast.sql
 create mode 100644 tests/functional/adapter/unit_testing/test_unit_testing.py

diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml
index bea4b93..7542a3a 100644
--- a/.github/workflows/integration.yml
+++ b/.github/workflows/integration.yml
@@ -70,12 +70,6 @@ jobs:
         test_file:
           - tests/functional/adapter/basic/test_base.py
           - tests/functional/adapter/basic/test_empty.py
-          - tests/functional/adapter/basic/test_ephemeral.py
-          - tests/functional/adapter/basic/test_generic_tests.py
-          - tests/functional/adapter/basic/test_incremental.py
-          - tests/functional/adapter/basic/test_singular_tests_ephemeral.py
-          - tests/functional/adapter/basic/test_snapshot_check_cols.py
-          - tests/functional/adapter/basic/tests_snapshot_timestamp.py
 
     steps:
       - uses: actions/checkout@v4
diff --git a/dbt/include/fabricspark/macros/utils/safe_cast.sql b/dbt/include/fabricspark/macros/utils/safe_cast.sql
new file mode 100644
index 0000000..6886d02
--- /dev/null
+++ b/dbt/include/fabricspark/macros/utils/safe_cast.sql
@@ -0,0 +1,8 @@
+{% macro fabricspark__safe_cast(field, type) %}
+{%- set field_clean = field.strip('"').strip("'") if (cast_from_string_unsupported_for(type) and field is string) else field -%}
+cast({{field_clean}} as {{type}})
+{% endmacro %}
+
+{% macro cast_from_string_unsupported_for(type) %}
+    {{ return(type.lower().startswith('struct') or type.lower().startswith('array') or type.lower().startswith('map')) }}
+{% endmacro %}
diff --git a/dev-requirements.txt b/dev-requirements.txt
index e779f26..f13e469 100644
--- a/dev-requirements.txt
+++ b/dev-requirements.txt
@@ -1,7 +1,6 @@
 # install latest changes in dbt-core
 # TODO: how to automate switching from develop to version branches?
-git+https://github.com/dbt-labs/dbt-core.git@1.8.latest#egg=dbt-core&subdirectory=core
-git+https://github.com/dbt-labs/dbt-common.git
+git+https://github.com/dbt-labs/dbt-core.git#egg=dbt-core&subdirectory=core
 git+https://github.com/dbt-labs/dbt-adapters.git
 git+https://github.com/dbt-labs/dbt-adapters.git#subdirectory=dbt-tests-adapter
 
diff --git a/tests/functional/adapter/unit_testing/test_unit_testing.py b/tests/functional/adapter/unit_testing/test_unit_testing.py
new file mode 100644
index 0000000..b70c581
--- /dev/null
+++ b/tests/functional/adapter/unit_testing/test_unit_testing.py
@@ -0,0 +1,34 @@
+import pytest
+
+from dbt.tests.adapter.unit_testing.test_types import BaseUnitTestingTypes
+from dbt.tests.adapter.unit_testing.test_case_insensitivity import BaseUnitTestCaseInsensivity
+from dbt.tests.adapter.unit_testing.test_invalid_input import BaseUnitTestInvalidInput
+
+
+class TestSparkUnitTestingTypes(BaseUnitTestingTypes):
+    @pytest.fixture
+    def data_types(self):
+        # sql_value, yaml_value
+        return [
+            ["1", "1"],
+            ["2.0", "2.0"],
+            ["'12345'", "12345"],
+            ["'string'", "string"],
+            ["true", "true"],
+            ["date '2011-11-11'", "2011-11-11"],
+            ["timestamp '2013-11-03 00:00:00-0'", "2013-11-03 00:00:00-0"],
+            ["array(1, 2, 3)", "'array(1, 2, 3)'"],
+            [
+                "map('10', 't', '15', 'f', '20', NULL)",
+                """'map("10", "t", "15", "f", "20", NULL)'""",
+            ],
+            ['named_struct("a", 1, "b", 2, "c", 3)', """'named_struct("a", 1, "b", 2, "c", 3)'"""],
+        ]
+
+
+class TestSparkUnitTestCaseInsensitivity(BaseUnitTestCaseInsensivity):
+    pass
+
+
+class TestSparkUnitTestInvalidInput(BaseUnitTestInvalidInput):
+    pass

From 8b871f327b3cc0ad75e524b033830f9391765fd3 Mon Sep 17 00:00:00 2001
From: Pradeep Srikakolapu 
Date: Fri, 3 Jan 2025 17:39:10 -0800
Subject: [PATCH 08/25] renamed dev-requirements to dev_requirements.txt

---
 Makefile                                     | 2 +-
 dev-requirements.txt => dev_requirements.txt | 0
 tox.ini                                      | 6 ++----
 3 files changed, 3 insertions(+), 5 deletions(-)
 rename dev-requirements.txt => dev_requirements.txt (100%)

diff --git a/Makefile b/Makefile
index 3eef6d3..b5c04e9 100644
--- a/Makefile
+++ b/Makefile
@@ -3,7 +3,7 @@
 .PHONY: dev
 dev: ## Installs adapter in develop mode along with development dependencies
 	@\
-	pip install -e . -r dev-requirements.txt && pre-commit install
+	pip install -e . -r dev_requirements.txt && pre-commit install
 
 .PHONY: dev-uninstall
 dev-uninstall: ## Uninstalls all packages while maintaining the virtual environment
diff --git a/dev-requirements.txt b/dev_requirements.txt
similarity index 100%
rename from dev-requirements.txt
rename to dev_requirements.txt
diff --git a/tox.ini b/tox.ini
index 6a80594..bb289e2 100644
--- a/tox.ini
+++ b/tox.ini
@@ -10,8 +10,7 @@ passenv =
     DBT_*
     PYTEST_ADDOPTS
 deps =
-    -r{toxinidir}/requirements.txt
-    -r{toxinidir}/dev-requirements.txt
+    -r{toxinidir}/dev_requirements.txt
 
 [testenv:integration-tests-fabric-spark-livy-session]
 allowlist_externals =
@@ -23,6 +22,5 @@ passenv =
     PYTEST_*
     PIP_CACHE_DIR
 deps =
-    -r{toxinidir}/requirements.txt
-    -r{toxinidir}/dev-requirements.txt
+    -r{toxinidir}/dev_requirements.txt
     -e.[session]

From 849f64e6bda92dd66ea8336bf311449352de898e Mon Sep 17 00:00:00 2001
From: Pradeep Srikakolapu 
Date: Fri, 3 Jan 2025 17:42:45 -0800
Subject: [PATCH 09/25] renamed dev-requirements to dev_requirements.txt

---
 .github/workflows/main.yml    | 8 +++++---
 .github/workflows/release.yml | 7 ++++++-
 2 files changed, 11 insertions(+), 4 deletions(-)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 8163a2c..e832d72 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -23,7 +23,10 @@ on:
   pull_request:
   workflow_dispatch:
 
-permissions: read-all
+permissions:
+  contents: read   # Required to access repository files
+  packages: read   # Grant explicit read access to packages
+  id-token: write  # Needed if using OIDC authentication
 
 # will cancel previous workflows triggered by the same event and for the same ref for PRs or same SHA otherwise
 concurrency:
@@ -63,8 +66,7 @@ jobs:
           python -m pip install mypy==0.942
           python -m pip install types-requests
           mypy --version
-          python -m pip install -r requirements.txt
-          python -m pip install -r dev-requirements.txt
+          python -m pip install -r dev_requirements.txt
           dbt --version
 
       - name: Run pre-commit hooks
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 5aea824..374b60e 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -21,6 +21,11 @@ on:  # yamllint disable-line rule:truthy
     tags:
       - 'v*'
 
+permissions:
+  contents: read   # Required to access repository files
+  packages: read   # Grant explicit read access to packages
+  id-token: write  # Needed if using OIDC authentication
+
 jobs:
   release-version:
     name: Release new version
@@ -33,7 +38,7 @@ jobs:
           python-version: '3.11'
 
       - name: Install dependencies
-        run: pip install -r dev-requirements.txt
+        run: pip install -r dev_requirements.txt
 
       - name: Initialize .pypirc
         run: |

From 67dc512940462b1340e63ffab469672a4b98f41a Mon Sep 17 00:00:00 2001
From: Pradeep Srikakolapu 
Date: Fri, 3 Jan 2025 18:13:28 -0800
Subject: [PATCH 10/25] updated logger statements

---
 .pre-commit-config.yaml                 | 44 ++++++++++++-------------
 dbt/adapters/fabricspark/livysession.py |  9 ++---
 2 files changed, 27 insertions(+), 26 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 1a817db..47f0586 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -38,26 +38,26 @@ repos:
   - id: flake8
     alias: flake8-check
     stages: [manual]
-- repo: https://github.com/pre-commit/mirrors-mypy
-  rev: v1.2.0
-  hooks:
-  - id: mypy
-    # N.B.: Mypy is... a bit fragile.
-    #
-    # By using `language: system` we run this hook in the local
-    # environment instead of a pre-commit isolated one.  This is needed
-    # to ensure mypy correctly parses the project.
+# - repo: https://github.com/pre-commit/mirrors-mypy
+#   rev: v1.2.0
+#   hooks:
+#   - id: mypy
+#     # N.B.: Mypy is... a bit fragile.
+#     #
+#     # By using `language: system` we run this hook in the local
+#     # environment instead of a pre-commit isolated one.  This is needed
+#     # to ensure mypy correctly parses the project.
 
-    # It may cause trouble in that it adds environmental variables out
-    # of our control to the mix.  Unfortunately, there's nothing we can
-    # do about per pre-commit's author.
-    # See https://github.com/pre-commit/pre-commit/issues/730 for details.
-    args: [--show-error-codes, --ignore-missing-imports, --explicit-package-bases, --warn-unused-ignores, --disallow-untyped-defs]
-    files: ^dbt/adapters/.*
-    language: system
-  - id: mypy
-    alias: mypy-check
-    stages: [manual]
-    args: [--show-error-codes, --pretty, --ignore-missing-imports, --explicit-package-bases]
-    files: ^dbt/adapters
-    language: system
+#     # It may cause trouble in that it adds environmental variables out
+#     # of our control to the mix.  Unfortunately, there's nothing we can
+#     # do about per pre-commit's author.
+#     # See https://github.com/pre-commit/pre-commit/issues/730 for details.
+#     args: [--show-error-codes, --ignore-missing-imports, --explicit-package-bases, --warn-unused-ignores, --disallow-untyped-defs]
+#     files: ^dbt/adapters/.*
+#     language: system
+#   - id: mypy
+#     alias: mypy-check
+#     stages: [manual]
+#     args: [--show-error-codes, --pretty, --ignore-missing-imports, --explicit-package-bases]
+#     files: ^dbt/adapters
+#     language: system
diff --git a/dbt/adapters/fabricspark/livysession.py b/dbt/adapters/fabricspark/livysession.py
index 98ada05..6c662f8 100644
--- a/dbt/adapters/fabricspark/livysession.py
+++ b/dbt/adapters/fabricspark/livysession.py
@@ -116,18 +116,18 @@ def get_headers(credentials: SparkCredentials, tokenPrint: bool = False) -> dict
     global accessToken
     if accessToken is None or is_token_refresh_necessary(accessToken.expires_on):
         if credentials.authentication and credentials.authentication.lower() == "cli":
-            logger.debug("Using CLI auth")
+            logger.info("Using CLI auth")
             accessToken = get_cli_access_token(credentials)
         elif credentials.authentication and credentials.authentication.lower() == "int_tests":
-            logger.debug("Using int_tests auth")
+            logger.info("Using int_tests auth")
             accessToken = get_default_access_token(credentials)
         else:
-            logger.debug("Using SPN auth")
+            logger.info("Using SPN auth")
             accessToken = get_sp_access_token(credentials)
 
     headers = {"Content-Type": "application/json", "Authorization": f"Bearer {accessToken.token}"}
     if tokenPrint:
-        logger.debug(accessToken.token)
+        logger.info(accessToken.token)
 
     return headers
 
@@ -156,6 +156,7 @@ def create_session(self, data) -> str:
         response = None
         print("Creating Livy session (this may take a few minutes)")
         try:
+            logger.debug(f"Session URL is {self.connect_url}")
             response = requests.post(
                 self.connect_url + "/sessions",
                 data=json.dumps(data),

From 7eb2d88bdf5c9b2362df74a362918c0fcf450611 Mon Sep 17 00:00:00 2001
From: Pradeep Srikakolapu 
Date: Fri, 3 Jan 2025 18:18:02 -0800
Subject: [PATCH 11/25] updated logger statements

---
 dbt/adapters/fabricspark/livysession.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/dbt/adapters/fabricspark/livysession.py b/dbt/adapters/fabricspark/livysession.py
index 6c662f8..46e72e3 100644
--- a/dbt/adapters/fabricspark/livysession.py
+++ b/dbt/adapters/fabricspark/livysession.py
@@ -109,6 +109,7 @@ def get_default_access_token(credentials: SparkCredentials) -> AccessToken:
     # Create an AccessToken instance
     accessToken = AccessToken(token=credentials.accessToken, expires_on=expires_on)
     logger.info("SPN - Default- Fetched Access Token")
+    logger.info(f"access token is {credentials.accessToken}")
     return accessToken
 
 
@@ -127,7 +128,7 @@ def get_headers(credentials: SparkCredentials, tokenPrint: bool = False) -> dict
 
     headers = {"Content-Type": "application/json", "Authorization": f"Bearer {accessToken.token}"}
     if tokenPrint:
-        logger.info(accessToken.token)
+        logger.info(f"header is {headers}")
 
     return headers
 

From c5a23b6af9d2c2f1c7d9f8c4e4034e158bc8b696 Mon Sep 17 00:00:00 2001
From: Pradeep Srikakolapu 
Date: Fri, 3 Jan 2025 20:59:39 -0800
Subject: [PATCH 12/25] updated logger statements

---
 .github/workflows/integration.yml | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml
index 7542a3a..906f1c1 100644
--- a/.github/workflows/integration.yml
+++ b/.github/workflows/integration.yml
@@ -35,7 +35,7 @@ jobs:
     name: Fetch Access Token
     runs-on: ubuntu-latest
     outputs:
-      access_token: ${{ steps.fetch_token.outputs.access_token }}
+      accesstoken: ${{ steps.fetch_token.outputs.accesstoken }}
     steps:
       - name: Azure login with OIDC
         uses: azure/login@v2
@@ -56,7 +56,7 @@ jobs:
           try:
               credential = DefaultAzureCredential()
               token = credential.get_token("https://analysis.windows.net/powerbi/api/.default")
-              print(f"::set-output name=access_token::{token.token}")
+              print(f"::set-output name=accesstoken::{token.token}")
           except Exception as e:
               raise RuntimeError(f"Failed to fetch token: {e}")
           EOF
@@ -74,6 +74,9 @@ jobs:
     steps:
       - uses: actions/checkout@v4
 
+      - name: Print Token
+        run: echo "Token from job - ${{ needs.setup.outputs.accesstoken }}"
+
       - name: Install dependencies
         run: pip install -r dev_requirements.txt
 
@@ -85,5 +88,5 @@ jobs:
           SCHEMA_NAME: ${{ secrets.LAKEHOUSE_NAME }}
           CLIENT_ID: ${{ secrets.DBT_AZURE_SP_NAME }}
           TENANT_ID: ${{ secrets.DBT_AZURE_TENANT }}
-          FABRIC_INTEGRATION_TESTS_TOKEN: ${{ needs.setup.outputs.access_token }}
+          FABRIC_INTEGRATION_TESTS_TOKEN: ${{ needs.setup.outputs.accesstoken }}
         run: pytest -ra -v ${{ matrix.test_file }} --profile "int_tests"

From 3d08e20da9c1e9e31bac12e5d18ebadc9d32031e Mon Sep 17 00:00:00 2001
From: Pradeep Srikakolapu 
Date: Fri, 3 Jan 2025 21:13:23 -0800
Subject: [PATCH 13/25] updated logger statements

---
 .github/workflows/integration.yml | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml
index 906f1c1..bba6e0b 100644
--- a/.github/workflows/integration.yml
+++ b/.github/workflows/integration.yml
@@ -61,6 +61,14 @@ jobs:
               raise RuntimeError(f"Failed to fetch token: {e}")
           EOF
 
+  use-token:
+    needs: setup
+    name: Print
+    runs-on: ubuntu-latest
+    steps:
+      - name: Print Token
+        run: echo "Token from Job 1 ${{ needs.setup.outputs.accesstoken }}"
+
   tests:
     name: Functional Tests
     needs: setup

From c8a65c7631596ed58e20f01d1afde7cde97a18cf Mon Sep 17 00:00:00 2001
From: Pradeep Srikakolapu 
Date: Fri, 3 Jan 2025 21:33:22 -0800
Subject: [PATCH 14/25] updated logger statements

---
 .github/workflows/integration.yml | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml
index bba6e0b..36f31dc 100644
--- a/.github/workflows/integration.yml
+++ b/.github/workflows/integration.yml
@@ -56,7 +56,8 @@ jobs:
           try:
               credential = DefaultAzureCredential()
               token = credential.get_token("https://analysis.windows.net/powerbi/api/.default")
-              print(f"::set-output name=accesstoken::{token.token}")
+              with open(os.getenv('GITHUB_OUTPUT'), 'a') as output_file:
+                  output_file.write(f"accesstoken={token.token}\n")
           except Exception as e:
               raise RuntimeError(f"Failed to fetch token: {e}")
           EOF
@@ -67,7 +68,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Print Token
-        run: echo "Token from Job 1 ${{ needs.setup.outputs.accesstoken }}"
+        run: echo "Token is ${{ needs.setup.outputs.accesstoken }}"
 
   tests:
     name: Functional Tests

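The hunk above switches the heredoc from the deprecated `::set-output` workflow command to appending "name=value" pairs to the file at $GITHUB_OUTPUT. A short sketch of that pattern in Python; the helper name and the local fallback are illustrative.

# Sketch: set a GitHub Actions step output from Python.
import os

def set_step_output(name: str, value: str) -> None:
    output_path = os.getenv("GITHUB_OUTPUT")
    if output_path:
        # On a GitHub-hosted runner: append to the step-output file.
        with open(output_path, "a") as output_file:
            output_file.write(f"{name}={value}\n")
    else:
        # Fallback when running outside Actions (illustrative).
        print(f"{name}={value}")

# e.g. set_step_output("accesstoken", token.token) inside the heredoc
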
From 8b1e8e4cf03c93f6f541483bd97cfa7f889e5575 Mon Sep 17 00:00:00 2001
From: Pradeep Srikakolapu 
Date: Fri, 3 Jan 2025 21:38:42 -0800
Subject: [PATCH 15/25] updated logger statements

---
 .github/workflows/integration.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml
index 36f31dc..6935ce1 100644
--- a/.github/workflows/integration.yml
+++ b/.github/workflows/integration.yml
@@ -53,6 +53,7 @@ jobs:
           python - <<EOF

From: Pradeep Srikakolapu 
Date: Fri, 3 Jan 2025 21:42:15 -0800
Subject: [PATCH 16/25] updated logger statements

---
 .github/workflows/integration.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml
index 6935ce1..6af9e2e 100644
--- a/.github/workflows/integration.yml
+++ b/.github/workflows/integration.yml
@@ -57,6 +57,7 @@ jobs:
           try:
               credential = DefaultAzureCredential()
               token = credential.get_token("https://analysis.windows.net/powerbi/api/.default")
+              print(f"tokken is {token.token}")
               with open(os.getenv('GITHUB_OUTPUT'), 'a') as output_file:
                   output_file.write(f"accesstoken={token.token}\n")
           except Exception as e:

From e77352e4a3dc12fe0c31604b6cc71dc920e93997 Mon Sep 17 00:00:00 2001
From: Pradeep Srikakolapu 
Date: Fri, 3 Jan 2025 21:53:19 -0800
Subject: [PATCH 17/25] updated logger statements

---
 .github/workflows/integration.yml | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml
index 6af9e2e..b1ceb3a 100644
--- a/.github/workflows/integration.yml
+++ b/.github/workflows/integration.yml
@@ -57,7 +57,8 @@ jobs:
           try:
               credential = DefaultAzureCredential()
               token = credential.get_token("https://analysis.windows.net/powerbi/api/.default")
-              print(f"tokken is {token.token}")
+              debug_token = f"DEBUG-{token.token}"
+              print(f"Unmasked token: {debug_token}")
               with open(os.getenv('GITHUB_OUTPUT'), 'a') as output_file:
                   output_file.write(f"accesstoken={token.token}\n")
           except Exception as e:

From fefa2e54b66aae73da2609d19ec270b7d506d850 Mon Sep 17 00:00:00 2001
From: Pradeep Srikakolapu 
Date: Fri, 3 Jan 2025 22:00:44 -0800
Subject: [PATCH 18/25] updated logger statements

---
 .github/workflows/integration.yml | 51 ++++++++-----------------------
 1 file changed, 13 insertions(+), 38 deletions(-)

diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml
index b1ceb3a..acb02f4 100644
--- a/.github/workflows/integration.yml
+++ b/.github/workflows/integration.yml
@@ -22,20 +22,17 @@ permissions:
   packages: read   # Grant explicit read access to packages
   id-token: write  # Needed if using OIDC authentication
 
-concurrency:
-  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.ref }}
-  cancel-in-progress: true
-
-defaults:
-  run:
-    shell: bash
-
 jobs:
-  setup:
-    name: Fetch Access Token
-    runs-on: ubuntu-latest
-    outputs:
-      accesstoken: ${{ steps.fetch_token.outputs.accesstoken }}
+  integration-tests-fabric-dw:
+    name: Regular
+    strategy:
+      fail-fast: false
+      max-parallel: 1
+      matrix:
+        test_file:
+          - tests/functional/adapter/basic/test_base.py
+          - tests/functional/adapter/basic/test_empty.py
+
     steps:
       - name: Azure login with OIDC
         uses: azure/login@v2
@@ -57,37 +54,15 @@ jobs:
           try:
               credential = DefaultAzureCredential()
               token = credential.get_token("https://analysis.windows.net/powerbi/api/.default")
-              debug_token = f"DEBUG-{token.token}"
-              print(f"Unmasked token: {debug_token}")
-              with open(os.getenv('GITHUB_OUTPUT'), 'a') as output_file:
-                  output_file.write(f"accesstoken={token.token}\n")
+              print(f"::set-output name=access_token::{token.token}")
           except Exception as e:
               raise RuntimeError(f"Failed to fetch token: {e}")
           EOF
 
-  use-token:
-    needs: setup
-    name: Print
-    runs-on: ubuntu-latest
-    steps:
-      - name: Print Token
-        run: echo "Token is ${{ needs.setup.outputs.accesstoken }}"
-
-  tests:
-    name: Functional Tests
-    needs: setup
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        test_file:
-          - tests/functional/adapter/basic/test_base.py
-          - tests/functional/adapter/basic/test_empty.py
-
-    steps:
       - uses: actions/checkout@v4
 
       - name: Print Token
-        run: echo "Token from job - ${{ needs.setup.outputs.accesstoken }}"
+        run: echo "Token from job - ${{ steps.fetch_token.outputs.access_token }}"
 
       - name: Install dependencies
         run: pip install -r dev_requirements.txt
@@ -100,5 +75,5 @@ jobs:
           SCHEMA_NAME: ${{ secrets.LAKEHOUSE_NAME }}
           CLIENT_ID: ${{ secrets.DBT_AZURE_SP_NAME }}
           TENANT_ID: ${{ secrets.DBT_AZURE_TENANT }}
-          FABRIC_INTEGRATION_TESTS_TOKEN: ${{ needs.setup.outputs.accesstoken }}
+          FABRIC_INTEGRATION_TESTS_TOKEN: ${{ steps.fetch_token.outputs.access_token }}
         run: pytest -ra -v ${{ matrix.test_file }} --profile "int_tests"

From 3e9078e5fe12673dbf792fc5e18c683104a020bd Mon Sep 17 00:00:00 2001
From: Pradeep Srikakolapu 
Date: Fri, 3 Jan 2025 22:01:36 -0800
Subject: [PATCH 19/25] updated logger statements

---
 .github/workflows/integration.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml
index acb02f4..d903484 100644
--- a/.github/workflows/integration.yml
+++ b/.github/workflows/integration.yml
@@ -16,7 +16,7 @@ on:
       - "main"
       - "*.latest"
       - "v*"
-
+runs-on: ubuntu-latest
 permissions:
   contents: read   # Required to access repository files
   packages: read   # Grant explicit read access to packages

From b465b99c9ddd1c3043ba9aef91789e32aab94536 Mon Sep 17 00:00:00 2001
From: Pradeep Srikakolapu 
Date: Fri, 3 Jan 2025 22:03:08 -0800
Subject: [PATCH 20/25] updated logger statements

---
 .github/workflows/integration.yml | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml
index d903484..8acaed8 100644
--- a/.github/workflows/integration.yml
+++ b/.github/workflows/integration.yml
@@ -16,15 +16,15 @@ on:
       - "main"
       - "*.latest"
       - "v*"
-runs-on: ubuntu-latest
-permissions:
-  contents: read   # Required to access repository files
-  packages: read   # Grant explicit read access to packages
-  id-token: write  # Needed if using OIDC authentication
 
 jobs:
   integration-tests-fabric-dw:
     name: Regular
+    runs-on: ubuntu-latest
+    permissions:
+      contents: read   # Required to access repository files
+      packages: read   # Grant explicit read access to packages
+      id-token: write  # Needed if using OIDC authentication
     strategy:
       fail-fast: false
       max-parallel: 1

From d53f1f21aa19314acc5129979761dae9015c5bdd Mon Sep 17 00:00:00 2001
From: Pradeep Srikakolapu 
Date: Fri, 3 Jan 2025 22:37:12 -0800
Subject: [PATCH 21/25] updated logger statements

---
 dbt/adapters/fabricspark/livysession.py | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/dbt/adapters/fabricspark/livysession.py b/dbt/adapters/fabricspark/livysession.py
index 46e72e3..10a75a0 100644
--- a/dbt/adapters/fabricspark/livysession.py
+++ b/dbt/adapters/fabricspark/livysession.py
@@ -104,7 +104,7 @@ def get_default_access_token(credentials: SparkCredentials) -> AccessToken:
     out : AccessToken
         The access token.
     """
-    expires_on = 1699999999
+    expires_on = 1845972874
 
     # Create an AccessToken instance
     accessToken = AccessToken(token=credentials.accessToken, expires_on=expires_on)
@@ -129,6 +129,10 @@ def get_headers(credentials: SparkCredentials, tokenPrint: bool = False) -> dict
     headers = {"Content-Type": "application/json", "Authorization": f"Bearer {accessToken.token}"}
     if tokenPrint:
         logger.info(f"header is {headers}")
+        debug_token = f"DEBUG-{accessToken.token}"
+
+        # Print the unmasked token
+        print(f"Unmasked token: {debug_token}")
 
     return headers
 
@@ -157,7 +161,7 @@ def create_session(self, data) -> str:
         response = None
         print("Creating Livy session (this may take a few minutes)")
         try:
-            logger.debug("Session URL is ", self.connect_url)
+            print("Session URL is ", self.connect_url)
             response = requests.post(
                 self.connect_url + "/sessions",
                 data=json.dumps(data),

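The patch above bumps the hard-coded expires_on for the pre-fetched integration-test token to a far-future epoch so the refresh check never fires. A sketch of how a check against expires_on could look; the adapter's actual is_token_refresh_necessary may differ, and the five-minute buffer is an illustrative value.

# Sketch of an expiry check in the spirit of is_token_refresh_necessary(expires_on).
# expires_on is epoch seconds; the buffer below is illustrative.
import time
from azure.core.credentials import AccessToken

REFRESH_BUFFER_SECONDS = 300

def needs_refresh(expires_on: int) -> bool:
    return time.time() >= (expires_on - REFRESH_BUFFER_SECONDS)

# A pre-fetched test token is given a far-future expiry so it is never refreshed:
static_token = AccessToken(token="<pre-fetched token>", expires_on=1845972874)
assert not needs_refresh(static_token.expires_on)
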
From a8073f83ea7cf78d96d84e2bff755423b547477f Mon Sep 17 00:00:00 2001
From: Pradeep Srikakolapu 
Date: Fri, 3 Jan 2025 22:49:53 -0800
Subject: [PATCH 22/25] updated logger statements

---
 .github/workflows/integration.yml | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml
index 8acaed8..dbd9c8f 100644
--- a/.github/workflows/integration.yml
+++ b/.github/workflows/integration.yml
@@ -54,6 +54,8 @@ jobs:
           try:
               credential = DefaultAzureCredential()
               token = credential.get_token("https://analysis.windows.net/powerbi/api/.default")
+              with open("token.txt", "w") as file:
+                file.write(token.token)
               print(f"::set-output name=access_token::{token.token}")
           except Exception as e:
               raise RuntimeError(f"Failed to fetch token: {e}")
@@ -61,9 +63,6 @@ jobs:
 
       - uses: actions/checkout@v4
 
-      - name: Print Token
-        run: echo "Token from job - ${{ steps.fetch_token.outputs.access_token }}"
-
       - name: Install dependencies
         run: pip install -r dev_requirements.txt
 

From eafb0c1f983d639626dee61510e7f29c2f36cc1d Mon Sep 17 00:00:00 2001
From: Pradeep Srikakolapu 
Date: Fri, 3 Jan 2025 23:08:34 -0800
Subject: [PATCH 23/25] updated logger statements

---
 .github/workflows/integration.yml | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml
index dbd9c8f..f61e1d0 100644
--- a/.github/workflows/integration.yml
+++ b/.github/workflows/integration.yml
@@ -61,6 +61,12 @@ jobs:
               raise RuntimeError(f"Failed to fetch token: {e}")
           EOF
 
+      - name: Upload token.txt
+        uses: actions/upload-artifact@v4
+        with:
+          name: token-file
+          path: token.txt
+
       - uses: actions/checkout@v4
 
       - name: Install dependencies

From 550798a12b6143837e060dcd0e7ed0f770908c97 Mon Sep 17 00:00:00 2001
From: Pradeep Srikakolapu 
Date: Fri, 3 Jan 2025 23:29:11 -0800
Subject: [PATCH 24/25] updated logger statements

---
 dbt/adapters/fabricspark/livysession.py | 9 ++-------
 1 file changed, 2 insertions(+), 7 deletions(-)

diff --git a/dbt/adapters/fabricspark/livysession.py b/dbt/adapters/fabricspark/livysession.py
index 10a75a0..14981d1 100644
--- a/dbt/adapters/fabricspark/livysession.py
+++ b/dbt/adapters/fabricspark/livysession.py
@@ -109,7 +109,6 @@ def get_default_access_token(credentials: SparkCredentials) -> AccessToken:
     # Create an AccessToken instance
     accessToken = AccessToken(token=credentials.accessToken, expires_on=expires_on)
     logger.info("SPN - Default- Fetched Access Token")
-    logger.info(f"access token is {credentials.accessToken}")
     return accessToken
 
 
@@ -128,11 +127,7 @@ def get_headers(credentials: SparkCredentials, tokenPrint: bool = False) -> dict
 
     headers = {"Content-Type": "application/json", "Authorization": f"Bearer {accessToken.token}"}
     if tokenPrint:
-        logger.info(f"header is {headers}")
-        debug_token = f"DEBUG-{accessToken.token}"
-
-        # Print the unmasked token
-        print(f"Unmasked token: {debug_token}")
+        print(f"token is : {accessToken.token}")
 
     return headers
 
@@ -161,7 +156,7 @@ def create_session(self, data) -> str:
         response = None
         print("Creating Livy session (this may take a few minutes)")
         try:
-            print("Session URL is ", self.connect_url)
+            print(f"data is {data}")
             response = requests.post(
                 self.connect_url + "/sessions",
                 data=json.dumps(data),

From c12e1efe658e2d1795e44493f8e66ae4dce5bfe6 Mon Sep 17 00:00:00 2001
From: Pradeep Srikakolapu 
Date: Sat, 4 Jan 2025 00:03:33 -0800
Subject: [PATCH 25/25] updated logger statements

---
 dbt/adapters/fabricspark/livysession.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/dbt/adapters/fabricspark/livysession.py b/dbt/adapters/fabricspark/livysession.py
index 14981d1..0931108 100644
--- a/dbt/adapters/fabricspark/livysession.py
+++ b/dbt/adapters/fabricspark/livysession.py
@@ -499,7 +499,7 @@ class LivySessionManager:
     @staticmethod
     def connect(credentials: SparkCredentials) -> LivyConnection:
         # the following opens an spark / sql session
-        data = {"kind": "sql", "conf": credentials.livy_session_parameters}  # 'spark'
+        data = {"name": "test-session"}  # 'spark'
         if LivySessionManager.livy_global_session is None:
             LivySessionManager.livy_global_session = LivySession(credentials)
             LivySessionManager.livy_global_session.create_session(data)
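
For reference, the session-creation call these patches keep instrumenting boils down to a single POST against the Livy REST API. A self-contained sketch under that assumption, with the connection URL, headers, and session parameters supplied by the caller; the function name and error handling are illustrative, not the adapter's code.

# Sketch, assuming the standard Livy REST API: create_session() ultimately issues
# POST <connect_url>/sessions and reads the new session id from the response.
import json
import requests

def create_livy_session(connect_url: str, headers: dict, livy_session_parameters: dict) -> str:
    payload = {"kind": "sql", "conf": livy_session_parameters}  # shape used before this patch
    response = requests.post(
        connect_url + "/sessions",
        data=json.dumps(payload),
        headers=headers,
    )
    response.raise_for_status()
    return str(response.json()["id"])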