diff --git a/python/pyspark/context.py b/python/pyspark/context.py
index 23c8d4be2ffe4..c3334f2e9ec84 100644
--- a/python/pyspark/context.py
+++ b/python/pyspark/context.py
@@ -20,6 +20,7 @@
 import sys
 from threading import Lock
 from tempfile import NamedTemporaryFile
+from collections import namedtuple
 
 from pyspark import accumulators
 from pyspark.accumulators import Accumulator
@@ -87,7 +88,8 @@ def __init__(self, master=None, appName=None, sparkHome=None, pyFiles=None,
         if rdd._extract_concise_traceback() is not None:
             self._callsite = rdd._extract_concise_traceback()
         else:
-            self._callsite = {"function": None, "file": None, "line": None}
+            tempNamedTuple = namedtuple("Callsite", "function file linenum")
+            self._callsite = tempNamedTuple(function=None, file=None, linenum=None)
         SparkContext._ensure_initialized(self, gateway=gateway)
 
         self.environment = environment or {}
@@ -181,7 +183,7 @@ def _ensure_initialized(cls, instance=None, gateway=None):
                     # Raise error if there is already a running Spark context
                     raise ValueError("Cannot run multiple SparkContexts at once; existing SparkContext(app=%s, master=%s)" \
                         " created by %s at %s:%s " \
-                        % (currentAppName, currentMaster, callsite['function'], callsite['file'], callsite['line']))
+                        % (currentAppName, currentMaster, callsite.function, callsite.file, callsite.linenum))
                 else:
                     SparkContext._active_spark_context = instance
 
diff --git a/python/pyspark/rdd.py b/python/pyspark/rdd.py
index 315aadceecb77..5138e6f62b8aa 100644
--- a/python/pyspark/rdd.py
+++ b/python/pyspark/rdd.py
@@ -18,6 +18,7 @@
 from base64 import standard_b64encode as b64enc
 import copy
 from collections import defaultdict
+from collections import namedtuple
 from itertools import chain, ifilter, imap
 import operator
 import os
@@ -47,6 +48,7 @@ def _extract_concise_traceback():
     with function name, file name and line number
     """
     tb = traceback.extract_stack()
+    callsite = namedtuple("Callsite", "function file linenum")
     if len(tb) == 0:
         return None
     file, line, module, what = tb[len(tb) - 1]
@@ -59,10 +61,10 @@ def _extract_concise_traceback():
             break
     if first_spark_frame == 0:
         file, line, fun, what = tb[0]
-        return {"function": fun, "file": file, "line": line}
+        return callsite(function=fun, file=file, linenum=line)
     sfile, sline, sfun, swhat = tb[first_spark_frame]
     ufile, uline, ufun, uwhat = tb[first_spark_frame-1]
-    return {"function": sfun, "file": ufile, "line": uline}
+    return callsite(function=sfun, file=ufile, linenum=uline)
 
 _spark_stack_depth = 0
 
@@ -70,7 +72,7 @@ class _JavaStackTrace(object):
     def __init__(self, sc):
         tb = _extract_concise_traceback()
         if tb is not None:
-            self._traceback = "%s at %s:%s" % (tb["function"], tb["file"], tb["line"])
+            self._traceback = "%s at %s:%s" % (tb.function, tb.file, tb.linenum)
         else:
             self._traceback = "Error! Could not extract traceback info"
         self._context = sc
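
For reviewers, here is a minimal self-contained sketch of the pattern this patch introduces: callsite info is carried in a `Callsite` namedtuple instead of a plain dict, so consumers use attribute access (`cs.function`, `cs.file`, `cs.linenum`) rather than key lookups. The `current_callsite()` helper below is a hypothetical illustration, not part of the patch; it only mirrors how `_extract_concise_traceback()` unpacks `traceback.extract_stack()` frames.

```python
from collections import namedtuple
import traceback

# Same field layout as the Callsite namedtuple added in this patch.
Callsite = namedtuple("Callsite", "function file linenum")


def current_callsite():
    """Hypothetical helper: return the caller's function, file and line
    as a Callsite, mirroring the shape _extract_concise_traceback()
    returns after this change."""
    tb = traceback.extract_stack()
    if len(tb) < 2:
        # Same "no usable traceback" fallback shape as in SparkContext.__init__.
        return Callsite(function=None, file=None, linenum=None)
    # Frames unpack as (filename, lineno, name, text); -2 is our caller.
    file, line, fun, what = tb[-2]
    return Callsite(function=fun, file=file, linenum=line)


if __name__ == "__main__":
    cs = current_callsite()
    # Attribute access replaces the old dict lookups like cs["function"].
    print("%s at %s:%s" % (cs.function, cs.file, cs.linenum))
```

One upside of the namedtuple over the dict here is that a misspelled field fails loudly with `AttributeError` instead of a `KeyError` buried in string formatting, and the fields are fixed at construction time.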