"""
Entry point module (keep at root):
This module starts the debugger.
"""
import sys # @NoMove
if sys.version_info[:2] < (3, 6):
    raise RuntimeError(
        "The PyDev.Debugger requires Python 3.6 onwards to be run. If you need to use an older Python version, use an older version of the debugger."
    )
import os
try:
    # Just empty packages to check if they're in the PYTHONPATH.
    import _pydev_bundle
except ImportError:
    # On the first import of a pydevd module, add pydevd itself to the PYTHONPATH
    # if its dependencies cannot be imported.
    sys.path.append(os.path.dirname(os.path.abspath(__file__)))
    import _pydev_bundle
# Import this first as it'll check for shadowed modules and will make sure that we import
# things as needed for gevent.
from _pydevd_bundle import pydevd_constants
from typing import Optional, Tuple
from types import FrameType
import atexit
import dis
import io
from collections import defaultdict
from contextlib import contextmanager
from functools import partial
import itertools
import traceback
import weakref
import getpass as getpass_mod
import functools
import pydevd_file_utils
from _pydevd_bundle.pydevd_dont_trace_files import LIB_FILES_IN_DONT_TRACE_DIRS
from _pydev_bundle import pydev_imports, pydev_log
from _pydev_bundle._pydev_filesystem_encoding import getfilesystemencoding
from _pydev_bundle.pydev_is_thread_alive import is_thread_alive
from _pydev_bundle.pydev_override import overrides
from _pydev_bundle._pydev_saved_modules import threading, time, thread, ThreadingEvent
from _pydevd_bundle import pydevd_extension_utils, pydevd_frame_utils
from _pydevd_bundle.pydevd_filtering import FilesFiltering, glob_matches_path
from _pydevd_bundle import pydevd_io, pydevd_vm_type, pydevd_defaults
from _pydevd_bundle import pydevd_utils
from _pydevd_bundle import pydevd_runpy
from _pydev_bundle.pydev_console_utils import DebugConsoleStdIn
from _pydevd_bundle.pydevd_additional_thread_info import set_additional_thread_info, remove_additional_info
from _pydevd_bundle.pydevd_breakpoints import ExceptionBreakpoint, get_exception_breakpoint
from _pydevd_bundle.pydevd_comm_constants import (
    CMD_THREAD_SUSPEND,
    CMD_STEP_INTO,
    CMD_SET_BREAK,
    CMD_STEP_INTO_MY_CODE,
    CMD_STEP_OVER,
    CMD_SMART_STEP_INTO,
    CMD_RUN_TO_LINE,
    CMD_SET_NEXT_STATEMENT,
    CMD_STEP_RETURN,
    CMD_ADD_EXCEPTION_BREAK,
    CMD_STEP_RETURN_MY_CODE,
    CMD_STEP_OVER_MY_CODE,
    constant_to_str,
    CMD_STEP_INTO_COROUTINE,
)
from _pydevd_bundle.pydevd_constants import (
    get_thread_id,
    get_current_thread_id,
    DebugInfoHolder,
    PYTHON_SUSPEND,
    STATE_SUSPEND,
    STATE_RUN,
    get_frame,
    clear_cached_thread_id,
    INTERACTIVE_MODE_AVAILABLE,
    SHOW_DEBUG_INFO_ENV,
    NULL,
    NO_FTRACE,
    IS_IRONPYTHON,
    JSON_PROTOCOL,
    IS_CPYTHON,
    HTTP_JSON_PROTOCOL,
    USE_CUSTOM_SYS_CURRENT_FRAMES_MAP,
    call_only_once,
    ForkSafeLock,
    IGNORE_BASENAMES_STARTING_WITH,
    EXCEPTION_TYPE_UNHANDLED,
    SUPPORT_GEVENT,
    PYDEVD_IPYTHON_COMPATIBLE_DEBUGGING,
    PYDEVD_IPYTHON_CONTEXT,
    PYDEVD_USE_SYS_MONITORING,
)
from _pydevd_bundle.pydevd_defaults import PydevdCustomization # Note: import alias used on pydev_monkey.
from _pydevd_bundle.pydevd_custom_frames import CustomFramesContainer, custom_frames_container_init
from _pydevd_bundle.pydevd_dont_trace_files import DONT_TRACE, PYDEV_FILE, LIB_FILE, DONT_TRACE_DIRS
from _pydevd_bundle.pydevd_extension_api import DebuggerEventHandler
from _pydevd_bundle.pydevd_frame_utils import add_exception_to_frame, remove_exception_from_frame
from _pydevd_bundle.pydevd_net_command_factory_xml import NetCommandFactory
from _pydevd_bundle.pydevd_trace_dispatch import (
    trace_dispatch as _trace_dispatch,
    global_cache_skips,
    global_cache_frame_skips,
    fix_top_level_trace_and_get_trace_func,
    USING_CYTHON,
)
from _pydevd_bundle.pydevd_utils import save_main_module, is_current_thread_main_thread, import_attr_from_module
from _pydevd_frame_eval.pydevd_frame_eval_main import frame_eval_func, dummy_trace_dispatch, USING_FRAME_EVAL
import pydev_ipython # @UnusedImport
from _pydevd_bundle.pydevd_source_mapping import SourceMapping
from _pydevd_bundle.pydevd_concurrency_analyser.pydevd_concurrency_logger import (
    ThreadingLogger,
    AsyncioLogger,
    send_concurrency_message,
    cur_time,
)
from _pydevd_bundle.pydevd_concurrency_analyser.pydevd_thread_wrappers import wrap_threads
from pydevd_file_utils import (
    get_abs_path_real_path_and_base_from_frame,
    get_abs_path_real_path_and_base_from_file,
    NORM_PATHS_AND_BASE_CONTAINER,
)
from pydevd_file_utils import get_fullname, get_package_dir
from os.path import abspath as os_path_abspath
import pydevd_tracing
from _pydevd_bundle.pydevd_comm import InternalThreadCommand, InternalThreadCommandForAnyThread, create_server_socket, FSNotifyThread
from _pydevd_bundle.pydevd_comm import (
    InternalConsoleExec,
    _queue,
    ReaderThread,
    GetGlobalDebugger,
    get_global_debugger,
    set_global_debugger,
    WriterThread,
    start_client,
    start_server,
    InternalGetBreakpointException,
    InternalSendCurrExceptionTrace,
    InternalSendCurrExceptionTraceProceeded,
)
from _pydevd_bundle.pydevd_daemon_thread import PyDBDaemonThread, mark_as_pydevd_daemon_thread
from _pydevd_bundle.pydevd_process_net_command_json import PyDevJsonCommandProcessor
from _pydevd_bundle.pydevd_process_net_command import process_net_command
from _pydevd_bundle.pydevd_net_command import NetCommand, NULL_NET_COMMAND
from _pydevd_bundle.pydevd_breakpoints import stop_on_unhandled_exception
from _pydevd_bundle.pydevd_collect_bytecode_info import collect_try_except_info, collect_return_info, collect_try_except_info_from_source
from _pydevd_bundle.pydevd_suspended_frames import SuspendedFramesManager
from socket import SHUT_RDWR
from _pydevd_bundle.pydevd_api import PyDevdAPI
from _pydevd_bundle.pydevd_timeout import TimeoutTracker
from _pydevd_bundle.pydevd_thread_lifecycle import suspend_all_threads, mark_thread_suspended
if PYDEVD_USE_SYS_MONITORING:
    from _pydevd_sys_monitoring import pydevd_sys_monitoring
pydevd_gevent_integration = None
if SUPPORT_GEVENT:
    try:
        from _pydevd_bundle import pydevd_gevent_integration
    except:
        pydev_log.exception(
            "pydevd: GEVENT_SUPPORT is set but gevent is not available in the environment.\n"
            "Please unset GEVENT_SUPPORT from the environment variables or install gevent."
        )
    else:
        pydevd_gevent_integration.log_gevent_debug_info()
if USE_CUSTOM_SYS_CURRENT_FRAMES_MAP:
    from _pydevd_bundle.pydevd_constants import constructed_tid_to_last_frame
__version_info__ = (3, 1, 0)
__version_info_str__ = []
for v in __version_info__:
    __version_info_str__.append(str(v))
__version__ = ".".join(__version_info_str__)
# IMPORTANT: pydevd_constants must be the 1st thing defined because it'll keep a reference to the original sys._getframe
def install_breakpointhook(pydevd_breakpointhook=None):
    if pydevd_breakpointhook is None:

        def pydevd_breakpointhook(*args, **kwargs):
            hookname = os.getenv("PYTHONBREAKPOINT")
            if (
                hookname is not None
                and len(hookname) > 0
                and hasattr(sys, "__breakpointhook__")
                and sys.__breakpointhook__ != pydevd_breakpointhook
            ):
                sys.__breakpointhook__(*args, **kwargs)
            else:
                settrace(*args, **kwargs)

    if sys.version_info[0:2] >= (3, 7):
        # There are some choices on how to provide the breakpoint hook. Namely, we can provide a
        # PYTHONBREAKPOINT which provides the import path for a method to be executed or we
        # can override sys.breakpointhook.
        # pydevd overrides sys.breakpointhook instead of providing an environment variable because
        # it's possible that the debugger starts the user program but is not available in the
        # PYTHONPATH (and would thus fail to be imported if PYTHONBREAKPOINT was set to pydevd.settrace).
        # Note that the implementation still takes PYTHONBREAKPOINT in account (so, if it was provided
        # by someone else, it'd still work).
        sys.breakpointhook = pydevd_breakpointhook
    else:
        if sys.version_info[0] >= 3:
            import builtins as __builtin__  # Py3 noqa
        else:
            import __builtin__  # noqa

        # In older versions, breakpoint() isn't really available, so, install the hook directly
        # in the builtins.
        __builtin__.breakpoint = pydevd_breakpointhook
        sys.__breakpointhook__ = pydevd_breakpointhook
# Install the breakpoint hook at import time.
install_breakpointhook()
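

# --- Illustrative sketch (not part of pydevd): how the installed hook routes breakpoint(). ---
# The helper below is documentation only and is never called by pydevd. It assumes a Python 3.7+
# interpreter, where the builtin breakpoint() delegates to sys.breakpointhook (replaced above).
def _example_breakpointhook_routing():
    """Hypothetical walkthrough of the hook installed by install_breakpointhook()."""
    # With no PYTHONBREAKPOINT override, a plain breakpoint() in user code reaches pydevd's hook,
    # which forwards to settrace() and lets the debugger suspend the program at the caller.
    os.environ.pop("PYTHONBREAKPOINT", None)
    breakpoint()  # -> pydevd_breakpointhook(...) -> settrace(...)

    # If PYTHONBREAKPOINT names another hook (and sys.__breakpointhook__ differs from pydevd's),
    # pydevd steps aside and forwards the call to the original sys.__breakpointhook__ instead,
    # which is the one that actually honors PYTHONBREAKPOINT.
    os.environ["PYTHONBREAKPOINT"] = "pdb.set_trace"
    breakpoint()  # -> sys.__breakpointhook__(...)

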
from _pydevd_bundle.pydevd_plugin_utils import PluginManager
threadingEnumerate = threading.enumerate
threadingCurrentThread = threading.current_thread
try:
"dummy".encode("utf-8") # Added because otherwise Jython 2.2.1 wasn't finding the encoding (if it wasn't loaded in the main thread).
except:
pass
_global_redirect_stdout_to_server = False
_global_redirect_stderr_to_server = False
file_system_encoding = getfilesystemencoding()
_CACHE_FILE_TYPE = {}
pydev_log.debug("Using GEVENT_SUPPORT: %s", pydevd_constants.SUPPORT_GEVENT)
pydev_log.debug("Using GEVENT_SHOW_PAUSED_GREENLETS: %s", pydevd_constants.GEVENT_SHOW_PAUSED_GREENLETS)
pydev_log.debug("pydevd __file__: %s", os.path.abspath(__file__))
pydev_log.debug("Using PYDEVD_IPYTHON_COMPATIBLE_DEBUGGING: %s", pydevd_constants.PYDEVD_IPYTHON_COMPATIBLE_DEBUGGING)
if pydevd_constants.PYDEVD_IPYTHON_COMPATIBLE_DEBUGGING:
pydev_log.debug("PYDEVD_IPYTHON_CONTEXT: %s", pydevd_constants.PYDEVD_IPYTHON_CONTEXT)
TIMEOUT_SLOW = 0.2
TIMEOUT_FAST = 1.0 / 50
# =======================================================================================================================
# PyDBCommandThread
# =======================================================================================================================
class PyDBCommandThread(PyDBDaemonThread):
    def __init__(self, py_db):
        PyDBDaemonThread.__init__(self, py_db)
        self._py_db_command_thread_event = py_db._py_db_command_thread_event
        self.name = "pydevd.CommandThread"

    @overrides(PyDBDaemonThread._on_run)
    def _on_run(self):
        # Delay a bit this initialization to wait for the main program to start.
        self._py_db_command_thread_event.wait(TIMEOUT_SLOW)

        if self._kill_received:
            return

        try:
            while not self._kill_received:
                try:
                    self.py_db.process_internal_commands(("*",))
                except:
                    pydev_log.info("Finishing debug communication...(2)")
                self._py_db_command_thread_event.clear()
                self._py_db_command_thread_event.wait(TIMEOUT_SLOW)
        except:
            try:
                pydev_log.debug(sys.exc_info()[0])
            except:
                # In interpreter shutdown many things can go wrong (any module variables may
                # be None, streams can be closed, etc).
                pass

            # only got this error in interpreter shutdown
            # pydev_log.info('Finishing debug communication...(3)')

    @overrides(PyDBDaemonThread.do_kill_pydev_thread)
    def do_kill_pydev_thread(self):
        PyDBDaemonThread.do_kill_pydev_thread(self)
        # Set flag so that it can exit before the usual timeout.
        self._py_db_command_thread_event.set()
# =======================================================================================================================
# CheckAliveThread
# Non-daemon thread: guarantees that all data is written even if program is finished
# =======================================================================================================================
class CheckAliveThread(PyDBDaemonThread):
    def __init__(self, py_db):
        PyDBDaemonThread.__init__(self, py_db)
        self.name = "pydevd.CheckAliveThread"
        self.daemon = False
        self._wait_event = ThreadingEvent()

    @overrides(PyDBDaemonThread._on_run)
    def _on_run(self):
        py_db = self.py_db

        def can_exit():
            with py_db._main_lock:
                # Note: it's important to get the lock besides checking that it's empty (this
                # means that we're not in the middle of some command processing).
                writer = py_db.writer
                writer_empty = writer is not None and writer.empty()

            return not py_db.has_user_threads_alive() and writer_empty

        try:
            while not self._kill_received:
                self._wait_event.wait(TIMEOUT_SLOW)

                if can_exit():
                    break

                py_db.check_output_redirect()

            if can_exit():
                pydev_log.debug("No threads alive, finishing debug session")
                py_db.dispose_and_kill_all_pydevd_threads()
        except:
            pydev_log.exception()

    def join(self, timeout=None):
        # If someone tries to join this thread, mark it to be killed.
        # This is the case for CherryPy when auto-reload is turned on.
        self.do_kill_pydev_thread()
        PyDBDaemonThread.join(self, timeout=timeout)

    @overrides(PyDBDaemonThread.do_kill_pydev_thread)
    def do_kill_pydev_thread(self):
        PyDBDaemonThread.do_kill_pydev_thread(self)
        # Set flag so that it can exit before the usual timeout.
        self._wait_event.set()
class AbstractSingleNotificationBehavior(object):
"""
The basic usage should be:
# Increment the request time for the suspend.
single_notification_behavior.increment_suspend_time()
# Notify that this is a pause request (when a pause, not a breakpoint).
single_notification_behavior.on_pause()
# Mark threads to be suspended.
set_suspend(...)
# On do_wait_suspend, use notify_thread_suspended:
def do_wait_suspend(...):
with single_notification_behavior.notify_thread_suspended(thread_id, thread, reason):
...
"""
__slots__ = [
"_last_resume_notification_time",
"_last_suspend_notification_time",
"_lock",
"_next_request_time",
"_suspend_time_request",
"_suspended_thread_id_to_thread",
"_pause_requested",
"_py_db",
]
NOTIFY_OF_PAUSE_TIMEOUT = 0.5
def __init__(self, py_db):
self._py_db = weakref.ref(py_db)
self._next_request_time = partial(next, itertools.count())
self._last_suspend_notification_time = -1
self._last_resume_notification_time = -1
self._suspend_time_request = self._next_request_time()
self._lock = thread.allocate_lock()
self._suspended_thread_id_to_thread = {}
self._pause_requested = False
def send_suspend_notification(self, thread_id, thread, stop_reason):
raise AssertionError("abstract: subclasses must override.")
def send_resume_notification(self, thread_id):
raise AssertionError("abstract: subclasses must override.")
def increment_suspend_time(self):
with self._lock:
self._suspend_time_request = self._next_request_time()
def on_pause(self):
# Upon a pause, we should force sending new suspend notifications
# if no notification is sent after some time and there's some thread already stopped.
with self._lock:
self._pause_requested = True
global_suspend_time = self._suspend_time_request
py_db = self._py_db()
if py_db is not None:
py_db.timeout_tracker.call_on_timeout(
self.NOTIFY_OF_PAUSE_TIMEOUT, self._notify_after_timeout, kwargs={"global_suspend_time": global_suspend_time}
)
def _notify_after_timeout(self, global_suspend_time):
with self._lock:
if self._suspended_thread_id_to_thread:
if global_suspend_time > self._last_suspend_notification_time:
self._last_suspend_notification_time = global_suspend_time
# Notify about any thread which is currently suspended.
pydev_log.info("Sending suspend notification after timeout.")
thread_id, thread = next(iter(self._suspended_thread_id_to_thread.items()))
self.send_suspend_notification(thread_id, thread, CMD_THREAD_SUSPEND)
def on_thread_suspend(self, thread_id, thread, stop_reason):
with self._lock:
pause_requested = self._pause_requested
if pause_requested:
# When a suspend notification is sent, reset the pause flag.
self._pause_requested = False
self._suspended_thread_id_to_thread[thread_id] = thread
# CMD_THREAD_SUSPEND should always be a side-effect of a break, so, only
# issue for a CMD_THREAD_SUSPEND if a pause is pending.
if stop_reason != CMD_THREAD_SUSPEND or pause_requested:
if self._suspend_time_request > self._last_suspend_notification_time:
pydev_log.info("Sending suspend notification.")
self._last_suspend_notification_time = self._suspend_time_request
self.send_suspend_notification(thread_id, thread, stop_reason)
else:
pydev_log.info(
"Suspend not sent (it was already sent). Last suspend % <= Last resume %s",
self._last_suspend_notification_time,
self._last_resume_notification_time,
)
else:
pydev_log.info(
"Suspend not sent because stop reason is thread suspend and pause was not requested.",
)
def on_thread_resume(self, thread_id, thread):
# on resume (step, continue all):
with self._lock:
self._suspended_thread_id_to_thread.pop(thread_id)
if self._last_resume_notification_time < self._last_suspend_notification_time:
pydev_log.info("Sending resume notification.")
self._last_resume_notification_time = self._last_suspend_notification_time
self.send_resume_notification(thread_id)
else:
pydev_log.info(
"Resume not sent (it was already sent). Last resume %s >= Last suspend %s",
self._last_resume_notification_time,
self._last_suspend_notification_time,
)
@contextmanager
def notify_thread_suspended(self, thread_id, thread, stop_reason):
self.on_thread_suspend(thread_id, thread, stop_reason)
try:
yield # At this point the thread must be actually suspended.
finally:
self.on_thread_resume(thread_id, thread)
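

# --- Illustrative sketch (not part of pydevd): the call pattern expected by the class above. ---
# Documentation only; it is never called. `py_db`, `thread_id`, `thread` and `stop_reason` stand
# in for the real objects the debugger would pass around.
def _example_single_notification_usage(py_db, thread_id, thread, stop_reason):
    """Hypothetical walkthrough of AbstractSingleNotificationBehavior's intended usage."""
    behavior = ThreadsSuspendedSingleNotification(py_db)  # concrete subclass defined just below

    # 1. A new suspend request bumps the request time so that it's considered newer than
    #    whatever request was last notified to the client.
    behavior.increment_suspend_time()

    # 2. If this is an explicit pause (not a breakpoint hit), flag it; a timeout then forces a
    #    suspend notification even if no thread reports itself as suspended quickly enough.
    behavior.on_pause()

    # 3. When a thread actually stops, do_wait_suspend() wraps the wait: on entry a suspend
    #    notification is sent if one wasn't already sent for this request, and on exit a matching
    #    resume notification is sent.
    with behavior.notify_thread_suspended(thread_id, thread, stop_reason):
        pass  # ... the thread stays suspended while inside this block ...

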
class ThreadsSuspendedSingleNotification(AbstractSingleNotificationBehavior):
    __slots__ = AbstractSingleNotificationBehavior.__slots__ + ["multi_threads_single_notification", "_callbacks", "_callbacks_lock"]

    def __init__(self, py_db):
        AbstractSingleNotificationBehavior.__init__(self, py_db)
        # If True, pydevd will send a single notification when all threads are suspended/resumed.
        self.multi_threads_single_notification = False
        self._callbacks_lock = threading.Lock()
        self._callbacks = []

    def add_on_resumed_callback(self, callback):
        with self._callbacks_lock:
            self._callbacks.append(callback)

    @overrides(AbstractSingleNotificationBehavior.send_resume_notification)
    def send_resume_notification(self, thread_id):
        py_db = self._py_db()
        if py_db is not None:
            py_db.writer.add_command(py_db.cmd_factory.make_thread_resume_single_notification(thread_id))

            with self._callbacks_lock:
                callbacks = self._callbacks
                self._callbacks = []

            for callback in callbacks:
                callback()

    @overrides(AbstractSingleNotificationBehavior.send_suspend_notification)
    def send_suspend_notification(self, thread_id, thread, stop_reason):
        py_db = self._py_db()
        if py_db is not None:
            py_db.writer.add_command(py_db.cmd_factory.make_thread_suspend_single_notification(py_db, thread_id, thread, stop_reason))

    @overrides(AbstractSingleNotificationBehavior.notify_thread_suspended)
    @contextmanager
    def notify_thread_suspended(self, thread_id, thread, stop_reason):
        if self.multi_threads_single_notification:
            pydev_log.info("Thread suspend mode: single notification")
            with AbstractSingleNotificationBehavior.notify_thread_suspended(self, thread_id, thread, stop_reason):
                yield
        else:
            pydev_log.info("Thread suspend mode: NOT single notification")
            yield
class _Authentication(object):
__slots__ = ["access_token", "client_access_token", "_authenticated", "_wrong_attempts"]
def __init__(self):
# A token to be send in the command line or through the settrace api -- when such token
# is given, the first message sent to the IDE must pass the same token to authenticate.
# Note that if a disconnect is sent, the same message must be resent to authenticate.
self.access_token = None
# This token is the one that the client requires to accept a connection from pydevd
# (it's stored here and just passed back when required, it's not used internally
# for anything else).
self.client_access_token = None
self._authenticated = None
self._wrong_attempts = 0
def is_authenticated(self):
if self._authenticated is None:
return self.access_token is None
return self._authenticated
def login(self, access_token):
if self._wrong_attempts >= 10: # A user can fail to authenticate at most 10 times.
return
self._authenticated = access_token == self.access_token
if not self._authenticated:
self._wrong_attempts += 1
else:
self._wrong_attempts = 0
def logout(self):
self._authenticated = None
self._wrong_attempts = 0
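

# --- Illustrative sketch (not part of pydevd): how the access-token handshake above behaves. ---
# Documentation only; pydevd drives _Authentication from its command processors, not from here.
def _example_authentication_flow():
    """Hypothetical walkthrough of the _Authentication state machine."""
    auth = _Authentication()
    assert auth.is_authenticated()  # no access token configured: every client is accepted

    auth.access_token = "secret"  # e.g. passed on the command line or via the settrace API
    assert not auth.is_authenticated()  # now the first client message must authenticate

    auth.login("wrong-token")  # counted as one of the (at most 10) failed attempts
    assert not auth.is_authenticated()

    auth.login("secret")  # correct token: authenticated, failed-attempt counter reset
    assert auth.is_authenticated()

    auth.logout()  # after a disconnect the client must send the token again
    assert not auth.is_authenticated()

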
class PyDB(object):
"""Main debugging class
Lots of stuff going on here:
PyDB starts two threads on startup that connect to remote debugger (RDB)
The threads continuously read & write commands to RDB.
PyDB communicates with these threads through command queues.
Every RDB command is processed by calling process_net_command.
Every PyDB net command is sent to the net by posting NetCommand to WriterThread queue
Some commands need to be executed on the right thread (suspend/resume & friends)
These are placed on the internal command queue.
"""
# Direct child pids which should not be terminated when terminating processes.
# Note: class instance because it should outlive PyDB instances.
dont_terminate_child_pids = set()
def __init__(self, set_as_global=True):
if set_as_global:
pydevd_tracing.replace_sys_set_trace_func()
self.authentication = _Authentication()
self.reader = None
self.writer = None
self._fsnotify_thread = None
self.created_pydb_daemon_threads = {}
self._waiting_for_connection_thread = None
self._on_configuration_done_event = ThreadingEvent()
self.check_alive_thread = None
self.py_db_command_thread = None
self.quitting = None
self.cmd_factory = NetCommandFactory()
self._cmd_queue = defaultdict(_queue.Queue) # Key is thread id or '*', value is Queue
self._thread_events = defaultdict(ThreadingEvent) # Key is thread id or '*', value is Event
self.suspended_frames_manager = SuspendedFramesManager()
self._files_filtering = FilesFiltering()
self.timeout_tracker = TimeoutTracker(self)
# Note: when the source mapping is changed we also have to clear the file types cache
# (because if a given file is a part of the project or not may depend on it being
# defined in the source mapping).
self.source_mapping = SourceMapping(on_source_mapping_changed=self._clear_caches)
# Determines whether we should terminate child processes when asked to terminate.
self.terminate_child_processes = True
# Determines whether we should try to do a soft terminate (i.e.: interrupt the main
# thread with a KeyboardInterrupt).
self.terminate_keyboard_interrupt = False
# Set to True after a keyboard interrupt is requested the first time.
self.keyboard_interrupt_requested = False
# These are the breakpoints received by the PyDevdAPI. They are meant to store
# the breakpoints in the api -- its actual contents are managed by the api.
self.api_received_breakpoints = {}
# These are the breakpoints meant to be consumed during runtime.
self.breakpoints = {}
self.function_breakpoint_name_to_breakpoint = {}
# Set communication protocol
PyDevdAPI().set_protocol(self, 0, PydevdCustomization.DEFAULT_PROTOCOL)
self.variable_presentation = PyDevdAPI.VariablePresentation()
# mtime to be raised when something that will affect the
# tracing in place (such as breakpoints change or filtering).
self.mtime = 0
self.file_to_id_to_line_breakpoint = {}
self.file_to_id_to_plugin_breakpoint = {}
# Note: breakpoints dict should not be mutated: a copy should be created
# and later it should be assigned back (to prevent concurrency issues).
self.break_on_uncaught_exceptions = {}
self.break_on_caught_exceptions = {}
self.break_on_user_uncaught_exceptions = {}
self.ready_to_run = False
self._main_lock = thread.allocate_lock()
self._lock_running_thread_ids = thread.allocate_lock()
self._lock_create_fs_notify = thread.allocate_lock()
self._py_db_command_thread_event = ThreadingEvent()
if set_as_global:
CustomFramesContainer._py_db_command_thread_event = self._py_db_command_thread_event
self.pydb_disposed = False
self._wait_for_threads_to_finish_called = False
self._wait_for_threads_to_finish_called_lock = thread.allocate_lock()
self._wait_for_threads_to_finish_called_event = ThreadingEvent()
self.terminate_requested = False
self._disposed_lock = thread.allocate_lock()
self.signature_factory = None
self.SetTrace = pydevd_tracing.SetTrace
self.skip_on_exceptions_thrown_in_same_context = False
self.ignore_exceptions_thrown_in_lines_with_ignore_exception = True
# Suspend debugger even if breakpoint condition raises an exception.
# May be changed with CMD_PYDEVD_JSON_CONFIG.
self.skip_suspend_on_breakpoint_exception = () # By default suspend on any Exception.
self.skip_print_breakpoint_exception = () # By default print on any Exception.
# By default user can step into properties getter/setter/deleter methods
self.disable_property_trace = False
self.disable_property_getter_trace = False
self.disable_property_setter_trace = False
self.disable_property_deleter_trace = False
# this is a dict of thread ids pointing to thread ids. Whenever a command is passed to the java end that
# acknowledges that a thread was created, the thread id should be passed here -- and if at some time we do not
# find that thread alive anymore, we must remove it from this list and make the java side know that the thread
# was killed.
self._running_thread_ids = {}
# Note: also access '_enable_thread_notifications' with '_lock_running_thread_ids'
self._enable_thread_notifications = False
self._set_breakpoints_with_id = False
# This attribute holds the file-> lines which have an @IgnoreException.
self.filename_to_lines_where_exceptions_are_ignored = {}
# working with plugins (lazily initialized)
self.plugin = None
self.has_plugin_line_breaks = False
self.has_plugin_exception_breaks = False
self.thread_analyser = None
self.asyncio_analyser = None
# The GUI event loop that's going to run.
# Possible values:
# matplotlib - Whatever GUI backend matplotlib is using.
# 'wx'/'qt'/'none'/... - GUI toolkits that have bulitin support. See pydevd_ipython/inputhook.py:24.
# Other - A custom function that'll be imported and run.
self._gui_event_loop = "matplotlib"
self._installed_gui_support = False
self.gui_in_use = False
# GUI event loop support in debugger
self.activate_gui_function = None
# matplotlib support in debugger and debug console
self.mpl_hooks_in_debug_console = False
self.mpl_modules_for_patching = {}
self._filename_to_not_in_scope = {}
self.first_breakpoint_reached = False
self._exclude_filters_enabled = self._files_filtering.use_exclude_filters()
self._is_libraries_filter_enabled = self._files_filtering.use_libraries_filter()
self.is_files_filter_enabled = self._exclude_filters_enabled or self._is_libraries_filter_enabled
self.show_return_values = False
self.remove_return_values_flag = False
self.redirect_output = False
# Note that besides the `redirect_output` flag, we also need to consider that someone
# else is already redirecting (i.e.: debugpy).
self.is_output_redirected = False
# this flag disables frame evaluation even if it's available
self.use_frame_eval = True
# If True, pydevd will send a single notification when all threads are suspended/resumed.
self._threads_suspended_single_notification = ThreadsSuspendedSingleNotification(self)
# If True a step command will do a step in one thread and will also resume all other threads.
self.stepping_resumes_all_threads = False
self._local_thread_trace_func = threading.local()
self._server_socket_ready_event = ThreadingEvent()
self._server_socket_name = None
# Bind many locals to the debugger because upon teardown those names may become None
# in the namespace (and thus can't be relied upon unless the reference was previously
# saved).
if IS_IRONPYTHON:
# A partial() cannot be used in IronPython for sys.settrace.
def new_trace_dispatch(frame, event, arg):
return _trace_dispatch(self, frame, event, arg)
self.trace_dispatch = new_trace_dispatch
else:
self.trace_dispatch = partial(_trace_dispatch, self)
self.fix_top_level_trace_and_get_trace_func = fix_top_level_trace_and_get_trace_func
self.frame_eval_func = frame_eval_func
self.dummy_trace_dispatch = dummy_trace_dispatch
# Note: this is different from pydevd_constants.thread_get_ident because we want Jython
# to be None here because it also doesn't have threading._active.
try:
self.threading_get_ident = threading.get_ident # Python 3
self.threading_active = threading._active
except:
try:
self.threading_get_ident = threading._get_ident # Python 2 noqa
self.threading_active = threading._active
except:
self.threading_get_ident = None # Jython
self.threading_active = None
self.threading_current_thread = threading.currentThread
self.set_additional_thread_info = set_additional_thread_info
self.stop_on_unhandled_exception = stop_on_unhandled_exception
self.collect_return_info = collect_return_info
self.get_exception_breakpoint = get_exception_breakpoint
self._dont_trace_get_file_type = DONT_TRACE.get
self._dont_trace_dirs_get_file_type = DONT_TRACE_DIRS.get
self.PYDEV_FILE = PYDEV_FILE
self.LIB_FILE = LIB_FILE
self._in_project_scope_cache = {}
self._exclude_by_filter_cache = {}
self._apply_filter_cache = {}
self._ignore_system_exit_codes = set()
# DAP related
self._dap_messages_listeners = []
if set_as_global:
# Set as the global instance only after it's initialized.
set_global_debugger(self)
pydevd_defaults.on_pydb_init(self)
# Stop the tracing as the last thing before the actual shutdown for a clean exit.
atexit.register(stoptrace)
    def collect_try_except_info(self, code_obj):
        filename = code_obj.co_filename
        try:
            if os.path.exists(filename):
                pydev_log.debug("Collecting try..except info from source for %s", filename)
                try_except_infos = collect_try_except_info_from_source(filename)
                if try_except_infos:
                    # Filter for the current function
                    max_line = -1
                    min_line = sys.maxsize
                    for _, line in dis.findlinestarts(code_obj):
                        if line > max_line:
                            max_line = line

                        if line < min_line:
                            min_line = line

                    try_except_infos = [x for x in try_except_infos if min_line <= x.try_line <= max_line]
                return try_except_infos

        except:
            pydev_log.exception("Error collecting try..except info from source (%s)", filename)

        pydev_log.debug("Collecting try..except info from bytecode for %s", filename)
        return collect_try_except_info(code_obj)
    def setup_auto_reload_watcher(self, enable_auto_reload, watch_dirs, poll_target_time, exclude_patterns, include_patterns):
        try:
            with self._lock_create_fs_notify:
                # When setting up, dispose of the previous one (if any).
                if self._fsnotify_thread is not None:
                    self._fsnotify_thread.do_kill_pydev_thread()
                    self._fsnotify_thread = None

                if not enable_auto_reload:
                    return

                exclude_patterns = tuple(exclude_patterns)
                include_patterns = tuple(include_patterns)

                def accept_directory(absolute_filename, cache={}):
                    try:
                        return cache[absolute_filename]
                    except:
                        if absolute_filename and absolute_filename[-1] not in ("/", "\\"):
                            # I.e.: for directories we always end with '/' or '\\' so that
                            # we match exclusions such as "**/node_modules/**"
                            absolute_filename += os.path.sep

                        # First include what we want
                        for include_pattern in include_patterns:
                            if glob_matches_path(absolute_filename, include_pattern):
                                cache[absolute_filename] = True
                                return True

                        # Then exclude what we don't want
                        for exclude_pattern in exclude_patterns:
                            if glob_matches_path(absolute_filename, exclude_pattern):
                                cache[absolute_filename] = False
                                return False

                        # By default track all directories not excluded.
                        cache[absolute_filename] = True
                        return True

                def accept_file(absolute_filename, cache={}):
                    try:
                        return cache[absolute_filename]
                    except:
                        # First include what we want
                        for include_pattern in include_patterns:
                            if glob_matches_path(absolute_filename, include_pattern):
                                cache[absolute_filename] = True
                                return True

                        # Then exclude what we don't want
                        for exclude_pattern in exclude_patterns:
                            if glob_matches_path(absolute_filename, exclude_pattern):
                                cache[absolute_filename] = False
                                return False

                        # By default don't track files not included.
                        cache[absolute_filename] = False
                        return False

                self._fsnotify_thread = FSNotifyThread(self, PyDevdAPI(), watch_dirs)
                watcher = self._fsnotify_thread.watcher
                watcher.accept_directory = accept_directory
                watcher.accept_file = accept_file

                watcher.target_time_for_single_scan = poll_target_time
                watcher.target_time_for_notification = poll_target_time
                self._fsnotify_thread.start()
        except:
            pydev_log.exception("Error setting up auto-reload.")
    def get_arg_ppid(self):
        try:
            setup = SetupHolder.setup
            if setup:
                return int(setup.get("ppid", 0))
        except:
            pydev_log.exception("Error getting ppid.")

        return 0

    def wait_for_ready_to_run(self):
        while not self.ready_to_run:
            # busy wait until we receive run command
            self.process_internal_commands()
            self._py_db_command_thread_event.clear()
            self._py_db_command_thread_event.wait(TIMEOUT_FAST)

    def on_initialize(self):
        """
        Note: only called when using the DAP (Debug Adapter Protocol).
        """
        self._on_configuration_done_event.clear()

    def on_configuration_done(self):
        """
        Note: only called when using the DAP (Debug Adapter Protocol).
        """
        self._on_configuration_done_event.set()
        self._py_db_command_thread_event.set()

    def is_attached(self):
        return self._on_configuration_done_event.is_set()

    def on_disconnect(self):
        """
        Note: only called when using the DAP (Debug Adapter Protocol).
        """
        self.authentication.logout()
        self._on_configuration_done_event.clear()

    def set_ignore_system_exit_codes(self, ignore_system_exit_codes):
        assert isinstance(ignore_system_exit_codes, (list, tuple, set))
        self._ignore_system_exit_codes = set(ignore_system_exit_codes)

    def ignore_system_exit_code(self, system_exit_exc):
        if hasattr(system_exit_exc, "code"):
            return system_exit_exc.code in self._ignore_system_exit_codes
        else:
            return system_exit_exc in self._ignore_system_exit_codes
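
    # --- Illustrative sketch (not part of pydevd): the SystemExit filtering above in practice. ---
    # Documentation only; never called by the debugger itself.
    def _example_ignore_system_exit_codes(self):
        """Hypothetical use of set_ignore_system_exit_codes()/ignore_system_exit_code()."""
        self.set_ignore_system_exit_codes([0, None])
        assert self.ignore_system_exit_code(SystemExit(0))  # exc.code == 0 -> ignored
        assert self.ignore_system_exit_code(SystemExit(None))  # plain sys.exit() -> ignored
        assert not self.ignore_system_exit_code(SystemExit(3))  # unlisted exit code -> reported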
    def block_until_configuration_done(self, cancel=None):
        if cancel is None:
            cancel = NULL

        while not cancel.is_set():
            if self._on_configuration_done_event.is_set():
                cancel.set()  # Set cancel to prevent reuse
                return

            self.process_internal_commands()
            self._py_db_command_thread_event.clear()
            self._py_db_command_thread_event.wait(TIMEOUT_FAST)

    def add_fake_frame(self, thread_id, frame_id, frame):
        self.suspended_frames_manager.add_fake_frame(thread_id, frame_id, frame)

    def handle_breakpoint_condition(self, info, pybreakpoint, new_frame):
        condition = pybreakpoint.condition
        try:
            if pybreakpoint.handle_hit_condition(new_frame):
                return True

            if not condition:
                return False

            return eval(condition, new_frame.f_globals, new_frame.f_locals)
        except Exception as e:
            if not isinstance(e, self.skip_print_breakpoint_exception):
                stack_trace = io.StringIO()
                etype, value, tb = sys.exc_info()
                traceback.print_exception(etype, value, tb.tb_next, file=stack_trace)

                msg = "Error while evaluating expression in conditional breakpoint: %s\n%s" % (condition, stack_trace.getvalue())
                api = PyDevdAPI()
                api.send_error_message(self, msg)

            if not isinstance(e, self.skip_suspend_on_breakpoint_exception):
                try:
                    # add exception_type and stacktrace into thread additional info
                    etype, value, tb = sys.exc_info()
                    error = "".join(traceback.format_exception_only(etype, value))
                    stack = traceback.extract_stack(f=tb.tb_frame.f_back)

                    # On self.set_suspend(thread, CMD_SET_BREAK) this info will be
                    # sent to the client.
                    info.conditional_breakpoint_exception = ("Condition:\n" + condition + "\n\nError:\n" + error, stack)
                except:
                    pydev_log.exception()

                return True

            return False
        finally:
            etype, value, tb = None, None, None
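
    # --- Illustrative note (not part of pydevd): what handle_breakpoint_condition() decides. ---
    # Given a breakpoint whose condition is, say, "x > 10", the breakpoint suspends the program
    # only when eval("x > 10", frame globals, frame locals) is truthy. If the condition itself
    # raises (for instance, x is undefined), the error is reported to the IDE (unless the
    # exception type is listed in skip_print_breakpoint_exception) and, unless it is listed in
    # skip_suspend_on_breakpoint_exception, the debugger still suspends so the user can inspect
    # why the condition failed.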
    def handle_breakpoint_expression(self, pybreakpoint, info, new_frame):
        try:
            try:
                val = eval(pybreakpoint.expression, new_frame.f_globals, new_frame.f_locals)
            except:
                val = sys.exc_info()[1]
        finally:
            if val is not None:
                info.pydev_message = str(val)

    def _internal_get_file_type(self, abs_real_path_and_basename):
        basename = abs_real_path_and_basename[-1]
        if basename.startswith(IGNORE_BASENAMES_STARTING_WITH) or abs_real_path_and_basename[0].startswith(IGNORE_BASENAMES_STARTING_WITH):
            # Note: these are the files that are completely ignored (they aren't shown to the user
            # as user nor library code as it's usually just noise in the frame stack).