path: root/third_party/python/sentry_sdk
author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-19 00:47:55 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-19 00:47:55 +0000
commit     26a029d407be480d791972afb5975cf62c9360a6 (patch)
tree       f435a8308119effd964b339f76abb83a57c29483 /third_party/python/sentry_sdk
parent     Initial commit. (diff)
download   firefox-e51783d008170d9ab27d25da98ca3a38b0a41b67.tar.xz
           firefox-e51783d008170d9ab27d25da98ca3a38b0a41b67.zip
Adding upstream version 124.0.1. (upstream/124.0.1)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'third_party/python/sentry_sdk')
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk-0.14.3.dist-info/LICENSE  9
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk-0.14.3.dist-info/METADATA  60
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk-0.14.3.dist-info/RECORD  58
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk-0.14.3.dist-info/WHEEL  6
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk-0.14.3.dist-info/top_level.txt  1
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/__init__.py  25
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/_compat.py  92
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/_types.py  37
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/api.py  256
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/client.py  406
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/consts.py  97
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/debug.py  44
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/envelope.py  293
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/hub.py  647
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/integrations/__init__.py  183
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/integrations/_wsgi_common.py  180
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/integrations/aiohttp.py  211
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/integrations/argv.py  33
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/integrations/asgi.py  194
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/integrations/atexit.py  62
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/integrations/aws_lambda.py  254
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/integrations/beam.py  184
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/integrations/bottle.py  199
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/integrations/celery.py  258
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/integrations/dedupe.py  43
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/integrations/django/__init__.py  484
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/integrations/django/asgi.py  47
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/integrations/django/middleware.py  136
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/integrations/django/templates.py  121
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/integrations/django/transactions.py  134
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/integrations/excepthook.py  76
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/integrations/falcon.py  209
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/integrations/flask.py  260
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/integrations/gnu_backtrace.py  107
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/integrations/logging.py  237
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/integrations/modules.py  56
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/integrations/pyramid.py  217
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/integrations/redis.py  70
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/integrations/rq.py  150
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/integrations/sanic.py  233
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/integrations/serverless.py  87
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/integrations/spark/__init__.py  4
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/integrations/spark/spark_driver.py  263
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/integrations/spark/spark_worker.py  120
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/integrations/sqlalchemy.py  86
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/integrations/stdlib.py  230
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/integrations/threading.py  90
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/integrations/tornado.py  203
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/integrations/trytond.py  55
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/integrations/wsgi.py  309
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/py.typed  0
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/scope.py  408
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/serializer.py  336
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/sessions.py  249
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/tracing.py  498
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/transport.py  365
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/utils.py  831
-rw-r--r--  third_party/python/sentry_sdk/sentry_sdk/worker.py  142
58 files changed, 10645 insertions, 0 deletions
diff --git a/third_party/python/sentry_sdk/sentry_sdk-0.14.3.dist-info/LICENSE b/third_party/python/sentry_sdk/sentry_sdk-0.14.3.dist-info/LICENSE
new file mode 100644
index 0000000000..61555f192e
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk-0.14.3.dist-info/LICENSE
@@ -0,0 +1,9 @@
+Copyright (c) 2018 Sentry (https://sentry.io) and individual contributors.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
+* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/third_party/python/sentry_sdk/sentry_sdk-0.14.3.dist-info/METADATA b/third_party/python/sentry_sdk/sentry_sdk-0.14.3.dist-info/METADATA
new file mode 100644
index 0000000000..5fef4faa5b
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk-0.14.3.dist-info/METADATA
@@ -0,0 +1,60 @@
+Metadata-Version: 2.1
+Name: sentry-sdk
+Version: 0.14.3
+Summary: Python client for Sentry (https://getsentry.com)
+Home-page: https://github.com/getsentry/sentry-python
+Author: Sentry Team and Contributors
+Author-email: hello@getsentry.com
+License: BSD
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Web Environment
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Requires-Dist: urllib3 (>=1.10.0)
+Requires-Dist: certifi
+Provides-Extra: aiohttp
+Requires-Dist: aiohttp (>=3.5) ; extra == 'aiohttp'
+Provides-Extra: beam
+Requires-Dist: beam (>=2.12) ; extra == 'beam'
+Provides-Extra: bottle
+Requires-Dist: bottle (>=0.12.13) ; extra == 'bottle'
+Provides-Extra: celery
+Requires-Dist: celery (>=3) ; extra == 'celery'
+Provides-Extra: django
+Requires-Dist: django (>=1.8) ; extra == 'django'
+Provides-Extra: falcon
+Requires-Dist: falcon (>=1.4) ; extra == 'falcon'
+Provides-Extra: flask
+Requires-Dist: flask (>=0.11) ; extra == 'flask'
+Requires-Dist: blinker (>=1.1) ; extra == 'flask'
+Provides-Extra: pyspark
+Requires-Dist: pyspark (>=2.4.4) ; extra == 'pyspark'
+Provides-Extra: rq
+Requires-Dist: 0.6 ; extra == 'rq'
+Provides-Extra: sanic
+Requires-Dist: sanic (>=0.8) ; extra == 'sanic'
+Provides-Extra: sqlalchemy
+Requires-Dist: sqlalchemy (>=1.2) ; extra == 'sqlalchemy'
+Provides-Extra: tornado
+Requires-Dist: tornado (>=5) ; extra == 'tornado'
+
+
+Sentry-Python - Sentry SDK for Python
+=====================================
+
+**Sentry-Python is an SDK for Sentry.** Check out `GitHub
+<https://github.com/getsentry/sentry-python>`_ to find out more.
+
+
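The extras declared above (flask, django, celery, and so on) only add minimum-version pins for the matching framework; the integration code itself ships under sentry_sdk/integrations/ below. A minimal sketch of how this vendored SDK is typically initialized with one of those integrations; the DSN is a placeholder and the option values are illustrative, not taken from this patch.

    import sentry_sdk
    from sentry_sdk.integrations.flask import FlaskIntegration

    sentry_sdk.init(
        dsn="https://publickey@example.ingest.invalid/1",  # placeholder DSN
        integrations=[FlaskIntegration()],
        release="my-app@1.0.0",       # optional, see the `release` option in consts.py
        environment="production",     # optional, see the `environment` option
    )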
diff --git a/third_party/python/sentry_sdk/sentry_sdk-0.14.3.dist-info/RECORD b/third_party/python/sentry_sdk/sentry_sdk-0.14.3.dist-info/RECORD
new file mode 100644
index 0000000000..14cfd725cb
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk-0.14.3.dist-info/RECORD
@@ -0,0 +1,58 @@
+sentry_sdk/__init__.py,sha256=k1HZ_Malhx3a5bIh4pAl4Tvt_hAopZhyR5lcRR0enX0,591
+sentry_sdk/_compat.py,sha256=f3Tadrt6580oSkwwriJrggZR6oeNsdN2OBCOGuSasj8,2422
+sentry_sdk/_types.py,sha256=1WMOPaU3zhM-W_ejMduB3yM-qNeb4fTKdmBY3L2QhXY,1080
+sentry_sdk/api.py,sha256=f6EJa-Zdr_Oehq5JgMqVcXeY0rU0scTiMMp2c3gSJN4,5528
+sentry_sdk/client.py,sha256=mD57zGxONY_DfFFksae5dJEEMnS11UnSgcfv9pE5gC4,13157
+sentry_sdk/consts.py,sha256=WS4SVRzlEoF0H3BKhxgAEPa8kT2mFPFdr6IqDznS6cQ,3487
+sentry_sdk/debug.py,sha256=ZT-7VoCIA5TYhh7X-ZSaSzYZ4_MuoNvzi12gKgIKzm0,1132
+sentry_sdk/envelope.py,sha256=cOSNXA2r0Q2lGMY-UpSP-jBh-cvSDDlMe12vy5DUn3w,8221
+sentry_sdk/hub.py,sha256=tbbZaSexKp44Sx6cxJ7uV8YwDkm2KAObZIIgu9UPfRQ,19459
+sentry_sdk/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+sentry_sdk/scope.py,sha256=ZQshHgonA0lTgRzUsV0eSqqPdOMBD9OHEJZNs5Dsf-8,12509
+sentry_sdk/serializer.py,sha256=Nn9O5fZo7QquXae_pxoaOIbT3_iQP30GrWuUV0ykywI,10507
+sentry_sdk/sessions.py,sha256=5g-pUW3LqcEHL3gvnPk5lZvPQmP6B9Vu2KYYrxjbDls,7732
+sentry_sdk/tracing.py,sha256=55qCUgIesvTTAzl7dL9BFDDOcY25FgsUqm6bHvVMOP4,15210
+sentry_sdk/transport.py,sha256=qXPsbb4OAWLpAhSCImoILnKUulMVoHjgFWuiEJGXbbA,11002
+sentry_sdk/utils.py,sha256=TG89z9WzBqPu0039YnxgY6FcRRr8l5K0_R3X4hiInjc,22953
+sentry_sdk/worker.py,sha256=nlcd5mlawU3AOQLOYIgTtQVceX623p3YMqv2ot0czZw,4560
+sentry_sdk/integrations/__init__.py,sha256=pD6ksI4OXAH0NeXbqAc_vhvVJpyPR-ycGzqOLQSbiy4,6464
+sentry_sdk/integrations/_wsgi_common.py,sha256=J8jlafU5yhQu2xjKuK_3-Bt9HbqbHIP5ZyWebkfSJ_k,4763
+sentry_sdk/integrations/aiohttp.py,sha256=__SUTMu1k2eIT7M7lefyCXNxUMeSAoQYKTV0OtgB6YM,7076
+sentry_sdk/integrations/argv.py,sha256=X-RVfWNxuPObsOmuommDc4FcIhNSTKpGsD1cYbrqza4,945
+sentry_sdk/integrations/asgi.py,sha256=BvLrqtzyaM1UIXidSjkpZEWDGYlrplqOvXc52gX5Zhc,6821
+sentry_sdk/integrations/atexit.py,sha256=b75c2SBJPl_cS4ObiQT1SMKqeZG5Fqed61z2myHRKug,1837
+sentry_sdk/integrations/aws_lambda.py,sha256=NCop82UqSLetsvoIf0THuFPXSdWua-y4ZToG-ysqGSM,8890
+sentry_sdk/integrations/beam.py,sha256=3wSJv4SuIBsVFtoc2J1oW2JYZnq-biMXEL-7Qn85rOQ,5650
+sentry_sdk/integrations/bottle.py,sha256=KwvGziHwdxzv_FeT1UNRMAmtDEWiT_0ssT5QyzFQG2M,6188
+sentry_sdk/integrations/celery.py,sha256=86NG-gplsol8gR-8c5Cr43QT_CvLC_9sbD8efRuuHbg,8394
+sentry_sdk/integrations/dedupe.py,sha256=d3JaHlMJpeF9zqVmITSPcLPBEvr9aHRzIrlGyzqeNgs,1166
+sentry_sdk/integrations/excepthook.py,sha256=ho96OGOzBdTZDxNOV4VQXfHv_MD5hsZ_8ww-5GNrRDI,2182
+sentry_sdk/integrations/falcon.py,sha256=S15UIm84t1cHvzj_3gxEbgmkEy5sXex9p-L8Sc1UwSQ,6797
+sentry_sdk/integrations/flask.py,sha256=U5-23SYrEbiNB80TSX96lry_qzpa-Nyr675cWLSSfWQ,8168
+sentry_sdk/integrations/gnu_backtrace.py,sha256=VJU3zYY7GUybAgIOEGF_P7i4V2R_jOnlSgTCS7XNto0,2912
+sentry_sdk/integrations/logging.py,sha256=kqWZmR711fCc2k--eULHXbi0khJY9K4pcsYvSgu-Zs8,6922
+sentry_sdk/integrations/modules.py,sha256=tgl4abSudtR03NBOjXCWJ08dHY5KlxUveX3mPUNosYk,1393
+sentry_sdk/integrations/pyramid.py,sha256=4VPOY1AZAjGfrRiM2KAZHrevwC4Uy6gBQ2oH9sF5XyU,7006
+sentry_sdk/integrations/redis.py,sha256=HhXrJ8tOrsd-8pqXeYjtAepF31diRgl3bqJ4GnPiluI,1941
+sentry_sdk/integrations/rq.py,sha256=mT6iE4JcuwBL0cMeInMQMSaA0QzJ7JgN0dpDpiw4SCM,4714
+sentry_sdk/integrations/sanic.py,sha256=n_y49BpScw6XCMg1bRSJDrNnLcBIklAnFT9xibmGEQY,7646
+sentry_sdk/integrations/serverless.py,sha256=d97Z1cBXdRFdXxzYsLNtRebATfExIWZ1oxMT8_xTf4Q,1993
+sentry_sdk/integrations/sqlalchemy.py,sha256=jLVpxLSol0_4goqLVb9_z1awTpwdtmG_1IXbpqC1lkg,2525
+sentry_sdk/integrations/stdlib.py,sha256=bYoNEOP_xmKgR-n_SmBLNAHhwC41y8l96go5aQX3gss,7348
+sentry_sdk/integrations/threading.py,sha256=TN5cmoLfRIaayFFWoN9L0VdXunB23iTcUjUA6V9GSrE,2856
+sentry_sdk/integrations/tornado.py,sha256=4V32cl0cw0cpUjCyVUFKtW3kD0iN9VibW4IAHTJYhnM,6910
+sentry_sdk/integrations/trytond.py,sha256=cLpQ5CZrG1Wn5Cq3_Xosswu5Jt43KEV-ag_zrvcXwqo,1728
+sentry_sdk/integrations/wsgi.py,sha256=UPUJiEYM1eu1-zSNNZFSPLenkGYYS022lCLM36CQgCs,10232
+sentry_sdk/integrations/django/__init__.py,sha256=R0zTG2qyFyqal9Azb_DkMfcOW_gjF9PLWPz9vm0KKqM,15893
+sentry_sdk/integrations/django/asgi.py,sha256=-OPPl8WjFjXPgfdTmMuIZv1EqFSk0BRKLfkgmmPAwPA,1467
+sentry_sdk/integrations/django/middleware.py,sha256=bM-J_4ur2qilpHzAlrWtlUsakR-ZPH-wvKw931fnuX4,4419
+sentry_sdk/integrations/django/templates.py,sha256=Knq4W6NyfBbFGCLQqpB6mBCze2ZQJKmS4Up5Gvy47VU,3398
+sentry_sdk/integrations/django/transactions.py,sha256=1W-9xuryfy7ztqI_PLrSTAOWO0holUPyYuXYUh8ez2E,4094
+sentry_sdk/integrations/spark/__init__.py,sha256=oOewMErnZk2rzNvIlZO6URxQexu9bUJuSLM2m_zECy8,208
+sentry_sdk/integrations/spark/spark_driver.py,sha256=CMyEe6_Qf8E9OSz3bcCumsOgO8eJ4egKOrazOYPcvX4,8465
+sentry_sdk/integrations/spark/spark_worker.py,sha256=if_Pqkaxm-SKghaUETCLhL7Vrxk2HKG7A3mwocAHzas,3884
+sentry_sdk-0.14.3.dist-info/LICENSE,sha256=WUBNTIVOV5CX1Bv8zVAGr96dbXDmRs9VB0zb_q1ezxw,1330
+sentry_sdk-0.14.3.dist-info/METADATA,sha256=qTLzHA_baC0M0ImoObls68NW_BzxKhx63tzeKnBAW8E,2203
+sentry_sdk-0.14.3.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110
+sentry_sdk-0.14.3.dist-info/top_level.txt,sha256=XrQz30XE9FKXSY_yGLrd9bsv2Rk390GTDJOSujYaMxI,11
+sentry_sdk-0.14.3.dist-info/RECORD,,
diff --git a/third_party/python/sentry_sdk/sentry_sdk-0.14.3.dist-info/WHEEL b/third_party/python/sentry_sdk/sentry_sdk-0.14.3.dist-info/WHEEL
new file mode 100644
index 0000000000..ef99c6cf32
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk-0.14.3.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.34.2)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/third_party/python/sentry_sdk/sentry_sdk-0.14.3.dist-info/top_level.txt b/third_party/python/sentry_sdk/sentry_sdk-0.14.3.dist-info/top_level.txt
new file mode 100644
index 0000000000..5051901ecb
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk-0.14.3.dist-info/top_level.txt
@@ -0,0 +1 @@
+sentry_sdk
diff --git a/third_party/python/sentry_sdk/sentry_sdk/__init__.py b/third_party/python/sentry_sdk/sentry_sdk/__init__.py
new file mode 100644
index 0000000000..b211a6c754
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/__init__.py
@@ -0,0 +1,25 @@
+from sentry_sdk.hub import Hub, init
+from sentry_sdk.scope import Scope
+from sentry_sdk.transport import Transport, HttpTransport
+from sentry_sdk.client import Client
+
+from sentry_sdk.api import * # noqa
+from sentry_sdk.api import __all__ as api_all
+
+from sentry_sdk.consts import VERSION # noqa
+
+__all__ = api_all + [ # noqa
+ "Hub",
+ "Scope",
+ "Client",
+ "Transport",
+ "HttpTransport",
+ "init",
+ "integrations",
+]
+
+# Initialize the debug support after everything is loaded
+from sentry_sdk.debug import init_debug_support
+
+init_debug_support()
+del init_debug_support
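__init__.py re-exports the Hub, Scope, Client and transport classes and pulls in every helper listed in sentry_sdk.api.__all__, so day-to-day use goes through the package top level. A minimal sketch, assuming no DSN is configured (without one the client simply discards events, as client.py shows further down):

    import sentry_sdk

    # init() with no arguments falls back to the SENTRY_DSN environment variable;
    # with no DSN at all, captured events are silently discarded.
    sentry_sdk.init()

    try:
        1 / 0
    except ZeroDivisionError:
        event_id = sentry_sdk.capture_exception()  # event id, or None if discarded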
diff --git a/third_party/python/sentry_sdk/sentry_sdk/_compat.py b/third_party/python/sentry_sdk/sentry_sdk/_compat.py
new file mode 100644
index 0000000000..4db5f44c33
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/_compat.py
@@ -0,0 +1,92 @@
+import sys
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Optional
+ from typing import Tuple
+ from typing import Any
+ from typing import Type
+
+ from typing import TypeVar
+
+ T = TypeVar("T")
+
+
+PY2 = sys.version_info[0] == 2
+
+if PY2:
+ import urlparse # noqa
+
+ text_type = unicode # noqa
+ import Queue as queue # noqa
+
+ string_types = (str, text_type)
+ number_types = (int, long, float) # noqa
+ int_types = (int, long) # noqa
+ iteritems = lambda x: x.iteritems() # noqa: B301
+
+ def implements_str(cls):
+ # type: (T) -> T
+ cls.__unicode__ = cls.__str__
+ cls.__str__ = lambda x: unicode(x).encode("utf-8") # noqa
+ return cls
+
+ exec("def reraise(tp, value, tb=None):\n raise tp, value, tb")
+
+
+else:
+ import urllib.parse as urlparse # noqa
+ import queue # noqa
+
+ text_type = str
+ string_types = (text_type,) # type: Tuple[type]
+ number_types = (int, float) # type: Tuple[type, type]
+ int_types = (int,) # noqa
+ iteritems = lambda x: x.items()
+
+ def implements_str(x):
+ # type: (T) -> T
+ return x
+
+ def reraise(tp, value, tb=None):
+ # type: (Optional[Type[BaseException]], Optional[BaseException], Optional[Any]) -> None
+ assert value is not None
+ if value.__traceback__ is not tb:
+ raise value.with_traceback(tb)
+ raise value
+
+
+def with_metaclass(meta, *bases):
+ # type: (Any, *Any) -> Any
+ class MetaClass(type):
+ def __new__(metacls, name, this_bases, d):
+ # type: (Any, Any, Any, Any) -> Any
+ return meta(name, bases, d)
+
+ return type.__new__(MetaClass, "temporary_class", (), {})
+
+
+def check_thread_support():
+ # type: () -> None
+ try:
+ from uwsgi import opt # type: ignore
+ except ImportError:
+ return
+
+ # When `threads` is passed in as a uwsgi option,
+    # `enable-threads` is implicitly turned on.

+ if "threads" in opt:
+ return
+
+ if str(opt.get("enable-threads", "0")).lower() in ("false", "off", "no", "0"):
+ from warnings import warn
+
+ warn(
+ Warning(
+ "We detected the use of uwsgi with disabled threads. "
+ "This will cause issues with the transport you are "
+ "trying to use. Please enable threading for uwsgi. "
+ '(Enable the "enable-threads" flag).'
+ )
+ )
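with_metaclass is the usual Python 2/3 shim for declaring a metaclass in a way both interpreters accept; hub.py later in this patch relies on it via class Hub(with_metaclass(HubMeta)). A toy illustration (UpperMeta is made up for the example):

    from sentry_sdk._compat import with_metaclass

    class UpperMeta(type):
        # Toy metaclass: upper-cases the class name at creation time.
        def __new__(mcls, name, bases, namespace):
            return super(UpperMeta, mcls).__new__(mcls, name.upper(), bases, namespace)

    class Example(with_metaclass(UpperMeta)):
        pass

    print(Example.__name__)  # "EXAMPLE" on both Python 2 and Python 3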
diff --git a/third_party/python/sentry_sdk/sentry_sdk/_types.py b/third_party/python/sentry_sdk/sentry_sdk/_types.py
new file mode 100644
index 0000000000..74020aea57
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/_types.py
@@ -0,0 +1,37 @@
+try:
+ from typing import TYPE_CHECKING as MYPY
+except ImportError:
+ MYPY = False
+
+
+if MYPY:
+ from types import TracebackType
+ from typing import Any
+ from typing import Callable
+ from typing import Dict
+ from typing import Optional
+ from typing import Tuple
+ from typing import Type
+ from typing_extensions import Literal
+
+ ExcInfo = Tuple[
+ Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]
+ ]
+
+ Event = Dict[str, Any]
+ Hint = Dict[str, Any]
+
+ Breadcrumb = Dict[str, Any]
+ BreadcrumbHint = Dict[str, Any]
+
+ EventProcessor = Callable[[Event, Hint], Optional[Event]]
+ ErrorProcessor = Callable[[Event, ExcInfo], Optional[Event]]
+ BreadcrumbProcessor = Callable[[Breadcrumb, BreadcrumbHint], Optional[Breadcrumb]]
+
+ # https://github.com/python/mypy/issues/5710
+ NotImplementedType = Any
+
+ EventDataCategory = Literal[
+ "default", "error", "crash", "transaction", "security", "attachment", "session"
+ ]
+ SessionStatus = Literal["ok", "exited", "crashed", "abnormal"]
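These aliases only exist under the MYPY guard, so they cost nothing at runtime. EventProcessor, for instance, is the shape of a before_send callback: it receives the event dict and a hint dict and returns the (possibly modified) event, or None to drop it. A small sketch of a callback matching that shape, written with the same comment-style annotations this file targets:

    from typing import Any, Dict, Optional

    def before_send(event, hint):
        # type: (Dict[str, Any], Dict[str, Any]) -> Optional[Dict[str, Any]]
        # Drop events that carry no exception; tag everything else.
        if "exception" not in event:
            return None
        event.setdefault("tags", {})["checked"] = "yes"
        return event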
diff --git a/third_party/python/sentry_sdk/sentry_sdk/api.py b/third_party/python/sentry_sdk/sentry_sdk/api.py
new file mode 100644
index 0000000000..0f1cdfc741
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/api.py
@@ -0,0 +1,256 @@
+import inspect
+from contextlib import contextmanager
+
+from sentry_sdk.hub import Hub
+from sentry_sdk.scope import Scope
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Any
+ from typing import Dict
+ from typing import Optional
+ from typing import overload
+ from typing import Callable
+ from typing import TypeVar
+ from typing import ContextManager
+
+ from sentry_sdk._types import Event, Hint, Breadcrumb, BreadcrumbHint
+ from sentry_sdk.tracing import Span
+
+ T = TypeVar("T")
+ F = TypeVar("F", bound=Callable[..., Any])
+else:
+
+ def overload(x):
+ # type: (T) -> T
+ return x
+
+
+__all__ = [
+ "capture_event",
+ "capture_message",
+ "capture_exception",
+ "add_breadcrumb",
+ "configure_scope",
+ "push_scope",
+ "flush",
+ "last_event_id",
+ "start_span",
+ "set_tag",
+ "set_context",
+ "set_extra",
+ "set_user",
+ "set_level",
+]
+
+
+def hubmethod(f):
+ # type: (F) -> F
+ f.__doc__ = "%s\n\n%s" % (
+ "Alias for :py:meth:`sentry_sdk.Hub.%s`" % f.__name__,
+ inspect.getdoc(getattr(Hub, f.__name__)),
+ )
+ return f
+
+
+def scopemethod(f):
+ # type: (F) -> F
+ f.__doc__ = "%s\n\n%s" % (
+ "Alias for :py:meth:`sentry_sdk.Scope.%s`" % f.__name__,
+ inspect.getdoc(getattr(Scope, f.__name__)),
+ )
+ return f
+
+
+@hubmethod
+def capture_event(
+ event, # type: Event
+ hint=None, # type: Optional[Hint]
+ scope=None, # type: Optional[Any]
+ **scope_args # type: Dict[str, Any]
+):
+ # type: (...) -> Optional[str]
+ hub = Hub.current
+ if hub is not None:
+ return hub.capture_event(event, hint, scope=scope, **scope_args)
+ return None
+
+
+@hubmethod
+def capture_message(
+ message, # type: str
+ level=None, # type: Optional[str]
+ scope=None, # type: Optional[Any]
+ **scope_args # type: Dict[str, Any]
+):
+ # type: (...) -> Optional[str]
+ hub = Hub.current
+ if hub is not None:
+ return hub.capture_message(message, level, scope=scope, **scope_args)
+ return None
+
+
+@hubmethod
+def capture_exception(
+ error=None, # type: Optional[BaseException]
+ scope=None, # type: Optional[Any]
+ **scope_args # type: Dict[str, Any]
+):
+ # type: (...) -> Optional[str]
+ hub = Hub.current
+ if hub is not None:
+ return hub.capture_exception(error, scope=scope, **scope_args)
+ return None
+
+
+@hubmethod
+def add_breadcrumb(
+ crumb=None, # type: Optional[Breadcrumb]
+ hint=None, # type: Optional[BreadcrumbHint]
+ **kwargs # type: Any
+):
+ # type: (...) -> None
+ hub = Hub.current
+ if hub is not None:
+ return hub.add_breadcrumb(crumb, hint, **kwargs)
+
+
+@overload # noqa
+def configure_scope():
+ # type: () -> ContextManager[Scope]
+ pass
+
+
+@overload # noqa
+def configure_scope(
+ callback, # type: Callable[[Scope], None]
+):
+ # type: (...) -> None
+ pass
+
+
+@hubmethod # noqa
+def configure_scope(
+ callback=None, # type: Optional[Callable[[Scope], None]]
+):
+ # type: (...) -> Optional[ContextManager[Scope]]
+ hub = Hub.current
+ if hub is not None:
+ return hub.configure_scope(callback)
+ elif callback is None:
+
+ @contextmanager
+ def inner():
+ yield Scope()
+
+ return inner()
+ else:
+ # returned if user provided callback
+ return None
+
+
+@overload # noqa
+def push_scope():
+ # type: () -> ContextManager[Scope]
+ pass
+
+
+@overload # noqa
+def push_scope(
+ callback, # type: Callable[[Scope], None]
+):
+ # type: (...) -> None
+ pass
+
+
+@hubmethod # noqa
+def push_scope(
+ callback=None, # type: Optional[Callable[[Scope], None]]
+):
+ # type: (...) -> Optional[ContextManager[Scope]]
+ hub = Hub.current
+ if hub is not None:
+ return hub.push_scope(callback)
+ elif callback is None:
+
+ @contextmanager
+ def inner():
+ yield Scope()
+
+ return inner()
+ else:
+ # returned if user provided callback
+ return None
+
+
+@scopemethod # noqa
+def set_tag(key, value):
+ # type: (str, Any) -> None
+ hub = Hub.current
+ if hub is not None:
+ hub.scope.set_tag(key, value)
+
+
+@scopemethod # noqa
+def set_context(key, value):
+ # type: (str, Any) -> None
+ hub = Hub.current
+ if hub is not None:
+ hub.scope.set_context(key, value)
+
+
+@scopemethod # noqa
+def set_extra(key, value):
+ # type: (str, Any) -> None
+ hub = Hub.current
+ if hub is not None:
+ hub.scope.set_extra(key, value)
+
+
+@scopemethod # noqa
+def set_user(value):
+ # type: (Dict[str, Any]) -> None
+ hub = Hub.current
+ if hub is not None:
+ hub.scope.set_user(value)
+
+
+@scopemethod # noqa
+def set_level(value):
+ # type: (str) -> None
+ hub = Hub.current
+ if hub is not None:
+ hub.scope.set_level(value)
+
+
+@hubmethod
+def flush(
+ timeout=None, # type: Optional[float]
+ callback=None, # type: Optional[Callable[[int, float], None]]
+):
+ # type: (...) -> None
+ hub = Hub.current
+ if hub is not None:
+ return hub.flush(timeout=timeout, callback=callback)
+
+
+@hubmethod
+def last_event_id():
+ # type: () -> Optional[str]
+ hub = Hub.current
+ if hub is not None:
+ return hub.last_event_id()
+ return None
+
+
+@hubmethod
+def start_span(
+ span=None, # type: Optional[Span]
+ **kwargs # type: Any
+):
+ # type: (...) -> Span
+
+ # TODO: All other functions in this module check for
+ # `Hub.current is None`. That actually should never happen?
+ return Hub.current.start_span(span=span, **kwargs)
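configure_scope and push_scope are overloaded above: called with no argument they return a context manager that yields the scope, called with a callback they apply it and return None. A short sketch of both forms (no DSN configured, so nothing is actually sent):

    import sentry_sdk

    sentry_sdk.init()  # no DSN: events are discarded, but the scope API still works

    # Context-manager form: tag changes stay local to the pushed scope.
    with sentry_sdk.push_scope() as scope:
        scope.set_tag("section", "checkout")
        sentry_sdk.capture_message("payment failed", level="warning")

    # Callback form: applied to the current scope, returns None.
    sentry_sdk.configure_scope(lambda scope: scope.set_user({"id": "42"}))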
diff --git a/third_party/python/sentry_sdk/sentry_sdk/client.py b/third_party/python/sentry_sdk/sentry_sdk/client.py
new file mode 100644
index 0000000000..c0fb8422d8
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/client.py
@@ -0,0 +1,406 @@
+import os
+import uuid
+import random
+from datetime import datetime
+import socket
+
+from sentry_sdk._compat import string_types, text_type, iteritems
+from sentry_sdk.utils import (
+ handle_in_app,
+ get_type_name,
+ capture_internal_exceptions,
+ current_stacktrace,
+ disable_capture_event,
+ logger,
+)
+from sentry_sdk.serializer import serialize
+from sentry_sdk.transport import make_transport
+from sentry_sdk.consts import DEFAULT_OPTIONS, SDK_INFO, ClientConstructor
+from sentry_sdk.integrations import setup_integrations
+from sentry_sdk.utils import ContextVar
+from sentry_sdk.sessions import SessionFlusher
+from sentry_sdk.envelope import Envelope
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Any
+ from typing import Callable
+ from typing import Dict
+ from typing import List
+ from typing import Optional
+
+ from sentry_sdk.scope import Scope
+ from sentry_sdk._types import Event, Hint
+ from sentry_sdk.sessions import Session
+
+
+_client_init_debug = ContextVar("client_init_debug")
+
+
+def _get_options(*args, **kwargs):
+ # type: (*Optional[str], **Any) -> Dict[str, Any]
+ if args and (isinstance(args[0], (text_type, bytes, str)) or args[0] is None):
+ dsn = args[0] # type: Optional[str]
+ args = args[1:]
+ else:
+ dsn = None
+
+ rv = dict(DEFAULT_OPTIONS)
+ options = dict(*args, **kwargs)
+ if dsn is not None and options.get("dsn") is None:
+ options["dsn"] = dsn
+
+ for key, value in iteritems(options):
+ if key not in rv:
+ raise TypeError("Unknown option %r" % (key,))
+ rv[key] = value
+
+ if rv["dsn"] is None:
+ rv["dsn"] = os.environ.get("SENTRY_DSN")
+
+ if rv["release"] is None:
+ rv["release"] = os.environ.get("SENTRY_RELEASE")
+
+ if rv["environment"] is None:
+ rv["environment"] = os.environ.get("SENTRY_ENVIRONMENT")
+
+ if rv["server_name"] is None and hasattr(socket, "gethostname"):
+ rv["server_name"] = socket.gethostname()
+
+ return rv
+
+
+class _Client(object):
+ """The client is internally responsible for capturing the events and
+ forwarding them to sentry through the configured transport. It takes
+ the client options as keyword arguments and optionally the DSN as first
+ argument.
+ """
+
+ def __init__(self, *args, **kwargs):
+ # type: (*Any, **Any) -> None
+ self.options = get_options(*args, **kwargs) # type: Dict[str, Any]
+ self._init_impl()
+
+ def __getstate__(self):
+ # type: () -> Any
+ return {"options": self.options}
+
+ def __setstate__(self, state):
+ # type: (Any) -> None
+ self.options = state["options"]
+ self._init_impl()
+
+ def _init_impl(self):
+ # type: () -> None
+ old_debug = _client_init_debug.get(False)
+
+ def _send_sessions(sessions):
+ # type: (List[Any]) -> None
+ transport = self.transport
+ if sessions and transport:
+ envelope = Envelope()
+ for session in sessions:
+ envelope.add_session(session)
+ transport.capture_envelope(envelope)
+
+ try:
+ _client_init_debug.set(self.options["debug"])
+ self.transport = make_transport(self.options)
+ self.session_flusher = SessionFlusher(flush_func=_send_sessions)
+
+ request_bodies = ("always", "never", "small", "medium")
+ if self.options["request_bodies"] not in request_bodies:
+ raise ValueError(
+ "Invalid value for request_bodies. Must be one of {}".format(
+ request_bodies
+ )
+ )
+
+ self.integrations = setup_integrations(
+ self.options["integrations"],
+ with_defaults=self.options["default_integrations"],
+ with_auto_enabling_integrations=self.options["_experiments"].get(
+ "auto_enabling_integrations", False
+ ),
+ )
+ finally:
+ _client_init_debug.set(old_debug)
+
+ @property
+ def dsn(self):
+ # type: () -> Optional[str]
+ """Returns the configured DSN as string."""
+ return self.options["dsn"]
+
+ def _prepare_event(
+ self,
+ event, # type: Event
+ hint, # type: Optional[Hint]
+ scope, # type: Optional[Scope]
+ ):
+ # type: (...) -> Optional[Event]
+
+ if event.get("timestamp") is None:
+ event["timestamp"] = datetime.utcnow()
+
+ hint = dict(hint or ()) # type: Hint
+
+ if scope is not None:
+ event_ = scope.apply_to_event(event, hint)
+ if event_ is None:
+ return None
+ event = event_
+
+ if (
+ self.options["attach_stacktrace"]
+ and "exception" not in event
+ and "stacktrace" not in event
+ and "threads" not in event
+ ):
+ with capture_internal_exceptions():
+ event["threads"] = {
+ "values": [
+ {
+ "stacktrace": current_stacktrace(
+ self.options["with_locals"]
+ ),
+ "crashed": False,
+ "current": True,
+ }
+ ]
+ }
+
+ for key in "release", "environment", "server_name", "dist":
+ if event.get(key) is None and self.options[key] is not None:
+ event[key] = text_type(self.options[key]).strip()
+ if event.get("sdk") is None:
+ sdk_info = dict(SDK_INFO)
+ sdk_info["integrations"] = sorted(self.integrations.keys())
+ event["sdk"] = sdk_info
+
+ if event.get("platform") is None:
+ event["platform"] = "python"
+
+ event = handle_in_app(
+ event, self.options["in_app_exclude"], self.options["in_app_include"]
+ )
+
+        # Postprocess the event here so that annotated types generally
+        # do not surface in before_send
+ if event is not None:
+ event = serialize(event)
+
+ before_send = self.options["before_send"]
+ if before_send is not None:
+ new_event = None
+ with capture_internal_exceptions():
+ new_event = before_send(event, hint or {})
+ if new_event is None:
+ logger.info("before send dropped event (%s)", event)
+ event = new_event # type: ignore
+
+ return event
+
+ def _is_ignored_error(self, event, hint):
+ # type: (Event, Hint) -> bool
+ exc_info = hint.get("exc_info")
+ if exc_info is None:
+ return False
+
+ type_name = get_type_name(exc_info[0])
+ full_name = "%s.%s" % (exc_info[0].__module__, type_name)
+
+ for errcls in self.options["ignore_errors"]:
+ # String types are matched against the type name in the
+ # exception only
+ if isinstance(errcls, string_types):
+ if errcls == full_name or errcls == type_name:
+ return True
+ else:
+ if issubclass(exc_info[0], errcls):
+ return True
+
+ return False
+
+ def _should_capture(
+ self,
+ event, # type: Event
+ hint, # type: Hint
+ scope=None, # type: Optional[Scope]
+ ):
+ # type: (...) -> bool
+ if scope is not None and not scope._should_capture:
+ return False
+
+ if (
+ self.options["sample_rate"] < 1.0
+ and random.random() >= self.options["sample_rate"]
+ ):
+ return False
+
+ if self._is_ignored_error(event, hint):
+ return False
+
+ return True
+
+ def _update_session_from_event(
+ self,
+ session, # type: Session
+ event, # type: Event
+ ):
+ # type: (...) -> None
+
+ crashed = False
+ errored = False
+ user_agent = None
+
+ # Figure out if this counts as an error and if we should mark the
+ # session as crashed.
+ level = event.get("level")
+ if level == "fatal":
+ crashed = True
+ if not crashed:
+ exceptions = (event.get("exception") or {}).get("values")
+ if exceptions:
+ errored = True
+ for error in exceptions:
+ mechanism = error.get("mechanism")
+ if mechanism and mechanism.get("handled") is False:
+ crashed = True
+ break
+
+ user = event.get("user")
+
+ if session.user_agent is None:
+ headers = (event.get("request") or {}).get("headers")
+ for (k, v) in iteritems(headers or {}):
+ if k.lower() == "user-agent":
+ user_agent = v
+ break
+
+ session.update(
+ status="crashed" if crashed else None,
+ user=user,
+ user_agent=user_agent,
+ errors=session.errors + (errored or crashed),
+ )
+
+ def capture_event(
+ self,
+ event, # type: Event
+ hint=None, # type: Optional[Hint]
+ scope=None, # type: Optional[Scope]
+ ):
+ # type: (...) -> Optional[str]
+ """Captures an event.
+
+ :param event: A ready-made event that can be directly sent to Sentry.
+
+ :param hint: Contains metadata about the event that can be read from `before_send`, such as the original exception object or a HTTP request object.
+
+        :returns: An event ID. May be `None` if there is no DSN set or if the SDK decided to discard the event for other reasons. In such situations setting `debug=True` on `init()` may help.
+ """
+ if disable_capture_event.get(False):
+ return None
+
+ if self.transport is None:
+ return None
+ if hint is None:
+ hint = {}
+ event_id = event.get("event_id")
+ if event_id is None:
+ event["event_id"] = event_id = uuid.uuid4().hex
+ if not self._should_capture(event, hint, scope):
+ return None
+ event_opt = self._prepare_event(event, hint, scope)
+ if event_opt is None:
+ return None
+
+ # whenever we capture an event we also check if the session needs
+ # to be updated based on that information.
+ session = scope._session if scope else None
+ if session:
+ self._update_session_from_event(session, event)
+
+ self.transport.capture_event(event_opt)
+ return event_id
+
+ def capture_session(
+ self, session # type: Session
+ ):
+ # type: (...) -> None
+ if not session.release:
+ logger.info("Discarded session update because of missing release")
+ else:
+ self.session_flusher.add_session(session)
+
+ def close(
+ self,
+ timeout=None, # type: Optional[float]
+ callback=None, # type: Optional[Callable[[int, float], None]]
+ ):
+ # type: (...) -> None
+ """
+ Close the client and shut down the transport. Arguments have the same
+ semantics as :py:meth:`Client.flush`.
+ """
+ if self.transport is not None:
+ self.flush(timeout=timeout, callback=callback)
+ self.session_flusher.kill()
+ self.transport.kill()
+ self.transport = None
+
+ def flush(
+ self,
+ timeout=None, # type: Optional[float]
+ callback=None, # type: Optional[Callable[[int, float], None]]
+ ):
+ # type: (...) -> None
+ """
+ Wait for the current events to be sent.
+
+ :param timeout: Wait for at most `timeout` seconds. If no `timeout` is provided, the `shutdown_timeout` option value is used.
+
+ :param callback: Is invoked with the number of pending events and the configured timeout.
+ """
+ if self.transport is not None:
+ if timeout is None:
+ timeout = self.options["shutdown_timeout"]
+ self.session_flusher.flush()
+ self.transport.flush(timeout=timeout, callback=callback)
+
+ def __enter__(self):
+ # type: () -> _Client
+ return self
+
+ def __exit__(self, exc_type, exc_value, tb):
+ # type: (Any, Any, Any) -> None
+ self.close()
+
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ # Make mypy, PyCharm and other static analyzers think `get_options` is a
+ # type to have nicer autocompletion for params.
+ #
+ # Use `ClientConstructor` to define the argument types of `init` and
+ # `Dict[str, Any]` to tell static analyzers about the return type.
+
+ class get_options(ClientConstructor, Dict[str, Any]): # noqa: N801
+ pass
+
+ class Client(ClientConstructor, _Client):
+ pass
+
+
+else:
+ # Alias `get_options` for actual usage. Go through the lambda indirection
+ # to throw PyCharm off of the weakly typed signature (it would otherwise
+ # discover both the weakly typed signature of `_init` and our faked `init`
+ # type).
+
+ get_options = (lambda: _get_options)()
+ Client = (lambda: _Client)()
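_get_options above falls back to the SENTRY_DSN, SENTRY_RELEASE and SENTRY_ENVIRONMENT environment variables, and _prepare_event gives before_send the final say on every event. A hedged sketch combining the two; the option values and the "noisy.module" logger name are illustrative only:

    import sentry_sdk

    def scrub(event, hint):
        # Returning None drops the event entirely (see _prepare_event above);
        # any other return value is sent in place of the original event.
        if event.get("logger") == "noisy.module":  # made-up logger name
            return None
        event.setdefault("tags", {})["scrubbed"] = "true"
        return event

    # dsn is omitted on purpose: it is read from SENTRY_DSN, just as release and
    # environment fall back to SENTRY_RELEASE and SENTRY_ENVIRONMENT.
    sentry_sdk.init(before_send=scrub, sample_rate=0.5, attach_stacktrace=True)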
diff --git a/third_party/python/sentry_sdk/sentry_sdk/consts.py b/third_party/python/sentry_sdk/sentry_sdk/consts.py
new file mode 100644
index 0000000000..2fe012e66d
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/consts.py
@@ -0,0 +1,97 @@
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Optional
+ from typing import Callable
+ from typing import Union
+ from typing import List
+ from typing import Type
+ from typing import Dict
+ from typing import Any
+ from typing import Sequence
+ from typing_extensions import TypedDict
+
+ from sentry_sdk.transport import Transport
+ from sentry_sdk.integrations import Integration
+
+ from sentry_sdk._types import Event, EventProcessor, BreadcrumbProcessor
+
+ # Experiments are feature flags to enable and disable certain unstable SDK
+ # functionality. Changing them from the defaults (`None`) in production
+ # code is highly discouraged. They are not subject to any stability
+ # guarantees such as the ones from semantic versioning.
+ Experiments = TypedDict(
+ "Experiments",
+ {
+ "max_spans": Optional[int],
+ "record_sql_params": Optional[bool],
+ "auto_enabling_integrations": Optional[bool],
+ "auto_session_tracking": Optional[bool],
+ },
+ total=False,
+ )
+
+
+# This type exists to trick mypy and PyCharm into thinking `init` and `Client`
+# take these arguments (even though they take opaque **kwargs)
+class ClientConstructor(object):
+ def __init__(
+ self,
+ dsn=None, # type: Optional[str]
+ with_locals=True, # type: bool
+ max_breadcrumbs=100, # type: int
+ release=None, # type: Optional[str]
+ environment=None, # type: Optional[str]
+ server_name=None, # type: Optional[str]
+ shutdown_timeout=2, # type: int
+ integrations=[], # type: Sequence[Integration] # noqa: B006
+ in_app_include=[], # type: List[str] # noqa: B006
+ in_app_exclude=[], # type: List[str] # noqa: B006
+ default_integrations=True, # type: bool
+ dist=None, # type: Optional[str]
+ transport=None, # type: Optional[Union[Transport, Type[Transport], Callable[[Event], None]]]
+ sample_rate=1.0, # type: float
+ send_default_pii=False, # type: bool
+ http_proxy=None, # type: Optional[str]
+ https_proxy=None, # type: Optional[str]
+ ignore_errors=[], # type: List[Union[type, str]] # noqa: B006
+ request_bodies="medium", # type: str
+ before_send=None, # type: Optional[EventProcessor]
+ before_breadcrumb=None, # type: Optional[BreadcrumbProcessor]
+ debug=False, # type: bool
+ attach_stacktrace=False, # type: bool
+ ca_certs=None, # type: Optional[str]
+ propagate_traces=True, # type: bool
+ # DO NOT ENABLE THIS RIGHT NOW UNLESS YOU WANT TO EXCEED YOUR EVENT QUOTA IMMEDIATELY
+ traces_sample_rate=0.0, # type: float
+ traceparent_v2=False, # type: bool
+ _experiments={}, # type: Experiments # noqa: B006
+ ):
+ # type: (...) -> None
+ pass
+
+
+def _get_default_options():
+ # type: () -> Dict[str, Any]
+ import inspect
+
+ if hasattr(inspect, "getfullargspec"):
+ getargspec = inspect.getfullargspec
+ else:
+ getargspec = inspect.getargspec # type: ignore
+
+ a = getargspec(ClientConstructor.__init__)
+ defaults = a.defaults or ()
+ return dict(zip(a.args[-len(defaults) :], defaults))
+
+
+DEFAULT_OPTIONS = _get_default_options()
+del _get_default_options
+
+
+VERSION = "0.14.3"
+SDK_INFO = {
+ "name": "sentry.python",
+ "version": VERSION,
+ "packages": [{"name": "pypi:sentry-sdk", "version": VERSION}],
+}
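DEFAULT_OPTIONS is built by introspecting the ClientConstructor signature, so every keyword accepted by init() and Client() shows up there with its default value. A quick way to inspect them (the values in the comments are the defaults visible above):

    from sentry_sdk.consts import DEFAULT_OPTIONS, VERSION

    print(VERSION)                            # "0.14.3"
    print(DEFAULT_OPTIONS["sample_rate"])     # 1.0
    print(DEFAULT_OPTIONS["request_bodies"])  # "medium"
    print(sorted(DEFAULT_OPTIONS))            # every supported option name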
diff --git a/third_party/python/sentry_sdk/sentry_sdk/debug.py b/third_party/python/sentry_sdk/sentry_sdk/debug.py
new file mode 100644
index 0000000000..fe8ae50cea
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/debug.py
@@ -0,0 +1,44 @@
+import sys
+import logging
+
+from sentry_sdk import utils
+from sentry_sdk.hub import Hub
+from sentry_sdk.utils import logger
+from sentry_sdk.client import _client_init_debug
+from logging import LogRecord
+
+
+class _HubBasedClientFilter(logging.Filter):
+ def filter(self, record):
+ # type: (LogRecord) -> bool
+ if _client_init_debug.get(False):
+ return True
+ hub = Hub.current
+ if hub is not None and hub.client is not None:
+ return hub.client.options["debug"]
+ return False
+
+
+def init_debug_support():
+ # type: () -> None
+ if not logger.handlers:
+ configure_logger()
+ configure_debug_hub()
+
+
+def configure_logger():
+ # type: () -> None
+ _handler = logging.StreamHandler(sys.stderr)
+ _handler.setFormatter(logging.Formatter(" [sentry] %(levelname)s: %(message)s"))
+ logger.addHandler(_handler)
+ logger.setLevel(logging.DEBUG)
+ logger.addFilter(_HubBasedClientFilter())
+
+
+def configure_debug_hub():
+ # type: () -> None
+ def _get_debug_hub():
+ # type: () -> Hub
+ return Hub.current
+
+ utils._get_debug_hub = _get_debug_hub
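The filter above only lets SDK log records through while init() is running or when the bound client was created with debug=True, so enabling the SDK's own diagnostics is a single option. A minimal sketch (placeholder DSN):

    import sentry_sdk

    # With debug=True the SDK logs to stderr with an " [sentry] LEVEL: ..." prefix
    # (see configure_logger above); without it _HubBasedClientFilter silences the logger.
    sentry_sdk.init(dsn="https://publickey@example.ingest.invalid/1", debug=True)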
diff --git a/third_party/python/sentry_sdk/sentry_sdk/envelope.py b/third_party/python/sentry_sdk/sentry_sdk/envelope.py
new file mode 100644
index 0000000000..fd08553249
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/envelope.py
@@ -0,0 +1,293 @@
+import io
+import json
+import shutil
+import mimetypes
+
+from sentry_sdk._compat import text_type
+from sentry_sdk._types import MYPY
+from sentry_sdk.sessions import Session
+
+if MYPY:
+ from typing import Any
+ from typing import Tuple
+ from typing import Optional
+ from typing import Union
+ from typing import Dict
+ from typing import List
+ from typing import Iterator
+
+ from sentry_sdk._types import Event, EventDataCategory
+
+
+def get_event_data_category(event):
+ # type: (Event) -> EventDataCategory
+ if event.get("type") == "transaction":
+ return "transaction"
+ return "error"
+
+
+class Envelope(object):
+ def __init__(
+ self,
+ headers=None, # type: Optional[Dict[str, str]]
+ items=None, # type: Optional[List[Item]]
+ ):
+ # type: (...) -> None
+ if headers is not None:
+ headers = dict(headers)
+ self.headers = headers or {}
+ if items is None:
+ items = []
+ else:
+ items = list(items)
+ self.items = items
+
+ @property
+ def description(self):
+ # type: (...) -> str
+ return "envelope with %s items (%s)" % (
+ len(self.items),
+ ", ".join(x.data_category for x in self.items),
+ )
+
+ def add_event(
+ self, event # type: Event
+ ):
+ # type: (...) -> None
+ self.add_item(Item(payload=PayloadRef(json=event), type="event"))
+
+ def add_session(
+ self, session # type: Union[Session, Any]
+ ):
+ # type: (...) -> None
+ if isinstance(session, Session):
+ session = session.to_json()
+ self.add_item(Item(payload=PayloadRef(json=session), type="session"))
+
+ def add_item(
+ self, item # type: Item
+ ):
+ # type: (...) -> None
+ self.items.append(item)
+
+ def get_event(self):
+ # type: (...) -> Optional[Event]
+ for items in self.items:
+ event = items.get_event()
+ if event is not None:
+ return event
+ return None
+
+ def __iter__(self):
+ # type: (...) -> Iterator[Item]
+ return iter(self.items)
+
+ def serialize_into(
+ self, f # type: Any
+ ):
+ # type: (...) -> None
+ f.write(json.dumps(self.headers).encode("utf-8"))
+ f.write(b"\n")
+ for item in self.items:
+ item.serialize_into(f)
+
+ def serialize(self):
+ # type: (...) -> bytes
+ out = io.BytesIO()
+ self.serialize_into(out)
+ return out.getvalue()
+
+ @classmethod
+ def deserialize_from(
+ cls, f # type: Any
+ ):
+ # type: (...) -> Envelope
+ headers = json.loads(f.readline())
+ items = []
+ while 1:
+ item = Item.deserialize_from(f)
+ if item is None:
+ break
+ items.append(item)
+ return cls(headers=headers, items=items)
+
+ @classmethod
+ def deserialize(
+ cls, bytes # type: bytes
+ ):
+ # type: (...) -> Envelope
+ return cls.deserialize_from(io.BytesIO(bytes))
+
+ def __repr__(self):
+ # type: (...) -> str
+ return "<Envelope headers=%r items=%r>" % (self.headers, self.items)
+
+
+class PayloadRef(object):
+ def __init__(
+ self,
+ bytes=None, # type: Optional[bytes]
+ path=None, # type: Optional[Union[bytes, text_type]]
+ json=None, # type: Optional[Any]
+ ):
+ # type: (...) -> None
+ self.json = json
+ self.bytes = bytes
+ self.path = path
+
+ def get_bytes(self):
+ # type: (...) -> bytes
+ if self.bytes is None:
+ if self.path is not None:
+ with open(self.path, "rb") as f:
+ self.bytes = f.read()
+ elif self.json is not None:
+ self.bytes = json.dumps(self.json).encode("utf-8")
+ else:
+ self.bytes = b""
+ return self.bytes
+
+ def _prepare_serialize(self):
+ # type: (...) -> Tuple[Any, Any]
+ if self.path is not None and self.bytes is None:
+ f = open(self.path, "rb")
+ f.seek(0, 2)
+ length = f.tell()
+ f.seek(0, 0)
+
+ def writer(out):
+ # type: (Any) -> None
+ try:
+ shutil.copyfileobj(f, out)
+ finally:
+ f.close()
+
+ return length, writer
+
+ bytes = self.get_bytes()
+ return len(bytes), lambda f: f.write(bytes)
+
+ @property
+ def inferred_content_type(self):
+ # type: (...) -> str
+ if self.json is not None:
+ return "application/json"
+ elif self.path is not None:
+ path = self.path
+ if isinstance(path, bytes):
+ path = path.decode("utf-8", "replace")
+ ty = mimetypes.guess_type(path)[0]
+ if ty:
+ return ty
+ return "application/octet-stream"
+
+ def __repr__(self):
+ # type: (...) -> str
+ return "<Payload %r>" % (self.inferred_content_type,)
+
+
+class Item(object):
+ def __init__(
+ self,
+ payload, # type: Union[bytes, text_type, PayloadRef]
+ headers=None, # type: Optional[Dict[str, str]]
+ type=None, # type: Optional[str]
+ content_type=None, # type: Optional[str]
+ filename=None, # type: Optional[str]
+ ):
+ if headers is not None:
+ headers = dict(headers)
+ elif headers is None:
+ headers = {}
+ self.headers = headers
+ if isinstance(payload, bytes):
+ payload = PayloadRef(bytes=payload)
+ elif isinstance(payload, text_type):
+ payload = PayloadRef(bytes=payload.encode("utf-8"))
+ else:
+ payload = payload
+
+ if filename is not None:
+ headers["filename"] = filename
+ if type is not None:
+ headers["type"] = type
+ if content_type is not None:
+ headers["content_type"] = content_type
+ elif "content_type" not in headers:
+ headers["content_type"] = payload.inferred_content_type
+
+ self.payload = payload
+
+ def __repr__(self):
+ # type: (...) -> str
+ return "<Item headers=%r payload=%r data_category=%r>" % (
+ self.headers,
+ self.payload,
+ self.data_category,
+ )
+
+ @property
+ def data_category(self):
+ # type: (...) -> EventDataCategory
+ rv = "default" # type: Any
+ event = self.get_event()
+ if event is not None:
+ rv = get_event_data_category(event)
+ else:
+ ty = self.headers.get("type")
+ if ty in ("session", "attachment"):
+ rv = ty
+ return rv
+
+ def get_bytes(self):
+ # type: (...) -> bytes
+ return self.payload.get_bytes()
+
+ def get_event(self):
+ # type: (...) -> Optional[Event]
+ if self.headers.get("type") == "event" and self.payload.json is not None:
+ return self.payload.json
+ return None
+
+ def serialize_into(
+ self, f # type: Any
+ ):
+ # type: (...) -> None
+ headers = dict(self.headers)
+ length, writer = self.payload._prepare_serialize()
+ headers["length"] = length
+ f.write(json.dumps(headers).encode("utf-8"))
+ f.write(b"\n")
+ writer(f)
+ f.write(b"\n")
+
+ def serialize(self):
+ # type: (...) -> bytes
+ out = io.BytesIO()
+ self.serialize_into(out)
+ return out.getvalue()
+
+ @classmethod
+ def deserialize_from(
+ cls, f # type: Any
+ ):
+ # type: (...) -> Optional[Item]
+ line = f.readline().rstrip()
+ if not line:
+ return None
+ headers = json.loads(line)
+ length = headers["length"]
+ payload = f.read(length)
+ if headers.get("type") == "event":
+ rv = cls(headers=headers, payload=PayloadRef(json=json.loads(payload)))
+ else:
+ rv = cls(headers=headers, payload=payload)
+ f.readline()
+ return rv
+
+ @classmethod
+ def deserialize(
+ cls, bytes # type: bytes
+ ):
+ # type: (...) -> Optional[Item]
+ return cls.deserialize_from(io.BytesIO(bytes))
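The envelope wire format is line oriented: one JSON headers line for the envelope, then for each item a JSON item-headers line carrying "length", the raw payload, and a trailing newline. A round-trip sketch using only the classes above (the event_id is a placeholder):

    from sentry_sdk.envelope import Envelope

    envelope = Envelope()
    envelope.add_event({"event_id": "0123456789abcdef0123456789abcdef",
                        "message": "hello"})

    data = envelope.serialize()               # bytes: headers line + one "event" item
    restored = Envelope.deserialize(data)

    print(restored.description)               # "envelope with 1 items (error)"
    print(restored.get_event()["message"])    # "hello"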
diff --git a/third_party/python/sentry_sdk/sentry_sdk/hub.py b/third_party/python/sentry_sdk/sentry_sdk/hub.py
new file mode 100644
index 0000000000..f0060b9d79
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/hub.py
@@ -0,0 +1,647 @@
+import copy
+import random
+import sys
+
+from datetime import datetime
+from contextlib import contextmanager
+
+from sentry_sdk._compat import with_metaclass
+from sentry_sdk.scope import Scope
+from sentry_sdk.client import Client
+from sentry_sdk.tracing import Span
+from sentry_sdk.sessions import Session
+from sentry_sdk.utils import (
+ exc_info_from_error,
+ event_from_exception,
+ logger,
+ ContextVar,
+)
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Union
+ from typing import Any
+ from typing import Optional
+ from typing import Tuple
+ from typing import Dict
+ from typing import List
+ from typing import Callable
+ from typing import Generator
+ from typing import Type
+ from typing import TypeVar
+ from typing import overload
+ from typing import ContextManager
+
+ from sentry_sdk.integrations import Integration
+ from sentry_sdk._types import (
+ Event,
+ Hint,
+ Breadcrumb,
+ BreadcrumbHint,
+ ExcInfo,
+ )
+ from sentry_sdk.consts import ClientConstructor
+
+ T = TypeVar("T")
+
+else:
+
+ def overload(x):
+ # type: (T) -> T
+ return x
+
+
+_local = ContextVar("sentry_current_hub")
+
+
+def _update_scope(base, scope_change, scope_kwargs):
+ # type: (Scope, Optional[Any], Dict[str, Any]) -> Scope
+ if scope_change and scope_kwargs:
+ raise TypeError("cannot provide scope and kwargs")
+ if scope_change is not None:
+ final_scope = copy.copy(base)
+ if callable(scope_change):
+ scope_change(final_scope)
+ else:
+ final_scope.update_from_scope(scope_change)
+ elif scope_kwargs:
+ final_scope = copy.copy(base)
+ final_scope.update_from_kwargs(scope_kwargs)
+ else:
+ final_scope = base
+ return final_scope
+
+
+def _should_send_default_pii():
+ # type: () -> bool
+ client = Hub.current.client
+ if not client:
+ return False
+ return client.options["send_default_pii"]
+
+
+class _InitGuard(object):
+ def __init__(self, client):
+ # type: (Client) -> None
+ self._client = client
+
+ def __enter__(self):
+ # type: () -> _InitGuard
+ return self
+
+ def __exit__(self, exc_type, exc_value, tb):
+ # type: (Any, Any, Any) -> None
+ c = self._client
+ if c is not None:
+ c.close()
+
+
+def _init(*args, **kwargs):
+ # type: (*Optional[str], **Any) -> ContextManager[Any]
+ """Initializes the SDK and optionally integrations.
+
+ This takes the same arguments as the client constructor.
+ """
+ client = Client(*args, **kwargs) # type: ignore
+ Hub.current.bind_client(client)
+ rv = _InitGuard(client)
+ return rv
+
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ # Make mypy, PyCharm and other static analyzers think `init` is a type to
+ # have nicer autocompletion for params.
+ #
+ # Use `ClientConstructor` to define the argument types of `init` and
+ # `ContextManager[Any]` to tell static analyzers about the return type.
+
+ class init(ClientConstructor, ContextManager[Any]): # noqa: N801
+ pass
+
+
+else:
+ # Alias `init` for actual usage. Go through the lambda indirection to throw
+ # PyCharm off of the weakly typed signature (it would otherwise discover
+ # both the weakly typed signature of `_init` and our faked `init` type).
+
+ init = (lambda: _init)()
+
+
+class HubMeta(type):
+ @property
+ def current(cls):
+ # type: () -> Hub
+ """Returns the current instance of the hub."""
+ rv = _local.get(None)
+ if rv is None:
+ rv = Hub(GLOBAL_HUB)
+ _local.set(rv)
+ return rv
+
+ @property
+ def main(cls):
+ # type: () -> Hub
+ """Returns the main instance of the hub."""
+ return GLOBAL_HUB
+
+
+class _ScopeManager(object):
+ def __init__(self, hub):
+ # type: (Hub) -> None
+ self._hub = hub
+ self._original_len = len(hub._stack)
+ self._layer = hub._stack[-1]
+
+ def __enter__(self):
+ # type: () -> Scope
+ scope = self._layer[1]
+ assert scope is not None
+ return scope
+
+ def __exit__(self, exc_type, exc_value, tb):
+ # type: (Any, Any, Any) -> None
+ current_len = len(self._hub._stack)
+ if current_len < self._original_len:
+ logger.error(
+ "Scope popped too soon. Popped %s scopes too many.",
+ self._original_len - current_len,
+ )
+ return
+ elif current_len > self._original_len:
+ logger.warning(
+ "Leaked %s scopes: %s",
+ current_len - self._original_len,
+ self._hub._stack[self._original_len :],
+ )
+
+ layer = self._hub._stack[self._original_len - 1]
+ del self._hub._stack[self._original_len - 1 :]
+
+ if layer[1] != self._layer[1]:
+ logger.error(
+ "Wrong scope found. Meant to pop %s, but popped %s.",
+ layer[1],
+ self._layer[1],
+ )
+ elif layer[0] != self._layer[0]:
+ warning = (
+ "init() called inside of pushed scope. This might be entirely "
+ "legitimate but usually occurs when initializing the SDK inside "
+ "a request handler or task/job function. Try to initialize the "
+ "SDK as early as possible instead."
+ )
+ logger.warning(warning)
+
+
+class Hub(with_metaclass(HubMeta)): # type: ignore
+ """The hub wraps the concurrency management of the SDK. Each thread has
+ its own hub but the hub might transfer with the flow of execution if
+ context vars are available.
+
+ If the hub is used with a with statement it's temporarily activated.
+ """
+
+ _stack = None # type: List[Tuple[Optional[Client], Scope]]
+
+ # Mypy doesn't pick up on the metaclass.
+
+ if MYPY:
+ current = None # type: Hub
+ main = None # type: Hub
+
+ def __init__(
+ self,
+ client_or_hub=None, # type: Optional[Union[Hub, Client]]
+ scope=None, # type: Optional[Any]
+ ):
+ # type: (...) -> None
+ if isinstance(client_or_hub, Hub):
+ hub = client_or_hub
+ client, other_scope = hub._stack[-1]
+ if scope is None:
+ scope = copy.copy(other_scope)
+ else:
+ client = client_or_hub
+ if scope is None:
+ scope = Scope()
+
+ self._stack = [(client, scope)]
+ self._last_event_id = None # type: Optional[str]
+ self._old_hubs = [] # type: List[Hub]
+
+ def __enter__(self):
+ # type: () -> Hub
+ self._old_hubs.append(Hub.current)
+ _local.set(self)
+ return self
+
+ def __exit__(
+ self,
+ exc_type, # type: Optional[type]
+ exc_value, # type: Optional[BaseException]
+ tb, # type: Optional[Any]
+ ):
+ # type: (...) -> None
+ old = self._old_hubs.pop()
+ _local.set(old)
+
+ def run(
+ self, callback # type: Callable[[], T]
+ ):
+ # type: (...) -> T
+ """Runs a callback in the context of the hub. Alternatively the
+ with statement can be used on the hub directly.
+ """
+ with self:
+ return callback()
+
+ def get_integration(
+ self, name_or_class # type: Union[str, Type[Integration]]
+ ):
+ # type: (...) -> Any
+ """Returns the integration for this hub by name or class. If there
+ is no client bound or the client does not have that integration
+ then `None` is returned.
+
+ If the return value is not `None` the hub is guaranteed to have a
+ client attached.
+ """
+ if isinstance(name_or_class, str):
+ integration_name = name_or_class
+ elif name_or_class.identifier is not None:
+ integration_name = name_or_class.identifier
+ else:
+ raise ValueError("Integration has no name")
+
+ client = self._stack[-1][0]
+ if client is not None:
+ rv = client.integrations.get(integration_name)
+ if rv is not None:
+ return rv
+
+ @property
+ def client(self):
+ # type: () -> Optional[Client]
+ """Returns the current client on the hub."""
+ return self._stack[-1][0]
+
+ @property
+ def scope(self):
+ # type: () -> Scope
+ """Returns the current scope on the hub."""
+ return self._stack[-1][1]
+
+ def last_event_id(self):
+ # type: () -> Optional[str]
+ """Returns the last event ID."""
+ return self._last_event_id
+
+ def bind_client(
+ self, new # type: Optional[Client]
+ ):
+ # type: (...) -> None
+ """Binds a new client to the hub."""
+ top = self._stack[-1]
+ self._stack[-1] = (new, top[1])
+
+ def capture_event(
+ self,
+ event, # type: Event
+ hint=None, # type: Optional[Hint]
+ scope=None, # type: Optional[Any]
+ **scope_args # type: Dict[str, Any]
+ ):
+ # type: (...) -> Optional[str]
+ """Captures an event. Alias of :py:meth:`sentry_sdk.Client.capture_event`.
+ """
+ client, top_scope = self._stack[-1]
+ scope = _update_scope(top_scope, scope, scope_args)
+ if client is not None:
+ rv = client.capture_event(event, hint, scope)
+ if rv is not None:
+ self._last_event_id = rv
+ return rv
+ return None
+
+ def capture_message(
+ self,
+ message, # type: str
+ level=None, # type: Optional[str]
+ scope=None, # type: Optional[Any]
+ **scope_args # type: Dict[str, Any]
+ ):
+ # type: (...) -> Optional[str]
+ """Captures a message. The message is just a string. If no level
+ is provided the default level is `info`.
+
+ :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.Client.capture_event`).
+ """
+ if self.client is None:
+ return None
+ if level is None:
+ level = "info"
+ return self.capture_event(
+ {"message": message, "level": level}, scope=scope, **scope_args
+ )
+
+ def capture_exception(
+ self,
+ error=None, # type: Optional[Union[BaseException, ExcInfo]]
+ scope=None, # type: Optional[Any]
+ **scope_args # type: Dict[str, Any]
+ ):
+ # type: (...) -> Optional[str]
+ """Captures an exception.
+
+ :param error: An exception to catch. If `None`, `sys.exc_info()` will be used.
+
+ :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.Client.capture_event`).
+ """
+ client = self.client
+ if client is None:
+ return None
+ if error is not None:
+ exc_info = exc_info_from_error(error)
+ else:
+ exc_info = sys.exc_info()
+
+ event, hint = event_from_exception(exc_info, client_options=client.options)
+ try:
+ return self.capture_event(event, hint=hint, scope=scope, **scope_args)
+ except Exception:
+ self._capture_internal_exception(sys.exc_info())
+
+ return None
+
+ def _capture_internal_exception(
+ self, exc_info # type: Any
+ ):
+ # type: (...) -> Any
+ """
+ Capture an exception that is likely caused by a bug in the SDK
+ itself.
+
+ These exceptions do not end up in Sentry and are just logged instead.
+ """
+ logger.error("Internal error in sentry_sdk", exc_info=exc_info)
+
+ def add_breadcrumb(
+ self,
+ crumb=None, # type: Optional[Breadcrumb]
+ hint=None, # type: Optional[BreadcrumbHint]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ """
+ Adds a breadcrumb.
+
+ :param crumb: Dictionary with the data as the sentry v7/v8 protocol expects.
+
+ :param hint: An optional value that can be used by `before_breadcrumb`
+ to customize the breadcrumbs that are emitted.
+ """
+ client, scope = self._stack[-1]
+ if client is None:
+ logger.info("Dropped breadcrumb because no client bound")
+ return
+
+ crumb = dict(crumb or ()) # type: Breadcrumb
+ crumb.update(kwargs)
+ if not crumb:
+ return
+
+ hint = dict(hint or ()) # type: Hint
+
+ if crumb.get("timestamp") is None:
+ crumb["timestamp"] = datetime.utcnow()
+ if crumb.get("type") is None:
+ crumb["type"] = "default"
+
+ if client.options["before_breadcrumb"] is not None:
+ new_crumb = client.options["before_breadcrumb"](crumb, hint)
+ else:
+ new_crumb = crumb
+
+ if new_crumb is not None:
+ scope._breadcrumbs.append(new_crumb)
+ else:
+ logger.info("before breadcrumb dropped breadcrumb (%s)", crumb)
+
+ max_breadcrumbs = client.options["max_breadcrumbs"] # type: int
+ while len(scope._breadcrumbs) > max_breadcrumbs:
+ scope._breadcrumbs.popleft()
+
+ def start_span(
+ self,
+ span=None, # type: Optional[Span]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Span
+ """
+ Create a new span whose parent span is the currently active
+ span, if any. The return value is the span object that can
+ be used as a context manager to start and stop timing.
+
+ Note that you will not see any span that is not contained
+ within a transaction. Create a transaction with
+ ``start_span(transaction="my transaction")`` if an
+ integration doesn't already do this for you.
+ """
+
+ client, scope = self._stack[-1]
+
+ kwargs.setdefault("hub", self)
+
+ if span is None:
+ span = scope.span
+ if span is not None:
+ span = span.new_span(**kwargs)
+ else:
+ span = Span(**kwargs)
+
+ if span.sampled is None and span.transaction is not None:
+ sample_rate = client and client.options["traces_sample_rate"] or 0
+ span.sampled = random.random() < sample_rate
+
+ if span.sampled:
+ max_spans = (
+ client and client.options["_experiments"].get("max_spans") or 1000
+ )
+ span.init_finished_spans(maxlen=max_spans)
+
+ return span
+
+ @overload # noqa
+ def push_scope(
+ self, callback=None # type: Optional[None]
+ ):
+ # type: (...) -> ContextManager[Scope]
+ pass
+
+ @overload # noqa
+ def push_scope(
+ self, callback # type: Callable[[Scope], None]
+ ):
+ # type: (...) -> None
+ pass
+
+ def push_scope( # noqa
+ self, callback=None # type: Optional[Callable[[Scope], None]]
+ ):
+ # type: (...) -> Optional[ContextManager[Scope]]
+ """
+ Pushes a new layer on the scope stack.
+
+ :param callback: If provided, this method pushes a scope, calls
+ `callback`, and pops the scope again.
+
+ :returns: If no `callback` is provided, a context manager that should
+ be used to pop the scope again.
+ """
+ if callback is not None:
+ with self.push_scope() as scope:
+ callback(scope)
+ return None
+
+ client, scope = self._stack[-1]
+ new_layer = (client, copy.copy(scope))
+ self._stack.append(new_layer)
+
+ return _ScopeManager(self)
+
+ def pop_scope_unsafe(self):
+ # type: () -> Tuple[Optional[Client], Scope]
+ """
+ Pops a scope layer from the stack.
+
+ Try to use the context manager :py:meth:`push_scope` instead.
+ """
+ rv = self._stack.pop()
+ assert self._stack, "stack must have at least one layer"
+ return rv
+
+ @overload # noqa
+ def configure_scope(
+ self, callback=None # type: Optional[None]
+ ):
+ # type: (...) -> ContextManager[Scope]
+ pass
+
+ @overload # noqa
+ def configure_scope(
+ self, callback # type: Callable[[Scope], None]
+ ):
+ # type: (...) -> None
+ pass
+
+ def configure_scope( # noqa
+ self, callback=None # type: Optional[Callable[[Scope], None]]
+ ): # noqa
+ # type: (...) -> Optional[ContextManager[Scope]]
+
+ """
+ Reconfigures the scope.
+
+ :param callback: If provided, call the callback with the current scope.
+
+ :returns: If no callback is provided, returns a context manager that returns the scope.
+ """
+
+ client, scope = self._stack[-1]
+ if callback is not None:
+ if client is not None:
+ callback(scope)
+
+ return None
+
+ @contextmanager
+ def inner():
+ # type: () -> Generator[Scope, None, None]
+ if client is not None:
+ yield scope
+ else:
+ yield Scope()
+
+ return inner()
+
+ def start_session(self):
+ # type: (...) -> None
+ """Starts a new session."""
+ self.end_session()
+ client, scope = self._stack[-1]
+ scope._session = Session(
+ release=client.options["release"] if client else None,
+ environment=client.options["environment"] if client else None,
+ user=scope._user,
+ )
+
+ def end_session(self):
+ # type: (...) -> None
+ """Ends the current session if there is one."""
+ client, scope = self._stack[-1]
+ session = scope._session
+ if session is not None:
+ session.close()
+ if client is not None:
+ client.capture_session(session)
+ self._stack[-1][1]._session = None
+
+ def stop_auto_session_tracking(self):
+ # type: (...) -> None
+ """Stops automatic session tracking.
+
+        This temporarily disables session tracking for the current scope when called.
+ To resume session tracking call `resume_auto_session_tracking`.
+ """
+ self.end_session()
+ client, scope = self._stack[-1]
+ scope._force_auto_session_tracking = False
+
+ def resume_auto_session_tracking(self):
+ # type: (...) -> None
+ """Resumes automatic session tracking for the current scope if
+ disabled earlier. This requires that generally automatic session
+ tracking is enabled.
+ """
+ client, scope = self._stack[-1]
+ scope._force_auto_session_tracking = None
+
+ def flush(
+ self,
+ timeout=None, # type: Optional[float]
+ callback=None, # type: Optional[Callable[[int, float], None]]
+ ):
+ # type: (...) -> None
+ """
+ Alias for :py:meth:`sentry_sdk.Client.flush`
+ """
+ client, scope = self._stack[-1]
+ if client is not None:
+ return client.flush(timeout=timeout, callback=callback)
+
+ def iter_trace_propagation_headers(self):
+ # type: () -> Generator[Tuple[str, str], None, None]
+ # TODO: Document
+ client, scope = self._stack[-1]
+ span = scope.span
+
+ if span is None:
+ return
+
+ propagate_traces = client and client.options["propagate_traces"]
+ if not propagate_traces:
+ return
+
+ if client and client.options["traceparent_v2"]:
+ traceparent = span.to_traceparent()
+ else:
+ traceparent = span.to_legacy_traceparent()
+
+ yield "sentry-trace", traceparent
+
+
+GLOBAL_HUB = Hub()
+_local.set(GLOBAL_HUB)
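
For orientation, a minimal usage sketch of the hub API added above (not part of the patch). It assumes the SDK was initialised elsewhere, uses `push_scope`, `add_breadcrumb` and `capture_exception` from this hunk plus `Scope.set_tag` from the scope module, and `run_job` is a hypothetical application function:

    from sentry_sdk import Hub

    def handle_job(job_id):
        hub = Hub.current
        # Push a scope layer so tags and breadcrumbs set here are popped
        # again when the block exits and do not leak into other events.
        with hub.push_scope() as scope:
            scope.set_tag("job_id", job_id)
            hub.add_breadcrumb(category="job", message="job started", level="info")
            try:
                run_job(job_id)  # hypothetical application function
            except Exception:
                # With no explicit error, sys.exc_info() is used (see above).
                hub.capture_exception()
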
diff --git a/third_party/python/sentry_sdk/sentry_sdk/integrations/__init__.py b/third_party/python/sentry_sdk/sentry_sdk/integrations/__init__.py
new file mode 100644
index 0000000000..f264bc4855
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/integrations/__init__.py
@@ -0,0 +1,183 @@
+"""This package"""
+from __future__ import absolute_import
+
+from threading import Lock
+
+from sentry_sdk._compat import iteritems
+from sentry_sdk.utils import logger
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Callable
+ from typing import Dict
+ from typing import Iterator
+ from typing import List
+ from typing import Set
+ from typing import Tuple
+ from typing import Type
+
+
+_installer_lock = Lock()
+_installed_integrations = set() # type: Set[str]
+
+
+def _generate_default_integrations_iterator(integrations, auto_enabling_integrations):
+ # type: (Tuple[str, ...], Tuple[str, ...]) -> Callable[[bool], Iterator[Type[Integration]]]
+
+ def iter_default_integrations(with_auto_enabling_integrations):
+ # type: (bool) -> Iterator[Type[Integration]]
+ """Returns an iterator of the default integration classes:
+ """
+ from importlib import import_module
+
+ if with_auto_enabling_integrations:
+ all_import_strings = integrations + auto_enabling_integrations
+ else:
+ all_import_strings = integrations
+
+ for import_string in all_import_strings:
+ try:
+ module, cls = import_string.rsplit(".", 1)
+ yield getattr(import_module(module), cls)
+ except (DidNotEnable, SyntaxError) as e:
+ logger.debug(
+ "Did not import default integration %s: %s", import_string, e
+ )
+
+ if isinstance(iter_default_integrations.__doc__, str):
+ for import_string in integrations:
+ iter_default_integrations.__doc__ += "\n- `{}`".format(import_string)
+
+ return iter_default_integrations
+
+
+_AUTO_ENABLING_INTEGRATIONS = (
+ "sentry_sdk.integrations.django.DjangoIntegration",
+ "sentry_sdk.integrations.flask.FlaskIntegration",
+ "sentry_sdk.integrations.bottle.BottleIntegration",
+ "sentry_sdk.integrations.falcon.FalconIntegration",
+ "sentry_sdk.integrations.sanic.SanicIntegration",
+ "sentry_sdk.integrations.celery.CeleryIntegration",
+ "sentry_sdk.integrations.rq.RqIntegration",
+ "sentry_sdk.integrations.aiohttp.AioHttpIntegration",
+ "sentry_sdk.integrations.tornado.TornadoIntegration",
+ "sentry_sdk.integrations.sqlalchemy.SqlalchemyIntegration",
+)
+
+
+iter_default_integrations = _generate_default_integrations_iterator(
+ integrations=(
+ # stdlib/base runtime integrations
+ "sentry_sdk.integrations.logging.LoggingIntegration",
+ "sentry_sdk.integrations.stdlib.StdlibIntegration",
+ "sentry_sdk.integrations.excepthook.ExcepthookIntegration",
+ "sentry_sdk.integrations.dedupe.DedupeIntegration",
+ "sentry_sdk.integrations.atexit.AtexitIntegration",
+ "sentry_sdk.integrations.modules.ModulesIntegration",
+ "sentry_sdk.integrations.argv.ArgvIntegration",
+ "sentry_sdk.integrations.threading.ThreadingIntegration",
+ ),
+ auto_enabling_integrations=_AUTO_ENABLING_INTEGRATIONS,
+)
+
+del _generate_default_integrations_iterator
+
+
+def setup_integrations(
+ integrations, with_defaults=True, with_auto_enabling_integrations=False
+):
+ # type: (List[Integration], bool, bool) -> Dict[str, Integration]
+ """Given a list of integration instances this installs them all. When
+ `with_defaults` is set to `True` then all default integrations are added
+ unless they were already provided before.
+ """
+ integrations = dict(
+ (integration.identifier, integration) for integration in integrations or ()
+ )
+
+ logger.debug("Setting up integrations (with default = %s)", with_defaults)
+
+ # Integrations that are not explicitly set up by the user.
+ used_as_default_integration = set()
+
+ if with_defaults:
+ for integration_cls in iter_default_integrations(
+ with_auto_enabling_integrations
+ ):
+ if integration_cls.identifier not in integrations:
+ instance = integration_cls()
+ integrations[instance.identifier] = instance
+ used_as_default_integration.add(instance.identifier)
+
+ for identifier, integration in iteritems(integrations):
+ with _installer_lock:
+ if identifier not in _installed_integrations:
+ logger.debug(
+ "Setting up previously not enabled integration %s", identifier
+ )
+ try:
+ type(integration).setup_once()
+ except NotImplementedError:
+ if getattr(integration, "install", None) is not None:
+ logger.warning(
+ "Integration %s: The install method is "
+ "deprecated. Use `setup_once`.",
+ identifier,
+ )
+ integration.install()
+ else:
+ raise
+ except DidNotEnable as e:
+ if identifier not in used_as_default_integration:
+ raise
+
+ logger.debug(
+ "Did not enable default integration %s: %s", identifier, e
+ )
+
+ _installed_integrations.add(identifier)
+
+ for identifier in integrations:
+ logger.debug("Enabling integration %s", identifier)
+
+ return integrations
+
+
+class DidNotEnable(Exception):
+ """
+ The integration could not be enabled due to a trivial user error like
+ `flask` not being installed for the `FlaskIntegration`.
+
+ This exception is silently swallowed for default integrations, but reraised
+ for explicitly enabled integrations.
+ """
+
+
+class Integration(object):
+ """Baseclass for all integrations.
+
+ To accept options for an integration, implement your own constructor that
+ saves those options on `self`.
+ """
+
+ install = None
+ """Legacy method, do not implement."""
+
+ identifier = None # type: str
+ """String unique ID of integration type"""
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ """
+ Initialize the integration.
+
+ This function is only called once, ever. Configuration is not available
+ at this point, so the only thing to do here is to hook into exception
+ handlers, and perhaps do monkeypatches.
+
+ Inside those hooks `Integration.current` can be used to access the
+ instance again.
+ """
+ raise NotImplementedError()
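
A minimal sketch of a custom integration built on the `Integration` base class above (not part of the patch). `MarkerIntegration` is a made-up name, and the empty DSN disables sending while still creating a client:

    import sentry_sdk
    from sentry_sdk import Hub
    from sentry_sdk.integrations import Integration

    class MarkerIntegration(Integration):
        identifier = "marker"  # key under which the client stores the instance

        @staticmethod
        def setup_once():
            # Called at most once per process by setup_integrations() above.
            print("marker integration installed")

    sentry_sdk.init(dsn="", integrations=[MarkerIntegration()])
    assert Hub.current.get_integration(MarkerIntegration) is not None
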
diff --git a/third_party/python/sentry_sdk/sentry_sdk/integrations/_wsgi_common.py b/third_party/python/sentry_sdk/sentry_sdk/integrations/_wsgi_common.py
new file mode 100644
index 0000000000..f874663883
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/integrations/_wsgi_common.py
@@ -0,0 +1,180 @@
+import json
+
+from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk.utils import AnnotatedValue
+from sentry_sdk._compat import text_type, iteritems
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ import sentry_sdk
+
+ from typing import Any
+ from typing import Dict
+ from typing import Optional
+ from typing import Union
+
+
+SENSITIVE_ENV_KEYS = (
+ "REMOTE_ADDR",
+ "HTTP_X_FORWARDED_FOR",
+ "HTTP_SET_COOKIE",
+ "HTTP_COOKIE",
+ "HTTP_AUTHORIZATION",
+ "HTTP_X_FORWARDED_FOR",
+ "HTTP_X_REAL_IP",
+)
+
+SENSITIVE_HEADERS = tuple(
+ x[len("HTTP_") :] for x in SENSITIVE_ENV_KEYS if x.startswith("HTTP_")
+)
+
+
+def request_body_within_bounds(client, content_length):
+ # type: (Optional[sentry_sdk.Client], int) -> bool
+ if client is None:
+ return False
+
+ bodies = client.options["request_bodies"]
+ return not (
+ bodies == "never"
+ or (bodies == "small" and content_length > 10 ** 3)
+ or (bodies == "medium" and content_length > 10 ** 4)
+ )
+
+
+class RequestExtractor(object):
+ def __init__(self, request):
+ # type: (Any) -> None
+ self.request = request
+
+ def extract_into_event(self, event):
+ # type: (Dict[str, Any]) -> None
+ client = Hub.current.client
+ if client is None:
+ return
+
+ data = None # type: Optional[Union[AnnotatedValue, Dict[str, Any]]]
+
+ content_length = self.content_length()
+ request_info = event.get("request", {})
+
+ if _should_send_default_pii():
+ request_info["cookies"] = dict(self.cookies())
+
+ if not request_body_within_bounds(client, content_length):
+ data = AnnotatedValue(
+ "",
+ {"rem": [["!config", "x", 0, content_length]], "len": content_length},
+ )
+ else:
+ parsed_body = self.parsed_body()
+ if parsed_body is not None:
+ data = parsed_body
+ elif self.raw_data():
+ data = AnnotatedValue(
+ "",
+ {"rem": [["!raw", "x", 0, content_length]], "len": content_length},
+ )
+ else:
+ data = None
+
+ if data is not None:
+ request_info["data"] = data
+
+ event["request"] = request_info
+
+ def content_length(self):
+ # type: () -> int
+ try:
+ return int(self.env().get("CONTENT_LENGTH", 0))
+ except ValueError:
+ return 0
+
+ def cookies(self):
+ # type: () -> Dict[str, Any]
+ raise NotImplementedError()
+
+ def raw_data(self):
+ # type: () -> Optional[Union[str, bytes]]
+ raise NotImplementedError()
+
+ def form(self):
+ # type: () -> Optional[Dict[str, Any]]
+ raise NotImplementedError()
+
+ def parsed_body(self):
+ # type: () -> Optional[Dict[str, Any]]
+ form = self.form()
+ files = self.files()
+ if form or files:
+ data = dict(iteritems(form))
+ for k, v in iteritems(files):
+ size = self.size_of_file(v)
+ data[k] = AnnotatedValue(
+ "", {"len": size, "rem": [["!raw", "x", 0, size]]}
+ )
+
+ return data
+
+ return self.json()
+
+ def is_json(self):
+ # type: () -> bool
+ return _is_json_content_type(self.env().get("CONTENT_TYPE"))
+
+ def json(self):
+ # type: () -> Optional[Any]
+ try:
+ if not self.is_json():
+ return None
+
+ raw_data = self.raw_data()
+ if raw_data is None:
+ return None
+
+ if isinstance(raw_data, text_type):
+ return json.loads(raw_data)
+ else:
+ return json.loads(raw_data.decode("utf-8"))
+ except ValueError:
+ pass
+
+ return None
+
+ def files(self):
+ # type: () -> Optional[Dict[str, Any]]
+ raise NotImplementedError()
+
+ def size_of_file(self, file):
+ # type: (Any) -> int
+ raise NotImplementedError()
+
+ def env(self):
+ # type: () -> Dict[str, Any]
+ raise NotImplementedError()
+
+
+def _is_json_content_type(ct):
+ # type: (Optional[str]) -> bool
+ mt = (ct or "").split(";", 1)[0]
+ return (
+ mt == "application/json"
+        or (mt.startswith("application/") and mt.endswith("+json"))
+ )
+
+
+def _filter_headers(headers):
+ # type: (Dict[str, str]) -> Dict[str, str]
+ if _should_send_default_pii():
+ return headers
+
+ return {
+ k: (
+ v
+ if k.upper().replace("-", "_") not in SENSITIVE_HEADERS
+ else AnnotatedValue("", {"rem": [["!config", "x", 0, len(v)]]})
+ )
+ for k, v in iteritems(headers)
+ }
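
A short sketch of how `_filter_headers` above behaves (not part of the patch). With no client bound, or with `send_default_pii` left at its default of off, sensitive headers are replaced by an `AnnotatedValue` stub:

    from sentry_sdk.integrations._wsgi_common import _filter_headers

    headers = {"Authorization": "Bearer secret-token", "Accept": "application/json"}
    filtered = _filter_headers(headers)
    # "Authorization" becomes an AnnotatedValue placeholder while
    # "Accept" passes through unchanged.
    print(type(filtered["Authorization"]).__name__, filtered["Accept"])
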
diff --git a/third_party/python/sentry_sdk/sentry_sdk/integrations/aiohttp.py b/third_party/python/sentry_sdk/sentry_sdk/integrations/aiohttp.py
new file mode 100644
index 0000000000..02c76df7ef
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/integrations/aiohttp.py
@@ -0,0 +1,211 @@
+import sys
+import weakref
+
+from sentry_sdk._compat import reraise
+from sentry_sdk.hub import Hub
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.integrations.logging import ignore_logger
+from sentry_sdk.integrations._wsgi_common import (
+ _filter_headers,
+ request_body_within_bounds,
+)
+from sentry_sdk.tracing import Span
+from sentry_sdk.utils import (
+ capture_internal_exceptions,
+ event_from_exception,
+ transaction_from_function,
+ HAS_REAL_CONTEXTVARS,
+ AnnotatedValue,
+)
+
+try:
+ import asyncio
+
+ from aiohttp import __version__ as AIOHTTP_VERSION
+ from aiohttp.web import Application, HTTPException, UrlDispatcher
+except ImportError:
+ raise DidNotEnable("AIOHTTP not installed")
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from aiohttp.web_request import Request
+ from aiohttp.abc import AbstractMatchInfo
+ from typing import Any
+ from typing import Dict
+ from typing import Optional
+ from typing import Tuple
+ from typing import Callable
+ from typing import Union
+
+ from sentry_sdk.utils import ExcInfo
+ from sentry_sdk._types import EventProcessor
+
+
+class AioHttpIntegration(Integration):
+ identifier = "aiohttp"
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+
+ try:
+ version = tuple(map(int, AIOHTTP_VERSION.split(".")))
+ except (TypeError, ValueError):
+ raise DidNotEnable("AIOHTTP version unparseable: {}".format(version))
+
+ if version < (3, 4):
+ raise DidNotEnable("AIOHTTP 3.4 or newer required.")
+
+ if not HAS_REAL_CONTEXTVARS:
+ # We better have contextvars or we're going to leak state between
+ # requests.
+ raise RuntimeError(
+ "The aiohttp integration for Sentry requires Python 3.7+ "
+ " or aiocontextvars package"
+ )
+
+ ignore_logger("aiohttp.server")
+
+ old_handle = Application._handle
+
+ async def sentry_app_handle(self, request, *args, **kwargs):
+ # type: (Any, Request, *Any, **Any) -> Any
+ async def inner():
+ # type: () -> Any
+ hub = Hub.current
+ if hub.get_integration(AioHttpIntegration) is None:
+ return await old_handle(self, request, *args, **kwargs)
+
+ weak_request = weakref.ref(request)
+
+ with Hub(Hub.current) as hub:
+ with hub.configure_scope() as scope:
+ scope.clear_breadcrumbs()
+ scope.add_event_processor(_make_request_processor(weak_request))
+
+ span = Span.continue_from_headers(request.headers)
+ span.op = "http.server"
+ # If this transaction name makes it to the UI, AIOHTTP's
+ # URL resolver did not find a route or died trying.
+ span.transaction = "generic AIOHTTP request"
+
+ with hub.start_span(span):
+ try:
+ response = await old_handle(self, request)
+ except HTTPException as e:
+ span.set_http_status(e.status_code)
+ raise
+ except asyncio.CancelledError:
+ span.set_status("cancelled")
+ raise
+ except Exception:
+ # This will probably map to a 500 but seems like we
+ # have no way to tell. Do not set span status.
+ reraise(*_capture_exception(hub))
+
+ span.set_http_status(response.status)
+ return response
+
+ # Explicitly wrap in task such that current contextvar context is
+ # copied. Just doing `return await inner()` will leak scope data
+ # between requests.
+ return await asyncio.get_event_loop().create_task(inner())
+
+ Application._handle = sentry_app_handle
+
+ old_urldispatcher_resolve = UrlDispatcher.resolve
+
+ async def sentry_urldispatcher_resolve(self, request):
+ # type: (UrlDispatcher, Request) -> AbstractMatchInfo
+ rv = await old_urldispatcher_resolve(self, request)
+
+ name = None
+
+ try:
+ name = transaction_from_function(rv.handler)
+ except Exception:
+ pass
+
+ if name is not None:
+ with Hub.current.configure_scope() as scope:
+ scope.transaction = name
+
+ return rv
+
+ UrlDispatcher.resolve = sentry_urldispatcher_resolve
+
+
+def _make_request_processor(weak_request):
+ # type: (Callable[[], Request]) -> EventProcessor
+ def aiohttp_processor(
+ event, # type: Dict[str, Any]
+ hint, # type: Dict[str, Tuple[type, BaseException, Any]]
+ ):
+ # type: (...) -> Dict[str, Any]
+ request = weak_request()
+ if request is None:
+ return event
+
+ with capture_internal_exceptions():
+ request_info = event.setdefault("request", {})
+
+ request_info["url"] = "%s://%s%s" % (
+ request.scheme,
+ request.host,
+ request.path,
+ )
+
+ request_info["query_string"] = request.query_string
+ request_info["method"] = request.method
+ request_info["env"] = {"REMOTE_ADDR": request.remote}
+
+ hub = Hub.current
+ request_info["headers"] = _filter_headers(dict(request.headers))
+
+ # Just attach raw data here if it is within bounds, if available.
+ # Unfortunately there's no way to get structured data from aiohttp
+ # without awaiting on some coroutine.
+ request_info["data"] = get_aiohttp_request_data(hub, request)
+
+ return event
+
+ return aiohttp_processor
+
+
+def _capture_exception(hub):
+ # type: (Hub) -> ExcInfo
+ exc_info = sys.exc_info()
+ event, hint = event_from_exception(
+ exc_info,
+ client_options=hub.client.options, # type: ignore
+ mechanism={"type": "aiohttp", "handled": False},
+ )
+ hub.capture_event(event, hint=hint)
+ return exc_info
+
+
+BODY_NOT_READ_MESSAGE = "[Can't show request body due to implementation details.]"
+
+
+def get_aiohttp_request_data(hub, request):
+ # type: (Hub, Request) -> Union[Optional[str], AnnotatedValue]
+ bytes_body = request._read_bytes
+
+ if bytes_body is not None:
+ # we have body to show
+ if not request_body_within_bounds(hub.client, len(bytes_body)):
+
+ return AnnotatedValue(
+ "",
+ {"rem": [["!config", "x", 0, len(bytes_body)]], "len": len(bytes_body)},
+ )
+ encoding = request.charset or "utf-8"
+ return bytes_body.decode(encoding, "replace")
+
+ if request.can_read_body:
+ # body exists but we can't show it
+ return BODY_NOT_READ_MESSAGE
+
+ # request has no body
+ return None
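
A minimal sketch of wiring the integration above into an aiohttp application (not part of the patch). The empty DSN disables sending; per `setup_once` this needs aiohttp 3.4+ and real contextvars (Python 3.7+ or aiocontextvars):

    import sentry_sdk
    from aiohttp import web
    from sentry_sdk.integrations.aiohttp import AioHttpIntegration

    sentry_sdk.init(dsn="", integrations=[AioHttpIntegration()])

    async def boom(request):
        1 / 0  # unhandled; captured and re-raised by sentry_app_handle above
        return web.Response(text="never reached")

    app = web.Application()
    app.router.add_get("/boom", boom)
    # web.run_app(app)  # uncomment to serve
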
diff --git a/third_party/python/sentry_sdk/sentry_sdk/integrations/argv.py b/third_party/python/sentry_sdk/sentry_sdk/integrations/argv.py
new file mode 100644
index 0000000000..f005521d32
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/integrations/argv.py
@@ -0,0 +1,33 @@
+from __future__ import absolute_import
+
+import sys
+
+from sentry_sdk.hub import Hub
+from sentry_sdk.integrations import Integration
+from sentry_sdk.scope import add_global_event_processor
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Optional
+
+ from sentry_sdk._types import Event, Hint
+
+
+class ArgvIntegration(Integration):
+ identifier = "argv"
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ @add_global_event_processor
+ def processor(event, hint):
+ # type: (Event, Optional[Hint]) -> Optional[Event]
+ if Hub.current.get_integration(ArgvIntegration) is not None:
+ extra = event.setdefault("extra", {})
+ # If some event processor decided to set extra to e.g. an
+ # `int`, don't crash. Not here.
+ if isinstance(extra, dict):
+ extra["sys.argv"] = sys.argv
+
+ return event
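
A short sketch of the processor above in use (not part of the patch). Any event captured while `ArgvIntegration` is enabled carries the process arguments under `extra["sys.argv"]`:

    import sentry_sdk
    from sentry_sdk.integrations.argv import ArgvIntegration

    sentry_sdk.init(dsn="", integrations=[ArgvIntegration()])
    # The global event processor registered above attaches sys.argv to
    # the "extra" data of every event, e.g. the one produced here:
    sentry_sdk.capture_message("argv example")
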
diff --git a/third_party/python/sentry_sdk/sentry_sdk/integrations/asgi.py b/third_party/python/sentry_sdk/sentry_sdk/integrations/asgi.py
new file mode 100644
index 0000000000..762634f82f
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/integrations/asgi.py
@@ -0,0 +1,194 @@
+"""
+An ASGI middleware.
+
+Based on Tom Christie's `sentry-asgi <https://github.com/encode/sentry-asgi>`_.
+"""
+
+import asyncio
+import functools
+import inspect
+import urllib
+
+from sentry_sdk._types import MYPY
+from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk.integrations._wsgi_common import _filter_headers
+from sentry_sdk.utils import ContextVar, event_from_exception, transaction_from_function
+from sentry_sdk.tracing import Span
+
+if MYPY:
+ from typing import Dict
+ from typing import Any
+ from typing import Optional
+ from typing import Callable
+
+ from sentry_sdk._types import Event, Hint
+
+
+_asgi_middleware_applied = ContextVar("sentry_asgi_middleware_applied")
+
+
+def _capture_exception(hub, exc):
+ # type: (Hub, Any) -> None
+
+ # Check client here as it might have been unset while streaming response
+ if hub.client is not None:
+ event, hint = event_from_exception(
+ exc,
+ client_options=hub.client.options,
+ mechanism={"type": "asgi", "handled": False},
+ )
+ hub.capture_event(event, hint=hint)
+
+
+def _looks_like_asgi3(app):
+ # type: (Any) -> bool
+ """
+ Try to figure out if an application object supports ASGI3.
+
+ This is how uvicorn figures out the application version as well.
+ """
+ if inspect.isclass(app):
+ return hasattr(app, "__await__")
+ elif inspect.isfunction(app):
+ return asyncio.iscoroutinefunction(app)
+ else:
+ call = getattr(app, "__call__", None) # noqa
+ return asyncio.iscoroutinefunction(call)
+
+
+class SentryAsgiMiddleware:
+ __slots__ = ("app", "__call__")
+
+ def __init__(self, app):
+ # type: (Any) -> None
+ self.app = app
+
+ if _looks_like_asgi3(app):
+ self.__call__ = self._run_asgi3 # type: Callable[..., Any]
+ else:
+ self.__call__ = self._run_asgi2
+
+ def _run_asgi2(self, scope):
+ # type: (Any) -> Any
+ async def inner(receive, send):
+ # type: (Any, Any) -> Any
+ return await self._run_app(scope, lambda: self.app(scope)(receive, send))
+
+ return inner
+
+ async def _run_asgi3(self, scope, receive, send):
+ # type: (Any, Any, Any) -> Any
+ return await self._run_app(scope, lambda: self.app(scope, receive, send))
+
+ async def _run_app(self, scope, callback):
+ # type: (Any, Any) -> Any
+ if _asgi_middleware_applied.get(False):
+ return await callback()
+
+ _asgi_middleware_applied.set(True)
+ try:
+ hub = Hub(Hub.current)
+ with hub:
+ with hub.configure_scope() as sentry_scope:
+ sentry_scope.clear_breadcrumbs()
+ sentry_scope._name = "asgi"
+ processor = functools.partial(
+ self.event_processor, asgi_scope=scope
+ )
+ sentry_scope.add_event_processor(processor)
+
+ if scope["type"] in ("http", "websocket"):
+ span = Span.continue_from_headers(dict(scope["headers"]))
+ span.op = "{}.server".format(scope["type"])
+ else:
+ span = Span()
+ span.op = "asgi.server"
+
+ span.set_tag("asgi.type", scope["type"])
+ span.transaction = "generic ASGI request"
+
+ with hub.start_span(span) as span:
+ # XXX: Would be cool to have correct span status, but we
+ # would have to wrap send(). That is a bit hard to do with
+ # the current abstraction over ASGI 2/3.
+ try:
+ return await callback()
+ except Exception as exc:
+ _capture_exception(hub, exc)
+ raise exc from None
+ finally:
+ _asgi_middleware_applied.set(False)
+
+ def event_processor(self, event, hint, asgi_scope):
+ # type: (Event, Hint, Any) -> Optional[Event]
+ request_info = event.get("request", {})
+
+ if asgi_scope["type"] in ("http", "websocket"):
+ request_info["url"] = self.get_url(asgi_scope)
+ request_info["method"] = asgi_scope["method"]
+ request_info["headers"] = _filter_headers(self.get_headers(asgi_scope))
+ request_info["query_string"] = self.get_query(asgi_scope)
+
+ if asgi_scope.get("client") and _should_send_default_pii():
+ request_info["env"] = {"REMOTE_ADDR": asgi_scope["client"][0]}
+
+ if asgi_scope.get("endpoint"):
+            # Web frameworks like Starlette mutate the ASGI env once routing is
+            # done, which is some time after the request has started. If we
+            # have an endpoint, overwrite our path-based transaction name.
+ event["transaction"] = self.get_transaction(asgi_scope)
+
+ event["request"] = request_info
+
+ return event
+
+ def get_url(self, scope):
+ # type: (Any) -> str
+ """
+ Extract URL from the ASGI scope, without also including the querystring.
+ """
+ scheme = scope.get("scheme", "http")
+ server = scope.get("server", None)
+ path = scope.get("root_path", "") + scope["path"]
+
+ for key, value in scope["headers"]:
+ if key == b"host":
+ host_header = value.decode("latin-1")
+ return "%s://%s%s" % (scheme, host_header, path)
+
+ if server is not None:
+ host, port = server
+ default_port = {"http": 80, "https": 443, "ws": 80, "wss": 443}[scheme]
+ if port != default_port:
+ return "%s://%s:%s%s" % (scheme, host, port, path)
+ return "%s://%s%s" % (scheme, host, path)
+ return path
+
+ def get_query(self, scope):
+ # type: (Any) -> Any
+ """
+ Extract querystring from the ASGI scope, in the format that the Sentry protocol expects.
+ """
+ return urllib.parse.unquote(scope["query_string"].decode("latin-1"))
+
+ def get_headers(self, scope):
+ # type: (Any) -> Dict[str, Any]
+ """
+ Extract headers from the ASGI scope, in the format that the Sentry protocol expects.
+ """
+ headers = {} # type: Dict[str, str]
+ for raw_key, raw_value in scope["headers"]:
+ key = raw_key.decode("latin-1")
+ value = raw_value.decode("latin-1")
+ if key in headers:
+ headers[key] = headers[key] + ", " + value
+ else:
+ headers[key] = value
+ return headers
+
+ def get_transaction(self, scope):
+ # type: (Any) -> Optional[str]
+ """
+ Return a transaction string to identify the routed endpoint.
+ """
+ return transaction_from_function(scope["endpoint"])
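
A minimal sketch of wrapping a raw ASGI3 application with the middleware above (not part of the patch). The empty DSN means events are built but not sent:

    import sentry_sdk
    from sentry_sdk.integrations.asgi import SentryAsgiMiddleware

    sentry_sdk.init(dsn="")

    async def app(scope, receive, send):
        # A plain ASGI3 callable; _looks_like_asgi3 above detects it as such.
        await send({"type": "http.response.start", "status": 200, "headers": []})
        await send({"type": "http.response.body", "body": b"ok"})

    app = SentryAsgiMiddleware(app)  # exceptions raised in app() are now captured
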
diff --git a/third_party/python/sentry_sdk/sentry_sdk/integrations/atexit.py b/third_party/python/sentry_sdk/sentry_sdk/integrations/atexit.py
new file mode 100644
index 0000000000..18fe657bff
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/integrations/atexit.py
@@ -0,0 +1,62 @@
+from __future__ import absolute_import
+
+import os
+import sys
+import atexit
+
+from sentry_sdk.hub import Hub
+from sentry_sdk.utils import logger
+from sentry_sdk.integrations import Integration
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+
+ from typing import Any
+ from typing import Optional
+
+
+def default_callback(pending, timeout):
+ # type: (int, int) -> None
+ """This is the default shutdown callback that is set on the options.
+ It prints out a message to stderr that informs the user that some events
+ are still pending and the process is waiting for them to flush out.
+ """
+
+ def echo(msg):
+ # type: (str) -> None
+ sys.stderr.write(msg + "\n")
+
+ echo("Sentry is attempting to send %i pending error messages" % pending)
+ echo("Waiting up to %s seconds" % timeout)
+ echo("Press Ctrl-%s to quit" % (os.name == "nt" and "Break" or "C"))
+ sys.stderr.flush()
+
+
+class AtexitIntegration(Integration):
+ identifier = "atexit"
+
+ def __init__(self, callback=None):
+ # type: (Optional[Any]) -> None
+ if callback is None:
+ callback = default_callback
+ self.callback = callback
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ @atexit.register
+ def _shutdown():
+ # type: () -> None
+ logger.debug("atexit: got shutdown signal")
+ hub = Hub.main
+ integration = hub.get_integration(AtexitIntegration)
+ if integration is not None:
+ logger.debug("atexit: shutting down client")
+
+ # If there is a session on the hub, close it now.
+ hub.end_session()
+
+ # If an integration is there, a client has to be there.
+ client = hub.client # type: Any
+ client.close(callback=integration.callback)
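
A small sketch of overriding the shutdown callback defined above (not part of the patch):

    import sentry_sdk
    from sentry_sdk.integrations.atexit import AtexitIntegration

    def quiet_callback(pending, timeout):
        # Replace default_callback's stderr output with silence.
        pass

    sentry_sdk.init(dsn="", integrations=[AtexitIntegration(callback=quiet_callback)])
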
diff --git a/third_party/python/sentry_sdk/sentry_sdk/integrations/aws_lambda.py b/third_party/python/sentry_sdk/sentry_sdk/integrations/aws_lambda.py
new file mode 100644
index 0000000000..3a08d998db
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/integrations/aws_lambda.py
@@ -0,0 +1,254 @@
+from datetime import datetime, timedelta
+from os import environ
+import sys
+
+from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk._compat import reraise
+from sentry_sdk.utils import (
+ AnnotatedValue,
+ capture_internal_exceptions,
+ event_from_exception,
+ logger,
+)
+from sentry_sdk.integrations import Integration
+from sentry_sdk.integrations._wsgi_common import _filter_headers
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Any
+ from typing import TypeVar
+ from typing import Callable
+ from typing import Optional
+
+ from sentry_sdk._types import EventProcessor, Event, Hint
+
+ F = TypeVar("F", bound=Callable[..., Any])
+
+
+def _wrap_handler(handler):
+ # type: (F) -> F
+ def sentry_handler(event, context, *args, **kwargs):
+ # type: (Any, Any, *Any, **Any) -> Any
+ hub = Hub.current
+ integration = hub.get_integration(AwsLambdaIntegration)
+ if integration is None:
+ return handler(event, context, *args, **kwargs)
+
+ # If an integration is there, a client has to be there.
+ client = hub.client # type: Any
+
+ with hub.push_scope() as scope:
+ with capture_internal_exceptions():
+ scope.clear_breadcrumbs()
+ scope.transaction = context.function_name
+ scope.add_event_processor(_make_request_event_processor(event, context))
+
+ try:
+ return handler(event, context, *args, **kwargs)
+ except Exception:
+ exc_info = sys.exc_info()
+ event, hint = event_from_exception(
+ exc_info,
+ client_options=client.options,
+ mechanism={"type": "aws_lambda", "handled": False},
+ )
+ hub.capture_event(event, hint=hint)
+ reraise(*exc_info)
+
+ return sentry_handler # type: ignore
+
+
+def _drain_queue():
+ # type: () -> None
+ with capture_internal_exceptions():
+ hub = Hub.current
+ integration = hub.get_integration(AwsLambdaIntegration)
+ if integration is not None:
+ # Flush out the event queue before AWS kills the
+ # process.
+ hub.flush()
+
+
+class AwsLambdaIntegration(Integration):
+ identifier = "aws_lambda"
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ import __main__ as lambda_bootstrap # type: ignore
+
+ pre_37 = True # Python 3.6 or 2.7
+
+ if not hasattr(lambda_bootstrap, "handle_http_request"):
+ try:
+ import bootstrap as lambda_bootstrap # type: ignore
+
+ pre_37 = False # Python 3.7
+ except ImportError:
+ pass
+
+ if not hasattr(lambda_bootstrap, "handle_event_request"):
+ logger.warning(
+ "Not running in AWS Lambda environment, "
+ "AwsLambdaIntegration disabled"
+ )
+ return
+
+ if pre_37:
+ old_handle_event_request = lambda_bootstrap.handle_event_request
+
+ def sentry_handle_event_request(request_handler, *args, **kwargs):
+ # type: (Any, *Any, **Any) -> Any
+ request_handler = _wrap_handler(request_handler)
+ return old_handle_event_request(request_handler, *args, **kwargs)
+
+ lambda_bootstrap.handle_event_request = sentry_handle_event_request
+
+ old_handle_http_request = lambda_bootstrap.handle_http_request
+
+ def sentry_handle_http_request(request_handler, *args, **kwargs):
+ # type: (Any, *Any, **Any) -> Any
+ request_handler = _wrap_handler(request_handler)
+ return old_handle_http_request(request_handler, *args, **kwargs)
+
+ lambda_bootstrap.handle_http_request = sentry_handle_http_request
+
+ # Patch to_json to drain the queue. This should work even when the
+ # SDK is initialized inside of the handler
+
+ old_to_json = lambda_bootstrap.to_json
+
+ def sentry_to_json(*args, **kwargs):
+ # type: (*Any, **Any) -> Any
+ _drain_queue()
+ return old_to_json(*args, **kwargs)
+
+ lambda_bootstrap.to_json = sentry_to_json
+ else:
+ old_handle_event_request = lambda_bootstrap.handle_event_request
+
+ def sentry_handle_event_request( # type: ignore
+ lambda_runtime_client, request_handler, *args, **kwargs
+ ):
+ request_handler = _wrap_handler(request_handler)
+ return old_handle_event_request(
+ lambda_runtime_client, request_handler, *args, **kwargs
+ )
+
+ lambda_bootstrap.handle_event_request = sentry_handle_event_request
+
+ # Patch the runtime client to drain the queue. This should work
+ # even when the SDK is initialized inside of the handler
+
+ def _wrap_post_function(f):
+ # type: (F) -> F
+ def inner(*args, **kwargs):
+ # type: (*Any, **Any) -> Any
+ _drain_queue()
+ return f(*args, **kwargs)
+
+ return inner # type: ignore
+
+ lambda_bootstrap.LambdaRuntimeClient.post_invocation_result = _wrap_post_function(
+ lambda_bootstrap.LambdaRuntimeClient.post_invocation_result
+ )
+ lambda_bootstrap.LambdaRuntimeClient.post_invocation_error = _wrap_post_function(
+ lambda_bootstrap.LambdaRuntimeClient.post_invocation_error
+ )
+
+
+def _make_request_event_processor(aws_event, aws_context):
+ # type: (Any, Any) -> EventProcessor
+ start_time = datetime.now()
+
+ def event_processor(event, hint, start_time=start_time):
+ # type: (Event, Hint, datetime) -> Optional[Event]
+ extra = event.setdefault("extra", {})
+ extra["lambda"] = {
+ "function_name": aws_context.function_name,
+ "function_version": aws_context.function_version,
+ "invoked_function_arn": aws_context.invoked_function_arn,
+ "remaining_time_in_millis": aws_context.get_remaining_time_in_millis(),
+ "aws_request_id": aws_context.aws_request_id,
+ }
+
+ extra["cloudwatch logs"] = {
+ "url": _get_cloudwatch_logs_url(aws_context, start_time),
+ "log_group": aws_context.log_group_name,
+ "log_stream": aws_context.log_stream_name,
+ }
+
+ request = event.get("request", {})
+
+ if "httpMethod" in aws_event:
+ request["method"] = aws_event["httpMethod"]
+
+ request["url"] = _get_url(aws_event, aws_context)
+
+ if "queryStringParameters" in aws_event:
+ request["query_string"] = aws_event["queryStringParameters"]
+
+ if "headers" in aws_event:
+ request["headers"] = _filter_headers(aws_event["headers"])
+
+ if aws_event.get("body", None):
+ # Unfortunately couldn't find a way to get structured body from AWS
+ # event. Meaning every body is unstructured to us.
+ request["data"] = AnnotatedValue("", {"rem": [["!raw", "x", 0, 0]]})
+
+ if _should_send_default_pii():
+ user_info = event.setdefault("user", {})
+
+ id = aws_event.get("identity", {}).get("userArn")
+ if id is not None:
+ user_info.setdefault("id", id)
+
+ ip = aws_event.get("identity", {}).get("sourceIp")
+ if ip is not None:
+ user_info.setdefault("ip_address", ip)
+
+ event["request"] = request
+
+ return event
+
+ return event_processor
+
+
+def _get_url(event, context):
+ # type: (Any, Any) -> str
+ path = event.get("path", None)
+ headers = event.get("headers", {})
+ host = headers.get("Host", None)
+ proto = headers.get("X-Forwarded-Proto", None)
+ if proto and host and path:
+ return "{}://{}{}".format(proto, host, path)
+ return "awslambda:///{}".format(context.function_name)
+
+
+def _get_cloudwatch_logs_url(context, start_time):
+ # type: (Any, datetime) -> str
+ """
+ Generates a CloudWatchLogs console URL based on the context object
+
+ Arguments:
+ context {Any} -- context from lambda handler
+
+ Returns:
+ str -- AWS Console URL to logs.
+ """
+ formatstring = "%Y-%m-%dT%H:%M:%S"
+
+ url = (
+ "https://console.aws.amazon.com/cloudwatch/home?region={region}"
+ "#logEventViewer:group={log_group};stream={log_stream}"
+ ";start={start_time};end={end_time}"
+ ).format(
+ region=environ.get("AWS_REGION"),
+ log_group=context.log_group_name,
+ log_stream=context.log_stream_name,
+ start_time=(start_time - timedelta(seconds=1)).strftime(formatstring),
+ end_time=(datetime.now() + timedelta(seconds=2)).strftime(formatstring),
+ )
+
+ return url
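
A minimal handler sketch for the integration above (not part of the patch). The patching only takes effect inside the AWS Lambda runtime, where the bootstrap module is importable, so running this locally is a no-op:

    import sentry_sdk
    from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration

    sentry_sdk.init(dsn="", integrations=[AwsLambdaIntegration()])

    def handler(event, context):
        # Inside Lambda, _wrap_handler above reports this error and
        # _drain_queue flushes it before the runtime posts the result.
        return 1 / 0
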
diff --git a/third_party/python/sentry_sdk/sentry_sdk/integrations/beam.py b/third_party/python/sentry_sdk/sentry_sdk/integrations/beam.py
new file mode 100644
index 0000000000..7252746a7f
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/integrations/beam.py
@@ -0,0 +1,184 @@
+from __future__ import absolute_import
+
+import sys
+import types
+from functools import wraps
+
+from sentry_sdk.hub import Hub
+from sentry_sdk._compat import reraise
+from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
+from sentry_sdk.integrations import Integration
+from sentry_sdk.integrations.logging import ignore_logger
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Any
+ from typing import Iterator
+ from typing import TypeVar
+ from typing import Optional
+ from typing import Callable
+
+ from sentry_sdk.client import Client
+ from sentry_sdk._types import ExcInfo
+
+ T = TypeVar("T")
+ F = TypeVar("F", bound=Callable[..., Any])
+
+
+WRAPPED_FUNC = "_wrapped_{}_"
+INSPECT_FUNC = "_inspect_{}" # Required format per apache_beam/transforms/core.py
+USED_FUNC = "_sentry_used_"
+
+
+class BeamIntegration(Integration):
+ identifier = "beam"
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ from apache_beam.transforms.core import DoFn, ParDo # type: ignore
+
+ ignore_logger("root")
+ ignore_logger("bundle_processor.create")
+
+ function_patches = ["process", "start_bundle", "finish_bundle", "setup"]
+ for func_name in function_patches:
+ setattr(
+ DoFn,
+ INSPECT_FUNC.format(func_name),
+ _wrap_inspect_call(DoFn, func_name),
+ )
+
+ old_init = ParDo.__init__
+
+ def sentry_init_pardo(self, fn, *args, **kwargs):
+ # type: (ParDo, Any, *Any, **Any) -> Any
+ # Do not monkey patch init twice
+ if not getattr(self, "_sentry_is_patched", False):
+ for func_name in function_patches:
+ if not hasattr(fn, func_name):
+ continue
+ wrapped_func = WRAPPED_FUNC.format(func_name)
+
+ # Check to see if inspect is set and process is not
+ # to avoid monkey patching process twice.
+ # Check to see if function is part of object for
+ # backwards compatibility.
+ process_func = getattr(fn, func_name)
+ inspect_func = getattr(fn, INSPECT_FUNC.format(func_name))
+ if not getattr(inspect_func, USED_FUNC, False) and not getattr(
+ process_func, USED_FUNC, False
+ ):
+ setattr(fn, wrapped_func, process_func)
+ setattr(fn, func_name, _wrap_task_call(process_func))
+
+ self._sentry_is_patched = True
+ old_init(self, fn, *args, **kwargs)
+
+ ParDo.__init__ = sentry_init_pardo
+
+
+def _wrap_inspect_call(cls, func_name):
+ # type: (Any, Any) -> Any
+ from apache_beam.typehints.decorators import getfullargspec # type: ignore
+
+ if not hasattr(cls, func_name):
+ return None
+
+ def _inspect(self):
+ # type: (Any) -> Any
+ """
+ Inspect function overrides the way Beam gets argspec.
+ """
+ wrapped_func = WRAPPED_FUNC.format(func_name)
+ if hasattr(self, wrapped_func):
+ process_func = getattr(self, wrapped_func)
+ else:
+ process_func = getattr(self, func_name)
+ setattr(self, func_name, _wrap_task_call(process_func))
+ setattr(self, wrapped_func, process_func)
+
+ # getfullargspec is deprecated in more recent beam versions and get_function_args_defaults
+ # (which uses Signatures internally) should be used instead.
+ try:
+ from apache_beam.transforms.core import get_function_args_defaults
+
+ return get_function_args_defaults(process_func)
+ except ImportError:
+ return getfullargspec(process_func)
+
+ setattr(_inspect, USED_FUNC, True)
+ return _inspect
+
+
+def _wrap_task_call(func):
+ # type: (F) -> F
+ """
+    Wrap the task call with a try/except to capture exceptions.
+    Pass the client on to raise_exception so it can be rebound.
+ """
+ client = Hub.current.client
+
+ @wraps(func)
+ def _inner(*args, **kwargs):
+ # type: (*Any, **Any) -> Any
+ try:
+ gen = func(*args, **kwargs)
+ except Exception:
+ raise_exception(client)
+
+ if not isinstance(gen, types.GeneratorType):
+ return gen
+ return _wrap_generator_call(gen, client)
+
+ setattr(_inner, USED_FUNC, True)
+ return _inner # type: ignore
+
+
+def _capture_exception(exc_info, hub):
+ # type: (ExcInfo, Hub) -> None
+ """
+ Send Beam exception to Sentry.
+ """
+ integration = hub.get_integration(BeamIntegration)
+ if integration is None:
+ return
+
+ client = hub.client
+ if client is None:
+ return
+
+ event, hint = event_from_exception(
+ exc_info,
+ client_options=client.options,
+ mechanism={"type": "beam", "handled": False},
+ )
+ hub.capture_event(event, hint=hint)
+
+
+def raise_exception(client):
+ # type: (Optional[Client]) -> None
+ """
+ Raise an exception. If the client is not in the hub, rebind it.
+ """
+ hub = Hub.current
+ if hub.client is None:
+ hub.bind_client(client)
+ exc_info = sys.exc_info()
+ with capture_internal_exceptions():
+ _capture_exception(exc_info, hub)
+ reraise(*exc_info)
+
+
+def _wrap_generator_call(gen, client):
+ # type: (Iterator[T], Optional[Client]) -> Iterator[T]
+ """
+ Wrap the generator to handle any failures.
+ """
+ while True:
+ try:
+ yield next(gen)
+ except StopIteration:
+ break
+ except Exception:
+ raise_exception(client)
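
A short sketch of enabling the integration above (not part of the patch). The call must happen before the pipeline builds its `ParDo` transforms so `sentry_init_pardo` can wrap the `DoFn` methods:

    import sentry_sdk
    from sentry_sdk.integrations.beam import BeamIntegration

    sentry_sdk.init(dsn="", integrations=[BeamIntegration()])
    # Construct and run the apache_beam pipeline after this point; exceptions
    # raised in DoFn.process/start_bundle/finish_bundle are then captured.
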
diff --git a/third_party/python/sentry_sdk/sentry_sdk/integrations/bottle.py b/third_party/python/sentry_sdk/sentry_sdk/integrations/bottle.py
new file mode 100644
index 0000000000..80224e4dc4
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/integrations/bottle.py
@@ -0,0 +1,199 @@
+from __future__ import absolute_import
+
+from sentry_sdk.hub import Hub
+from sentry_sdk.utils import (
+ capture_internal_exceptions,
+ event_from_exception,
+ transaction_from_function,
+)
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
+from sentry_sdk.integrations._wsgi_common import RequestExtractor
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from sentry_sdk.integrations.wsgi import _ScopedResponse
+ from typing import Any
+ from typing import Dict
+ from typing import Callable
+ from typing import Optional
+ from bottle import FileUpload, FormsDict, LocalRequest # type: ignore
+
+ from sentry_sdk._types import EventProcessor
+
+try:
+ from bottle import (
+ Bottle,
+ Route,
+ request as bottle_request,
+ HTTPResponse,
+ __version__ as BOTTLE_VERSION,
+ )
+except ImportError:
+ raise DidNotEnable("Bottle not installed")
+
+
+TRANSACTION_STYLE_VALUES = ("endpoint", "url")
+
+
+class BottleIntegration(Integration):
+ identifier = "bottle"
+
+ transaction_style = None
+
+ def __init__(self, transaction_style="endpoint"):
+ # type: (str) -> None
+
+ if transaction_style not in TRANSACTION_STYLE_VALUES:
+ raise ValueError(
+ "Invalid value for transaction_style: %s (must be in %s)"
+ % (transaction_style, TRANSACTION_STYLE_VALUES)
+ )
+ self.transaction_style = transaction_style
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+
+ try:
+ version = tuple(map(int, BOTTLE_VERSION.split(".")))
+ except (TypeError, ValueError):
+ raise DidNotEnable("Unparseable Bottle version: {}".format(version))
+
+ if version < (0, 12):
+ raise DidNotEnable("Bottle 0.12 or newer required.")
+
+ # monkey patch method Bottle.__call__
+ old_app = Bottle.__call__
+
+ def sentry_patched_wsgi_app(self, environ, start_response):
+ # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse
+
+ hub = Hub.current
+ integration = hub.get_integration(BottleIntegration)
+ if integration is None:
+ return old_app(self, environ, start_response)
+
+ return SentryWsgiMiddleware(lambda *a, **kw: old_app(self, *a, **kw))(
+ environ, start_response
+ )
+
+ Bottle.__call__ = sentry_patched_wsgi_app
+
+ # monkey patch method Bottle._handle
+ old_handle = Bottle._handle
+
+ def _patched_handle(self, environ):
+ # type: (Bottle, Dict[str, Any]) -> Any
+ hub = Hub.current
+ integration = hub.get_integration(BottleIntegration)
+ if integration is None:
+ return old_handle(self, environ)
+
+ # create new scope
+ scope_manager = hub.push_scope()
+
+ with scope_manager:
+ app = self
+ with hub.configure_scope() as scope:
+ scope._name = "bottle"
+ scope.add_event_processor(
+ _make_request_event_processor(app, bottle_request, integration)
+ )
+ res = old_handle(self, environ)
+
+ # scope cleanup
+ return res
+
+ Bottle._handle = _patched_handle
+
+ # monkey patch method Route._make_callback
+ old_make_callback = Route._make_callback
+
+ def patched_make_callback(self, *args, **kwargs):
+ # type: (Route, *object, **object) -> Any
+ hub = Hub.current
+ integration = hub.get_integration(BottleIntegration)
+ prepared_callback = old_make_callback(self, *args, **kwargs)
+ if integration is None:
+ return prepared_callback
+
+ # If an integration is there, a client has to be there.
+ client = hub.client # type: Any
+
+ def wrapped_callback(*args, **kwargs):
+ # type: (*object, **object) -> Any
+
+ try:
+ res = prepared_callback(*args, **kwargs)
+ except HTTPResponse:
+ raise
+ except Exception as exception:
+ event, hint = event_from_exception(
+ exception,
+ client_options=client.options,
+ mechanism={"type": "bottle", "handled": False},
+ )
+ hub.capture_event(event, hint=hint)
+ raise exception
+
+ return res
+
+ return wrapped_callback
+
+ Route._make_callback = patched_make_callback
+
+
+class BottleRequestExtractor(RequestExtractor):
+ def env(self):
+ # type: () -> Dict[str, str]
+ return self.request.environ
+
+ def cookies(self):
+ # type: () -> Dict[str, str]
+ return self.request.cookies
+
+ def raw_data(self):
+ # type: () -> bytes
+ return self.request.body.read()
+
+ def form(self):
+ # type: () -> FormsDict
+ if self.is_json():
+ return None
+ return self.request.forms.decode()
+
+ def files(self):
+ # type: () -> Optional[Dict[str, str]]
+ if self.is_json():
+ return None
+
+ return self.request.files
+
+ def size_of_file(self, file):
+ # type: (FileUpload) -> int
+ return file.content_length
+
+
+def _make_request_event_processor(app, request, integration):
+ # type: (Bottle, LocalRequest, BottleIntegration) -> EventProcessor
+ def inner(event, hint):
+ # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+
+ try:
+ if integration.transaction_style == "endpoint":
+ event["transaction"] = request.route.name or transaction_from_function(
+ request.route.callback
+ )
+ elif integration.transaction_style == "url":
+ event["transaction"] = request.route.rule
+ except Exception:
+ pass
+
+ with capture_internal_exceptions():
+ BottleRequestExtractor(request).extract_into_event(event)
+
+ return event
+
+ return inner
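
A minimal sketch of the integration above on a Bottle app (not part of the patch). `transaction_style="url"` groups events by route rule instead of the resolved endpoint:

    import sentry_sdk
    from bottle import Bottle
    from sentry_sdk.integrations.bottle import BottleIntegration

    sentry_sdk.init(dsn="", integrations=[BottleIntegration(transaction_style="url")])

    app = Bottle()

    @app.route("/fail")
    def fail():
        raise RuntimeError("captured by wrapped_callback above")

    # app.run(host="localhost", port=8080)  # uncomment to serve
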
diff --git a/third_party/python/sentry_sdk/sentry_sdk/integrations/celery.py b/third_party/python/sentry_sdk/sentry_sdk/integrations/celery.py
new file mode 100644
index 0000000000..9b58796173
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/integrations/celery.py
@@ -0,0 +1,258 @@
+from __future__ import absolute_import
+
+import functools
+import sys
+
+from sentry_sdk.hub import Hub
+from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
+from sentry_sdk.tracing import Span
+from sentry_sdk._compat import reraise
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.integrations.logging import ignore_logger
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Any
+ from typing import TypeVar
+ from typing import Callable
+ from typing import Optional
+
+ from sentry_sdk._types import EventProcessor, Event, Hint, ExcInfo
+
+ F = TypeVar("F", bound=Callable[..., Any])
+
+
+try:
+ from celery import VERSION as CELERY_VERSION # type: ignore
+ from celery.exceptions import ( # type: ignore
+ SoftTimeLimitExceeded,
+ Retry,
+ Ignore,
+ Reject,
+ )
+except ImportError:
+ raise DidNotEnable("Celery not installed")
+
+
+CELERY_CONTROL_FLOW_EXCEPTIONS = (Retry, Ignore, Reject)
+
+
+class CeleryIntegration(Integration):
+ identifier = "celery"
+
+ def __init__(self, propagate_traces=True):
+ # type: (bool) -> None
+ self.propagate_traces = propagate_traces
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ if CELERY_VERSION < (3,):
+ raise DidNotEnable("Celery 3 or newer required.")
+
+ import celery.app.trace as trace # type: ignore
+
+ old_build_tracer = trace.build_tracer
+
+ def sentry_build_tracer(name, task, *args, **kwargs):
+ # type: (Any, Any, *Any, **Any) -> Any
+ if not getattr(task, "_sentry_is_patched", False):
+ # Need to patch both methods because older celery sometimes
+ # short-circuits to task.run if it thinks it's safe.
+ task.__call__ = _wrap_task_call(task, task.__call__)
+ task.run = _wrap_task_call(task, task.run)
+ task.apply_async = _wrap_apply_async(task, task.apply_async)
+
+ # `build_tracer` is apparently called for every task
+ # invocation. Can't wrap every celery task for every invocation
+ # or we will get infinitely nested wrapper functions.
+ task._sentry_is_patched = True
+
+ return _wrap_tracer(task, old_build_tracer(name, task, *args, **kwargs))
+
+ trace.build_tracer = sentry_build_tracer
+
+ _patch_worker_exit()
+
+ # This logger logs every status of every task that ran on the worker.
+ # Meaning that every task's breadcrumbs are full of stuff like "Task
+ # <foo> raised unexpected <bar>".
+ ignore_logger("celery.worker.job")
+ ignore_logger("celery.app.trace")
+
+ # This is stdout/err redirected to a logger, can't deal with this
+ # (need event_level=logging.WARN to reproduce)
+ ignore_logger("celery.redirected")
+
+
+def _wrap_apply_async(task, f):
+ # type: (Any, F) -> F
+ @functools.wraps(f)
+ def apply_async(*args, **kwargs):
+ # type: (*Any, **Any) -> Any
+ hub = Hub.current
+ integration = hub.get_integration(CeleryIntegration)
+ if integration is not None and integration.propagate_traces:
+ headers = None
+ for key, value in hub.iter_trace_propagation_headers():
+ if headers is None:
+ headers = dict(kwargs.get("headers") or {})
+ headers[key] = value
+ if headers is not None:
+ kwargs["headers"] = headers
+
+ with hub.start_span(op="celery.submit", description=task.name):
+ return f(*args, **kwargs)
+ else:
+ return f(*args, **kwargs)
+
+ return apply_async # type: ignore
+
+
+def _wrap_tracer(task, f):
+ # type: (Any, F) -> F
+
+ # Need to wrap tracer for pushing the scope before prerun is sent, and
+ # popping it after postrun is sent.
+ #
+ # This is the reason we don't use signals for hooking in the first place.
+ # Also because in Celery 3, signal dispatch returns early if one handler
+ # crashes.
+ @functools.wraps(f)
+ def _inner(*args, **kwargs):
+ # type: (*Any, **Any) -> Any
+ hub = Hub.current
+ if hub.get_integration(CeleryIntegration) is None:
+ return f(*args, **kwargs)
+
+ with hub.push_scope() as scope:
+ scope._name = "celery"
+ scope.clear_breadcrumbs()
+ scope.add_event_processor(_make_event_processor(task, *args, **kwargs))
+
+ span = Span.continue_from_headers(args[3].get("headers") or {})
+ span.op = "celery.task"
+ span.transaction = "unknown celery task"
+
+ # Could possibly use a better hook than this one
+ span.set_status("ok")
+
+ with capture_internal_exceptions():
+ # Celery task objects are not a thing to be trusted. Even
+ # something such as attribute access can fail.
+ span.transaction = task.name
+
+ with hub.start_span(span):
+ return f(*args, **kwargs)
+
+ return _inner # type: ignore
+
+
+def _wrap_task_call(task, f):
+ # type: (Any, F) -> F
+
+ # Need to wrap task call because the exception is caught before we get to
+ # see it. Also celery's reported stacktrace is untrustworthy.
+
+ # functools.wraps is important here because celery-once looks at this
+ # method's name.
+ # https://github.com/getsentry/sentry-python/issues/421
+ @functools.wraps(f)
+ def _inner(*args, **kwargs):
+ # type: (*Any, **Any) -> Any
+ try:
+ return f(*args, **kwargs)
+ except Exception:
+ exc_info = sys.exc_info()
+ with capture_internal_exceptions():
+ _capture_exception(task, exc_info)
+ reraise(*exc_info)
+
+ return _inner # type: ignore
+
+
+def _make_event_processor(task, uuid, args, kwargs, request=None):
+ # type: (Any, Any, Any, Any, Optional[Any]) -> EventProcessor
+ def event_processor(event, hint):
+ # type: (Event, Hint) -> Optional[Event]
+
+ with capture_internal_exceptions():
+ tags = event.setdefault("tags", {})
+ tags["celery_task_id"] = uuid
+ extra = event.setdefault("extra", {})
+ extra["celery-job"] = {
+ "task_name": task.name,
+ "args": args,
+ "kwargs": kwargs,
+ }
+
+ if "exc_info" in hint:
+ with capture_internal_exceptions():
+ if issubclass(hint["exc_info"][0], SoftTimeLimitExceeded):
+ event["fingerprint"] = [
+ "celery",
+ "SoftTimeLimitExceeded",
+ getattr(task, "name", task),
+ ]
+
+ return event
+
+ return event_processor
+
+
+def _capture_exception(task, exc_info):
+ # type: (Any, ExcInfo) -> None
+ hub = Hub.current
+
+ if hub.get_integration(CeleryIntegration) is None:
+ return
+ if isinstance(exc_info[1], CELERY_CONTROL_FLOW_EXCEPTIONS):
+ # ??? Doesn't map to anything
+ _set_status(hub, "aborted")
+ return
+
+ _set_status(hub, "internal_error")
+
+ if hasattr(task, "throws") and isinstance(exc_info[1], task.throws):
+ return
+
+ # If an integration is there, a client has to be there.
+ client = hub.client # type: Any
+
+ event, hint = event_from_exception(
+ exc_info,
+ client_options=client.options,
+ mechanism={"type": "celery", "handled": False},
+ )
+
+ hub.capture_event(event, hint=hint)
+
+
+def _set_status(hub, status):
+ # type: (Hub, str) -> None
+ with capture_internal_exceptions():
+ with hub.configure_scope() as scope:
+ if scope.span is not None:
+ scope.span.set_status(status)
+
+
+def _patch_worker_exit():
+ # type: () -> None
+
+ # Need to flush queue before worker shutdown because a crashing worker will
+ # call os._exit
+ from billiard.pool import Worker # type: ignore
+
+ old_workloop = Worker.workloop
+
+ def sentry_workloop(*args, **kwargs):
+ # type: (*Any, **Any) -> Any
+ try:
+ return old_workloop(*args, **kwargs)
+ finally:
+ with capture_internal_exceptions():
+ hub = Hub.current
+ if hub.get_integration(CeleryIntegration) is not None:
+ hub.flush()
+
+ Worker.workloop = sentry_workloop
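
For context, the wrapping above only takes effect once the integration is
registered with the SDK. A minimal, hypothetical setup sketch (the DSN and
broker URL are placeholders; `propagate_traces` is assumed to be the
constructor argument behind `integration.propagate_traces` used above):

    import sentry_sdk
    from celery import Celery
    from sentry_sdk.integrations.celery import CeleryIntegration

    # Registering the integration triggers CeleryIntegration.setup_once().
    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",
        integrations=[CeleryIntegration(propagate_traces=True)],
    )

    app = Celery("tasks", broker="redis://localhost:6379/0")

    @app.task
    def divide(a, b):
        # An unhandled ZeroDivisionError here is reported by _capture_exception()
        # with mechanism {"type": "celery", "handled": False}.
        return a / b
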
diff --git a/third_party/python/sentry_sdk/sentry_sdk/integrations/dedupe.py b/third_party/python/sentry_sdk/sentry_sdk/integrations/dedupe.py
new file mode 100644
index 0000000000..b023df2042
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/integrations/dedupe.py
@@ -0,0 +1,43 @@
+from sentry_sdk.hub import Hub
+from sentry_sdk.utils import ContextVar
+from sentry_sdk.integrations import Integration
+from sentry_sdk.scope import add_global_event_processor
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Optional
+
+ from sentry_sdk._types import Event, Hint
+
+
+class DedupeIntegration(Integration):
+ identifier = "dedupe"
+
+ def __init__(self):
+ # type: () -> None
+ self._last_seen = ContextVar("last-seen")
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ @add_global_event_processor
+ def processor(event, hint):
+ # type: (Event, Optional[Hint]) -> Optional[Event]
+ if hint is None:
+ return event
+
+ integration = Hub.current.get_integration(DedupeIntegration)
+
+ if integration is None:
+ return event
+
+ exc_info = hint.get("exc_info", None)
+ if exc_info is None:
+ return event
+
+ exc = exc_info[1]
+ if integration._last_seen.get(None) is exc:
+ return None
+ integration._last_seen.set(exc)
+ return event
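
The integration only drops an event when the very same exception object is
captured twice; a new exception of the same type still goes through. A
standalone sketch of that identity-based technique, using the stdlib
`contextvars` module instead of the SDK's `ContextVar` shim:

    from contextvars import ContextVar

    _last_seen = ContextVar("last-seen")

    def should_report(exc):
        # Report only if this exact exception instance has not been seen yet.
        if _last_seen.get(None) is exc:
            return False
        _last_seen.set(exc)
        return True

    err = ValueError("boom")
    assert should_report(err)                 # first capture is reported
    assert not should_report(err)             # same instance is deduplicated
    assert should_report(ValueError("boom"))  # a new instance is reported
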
diff --git a/third_party/python/sentry_sdk/sentry_sdk/integrations/django/__init__.py b/third_party/python/sentry_sdk/sentry_sdk/integrations/django/__init__.py
new file mode 100644
index 0000000000..4e62fe3b74
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/integrations/django/__init__.py
@@ -0,0 +1,484 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
+import sys
+import threading
+import weakref
+
+from sentry_sdk._types import MYPY
+from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk.scope import add_global_event_processor
+from sentry_sdk.serializer import add_global_repr_processor
+from sentry_sdk.tracing import record_sql_queries
+from sentry_sdk.utils import (
+ HAS_REAL_CONTEXTVARS,
+ logger,
+ capture_internal_exceptions,
+ event_from_exception,
+ transaction_from_function,
+ walk_exception_chain,
+)
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.integrations.logging import ignore_logger
+from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
+from sentry_sdk.integrations._wsgi_common import RequestExtractor
+
+try:
+ from django import VERSION as DJANGO_VERSION
+ from django.core import signals
+
+ try:
+ from django.urls import resolve
+ except ImportError:
+ from django.core.urlresolvers import resolve
+except ImportError:
+ raise DidNotEnable("Django not installed")
+
+
+from sentry_sdk.integrations.django.transactions import LEGACY_RESOLVER
+from sentry_sdk.integrations.django.templates import get_template_frame_from_exception
+from sentry_sdk.integrations.django.middleware import patch_django_middlewares
+
+
+if MYPY:
+ from typing import Any
+ from typing import Callable
+ from typing import Dict
+ from typing import Optional
+ from typing import Union
+ from typing import List
+
+ from django.core.handlers.wsgi import WSGIRequest
+ from django.http.response import HttpResponse
+ from django.http.request import QueryDict
+ from django.utils.datastructures import MultiValueDict
+
+ from sentry_sdk.integrations.wsgi import _ScopedResponse
+ from sentry_sdk._types import Event, Hint, EventProcessor, NotImplementedType
+
+
+if DJANGO_VERSION < (1, 10):
+
+ def is_authenticated(request_user):
+ # type: (Any) -> bool
+ return request_user.is_authenticated()
+
+
+else:
+
+ def is_authenticated(request_user):
+ # type: (Any) -> bool
+ return request_user.is_authenticated
+
+
+TRANSACTION_STYLE_VALUES = ("function_name", "url")
+
+
+class DjangoIntegration(Integration):
+ identifier = "django"
+
+ transaction_style = None
+ middleware_spans = None
+
+ def __init__(self, transaction_style="url", middleware_spans=True):
+ # type: (str, bool) -> None
+ if transaction_style not in TRANSACTION_STYLE_VALUES:
+ raise ValueError(
+ "Invalid value for transaction_style: %s (must be in %s)"
+ % (transaction_style, TRANSACTION_STYLE_VALUES)
+ )
+ self.transaction_style = transaction_style
+ self.middleware_spans = middleware_spans
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+
+ if DJANGO_VERSION < (1, 6):
+ raise DidNotEnable("Django 1.6 or newer is required.")
+
+ install_sql_hook()
+ # Patch in our custom middleware.
+
+ # logs an error for every 500
+ ignore_logger("django.server")
+ ignore_logger("django.request")
+
+ from django.core.handlers.wsgi import WSGIHandler
+
+ old_app = WSGIHandler.__call__
+
+ def sentry_patched_wsgi_handler(self, environ, start_response):
+ # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse
+ if Hub.current.get_integration(DjangoIntegration) is None:
+ return old_app(self, environ, start_response)
+
+ bound_old_app = old_app.__get__(self, WSGIHandler)
+
+ return SentryWsgiMiddleware(bound_old_app)(environ, start_response)
+
+ WSGIHandler.__call__ = sentry_patched_wsgi_handler
+
+ _patch_django_asgi_handler()
+
+ # patch get_response, because at that point we have the Django request
+ # object
+ from django.core.handlers.base import BaseHandler
+
+ old_get_response = BaseHandler.get_response
+
+ def sentry_patched_get_response(self, request):
+ # type: (Any, WSGIRequest) -> Union[HttpResponse, BaseException]
+ hub = Hub.current
+ integration = hub.get_integration(DjangoIntegration)
+ if integration is not None:
+ _patch_drf()
+
+ with hub.configure_scope() as scope:
+ # Rely on WSGI middleware to start a trace
+ try:
+ if integration.transaction_style == "function_name":
+ scope.transaction = transaction_from_function(
+ resolve(request.path).func
+ )
+ elif integration.transaction_style == "url":
+ scope.transaction = LEGACY_RESOLVER.resolve(request.path)
+ except Exception:
+ pass
+
+ scope.add_event_processor(
+ _make_event_processor(weakref.ref(request), integration)
+ )
+ return old_get_response(self, request)
+
+ BaseHandler.get_response = sentry_patched_get_response
+
+ signals.got_request_exception.connect(_got_request_exception)
+
+ @add_global_event_processor
+ def process_django_templates(event, hint):
+ # type: (Event, Optional[Hint]) -> Optional[Event]
+ if hint is None:
+ return event
+
+ exc_info = hint.get("exc_info", None)
+
+ if exc_info is None:
+ return event
+
+ exception = event.get("exception", None)
+
+ if exception is None:
+ return event
+
+ values = exception.get("values", None)
+
+ if values is None:
+ return event
+
+ for exception, (_, exc_value, _) in zip(
+ reversed(values), walk_exception_chain(exc_info)
+ ):
+ frame = get_template_frame_from_exception(exc_value)
+ if frame is not None:
+ frames = exception.get("stacktrace", {}).get("frames", [])
+
+ for i in reversed(range(len(frames))):
+ f = frames[i]
+ if (
+ f.get("function") in ("parse", "render")
+ and f.get("module") == "django.template.base"
+ ):
+ i += 1
+ break
+ else:
+ i = len(frames)
+
+ frames.insert(i, frame)
+
+ return event
+
+ @add_global_repr_processor
+ def _django_queryset_repr(value, hint):
+ # type: (Any, Dict[str, Any]) -> Union[NotImplementedType, str]
+ try:
+ # Django 1.6 can fail to import `QuerySet` when Django settings
+ # have not yet been initialized.
+ #
+ # If we fail to import, return `NotImplemented`. It's at least
+ # unlikely that we have a query set in `value` when importing
+ # `QuerySet` fails.
+ from django.db.models.query import QuerySet
+ except Exception:
+ return NotImplemented
+
+ if not isinstance(value, QuerySet) or value._result_cache:
+ return NotImplemented
+
+ # Do not call Hub.get_integration here. It is intentional that
+ # running under a new hub does not suddenly start executing
+ # querysets. This might be surprising to the user but it's likely
+ # less annoying.
+
+ return u"<%s from %s at 0x%x>" % (
+ value.__class__.__name__,
+ value.__module__,
+ id(value),
+ )
+
+ _patch_channels()
+ patch_django_middlewares()
+
+
+_DRF_PATCHED = False
+_DRF_PATCH_LOCK = threading.Lock()
+
+
+def _patch_drf():
+ # type: () -> None
+ """
+ Patch Django Rest Framework for more/better request data. DRF's request
+ type is a wrapper around Django's request type. The attribute we're
+ interested in is `request.data`, which is a cached property containing a
+ parsed request body. Reading a request body from that property is more
+ reliable than reading from any of Django's own properties, as those don't
+ hold payloads in memory and therefore can only be accessed once.
+
+ We patch the Django request object to include a weak backreference to the
+ DRF request object, such that we can later use either in
+ `DjangoRequestExtractor`.
+
+ This function is not called directly on SDK setup, because importing almost
+ any part of Django Rest Framework will try to access Django settings (where
+ `sentry_sdk.init()` might be called from in the first place). Instead we
+ run this function on every request and do the patching on the first
+ request.
+ """
+
+ global _DRF_PATCHED
+
+ if _DRF_PATCHED:
+ # Double-checked locking
+ return
+
+ with _DRF_PATCH_LOCK:
+ if _DRF_PATCHED:
+ return
+
+ # We set this regardless of whether the code below succeeds or fails.
+ # There is no point in trying to patch again on the next request.
+ _DRF_PATCHED = True
+
+ with capture_internal_exceptions():
+ try:
+ from rest_framework.views import APIView # type: ignore
+ except ImportError:
+ pass
+ else:
+ old_drf_initial = APIView.initial
+
+ def sentry_patched_drf_initial(self, request, *args, **kwargs):
+ # type: (APIView, Any, *Any, **Any) -> Any
+ with capture_internal_exceptions():
+ request._request._sentry_drf_request_backref = weakref.ref(
+ request
+ )
+ return old_drf_initial(self, request, *args, **kwargs)
+
+ APIView.initial = sentry_patched_drf_initial
+
+
+def _patch_channels():
+ # type: () -> None
+ try:
+ from channels.http import AsgiHandler # type: ignore
+ except ImportError:
+ return
+
+ if not HAS_REAL_CONTEXTVARS:
+ # We better have contextvars or we're going to leak state between
+ # requests.
+ #
+ # We cannot hard-raise here because channels may not be used at all in
+ # the current process.
+ logger.warning(
+ "We detected that you are using Django channels 2.0. To get proper "
+ "instrumentation for ASGI requests, the Sentry SDK requires "
+ "Python 3.7+ or the aiocontextvars package from PyPI."
+ )
+
+ from sentry_sdk.integrations.django.asgi import patch_channels_asgi_handler_impl
+
+ patch_channels_asgi_handler_impl(AsgiHandler)
+
+
+def _patch_django_asgi_handler():
+ # type: () -> None
+ try:
+ from django.core.handlers.asgi import ASGIHandler
+ except ImportError:
+ return
+
+ if not HAS_REAL_CONTEXTVARS:
+ # We better have contextvars or we're going to leak state between
+ # requests.
+ #
+ # We cannot hard-raise here because Django may not be used at all in
+ # the current process.
+ logger.warning(
+ "We detected that you are using Django 3. To get proper "
+ "instrumentation for ASGI requests, the Sentry SDK requires "
+ "Python 3.7+ or the aiocontextvars package from PyPI."
+ )
+
+ from sentry_sdk.integrations.django.asgi import patch_django_asgi_handler_impl
+
+ patch_django_asgi_handler_impl(ASGIHandler)
+
+
+def _make_event_processor(weak_request, integration):
+ # type: (Callable[[], WSGIRequest], DjangoIntegration) -> EventProcessor
+ def event_processor(event, hint):
+ # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+ # if the request is gone we are fine not logging the data from
+ # it. This might happen if the processor is pushed away to
+ # another thread.
+ request = weak_request()
+ if request is None:
+ return event
+
+ try:
+ drf_request = request._sentry_drf_request_backref()
+ if drf_request is not None:
+ request = drf_request
+ except AttributeError:
+ pass
+
+ with capture_internal_exceptions():
+ DjangoRequestExtractor(request).extract_into_event(event)
+
+ if _should_send_default_pii():
+ with capture_internal_exceptions():
+ _set_user_info(request, event)
+
+ return event
+
+ return event_processor
+
+
+def _got_request_exception(request=None, **kwargs):
+ # type: (WSGIRequest, **Any) -> None
+ hub = Hub.current
+ integration = hub.get_integration(DjangoIntegration)
+ if integration is not None:
+
+ # If an integration is there, a client has to be there.
+ client = hub.client # type: Any
+
+ event, hint = event_from_exception(
+ sys.exc_info(),
+ client_options=client.options,
+ mechanism={"type": "django", "handled": False},
+ )
+ hub.capture_event(event, hint=hint)
+
+
+class DjangoRequestExtractor(RequestExtractor):
+ def env(self):
+ # type: () -> Dict[str, str]
+ return self.request.META
+
+ def cookies(self):
+ # type: () -> Dict[str, str]
+ return self.request.COOKIES
+
+ def raw_data(self):
+ # type: () -> bytes
+ return self.request.body
+
+ def form(self):
+ # type: () -> QueryDict
+ return self.request.POST
+
+ def files(self):
+ # type: () -> MultiValueDict
+ return self.request.FILES
+
+ def size_of_file(self, file):
+ # type: (Any) -> int
+ return file.size
+
+ def parsed_body(self):
+ # type: () -> Optional[Dict[str, Any]]
+ try:
+ return self.request.data
+ except AttributeError:
+ return RequestExtractor.parsed_body(self)
+
+
+def _set_user_info(request, event):
+ # type: (WSGIRequest, Dict[str, Any]) -> None
+ user_info = event.setdefault("user", {})
+
+ user = getattr(request, "user", None)
+
+ if user is None or not is_authenticated(user):
+ return
+
+ try:
+ user_info.setdefault("id", str(user.pk))
+ except Exception:
+ pass
+
+ try:
+ user_info.setdefault("email", user.email)
+ except Exception:
+ pass
+
+ try:
+ user_info.setdefault("username", user.get_username())
+ except Exception:
+ pass
+
+
+def install_sql_hook():
+ # type: () -> None
+ """If installed this causes Django's queries to be captured."""
+ try:
+ from django.db.backends.utils import CursorWrapper
+ except ImportError:
+ from django.db.backends.util import CursorWrapper
+
+ try:
+ real_execute = CursorWrapper.execute
+ real_executemany = CursorWrapper.executemany
+ except AttributeError:
+ # This won't work on Django versions < 1.6
+ return
+
+ def execute(self, sql, params=None):
+ # type: (CursorWrapper, Any, Optional[Any]) -> Any
+ hub = Hub.current
+ if hub.get_integration(DjangoIntegration) is None:
+ return real_execute(self, sql, params)
+
+ with record_sql_queries(
+ hub, self.cursor, sql, params, paramstyle="format", executemany=False
+ ):
+ return real_execute(self, sql, params)
+
+ def executemany(self, sql, param_list):
+ # type: (CursorWrapper, Any, List[Any]) -> Any
+ hub = Hub.current
+ if hub.get_integration(DjangoIntegration) is None:
+ return real_executemany(self, sql, param_list)
+
+ with record_sql_queries(
+ hub, self.cursor, sql, param_list, paramstyle="format", executemany=True
+ ):
+ return real_executemany(self, sql, param_list)
+
+ CursorWrapper.execute = execute
+ CursorWrapper.executemany = executemany
+ ignore_logger("django.db.backends")
diff --git a/third_party/python/sentry_sdk/sentry_sdk/integrations/django/asgi.py b/third_party/python/sentry_sdk/sentry_sdk/integrations/django/asgi.py
new file mode 100644
index 0000000000..96ae3e0809
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/integrations/django/asgi.py
@@ -0,0 +1,47 @@
+"""
+Instrumentation for Django 3.0
+
+Since this file contains `async def`, it is conditionally imported in
+`sentry_sdk.integrations.django` (depending on the existence of
+`django.core.handlers.asgi`).
+"""
+
+from sentry_sdk import Hub
+from sentry_sdk._types import MYPY
+
+from sentry_sdk.integrations.django import DjangoIntegration
+from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
+
+if MYPY:
+ from typing import Any
+
+
+def patch_django_asgi_handler_impl(cls):
+ # type: (Any) -> None
+ old_app = cls.__call__
+
+ async def sentry_patched_asgi_handler(self, scope, receive, send):
+ # type: (Any, Any, Any, Any) -> Any
+ if Hub.current.get_integration(DjangoIntegration) is None:
+ return await old_app(self, scope, receive, send)
+
+ middleware = SentryAsgiMiddleware(old_app.__get__(self, cls))._run_asgi3
+ return await middleware(scope, receive, send)
+
+ cls.__call__ = sentry_patched_asgi_handler
+
+
+def patch_channels_asgi_handler_impl(cls):
+ # type: (Any) -> None
+ old_app = cls.__call__
+
+ async def sentry_patched_asgi_handler(self, receive, send):
+ # type: (Any, Any, Any) -> Any
+ if Hub.current.get_integration(DjangoIntegration) is None:
+ return await old_app(self, receive, send)
+
+ middleware = SentryAsgiMiddleware(lambda _scope: old_app.__get__(self, cls))
+
+ return await middleware(self.scope)(receive, send)
+
+ cls.__call__ = sentry_patched_asgi_handler
diff --git a/third_party/python/sentry_sdk/sentry_sdk/integrations/django/middleware.py b/third_party/python/sentry_sdk/sentry_sdk/integrations/django/middleware.py
new file mode 100644
index 0000000000..edbeccb093
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/integrations/django/middleware.py
@@ -0,0 +1,136 @@
+"""
+Create spans from Django middleware invocations
+"""
+
+from functools import wraps
+
+from django import VERSION as DJANGO_VERSION
+
+from sentry_sdk import Hub
+from sentry_sdk.utils import (
+ ContextVar,
+ transaction_from_function,
+ capture_internal_exceptions,
+)
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Any
+ from typing import Callable
+ from typing import TypeVar
+
+ F = TypeVar("F", bound=Callable[..., Any])
+
+_import_string_should_wrap_middleware = ContextVar(
+ "import_string_should_wrap_middleware"
+)
+
+if DJANGO_VERSION < (1, 7):
+ import_string_name = "import_by_path"
+else:
+ import_string_name = "import_string"
+
+
+def patch_django_middlewares():
+ # type: () -> None
+ from django.core.handlers import base
+
+ old_import_string = getattr(base, import_string_name)
+
+ def sentry_patched_import_string(dotted_path):
+ # type: (str) -> Any
+ rv = old_import_string(dotted_path)
+
+ if _import_string_should_wrap_middleware.get(None):
+ rv = _wrap_middleware(rv, dotted_path)
+
+ return rv
+
+ setattr(base, import_string_name, sentry_patched_import_string)
+
+ old_load_middleware = base.BaseHandler.load_middleware
+
+ def sentry_patched_load_middleware(self):
+ # type: (base.BaseHandler) -> Any
+ _import_string_should_wrap_middleware.set(True)
+ try:
+ return old_load_middleware(self)
+ finally:
+ _import_string_should_wrap_middleware.set(False)
+
+ base.BaseHandler.load_middleware = sentry_patched_load_middleware
+
+
+def _wrap_middleware(middleware, middleware_name):
+ # type: (Any, str) -> Any
+ from sentry_sdk.integrations.django import DjangoIntegration
+
+ def _get_wrapped_method(old_method):
+ # type: (F) -> F
+ with capture_internal_exceptions():
+
+ def sentry_wrapped_method(*args, **kwargs):
+ # type: (*Any, **Any) -> Any
+ hub = Hub.current
+ integration = hub.get_integration(DjangoIntegration)
+ if integration is None or not integration.middleware_spans:
+ return old_method(*args, **kwargs)
+
+ function_name = transaction_from_function(old_method)
+
+ description = middleware_name
+ function_basename = getattr(old_method, "__name__", None)
+ if function_basename:
+ description = "{}.{}".format(description, function_basename)
+
+ with hub.start_span(
+ op="django.middleware", description=description
+ ) as span:
+ span.set_tag("django.function_name", function_name)
+ span.set_tag("django.middleware_name", middleware_name)
+ return old_method(*args, **kwargs)
+
+ try:
+ # fails for __call__ of function on Python 2 (see py2.7-django-1.11)
+ return wraps(old_method)(sentry_wrapped_method) # type: ignore
+ except Exception:
+ return sentry_wrapped_method # type: ignore
+
+ return old_method
+
+ class SentryWrappingMiddleware(object):
+ def __init__(self, *args, **kwargs):
+ # type: (*Any, **Any) -> None
+ self._inner = middleware(*args, **kwargs)
+ self._call_method = None
+
+ # We need correct behavior for `hasattr()`, which we can only determine
+ # when we have an instance of the middleware we're wrapping.
+ def __getattr__(self, method_name):
+ # type: (str) -> Any
+ if method_name not in (
+ "process_request",
+ "process_view",
+ "process_template_response",
+ "process_response",
+ "process_exception",
+ ):
+ raise AttributeError()
+
+ old_method = getattr(self._inner, method_name)
+ rv = _get_wrapped_method(old_method)
+ self.__dict__[method_name] = rv
+ return rv
+
+ def __call__(self, *args, **kwargs):
+ # type: (*Any, **Any) -> Any
+ f = self._call_method
+ if f is None:
+ self._call_method = f = _get_wrapped_method(self._inner.__call__)
+ return f(*args, **kwargs)
+
+ if hasattr(middleware, "__name__"):
+ SentryWrappingMiddleware.__name__ = middleware.__name__
+
+ return SentryWrappingMiddleware
diff --git a/third_party/python/sentry_sdk/sentry_sdk/integrations/django/templates.py b/third_party/python/sentry_sdk/sentry_sdk/integrations/django/templates.py
new file mode 100644
index 0000000000..2285644909
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/integrations/django/templates.py
@@ -0,0 +1,121 @@
+from django.template import TemplateSyntaxError
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Any
+ from typing import Dict
+ from typing import Optional
+ from typing import Iterator
+ from typing import Tuple
+
+try:
+ # support Django 1.9
+ from django.template.base import Origin
+except ImportError:
+ # backward compatibility
+ from django.template.loader import LoaderOrigin as Origin
+
+
+def get_template_frame_from_exception(exc_value):
+ # type: (Optional[BaseException]) -> Optional[Dict[str, Any]]
+
+ # As of Django 1.9 or so the new template debug thing showed up.
+ if hasattr(exc_value, "template_debug"):
+ return _get_template_frame_from_debug(exc_value.template_debug) # type: ignore
+
+ # As of r16833 (Django) all exceptions may contain a
+ # ``django_template_source`` attribute (rather than the legacy
+ # ``TemplateSyntaxError.source`` check)
+ if hasattr(exc_value, "django_template_source"):
+ return _get_template_frame_from_source(
+ exc_value.django_template_source # type: ignore
+ )
+
+ if isinstance(exc_value, TemplateSyntaxError) and hasattr(exc_value, "source"):
+ source = exc_value.source
+ if isinstance(source, (tuple, list)) and isinstance(source[0], Origin):
+ return _get_template_frame_from_source(source) # type: ignore
+
+ return None
+
+
+def _get_template_frame_from_debug(debug):
+ # type: (Optional[Dict[str, Any]]) -> Optional[Dict[str, Any]]
+ if debug is None:
+ return None
+
+ lineno = debug["line"]
+ filename = debug["name"]
+ if filename is None:
+ filename = "<django template>"
+
+ pre_context = []
+ post_context = []
+ context_line = None
+
+ for i, line in debug["source_lines"]:
+ if i < lineno:
+ pre_context.append(line)
+ elif i > lineno:
+ post_context.append(line)
+ else:
+ context_line = line
+
+ return {
+ "filename": filename,
+ "lineno": lineno,
+ "pre_context": pre_context[-5:],
+ "post_context": post_context[:5],
+ "context_line": context_line,
+ "in_app": True,
+ }
+
+
+def _linebreak_iter(template_source):
+ # type: (str) -> Iterator[int]
+ yield 0
+ p = template_source.find("\n")
+ while p >= 0:
+ yield p + 1
+ p = template_source.find("\n", p + 1)
+
+
+def _get_template_frame_from_source(source):
+ # type: (Tuple[Origin, Tuple[int, int]]) -> Optional[Dict[str, Any]]
+ if not source:
+ return None
+
+ origin, (start, end) = source
+ filename = getattr(origin, "loadname", None)
+ if filename is None:
+ filename = "<django template>"
+ template_source = origin.reload()
+ lineno = None
+ upto = 0
+ pre_context = []
+ post_context = []
+ context_line = None
+
+ for num, next in enumerate(_linebreak_iter(template_source)):
+ line = template_source[upto:next]
+ if start >= upto and end <= next:
+ lineno = num
+ context_line = line
+ elif lineno is None:
+ pre_context.append(line)
+ else:
+ post_context.append(line)
+
+ upto = next
+
+ if context_line is None or lineno is None:
+ return None
+
+ return {
+ "filename": filename,
+ "lineno": lineno,
+ "pre_context": pre_context[-5:],
+ "post_context": post_context[:5],
+ "context_line": context_line,
+ }
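
`_get_template_frame_from_source` turns the `(start, end)` character range
reported by Django into a line number by walking the offsets produced by
`_linebreak_iter`. A simplified sketch of that offset-to-line mapping (the
template string is made up, and the real loop also collects context lines):

    def _linebreak_iter(template_source):
        # Yield the character offset at which each line starts.
        yield 0
        p = template_source.find("\n")
        while p >= 0:
            yield p + 1
            p = template_source.find("\n", p + 1)

    source = "{% block a %}\n{{ broken\n{% endblock %}\n"
    start = source.index("{{ broken")  # offset taken from the origin's source info

    lineno = 0
    for num, offset in enumerate(_linebreak_iter(source)):
        if offset > start:
            break
        lineno = num
    print(lineno)  # 1 -> the broken tag sits on the second template line
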
diff --git a/third_party/python/sentry_sdk/sentry_sdk/integrations/django/transactions.py b/third_party/python/sentry_sdk/sentry_sdk/integrations/django/transactions.py
new file mode 100644
index 0000000000..f20866ef95
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/integrations/django/transactions.py
@@ -0,0 +1,134 @@
+"""
+Copied from raven-python. Used as the legacy (raven-style) URL resolver
+for `DjangoIntegration(transaction_style="url")`.
+"""
+
+from __future__ import absolute_import
+
+import re
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from django.urls.resolvers import URLResolver
+ from typing import Dict
+ from typing import List
+ from typing import Optional
+ from django.urls.resolvers import URLPattern
+ from typing import Tuple
+ from typing import Union
+ from re import Pattern
+
+try:
+ from django.urls import get_resolver
+except ImportError:
+ from django.core.urlresolvers import get_resolver
+
+
+def get_regex(resolver_or_pattern):
+ # type: (Union[URLPattern, URLResolver]) -> Pattern[str]
+ """Utility method for django's deprecated resolver.regex"""
+ try:
+ regex = resolver_or_pattern.regex
+ except AttributeError:
+ regex = resolver_or_pattern.pattern.regex
+ return regex
+
+
+class RavenResolver(object):
+ _optional_group_matcher = re.compile(r"\(\?\:([^\)]+)\)")
+ _named_group_matcher = re.compile(r"\(\?P<(\w+)>[^\)]+\)")
+ _non_named_group_matcher = re.compile(r"\([^\)]+\)")
+ # [foo|bar|baz]
+ _either_option_matcher = re.compile(r"\[([^\]]+)\|([^\]]+)\]")
+ _camel_re = re.compile(r"([A-Z]+)([a-z])")
+
+ _cache = {} # type: Dict[URLPattern, str]
+
+ def _simplify(self, pattern):
+ # type: (str) -> str
+ r"""
+ Clean up urlpattern regexes into something readable by humans:
+
+ From:
+ > "^(?P<sport_slug>\w+)/athletes/(?P<athlete_slug>\w+)/$"
+
+ To:
+ > "{sport_slug}/athletes/{athlete_slug}/"
+ """
+ # remove optional params
+ # TODO(dcramer): it'd be nice to change these into [%s] but it currently
+ # conflicts with the other rules because we're doing regexp matches
+ # rather than parsing tokens
+ result = self._optional_group_matcher.sub(lambda m: "%s" % m.group(1), pattern)
+
+ # handle named groups first
+ result = self._named_group_matcher.sub(lambda m: "{%s}" % m.group(1), result)
+
+ # handle non-named groups
+ result = self._non_named_group_matcher.sub("{var}", result)
+
+ # handle optional params
+ result = self._either_option_matcher.sub(lambda m: m.group(1), result)
+
+ # clean up any outstanding regex-y characters.
+ result = (
+ result.replace("^", "")
+ .replace("$", "")
+ .replace("?", "")
+ .replace("//", "/")
+ .replace("\\", "")
+ )
+
+ return result
+
+ def _resolve(self, resolver, path, parents=None):
+ # type: (URLResolver, str, Optional[List[URLResolver]]) -> Optional[str]
+
+ match = get_regex(resolver).search(path) # Django < 2.0
+
+ if not match:
+ return None
+
+ if parents is None:
+ parents = [resolver]
+ elif resolver not in parents:
+ parents = parents + [resolver]
+
+ new_path = path[match.end() :]
+ for pattern in resolver.url_patterns:
+ # this is an include()
+ if not pattern.callback:
+ match_ = self._resolve(pattern, new_path, parents)
+ if match_:
+ return match_
+ continue
+ elif not get_regex(pattern).search(new_path):
+ continue
+
+ try:
+ return self._cache[pattern]
+ except KeyError:
+ pass
+
+ prefix = "".join(self._simplify(get_regex(p).pattern) for p in parents)
+ result = prefix + self._simplify(get_regex(pattern).pattern)
+ if not result.startswith("/"):
+ result = "/" + result
+ self._cache[pattern] = result
+ return result
+
+ return None
+
+ def resolve(
+ self,
+ path, # type: str
+ urlconf=None, # type: Union[None, Tuple[URLPattern, URLPattern, URLResolver], Tuple[URLPattern]]
+ ):
+ # type: (...) -> str
+ resolver = get_resolver(urlconf)
+ match = self._resolve(resolver, path)
+ return match or path
+
+
+LEGACY_RESOLVER = RavenResolver()
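
The docstring example in `_simplify` can be reproduced directly; `resolve()`
then applies the same simplification to every pattern along the matched
resolver chain. A small sketch (assumes Django is importable, since this
module imports `get_resolver` at import time):

    from sentry_sdk.integrations.django.transactions import RavenResolver

    resolver = RavenResolver()
    print(resolver._simplify(r"^(?P<sport_slug>\w+)/athletes/(?P<athlete_slug>\w+)/$"))
    # {sport_slug}/athletes/{athlete_slug}/
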
diff --git a/third_party/python/sentry_sdk/sentry_sdk/integrations/excepthook.py b/third_party/python/sentry_sdk/sentry_sdk/integrations/excepthook.py
new file mode 100644
index 0000000000..d8aead097a
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/integrations/excepthook.py
@@ -0,0 +1,76 @@
+import sys
+
+from sentry_sdk.hub import Hub
+from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
+from sentry_sdk.integrations import Integration
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Callable
+ from typing import Any
+ from typing import Type
+
+ from types import TracebackType
+
+ Excepthook = Callable[
+ [Type[BaseException], BaseException, TracebackType], Any,
+ ]
+
+
+class ExcepthookIntegration(Integration):
+ identifier = "excepthook"
+
+ always_run = False
+
+ def __init__(self, always_run=False):
+ # type: (bool) -> None
+
+ if not isinstance(always_run, bool):
+ raise ValueError(
+ "Invalid value for always_run: %s (must be type boolean)"
+ % (always_run,)
+ )
+ self.always_run = always_run
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ sys.excepthook = _make_excepthook(sys.excepthook)
+
+
+def _make_excepthook(old_excepthook):
+ # type: (Excepthook) -> Excepthook
+ def sentry_sdk_excepthook(type_, value, traceback):
+ # type: (Type[BaseException], BaseException, TracebackType) -> None
+ hub = Hub.current
+ integration = hub.get_integration(ExcepthookIntegration)
+
+ if integration is not None and _should_send(integration.always_run):
+ # If an integration is there, a client has to be there.
+ client = hub.client # type: Any
+
+ with capture_internal_exceptions():
+ event, hint = event_from_exception(
+ (type_, value, traceback),
+ client_options=client.options,
+ mechanism={"type": "excepthook", "handled": False},
+ )
+ hub.capture_event(event, hint=hint)
+
+ return old_excepthook(type_, value, traceback)
+
+ return sentry_sdk_excepthook
+
+
+def _should_send(always_run=False):
+ # type: (bool) -> bool
+ if always_run:
+ return True
+
+ if hasattr(sys, "ps1"):
+ # Disable the excepthook for interactive Python shells, otherwise
+ # every typo gets sent to Sentry.
+ return False
+
+ return True
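
`always_run` exists because `_should_send` suppresses reporting inside
interactive shells (detected through `sys.ps1`). A hypothetical setup sketch
with a placeholder DSN:

    import sentry_sdk
    from sentry_sdk.integrations.excepthook import ExcepthookIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder
        # always_run=True also reports uncaught exceptions raised from an
        # interactive shell, which _should_send() would otherwise skip.
        integrations=[ExcepthookIntegration(always_run=True)],
    )

    raise RuntimeError("unhandled")  # captured by the patched sys.excepthook
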
diff --git a/third_party/python/sentry_sdk/sentry_sdk/integrations/falcon.py b/third_party/python/sentry_sdk/sentry_sdk/integrations/falcon.py
new file mode 100644
index 0000000000..b24aac41c6
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/integrations/falcon.py
@@ -0,0 +1,209 @@
+from __future__ import absolute_import
+
+from sentry_sdk.hub import Hub
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.integrations._wsgi_common import RequestExtractor
+from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
+from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Any
+ from typing import Dict
+ from typing import Optional
+
+ from sentry_sdk._types import EventProcessor
+
+try:
+ import falcon # type: ignore
+ import falcon.api_helpers # type: ignore
+
+ from falcon import __version__ as FALCON_VERSION
+except ImportError:
+ raise DidNotEnable("Falcon not installed")
+
+
+class FalconRequestExtractor(RequestExtractor):
+ def env(self):
+ # type: () -> Dict[str, Any]
+ return self.request.env
+
+ def cookies(self):
+ # type: () -> Dict[str, Any]
+ return self.request.cookies
+
+ def form(self):
+ # type: () -> None
+ return None # No such concept in Falcon
+
+ def files(self):
+ # type: () -> None
+ return None # No such concept in Falcon
+
+ def raw_data(self):
+ # type: () -> Optional[str]
+
+ # As request data can only be read once we won't make this available
+ # to Sentry. Just send back a dummy string in case there was a
+ # content length.
+ # TODO(jmagnusson): Figure out if there's a way to support this
+ content_length = self.content_length()
+ if content_length > 0:
+ return "[REQUEST_CONTAINING_RAW_DATA]"
+ else:
+ return None
+
+ def json(self):
+ # type: () -> Optional[Dict[str, Any]]
+ try:
+ return self.request.media
+ except falcon.errors.HTTPBadRequest:
+ # NOTE(jmagnusson): We return `falcon.Request._media` here because
+ # falcon 1.4 doesn't do proper type checking in
+ # `falcon.Request.media`. This has been fixed in 2.0.
+ # Relevant code: https://github.com/falconry/falcon/blob/1.4.1/falcon/request.py#L953
+ return self.request._media
+
+
+class SentryFalconMiddleware(object):
+ """Captures exceptions in Falcon requests and send to Sentry"""
+
+ def process_request(self, req, resp, *args, **kwargs):
+ # type: (Any, Any, *Any, **Any) -> None
+ hub = Hub.current
+ integration = hub.get_integration(FalconIntegration)
+ if integration is None:
+ return
+
+ with hub.configure_scope() as scope:
+ scope._name = "falcon"
+ scope.add_event_processor(_make_request_event_processor(req, integration))
+
+
+TRANSACTION_STYLE_VALUES = ("uri_template", "path")
+
+
+class FalconIntegration(Integration):
+ identifier = "falcon"
+
+ transaction_style = None
+
+ def __init__(self, transaction_style="uri_template"):
+ # type: (str) -> None
+ if transaction_style not in TRANSACTION_STYLE_VALUES:
+ raise ValueError(
+ "Invalid value for transaction_style: %s (must be in %s)"
+ % (transaction_style, TRANSACTION_STYLE_VALUES)
+ )
+ self.transaction_style = transaction_style
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ try:
+ version = tuple(map(int, FALCON_VERSION.split(".")))
+ except (ValueError, TypeError):
+ raise DidNotEnable("Unparseable Falcon version: {}".format(FALCON_VERSION))
+
+ if version < (1, 4):
+ raise DidNotEnable("Falcon 1.4 or newer required.")
+
+ _patch_wsgi_app()
+ _patch_handle_exception()
+ _patch_prepare_middleware()
+
+
+def _patch_wsgi_app():
+ # type: () -> None
+ original_wsgi_app = falcon.API.__call__
+
+ def sentry_patched_wsgi_app(self, env, start_response):
+ # type: (falcon.API, Any, Any) -> Any
+ hub = Hub.current
+ integration = hub.get_integration(FalconIntegration)
+ if integration is None:
+ return original_wsgi_app(self, env, start_response)
+
+ sentry_wrapped = SentryWsgiMiddleware(
+ lambda envi, start_resp: original_wsgi_app(self, envi, start_resp)
+ )
+
+ return sentry_wrapped(env, start_response)
+
+ falcon.API.__call__ = sentry_patched_wsgi_app
+
+
+def _patch_handle_exception():
+ # type: () -> None
+ original_handle_exception = falcon.API._handle_exception
+
+ def sentry_patched_handle_exception(self, *args):
+ # type: (falcon.API, *Any) -> Any
+ # NOTE(jmagnusson): falcon 2.0 changed falcon.API._handle_exception
+ # method signature from `(ex, req, resp, params)` to
+ # `(req, resp, ex, params)`
+ if isinstance(args[0], Exception):
+ ex = args[0]
+ else:
+ ex = args[2]
+
+ was_handled = original_handle_exception(self, *args)
+
+ hub = Hub.current
+ integration = hub.get_integration(FalconIntegration)
+
+ if integration is not None and not _is_falcon_http_error(ex):
+ # If an integration is there, a client has to be there.
+ client = hub.client # type: Any
+
+ event, hint = event_from_exception(
+ ex,
+ client_options=client.options,
+ mechanism={"type": "falcon", "handled": False},
+ )
+ hub.capture_event(event, hint=hint)
+
+ return was_handled
+
+ falcon.API._handle_exception = sentry_patched_handle_exception
+
+
+def _patch_prepare_middleware():
+ # type: () -> None
+ original_prepare_middleware = falcon.api_helpers.prepare_middleware
+
+ def sentry_patched_prepare_middleware(
+ middleware=None, independent_middleware=False
+ ):
+ # type: (Any, Any) -> Any
+ hub = Hub.current
+ integration = hub.get_integration(FalconIntegration)
+ if integration is not None:
+ middleware = [SentryFalconMiddleware()] + (middleware or [])
+ return original_prepare_middleware(middleware, independent_middleware)
+
+ falcon.api_helpers.prepare_middleware = sentry_patched_prepare_middleware
+
+
+def _is_falcon_http_error(ex):
+ # type: (BaseException) -> bool
+ return isinstance(ex, (falcon.HTTPError, falcon.http_status.HTTPStatus))
+
+
+def _make_request_event_processor(req, integration):
+ # type: (falcon.Request, FalconIntegration) -> EventProcessor
+
+ def inner(event, hint):
+ # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+ if integration.transaction_style == "uri_template":
+ event["transaction"] = req.uri_template
+ elif integration.transaction_style == "path":
+ event["transaction"] = req.path
+
+ with capture_internal_exceptions():
+ FalconRequestExtractor(req).extract_into_event(event)
+
+ return event
+
+ return inner
diff --git a/third_party/python/sentry_sdk/sentry_sdk/integrations/flask.py b/third_party/python/sentry_sdk/sentry_sdk/integrations/flask.py
new file mode 100644
index 0000000000..ef6ae0e4f0
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/integrations/flask.py
@@ -0,0 +1,260 @@
+from __future__ import absolute_import
+
+import weakref
+
+from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
+from sentry_sdk.integrations._wsgi_common import RequestExtractor
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from sentry_sdk.integrations.wsgi import _ScopedResponse
+ from typing import Any
+ from typing import Dict
+ from werkzeug.datastructures import ImmutableTypeConversionDict
+ from werkzeug.datastructures import ImmutableMultiDict
+ from werkzeug.datastructures import FileStorage
+ from typing import Union
+ from typing import Callable
+
+ from sentry_sdk._types import EventProcessor
+
+
+try:
+ import flask_login # type: ignore
+except ImportError:
+ flask_login = None
+
+try:
+ from flask import ( # type: ignore
+ Request,
+ Flask,
+ _request_ctx_stack,
+ _app_ctx_stack,
+ __version__ as FLASK_VERSION,
+ )
+ from flask.signals import (
+ appcontext_pushed,
+ appcontext_tearing_down,
+ got_request_exception,
+ request_started,
+ )
+except ImportError:
+ raise DidNotEnable("Flask is not installed")
+
+
+TRANSACTION_STYLE_VALUES = ("endpoint", "url")
+
+
+class FlaskIntegration(Integration):
+ identifier = "flask"
+
+ transaction_style = None
+
+ def __init__(self, transaction_style="endpoint"):
+ # type: (str) -> None
+ if transaction_style not in TRANSACTION_STYLE_VALUES:
+ raise ValueError(
+ "Invalid value for transaction_style: %s (must be in %s)"
+ % (transaction_style, TRANSACTION_STYLE_VALUES)
+ )
+ self.transaction_style = transaction_style
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ try:
+ version = tuple(map(int, FLASK_VERSION.split(".")[:3]))
+ except (ValueError, TypeError):
+ raise DidNotEnable("Unparseable Flask version: {}".format(FLASK_VERSION))
+
+ if version < (0, 11):
+ raise DidNotEnable("Flask 0.11 or newer is required.")
+
+ appcontext_pushed.connect(_push_appctx)
+ appcontext_tearing_down.connect(_pop_appctx)
+ request_started.connect(_request_started)
+ got_request_exception.connect(_capture_exception)
+
+ old_app = Flask.__call__
+
+ def sentry_patched_wsgi_app(self, environ, start_response):
+ # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse
+ if Hub.current.get_integration(FlaskIntegration) is None:
+ return old_app(self, environ, start_response)
+
+ return SentryWsgiMiddleware(lambda *a, **kw: old_app(self, *a, **kw))(
+ environ, start_response
+ )
+
+ Flask.__call__ = sentry_patched_wsgi_app # type: ignore
+
+
+def _push_appctx(*args, **kwargs):
+ # type: (*Flask, **Any) -> None
+ hub = Hub.current
+ if hub.get_integration(FlaskIntegration) is not None:
+ # We always want to push a scope here, regardless of whether the WSGI app
+ # might already have done so (it has not when running the CLI, for example).
+ scope_manager = hub.push_scope()
+ scope_manager.__enter__()
+ _app_ctx_stack.top.sentry_sdk_scope_manager = scope_manager
+ with hub.configure_scope() as scope:
+ scope._name = "flask"
+
+
+def _pop_appctx(*args, **kwargs):
+ # type: (*Flask, **Any) -> None
+ scope_manager = getattr(_app_ctx_stack.top, "sentry_sdk_scope_manager", None)
+ if scope_manager is not None:
+ scope_manager.__exit__(None, None, None)
+
+
+def _request_started(sender, **kwargs):
+ # type: (Flask, **Any) -> None
+ hub = Hub.current
+ integration = hub.get_integration(FlaskIntegration)
+ if integration is None:
+ return
+
+ app = _app_ctx_stack.top.app
+ with hub.configure_scope() as scope:
+ request = _request_ctx_stack.top.request
+
+ # Rely on WSGI middleware to start a trace
+ try:
+ if integration.transaction_style == "endpoint":
+ scope.transaction = request.url_rule.endpoint
+ elif integration.transaction_style == "url":
+ scope.transaction = request.url_rule.rule
+ except Exception:
+ pass
+
+ weak_request = weakref.ref(request)
+ evt_processor = _make_request_event_processor(
+ app, weak_request, integration # type: ignore
+ )
+ scope.add_event_processor(evt_processor)
+
+
+class FlaskRequestExtractor(RequestExtractor):
+ def env(self):
+ # type: () -> Dict[str, str]
+ return self.request.environ
+
+ def cookies(self):
+ # type: () -> ImmutableTypeConversionDict[Any, Any]
+ return self.request.cookies
+
+ def raw_data(self):
+ # type: () -> bytes
+ return self.request.get_data()
+
+ def form(self):
+ # type: () -> ImmutableMultiDict[str, Any]
+ return self.request.form
+
+ def files(self):
+ # type: () -> ImmutableMultiDict[str, Any]
+ return self.request.files
+
+ def is_json(self):
+ # type: () -> bool
+ return self.request.is_json
+
+ def json(self):
+ # type: () -> Any
+ return self.request.get_json()
+
+ def size_of_file(self, file):
+ # type: (FileStorage) -> int
+ return file.content_length
+
+
+def _make_request_event_processor(app, weak_request, integration):
+ # type: (Flask, Callable[[], Request], FlaskIntegration) -> EventProcessor
+ def inner(event, hint):
+ # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+ request = weak_request()
+
+ # if the request is gone we are fine not logging the data from
+ # it. This might happen if the processor is pushed away to
+ # another thread.
+ if request is None:
+ return event
+
+ with capture_internal_exceptions():
+ FlaskRequestExtractor(request).extract_into_event(event)
+
+ if _should_send_default_pii():
+ with capture_internal_exceptions():
+ _add_user_to_event(event)
+
+ return event
+
+ return inner
+
+
+def _capture_exception(sender, exception, **kwargs):
+ # type: (Flask, Union[ValueError, BaseException], **Any) -> None
+ hub = Hub.current
+ if hub.get_integration(FlaskIntegration) is None:
+ return
+
+ # If an integration is there, a client has to be there.
+ client = hub.client # type: Any
+
+ event, hint = event_from_exception(
+ exception,
+ client_options=client.options,
+ mechanism={"type": "flask", "handled": False},
+ )
+
+ hub.capture_event(event, hint=hint)
+
+
+def _add_user_to_event(event):
+ # type: (Dict[str, Any]) -> None
+ if flask_login is None:
+ return
+
+ user = flask_login.current_user
+ if user is None:
+ return
+
+ with capture_internal_exceptions():
+ # Access this object as late as possible as accessing the user
+ # is relatively costly
+
+ user_info = event.setdefault("user", {})
+
+ try:
+ user_info.setdefault("id", user.get_id())
+ # TODO: more configurable user attrs here
+ except AttributeError:
+ # might happen if:
+ # - flask_login could not be imported
+ # - flask_login is not configured
+ # - no user is logged in
+ pass
+
+ # The following attribute accesses are ineffective for the general
+ # Flask-Login case, because the User interface of Flask-Login does not
+ # care about anything but the ID. However, Flask-User (based on
+ # Flask-Login) documents a few optional extra attributes.
+ #
+ # https://github.com/lingthio/Flask-User/blob/a379fa0a281789618c484b459cb41236779b95b1/docs/source/data_models.rst#fixed-data-model-property-names
+
+ try:
+ user_info.setdefault("email", user.email)
+ except Exception:
+ pass
+
+ try:
+ user_info.setdefault("username", user.username)
+ except Exception:
+ pass
+
+ try:
+ # Fall back to the email address when the user model has no username.
+ user_info.setdefault("username", user.email)
+ except Exception:
+ pass
diff --git a/third_party/python/sentry_sdk/sentry_sdk/integrations/gnu_backtrace.py b/third_party/python/sentry_sdk/sentry_sdk/integrations/gnu_backtrace.py
new file mode 100644
index 0000000000..e0ec110547
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/integrations/gnu_backtrace.py
@@ -0,0 +1,107 @@
+import re
+
+from sentry_sdk.hub import Hub
+from sentry_sdk.integrations import Integration
+from sentry_sdk.scope import add_global_event_processor
+from sentry_sdk.utils import capture_internal_exceptions
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Any
+ from typing import Dict
+
+
+MODULE_RE = r"[a-zA-Z0-9/._:\\-]+"
+TYPE_RE = r"[a-zA-Z0-9._:<>,-]+"
+HEXVAL_RE = r"[A-Fa-f0-9]+"
+
+
+FRAME_RE = r"""
+^(?P<index>\d+)\.\s
+(?P<package>{MODULE_RE})\(
+ (?P<retval>{TYPE_RE}\ )?
+ ((?P<function>{TYPE_RE})
+ (?P<args>\(.*\))?
+ )?
+ ((?P<constoffset>\ const)?\+0x(?P<offset>{HEXVAL_RE}))?
+\)\s
+\[0x(?P<retaddr>{HEXVAL_RE})\]$
+""".format(
+ MODULE_RE=MODULE_RE, HEXVAL_RE=HEXVAL_RE, TYPE_RE=TYPE_RE
+)
+
+FRAME_RE = re.compile(FRAME_RE, re.MULTILINE | re.VERBOSE)
+
+
+class GnuBacktraceIntegration(Integration):
+ identifier = "gnu_backtrace"
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ @add_global_event_processor
+ def process_gnu_backtrace(event, hint):
+ # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+ with capture_internal_exceptions():
+ return _process_gnu_backtrace(event, hint)
+
+
+def _process_gnu_backtrace(event, hint):
+ # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+ if Hub.current.get_integration(GnuBacktraceIntegration) is None:
+ return event
+
+ exc_info = hint.get("exc_info", None)
+
+ if exc_info is None:
+ return event
+
+ exception = event.get("exception", None)
+
+ if exception is None:
+ return event
+
+ values = exception.get("values", None)
+
+ if values is None:
+ return event
+
+ for exception in values:
+ frames = exception.get("stacktrace", {}).get("frames", [])
+ if not frames:
+ continue
+
+ msg = exception.get("value", None)
+ if not msg:
+ continue
+
+ additional_frames = []
+ new_msg = []
+
+ for line in msg.splitlines():
+ match = FRAME_RE.match(line)
+ if match:
+ additional_frames.append(
+ (
+ int(match.group("index")),
+ {
+ "package": match.group("package") or None,
+ "function": match.group("function") or None,
+ "platform": "native",
+ },
+ )
+ )
+ else:
+ # Put garbage lines back into message, not sure what to do with them.
+ new_msg.append(line)
+
+ if additional_frames:
+ additional_frames.sort(key=lambda x: -x[0])
+ for _, frame in additional_frames:
+ frames.append(frame)
+
+ new_msg.append("<stacktrace parsed and removed by GnuBacktraceIntegration>")
+ exception["value"] = "\n".join(new_msg)
+
+ return event
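
FRAME_RE targets backtrace lines in the style of glibc's backtrace_symbols()
output as they appear, prefixed with a numeric index, inside exception
messages (for example from ClickHouse). A quick check against a made-up line
of that shape:

    from sentry_sdk.integrations.gnu_backtrace import FRAME_RE

    line = "3. /usr/bin/clickhouse-server(StackTrace::StackTrace()+0x16) [0x99d31a6]"
    match = FRAME_RE.match(line)
    assert match is not None
    print(match.group("index"), match.group("package"), match.group("function"))
    # 3 /usr/bin/clickhouse-server StackTrace::StackTrace
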
diff --git a/third_party/python/sentry_sdk/sentry_sdk/integrations/logging.py b/third_party/python/sentry_sdk/sentry_sdk/integrations/logging.py
new file mode 100644
index 0000000000..6edd785e91
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/integrations/logging.py
@@ -0,0 +1,237 @@
+from __future__ import absolute_import
+
+import logging
+import datetime
+
+from sentry_sdk.hub import Hub
+from sentry_sdk.utils import (
+ to_string,
+ event_from_exception,
+ current_stacktrace,
+ capture_internal_exceptions,
+)
+from sentry_sdk.integrations import Integration
+from sentry_sdk._compat import iteritems
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from logging import LogRecord
+ from typing import Any
+ from typing import Dict
+ from typing import Optional
+
+DEFAULT_LEVEL = logging.INFO
+DEFAULT_EVENT_LEVEL = logging.ERROR
+
+_IGNORED_LOGGERS = set(["sentry_sdk.errors"])
+
+
+def ignore_logger(
+ name, # type: str
+):
+ # type: (...) -> None
+ """This disables recording (both in breadcrumbs and as events) calls to
+ a logger of a specific name. Among other uses, many of our integrations
+ use this to prevent their actions being recorded as breadcrumbs. Exposed
+ to users as a way to quiet spammy loggers.
+
+ :param name: The name of the logger to ignore (same string you would pass to ``logging.getLogger``).
+ """
+ _IGNORED_LOGGERS.add(name)
+
+
+class LoggingIntegration(Integration):
+ identifier = "logging"
+
+ def __init__(self, level=DEFAULT_LEVEL, event_level=DEFAULT_EVENT_LEVEL):
+ # type: (Optional[int], Optional[int]) -> None
+ self._handler = None
+ self._breadcrumb_handler = None
+
+ if level is not None:
+ self._breadcrumb_handler = BreadcrumbHandler(level=level)
+
+ if event_level is not None:
+ self._handler = EventHandler(level=event_level)
+
+ def _handle_record(self, record):
+ # type: (LogRecord) -> None
+ if self._handler is not None and record.levelno >= self._handler.level:
+ self._handler.handle(record)
+
+ if (
+ self._breadcrumb_handler is not None
+ and record.levelno >= self._breadcrumb_handler.level
+ ):
+ self._breadcrumb_handler.handle(record)
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ old_callhandlers = logging.Logger.callHandlers # type: ignore
+
+ def sentry_patched_callhandlers(self, record):
+ # type: (Any, LogRecord) -> Any
+ try:
+ return old_callhandlers(self, record)
+ finally:
+ # This check is done twice: once in _can_record and once here,
+ # before we even resolve the integration. Resolving the integration
+ # for ignored loggers would be slower and carries a high chance of
+ # running into a recursion error.
+ if record.name not in _IGNORED_LOGGERS:
+ integration = Hub.current.get_integration(LoggingIntegration)
+ if integration is not None:
+ integration._handle_record(record)
+
+ logging.Logger.callHandlers = sentry_patched_callhandlers # type: ignore
+
+
+def _can_record(record):
+ # type: (LogRecord) -> bool
+ return record.name not in _IGNORED_LOGGERS
+
+
+def _breadcrumb_from_record(record):
+ # type: (LogRecord) -> Dict[str, Any]
+ return {
+ "ty": "log",
+ "level": _logging_to_event_level(record.levelname),
+ "category": record.name,
+ "message": record.message,
+ "timestamp": datetime.datetime.utcfromtimestamp(record.created),
+ "data": _extra_from_record(record),
+ }
+
+
+def _logging_to_event_level(levelname):
+ # type: (str) -> str
+ return {"critical": "fatal"}.get(levelname.lower(), levelname.lower())
+
+
+COMMON_RECORD_ATTRS = frozenset(
+ (
+ "args",
+ "created",
+ "exc_info",
+ "exc_text",
+ "filename",
+ "funcName",
+ "levelname",
+ "levelno",
+ "linenno",
+ "lineno",
+ "message",
+ "module",
+ "msecs",
+ "msg",
+ "name",
+ "pathname",
+ "process",
+ "processName",
+ "relativeCreated",
+ "stack",
+ "tags",
+ "thread",
+ "threadName",
+ "stack_info",
+ )
+)
+
+
+def _extra_from_record(record):
+ # type: (LogRecord) -> Dict[str, Any]
+ return {
+ k: v
+ for k, v in iteritems(vars(record))
+ if k not in COMMON_RECORD_ATTRS
+ and (not isinstance(k, str) or not k.startswith("_"))
+ }
+
+
+class EventHandler(logging.Handler, object):
+ """
+ A logging handler that emits Sentry events for each log record
+
+ Note that you do not have to use this class if the logging integration is enabled, which it is by default.
+ """
+
+ def emit(self, record):
+ # type: (LogRecord) -> Any
+ with capture_internal_exceptions():
+ self.format(record)
+ return self._emit(record)
+
+ def _emit(self, record):
+ # type: (LogRecord) -> None
+ if not _can_record(record):
+ return
+
+ hub = Hub.current
+ if hub.client is None:
+ return
+
+ client_options = hub.client.options
+
+ # exc_info might be None or (None, None, None)
+ if record.exc_info is not None and record.exc_info[0] is not None:
+ event, hint = event_from_exception(
+ record.exc_info,
+ client_options=client_options,
+ mechanism={"type": "logging", "handled": True},
+ )
+ elif record.exc_info and record.exc_info[0] is None:
+ event = {}
+ hint = {}
+ with capture_internal_exceptions():
+ event["threads"] = {
+ "values": [
+ {
+ "stacktrace": current_stacktrace(
+ client_options["with_locals"]
+ ),
+ "crashed": False,
+ "current": True,
+ }
+ ]
+ }
+ else:
+ event = {}
+ hint = {}
+
+ hint["log_record"] = record
+
+ event["level"] = _logging_to_event_level(record.levelname)
+ event["logger"] = record.name
+ event["logentry"] = {"message": to_string(record.msg), "params": record.args}
+ event["extra"] = _extra_from_record(record)
+
+ hub.capture_event(event, hint=hint)
+
+
+# Legacy name
+SentryHandler = EventHandler
+
+
+class BreadcrumbHandler(logging.Handler, object):
+ """
+ A logging handler that records breadcrumbs for each log record.
+
+ Note that you do not have to use this class if the logging integration is enabled, which it is by default.
+ """
+
+ def emit(self, record):
+ # type: (LogRecord) -> Any
+ with capture_internal_exceptions():
+ self.format(record)
+ return self._emit(record)
+
+ def _emit(self, record):
+ # type: (LogRecord) -> None
+ if not _can_record(record):
+ return
+
+ Hub.current.add_breadcrumb(
+ _breadcrumb_from_record(record), hint={"log_record": record}
+ )
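
The handlers above are wired in through LoggingIntegration's defaults (INFO
breadcrumbs, ERROR events); the thresholds can be tuned and noisy loggers
silenced. A hypothetical sketch (placeholder DSN, made-up logger name):

    import logging

    import sentry_sdk
    from sentry_sdk.integrations.logging import LoggingIntegration, ignore_logger

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder
        integrations=[
            # Breadcrumbs from INFO and up, events only from CRITICAL and up.
            LoggingIntegration(level=logging.INFO, event_level=logging.CRITICAL)
        ],
    )

    # Record neither breadcrumbs nor events for this chatty logger.
    ignore_logger("myapp.noisy")

    logging.getLogger(__name__).error("recorded as a breadcrumb only")
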
diff --git a/third_party/python/sentry_sdk/sentry_sdk/integrations/modules.py b/third_party/python/sentry_sdk/sentry_sdk/integrations/modules.py
new file mode 100644
index 0000000000..3d78cb89bb
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/integrations/modules.py
@@ -0,0 +1,56 @@
+from __future__ import absolute_import
+
+from sentry_sdk.hub import Hub
+from sentry_sdk.integrations import Integration
+from sentry_sdk.scope import add_global_event_processor
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Any
+ from typing import Dict
+ from typing import Tuple
+ from typing import Iterator
+
+ from sentry_sdk._types import Event
+
+
+_installed_modules = None
+
+
+def _generate_installed_modules():
+ # type: () -> Iterator[Tuple[str, str]]
+ try:
+ import pkg_resources
+ except ImportError:
+ return
+
+ for info in pkg_resources.working_set:
+ yield info.key, info.version
+
+
+def _get_installed_modules():
+ # type: () -> Dict[str, str]
+ global _installed_modules
+ if _installed_modules is None:
+ _installed_modules = dict(_generate_installed_modules())
+ return _installed_modules
+
+
+class ModulesIntegration(Integration):
+ identifier = "modules"
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ @add_global_event_processor
+ def processor(event, hint):
+ # type: (Event, Any) -> Dict[str, Any]
+ if event.get("type") == "transaction":
+ return event
+
+ if Hub.current.get_integration(ModulesIntegration) is None:
+ return event
+
+ event["modules"] = _get_installed_modules()
+ return event
diff --git a/third_party/python/sentry_sdk/sentry_sdk/integrations/pyramid.py b/third_party/python/sentry_sdk/sentry_sdk/integrations/pyramid.py
new file mode 100644
index 0000000000..ee9682343a
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/integrations/pyramid.py
@@ -0,0 +1,217 @@
+from __future__ import absolute_import
+
+import os
+import sys
+import weakref
+
+from pyramid.httpexceptions import HTTPException
+from pyramid.request import Request
+
+from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
+from sentry_sdk._compat import reraise, iteritems
+
+from sentry_sdk.integrations import Integration
+from sentry_sdk.integrations._wsgi_common import RequestExtractor
+from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from pyramid.response import Response
+ from typing import Any
+ from sentry_sdk.integrations.wsgi import _ScopedResponse
+ from typing import Callable
+ from typing import Dict
+ from typing import Optional
+ from webob.cookies import RequestCookies # type: ignore
+ from webob.compat import cgi_FieldStorage # type: ignore
+
+ from sentry_sdk.utils import ExcInfo
+ from sentry_sdk._types import EventProcessor
+
+
+if getattr(Request, "authenticated_userid", None):
+
+ def authenticated_userid(request):
+ # type: (Request) -> Optional[Any]
+ return request.authenticated_userid
+
+
+else:
+ # bw-compat for pyramid < 1.5
+ from pyramid.security import authenticated_userid # type: ignore
+
+
+TRANSACTION_STYLE_VALUES = ("route_name", "route_pattern")
+
+
+class PyramidIntegration(Integration):
+ identifier = "pyramid"
+
+ transaction_style = None
+
+ def __init__(self, transaction_style="route_name"):
+ # type: (str) -> None
+ if transaction_style not in TRANSACTION_STYLE_VALUES:
+ raise ValueError(
+ "Invalid value for transaction_style: %s (must be in %s)"
+ % (transaction_style, TRANSACTION_STYLE_VALUES)
+ )
+ self.transaction_style = transaction_style
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ from pyramid.router import Router
+ from pyramid.request import Request
+
+ old_handle_request = Router.handle_request
+
+ def sentry_patched_handle_request(self, request, *args, **kwargs):
+ # type: (Any, Request, *Any, **Any) -> Response
+ hub = Hub.current
+ integration = hub.get_integration(PyramidIntegration)
+ if integration is not None:
+ with hub.configure_scope() as scope:
+ scope.add_event_processor(
+ _make_event_processor(weakref.ref(request), integration)
+ )
+
+ return old_handle_request(self, request, *args, **kwargs)
+
+ Router.handle_request = sentry_patched_handle_request
+
+ if hasattr(Request, "invoke_exception_view"):
+ old_invoke_exception_view = Request.invoke_exception_view
+
+ def sentry_patched_invoke_exception_view(self, *args, **kwargs):
+ # type: (Request, *Any, **Any) -> Any
+ rv = old_invoke_exception_view(self, *args, **kwargs)
+
+ if (
+ self.exc_info
+ and all(self.exc_info)
+ and rv.status_int == 500
+ and Hub.current.get_integration(PyramidIntegration) is not None
+ ):
+ _capture_exception(self.exc_info)
+
+ return rv
+
+ Request.invoke_exception_view = sentry_patched_invoke_exception_view
+
+ old_wsgi_call = Router.__call__
+
+ def sentry_patched_wsgi_call(self, environ, start_response):
+ # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse
+ hub = Hub.current
+ integration = hub.get_integration(PyramidIntegration)
+ if integration is None:
+ return old_wsgi_call(self, environ, start_response)
+
+ def sentry_patched_inner_wsgi_call(environ, start_response):
+ # type: (Dict[str, Any], Callable[..., Any]) -> Any
+ try:
+ return old_wsgi_call(self, environ, start_response)
+ except Exception:
+ einfo = sys.exc_info()
+ _capture_exception(einfo)
+ reraise(*einfo)
+
+ return SentryWsgiMiddleware(sentry_patched_inner_wsgi_call)(
+ environ, start_response
+ )
+
+ Router.__call__ = sentry_patched_wsgi_call
+
+
+def _capture_exception(exc_info):
+ # type: (ExcInfo) -> None
+ if exc_info[0] is None or issubclass(exc_info[0], HTTPException):
+ return
+ hub = Hub.current
+ if hub.get_integration(PyramidIntegration) is None:
+ return
+
+ # If an integration is there, a client has to be there.
+ client = hub.client # type: Any
+
+ event, hint = event_from_exception(
+ exc_info,
+ client_options=client.options,
+ mechanism={"type": "pyramid", "handled": False},
+ )
+
+ hub.capture_event(event, hint=hint)
+
+
+class PyramidRequestExtractor(RequestExtractor):
+ def url(self):
+ # type: () -> str
+ return self.request.path_url
+
+ def env(self):
+ # type: () -> Dict[str, str]
+ return self.request.environ
+
+ def cookies(self):
+ # type: () -> RequestCookies
+ return self.request.cookies
+
+ def raw_data(self):
+ # type: () -> str
+ return self.request.text
+
+ def form(self):
+ # type: () -> Dict[str, str]
+ return {
+ key: value
+ for key, value in iteritems(self.request.POST)
+ if not getattr(value, "filename", None)
+ }
+
+ def files(self):
+ # type: () -> Dict[str, cgi_FieldStorage]
+ return {
+ key: value
+ for key, value in iteritems(self.request.POST)
+ if getattr(value, "filename", None)
+ }
+
+ def size_of_file(self, postdata):
+ # type: (cgi_FieldStorage) -> int
+ file = postdata.file
+ try:
+ return os.fstat(file.fileno()).st_size
+ except Exception:
+ return 0
+
+
+def _make_event_processor(weak_request, integration):
+ # type: (Callable[[], Request], PyramidIntegration) -> EventProcessor
+ def event_processor(event, hint):
+ # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+ request = weak_request()
+ if request is None:
+ return event
+
+ try:
+ if integration.transaction_style == "route_name":
+ event["transaction"] = request.matched_route.name
+ elif integration.transaction_style == "route_pattern":
+ event["transaction"] = request.matched_route.pattern
+ except Exception:
+ pass
+
+ with capture_internal_exceptions():
+ PyramidRequestExtractor(request).extract_into_event(event)
+
+ if _should_send_default_pii():
+ with capture_internal_exceptions():
+ user_info = event.setdefault("user", {})
+ user_info.setdefault("id", authenticated_userid(request))
+
+ return event
+
+ return event_processor
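
As the constructor above enforces, transaction_style accepts only "route_name" or "route_pattern". A hedged configuration sketch with a placeholder DSN; "route_pattern" reports the parameterized route (e.g. "/users/{id}") as the transaction name, so requests for different ids group together:

    import sentry_sdk
    from sentry_sdk.integrations.pyramid import PyramidIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        integrations=[PyramidIntegration(transaction_style="route_pattern")],
    )
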
diff --git a/third_party/python/sentry_sdk/sentry_sdk/integrations/redis.py b/third_party/python/sentry_sdk/sentry_sdk/integrations/redis.py
new file mode 100644
index 0000000000..510fdbb22c
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/integrations/redis.py
@@ -0,0 +1,70 @@
+from __future__ import absolute_import
+
+from sentry_sdk import Hub
+from sentry_sdk.utils import capture_internal_exceptions
+from sentry_sdk.integrations import Integration
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Any
+
+
+class RedisIntegration(Integration):
+ identifier = "redis"
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ import redis
+
+ patch_redis_client(redis.StrictRedis)
+
+ try:
+ import rb.clients # type: ignore
+ except ImportError:
+ pass
+ else:
+ patch_redis_client(rb.clients.FanoutClient)
+ patch_redis_client(rb.clients.MappingClient)
+ patch_redis_client(rb.clients.RoutingClient)
+
+
+def patch_redis_client(cls):
+ # type: (Any) -> None
+ """
+ This function can be used to instrument custom redis client classes or
+ subclasses.
+ """
+
+ old_execute_command = cls.execute_command
+
+ def sentry_patched_execute_command(self, name, *args, **kwargs):
+ # type: (Any, str, *Any, **Any) -> Any
+ hub = Hub.current
+
+ if hub.get_integration(RedisIntegration) is None:
+ return old_execute_command(self, name, *args, **kwargs)
+
+ description = name
+
+ with capture_internal_exceptions():
+ description_parts = [name]
+ for i, arg in enumerate(args):
+ if i > 10:
+ break
+
+ description_parts.append(repr(arg))
+
+ description = " ".join(description_parts)
+
+ with hub.start_span(op="redis", description=description) as span:
+ if name:
+ span.set_tag("redis.command", name)
+
+ if name and args and name.lower() in ("get", "set", "setex", "setnx"):
+ span.set_tag("redis.key", args[0])
+
+ return old_execute_command(self, name, *args, **kwargs)
+
+ cls.execute_command = sentry_patched_execute_command
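
Per the docstring on patch_redis_client, the same wrapper can be applied to client classes the integration does not patch on its own. A sketch using a hypothetical wrapper class (placeholder DSN; ReadOnlyRedis is invented for illustration):

    import redis
    import sentry_sdk
    from sentry_sdk.integrations.redis import RedisIntegration, patch_redis_client

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        integrations=[RedisIntegration()],
    )

    class ReadOnlyRedis(object):
        """Hypothetical client wrapper that refuses write commands."""

        def __init__(self, client):
            self._client = client

        def execute_command(self, name, *args, **kwargs):
            if name.lower() in ("set", "del", "flushdb"):
                raise RuntimeError("write commands are not allowed here")
            return self._client.execute_command(name, *args, **kwargs)

    # Wrap the custom class so its commands are recorded as `redis` spans too.
    patch_redis_client(ReadOnlyRedis)

    client = ReadOnlyRedis(redis.StrictRedis())
    client.execute_command("GET", "some-key")
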
diff --git a/third_party/python/sentry_sdk/sentry_sdk/integrations/rq.py b/third_party/python/sentry_sdk/sentry_sdk/integrations/rq.py
new file mode 100644
index 0000000000..fbe8cdda3d
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/integrations/rq.py
@@ -0,0 +1,150 @@
+from __future__ import absolute_import
+
+import weakref
+
+from sentry_sdk.hub import Hub
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.tracing import Span
+from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
+
+
+try:
+ from rq.version import VERSION as RQ_VERSION
+ from rq.timeouts import JobTimeoutException
+ from rq.worker import Worker
+ from rq.queue import Queue
+except ImportError:
+ raise DidNotEnable("RQ not installed")
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Any
+ from typing import Dict
+ from typing import Callable
+
+ from rq.job import Job
+
+ from sentry_sdk.utils import ExcInfo
+ from sentry_sdk._types import EventProcessor
+
+
+class RqIntegration(Integration):
+ identifier = "rq"
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+
+ try:
+ version = tuple(map(int, RQ_VERSION.split(".")[:3]))
+ except (ValueError, TypeError):
+ raise DidNotEnable("Unparseable RQ version: {}".format(RQ_VERSION))
+
+ if version < (0, 6):
+ raise DidNotEnable("RQ 0.6 or newer is required.")
+
+ old_perform_job = Worker.perform_job
+
+ def sentry_patched_perform_job(self, job, *args, **kwargs):
+ # type: (Any, Job, *Queue, **Any) -> bool
+ hub = Hub.current
+ integration = hub.get_integration(RqIntegration)
+
+ if integration is None:
+ return old_perform_job(self, job, *args, **kwargs)
+
+ client = hub.client
+ assert client is not None
+
+ with hub.push_scope() as scope:
+ scope.clear_breadcrumbs()
+ scope.add_event_processor(_make_event_processor(weakref.ref(job)))
+
+ span = Span.continue_from_headers(
+ job.meta.get("_sentry_trace_headers") or {}
+ )
+ span.op = "rq.task"
+
+ with capture_internal_exceptions():
+ span.transaction = job.func_name
+
+ with hub.start_span(span):
+ rv = old_perform_job(self, job, *args, **kwargs)
+
+ if self.is_horse:
+ # We're inside of a forked process and RQ is
+ # about to call `os._exit`. Make sure that our
+ # events get sent out.
+ client.flush()
+
+ return rv
+
+ Worker.perform_job = sentry_patched_perform_job
+
+ old_handle_exception = Worker.handle_exception
+
+ def sentry_patched_handle_exception(self, job, *exc_info, **kwargs):
+ # type: (Worker, Any, *Any, **Any) -> Any
+ _capture_exception(exc_info) # type: ignore
+ return old_handle_exception(self, job, *exc_info, **kwargs)
+
+ Worker.handle_exception = sentry_patched_handle_exception
+
+ old_enqueue_job = Queue.enqueue_job
+
+ def sentry_patched_enqueue_job(self, job, **kwargs):
+ # type: (Queue, Any, **Any) -> Any
+ hub = Hub.current
+ if hub.get_integration(RqIntegration) is not None:
+ job.meta["_sentry_trace_headers"] = dict(
+ hub.iter_trace_propagation_headers()
+ )
+
+ return old_enqueue_job(self, job, **kwargs)
+
+ Queue.enqueue_job = sentry_patched_enqueue_job
+
+
+def _make_event_processor(weak_job):
+ # type: (Callable[[], Job]) -> EventProcessor
+ def event_processor(event, hint):
+ # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+ job = weak_job()
+ if job is not None:
+ with capture_internal_exceptions():
+ extra = event.setdefault("extra", {})
+ extra["rq-job"] = {
+ "job_id": job.id,
+ "func": job.func_name,
+ "args": job.args,
+ "kwargs": job.kwargs,
+ "description": job.description,
+ }
+
+ if "exc_info" in hint:
+ with capture_internal_exceptions():
+ if issubclass(hint["exc_info"][0], JobTimeoutException):
+ event["fingerprint"] = ["rq", "JobTimeoutException", job.func_name]
+
+ return event
+
+ return event_processor
+
+
+def _capture_exception(exc_info, **kwargs):
+ # type: (ExcInfo, **Any) -> None
+ hub = Hub.current
+ if hub.get_integration(RqIntegration) is None:
+ return
+
+ # If an integration is there, a client has to be there.
+ client = hub.client # type: Any
+
+ event, hint = event_from_exception(
+ exc_info,
+ client_options=client.options,
+ mechanism={"type": "rq", "handled": False},
+ )
+
+ hub.capture_event(event, hint=hint)
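
Because Queue.enqueue_job is patched above, whatever trace headers are current at enqueue time (for example inside an instrumented web request) travel to the worker in job.meta["_sentry_trace_headers"], and sentry_patched_perform_job continues that trace. A sketch of the producer side, assuming a hypothetical myapp.tasks.send_welcome_email task and a placeholder DSN; the worker process needs the same init() call for its side of the patches to be active:

    import sentry_sdk
    from redis import Redis
    from rq import Queue
    from sentry_sdk.integrations.rq import RqIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        integrations=[RqIntegration()],
    )

    queue = Queue("default", connection=Redis())
    # The headers of the span active here are attached to the job's meta, so
    # the rq.task span created on the worker joins the same trace.
    queue.enqueue("myapp.tasks.send_welcome_email", user_id=42)
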
diff --git a/third_party/python/sentry_sdk/sentry_sdk/integrations/sanic.py b/third_party/python/sentry_sdk/sentry_sdk/integrations/sanic.py
new file mode 100644
index 0000000000..e8fdca422a
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/integrations/sanic.py
@@ -0,0 +1,233 @@
+import sys
+import weakref
+from inspect import isawaitable
+
+from sentry_sdk._compat import urlparse, reraise
+from sentry_sdk.hub import Hub
+from sentry_sdk.utils import (
+ capture_internal_exceptions,
+ event_from_exception,
+ HAS_REAL_CONTEXTVARS,
+)
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.integrations._wsgi_common import RequestExtractor, _filter_headers
+from sentry_sdk.integrations.logging import ignore_logger
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Any
+ from typing import Callable
+ from typing import Optional
+ from typing import Union
+ from typing import Tuple
+ from typing import Dict
+
+ from sanic.request import Request, RequestParameters
+
+ from sentry_sdk._types import Event, EventProcessor, Hint
+
+try:
+ from sanic import Sanic, __version__ as SANIC_VERSION
+ from sanic.exceptions import SanicException
+ from sanic.router import Router
+ from sanic.handlers import ErrorHandler
+except ImportError:
+ raise DidNotEnable("Sanic not installed")
+
+
+class SanicIntegration(Integration):
+ identifier = "sanic"
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ try:
+ version = tuple(map(int, SANIC_VERSION.split(".")))
+ except (TypeError, ValueError):
+ raise DidNotEnable("Unparseable Sanic version: {}".format(SANIC_VERSION))
+
+ if version < (0, 8):
+ raise DidNotEnable("Sanic 0.8 or newer required.")
+
+ if not HAS_REAL_CONTEXTVARS:
+ # We better have contextvars or we're going to leak state between
+ # requests.
+ raise DidNotEnable(
+ "The sanic integration for Sentry requires Python 3.7+ "
+ " or aiocontextvars package"
+ )
+
+ if SANIC_VERSION.startswith("0.8."):
+ # Sanic 0.8 and older creates a logger named "root" and puts a
+ # stringified version of every exception in there (without exc_info),
+ # which our error deduplication can't detect.
+ #
+ # We explicitly check the version here because ignoring this logger
+ # is a very invasive step that is not necessary at all in newer
+ # versions.
+ #
+ # https://github.com/huge-success/sanic/issues/1332
+ ignore_logger("root")
+
+ old_handle_request = Sanic.handle_request
+
+ async def sentry_handle_request(self, request, *args, **kwargs):
+ # type: (Any, Request, *Any, **Any) -> Any
+ hub = Hub.current
+ if hub.get_integration(SanicIntegration) is None:
+ return old_handle_request(self, request, *args, **kwargs)
+
+ weak_request = weakref.ref(request)
+
+ with Hub(hub) as hub:
+ with hub.configure_scope() as scope:
+ scope.clear_breadcrumbs()
+ scope.add_event_processor(_make_request_processor(weak_request))
+
+ response = old_handle_request(self, request, *args, **kwargs)
+ if isawaitable(response):
+ response = await response
+
+ return response
+
+ Sanic.handle_request = sentry_handle_request
+
+ old_router_get = Router.get
+
+ def sentry_router_get(self, request):
+ # type: (Any, Request) -> Any
+ rv = old_router_get(self, request)
+ hub = Hub.current
+ if hub.get_integration(SanicIntegration) is not None:
+ with capture_internal_exceptions():
+ with hub.configure_scope() as scope:
+ scope.transaction = rv[0].__name__
+ return rv
+
+ Router.get = sentry_router_get
+
+ old_error_handler_lookup = ErrorHandler.lookup
+
+ def sentry_error_handler_lookup(self, exception):
+ # type: (Any, Exception) -> Optional[object]
+ _capture_exception(exception)
+ old_error_handler = old_error_handler_lookup(self, exception)
+
+ if old_error_handler is None:
+ return None
+
+ if Hub.current.get_integration(SanicIntegration) is None:
+ return old_error_handler
+
+ async def sentry_wrapped_error_handler(request, exception):
+ # type: (Request, Exception) -> Any
+ try:
+ response = old_error_handler(request, exception)
+ if isawaitable(response):
+ response = await response
+ return response
+ except Exception:
+ # Report errors that occur in Sanic error handler. These
+ # exceptions will not even show up in Sanic's
+ # `sanic.exceptions` logger.
+ exc_info = sys.exc_info()
+ _capture_exception(exc_info)
+ reraise(*exc_info)
+
+ return sentry_wrapped_error_handler
+
+ ErrorHandler.lookup = sentry_error_handler_lookup
+
+
+def _capture_exception(exception):
+ # type: (Union[Tuple[Optional[type], Optional[BaseException], Any], BaseException]) -> None
+ hub = Hub.current
+ integration = hub.get_integration(SanicIntegration)
+ if integration is None:
+ return
+
+ # If an integration is there, a client has to be there.
+ client = hub.client # type: Any
+
+ with capture_internal_exceptions():
+ event, hint = event_from_exception(
+ exception,
+ client_options=client.options,
+ mechanism={"type": "sanic", "handled": False},
+ )
+ hub.capture_event(event, hint=hint)
+
+
+def _make_request_processor(weak_request):
+ # type: (Callable[[], Request]) -> EventProcessor
+ def sanic_processor(event, hint):
+ # type: (Event, Optional[Hint]) -> Optional[Event]
+
+ try:
+ if hint and issubclass(hint["exc_info"][0], SanicException):
+ return None
+ except KeyError:
+ pass
+
+ request = weak_request()
+ if request is None:
+ return event
+
+ with capture_internal_exceptions():
+ extractor = SanicRequestExtractor(request)
+ extractor.extract_into_event(event)
+
+ request_info = event["request"]
+ urlparts = urlparse.urlsplit(request.url)
+
+ request_info["url"] = "%s://%s%s" % (
+ urlparts.scheme,
+ urlparts.netloc,
+ urlparts.path,
+ )
+
+ request_info["query_string"] = urlparts.query
+ request_info["method"] = request.method
+ request_info["env"] = {"REMOTE_ADDR": request.remote_addr}
+ request_info["headers"] = _filter_headers(dict(request.headers))
+
+ return event
+
+ return sanic_processor
+
+
+class SanicRequestExtractor(RequestExtractor):
+ def content_length(self):
+ # type: () -> int
+ if self.request.body is None:
+ return 0
+ return len(self.request.body)
+
+ def cookies(self):
+ # type: () -> Dict[str, str]
+ return dict(self.request.cookies)
+
+ def raw_data(self):
+ # type: () -> bytes
+ return self.request.body
+
+ def form(self):
+ # type: () -> RequestParameters
+ return self.request.form
+
+ def is_json(self):
+ # type: () -> bool
+ raise NotImplementedError()
+
+ def json(self):
+ # type: () -> Optional[Any]
+ return self.request.json
+
+ def files(self):
+ # type: () -> RequestParameters
+ return self.request.files
+
+ def size_of_file(self, file):
+ # type: (Any) -> int
+ return len(file.body or ())
diff --git a/third_party/python/sentry_sdk/sentry_sdk/integrations/serverless.py b/third_party/python/sentry_sdk/sentry_sdk/integrations/serverless.py
new file mode 100644
index 0000000000..6dd90b43d0
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/integrations/serverless.py
@@ -0,0 +1,87 @@
+import functools
+import sys
+
+from sentry_sdk.hub import Hub
+from sentry_sdk.utils import event_from_exception
+from sentry_sdk._compat import reraise
+
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Any
+ from typing import Callable
+ from typing import TypeVar
+ from typing import Union
+ from typing import Optional
+
+ from typing import overload
+
+ F = TypeVar("F", bound=Callable[..., Any])
+
+else:
+
+ def overload(x):
+ # type: (F) -> F
+ return x
+
+
+@overload
+def serverless_function(f, flush=True):
+ # type: (F, bool) -> F
+ pass
+
+
+@overload # noqa
+def serverless_function(f=None, flush=True):
+ # type: (None, bool) -> Callable[[F], F]
+ pass
+
+
+def serverless_function(f=None, flush=True): # noqa
+ # type: (Optional[F], bool) -> Union[F, Callable[[F], F]]
+ def wrapper(f):
+ # type: (F) -> F
+ @functools.wraps(f)
+ def inner(*args, **kwargs):
+ # type: (*Any, **Any) -> Any
+ with Hub(Hub.current) as hub:
+ with hub.configure_scope() as scope:
+ scope.clear_breadcrumbs()
+
+ try:
+ return f(*args, **kwargs)
+ except Exception:
+ _capture_and_reraise()
+ finally:
+ if flush:
+ _flush_client()
+
+ return inner # type: ignore
+
+ if f is None:
+ return wrapper
+ else:
+ return wrapper(f)
+
+
+def _capture_and_reraise():
+ # type: () -> None
+ exc_info = sys.exc_info()
+ hub = Hub.current
+ if hub is not None and hub.client is not None:
+ event, hint = event_from_exception(
+ exc_info,
+ client_options=hub.client.options,
+ mechanism={"type": "serverless", "handled": False},
+ )
+ hub.capture_event(event, hint=hint)
+
+ reraise(*exc_info)
+
+
+def _flush_client():
+ # type: () -> None
+ hub = Hub.current
+ if hub is not None:
+ hub.flush()
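
The decorator above targets serverless runtimes that have no dedicated integration: it clears breadcrumbs per invocation, captures and re-raises uncaught exceptions, and flushes the client before returning. A minimal sketch with a placeholder DSN; the handler signature is whatever the platform expects:

    import sentry_sdk
    from sentry_sdk.integrations.serverless import serverless_function

    sentry_sdk.init(dsn="https://examplePublicKey@o0.ingest.sentry.io/0")  # placeholder DSN

    @serverless_function
    def handler(event, context):
        # Any exception raised here is reported, re-raised for the platform to
        # see, and flushed before the runtime is frozen or torn down.
        return {"status": "ok"}

Passing flush=False, as in @serverless_function(flush=False), skips the blocking flush at the end of each call.
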
diff --git a/third_party/python/sentry_sdk/sentry_sdk/integrations/spark/__init__.py b/third_party/python/sentry_sdk/sentry_sdk/integrations/spark/__init__.py
new file mode 100644
index 0000000000..10d94163c5
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/integrations/spark/__init__.py
@@ -0,0 +1,4 @@
+from sentry_sdk.integrations.spark.spark_driver import SparkIntegration
+from sentry_sdk.integrations.spark.spark_worker import SparkWorkerIntegration
+
+__all__ = ["SparkIntegration", "SparkWorkerIntegration"]
diff --git a/third_party/python/sentry_sdk/sentry_sdk/integrations/spark/spark_driver.py b/third_party/python/sentry_sdk/sentry_sdk/integrations/spark/spark_driver.py
new file mode 100644
index 0000000000..ea43c37821
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/integrations/spark/spark_driver.py
@@ -0,0 +1,263 @@
+from sentry_sdk import configure_scope
+from sentry_sdk.hub import Hub
+from sentry_sdk.integrations import Integration
+from sentry_sdk.utils import capture_internal_exceptions
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Any
+ from typing import Optional
+
+ from sentry_sdk._types import Event, Hint
+
+
+class SparkIntegration(Integration):
+ identifier = "spark"
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ patch_spark_context_init()
+
+
+def _set_app_properties():
+ # type: () -> None
+ """
+ Set properties on the driver that propagate to worker processes, so that workers can access them.
+ This gives the worker integration access to app_name and application_id.
+ """
+ from pyspark import SparkContext
+
+ spark_context = SparkContext._active_spark_context
+ if spark_context:
+ spark_context.setLocalProperty("sentry_app_name", spark_context.appName)
+ spark_context.setLocalProperty(
+ "sentry_application_id", spark_context.applicationId
+ )
+
+
+def _start_sentry_listener(sc):
+ # type: (Any) -> None
+ """
+ Start the Java gateway callback server and register a custom `SparkListener`.
+ """
+ from pyspark.java_gateway import ensure_callback_server_started
+
+ gw = sc._gateway
+ ensure_callback_server_started(gw)
+ listener = SentryListener()
+ sc._jsc.sc().addSparkListener(listener)
+
+
+def patch_spark_context_init():
+ # type: () -> None
+ from pyspark import SparkContext
+
+ spark_context_init = SparkContext._do_init
+
+ def _sentry_patched_spark_context_init(self, *args, **kwargs):
+ # type: (SparkContext, *Any, **Any) -> Optional[Any]
+ init = spark_context_init(self, *args, **kwargs)
+
+ if Hub.current.get_integration(SparkIntegration) is None:
+ return init
+
+ _start_sentry_listener(self)
+ _set_app_properties()
+
+ with configure_scope() as scope:
+
+ @scope.add_event_processor
+ def process_event(event, hint):
+ # type: (Event, Hint) -> Optional[Event]
+ with capture_internal_exceptions():
+ if Hub.current.get_integration(SparkIntegration) is None:
+ return event
+
+ event.setdefault("user", {}).setdefault("id", self.sparkUser())
+
+ event.setdefault("tags", {}).setdefault(
+ "executor.id", self._conf.get("spark.executor.id")
+ )
+ event["tags"].setdefault(
+ "spark-submit.deployMode",
+ self._conf.get("spark.submit.deployMode"),
+ )
+ event["tags"].setdefault(
+ "driver.host", self._conf.get("spark.driver.host")
+ )
+ event["tags"].setdefault(
+ "driver.port", self._conf.get("spark.driver.port")
+ )
+ event["tags"].setdefault("spark_version", self.version)
+ event["tags"].setdefault("app_name", self.appName)
+ event["tags"].setdefault("application_id", self.applicationId)
+ event["tags"].setdefault("master", self.master)
+ event["tags"].setdefault("spark_home", self.sparkHome)
+
+ event.setdefault("extra", {}).setdefault("web_url", self.uiWebUrl)
+
+ return event
+
+ return init
+
+ SparkContext._do_init = _sentry_patched_spark_context_init
+
+
+class SparkListener(object):
+ def onApplicationEnd(self, applicationEnd): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onApplicationStart(self, applicationStart): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onBlockManagerAdded(self, blockManagerAdded): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onBlockManagerRemoved(self, blockManagerRemoved): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onBlockUpdated(self, blockUpdated): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onEnvironmentUpdate(self, environmentUpdate): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onExecutorAdded(self, executorAdded): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onExecutorBlacklisted(self, executorBlacklisted): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onExecutorBlacklistedForStage( # noqa: N802
+ self, executorBlacklistedForStage # noqa: N803
+ ):
+ # type: (Any) -> None
+ pass
+
+ def onExecutorMetricsUpdate(self, executorMetricsUpdate): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onExecutorRemoved(self, executorRemoved): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onJobEnd(self, jobEnd): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onJobStart(self, jobStart): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onNodeBlacklisted(self, nodeBlacklisted): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onNodeBlacklistedForStage(self, nodeBlacklistedForStage): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onNodeUnblacklisted(self, nodeUnblacklisted): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onOtherEvent(self, event): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onSpeculativeTaskSubmitted(self, speculativeTask): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onStageCompleted(self, stageCompleted): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onStageSubmitted(self, stageSubmitted): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onTaskEnd(self, taskEnd): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onTaskGettingResult(self, taskGettingResult): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onTaskStart(self, taskStart): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ def onUnpersistRDD(self, unpersistRDD): # noqa: N802,N803
+ # type: (Any) -> None
+ pass
+
+ class Java:
+ implements = ["org.apache.spark.scheduler.SparkListenerInterface"]
+
+
+class SentryListener(SparkListener):
+ def __init__(self):
+ # type: () -> None
+ self.hub = Hub.current
+
+ def onJobStart(self, jobStart): # noqa: N802,N803
+ # type: (Any) -> None
+ message = "Job {} Started".format(jobStart.jobId())
+ self.hub.add_breadcrumb(level="info", message=message)
+ _set_app_properties()
+
+ def onJobEnd(self, jobEnd): # noqa: N802,N803
+ # type: (Any) -> None
+ level = ""
+ message = ""
+ data = {"result": jobEnd.jobResult().toString()}
+
+ if jobEnd.jobResult().toString() == "JobSucceeded":
+ level = "info"
+ message = "Job {} Ended".format(jobEnd.jobId())
+ else:
+ level = "warning"
+ message = "Job {} Failed".format(jobEnd.jobId())
+
+ self.hub.add_breadcrumb(level=level, message=message, data=data)
+
+ def onStageSubmitted(self, stageSubmitted): # noqa: N802,N803
+ # type: (Any) -> None
+ stage_info = stageSubmitted.stageInfo()
+ message = "Stage {} Submitted".format(stage_info.stageId())
+ data = {"attemptId": stage_info.attemptId(), "name": stage_info.name()}
+ self.hub.add_breadcrumb(level="info", message=message, data=data)
+ _set_app_properties()
+
+ def onStageCompleted(self, stageCompleted): # noqa: N802,N803
+ # type: (Any) -> None
+ from py4j.protocol import Py4JJavaError # type: ignore
+
+ stage_info = stageCompleted.stageInfo()
+ message = ""
+ level = ""
+ data = {"attemptId": stage_info.attemptId(), "name": stage_info.name()}
+
+ # Have to try/except because stageInfo.failureReason() returns a Scala Option
+ try:
+ data["reason"] = stage_info.failureReason().get()
+ message = "Stage {} Failed".format(stage_info.stageId())
+ level = "warning"
+ except Py4JJavaError:
+ message = "Stage {} Completed".format(stage_info.stageId())
+ level = "info"
+
+ self.hub.add_breadcrumb(level=level, message=message, data=data)
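
Because setup_once() hooks SparkContext._do_init, sentry_sdk.init() has to run before the SparkContext is created or the listener and event processor above are never installed. A hedged driver-side sketch (placeholder DSN; the master URL and app name are made up):

    import sentry_sdk
    from sentry_sdk.integrations.spark import SparkIntegration

    # init() must come before the SparkContext is constructed.
    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        integrations=[SparkIntegration()],
    )

    from pyspark import SparkContext  # noqa: E402

    sc = SparkContext(master="local[2]", appName="sentry-spark-demo")
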
diff --git a/third_party/python/sentry_sdk/sentry_sdk/integrations/spark/spark_worker.py b/third_party/python/sentry_sdk/sentry_sdk/integrations/spark/spark_worker.py
new file mode 100644
index 0000000000..bae4413d11
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/integrations/spark/spark_worker.py
@@ -0,0 +1,120 @@
+from __future__ import absolute_import
+
+import sys
+
+from sentry_sdk import configure_scope
+from sentry_sdk.hub import Hub
+from sentry_sdk.integrations import Integration
+from sentry_sdk.utils import (
+ capture_internal_exceptions,
+ exc_info_from_error,
+ single_exception_from_error_tuple,
+ walk_exception_chain,
+ event_hint_with_exc_info,
+)
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Any
+ from typing import Optional
+
+ from sentry_sdk._types import ExcInfo, Event, Hint
+
+
+class SparkWorkerIntegration(Integration):
+ identifier = "spark_worker"
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ import pyspark.daemon as original_daemon
+
+ original_daemon.worker_main = _sentry_worker_main
+
+
+def _capture_exception(exc_info, hub):
+ # type: (ExcInfo, Hub) -> None
+ client = hub.client
+
+ client_options = client.options # type: ignore
+
+ mechanism = {"type": "spark", "handled": False}
+
+ exc_info = exc_info_from_error(exc_info)
+
+ exc_type, exc_value, tb = exc_info
+ rv = []
+
+ # On an exception the worker calls sys.exit(-1), so we can ignore SystemExit and similar errors
+ for exc_type, exc_value, tb in walk_exception_chain(exc_info):
+ if exc_type not in (SystemExit, EOFError, ConnectionResetError):
+ rv.append(
+ single_exception_from_error_tuple(
+ exc_type, exc_value, tb, client_options, mechanism
+ )
+ )
+
+ if rv:
+ rv.reverse()
+ hint = event_hint_with_exc_info(exc_info)
+ event = {"level": "error", "exception": {"values": rv}}
+
+ _tag_task_context()
+
+ hub.capture_event(event, hint=hint)
+
+
+def _tag_task_context():
+ # type: () -> None
+ from pyspark.taskcontext import TaskContext
+
+ with configure_scope() as scope:
+
+ @scope.add_event_processor
+ def process_event(event, hint):
+ # type: (Event, Hint) -> Optional[Event]
+ with capture_internal_exceptions():
+ integration = Hub.current.get_integration(SparkWorkerIntegration)
+ task_context = TaskContext.get()
+
+ if integration is None or task_context is None:
+ return event
+
+ event.setdefault("tags", {}).setdefault(
+ "stageId", task_context.stageId()
+ )
+ event["tags"].setdefault("partitionId", task_context.partitionId())
+ event["tags"].setdefault("attemptNumber", task_context.attemptNumber())
+ event["tags"].setdefault("taskAttemptId", task_context.taskAttemptId())
+
+ if task_context._localProperties:
+ if "sentry_app_name" in task_context._localProperties:
+ event["tags"].setdefault(
+ "app_name", task_context._localProperties["sentry_app_name"]
+ )
+ event["tags"].setdefault(
+ "application_id",
+ task_context._localProperties["sentry_application_id"],
+ )
+
+ if "callSite.short" in task_context._localProperties:
+ event.setdefault("extra", {}).setdefault(
+ "callSite", task_context._localProperties["callSite.short"]
+ )
+
+ return event
+
+
+def _sentry_worker_main(*args, **kwargs):
+ # type: (*Optional[Any], **Optional[Any]) -> None
+ import pyspark.worker as original_worker
+
+ try:
+ original_worker.main(*args, **kwargs)
+ except SystemExit:
+ if Hub.current.get_integration(SparkWorkerIntegration) is not None:
+ hub = Hub.current
+ exc_info = sys.exc_info()
+ with capture_internal_exceptions():
+ _capture_exception(exc_info, hub)
diff --git a/third_party/python/sentry_sdk/sentry_sdk/integrations/sqlalchemy.py b/third_party/python/sentry_sdk/sentry_sdk/integrations/sqlalchemy.py
new file mode 100644
index 0000000000..f24d2f20bf
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/integrations/sqlalchemy.py
@@ -0,0 +1,86 @@
+from __future__ import absolute_import
+
+from sentry_sdk._types import MYPY
+from sentry_sdk.hub import Hub
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.tracing import record_sql_queries
+
+try:
+ from sqlalchemy.engine import Engine # type: ignore
+ from sqlalchemy.event import listen # type: ignore
+ from sqlalchemy import __version__ as SQLALCHEMY_VERSION # type: ignore
+except ImportError:
+ raise DidNotEnable("SQLAlchemy not installed.")
+
+if MYPY:
+ from typing import Any
+ from typing import ContextManager
+ from typing import Optional
+
+ from sentry_sdk.tracing import Span
+
+
+class SqlalchemyIntegration(Integration):
+ identifier = "sqlalchemy"
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+
+ try:
+ version = tuple(map(int, SQLALCHEMY_VERSION.split("b")[0].split(".")))
+ except (TypeError, ValueError):
+ raise DidNotEnable(
+ "Unparseable SQLAlchemy version: {}".format(SQLALCHEMY_VERSION)
+ )
+
+ if version < (1, 2):
+ raise DidNotEnable("SQLAlchemy 1.2 or newer required.")
+
+ listen(Engine, "before_cursor_execute", _before_cursor_execute)
+ listen(Engine, "after_cursor_execute", _after_cursor_execute)
+ listen(Engine, "handle_error", _handle_error)
+
+
+def _before_cursor_execute(
+ conn, cursor, statement, parameters, context, executemany, *args
+):
+ # type: (Any, Any, Any, Any, Any, bool, *Any) -> None
+ hub = Hub.current
+ if hub.get_integration(SqlalchemyIntegration) is None:
+ return
+
+ ctx_mgr = record_sql_queries(
+ hub,
+ cursor,
+ statement,
+ parameters,
+ paramstyle=context and context.dialect and context.dialect.paramstyle or None,
+ executemany=executemany,
+ )
+ conn._sentry_sql_span_manager = ctx_mgr
+
+ span = ctx_mgr.__enter__()
+
+ if span is not None:
+ conn._sentry_sql_span = span
+
+
+def _after_cursor_execute(conn, cursor, statement, *args):
+ # type: (Any, Any, Any, *Any) -> None
+ ctx_mgr = getattr(
+ conn, "_sentry_sql_span_manager", None
+ ) # type: ContextManager[Any]
+
+ if ctx_mgr is not None:
+ conn._sentry_sql_span_manager = None
+ ctx_mgr.__exit__(None, None, None)
+
+
+def _handle_error(context, *args):
+ # type: (Any, *Any) -> None
+ conn = context.connection
+ span = getattr(conn, "_sentry_sql_span", None) # type: Optional[Span]
+
+ if span is not None:
+ span.set_status("internal_error")
diff --git a/third_party/python/sentry_sdk/sentry_sdk/integrations/stdlib.py b/third_party/python/sentry_sdk/sentry_sdk/integrations/stdlib.py
new file mode 100644
index 0000000000..56cece70ac
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/integrations/stdlib.py
@@ -0,0 +1,230 @@
+import os
+import subprocess
+import sys
+import platform
+
+from sentry_sdk.hub import Hub
+from sentry_sdk.integrations import Integration
+from sentry_sdk.scope import add_global_event_processor
+from sentry_sdk.tracing import EnvironHeaders
+from sentry_sdk.utils import capture_internal_exceptions, safe_repr
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Any
+ from typing import Callable
+ from typing import Dict
+ from typing import Optional
+ from typing import List
+
+ from sentry_sdk._types import Event, Hint
+
+
+try:
+ from httplib import HTTPConnection # type: ignore
+except ImportError:
+ from http.client import HTTPConnection
+
+
+_RUNTIME_CONTEXT = {
+ "name": platform.python_implementation(),
+ "version": "%s.%s.%s" % (sys.version_info[:3]),
+ "build": sys.version,
+}
+
+
+class StdlibIntegration(Integration):
+ identifier = "stdlib"
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ _install_httplib()
+ _install_subprocess()
+
+ @add_global_event_processor
+ def add_python_runtime_context(event, hint):
+ # type: (Event, Hint) -> Optional[Event]
+ if Hub.current.get_integration(StdlibIntegration) is not None:
+ contexts = event.setdefault("contexts", {})
+ if isinstance(contexts, dict) and "runtime" not in contexts:
+ contexts["runtime"] = _RUNTIME_CONTEXT
+
+ return event
+
+
+def _install_httplib():
+ # type: () -> None
+ real_putrequest = HTTPConnection.putrequest
+ real_getresponse = HTTPConnection.getresponse
+
+ def putrequest(self, method, url, *args, **kwargs):
+ # type: (HTTPConnection, str, str, *Any, **Any) -> Any
+ hub = Hub.current
+ if hub.get_integration(StdlibIntegration) is None:
+ return real_putrequest(self, method, url, *args, **kwargs)
+
+ host = self.host
+ port = self.port
+ default_port = self.default_port
+
+ real_url = url
+ if not real_url.startswith(("http://", "https://")):
+ real_url = "%s://%s%s%s" % (
+ default_port == 443 and "https" or "http",
+ host,
+ port != default_port and ":%s" % port or "",
+ url,
+ )
+
+ span = hub.start_span(op="http", description="%s %s" % (method, real_url))
+
+ span.set_data("method", method)
+ span.set_data("url", real_url)
+
+ rv = real_putrequest(self, method, url, *args, **kwargs)
+
+ for key, value in hub.iter_trace_propagation_headers():
+ self.putheader(key, value)
+
+ self._sentrysdk_span = span
+
+ return rv
+
+ def getresponse(self, *args, **kwargs):
+ # type: (HTTPConnection, *Any, **Any) -> Any
+ span = getattr(self, "_sentrysdk_span", None)
+
+ if span is None:
+ return real_getresponse(self, *args, **kwargs)
+
+ rv = real_getresponse(self, *args, **kwargs)
+
+ span.set_data("status_code", rv.status)
+ span.set_http_status(int(rv.status))
+ span.set_data("reason", rv.reason)
+ span.finish()
+
+ return rv
+
+ HTTPConnection.putrequest = putrequest
+ HTTPConnection.getresponse = getresponse
+
+
+def _init_argument(args, kwargs, name, position, setdefault_callback=None):
+ # type: (List[Any], Dict[Any, Any], str, int, Optional[Callable[[Any], Any]]) -> Any
+ """
+ Given (*args, **kwargs) of a function call, retrieve (and optionally set a
+ default for) an argument by either name or position.
+
+ This is useful for wrapping functions with complex type signatures and
+ extracting a few arguments without needing to redefine that function's
+ entire type signature.
+ """
+
+ if name in kwargs:
+ rv = kwargs[name]
+ if setdefault_callback is not None:
+ rv = setdefault_callback(rv)
+ if rv is not None:
+ kwargs[name] = rv
+ elif position < len(args):
+ rv = args[position]
+ if setdefault_callback is not None:
+ rv = setdefault_callback(rv)
+ if rv is not None:
+ args[position] = rv
+ else:
+ rv = setdefault_callback and setdefault_callback(None)
+ if rv is not None:
+ kwargs[name] = rv
+
+ return rv
+
+
+def _install_subprocess():
+ # type: () -> None
+ old_popen_init = subprocess.Popen.__init__
+
+ def sentry_patched_popen_init(self, *a, **kw):
+ # type: (subprocess.Popen[Any], *Any, **Any) -> None
+
+ hub = Hub.current
+ if hub.get_integration(StdlibIntegration) is None:
+ return old_popen_init(self, *a, **kw) # type: ignore
+
+ # Convert from tuple to list to be able to set values.
+ a = list(a)
+
+ args = _init_argument(a, kw, "args", 0) or []
+ cwd = _init_argument(a, kw, "cwd", 9)
+
+ # if args is not a list or tuple (and e.g. some iterator instead),
+ # let's not use it at all. There are too many things that can go wrong
+ # when trying to collect an iterator into a list and setting that list
+ # into `a` again.
+ #
+ # Also invocations where `args` is not a sequence are not actually
+ # legal. They just happen to work under CPython.
+ description = None
+
+ if isinstance(args, (list, tuple)) and len(args) < 100:
+ with capture_internal_exceptions():
+ description = " ".join(map(str, args))
+
+ if description is None:
+ description = safe_repr(args)
+
+ env = None
+
+ for k, v in hub.iter_trace_propagation_headers():
+ if env is None:
+ env = _init_argument(a, kw, "env", 10, lambda x: dict(x or os.environ))
+ env["SUBPROCESS_" + k.upper().replace("-", "_")] = v
+
+ with hub.start_span(op="subprocess", description=description) as span:
+ if cwd:
+ span.set_data("subprocess.cwd", cwd)
+
+ rv = old_popen_init(self, *a, **kw) # type: ignore
+
+ span.set_tag("subprocess.pid", self.pid)
+ return rv
+
+ subprocess.Popen.__init__ = sentry_patched_popen_init # type: ignore
+
+ old_popen_wait = subprocess.Popen.wait
+
+ def sentry_patched_popen_wait(self, *a, **kw):
+ # type: (subprocess.Popen[Any], *Any, **Any) -> Any
+ hub = Hub.current
+
+ if hub.get_integration(StdlibIntegration) is None:
+ return old_popen_wait(self, *a, **kw)
+
+ with hub.start_span(op="subprocess.wait") as span:
+ span.set_tag("subprocess.pid", self.pid)
+ return old_popen_wait(self, *a, **kw)
+
+ subprocess.Popen.wait = sentry_patched_popen_wait # type: ignore
+
+ old_popen_communicate = subprocess.Popen.communicate
+
+ def sentry_patched_popen_communicate(self, *a, **kw):
+ # type: (subprocess.Popen[Any], *Any, **Any) -> Any
+ hub = Hub.current
+
+ if hub.get_integration(StdlibIntegration) is None:
+ return old_popen_communicate(self, *a, **kw)
+
+ with hub.start_span(op="subprocess.communicate") as span:
+ span.set_tag("subprocess.pid", self.pid)
+ return old_popen_communicate(self, *a, **kw)
+
+ subprocess.Popen.communicate = sentry_patched_popen_communicate # type: ignore
+
+
+def get_subprocess_traceparent_headers():
+ # type: () -> EnvironHeaders
+ return EnvironHeaders(os.environ, prefix="SUBPROCESS_")
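
sentry_patched_popen_init above copies the current trace headers into the child's environment under a SUBPROCESS_ prefix, and get_subprocess_traceparent_headers() reads them back. A hedged sketch of a Python child process that continues the parent's trace (placeholder DSN; the transaction and op names are made up):

    import sentry_sdk
    from sentry_sdk import Hub
    from sentry_sdk.integrations.stdlib import get_subprocess_traceparent_headers
    from sentry_sdk.tracing import Span

    sentry_sdk.init(dsn="https://examplePublicKey@o0.ingest.sentry.io/0")  # placeholder DSN

    # Rebuild a span from the SUBPROCESS_* variables injected by the parent.
    span = Span.continue_from_headers(get_subprocess_traceparent_headers())
    span.transaction = "child-process-work"  # hypothetical transaction name
    span.op = "subprocess.child"             # hypothetical op name

    with Hub.current.start_span(span):
        pass  # the child's actual work goes here
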
diff --git a/third_party/python/sentry_sdk/sentry_sdk/integrations/threading.py b/third_party/python/sentry_sdk/sentry_sdk/integrations/threading.py
new file mode 100644
index 0000000000..b750257e2a
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/integrations/threading.py
@@ -0,0 +1,90 @@
+from __future__ import absolute_import
+
+import sys
+from threading import Thread, current_thread
+
+from sentry_sdk import Hub
+from sentry_sdk._compat import reraise
+from sentry_sdk._types import MYPY
+from sentry_sdk.integrations import Integration
+from sentry_sdk.utils import event_from_exception, capture_internal_exceptions
+
+if MYPY:
+ from typing import Any
+ from typing import TypeVar
+ from typing import Callable
+ from typing import Optional
+
+ from sentry_sdk._types import ExcInfo
+
+ F = TypeVar("F", bound=Callable[..., Any])
+
+
+class ThreadingIntegration(Integration):
+ identifier = "threading"
+
+ def __init__(self, propagate_hub=False):
+ # type: (bool) -> None
+ self.propagate_hub = propagate_hub
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ old_start = Thread.start
+
+ def sentry_start(self, *a, **kw):
+ # type: (Thread, *Any, **Any) -> Any
+ hub = Hub.current
+ integration = hub.get_integration(ThreadingIntegration)
+ if integration is not None:
+ if not integration.propagate_hub:
+ hub_ = None
+ else:
+ hub_ = Hub(hub)
+ # Patching instance methods in `start()` creates a reference cycle if
+ # done in a naive way. See
+ # https://github.com/getsentry/sentry-python/pull/434
+ #
+ # Using the current_thread() API inside run() gives access to the thread
+ # instance without having to hold a reference to it, which avoids the cycle.
+ with capture_internal_exceptions():
+ new_run = _wrap_run(hub_, getattr(self.run, "__func__", self.run))
+ self.run = new_run # type: ignore
+
+ return old_start(self, *a, **kw) # type: ignore
+
+ Thread.start = sentry_start # type: ignore
+
+
+def _wrap_run(parent_hub, old_run_func):
+ # type: (Optional[Hub], F) -> F
+ def run(*a, **kw):
+ # type: (*Any, **Any) -> Any
+ hub = parent_hub or Hub.current
+ with hub:
+ try:
+ self = current_thread()
+ return old_run_func(self, *a, **kw)
+ except Exception:
+ reraise(*_capture_exception())
+
+ return run # type: ignore
+
+
+def _capture_exception():
+ # type: () -> ExcInfo
+ hub = Hub.current
+ exc_info = sys.exc_info()
+
+ if hub.get_integration(ThreadingIntegration) is not None:
+ # If an integration is there, a client has to be there.
+ client = hub.client # type: Any
+
+ event, hint = event_from_exception(
+ exc_info,
+ client_options=client.options,
+ mechanism={"type": "threading", "handled": False},
+ )
+ hub.capture_event(event, hint=hint)
+
+ return exc_info
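
With propagate_hub=True, each Thread runs with a clone of the hub that was current when start() was called, so breadcrumbs and tags set in the parent are attached to events captured from the worker thread. A short sketch with a placeholder DSN:

    import threading

    import sentry_sdk
    from sentry_sdk.integrations.threading import ThreadingIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        integrations=[ThreadingIntegration(propagate_hub=True)],
    )

    sentry_sdk.add_breadcrumb(message="about to start the worker thread")

    def work():
        # This exception is captured by the wrapped run() and carries the
        # breadcrumb recorded on the parent hub above.
        raise RuntimeError("boom")

    threading.Thread(target=work).start()
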
diff --git a/third_party/python/sentry_sdk/sentry_sdk/integrations/tornado.py b/third_party/python/sentry_sdk/sentry_sdk/integrations/tornado.py
new file mode 100644
index 0000000000..d3ae065690
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/integrations/tornado.py
@@ -0,0 +1,203 @@
+import weakref
+from inspect import iscoroutinefunction
+
+from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk.utils import (
+ HAS_REAL_CONTEXTVARS,
+ event_from_exception,
+ capture_internal_exceptions,
+ transaction_from_function,
+)
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.integrations._wsgi_common import (
+ RequestExtractor,
+ _filter_headers,
+ _is_json_content_type,
+)
+from sentry_sdk.integrations.logging import ignore_logger
+from sentry_sdk._compat import iteritems
+
+try:
+ from tornado import version_info as TORNADO_VERSION # type: ignore
+ from tornado.web import RequestHandler, HTTPError
+ from tornado.gen import coroutine
+except ImportError:
+ raise DidNotEnable("Tornado not installed")
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Any
+ from typing import Optional
+ from typing import Dict
+ from typing import Callable
+
+ from sentry_sdk._types import EventProcessor
+
+
+class TornadoIntegration(Integration):
+ identifier = "tornado"
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ if TORNADO_VERSION < (5, 0):
+ raise DidNotEnable("Tornado 5+ required")
+
+ if not HAS_REAL_CONTEXTVARS:
+ # Tornado is async. We better have contextvars or we're going to leak
+ # state between requests.
+ raise DidNotEnable(
+ "The tornado integration for Sentry requires Python 3.6+ or the aiocontextvars package"
+ )
+
+ ignore_logger("tornado.access")
+
+ old_execute = RequestHandler._execute # type: ignore
+
+ awaitable = iscoroutinefunction(old_execute)
+
+ if awaitable:
+ # Starting with Tornado 6, RequestHandler._execute is a standard Python coroutine (async/await).
+ # In that case our replacement has to be a coroutine function too.
+ async def sentry_execute_request_handler(self, *args, **kwargs):
+ # type: (Any, *Any, **Any) -> Any
+ hub = Hub.current
+ integration = hub.get_integration(TornadoIntegration)
+ if integration is None:
+ return await old_execute(self, *args, **kwargs)
+
+ weak_handler = weakref.ref(self)
+
+ with Hub(hub) as hub:
+ with hub.configure_scope() as scope:
+ scope.clear_breadcrumbs()
+ processor = _make_event_processor(weak_handler) # type: ignore
+ scope.add_event_processor(processor)
+ return await old_execute(self, *args, **kwargs)
+
+ else:
+
+ @coroutine # type: ignore
+ def sentry_execute_request_handler(self, *args, **kwargs):
+ # type: (RequestHandler, *Any, **Any) -> Any
+ hub = Hub.current
+ integration = hub.get_integration(TornadoIntegration)
+ if integration is None:
+ return old_execute(self, *args, **kwargs)
+
+ weak_handler = weakref.ref(self)
+
+ with Hub(hub) as hub:
+ with hub.configure_scope() as scope:
+ scope.clear_breadcrumbs()
+ processor = _make_event_processor(weak_handler) # type: ignore
+ scope.add_event_processor(processor)
+ result = yield from old_execute(self, *args, **kwargs)
+ return result
+
+ RequestHandler._execute = sentry_execute_request_handler # type: ignore
+
+ old_log_exception = RequestHandler.log_exception
+
+ def sentry_log_exception(self, ty, value, tb, *args, **kwargs):
+ # type: (Any, type, BaseException, Any, *Any, **Any) -> Optional[Any]
+ _capture_exception(ty, value, tb)
+ return old_log_exception(self, ty, value, tb, *args, **kwargs) # type: ignore
+
+ RequestHandler.log_exception = sentry_log_exception # type: ignore
+
+
+def _capture_exception(ty, value, tb):
+ # type: (type, BaseException, Any) -> None
+ hub = Hub.current
+ if hub.get_integration(TornadoIntegration) is None:
+ return
+ if isinstance(value, HTTPError):
+ return
+
+ # If an integration is there, a client has to be there.
+ client = hub.client # type: Any
+
+ event, hint = event_from_exception(
+ (ty, value, tb),
+ client_options=client.options,
+ mechanism={"type": "tornado", "handled": False},
+ )
+
+ hub.capture_event(event, hint=hint)
+
+
+def _make_event_processor(weak_handler):
+ # type: (Callable[[], RequestHandler]) -> EventProcessor
+ def tornado_processor(event, hint):
+ # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+ handler = weak_handler()
+ if handler is None:
+ return event
+
+ request = handler.request
+
+ with capture_internal_exceptions():
+ method = getattr(handler, handler.request.method.lower())
+ event["transaction"] = transaction_from_function(method)
+
+ with capture_internal_exceptions():
+ extractor = TornadoRequestExtractor(request)
+ extractor.extract_into_event(event)
+
+ request_info = event["request"]
+
+ request_info["url"] = "%s://%s%s" % (
+ request.protocol,
+ request.host,
+ request.path,
+ )
+
+ request_info["query_string"] = request.query
+ request_info["method"] = request.method
+ request_info["env"] = {"REMOTE_ADDR": request.remote_ip}
+ request_info["headers"] = _filter_headers(dict(request.headers))
+
+ with capture_internal_exceptions():
+ if handler.current_user and _should_send_default_pii():
+ event.setdefault("user", {}).setdefault("is_authenticated", True)
+
+ return event
+
+ return tornado_processor
+
+
+class TornadoRequestExtractor(RequestExtractor):
+ def content_length(self):
+ # type: () -> int
+ if self.request.body is None:
+ return 0
+ return len(self.request.body)
+
+ def cookies(self):
+ # type: () -> Dict[str, str]
+ return {k: v.value for k, v in iteritems(self.request.cookies)}
+
+ def raw_data(self):
+ # type: () -> bytes
+ return self.request.body
+
+ def form(self):
+ # type: () -> Dict[str, Any]
+ return {
+ k: [v.decode("latin1", "replace") for v in vs]
+ for k, vs in iteritems(self.request.body_arguments)
+ }
+
+ def is_json(self):
+ # type: () -> bool
+ return _is_json_content_type(self.request.headers.get("content-type"))
+
+ def files(self):
+ # type: () -> Dict[str, Any]
+ return {k: v[0] for k, v in iteritems(self.request.files) if v}
+
+ def size_of_file(self, file):
+ # type: (Any) -> int
+ return len(file.body or ())
diff --git a/third_party/python/sentry_sdk/sentry_sdk/integrations/trytond.py b/third_party/python/sentry_sdk/sentry_sdk/integrations/trytond.py
new file mode 100644
index 0000000000..062a756993
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/integrations/trytond.py
@@ -0,0 +1,55 @@
+import sentry_sdk.hub
+import sentry_sdk.utils
+import sentry_sdk.integrations
+import sentry_sdk.integrations.wsgi
+from sentry_sdk._types import MYPY
+
+from trytond.exceptions import TrytonException # type: ignore
+from trytond.wsgi import app # type: ignore
+
+if MYPY:
+ from typing import Any
+
+
+ # TODO: trytond-worker, trytond-cron and trytond-admin integrations
+
+
+class TrytondWSGIIntegration(sentry_sdk.integrations.Integration):
+ identifier = "trytond_wsgi"
+
+ def __init__(self): # type: () -> None
+ pass
+
+ @staticmethod
+ def setup_once(): # type: () -> None
+
+ app.wsgi_app = sentry_sdk.integrations.wsgi.SentryWsgiMiddleware(app.wsgi_app)
+
+ def error_handler(e): # type: (Exception) -> None
+ hub = sentry_sdk.hub.Hub.current
+
+ if hub.get_integration(TrytondWSGIIntegration) is None:
+ return
+ elif isinstance(e, TrytonException):
+ return
+ else:
+ # If an integration is there, a client has to be there.
+ client = hub.client # type: Any
+ event, hint = sentry_sdk.utils.event_from_exception(
+ e,
+ client_options=client.options,
+ mechanism={"type": "trytond", "handled": False},
+ )
+ hub.capture_event(event, hint=hint)
+
+ # The expected error handler signature changed when the
+ # error_handler decorator was introduced in Tryton 5.4.
+ if hasattr(app, "error_handler"):
+
+ @app.error_handler
+ def _(app, request, e): # type: ignore
+ error_handler(e)
+
+ else:
+ app.error_handlers.append(error_handler)
diff --git a/third_party/python/sentry_sdk/sentry_sdk/integrations/wsgi.py b/third_party/python/sentry_sdk/sentry_sdk/integrations/wsgi.py
new file mode 100644
index 0000000000..22982d8bb1
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/integrations/wsgi.py
@@ -0,0 +1,309 @@
+import functools
+import sys
+
+from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk.utils import (
+ ContextVar,
+ capture_internal_exceptions,
+ event_from_exception,
+)
+from sentry_sdk._compat import PY2, reraise, iteritems
+from sentry_sdk.tracing import Span
+from sentry_sdk.sessions import auto_session_tracking
+from sentry_sdk.integrations._wsgi_common import _filter_headers
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Callable
+ from typing import Dict
+ from typing import Iterator
+ from typing import Any
+ from typing import Tuple
+ from typing import Optional
+ from typing import TypeVar
+ from typing import Protocol
+
+ from sentry_sdk.utils import ExcInfo
+ from sentry_sdk._types import EventProcessor
+
+ WsgiResponseIter = TypeVar("WsgiResponseIter")
+ WsgiResponseHeaders = TypeVar("WsgiResponseHeaders")
+ WsgiExcInfo = TypeVar("WsgiExcInfo")
+
+ class StartResponse(Protocol):
+ def __call__(self, status, response_headers, exc_info=None):
+ # type: (str, WsgiResponseHeaders, Optional[WsgiExcInfo]) -> WsgiResponseIter
+ pass
+
+
+_wsgi_middleware_applied = ContextVar("sentry_wsgi_middleware_applied")
+
+
+if PY2:
+
+ def wsgi_decoding_dance(s, charset="utf-8", errors="replace"):
+ # type: (str, str, str) -> str
+ return s.decode(charset, errors)
+
+
+else:
+
+ def wsgi_decoding_dance(s, charset="utf-8", errors="replace"):
+ # type: (str, str, str) -> str
+ return s.encode("latin1").decode(charset, errors)
+
+
+def get_host(environ):
+ # type: (Dict[str, str]) -> str
+ """Return the host for the given WSGI environment. Yanked from Werkzeug."""
+ if environ.get("HTTP_HOST"):
+ rv = environ["HTTP_HOST"]
+ if environ["wsgi.url_scheme"] == "http" and rv.endswith(":80"):
+ rv = rv[:-3]
+ elif environ["wsgi.url_scheme"] == "https" and rv.endswith(":443"):
+ rv = rv[:-4]
+ elif environ.get("SERVER_NAME"):
+ rv = environ["SERVER_NAME"]
+ if (environ["wsgi.url_scheme"], environ["SERVER_PORT"]) not in (
+ ("https", "443"),
+ ("http", "80"),
+ ):
+ rv += ":" + environ["SERVER_PORT"]
+ else:
+ # In spite of the WSGI spec, SERVER_NAME might not be present.
+ rv = "unknown"
+
+ return rv
+
+
+def get_request_url(environ):
+ # type: (Dict[str, str]) -> str
+ """Return the absolute URL without query string for the given WSGI
+ environment."""
+ return "%s://%s/%s" % (
+ environ.get("wsgi.url_scheme"),
+ get_host(environ),
+ wsgi_decoding_dance(environ.get("PATH_INFO") or "").lstrip("/"),
+ )
+
+
+class SentryWsgiMiddleware(object):
+ __slots__ = ("app",)
+
+ def __init__(self, app):
+ # type: (Callable[[Dict[str, str], Callable[..., Any]], Any]) -> None
+ self.app = app
+
+ def __call__(self, environ, start_response):
+ # type: (Dict[str, str], Callable[..., Any]) -> _ScopedResponse
+ if _wsgi_middleware_applied.get(False):
+ return self.app(environ, start_response)
+
+ _wsgi_middleware_applied.set(True)
+ try:
+ hub = Hub(Hub.current)
+ with auto_session_tracking(hub):
+ with hub:
+ with capture_internal_exceptions():
+ with hub.configure_scope() as scope:
+ scope.clear_breadcrumbs()
+ scope._name = "wsgi"
+ scope.add_event_processor(
+ _make_wsgi_event_processor(environ)
+ )
+
+ span = Span.continue_from_environ(environ)
+ span.op = "http.server"
+ span.transaction = "generic WSGI request"
+
+ with hub.start_span(span) as span:
+ try:
+ rv = self.app(
+ environ,
+ functools.partial(
+ _sentry_start_response, start_response, span
+ ),
+ )
+ except BaseException:
+ reraise(*_capture_exception(hub))
+ finally:
+ _wsgi_middleware_applied.set(False)
+
+ return _ScopedResponse(hub, rv)
+
+
+def _sentry_start_response(
+ old_start_response, # type: StartResponse
+ span, # type: Span
+ status, # type: str
+ response_headers, # type: WsgiResponseHeaders
+ exc_info=None, # type: Optional[WsgiExcInfo]
+):
+ # type: (...) -> WsgiResponseIter
+ with capture_internal_exceptions():
+ status_int = int(status.split(" ", 1)[0])
+ span.set_http_status(status_int)
+
+ if exc_info is None:
+ # The Django Rest Framework WSGI test client, and likely other
+ # (incorrect) implementations, cannot deal with the exc_info argument
+ # if one is present. Avoid providing a third argument if not necessary.
+ return old_start_response(status, response_headers)
+ else:
+ return old_start_response(status, response_headers, exc_info)
+
+
+def _get_environ(environ):
+ # type: (Dict[str, str]) -> Iterator[Tuple[str, str]]
+ """
+ Yields the whitelisted environment variables as (key, value) pairs.
+ """
+ keys = ["SERVER_NAME", "SERVER_PORT"]
+ if _should_send_default_pii():
+ # Including the remote address makes debugging of proxy setups easier; the
+ # proxy headers themselves are part of the HTTP headers.
+ keys += ["REMOTE_ADDR"]
+
+ for key in keys:
+ if key in environ:
+ yield key, environ[key]
+
+
+# `get_headers` comes from `werkzeug.datastructures.EnvironHeaders`
+#
+# We need this function because Django does not give us a "pure" HTTP header
+# dict, so we might as well use it for all WSGI integrations.
+def _get_headers(environ):
+ # type: (Dict[str, str]) -> Iterator[Tuple[str, str]]
+ """
+ Returns only proper HTTP headers.
+ """
+ for key, value in iteritems(environ):
+ key = str(key)
+ if key.startswith("HTTP_") and key not in (
+ "HTTP_CONTENT_TYPE",
+ "HTTP_CONTENT_LENGTH",
+ ):
+ yield key[5:].replace("_", "-").title(), value
+ elif key in ("CONTENT_TYPE", "CONTENT_LENGTH"):
+ yield key.replace("_", "-").title(), value
+
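+# Illustrative behaviour of `_get_headers` (hypothetical environ, doctest-style):
+#
+# >>> dict(_get_headers({
+# ...     "HTTP_ACCEPT": "text/html",
+# ...     "CONTENT_TYPE": "application/json",
+# ...     "HTTP_CONTENT_TYPE": "ignored",
+# ...     "wsgi.url_scheme": "https",
+# ... }))
+# {'Accept': 'text/html', 'Content-Type': 'application/json'}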
+
+def get_client_ip(environ):
+ # type: (Dict[str, str]) -> Optional[Any]
+ """
+ Infer the user's IP address from various headers. This cannot be used in
+ security-sensitive situations since the value may be forged by a client,
+ but it's good enough for the event payload.
+ """
+ try:
+ return environ["HTTP_X_FORWARDED_FOR"].split(",")[0].strip()
+ except (KeyError, IndexError):
+ pass
+
+ try:
+ return environ["HTTP_X_REAL_IP"]
+ except KeyError:
+ pass
+
+ return environ.get("REMOTE_ADDR")
+
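+# Illustrative fallback order of `get_client_ip` (hypothetical addresses,
+# doctest-style):
+#
+# >>> get_client_ip({"HTTP_X_FORWARDED_FOR": "203.0.113.7, 10.0.0.1"})
+# '203.0.113.7'
+# >>> get_client_ip({"HTTP_X_REAL_IP": "203.0.113.7"})
+# '203.0.113.7'
+# >>> get_client_ip({"REMOTE_ADDR": "192.0.2.9"})
+# '192.0.2.9'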
+
+def _capture_exception(hub):
+ # type: (Hub) -> ExcInfo
+ exc_info = sys.exc_info()
+
+ # Check the client here as it might have been unset while streaming the response
+ if hub.client is not None:
+ e = exc_info[1]
+
+ # SystemExit(0) is the only uncaught exception that represents expected behavior
+ should_skip_capture = isinstance(e, SystemExit) and e.code in (0, None)
+ if not should_skip_capture:
+ event, hint = event_from_exception(
+ exc_info,
+ client_options=hub.client.options,
+ mechanism={"type": "wsgi", "handled": False},
+ )
+ hub.capture_event(event, hint=hint)
+
+ return exc_info
+
+
+class _ScopedResponse(object):
+ __slots__ = ("_response", "_hub")
+
+ def __init__(self, hub, response):
+ # type: (Hub, Iterator[bytes]) -> None
+ self._hub = hub
+ self._response = response
+
+ def __iter__(self):
+ # type: () -> Iterator[bytes]
+ iterator = iter(self._response)
+
+ while True:
+ with self._hub:
+ try:
+ chunk = next(iterator)
+ except StopIteration:
+ break
+ except BaseException:
+ reraise(*_capture_exception(self._hub))
+
+ yield chunk
+
+ def close(self):
+ # type: () -> None
+ with self._hub:
+ try:
+ self._response.close() # type: ignore
+ except AttributeError:
+ pass
+ except BaseException:
+ reraise(*_capture_exception(self._hub))
+
+
+def _make_wsgi_event_processor(environ):
+ # type: (Dict[str, str]) -> EventProcessor
+ # It's a bit unfortunate that we have to extract and parse the request data
+ # from the environ so eagerly, but there are a few good reasons for this.
+ #
+ # We might be in a situation where the scope/hub never gets torn down
+ # properly. In that case we will have an unnecessary strong reference to
+ # all objects in the environ (some of which may take a lot of memory) when
+ # we're really just interested in a few of them.
+ #
+ # Keeping the environment around for longer than the request lifecycle is
+ # also not necessarily something uWSGI can deal with:
+ # https://github.com/unbit/uwsgi/issues/1950
+
+ client_ip = get_client_ip(environ)
+ request_url = get_request_url(environ)
+ query_string = environ.get("QUERY_STRING")
+ method = environ.get("REQUEST_METHOD")
+ env = dict(_get_environ(environ))
+ headers = _filter_headers(dict(_get_headers(environ)))
+
+ def event_processor(event, hint):
+ # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+ with capture_internal_exceptions():
+ # if the code below fails halfway through we at least have some data
+ request_info = event.setdefault("request", {})
+
+ if _should_send_default_pii():
+ user_info = event.setdefault("user", {})
+ if client_ip:
+ user_info.setdefault("ip_address", client_ip)
+
+ request_info["url"] = request_url
+ request_info["query_string"] = query_string
+ request_info["method"] = method
+ request_info["env"] = env
+ request_info["headers"] = headers
+
+ return event
+
+ return event_processor
diff --git a/third_party/python/sentry_sdk/sentry_sdk/py.typed b/third_party/python/sentry_sdk/sentry_sdk/py.typed
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/py.typed
diff --git a/third_party/python/sentry_sdk/sentry_sdk/scope.py b/third_party/python/sentry_sdk/sentry_sdk/scope.py
new file mode 100644
index 0000000000..407af3a2cb
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/scope.py
@@ -0,0 +1,408 @@
+from copy import copy
+from collections import deque
+from functools import wraps
+from itertools import chain
+
+from sentry_sdk.utils import logger, capture_internal_exceptions
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Any
+ from typing import Dict
+ from typing import Optional
+ from typing import Deque
+ from typing import List
+ from typing import Callable
+ from typing import TypeVar
+
+ from sentry_sdk._types import (
+ Breadcrumb,
+ Event,
+ EventProcessor,
+ ErrorProcessor,
+ ExcInfo,
+ Hint,
+ Type,
+ )
+
+ from sentry_sdk.tracing import Span
+ from sentry_sdk.sessions import Session
+
+ F = TypeVar("F", bound=Callable[..., Any])
+ T = TypeVar("T")
+
+
+global_event_processors = [] # type: List[EventProcessor]
+
+
+def add_global_event_processor(processor):
+ # type: (EventProcessor) -> None
+ global_event_processors.append(processor)
+
+
+def _attr_setter(fn):
+ # type: (Any) -> Any
+ return property(fset=fn, doc=fn.__doc__)
+
+
+def _disable_capture(fn):
+ # type: (F) -> F
+ @wraps(fn)
+ def wrapper(self, *args, **kwargs):
+ # type: (Any, *Dict[str, Any], **Any) -> Any
+ if not self._should_capture:
+ return
+ try:
+ self._should_capture = False
+ return fn(self, *args, **kwargs)
+ finally:
+ self._should_capture = True
+
+ return wrapper # type: ignore
+
+
+class Scope(object):
+ """The scope holds extra information that should be sent with all
+ events that belong to it.
+ """
+
+ # NOTE: Even though it should not happen, the scope needs to not crash when
+ # accessed by multiple threads. It's fine if it's full of races, but those
+ # races should never make the user application crash.
+ #
+ # The same needs to hold for any accesses of the scope the SDK makes.
+
+ __slots__ = (
+ "_level",
+ "_name",
+ "_fingerprint",
+ "_transaction",
+ "_user",
+ "_tags",
+ "_contexts",
+ "_extras",
+ "_breadcrumbs",
+ "_event_processors",
+ "_error_processors",
+ "_should_capture",
+ "_span",
+ "_session",
+ "_force_auto_session_tracking",
+ )
+
+ def __init__(self):
+ # type: () -> None
+ self._event_processors = [] # type: List[EventProcessor]
+ self._error_processors = [] # type: List[ErrorProcessor]
+
+ self._name = None # type: Optional[str]
+ self.clear()
+
+ def clear(self):
+ # type: () -> None
+ """Clears the entire scope."""
+ self._level = None # type: Optional[str]
+ self._fingerprint = None # type: Optional[List[str]]
+ self._transaction = None # type: Optional[str]
+ self._user = None # type: Optional[Dict[str, Any]]
+
+ self._tags = {} # type: Dict[str, Any]
+ self._contexts = {} # type: Dict[str, Dict[str, Any]]
+ self._extras = {} # type: Dict[str, Any]
+
+ self.clear_breadcrumbs()
+ self._should_capture = True
+
+ self._span = None # type: Optional[Span]
+ self._session = None # type: Optional[Session]
+ self._force_auto_session_tracking = None # type: Optional[bool]
+
+ @_attr_setter
+ def level(self, value):
+ # type: (Optional[str]) -> None
+ """When set this overrides the level. Deprecated in favor of set_level."""
+ self._level = value
+
+ def set_level(self, value):
+ # type: (Optional[str]) -> None
+ """Sets the level for the scope."""
+ self._level = value
+
+ @_attr_setter
+ def fingerprint(self, value):
+ # type: (Optional[List[str]]) -> None
+ """When set this overrides the default fingerprint."""
+ self._fingerprint = value
+
+ @_attr_setter
+ def transaction(self, value):
+ # type: (Optional[str]) -> None
+ """When set this forces a specific transaction name to be set."""
+ self._transaction = value
+ span = self._span
+ if span:
+ span.transaction = value
+
+ @_attr_setter
+ def user(self, value):
+ # type: (Dict[str, Any]) -> None
+ """When set a specific user is bound to the scope. Deprecated in favor of set_user."""
+ self.set_user(value)
+
+ def set_user(self, value):
+ # type: (Dict[str, Any]) -> None
+ """Sets a user for the scope."""
+ self._user = value
+ if self._session is not None:
+ self._session.update(user=value)
+
+ @property
+ def span(self):
+ # type: () -> Optional[Span]
+ """Get/set current tracing span."""
+ return self._span
+
+ @span.setter
+ def span(self, span):
+ # type: (Optional[Span]) -> None
+ self._span = span
+ if span is not None:
+ span_transaction = span.transaction
+ if span_transaction:
+ self._transaction = span_transaction
+
+ def set_tag(
+ self,
+ key, # type: str
+ value, # type: Any
+ ):
+ # type: (...) -> None
+ """Sets a tag for a key to a specific value."""
+ self._tags[key] = value
+
+ def remove_tag(
+ self, key # type: str
+ ):
+ # type: (...) -> None
+ """Removes a specific tag."""
+ self._tags.pop(key, None)
+
+ def set_context(
+ self,
+ key, # type: str
+ value, # type: Any
+ ):
+ # type: (...) -> None
+ """Binds a context at a certain key to a specific value."""
+ self._contexts[key] = value
+
+ def remove_context(
+ self, key # type: str
+ ):
+ # type: (...) -> None
+ """Removes a context."""
+ self._contexts.pop(key, None)
+
+ def set_extra(
+ self,
+ key, # type: str
+ value, # type: Any
+ ):
+ # type: (...) -> None
+ """Sets an extra key to a specific value."""
+ self._extras[key] = value
+
+ def remove_extra(
+ self, key # type: str
+ ):
+ # type: (...) -> None
+ """Removes a specific extra key."""
+ self._extras.pop(key, None)
+
+ def clear_breadcrumbs(self):
+ # type: () -> None
+ """Clears breadcrumb buffer."""
+ self._breadcrumbs = deque() # type: Deque[Breadcrumb]
+
+ def add_event_processor(
+ self, func # type: EventProcessor
+ ):
+ # type: (...) -> None
+ """Register a scope local event processor on the scope.
+
+ :param func: This function behaves like `before_send`.
+ """
+ if len(self._event_processors) > 20:
+ logger.warning(
+ "Too many event processors on scope! Clearing list to free up some memory: %r",
+ self._event_processors,
+ )
+ del self._event_processors[:]
+
+ self._event_processors.append(func)
+
+ def add_error_processor(
+ self,
+ func, # type: ErrorProcessor
+ cls=None, # type: Optional[Type[BaseException]]
+ ):
+ # type: (...) -> None
+ """Register a scope local error processor on the scope.
+
+ :param func: A callback that works similarly to an event processor but is invoked with the original exception info triple as the second argument.
+
+ :param cls: Optionally, only process exceptions of this type.
+ """
+ if cls is not None:
+ cls_ = cls # For mypy.
+ real_func = func
+
+ def func(event, exc_info):
+ # type: (Event, ExcInfo) -> Optional[Event]
+ try:
+ is_inst = isinstance(exc_info[1], cls_)
+ except Exception:
+ is_inst = False
+ if is_inst:
+ return real_func(event, exc_info)
+ return event
+
+ self._error_processors.append(func)
+
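+ # Illustrative usage sketch (hypothetical processor and scope object; not part
+ # of the upstream module): only events caused by a KeyError get the extra tag.
+ #
+ #     def tag_key_errors(event, exc_info):
+ #         event.setdefault("tags", {})["key_error"] = True
+ #         return event
+ #
+ #     scope.add_error_processor(tag_key_errors, KeyError)
+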
+ @_disable_capture
+ def apply_to_event(
+ self,
+ event, # type: Event
+ hint, # type: Hint
+ ):
+ # type: (...) -> Optional[Event]
+ """Applies the information contained on the scope to the given event."""
+
+ def _drop(event, cause, ty):
+ # type: (Dict[str, Any], Any, str) -> Optional[Any]
+ logger.info("%s (%s) dropped event (%s)", ty, cause, event)
+ return None
+
+ if self._level is not None:
+ event["level"] = self._level
+
+ if event.get("type") != "transaction":
+ event.setdefault("breadcrumbs", []).extend(self._breadcrumbs)
+
+ if event.get("user") is None and self._user is not None:
+ event["user"] = self._user
+
+ if event.get("transaction") is None and self._transaction is not None:
+ event["transaction"] = self._transaction
+
+ if event.get("fingerprint") is None and self._fingerprint is not None:
+ event["fingerprint"] = self._fingerprint
+
+ if self._extras:
+ event.setdefault("extra", {}).update(self._extras)
+
+ if self._tags:
+ event.setdefault("tags", {}).update(self._tags)
+
+ if self._contexts:
+ event.setdefault("contexts", {}).update(self._contexts)
+
+ if self._span is not None:
+ contexts = event.setdefault("contexts", {})
+ if not contexts.get("trace"):
+ contexts["trace"] = self._span.get_trace_context()
+
+ exc_info = hint.get("exc_info")
+ if exc_info is not None:
+ for error_processor in self._error_processors:
+ new_event = error_processor(event, exc_info)
+ if new_event is None:
+ return _drop(event, error_processor, "error processor")
+ event = new_event
+
+ for event_processor in chain(global_event_processors, self._event_processors):
+ new_event = event
+ with capture_internal_exceptions():
+ new_event = event_processor(event, hint)
+ if new_event is None:
+ return _drop(event, event_processor, "event processor")
+ event = new_event
+
+ return event
+
+ def update_from_scope(self, scope):
+ # type: (Scope) -> None
+ if scope._level is not None:
+ self._level = scope._level
+ if scope._fingerprint is not None:
+ self._fingerprint = scope._fingerprint
+ if scope._transaction is not None:
+ self._transaction = scope._transaction
+ if scope._user is not None:
+ self._user = scope._user
+ if scope._tags:
+ self._tags.update(scope._tags)
+ if scope._contexts:
+ self._contexts.update(scope._contexts)
+ if scope._extras:
+ self._extras.update(scope._extras)
+ if scope._breadcrumbs:
+ self._breadcrumbs.extend(scope._breadcrumbs)
+ if scope._span:
+ self._span = scope._span
+
+ def update_from_kwargs(
+ self,
+ user=None, # type: Optional[Any]
+ level=None, # type: Optional[str]
+ extras=None, # type: Optional[Dict[str, Any]]
+ contexts=None, # type: Optional[Dict[str, Any]]
+ tags=None, # type: Optional[Dict[str, str]]
+ fingerprint=None, # type: Optional[List[str]]
+ ):
+ # type: (...) -> None
+ if level is not None:
+ self._level = level
+ if user is not None:
+ self._user = user
+ if extras is not None:
+ self._extras.update(extras)
+ if contexts is not None:
+ self._contexts.update(contexts)
+ if tags is not None:
+ self._tags.update(tags)
+ if fingerprint is not None:
+ self._fingerprint = fingerprint
+
+ def __copy__(self):
+ # type: () -> Scope
+ rv = object.__new__(self.__class__) # type: Scope
+
+ rv._level = self._level
+ rv._name = self._name
+ rv._fingerprint = self._fingerprint
+ rv._transaction = self._transaction
+ rv._user = self._user
+
+ rv._tags = dict(self._tags)
+ rv._contexts = dict(self._contexts)
+ rv._extras = dict(self._extras)
+
+ rv._breadcrumbs = copy(self._breadcrumbs)
+ rv._event_processors = list(self._event_processors)
+ rv._error_processors = list(self._error_processors)
+
+ rv._should_capture = self._should_capture
+ rv._span = self._span
+ rv._session = self._session
+ rv._force_auto_session_tracking = self._force_auto_session_tracking
+
+ return rv
+
+ def __repr__(self):
+ # type: () -> str
+ return "<%s id=%s name=%s>" % (
+ self.__class__.__name__,
+ hex(id(self)),
+ self._name,
+ )
diff --git a/third_party/python/sentry_sdk/sentry_sdk/serializer.py b/third_party/python/sentry_sdk/sentry_sdk/serializer.py
new file mode 100644
index 0000000000..3940947553
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/serializer.py
@@ -0,0 +1,336 @@
+import sys
+
+from datetime import datetime
+
+from sentry_sdk.utils import (
+ AnnotatedValue,
+ capture_internal_exception,
+ disable_capture_event,
+ safe_repr,
+ strip_string,
+ format_timestamp,
+)
+
+from sentry_sdk._compat import text_type, PY2, string_types, number_types, iteritems
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from types import TracebackType
+
+ from typing import Any
+ from typing import Dict
+ from typing import List
+ from typing import Optional
+ from typing import Callable
+ from typing import Union
+ from typing import ContextManager
+ from typing import Type
+
+ from sentry_sdk._types import NotImplementedType, Event
+
+ ReprProcessor = Callable[[Any, Dict[str, Any]], Union[NotImplementedType, str]]
+ Segment = Union[str, int]
+
+
+if PY2:
+ # Importing ABCs from collections is deprecated, and will stop working in 3.8
+ # https://github.com/python/cpython/blob/master/Lib/collections/__init__.py#L49
+ from collections import Mapping, Sequence
+
+ serializable_str_types = string_types
+
+else:
+ # New in 3.3
+ # https://docs.python.org/3/library/collections.abc.html
+ from collections.abc import Mapping, Sequence
+
+ # Bytes are technically not strings in Python 3, but we can serialize them
+ serializable_str_types = (str, bytes)
+
+MAX_DATABAG_DEPTH = 5
+MAX_DATABAG_BREADTH = 10
+CYCLE_MARKER = u"<cyclic>"
+
+
+global_repr_processors = [] # type: List[ReprProcessor]
+
+
+def add_global_repr_processor(processor):
+ # type: (ReprProcessor) -> None
+ global_repr_processors.append(processor)
+
+
+class Memo(object):
+ __slots__ = ("_ids", "_objs")
+
+ def __init__(self):
+ # type: () -> None
+ self._ids = {} # type: Dict[int, Any]
+ self._objs = [] # type: List[Any]
+
+ def memoize(self, obj):
+ # type: (Any) -> ContextManager[bool]
+ self._objs.append(obj)
+ return self
+
+ def __enter__(self):
+ # type: () -> bool
+ obj = self._objs[-1]
+ if id(obj) in self._ids:
+ return True
+ else:
+ self._ids[id(obj)] = obj
+ return False
+
+ def __exit__(
+ self,
+ ty, # type: Optional[Type[BaseException]]
+ value, # type: Optional[BaseException]
+ tb, # type: Optional[TracebackType]
+ ):
+ # type: (...) -> None
+ self._ids.pop(id(self._objs.pop()), None)
+
+
+def serialize(event, **kwargs):
+ # type: (Event, **Any) -> Event
+ memo = Memo()
+ path = [] # type: List[Segment]
+ meta_stack = [] # type: List[Dict[str, Any]]
+
+ def _annotate(**meta):
+ # type: (**Any) -> None
+ while len(meta_stack) <= len(path):
+ try:
+ segment = path[len(meta_stack) - 1]
+ node = meta_stack[-1].setdefault(text_type(segment), {})
+ except IndexError:
+ node = {}
+
+ meta_stack.append(node)
+
+ meta_stack[-1].setdefault("", {}).update(meta)
+
+ def _should_repr_strings():
+ # type: () -> Optional[bool]
+ """
+ By default non-serializable objects are going through
+ safe_repr(). For certain places in the event (local vars) we
+ want to repr() even things that are JSON-serializable to
+ make their type more apparent. For example, it's useful to
+ see the difference between a unicode-string and a bytestring
+ when viewing a stacktrace.
+
+ For container-types we still don't do anything different.
+ Generally we just try to make the Sentry UI present exactly
+ what a pretty-printed repr would look like.
+
+ :returns: `True` if we are somewhere in frame variables, and `False` if
+ we are in a position where we will never encounter frame variables
+ when recursing (for example, we're in `event.extra`). `None` if we
+ are not (yet) in frame variables, but might encounter them when
+ recursing (e.g. we're in `event.exception`)
+ """
+ try:
+ p0 = path[0]
+ if p0 == "stacktrace" and path[1] == "frames" and path[3] == "vars":
+ return True
+
+ if (
+ p0 in ("threads", "exception")
+ and path[1] == "values"
+ and path[3] == "stacktrace"
+ and path[4] == "frames"
+ and path[6] == "vars"
+ ):
+ return True
+ except IndexError:
+ return None
+
+ return False
+
+ def _is_databag():
+ # type: () -> Optional[bool]
+ """
+ A databag is any value that we need to trim.
+
+ :returns: Works like `_should_repr_strings()`. `True` for "yes",
+ `False` for "no", `None` for "maybe soon".
+ """
+ try:
+ rv = _should_repr_strings()
+ if rv in (True, None):
+ return rv
+
+ p0 = path[0]
+ if p0 == "request" and path[1] == "data":
+ return True
+
+ if p0 == "breadcrumbs":
+ path[1]  # deliberate: raise IndexError (-> "maybe soon") if there is no breadcrumb index yet
+ return True
+
+ if p0 == "extra":
+ return True
+
+ except IndexError:
+ return None
+
+ return False
+
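+ # Illustrative path checks for the two helpers above (hypothetical paths):
+ # with `path` equal to ["exception", "values", 0, "stacktrace", "frames", 0,
+ # "vars"], `_should_repr_strings()` returns True; with `path` equal to
+ # ["extra", "foo"], `_is_databag()` returns True while
+ # `_should_repr_strings()` returns False.
+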
+ def _serialize_node(
+ obj, # type: Any
+ is_databag=None, # type: Optional[bool]
+ should_repr_strings=None, # type: Optional[bool]
+ segment=None, # type: Optional[Segment]
+ remaining_breadth=None, # type: Optional[int]
+ remaining_depth=None, # type: Optional[int]
+ ):
+ # type: (...) -> Any
+ if segment is not None:
+ path.append(segment)
+
+ try:
+ with memo.memoize(obj) as result:
+ if result:
+ return CYCLE_MARKER
+
+ return _serialize_node_impl(
+ obj,
+ is_databag=is_databag,
+ should_repr_strings=should_repr_strings,
+ remaining_depth=remaining_depth,
+ remaining_breadth=remaining_breadth,
+ )
+ except BaseException:
+ capture_internal_exception(sys.exc_info())
+
+ if is_databag:
+ return u"<failed to serialize, use init(debug=True) to see error logs>"
+
+ return None
+ finally:
+ if segment is not None:
+ path.pop()
+ del meta_stack[len(path) + 1 :]
+
+ def _flatten_annotated(obj):
+ # type: (Any) -> Any
+ if isinstance(obj, AnnotatedValue):
+ _annotate(**obj.metadata)
+ obj = obj.value
+ return obj
+
+ def _serialize_node_impl(
+ obj, is_databag, should_repr_strings, remaining_depth, remaining_breadth
+ ):
+ # type: (Any, Optional[bool], Optional[bool], Optional[int], Optional[int]) -> Any
+ if should_repr_strings is None:
+ should_repr_strings = _should_repr_strings()
+
+ if is_databag is None:
+ is_databag = _is_databag()
+
+ if is_databag and remaining_depth is None:
+ remaining_depth = MAX_DATABAG_DEPTH
+ if is_databag and remaining_breadth is None:
+ remaining_breadth = MAX_DATABAG_BREADTH
+
+ obj = _flatten_annotated(obj)
+
+ if remaining_depth is not None and remaining_depth <= 0:
+ _annotate(rem=[["!limit", "x"]])
+ if is_databag:
+ return _flatten_annotated(strip_string(safe_repr(obj)))
+ return None
+
+ if is_databag and global_repr_processors:
+ hints = {"memo": memo, "remaining_depth": remaining_depth}
+ for processor in global_repr_processors:
+ result = processor(obj, hints)
+ if result is not NotImplemented:
+ return _flatten_annotated(result)
+
+ if obj is None or isinstance(obj, (bool, number_types)):
+ return obj if not should_repr_strings else safe_repr(obj)
+
+ elif isinstance(obj, datetime):
+ return (
+ text_type(format_timestamp(obj))
+ if not should_repr_strings
+ else safe_repr(obj)
+ )
+
+ elif isinstance(obj, Mapping):
+ # Create temporary copy here to avoid calling too much code that
+ # might mutate our dictionary while we're still iterating over it.
+ obj = dict(iteritems(obj))
+
+ rv_dict = {} # type: Dict[str, Any]
+ i = 0
+
+ for k, v in iteritems(obj):
+ if remaining_breadth is not None and i >= remaining_breadth:
+ _annotate(len=len(obj))
+ break
+
+ str_k = text_type(k)
+ v = _serialize_node(
+ v,
+ segment=str_k,
+ should_repr_strings=should_repr_strings,
+ is_databag=is_databag,
+ remaining_depth=remaining_depth - 1
+ if remaining_depth is not None
+ else None,
+ remaining_breadth=remaining_breadth,
+ )
+ rv_dict[str_k] = v
+ i += 1
+
+ return rv_dict
+
+ elif not isinstance(obj, serializable_str_types) and isinstance(obj, Sequence):
+ rv_list = []
+
+ for i, v in enumerate(obj):
+ if remaining_breadth is not None and i >= remaining_breadth:
+ _annotate(len=len(obj))
+ break
+
+ rv_list.append(
+ _serialize_node(
+ v,
+ segment=i,
+ should_repr_strings=should_repr_strings,
+ is_databag=is_databag,
+ remaining_depth=remaining_depth - 1
+ if remaining_depth is not None
+ else None,
+ remaining_breadth=remaining_breadth,
+ )
+ )
+
+ return rv_list
+
+ if should_repr_strings:
+ obj = safe_repr(obj)
+ else:
+ if isinstance(obj, bytes):
+ obj = obj.decode("utf-8", "replace")
+
+ if not isinstance(obj, string_types):
+ obj = safe_repr(obj)
+
+ return _flatten_annotated(strip_string(obj))
+
+ disable_capture_event.set(True)
+ try:
+ rv = _serialize_node(event, **kwargs)
+ if meta_stack and isinstance(rv, dict):
+ rv["_meta"] = meta_stack[0]
+
+ return rv
+ finally:
+ disable_capture_event.set(False)
diff --git a/third_party/python/sentry_sdk/sentry_sdk/sessions.py b/third_party/python/sentry_sdk/sentry_sdk/sessions.py
new file mode 100644
index 0000000000..f4f7137cc0
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/sessions.py
@@ -0,0 +1,249 @@
+import os
+import uuid
+import time
+from datetime import datetime
+from threading import Thread, Lock
+from contextlib import contextmanager
+
+from sentry_sdk._types import MYPY
+from sentry_sdk.utils import format_timestamp
+
+if MYPY:
+ import sentry_sdk
+
+ from typing import Optional
+ from typing import Union
+ from typing import Any
+ from typing import Dict
+ from typing import Generator
+
+ from sentry_sdk._types import SessionStatus
+
+
+def is_auto_session_tracking_enabled(hub=None):
+ # type: (Optional[sentry_sdk.Hub]) -> bool
+ """Utility function to find out if session tracking is enabled."""
+ if hub is None:
+ hub = sentry_sdk.Hub.current
+ should_track = hub.scope._force_auto_session_tracking
+ if should_track is None:
+ exp = hub.client.options["_experiments"] if hub.client else {}
+ should_track = exp.get("auto_session_tracking")
+ return should_track
+
+
+@contextmanager
+def auto_session_tracking(hub=None):
+ # type: (Optional[sentry_sdk.Hub]) -> Generator[None, None, None]
+ """Starts and stops a session automatically around a block."""
+ if hub is None:
+ hub = sentry_sdk.Hub.current
+ should_track = is_auto_session_tracking_enabled(hub)
+ if should_track:
+ hub.start_session()
+ try:
+ yield
+ finally:
+ if should_track:
+ hub.end_session()
+
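+# Illustrative usage sketch (hypothetical request handler; not part of the
+# upstream module):
+#
+#     with auto_session_tracking():
+#         handle_request()  # hypothetical work done while the session is open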
+
+def _make_uuid(
+ val, # type: Union[str, uuid.UUID]
+):
+ # type: (...) -> uuid.UUID
+ if isinstance(val, uuid.UUID):
+ return val
+ return uuid.UUID(val)
+
+
+TERMINAL_SESSION_STATES = ("exited", "abnormal", "crashed")
+
+
+class SessionFlusher(object):
+ def __init__(
+ self,
+ flush_func, # type: Any
+ flush_interval=10, # type: int
+ ):
+ # type: (...) -> None
+ self.flush_func = flush_func
+ self.flush_interval = flush_interval
+ self.pending = {} # type: Dict[str, Any]
+ self._thread = None # type: Optional[Thread]
+ self._thread_lock = Lock()
+ self._thread_for_pid = None # type: Optional[int]
+ self._running = True
+
+ def flush(self):
+ # type: (...) -> None
+ pending = self.pending
+ self.pending = {}
+ self.flush_func(list(pending.values()))
+
+ def _ensure_running(self):
+ # type: (...) -> None
+ if self._thread_for_pid == os.getpid() and self._thread is not None:
+ return None
+ with self._thread_lock:
+ if self._thread_for_pid == os.getpid() and self._thread is not None:
+ return None
+
+ def _thread():
+ # type: (...) -> None
+ while self._running:
+ time.sleep(self.flush_interval)
+ if self.pending and self._running:
+ self.flush()
+
+ thread = Thread(target=_thread)
+ thread.daemon = True
+ thread.start()
+ self._thread = thread
+ self._thread_for_pid = os.getpid()
+ return None
+
+ def add_session(
+ self, session # type: Session
+ ):
+ # type: (...) -> None
+ self.pending[session.sid.hex] = session.to_json()
+ self._ensure_running()
+
+ def kill(self):
+ # type: (...) -> None
+ self._running = False
+
+ def __del__(self):
+ # type: (...) -> None
+ self.kill()
+
+
+class Session(object):
+ def __init__(
+ self,
+ sid=None, # type: Optional[Union[str, uuid.UUID]]
+ did=None, # type: Optional[str]
+ timestamp=None, # type: Optional[datetime]
+ started=None, # type: Optional[datetime]
+ duration=None, # type: Optional[float]
+ status=None, # type: Optional[SessionStatus]
+ release=None, # type: Optional[str]
+ environment=None, # type: Optional[str]
+ user_agent=None, # type: Optional[str]
+ ip_address=None, # type: Optional[str]
+ errors=None, # type: Optional[int]
+ user=None, # type: Optional[Any]
+ ):
+ # type: (...) -> None
+ if sid is None:
+ sid = uuid.uuid4()
+ if started is None:
+ started = datetime.utcnow()
+ if status is None:
+ status = "ok"
+ self.status = status
+ self.did = None # type: Optional[str]
+ self.started = started
+ self.release = None # type: Optional[str]
+ self.environment = None # type: Optional[str]
+ self.duration = None # type: Optional[float]
+ self.user_agent = None # type: Optional[str]
+ self.ip_address = None # type: Optional[str]
+ self.errors = 0
+
+ self.update(
+ sid=sid,
+ did=did,
+ timestamp=timestamp,
+ duration=duration,
+ release=release,
+ environment=environment,
+ user_agent=user_agent,
+ ip_address=ip_address,
+ errors=errors,
+ user=user,
+ )
+
+ def update(
+ self,
+ sid=None, # type: Optional[Union[str, uuid.UUID]]
+ did=None, # type: Optional[str]
+ timestamp=None, # type: Optional[datetime]
+ duration=None, # type: Optional[float]
+ status=None, # type: Optional[SessionStatus]
+ release=None, # type: Optional[str]
+ environment=None, # type: Optional[str]
+ user_agent=None, # type: Optional[str]
+ ip_address=None, # type: Optional[str]
+ errors=None, # type: Optional[int]
+ user=None, # type: Optional[Any]
+ ):
+ # type: (...) -> None
+ # If a user is supplied we pull some data from it
+ if user:
+ if ip_address is None:
+ ip_address = user.get("ip_address")
+ if did is None:
+ did = user.get("id") or user.get("email") or user.get("username")
+
+ if sid is not None:
+ self.sid = _make_uuid(sid)
+ if did is not None:
+ self.did = str(did)
+ if timestamp is None:
+ timestamp = datetime.utcnow()
+ self.timestamp = timestamp
+ if duration is not None:
+ self.duration = duration
+ if release is not None:
+ self.release = release
+ if environment is not None:
+ self.environment = environment
+ if ip_address is not None:
+ self.ip_address = ip_address
+ if user_agent is not None:
+ self.user_agent = user_agent
+ if errors is not None:
+ self.errors = errors
+
+ if status is not None:
+ self.status = status
+
+ def close(
+ self, status=None # type: Optional[SessionStatus]
+ ):
+ # type: (...) -> Any
+ if status is None and self.status == "ok":
+ status = "exited"
+ if status is not None:
+ self.update(status=status)
+
+ def to_json(self):
+ # type: (...) -> Any
+ rv = {
+ "sid": str(self.sid),
+ "init": True,
+ "started": format_timestamp(self.started),
+ "timestamp": format_timestamp(self.timestamp),
+ "status": self.status,
+ } # type: Dict[str, Any]
+ if self.errors:
+ rv["errors"] = self.errors
+ if self.did is not None:
+ rv["did"] = self.did
+ if self.duration is not None:
+ rv["duration"] = self.duration
+
+ attrs = {}
+ if self.release is not None:
+ attrs["release"] = self.release
+ if self.environment is not None:
+ attrs["environment"] = self.environment
+ if self.ip_address is not None:
+ attrs["ip_address"] = self.ip_address
+ if self.user_agent is not None:
+ attrs["user_agent"] = self.user_agent
+ if attrs:
+ rv["attrs"] = attrs
+ return rv
diff --git a/third_party/python/sentry_sdk/sentry_sdk/tracing.py b/third_party/python/sentry_sdk/sentry_sdk/tracing.py
new file mode 100644
index 0000000000..9293365b83
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/tracing.py
@@ -0,0 +1,498 @@
+import re
+import uuid
+import contextlib
+import time
+
+from datetime import datetime, timedelta
+
+import sentry_sdk
+
+from sentry_sdk.utils import capture_internal_exceptions, logger, to_string
+from sentry_sdk._compat import PY2
+from sentry_sdk._types import MYPY
+
+if PY2:
+ from collections import Mapping
+else:
+ from collections.abc import Mapping
+
+if MYPY:
+ import typing
+
+ from typing import Generator
+ from typing import Optional
+ from typing import Any
+ from typing import Dict
+ from typing import List
+ from typing import Tuple
+
+_traceparent_header_format_re = re.compile(
+ "^[ \t]*" # whitespace
+ "([0-9a-f]{32})?" # trace_id
+ "-?([0-9a-f]{16})?" # span_id
+ "-?([01])?" # sampled
+ "[ \t]*$" # whitespace
+)
+
+
+class EnvironHeaders(Mapping): # type: ignore
+ def __init__(
+ self,
+ environ, # type: typing.Mapping[str, str]
+ prefix="HTTP_", # type: str
+ ):
+ # type: (...) -> None
+ self.environ = environ
+ self.prefix = prefix
+
+ def __getitem__(self, key):
+ # type: (str) -> Optional[Any]
+ return self.environ[self.prefix + key.replace("-", "_").upper()]
+
+ def __len__(self):
+ # type: () -> int
+ return sum(1 for _ in iter(self))
+
+ def __iter__(self):
+ # type: () -> Generator[str, None, None]
+ for k in self.environ:
+ if not isinstance(k, str):
+ continue
+
+ k = k.replace("-", "_").upper()
+ if not k.startswith(self.prefix):
+ continue
+
+ yield k[len(self.prefix) :]
+
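+# Illustrative lookup (hypothetical environ, doctest-style): header names are
+# mapped back to their CGI-style keys.
+#
+# >>> EnvironHeaders({"HTTP_SENTRY_TRACE": "trace-span-1"})["sentry-trace"]
+# 'trace-span-1'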
+
+class _SpanRecorder(object):
+ __slots__ = ("maxlen", "finished_spans", "open_span_count")
+
+ def __init__(self, maxlen):
+ # type: (int) -> None
+ self.maxlen = maxlen
+ self.open_span_count = 0 # type: int
+ self.finished_spans = [] # type: List[Span]
+
+ def start_span(self, span):
+ # type: (Span) -> None
+
+ # This is just so that we don't run out of memory while recording a lot
+ # of spans. At some point we just stop and flush out the start of the
+ # trace tree (i.e. the first n spans with the smallest
+ # start_timestamp).
+ self.open_span_count += 1
+ if self.open_span_count > self.maxlen:
+ span._span_recorder = None
+
+ def finish_span(self, span):
+ # type: (Span) -> None
+ self.finished_spans.append(span)
+
+
+class Span(object):
+ __slots__ = (
+ "trace_id",
+ "span_id",
+ "parent_span_id",
+ "same_process_as_parent",
+ "sampled",
+ "transaction",
+ "op",
+ "description",
+ "start_timestamp",
+ "_start_timestamp_monotonic",
+ "status",
+ "timestamp",
+ "_tags",
+ "_data",
+ "_span_recorder",
+ "hub",
+ "_context_manager_state",
+ )
+
+ def __init__(
+ self,
+ trace_id=None, # type: Optional[str]
+ span_id=None, # type: Optional[str]
+ parent_span_id=None, # type: Optional[str]
+ same_process_as_parent=True, # type: bool
+ sampled=None, # type: Optional[bool]
+ transaction=None, # type: Optional[str]
+ op=None, # type: Optional[str]
+ description=None, # type: Optional[str]
+ hub=None, # type: Optional[sentry_sdk.Hub]
+ status=None, # type: Optional[str]
+ ):
+ # type: (...) -> None
+ self.trace_id = trace_id or uuid.uuid4().hex
+ self.span_id = span_id or uuid.uuid4().hex[16:]
+ self.parent_span_id = parent_span_id
+ self.same_process_as_parent = same_process_as_parent
+ self.sampled = sampled
+ self.transaction = transaction
+ self.op = op
+ self.description = description
+ self.status = status
+ self.hub = hub
+ self._tags = {} # type: Dict[str, str]
+ self._data = {} # type: Dict[str, Any]
+ self.start_timestamp = datetime.utcnow()
+ try:
+ # TODO: For Python 3.7+, we could use a clock with ns resolution:
+ # self._start_timestamp_monotonic = time.perf_counter_ns()
+
+ # Python 3.3+
+ self._start_timestamp_monotonic = time.perf_counter()
+ except AttributeError:
+ pass
+
+ #: End timestamp of span
+ self.timestamp = None # type: Optional[datetime]
+
+ self._span_recorder = None # type: Optional[_SpanRecorder]
+
+ def init_finished_spans(self, maxlen):
+ # type: (int) -> None
+ if self._span_recorder is None:
+ self._span_recorder = _SpanRecorder(maxlen)
+ self._span_recorder.start_span(self)
+
+ def __repr__(self):
+ # type: () -> str
+ return (
+ "<%s(transaction=%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r)>"
+ % (
+ self.__class__.__name__,
+ self.transaction,
+ self.trace_id,
+ self.span_id,
+ self.parent_span_id,
+ self.sampled,
+ )
+ )
+
+ def __enter__(self):
+ # type: () -> Span
+ hub = self.hub or sentry_sdk.Hub.current
+
+ _, scope = hub._stack[-1]
+ old_span = scope.span
+ scope.span = self
+ self._context_manager_state = (hub, scope, old_span)
+ return self
+
+ def __exit__(self, ty, value, tb):
+ # type: (Optional[Any], Optional[Any], Optional[Any]) -> None
+ if value is not None:
+ self.set_status("internal_error")
+
+ hub, scope, old_span = self._context_manager_state
+ del self._context_manager_state
+
+ self.finish(hub)
+ scope.span = old_span
+
+ def new_span(self, **kwargs):
+ # type: (**Any) -> Span
+ rv = type(self)(
+ trace_id=self.trace_id,
+ span_id=None,
+ parent_span_id=self.span_id,
+ sampled=self.sampled,
+ **kwargs
+ )
+
+ rv._span_recorder = self._span_recorder
+ return rv
+
+ @classmethod
+ def continue_from_environ(cls, environ):
+ # type: (typing.Mapping[str, str]) -> Span
+ return cls.continue_from_headers(EnvironHeaders(environ))
+
+ @classmethod
+ def continue_from_headers(cls, headers):
+ # type: (typing.Mapping[str, str]) -> Span
+ parent = cls.from_traceparent(headers.get("sentry-trace"))
+ if parent is None:
+ return cls()
+ parent.same_process_as_parent = False
+ return parent
+
+ def iter_headers(self):
+ # type: () -> Generator[Tuple[str, str], None, None]
+ yield "sentry-trace", self.to_traceparent()
+
+ @classmethod
+ def from_traceparent(cls, traceparent):
+ # type: (Optional[str]) -> Optional[Span]
+ if not traceparent:
+ return None
+
+ if traceparent.startswith("00-") and traceparent.endswith("-00"):
+ traceparent = traceparent[3:-3]
+
+ match = _traceparent_header_format_re.match(str(traceparent))
+ if match is None:
+ return None
+
+ trace_id, span_id, sampled_str = match.groups()
+
+ if trace_id is not None:
+ trace_id = "{:032x}".format(int(trace_id, 16))
+ if span_id is not None:
+ span_id = "{:016x}".format(int(span_id, 16))
+
+ if sampled_str:
+ sampled = sampled_str != "0" # type: Optional[bool]
+ else:
+ sampled = None
+
+ return cls(trace_id=trace_id, parent_span_id=span_id, sampled=sampled)
+
+ def to_traceparent(self):
+ # type: () -> str
+ sampled = ""
+ if self.sampled is True:
+ sampled = "1"
+ if self.sampled is False:
+ sampled = "0"
+ return "%s-%s-%s" % (self.trace_id, self.span_id, sampled)
+
+ def to_legacy_traceparent(self):
+ # type: () -> str
+ return "00-%s-%s-00" % (self.trace_id, self.span_id)
+
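+ # Illustrative round trip for the traceparent helpers above (hypothetical
+ # ids, doctest-style):
+ #
+ # >>> span = Span(trace_id="a" * 32, span_id="b" * 16, sampled=True)
+ # >>> span.to_traceparent() == "a" * 32 + "-" + "b" * 16 + "-1"
+ # True
+ # >>> Span.from_traceparent(span.to_traceparent()).sampled
+ # True
+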
+ def set_tag(self, key, value):
+ # type: (str, Any) -> None
+ self._tags[key] = value
+
+ def set_data(self, key, value):
+ # type: (str, Any) -> None
+ self._data[key] = value
+
+ def set_status(self, value):
+ # type: (str) -> None
+ self.status = value
+
+ def set_http_status(self, http_status):
+ # type: (int) -> None
+ self.set_tag("http.status_code", http_status)
+
+ if http_status < 400:
+ self.set_status("ok")
+ elif 400 <= http_status < 500:
+ if http_status == 403:
+ self.set_status("permission_denied")
+ elif http_status == 404:
+ self.set_status("not_found")
+ elif http_status == 429:
+ self.set_status("resource_exhausted")
+ elif http_status == 413:
+ self.set_status("failed_precondition")
+ elif http_status == 401:
+ self.set_status("unauthenticated")
+ elif http_status == 409:
+ self.set_status("already_exists")
+ else:
+ self.set_status("invalid_argument")
+ elif 500 <= http_status < 600:
+ if http_status == 504:
+ self.set_status("deadline_exceeded")
+ elif http_status == 501:
+ self.set_status("unimplemented")
+ elif http_status == 503:
+ self.set_status("unavailable")
+ else:
+ self.set_status("internal_error")
+ else:
+ self.set_status("unknown_error")
+
+ def is_success(self):
+ # type: () -> bool
+ return self.status == "ok"
+
+ def finish(self, hub=None):
+ # type: (Optional[sentry_sdk.Hub]) -> Optional[str]
+ hub = hub or self.hub or sentry_sdk.Hub.current
+
+ if self.timestamp is not None:
+ # This transaction is already finished, so we should not flush it again.
+ return None
+
+ try:
+ duration_seconds = time.perf_counter() - self._start_timestamp_monotonic
+ self.timestamp = self.start_timestamp + timedelta(seconds=duration_seconds)
+ except AttributeError:
+ self.timestamp = datetime.utcnow()
+
+ _maybe_create_breadcrumbs_from_span(hub, self)
+
+ if self._span_recorder is None:
+ return None
+
+ self._span_recorder.finish_span(self)
+
+ if self.transaction is None:
+ # If this has no transaction set we assume there's a parent
+ # transaction for this span that would be flushed out eventually.
+ return None
+
+ client = hub.client
+
+ if client is None:
+ # We have no client and therefore nowhere to send this transaction
+ # event.
+ return None
+
+ if not self.sampled:
+ # At this point a `sampled = None` should have already been
+ # resolved to a concrete decision. If `sampled` is `None`, it's
+ # likely that somebody used `with sentry_sdk.Hub.start_span(..)` on a
+ # non-transaction span and later decided to make it a transaction.
+ if self.sampled is None:
+ logger.warning("Discarding transaction Span without sampling decision")
+
+ return None
+
+ return hub.capture_event(
+ {
+ "type": "transaction",
+ "transaction": self.transaction,
+ "contexts": {"trace": self.get_trace_context()},
+ "tags": self._tags,
+ "timestamp": self.timestamp,
+ "start_timestamp": self.start_timestamp,
+ "spans": [
+ s.to_json(client)
+ for s in self._span_recorder.finished_spans
+ if s is not self
+ ],
+ }
+ )
+
+ def to_json(self, client):
+ # type: (Optional[sentry_sdk.Client]) -> Dict[str, Any]
+ rv = {
+ "trace_id": self.trace_id,
+ "span_id": self.span_id,
+ "parent_span_id": self.parent_span_id,
+ "same_process_as_parent": self.same_process_as_parent,
+ "op": self.op,
+ "description": self.description,
+ "start_timestamp": self.start_timestamp,
+ "timestamp": self.timestamp,
+ } # type: Dict[str, Any]
+
+ transaction = self.transaction
+ if transaction:
+ rv["transaction"] = transaction
+
+ if self.status:
+ self._tags["status"] = self.status
+
+ tags = self._tags
+ if tags:
+ rv["tags"] = tags
+
+ data = self._data
+ if data:
+ rv["data"] = data
+
+ return rv
+
+ def get_trace_context(self):
+ # type: () -> Any
+ rv = {
+ "trace_id": self.trace_id,
+ "span_id": self.span_id,
+ "parent_span_id": self.parent_span_id,
+ "op": self.op,
+ "description": self.description,
+ }
+ if self.status:
+ rv["status"] = self.status
+
+ return rv
+
+
+def _format_sql(cursor, sql):
+ # type: (Any, str) -> Optional[str]
+
+ real_sql = None
+
+ # If we're using psycopg2, it could be that we're
+ # looking at a query that uses Composed objects. Use psycopg2's mogrify
+ # function to format the query. We lose per-parameter trimming but gain
+ # accuracy in formatting.
+ try:
+ if hasattr(cursor, "mogrify"):
+ real_sql = cursor.mogrify(sql)
+ if isinstance(real_sql, bytes):
+ real_sql = real_sql.decode(cursor.connection.encoding)
+ except Exception:
+ real_sql = None
+
+ return real_sql or to_string(sql)
+
+
+@contextlib.contextmanager
+def record_sql_queries(
+ hub, # type: sentry_sdk.Hub
+ cursor, # type: Any
+ query, # type: Any
+ params_list, # type: Any
+ paramstyle, # type: Optional[str]
+ executemany, # type: bool
+):
+ # type: (...) -> Generator[Span, None, None]
+
+ # TODO: Bring back capturing of params by default
+ if hub.client and hub.client.options["_experiments"].get(
+ "record_sql_params", False
+ ):
+ if not params_list or params_list == [None]:
+ params_list = None
+
+ if paramstyle == "pyformat":
+ paramstyle = "format"
+ else:
+ params_list = None
+ paramstyle = None
+
+ query = _format_sql(cursor, query)
+
+ data = {}
+ if params_list is not None:
+ data["db.params"] = params_list
+ if paramstyle is not None:
+ data["db.paramstyle"] = paramstyle
+ if executemany:
+ data["db.executemany"] = True
+
+ with capture_internal_exceptions():
+ hub.add_breadcrumb(message=query, category="query", data=data)
+
+ with hub.start_span(op="db", description=query) as span:
+ for k, v in data.items():
+ span.set_data(k, v)
+ yield span
+
+
+def _maybe_create_breadcrumbs_from_span(hub, span):
+ # type: (sentry_sdk.Hub, Span) -> None
+ if span.op == "redis":
+ hub.add_breadcrumb(
+ message=span.description, type="redis", category="redis", data=span._tags
+ )
+ elif span.op == "http":
+ hub.add_breadcrumb(type="http", category="httplib", data=span._data)
+ elif span.op == "subprocess":
+ hub.add_breadcrumb(
+ type="subprocess",
+ category="subprocess",
+ message=span.description,
+ data=span._data,
+ )
diff --git a/third_party/python/sentry_sdk/sentry_sdk/transport.py b/third_party/python/sentry_sdk/sentry_sdk/transport.py
new file mode 100644
index 0000000000..60ab611c54
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/transport.py
@@ -0,0 +1,365 @@
+from __future__ import print_function
+
+import json
+import io
+import urllib3 # type: ignore
+import certifi
+import gzip
+
+from datetime import datetime, timedelta
+
+from sentry_sdk.utils import Dsn, logger, capture_internal_exceptions
+from sentry_sdk.worker import BackgroundWorker
+from sentry_sdk.envelope import Envelope, get_event_data_category
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from typing import Type
+ from typing import Any
+ from typing import Optional
+ from typing import Dict
+ from typing import Union
+ from typing import Callable
+ from urllib3.poolmanager import PoolManager # type: ignore
+ from urllib3.poolmanager import ProxyManager
+
+ from sentry_sdk._types import Event
+
+try:
+ from urllib.request import getproxies
+except ImportError:
+ from urllib import getproxies # type: ignore
+
+
+class Transport(object):
+ """Baseclass for all transports.
+
+ A transport is used to send an event to sentry.
+ """
+
+ parsed_dsn = None # type: Optional[Dsn]
+
+ def __init__(
+ self, options=None # type: Optional[Dict[str, Any]]
+ ):
+ # type: (...) -> None
+ self.options = options
+ if options and options["dsn"] is not None and options["dsn"]:
+ self.parsed_dsn = Dsn(options["dsn"])
+ else:
+ self.parsed_dsn = None
+
+ def capture_event(
+ self, event # type: Event
+ ):
+ # type: (...) -> None
+ """This gets invoked with the event dictionary when an event should
+ be sent to sentry.
+ """
+ raise NotImplementedError()
+
+ def capture_envelope(
+ self, envelope # type: Envelope
+ ):
+ # type: (...) -> None
+ """This gets invoked with an envelope when an event should
+ be sent to sentry. The default implementation invokes `capture_event`
+ if the envelope contains an event and ignores all other envelopes.
+ """
+ event = envelope.get_event()
+ if event is not None:
+ self.capture_event(event)
+ return None
+
+ def flush(
+ self,
+ timeout, # type: float
+ callback=None, # type: Optional[Any]
+ ):
+ # type: (...) -> None
+ """Wait `timeout` seconds for the current events to be sent out."""
+ pass
+
+ def kill(self):
+ # type: () -> None
+ """Forcefully kills the transport."""
+ pass
+
+ def __del__(self):
+ # type: () -> None
+ try:
+ self.kill()
+ except Exception:
+ pass
+
+
+class HttpTransport(Transport):
+ """The default HTTP transport."""
+
+ def __init__(
+ self, options # type: Dict[str, Any]
+ ):
+ # type: (...) -> None
+ from sentry_sdk.consts import VERSION
+
+ Transport.__init__(self, options)
+ assert self.parsed_dsn is not None
+ self._worker = BackgroundWorker()
+ self._auth = self.parsed_dsn.to_auth("sentry.python/%s" % VERSION)
+ self._disabled_until = {} # type: Dict[Any, datetime]
+ self._retry = urllib3.util.Retry()
+ self.options = options
+
+ self._pool = self._make_pool(
+ self.parsed_dsn,
+ http_proxy=options["http_proxy"],
+ https_proxy=options["https_proxy"],
+ ca_certs=options["ca_certs"],
+ )
+
+ from sentry_sdk import Hub
+
+ self.hub_cls = Hub
+
+ def _update_rate_limits(self, response):
+ # type: (urllib3.HTTPResponse) -> None
+
+ # Newer Sentry servers provide more rate limit insight via this header. We
+ # honor it regardless of the status code and update our internal rate limits.
+ header = response.headers.get("x-sentry-rate-limit")
+ if header:
+ for limit in header.split(","):
+ try:
+ retry_after, categories, _ = limit.strip().split(":", 2)
+ retry_after = datetime.utcnow() + timedelta(
+ seconds=int(retry_after)
+ )
+ for category in categories.split(";") or (None,):
+ self._disabled_until[category] = retry_after
+ except (LookupError, ValueError):
+ continue
+
+ # Older Sentry servers only communicate global rate limit hits via the
+ # Retry-After header on 429 responses. This header can also be emitted by
+ # newer servers if a proxy in front wants to slow things down globally.
+ elif response.status == 429:
+ self._disabled_until[None] = datetime.utcnow() + timedelta(
+ seconds=self._retry.get_retry_after(response) or 60
+ )
+
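+ # Illustrative shape of the "x-sentry-rate-limit" header handled above
+ # (hypothetical values): an entry such as "60:error;transaction:organization"
+ # disables the "error" and "transaction" categories for 60 seconds; the third
+ # colon-separated field is parsed but unused here.
+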
+ def _send_request(
+ self,
+ body, # type: bytes
+ headers, # type: Dict[str, str]
+ ):
+ # type: (...) -> None
+ headers.update(
+ {
+ "User-Agent": str(self._auth.client),
+ "X-Sentry-Auth": str(self._auth.to_header()),
+ }
+ )
+ response = self._pool.request(
+ "POST", str(self._auth.store_api_url), body=body, headers=headers
+ )
+
+ try:
+ self._update_rate_limits(response)
+
+ if response.status == 429:
+ # If we hit a 429, something was rate limited, but we have already
+ # acted on it in `self._update_rate_limits`.
+ pass
+
+ elif response.status >= 300 or response.status < 200:
+ logger.error(
+ "Unexpected status code: %s (body: %s)",
+ response.status,
+ response.data,
+ )
+ finally:
+ response.close()
+
+ def _check_disabled(self, category):
+ # type: (str) -> bool
+ def _disabled(bucket):
+ # type: (Any) -> bool
+ ts = self._disabled_until.get(bucket)
+ return ts is not None and ts > datetime.utcnow()
+
+ return _disabled(category) or _disabled(None)
+
+ def _send_event(
+ self, event # type: Event
+ ):
+ # type: (...) -> None
+ if self._check_disabled(get_event_data_category(event)):
+ return None
+
+ body = io.BytesIO()
+ with gzip.GzipFile(fileobj=body, mode="w") as f:
+ f.write(json.dumps(event, allow_nan=False).encode("utf-8"))
+
+ assert self.parsed_dsn is not None
+ logger.debug(
+ "Sending event, type:%s level:%s event_id:%s project:%s host:%s"
+ % (
+ event.get("type") or "null",
+ event.get("level") or "null",
+ event.get("event_id") or "null",
+ self.parsed_dsn.project_id,
+ self.parsed_dsn.host,
+ )
+ )
+ self._send_request(
+ body.getvalue(),
+ headers={"Content-Type": "application/json", "Content-Encoding": "gzip"},
+ )
+ return None
+
+ def _send_envelope(
+ self, envelope # type: Envelope
+ ):
+ # type: (...) -> None
+
+ # remove all items from the envelope which are over quota
+ envelope.items[:] = [
+ x for x in envelope.items if not self._check_disabled(x.data_category)
+ ]
+ if not envelope.items:
+ return None
+
+ body = io.BytesIO()
+ with gzip.GzipFile(fileobj=body, mode="w") as f:
+ envelope.serialize_into(f)
+
+ assert self.parsed_dsn is not None
+ logger.debug(
+ "Sending envelope [%s] project:%s host:%s",
+ envelope.description,
+ self.parsed_dsn.project_id,
+ self.parsed_dsn.host,
+ )
+ self._send_request(
+ body.getvalue(),
+ headers={
+ "Content-Type": "application/x-sentry-envelope",
+ "Content-Encoding": "gzip",
+ },
+ )
+ return None
+
+ def _get_pool_options(self, ca_certs):
+ # type: (Optional[Any]) -> Dict[str, Any]
+ return {
+ "num_pools": 2,
+ "cert_reqs": "CERT_REQUIRED",
+ "ca_certs": ca_certs or certifi.where(),
+ }
+
+ def _make_pool(
+ self,
+ parsed_dsn, # type: Dsn
+ http_proxy, # type: Optional[str]
+ https_proxy, # type: Optional[str]
+ ca_certs, # type: Optional[Any]
+ ):
+ # type: (...) -> Union[PoolManager, ProxyManager]
+ proxy = None
+
+ # try HTTPS first
+ if parsed_dsn.scheme == "https" and (https_proxy != ""):
+ proxy = https_proxy or getproxies().get("https")
+
+ # maybe fallback to HTTP proxy
+ if not proxy and (http_proxy != ""):
+ proxy = http_proxy or getproxies().get("http")
+
+ opts = self._get_pool_options(ca_certs)
+
+ if proxy:
+ return urllib3.ProxyManager(proxy, **opts)
+ else:
+ return urllib3.PoolManager(**opts)
+
+ def capture_event(
+ self, event # type: Event
+ ):
+ # type: (...) -> None
+ hub = self.hub_cls.current
+
+ def send_event_wrapper():
+ # type: () -> None
+ with hub:
+ with capture_internal_exceptions():
+ self._send_event(event)
+
+ self._worker.submit(send_event_wrapper)
+
+ def capture_envelope(
+ self, envelope # type: Envelope
+ ):
+ # type: (...) -> None
+ hub = self.hub_cls.current
+
+ def send_envelope_wrapper():
+ # type: () -> None
+ with hub:
+ with capture_internal_exceptions():
+ self._send_envelope(envelope)
+
+ self._worker.submit(send_envelope_wrapper)
+
+ def flush(
+ self,
+ timeout, # type: float
+ callback=None, # type: Optional[Any]
+ ):
+ # type: (...) -> None
+ logger.debug("Flushing HTTP transport")
+ if timeout > 0:
+ self._worker.flush(timeout, callback)
+
+ def kill(self):
+ # type: () -> None
+ logger.debug("Killing HTTP transport")
+ self._worker.kill()
+
+
+class _FunctionTransport(Transport):
+ def __init__(
+ self, func # type: Callable[[Event], None]
+ ):
+ # type: (...) -> None
+ Transport.__init__(self)
+ self._func = func
+
+ def capture_event(
+ self, event # type: Event
+ ):
+ # type: (...) -> None
+ self._func(event)
+ return None
+
+
+def make_transport(options):
+ # type: (Dict[str, Any]) -> Optional[Transport]
+ ref_transport = options["transport"]
+
+ # If no transport is given, we use the http transport class
+ if ref_transport is None:
+ transport_cls = HttpTransport # type: Type[Transport]
+ elif isinstance(ref_transport, Transport):
+ return ref_transport
+ elif isinstance(ref_transport, type) and issubclass(ref_transport, Transport):
+ transport_cls = ref_transport
+ elif callable(ref_transport):
+ return _FunctionTransport(ref_transport) # type: ignore
+
+ # If a transport class is given, only instantiate it if the DSN is not
+ # empty or None.
+ if options["dsn"]:
+ return transport_cls(options)
+
+ return None
diff --git a/third_party/python/sentry_sdk/sentry_sdk/utils.py b/third_party/python/sentry_sdk/sentry_sdk/utils.py
new file mode 100644
index 0000000000..d92309c5f7
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/utils.py
@@ -0,0 +1,831 @@
+import os
+import sys
+import linecache
+import logging
+
+from datetime import datetime
+
+import sentry_sdk
+from sentry_sdk._compat import urlparse, text_type, implements_str, PY2
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from types import FrameType
+ from types import TracebackType
+ from typing import Any
+ from typing import Callable
+ from typing import Dict
+ from typing import ContextManager
+ from typing import Iterator
+ from typing import List
+ from typing import Optional
+ from typing import Set
+ from typing import Tuple
+ from typing import Union
+ from typing import Type
+
+ from sentry_sdk._types import ExcInfo
+
+epoch = datetime(1970, 1, 1)
+
+
+# The logger is created here but initialized in the debug support module
+logger = logging.getLogger("sentry_sdk.errors")
+
+MAX_STRING_LENGTH = 512
+MAX_FORMAT_PARAM_LENGTH = 128
+
+
+def _get_debug_hub():
+ # type: () -> Optional[sentry_sdk.Hub]
+ # This function is replaced by debug.py
+ pass
+
+
+class CaptureInternalException(object):
+ __slots__ = ()
+
+ def __enter__(self):
+ # type: () -> ContextManager[Any]
+ return self
+
+ def __exit__(self, ty, value, tb):
+ # type: (Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]) -> bool
+ if ty is not None and value is not None:
+ capture_internal_exception((ty, value, tb))
+
+ return True
+
+
+_CAPTURE_INTERNAL_EXCEPTION = CaptureInternalException()
+
+
+def capture_internal_exceptions():
+ # type: () -> ContextManager[Any]
+ return _CAPTURE_INTERNAL_EXCEPTION
+
+
+def capture_internal_exception(exc_info):
+ # type: (ExcInfo) -> None
+ hub = _get_debug_hub()
+ if hub is not None:
+ hub._capture_internal_exception(exc_info)
+
+
+def to_timestamp(value):
+ # type: (datetime) -> float
+ return (value - epoch).total_seconds()
+
+
+def format_timestamp(value):
+ # type: (datetime) -> str
+ return value.strftime("%Y-%m-%dT%H:%M:%S.%fZ")
+
+
+def event_hint_with_exc_info(exc_info=None):
+ # type: (Optional[ExcInfo]) -> Dict[str, Optional[ExcInfo]]
+ """Creates a hint with the exc info filled in."""
+ if exc_info is None:
+ exc_info = sys.exc_info()
+ else:
+ exc_info = exc_info_from_error(exc_info)
+ if exc_info[0] is None:
+ exc_info = None
+ return {"exc_info": exc_info}
+
+
+class BadDsn(ValueError):
+ """Raised on invalid DSNs."""
+
+
+@implements_str
+class Dsn(object):
+ """Represents a DSN."""
+
+ def __init__(self, value):
+ # type: (Union[Dsn, str]) -> None
+ if isinstance(value, Dsn):
+ self.__dict__ = dict(value.__dict__)
+ return
+ parts = urlparse.urlsplit(text_type(value))
+
+ if parts.scheme not in (u"http", u"https"):
+ raise BadDsn("Unsupported scheme %r" % parts.scheme)
+ self.scheme = parts.scheme
+
+ if parts.hostname is None:
+ raise BadDsn("Missing hostname")
+
+ self.host = parts.hostname
+
+ if parts.port is None:
+ self.port = self.scheme == "https" and 443 or 80
+ else:
+ self.port = parts.port
+
+ if not parts.username:
+ raise BadDsn("Missing public key")
+
+ self.public_key = parts.username
+ self.secret_key = parts.password
+
+ path = parts.path.rsplit("/", 1)
+
+ try:
+ self.project_id = text_type(int(path.pop()))
+ except (ValueError, TypeError):
+ raise BadDsn("Invalid project in DSN (%r)" % (parts.path or "")[1:])
+
+ self.path = "/".join(path) + "/"
+
+ @property
+ def netloc(self):
+ # type: () -> str
+ """The netloc part of a DSN."""
+ rv = self.host
+ if (self.scheme, self.port) not in (("http", 80), ("https", 443)):
+ rv = "%s:%s" % (rv, self.port)
+ return rv
+
+ def to_auth(self, client=None):
+ # type: (Optional[Any]) -> Auth
+ """Returns the auth info object for this dsn."""
+ return Auth(
+ scheme=self.scheme,
+ host=self.netloc,
+ path=self.path,
+ project_id=self.project_id,
+ public_key=self.public_key,
+ secret_key=self.secret_key,
+ client=client,
+ )
+
+ def __str__(self):
+ # type: () -> str
+ return "%s://%s%s@%s%s%s" % (
+ self.scheme,
+ self.public_key,
+ self.secret_key and "@" + self.secret_key or "",
+ self.netloc,
+ self.path,
+ self.project_id,
+ )
+
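A minimal sketch of the parsing above, using a made-up DSN and assuming the vendored module imports as sentry_sdk.utils:

    from sentry_sdk.utils import Dsn

    dsn = Dsn("https://publickey@sentry.example.com/42")
    assert dsn.scheme == "https"
    assert dsn.host == "sentry.example.com"
    assert dsn.port == 443            # inferred from the scheme when omitted
    assert dsn.public_key == "publickey"
    assert dsn.project_id == "42"
    assert dsn.path == "/"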
+
+class Auth(object):
+ """Helper object that represents the auth info."""
+
+ def __init__(
+ self,
+ scheme,
+ host,
+ project_id,
+ public_key,
+ secret_key=None,
+ version=7,
+ client=None,
+ path="/",
+ ):
+ # type: (str, str, str, str, Optional[str], int, Optional[Any], str) -> None
+ self.scheme = scheme
+ self.host = host
+ self.path = path
+ self.project_id = project_id
+ self.public_key = public_key
+ self.secret_key = secret_key
+ self.version = version
+ self.client = client
+
+ @property
+ def store_api_url(self):
+ # type: () -> str
+ """Returns the API url for storing events."""
+ return "%s://%s%sapi/%s/store/" % (
+ self.scheme,
+ self.host,
+ self.path,
+ self.project_id,
+ )
+
+ def to_header(self, timestamp=None):
+ # type: (Optional[datetime]) -> str
+        """Returns the auth header as a string."""
+ rv = [("sentry_key", self.public_key), ("sentry_version", self.version)]
+ if timestamp is not None:
+ rv.append(("sentry_timestamp", str(to_timestamp(timestamp))))
+ if self.client is not None:
+ rv.append(("sentry_client", self.client))
+ if self.secret_key is not None:
+ rv.append(("sentry_secret", self.secret_key))
+ return u"Sentry " + u", ".join("%s=%s" % (key, value) for key, value in rv)
+
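Continuing the made-up DSN from the previous sketch, the derived store URL and auth header look roughly like this (the client string is illustrative):

    from sentry_sdk.utils import Dsn

    auth = Dsn("https://publickey@sentry.example.com/42").to_auth(
        client="sentry.python/0.14.3"
    )
    assert auth.store_api_url == "https://sentry.example.com/api/42/store/"
    assert auth.to_header() == (
        "Sentry sentry_key=publickey, sentry_version=7, "
        "sentry_client=sentry.python/0.14.3"
    )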
+
+class AnnotatedValue(object):
+ __slots__ = ("value", "metadata")
+
+ def __init__(self, value, metadata):
+ # type: (Optional[Any], Dict[str, Any]) -> None
+ self.value = value
+ self.metadata = metadata
+
+
+if MYPY:
+ from typing import TypeVar
+
+ T = TypeVar("T")
+ Annotated = Union[AnnotatedValue, T]
+
+
+def get_type_name(cls):
+ # type: (Optional[type]) -> Optional[str]
+ return getattr(cls, "__qualname__", None) or getattr(cls, "__name__", None)
+
+
+def get_type_module(cls):
+ # type: (Optional[type]) -> Optional[str]
+ mod = getattr(cls, "__module__", None)
+ if mod not in (None, "builtins", "__builtins__"):
+ return mod
+ return None
+
+
+def should_hide_frame(frame):
+ # type: (FrameType) -> bool
+ try:
+ mod = frame.f_globals["__name__"]
+ if mod.startswith("sentry_sdk."):
+ return True
+ except (AttributeError, KeyError):
+ pass
+
+ for flag_name in "__traceback_hide__", "__tracebackhide__":
+ try:
+ if frame.f_locals[flag_name]:
+ return True
+ except Exception:
+ pass
+
+ return False
+
+
+def iter_stacks(tb):
+ # type: (Optional[TracebackType]) -> Iterator[TracebackType]
+ tb_ = tb # type: Optional[TracebackType]
+ while tb_ is not None:
+ if not should_hide_frame(tb_.tb_frame):
+ yield tb_
+ tb_ = tb_.tb_next
+
+
+def get_lines_from_file(
+ filename, # type: str
+ lineno, # type: int
+ loader=None, # type: Optional[Any]
+ module=None, # type: Optional[str]
+):
+ # type: (...) -> Tuple[List[Annotated[str]], Optional[Annotated[str]], List[Annotated[str]]]
+ context_lines = 5
+ source = None
+ if loader is not None and hasattr(loader, "get_source"):
+ try:
+ source_str = loader.get_source(module) # type: Optional[str]
+ except (ImportError, IOError):
+ source_str = None
+ if source_str is not None:
+ source = source_str.splitlines()
+
+ if source is None:
+ try:
+ source = linecache.getlines(filename)
+ except (OSError, IOError):
+ return [], None, []
+
+ if not source:
+ return [], None, []
+
+ lower_bound = max(0, lineno - context_lines)
+ upper_bound = min(lineno + 1 + context_lines, len(source))
+
+ try:
+ pre_context = [
+ strip_string(line.strip("\r\n")) for line in source[lower_bound:lineno]
+ ]
+ context_line = strip_string(source[lineno].strip("\r\n"))
+ post_context = [
+ strip_string(line.strip("\r\n"))
+ for line in source[(lineno + 1) : upper_bound]
+ ]
+ return pre_context, context_line, post_context
+ except IndexError:
+ # the file may have changed since it was loaded into memory
+ return [], None, []
+
+
+def get_source_context(
+ frame, # type: FrameType
+ tb_lineno, # type: int
+):
+ # type: (...) -> Tuple[List[Annotated[str]], Optional[Annotated[str]], List[Annotated[str]]]
+ try:
+ abs_path = frame.f_code.co_filename # type: Optional[str]
+ except Exception:
+ abs_path = None
+ try:
+ module = frame.f_globals["__name__"]
+ except Exception:
+ return [], None, []
+ try:
+ loader = frame.f_globals["__loader__"]
+ except Exception:
+ loader = None
+ lineno = tb_lineno - 1
+ if lineno is not None and abs_path:
+ return get_lines_from_file(abs_path, lineno, loader, module)
+ return [], None, []
+
+
+def safe_str(value):
+ # type: (Any) -> str
+ try:
+ return text_type(value)
+ except Exception:
+ return safe_repr(value)
+
+
+if PY2:
+
+ def safe_repr(value):
+ # type: (Any) -> str
+ try:
+ rv = repr(value).decode("utf-8", "replace")
+
+ # At this point `rv` contains a bunch of literal escape codes, like
+ # this (exaggerated example):
+ #
+ # u"\\x2f"
+ #
+ # But we want to show this string as:
+ #
+ # u"/"
+ try:
+ # unicode-escape does this job, but can only decode latin1. So we
+ # attempt to encode in latin1.
+ return rv.encode("latin1").decode("unicode-escape")
+ except Exception:
+ # Since usually strings aren't latin1 this can break. In those
+ # cases we just give up.
+ return rv
+ except Exception:
+ # If e.g. the call to `repr` already fails
+ return u"<broken repr>"
+
+
+else:
+
+ def safe_repr(value):
+ # type: (Any) -> str
+ try:
+ return repr(value)
+ except Exception:
+ return "<broken repr>"
+
+
+def filename_for_module(module, abs_path):
+ # type: (Optional[str], Optional[str]) -> Optional[str]
+ if not abs_path or not module:
+ return abs_path
+
+ try:
+ if abs_path.endswith(".pyc"):
+ abs_path = abs_path[:-1]
+
+ base_module = module.split(".", 1)[0]
+ if base_module == module:
+ return os.path.basename(abs_path)
+
+ base_module_path = sys.modules[base_module].__file__
+ return abs_path.split(base_module_path.rsplit(os.sep, 2)[0], 1)[-1].lstrip(
+ os.sep
+ )
+ except Exception:
+ return abs_path
+
+
+def serialize_frame(frame, tb_lineno=None, with_locals=True):
+ # type: (FrameType, Optional[int], bool) -> Dict[str, Any]
+ f_code = getattr(frame, "f_code", None)
+ if not f_code:
+ abs_path = None
+ function = None
+ else:
+ abs_path = frame.f_code.co_filename
+ function = frame.f_code.co_name
+ try:
+ module = frame.f_globals["__name__"]
+ except Exception:
+ module = None
+
+ if tb_lineno is None:
+ tb_lineno = frame.f_lineno
+
+ pre_context, context_line, post_context = get_source_context(frame, tb_lineno)
+
+ rv = {
+ "filename": filename_for_module(module, abs_path) or None,
+ "abs_path": os.path.abspath(abs_path) if abs_path else None,
+ "function": function or "<unknown>",
+ "module": module,
+ "lineno": tb_lineno,
+ "pre_context": pre_context,
+ "context_line": context_line,
+ "post_context": post_context,
+ } # type: Dict[str, Any]
+ if with_locals:
+ rv["vars"] = frame.f_locals
+
+ return rv
+
+
+def stacktrace_from_traceback(tb=None, with_locals=True):
+ # type: (Optional[TracebackType], bool) -> Dict[str, List[Dict[str, Any]]]
+ return {
+ "frames": [
+ serialize_frame(
+ tb.tb_frame, tb_lineno=tb.tb_lineno, with_locals=with_locals
+ )
+ for tb in iter_stacks(tb)
+ ]
+ }
+
+
+def current_stacktrace(with_locals=True):
+ # type: (bool) -> Any
+ __tracebackhide__ = True
+ frames = []
+
+ f = sys._getframe() # type: Optional[FrameType]
+ while f is not None:
+ if not should_hide_frame(f):
+ frames.append(serialize_frame(f, with_locals=with_locals))
+ f = f.f_back
+
+ frames.reverse()
+
+ return {"frames": frames}
+
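A small sketch of the helper above: frames from the SDK itself (and anything marked __tracebackhide__) are filtered out, and the resulting list is ordered outermost frame first:

    from sentry_sdk.utils import current_stacktrace

    stack = current_stacktrace(with_locals=False)
    frames = stack["frames"]
    print(frames[-1]["function"], frames[-1]["lineno"])  # innermost visible frame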
+
+def get_errno(exc_value):
+ # type: (BaseException) -> Optional[Any]
+ return getattr(exc_value, "errno", None)
+
+
+def single_exception_from_error_tuple(
+ exc_type, # type: Optional[type]
+ exc_value, # type: Optional[BaseException]
+ tb, # type: Optional[TracebackType]
+ client_options=None, # type: Optional[Dict[str, Any]]
+ mechanism=None, # type: Optional[Dict[str, Any]]
+):
+ # type: (...) -> Dict[str, Any]
+ if exc_value is not None:
+ errno = get_errno(exc_value)
+ else:
+ errno = None
+
+ if errno is not None:
+ mechanism = mechanism or {}
+ mechanism.setdefault("meta", {}).setdefault("errno", {}).setdefault(
+ "number", errno
+ )
+
+ if client_options is None:
+ with_locals = True
+ else:
+ with_locals = client_options["with_locals"]
+
+ return {
+ "module": get_type_module(exc_type),
+ "type": get_type_name(exc_type),
+ "value": safe_str(exc_value),
+ "mechanism": mechanism,
+ "stacktrace": stacktrace_from_traceback(tb, with_locals),
+ }
+
+
+HAS_CHAINED_EXCEPTIONS = hasattr(Exception, "__suppress_context__")
+
+if HAS_CHAINED_EXCEPTIONS:
+
+ def walk_exception_chain(exc_info):
+ # type: (ExcInfo) -> Iterator[ExcInfo]
+ exc_type, exc_value, tb = exc_info
+
+ seen_exceptions = []
+ seen_exception_ids = set() # type: Set[int]
+
+ while (
+ exc_type is not None
+ and exc_value is not None
+ and id(exc_value) not in seen_exception_ids
+ ):
+ yield exc_type, exc_value, tb
+
+ # Avoid hashing random types we don't know anything
+ # about. Use the list to keep a ref so that the `id` is
+ # not used for another object.
+ seen_exceptions.append(exc_value)
+ seen_exception_ids.add(id(exc_value))
+
+ if exc_value.__suppress_context__:
+ cause = exc_value.__cause__
+ else:
+ cause = exc_value.__context__
+ if cause is None:
+ break
+ exc_type = type(cause)
+ exc_value = cause
+ tb = getattr(cause, "__traceback__", None)
+
+
+else:
+
+ def walk_exception_chain(exc_info):
+ # type: (ExcInfo) -> Iterator[ExcInfo]
+ yield exc_info
+
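A sketch of how the chain walker behaves on Python 3 with explicit chaining ("raise ... from ..."); the exception types are just an example:

    from sentry_sdk.utils import exc_info_from_error, walk_exception_chain

    try:
        try:
            1 / 0
        except ZeroDivisionError as inner:
            raise ValueError("bad input") from inner
    except ValueError as err:
        chain = list(walk_exception_chain(exc_info_from_error(err)))

    # Outermost exception first; exceptions_from_error_tuple() later reverses
    # this so the event lists the oldest exception first.
    assert [ty.__name__ for ty, _, _ in chain] == ["ValueError", "ZeroDivisionError"]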
+
+def exceptions_from_error_tuple(
+ exc_info, # type: ExcInfo
+ client_options=None, # type: Optional[Dict[str, Any]]
+ mechanism=None, # type: Optional[Dict[str, Any]]
+):
+ # type: (...) -> List[Dict[str, Any]]
+ exc_type, exc_value, tb = exc_info
+ rv = []
+ for exc_type, exc_value, tb in walk_exception_chain(exc_info):
+ rv.append(
+ single_exception_from_error_tuple(
+ exc_type, exc_value, tb, client_options, mechanism
+ )
+ )
+
+ rv.reverse()
+
+ return rv
+
+
+def to_string(value):
+ # type: (str) -> str
+ try:
+ return text_type(value)
+ except UnicodeDecodeError:
+ return repr(value)[1:-1]
+
+
+def iter_event_stacktraces(event):
+ # type: (Dict[str, Any]) -> Iterator[Dict[str, Any]]
+ if "stacktrace" in event:
+ yield event["stacktrace"]
+ if "threads" in event:
+ for thread in event["threads"].get("values") or ():
+ if "stacktrace" in thread:
+ yield thread["stacktrace"]
+ if "exception" in event:
+ for exception in event["exception"].get("values") or ():
+ if "stacktrace" in exception:
+ yield exception["stacktrace"]
+
+
+def iter_event_frames(event):
+ # type: (Dict[str, Any]) -> Iterator[Dict[str, Any]]
+ for stacktrace in iter_event_stacktraces(event):
+ for frame in stacktrace.get("frames") or ():
+ yield frame
+
+
+def handle_in_app(event, in_app_exclude=None, in_app_include=None):
+ # type: (Dict[str, Any], Optional[List[str]], Optional[List[str]]) -> Dict[str, Any]
+ for stacktrace in iter_event_stacktraces(event):
+ handle_in_app_impl(
+ stacktrace.get("frames"),
+ in_app_exclude=in_app_exclude,
+ in_app_include=in_app_include,
+ )
+
+ return event
+
+
+def handle_in_app_impl(frames, in_app_exclude, in_app_include):
+ # type: (Any, Optional[List[str]], Optional[List[str]]) -> Optional[Any]
+ if not frames:
+ return None
+
+ any_in_app = False
+ for frame in frames:
+ in_app = frame.get("in_app")
+ if in_app is not None:
+ if in_app:
+ any_in_app = True
+ continue
+
+ module = frame.get("module")
+ if not module:
+ continue
+ elif _module_in_set(module, in_app_include):
+ frame["in_app"] = True
+ any_in_app = True
+ elif _module_in_set(module, in_app_exclude):
+ frame["in_app"] = False
+
+ if not any_in_app:
+ for frame in frames:
+ if frame.get("in_app") is None:
+ frame["in_app"] = True
+
+ return frames
+
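A minimal sketch of the in-app marking (module names are illustrative): frames from excluded modules are flagged out of app, and if nothing ends up explicitly in app, the remaining undecided frames are assumed to be application code:

    from sentry_sdk.utils import handle_in_app_impl

    frames = [
        {"module": "django.core.handlers"},   # framework frame
        {"module": "myapp.views"},            # application frame
    ]
    handle_in_app_impl(frames, in_app_exclude=["django"], in_app_include=None)
    assert frames[0]["in_app"] is False
    assert frames[1]["in_app"] is True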
+
+def exc_info_from_error(error):
+ # type: (Union[BaseException, ExcInfo]) -> ExcInfo
+ if isinstance(error, tuple) and len(error) == 3:
+ exc_type, exc_value, tb = error
+ elif isinstance(error, BaseException):
+ tb = getattr(error, "__traceback__", None)
+ if tb is not None:
+ exc_type = type(error)
+ exc_value = error
+ else:
+ exc_type, exc_value, tb = sys.exc_info()
+ if exc_value is not error:
+ tb = None
+ exc_value = error
+ exc_type = type(error)
+
+ else:
+ raise ValueError("Expected Exception object to report, got %s!" % type(error))
+
+ return exc_type, exc_value, tb
+
+
+def event_from_exception(
+ exc_info, # type: Union[BaseException, ExcInfo]
+ client_options=None, # type: Optional[Dict[str, Any]]
+ mechanism=None, # type: Optional[Dict[str, Any]]
+):
+ # type: (...) -> Tuple[Dict[str, Any], Dict[str, Any]]
+ exc_info = exc_info_from_error(exc_info)
+ hint = event_hint_with_exc_info(exc_info)
+ return (
+ {
+ "level": "error",
+ "exception": {
+ "values": exceptions_from_error_tuple(
+ exc_info, client_options, mechanism
+ )
+ },
+ },
+ hint,
+ )
+
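Putting the pieces together, a sketch of the event/hint pair produced for a caught exception:

    from sentry_sdk.utils import event_from_exception

    try:
        {}["missing"]
    except KeyError as err:
        event, hint = event_from_exception(err)

    assert event["level"] == "error"
    assert event["exception"]["values"][0]["type"] == "KeyError"
    assert hint["exc_info"][0] is KeyError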
+
+def _module_in_set(name, set):
+ # type: (str, Optional[List[str]]) -> bool
+ if not set:
+ return False
+ for item in set or ():
+ if item == name or name.startswith(item + "."):
+ return True
+ return False
+
+
+def strip_string(value, max_length=None):
+ # type: (str, Optional[int]) -> Union[AnnotatedValue, str]
+ # TODO: read max_length from config
+ if not value:
+ return value
+
+ if max_length is None:
+        # Resolved here at call time (rather than as the parameter default) so
+        # that patching `MAX_STRING_LENGTH` also affects `strip_string`.
+ max_length = MAX_STRING_LENGTH
+
+ length = len(value)
+
+ if length > max_length:
+ return AnnotatedValue(
+ value=value[: max_length - 3] + u"...",
+ metadata={
+ "len": length,
+ "rem": [["!limit", "x", max_length - 3, max_length]],
+ },
+ )
+ return value
+
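A sketch of the truncation behaviour: strings longer than the limit come back as an AnnotatedValue recording how much was dropped, while shorter values pass through unchanged:

    from sentry_sdk.utils import AnnotatedValue, strip_string

    result = strip_string("x" * 600)        # default limit is MAX_STRING_LENGTH (512)
    assert isinstance(result, AnnotatedValue)
    assert len(result.value) == 512         # 509 characters kept plus "..."
    assert result.metadata["len"] == 600

    assert strip_string("short") == "short"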
+
+def _is_threading_local_monkey_patched():
+ # type: () -> bool
+ try:
+ from gevent.monkey import is_object_patched # type: ignore
+
+ if is_object_patched("threading", "local"):
+ return True
+ except ImportError:
+ pass
+
+ try:
+ from eventlet.patcher import is_monkey_patched # type: ignore
+
+ if is_monkey_patched("thread"):
+ return True
+ except ImportError:
+ pass
+
+ return False
+
+
+def _get_contextvars():
+ # type: () -> Tuple[bool, type]
+ """
+ Try to import contextvars and use it if it's deemed safe. We should not use
+ contextvars if gevent or eventlet have patched thread locals, as
+ contextvars are unaffected by that patch.
+
+ https://github.com/gevent/gevent/issues/1407
+ """
+ if not _is_threading_local_monkey_patched():
+ # aiocontextvars is a PyPI package that ensures that the contextvars
+ # backport (also a PyPI package) works with asyncio under Python 3.6
+ #
+ # Import it if available.
+ if not PY2 and sys.version_info < (3, 7):
+ try:
+ from aiocontextvars import ContextVar # noqa
+
+ return True, ContextVar
+ except ImportError:
+ pass
+
+ try:
+ from contextvars import ContextVar
+
+ return True, ContextVar
+ except ImportError:
+ pass
+
+ from threading import local
+
+ class ContextVar(object):
+ # Super-limited impl of ContextVar
+
+ def __init__(self, name):
+ # type: (str) -> None
+ self._name = name
+ self._local = local()
+
+ def get(self, default):
+ # type: (Any) -> Any
+ return getattr(self._local, "value", default)
+
+ def set(self, value):
+ # type: (Any) -> None
+ self._local.value = value
+
+ return False, ContextVar
+
+
+HAS_REAL_CONTEXTVARS, ContextVar = _get_contextvars()
+
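Whichever implementation is selected above, the module-level ContextVar behaves the same for the SDK's purposes; HAS_REAL_CONTEXTVARS only tells integrations whether real task-local isolation is available. A tiny sketch (the variable name is illustrative):

    from sentry_sdk.utils import HAS_REAL_CONTEXTVARS, ContextVar

    flag = ContextVar("example_flag")
    flag.set("on")
    assert flag.get(None) == "on"
    print("real contextvars:", HAS_REAL_CONTEXTVARS)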
+
+def transaction_from_function(func):
+ # type: (Callable[..., Any]) -> Optional[str]
+ # Methods in Python 2
+ try:
+ return "%s.%s.%s" % (
+ func.im_class.__module__, # type: ignore
+ func.im_class.__name__, # type: ignore
+ func.__name__,
+ )
+ except Exception:
+ pass
+
+ func_qualname = (
+ getattr(func, "__qualname__", None) or getattr(func, "__name__", None) or None
+ ) # type: Optional[str]
+
+ if not func_qualname:
+ # No idea what it is
+ return None
+
+ # Methods in Python 3
+ # Functions
+ # Classes
+ try:
+ return "%s.%s" % (func.__module__, func_qualname)
+ except Exception:
+ pass
+
+ # Possibly a lambda
+ return func_qualname
+
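A sketch of the transaction names derived for a few callables (the module part depends on where the code lives; here it is assumed to run as a script, hence __main__):

    from sentry_sdk.utils import transaction_from_function

    def handler():
        pass

    class View(object):
        def get(self):
            pass

    assert transaction_from_function(handler) == "__main__.handler"
    assert transaction_from_function(View.get) == "__main__.View.get"   # Python 3
    assert transaction_from_function(lambda: None) == "__main__.<lambda>"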
+
+disable_capture_event = ContextVar("disable_capture_event")
diff --git a/third_party/python/sentry_sdk/sentry_sdk/worker.py b/third_party/python/sentry_sdk/sentry_sdk/worker.py
new file mode 100644
index 0000000000..b5f2ea8ae6
--- /dev/null
+++ b/third_party/python/sentry_sdk/sentry_sdk/worker.py
@@ -0,0 +1,142 @@
+import os
+
+from threading import Thread, Lock
+from time import sleep, time
+from sentry_sdk._compat import queue, check_thread_support
+from sentry_sdk.utils import logger
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+ from queue import Queue
+ from typing import Any
+ from typing import Optional
+ from typing import Callable
+
+
+_TERMINATOR = object()
+
+
+class BackgroundWorker(object):
+ def __init__(self):
+ # type: () -> None
+ check_thread_support()
+ self._queue = queue.Queue(30) # type: Queue[Any]
+ self._lock = Lock()
+ self._thread = None # type: Optional[Thread]
+ self._thread_for_pid = None # type: Optional[int]
+
+ @property
+ def is_alive(self):
+ # type: () -> bool
+ if self._thread_for_pid != os.getpid():
+ return False
+ if not self._thread:
+ return False
+ return self._thread.is_alive()
+
+ def _ensure_thread(self):
+ # type: () -> None
+ if not self.is_alive:
+ self.start()
+
+ def _timed_queue_join(self, timeout):
+ # type: (float) -> bool
+ deadline = time() + timeout
+ queue = self._queue
+
+ real_all_tasks_done = getattr(
+ queue, "all_tasks_done", None
+ ) # type: Optional[Any]
+ if real_all_tasks_done is not None:
+ real_all_tasks_done.acquire()
+ all_tasks_done = real_all_tasks_done # type: Optional[Any]
+ elif queue.__module__.startswith("eventlet."):
+ all_tasks_done = getattr(queue, "_cond", None)
+ else:
+ all_tasks_done = None
+
+ try:
+ while queue.unfinished_tasks:
+ delay = deadline - time()
+ if delay <= 0:
+ return False
+ if all_tasks_done is not None:
+ all_tasks_done.wait(timeout=delay)
+ else:
+ # worst case, we just poll the number of remaining tasks
+ sleep(0.1)
+
+ return True
+ finally:
+ if real_all_tasks_done is not None:
+ real_all_tasks_done.release()
+
+ def start(self):
+ # type: () -> None
+ with self._lock:
+ if not self.is_alive:
+ self._thread = Thread(
+ target=self._target, name="raven-sentry.BackgroundWorker"
+ )
+ self._thread.setDaemon(True)
+ self._thread.start()
+ self._thread_for_pid = os.getpid()
+
+ def kill(self):
+ # type: () -> None
+ """
+        Kill the worker thread. Returns immediately. Not useful for waiting
+        until pending events have been sent; use `flush` for that.
+ """
+ logger.debug("background worker got kill request")
+ with self._lock:
+ if self._thread:
+ try:
+ self._queue.put_nowait(_TERMINATOR)
+ except queue.Full:
+ logger.debug("background worker queue full, kill failed")
+
+ self._thread = None
+ self._thread_for_pid = None
+
+ def flush(self, timeout, callback=None):
+ # type: (float, Optional[Any]) -> None
+ logger.debug("background worker got flush request")
+ with self._lock:
+ if self.is_alive and timeout > 0.0:
+ self._wait_flush(timeout, callback)
+ logger.debug("background worker flushed")
+
+ def _wait_flush(self, timeout, callback):
+ # type: (float, Optional[Any]) -> None
+ initial_timeout = min(0.1, timeout)
+ if not self._timed_queue_join(initial_timeout):
+ pending = self._queue.qsize()
+ logger.debug("%d event(s) pending on flush", pending)
+ if callback is not None:
+ callback(pending, timeout)
+ self._timed_queue_join(timeout - initial_timeout)
+
+ def submit(self, callback):
+ # type: (Callable[[], None]) -> None
+ self._ensure_thread()
+ try:
+ self._queue.put_nowait(callback)
+ except queue.Full:
+ logger.debug("background worker queue full, dropping event")
+
+ def _target(self):
+ # type: () -> None
+ while True:
+ callback = self._queue.get()
+ try:
+ if callback is _TERMINATOR:
+ break
+ try:
+ callback()
+ except Exception:
+ logger.error("Failed processing job", exc_info=True)
+ finally:
+ self._queue.task_done()
+ sleep(0)
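A minimal sketch of the worker lifecycle, assuming the vendored module imports as sentry_sdk.worker: callables are queued with submit(), flush() waits (up to a timeout) for the queue to drain, and kill() stops the thread without waiting:

    from sentry_sdk.worker import BackgroundWorker

    worker = BackgroundWorker()
    worker.submit(lambda: print("sent in the background"))
    worker.flush(timeout=2.0)   # blocks until the queue drains or 2s pass
    worker.kill()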