From 416bc0b21f5c3360b803b92756293e911e540ac7 Mon Sep 17 00:00:00 2001
From: Jakub Boukal
Date: Tue, 10 Dec 2024 19:27:49 +0100
Subject: [PATCH 1/2] Add black to pre-commit hooks. Fixes: #616

---
 .pre-commit-config.yaml | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 737a2113..82eb0c55 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -17,6 +17,10 @@ repos:
   hooks:
   - id: isort
     args: ['--profile', 'black']
+- repo: https://github.com/psf/black
+  rev: '24.10.0'
+  hooks:
+  - id: black
 - repo: https://github.com/pre-commit/pre-commit-hooks
   rev: 'v5.0.0'
   hooks:

From ada64b8b6b7e76ef6c0f559b915aee38a0e967bf Mon Sep 17 00:00:00 2001
From: Jakub Boukal
Date: Tue, 10 Dec 2024 19:28:01 +0100
Subject: [PATCH 2/2] Apply black formatting to codebase

---
 docs/conf.py | 45 ++--
 project/example_app/admin.py | 16 +-
 .../example_app/migrations/0001_initial.py | 23 +-
 .../migrations/0002_alter_blind_photo.py | 8 +-
 .../0003_blind_unique_name_if_provided.py | 10 +-
 project/example_app/models.py | 2 +-
 project/example_app/urls.py | 6 +-
 project/example_app/views.py | 10 +-
 project/project/__init__.py | 2 +-
 project/project/settings.py | 103 ++++----
 project/project/urls.py | 23 +-
 project/tests/data/__init__.py | 2 +-
 project/tests/data/dynamic.py | 12 +-
 project/tests/factories.py | 12 +-
 project/tests/test_code.py | 36 +--
 project/tests/test_code_gen_curl.py | 19 +-
 project/tests/test_code_gen_django.py | 9 +-
 project/tests/test_collector.py | 14 +-
 project/tests/test_compat.py | 10 +-
 project/tests/test_config_auth.py | 34 ++-
 project/tests/test_config_long_urls.py | 14 +-
 project/tests/test_config_max_body_size.py | 24 +-
 project/tests/test_config_meta.py | 15 +-
 project/tests/test_db.py | 23 +-
 project/tests/test_dynamic_profiling.py | 84 ++++---
 project/tests/test_encoding.py | 60 ++---
 project/tests/test_end_points.py | 134 ++++++-----
 project/tests/test_execute_sql.py | 49 ++--
 project/tests/test_filters.py | 12 +-
 project/tests/test_lib/__init__.py | 2 +-
 project/tests/test_lib/mock_suite.py | 131 +++++-----
 project/tests/test_models.py | 120 ++++++----
 project/tests/test_multipart_forms.py | 11 +-
 project/tests/test_profile_dot.py | 11 +-
 project/tests/test_profile_parser.py | 25 +-
 project/tests/test_response_assumptions.py | 4 +-
 .../tests/test_sensitive_data_in_request.py | 177 ++++++++------
 project/tests/test_silky_middleware.py | 83 +++----
 project/tests/test_silky_profiler.py | 40 ++--
 project/tests/test_view_profiling.py | 170 +++++++------
 project/tests/test_view_requests.py | 223 +++++++++++-------
 project/tests/test_view_sql_detail.py | 63 +++--
 project/tests/test_view_summary_view.py | 46 ++--
 project/tests/urlconf_without_silk.py | 5 +-
 project/tests/util.py | 2 +-
 project/wsgi.py | 1 +
 setup.py | 62 ++---
 silk/auth.py | 4 +-
 silk/code_generation/__init__.py | 2 +-
 silk/code_generation/curl.py | 36 +--
 silk/code_generation/django_test_client.py | 18 +-
 silk/collector.py | 71 +++---
 silk/config.py | 64 +++--
 silk/middleware.py | 70 +++---
 silk/migrations/0001_initial.py | 172 +++++++++-----
 .../0002_auto_update_uuid4_id_field.py | 18 +-
 silk/migrations/0003_request_prof_file.py | 8 +-
 .../0004_request_prof_file_storage.py | 10 +-
 .../0005_increase_request_prof_file_length.py | 13 +-
 .../0006_fix_request_prof_file_blank.py | 13 +-
 silk/migrations/0007_sqlquery_identifier.py | 6 +-
 silk/migrations/0008_sqlquery_analysis.py | 6 +-
 silk/model_factory.py | 163 +++++++------
 silk/models.py | 111 +++++----
 silk/profiling/__init__.py | 2 +-
 silk/profiling/dynamic.py | 42 ++--
 silk/profiling/profiler.py | 71 +++---
 silk/request_filters.py | 57 ++---
 silk/singleton.py | 2 +-
 silk/sql.py | 28 +--
 silk/storage.py | 3 +-
 silk/templatetags/__init__.py | 2 +-
 silk/templatetags/silk_filters.py | 28 ++-
 silk/templatetags/silk_inclusion.py | 35 ++-
 silk/templatetags/silk_nav.py | 2 +-
 silk/urls.py | 66 +++---
 silk/utils/__init__.py | 2 +-
 silk/utils/data_deletion.py | 11 +-
 silk/utils/pagination.py | 4 +-
 silk/utils/profile_parser.py | 6 +-
 silk/views/__init__.py | 2 +-
 silk/views/clear_db.py | 14 +-
 silk/views/code.py | 24 +-
 silk/views/cprofile.py | 8 +-
 silk/views/profile_detail.py | 24 +-
 silk/views/profile_dot.py | 12 +-
 silk/views/profile_download.py | 8 +-
 silk/views/profiling.py | 122 ++++++----
 silk/views/raw.py | 24 +-
 silk/views/request_detail.py | 36 +--
 silk/views/requests.py | 201 ++++++++--------
 silk/views/sql.py | 24 +-
 silk/views/sql_detail.py | 48 ++--
 silk/views/summary.py | 100 +++++---
 94 files changed, 2160 insertions(+), 1620 deletions(-)

diff --git a/docs/conf.py b/docs/conf.py
index cbea73ef..30737b31 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -27,19 +27,19 @@
 extensions = []
 
 # Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
+templates_path = ["_templates"]
 
 # The suffix of source filenames.
-source_suffix = '.rst'
+source_suffix = ".rst"
 
 # The encoding of source files.
 
 # The master toctree document.
-master_doc = 'index'
+master_doc = "index"
 
 # General information about the project.
-project = 'silk'
-copyright = '2014, Michael Ford'
+project = "silk"
+copyright = "2014, Michael Ford"
 
 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
@@ -48,7 +48,7 @@
 # The full version, including alpha/beta/rc tags.
 release = pkg_resources.get_distribution("django-silk").version
 # The short X.Y version.
-version = '.'.join(release.split('.')[:2])
+version = ".".join(release.split(".")[:2])
 
 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
@@ -62,7 +62,7 @@
 
 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
-exclude_patterns = ['_build']
+exclude_patterns = ["_build"]
 
 # The reST default role (used for this markup: `text`) to use for all
 # documents.
@@ -80,7 +80,7 @@
 # show_authors = False
 
 # The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
+pygments_style = "sphinx"
 
 # A list of ignored prefixes for module index sorting.
 # modindex_common_prefix = []
@@ -93,7 +93,7 @@
 
 # The theme to use for HTML and HTML Help pages. See the documentation for
 # a list of builtin themes.
-html_theme = 'default'
+html_theme = "default"
 
 # Theme options are theme-specific and customize the look and feel of a theme
 # further. For a list of options available for each theme, see the
@@ -122,7 +122,7 @@
 # Add any paths that contain custom static files (such as style sheets) here,
 # relative to this directory. They are copied after the builtin static files,
 # so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
+html_static_path = ["_static"]
 
 # Add any extra paths that contain custom files (such as robots.txt or
 # .htaccess) here, relative to this directory.
These files are copied @@ -171,7 +171,7 @@ # html_file_suffix = None # Output file base name for HTML help builder. -htmlhelp_basename = 'silkdoc' +htmlhelp_basename = "silkdoc" # -- Options for LaTeX output --------------------------------------------- @@ -179,10 +179,8 @@ latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. # 'preamble': '', } @@ -191,8 +189,7 @@ # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - ('index', 'silk.tex', 'silk Documentation', - 'Michael Ford', 'manual'), + ("index", "silk.tex", "silk Documentation", "Michael Ford", "manual"), ] # The name of an image file (relative to this directory) to place at the top of @@ -221,10 +218,8 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ - ('index', 'silk', 'silk Documentation', - ['Michael Ford'], 1), - ('profiling', 'Profiling', 'Profiling', - ['Michael Ford'], 2), + ("index", "silk", "silk Documentation", ["Michael Ford"], 1), + ("profiling", "Profiling", "Profiling", ["Michael Ford"], 2), ] # If true, show URL addresses after external links. @@ -237,9 +232,15 @@ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - ('index', 'silk', 'silk Documentation', - 'Michael Ford', 'silk', 'One line description of project.', - 'Miscellaneous'), + ( + "index", + "silk", + "silk Documentation", + "Michael Ford", + "silk", + "One line description of project.", + "Miscellaneous", + ), ] # Documents to append as an appendix to all manuals. 
diff --git a/project/example_app/admin.py b/project/example_app/admin.py index ad1ec478..24d6a1de 100644 --- a/project/example_app/admin.py +++ b/project/example_app/admin.py @@ -6,27 +6,23 @@ @admin.register(Blind) class BlindAdmin(admin.ModelAdmin): - list_display = ('desc', 'thumbnail', 'name', 'child_safe') - list_editable = ('name', 'child_safe') + list_display = ("desc", "thumbnail", "name", "child_safe") + list_editable = ("name", "child_safe") - @admin.display( - description='Photo' - ) + @admin.display(description="Photo") def thumbnail(self, obj): try: img_tag = '' % obj.photo.url except ValueError: - return '' + return "" url = self._blind_url(obj) return f'{img_tag}' def _blind_url(self, obj): - url = reverse('admin:example_app_blind_change', args=(obj.id, )) + url = reverse("admin:example_app_blind_change", args=(obj.id,)) return url - @admin.display( - description='Blind' - ) + @admin.display(description="Blind") def desc(self, obj): desc = str(obj) url = self._blind_url(obj) diff --git a/project/example_app/migrations/0001_initial.py b/project/example_app/migrations/0001_initial.py index e9dede90..db13eaba 100644 --- a/project/example_app/migrations/0001_initial.py +++ b/project/example_app/migrations/0001_initial.py @@ -7,20 +7,27 @@ class Migration(migrations.Migration): initial = True - dependencies = [ - ] + dependencies = [] operations = [ migrations.CreateModel( - name='Blind', + name="Blind", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('photo', models.ImageField(upload_to=b'products')), - ('name', models.TextField()), - ('child_safe', models.BooleanField(default=False)), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("photo", models.ImageField(upload_to=b"products")), + ("name", models.TextField()), + ("child_safe", models.BooleanField(default=False)), ], options={ - 'abstract': False, + "abstract": False, }, ), ] diff --git a/project/example_app/migrations/0002_alter_blind_photo.py b/project/example_app/migrations/0002_alter_blind_photo.py index abb1979b..780fba63 100644 --- a/project/example_app/migrations/0002_alter_blind_photo.py +++ b/project/example_app/migrations/0002_alter_blind_photo.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('example_app', '0001_initial'), + ("example_app", "0001_initial"), ] operations = [ migrations.AlterField( - model_name='blind', - name='photo', - field=models.ImageField(upload_to='products'), + model_name="blind", + name="photo", + field=models.ImageField(upload_to="products"), ), ] diff --git a/project/example_app/migrations/0003_blind_unique_name_if_provided.py b/project/example_app/migrations/0003_blind_unique_name_if_provided.py index 45b7c36d..4d2901e6 100644 --- a/project/example_app/migrations/0003_blind_unique_name_if_provided.py +++ b/project/example_app/migrations/0003_blind_unique_name_if_provided.py @@ -6,12 +6,16 @@ class Migration(migrations.Migration): dependencies = [ - ('example_app', '0002_alter_blind_photo'), + ("example_app", "0002_alter_blind_photo"), ] operations = [ migrations.AddConstraint( - model_name='blind', - constraint=models.UniqueConstraint(condition=models.Q(('name', ''), _negated=True), fields=('name',), name='unique_name_if_provided'), + model_name="blind", + constraint=models.UniqueConstraint( + condition=models.Q(("name", ""), _negated=True), + fields=("name",), + name="unique_name_if_provided", + ), ), ] diff --git 
a/project/example_app/models.py b/project/example_app/models.py index 4cabe808..1671a432 100644 --- a/project/example_app/models.py +++ b/project/example_app/models.py @@ -5,7 +5,7 @@ class Product(models.Model): - photo = ImageField(upload_to='products') + photo = ImageField(upload_to="products") class Meta: abstract = True diff --git a/project/example_app/urls.py b/project/example_app/urls.py index 42d28db2..4c120061 100644 --- a/project/example_app/urls.py +++ b/project/example_app/urls.py @@ -2,8 +2,8 @@ from . import views -app_name = 'example_app' +app_name = "example_app" urlpatterns = [ - path(route='', view=views.index, name='index'), - path(route='create', view=views.ExampleCreateView.as_view(), name='create'), + path(route="", view=views.index, name="index"), + path(route="create", view=views.ExampleCreateView.as_view(), name="create"), ] diff --git a/project/example_app/views.py b/project/example_app/views.py index 03497a09..9b1a359d 100644 --- a/project/example_app/views.py +++ b/project/example_app/views.py @@ -14,12 +14,14 @@ def index(request): def do_something_long(): sleep(1.345) - with silk_profile(name='Why do this take so long?'): + with silk_profile(name="Why do this take so long?"): do_something_long() - return render(request, 'example_app/index.html', {'blinds': models.Blind.objects.all()}) + return render( + request, "example_app/index.html", {"blinds": models.Blind.objects.all()} + ) class ExampleCreateView(CreateView): model = models.Blind - fields = ['name'] - success_url = reverse_lazy('example_app:index') + fields = ["name"] + success_url = reverse_lazy("example_app:index") diff --git a/project/project/__init__.py b/project/project/__init__.py index 037d9736..afb804c7 100644 --- a/project/project/__init__.py +++ b/project/project/__init__.py @@ -1 +1 @@ -__author__ = 'mtford' +__author__ = "mtford" diff --git a/project/project/settings.py b/project/project/settings.py index 8428f3f5..5944c9ed 100644 --- a/project/project/settings.py +++ b/project/project/settings.py @@ -2,7 +2,7 @@ BASE_DIR = os.path.dirname(os.path.dirname(__file__)) -SECRET_KEY = 'ey5!m&h-uj6c7dzp@(o1%96okkq4!&bjja%oi*v3r=2t(!$7os' +SECRET_KEY = "ey5!m&h-uj6c7dzp@(o1%96okkq4!&bjja%oi*v3r=2t(!$7os" DEBUG = True DEBUG_PROPAGATE_EXCEPTIONS = True @@ -10,31 +10,31 @@ ALLOWED_HOSTS = [] INSTALLED_APPS = ( - 'django.contrib.staticfiles', - 'django.contrib.admin', - 'django.contrib.auth', - 'django.contrib.contenttypes', - 'django.contrib.messages', - 'django.contrib.sessions', - 'silk', - 'example_app' + "django.contrib.staticfiles", + "django.contrib.admin", + "django.contrib.auth", + "django.contrib.contenttypes", + "django.contrib.messages", + "django.contrib.sessions", + "silk", + "example_app", ) -ROOT_URLCONF = 'project.urls' +ROOT_URLCONF = "project.urls" -DEFAULT_AUTO_FIELD = 'django.db.models.AutoField' +DEFAULT_AUTO_FIELD = "django.db.models.AutoField" MIDDLEWARE = [ - 'django.contrib.sessions.middleware.SessionMiddleware', - 'django.middleware.common.CommonMiddleware', - 'django.middleware.csrf.CsrfViewMiddleware', - 'django.contrib.auth.middleware.AuthenticationMiddleware', - 'django.contrib.messages.middleware.MessageMiddleware', - 'django.middleware.clickjacking.XFrameOptionsMiddleware', - 'silk.middleware.SilkyMiddleware' + "django.contrib.sessions.middleware.SessionMiddleware", + "django.middleware.common.CommonMiddleware", + "django.middleware.csrf.CsrfViewMiddleware", + "django.contrib.auth.middleware.AuthenticationMiddleware", + 
"django.contrib.messages.middleware.MessageMiddleware", + "django.middleware.clickjacking.XFrameOptionsMiddleware", + "silk.middleware.SilkyMiddleware", ] -WSGI_APPLICATION = 'wsgi.application' +WSGI_APPLICATION = "wsgi.application" DB_ENGINE = os.environ.get("DB_ENGINE", "postgresql") @@ -42,49 +42,44 @@ "default": { "ENGINE": f"django.db.backends.{DB_ENGINE}", "NAME": os.environ.get("DB_NAME", "postgres"), - "USER": os.environ.get("DB_USER", 'postgres'), + "USER": os.environ.get("DB_USER", "postgres"), "PASSWORD": os.environ.get("DB_PASSWORD", "postgres"), "HOST": os.environ.get("DB_HOST", "127.0.0.1"), "PORT": os.environ.get("DB_PORT", 5432), - "ATOMIC_REQUESTS": True + "ATOMIC_REQUESTS": True, }, } -LANGUAGE_CODE = 'en-us' +LANGUAGE_CODE = "en-us" -TIME_ZONE = 'UTC' +TIME_ZONE = "UTC" USE_I18N = True USE_TZ = True LOGGING = { - 'version': 1, - 'formatters': { - 'mosayc': { - 'format': '%(asctime)-15s %(levelname)-7s %(message)s [%(funcName)s (%(filename)s:%(lineno)s)]', + "version": 1, + "formatters": { + "mosayc": { + "format": "%(asctime)-15s %(levelname)-7s %(message)s [%(funcName)s (%(filename)s:%(lineno)s)]", } }, - 'handlers': { - 'console': { - 'level': 'DEBUG', - 'class': 'logging.StreamHandler', - 'formatter': 'mosayc' - } - }, - 'loggers': { - 'silk': { - 'handlers': ['console'], - 'level': 'DEBUG' + "handlers": { + "console": { + "level": "DEBUG", + "class": "logging.StreamHandler", + "formatter": "mosayc", } }, + "loggers": {"silk": {"handlers": ["console"], "level": "DEBUG"}}, } -STATIC_URL = '/static/' +STATIC_URL = "/static/" STATICFILES_FINDERS = ( - 'django.contrib.staticfiles.finders.FileSystemFinder', - 'django.contrib.staticfiles.finders.AppDirectoriesFinder', + "django.contrib.staticfiles.finders.FileSystemFinder", + "django.contrib.staticfiles.finders.AppDirectoriesFinder", ) TEMP_DIR = os.path.join(BASE_DIR, "tmp") @@ -93,30 +88,30 @@ if not os.path.exists(STATIC_ROOT): os.makedirs(STATIC_ROOT) -MEDIA_ROOT = BASE_DIR + '/media/' -MEDIA_URL = '/media/' +MEDIA_ROOT = BASE_DIR + "/media/" +MEDIA_URL = "/media/" if not os.path.exists(MEDIA_ROOT): os.mkdir(MEDIA_ROOT) TEMPLATES = [ { - 'BACKEND': 'django.template.backends.django.DjangoTemplates', - 'DIRS': [], - 'APP_DIRS': True, - 'OPTIONS': { - 'context_processors': [ - 'django.template.context_processors.debug', - 'django.template.context_processors.request', - 'django.contrib.auth.context_processors.auth', - 'django.contrib.messages.context_processors.messages', + "BACKEND": "django.template.backends.django.DjangoTemplates", + "DIRS": [], + "APP_DIRS": True, + "OPTIONS": { + "context_processors": [ + "django.template.context_processors.debug", + "django.template.context_processors.request", + "django.contrib.auth.context_processors.auth", + "django.contrib.messages.context_processors.messages", ], }, }, ] -LOGIN_URL = '/login/' -LOGIN_REDIRECT_URL = '/' +LOGIN_URL = "/login/" +LOGIN_REDIRECT_URL = "/" SILKY_META = True SILKY_PYTHON_PROFILER = True diff --git a/project/project/urls.py b/project/project/urls.py index e331a636..b57a8c21 100644 --- a/project/project/urls.py +++ b/project/project/urls.py @@ -6,27 +6,26 @@ urlpatterns = [ path( - route='silk/', - view=include('silk.urls', namespace='silk'), + route="silk/", + view=include("silk.urls", namespace="silk"), ), path( - route='example_app/', - view=include('example_app.urls', namespace='example_app'), + route="example_app/", + view=include("example_app.urls", namespace="example_app"), ), - path(route='admin/', view=admin.site.urls), + path(route="admin/", 
view=admin.site.urls), ] urlpatterns += [ path( - route='login/', - view=views.LoginView.as_view( - template_name='example_app/login.html' - ), - name='login', + route="login/", + view=views.LoginView.as_view(template_name="example_app/login.html"), + name="login", ), ] -urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) + \ - static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) +urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) + static( + settings.MEDIA_URL, document_root=settings.MEDIA_ROOT +) diff --git a/project/tests/data/__init__.py b/project/tests/data/__init__.py index 037d9736..afb804c7 100644 --- a/project/tests/data/__init__.py +++ b/project/tests/data/__init__.py @@ -1 +1 @@ -__author__ = 'mtford' +__author__ = "mtford" diff --git a/project/tests/data/dynamic.py b/project/tests/data/dynamic.py index 43fdc07f..ff39e12e 100644 --- a/project/tests/data/dynamic.py +++ b/project/tests/data/dynamic.py @@ -1,10 +1,10 @@ def foo(): - print('1') - print('2') - print('3') + print("1") + print("2") + print("3") def foo2(): - print('1') - print('2') - print('3') + print("1") + print("2") + print("3") diff --git a/project/tests/factories.py b/project/tests/factories.py index 30e98616..37ce0b9c 100644 --- a/project/tests/factories.py +++ b/project/tests/factories.py @@ -4,14 +4,14 @@ from silk.models import Request, Response, SQLQuery -HTTP_METHODS = ['GET', 'POST', 'PUT', 'PATCH', 'HEAD', 'OPTIONS'] +HTTP_METHODS = ["GET", "POST", "PUT", "PATCH", "HEAD", "OPTIONS"] STATUS_CODES = [200, 201, 300, 301, 302, 401, 403, 404] class SQLQueryFactory(factory.django.DjangoModelFactory): - query = factory.Sequence(lambda num: 'SELECT foo FROM bar WHERE foo=%s' % num) - traceback = factory.Sequence(lambda num: 'Traceback #%s' % num) + query = factory.Sequence(lambda num: "SELECT foo FROM bar WHERE foo=%s" % num) + traceback = factory.Sequence(lambda num: "Traceback #%s" % num) class Meta: model = SQLQuery @@ -19,7 +19,7 @@ class Meta: class RequestMinFactory(factory.django.DjangoModelFactory): - path = factory.Faker('uri_path') + path = factory.Faker("uri_path") method = factory.fuzzy.FuzzyChoice(HTTP_METHODS) class Meta: @@ -35,8 +35,8 @@ class Meta: class BlindFactory(factory.django.DjangoModelFactory): - name = factory.Faker('pystr', min_chars=5, max_chars=10) - child_safe = factory.Faker('pybool') + name = factory.Faker("pystr", min_chars=5, max_chars=10) + child_safe = factory.Faker("pybool") photo = factory.django.ImageField() class Meta: diff --git a/project/tests/test_code.py b/project/tests/test_code.py index db891e36..3bc72e6d 100644 --- a/project/tests/test_code.py +++ b/project/tests/test_code.py @@ -9,17 +9,19 @@ END_LINE_NUM = 10 with open(__file__) as f: - ACTUAL_LINES = [line + '\n' for line in f.read().split('\n')] + ACTUAL_LINES = [line + "\n" for line in f.read().split("\n")] class CodeTestCase(TestCase): def assertActualLineEqual(self, actual_line, end_line_num=None): - expected_actual_line = ACTUAL_LINES[LINE_NUM - 1:end_line_num or LINE_NUM] + expected_actual_line = ACTUAL_LINES[LINE_NUM - 1 : end_line_num or LINE_NUM] self.assertEqual(actual_line, expected_actual_line) def assertCodeEqual(self, code): - expected_code = [line.strip('\n') for line in ACTUAL_LINES[0:LINE_NUM + 10]] + [''] + expected_code = [ + line.strip("\n") for line in ACTUAL_LINES[0 : LINE_NUM + 10] + ] + [""] self.assertEqual(code, expected_code) def test_code(self): @@ -30,19 +32,25 @@ def test_code(self): def test_code_context(self): for 
end_line_num in None, END_LINE_NUM: - for prefix in '', 'salchicha_': + for prefix in "", "salchicha_": context = _code_context(FILE_PATH, LINE_NUM, end_line_num, prefix) - self.assertActualLineEqual(context[prefix + 'actual_line'], end_line_num) - self.assertCodeEqual(context[prefix + 'code']) - self.assertEqual(context[prefix + 'file_path'], FILE_PATH) - self.assertEqual(context[prefix + 'line_num'], LINE_NUM) + self.assertActualLineEqual( + context[prefix + "actual_line"], end_line_num + ) + self.assertCodeEqual(context[prefix + "code"]) + self.assertEqual(context[prefix + "file_path"], FILE_PATH) + self.assertEqual(context[prefix + "line_num"], LINE_NUM) def test_code_context_from_request(self): for end_line_num in None, END_LINE_NUM: - for prefix in '', 'salchicha_': - request = namedtuple('Request', 'GET')(dict(file_path=FILE_PATH, line_num=LINE_NUM)) + for prefix in "", "salchicha_": + request = namedtuple("Request", "GET")( + dict(file_path=FILE_PATH, line_num=LINE_NUM) + ) context = _code_context_from_request(request, end_line_num, prefix) - self.assertActualLineEqual(context[prefix + 'actual_line'], end_line_num) - self.assertCodeEqual(context[prefix + 'code']) - self.assertEqual(context[prefix + 'file_path'], FILE_PATH) - self.assertEqual(context[prefix + 'line_num'], LINE_NUM) + self.assertActualLineEqual( + context[prefix + "actual_line"], end_line_num + ) + self.assertCodeEqual(context[prefix + "code"]) + self.assertEqual(context[prefix + "file_path"], FILE_PATH) + self.assertEqual(context[prefix + "line_num"], LINE_NUM) diff --git a/project/tests/test_code_gen_curl.py b/project/tests/test_code_gen_curl.py index 2288c2f3..ef055f2d 100644 --- a/project/tests/test_code_gen_curl.py +++ b/project/tests/test_code_gen_curl.py @@ -15,9 +15,16 @@ def test_post_json(self): result_words = shlex.split(result) - self.assertEqual(result_words, [ - 'curl', '-X', 'POST', - '-H', 'content-type: application/json', - '-d', '{"gamma": "delta"}', - 'https://example.org/alpha/beta' - ]) + self.assertEqual( + result_words, + [ + "curl", + "-X", + "POST", + "-H", + "content-type: application/json", + "-d", + '{"gamma": "delta"}', + "https://example.org/alpha/beta", + ], + ) diff --git a/project/tests/test_code_gen_django.py b/project/tests/test_code_gen_django.py index a5b3872a..ca99cd57 100644 --- a/project/tests/test_code_gen_django.py +++ b/project/tests/test_code_gen_django.py @@ -13,10 +13,15 @@ def test_post(self): content_type="application/x-www-form-urlencoded", ) - self.assertEqual(result, textwrap.dedent("""\ + self.assertEqual( + result, + textwrap.dedent( + """\ from django.test import Client c = Client() response = c.post(path='/alpha/beta', data={'gamma': 'delta', 'epsilon': 'zeta'}, content_type='application/x-www-form-urlencoded') - """)) + """ + ), + ) diff --git a/project/tests/test_collector.py b/project/tests/test_collector.py index 0bf5d8b6..ca380f67 100644 --- a/project/tests/test_collector.py +++ b/project/tests/test_collector.py @@ -65,26 +65,26 @@ def test_configure_exception(self): def test_profile_file_name_with_disabled_extended_file_name(self): SilkyConfig().SILKY_PYTHON_PROFILER_EXTENDED_FILE_NAME = False - request_path = 'normal/uri/' + request_path = "normal/uri/" resulting_prefix = self._get_prof_file_name(request_path) - self.assertEqual(resulting_prefix, '') + self.assertEqual(resulting_prefix, "") def test_profile_file_name_with_enabled_extended_file_name(self): SilkyConfig().SILKY_PYTHON_PROFILER_EXTENDED_FILE_NAME = True - request_path = 'normal/uri/' + 
request_path = "normal/uri/" resulting_prefix = self._get_prof_file_name(request_path) - self.assertEqual(resulting_prefix, 'normal_uri_') + self.assertEqual(resulting_prefix, "normal_uri_") def test_profile_file_name_with_path_traversal_and_special_char(self): SilkyConfig().SILKY_PYTHON_PROFILER_EXTENDED_FILE_NAME = True - request_path = 'spÉciàl/.././大/uri/@É/' + request_path = "spÉciàl/.././大/uri/@É/" resulting_prefix = self._get_prof_file_name(request_path) - self.assertEqual(resulting_prefix, 'special_uri_e_') + self.assertEqual(resulting_prefix, "special_uri_e_") def test_profile_file_name_with_long_path(self): SilkyConfig().SILKY_PYTHON_PROFILER_EXTENDED_FILE_NAME = True - request_path = 'long/path/' + 'a' * 100 + request_path = "long/path/" + "a" * 100 resulting_prefix = self._get_prof_file_name(request_path) # the path is limited to 50 char plus the last `_` self.assertEqual(len(resulting_prefix), 51) diff --git a/project/tests/test_compat.py b/project/tests/test_compat.py index 8deb3458..1a311f12 100644 --- a/project/tests/test_compat.py +++ b/project/tests/test_compat.py @@ -5,8 +5,8 @@ from silk.model_factory import ResponseModelFactory -DJANGO_META_CONTENT_TYPE = 'CONTENT_TYPE' -HTTP_CONTENT_TYPE = 'content-type' +DJANGO_META_CONTENT_TYPE = "CONTENT_TYPE" +HTTP_CONTENT_TYPE = "content-type" class TestByteStringCompatForResponse(TestCase): @@ -16,9 +16,9 @@ def test_bytes_compat(self): Test ResponseModelFactory formats json with bytes content """ mock = Mock() - mock.headers = {HTTP_CONTENT_TYPE: 'application/json;'} - d = {'k': 'v'} - mock.content = bytes(json.dumps(d), 'utf-8') + mock.headers = {HTTP_CONTENT_TYPE: "application/json;"} + d = {"k": "v"} + mock.content = bytes(json.dumps(d), "utf-8") mock.get = mock.headers.get factory = ResponseModelFactory(mock) body, content = factory.body() diff --git a/project/tests/test_config_auth.py b/project/tests/test_config_auth.py index f0bd7c72..40259dec 100644 --- a/project/tests/test_config_auth.py +++ b/project/tests/test_config_auth.py @@ -9,30 +9,34 @@ class TestAuth(TestCase): def test_authentication(self): SilkyConfig().SILKY_AUTHENTICATION = True - response = self.client.get(silky_reverse('requests')) + response = self.client.get(silky_reverse("requests")) self.assertEqual(response.status_code, 302) url = response.url try: # If we run tests within the django_silk project, a login url is available from example_app - self.assertIn(reverse('login'), url) + self.assertIn(reverse("login"), url) except NoReverseMatch: # Otherwise the Django default login url is used, in which case we can test for that instead - self.assertIn('http://testserver/login/', url) + self.assertIn("http://testserver/login/", url) def test_default_authorisation(self): SilkyConfig().SILKY_AUTHENTICATION = True SilkyConfig().SILKY_AUTHORISATION = True SilkyConfig().SILKY_PERMISSIONS = default_permissions - username_and_password = 'bob' # bob is an imbecile and uses the same pass as his username + username_and_password = ( + "bob" # bob is an imbecile and uses the same pass as his username + ) user = User.objects.create(username=username_and_password) user.set_password(username_and_password) user.save() - self.client.login(username=username_and_password, password=username_and_password) - response = self.client.get(silky_reverse('requests')) + self.client.login( + username=username_and_password, password=username_and_password + ) + response = self.client.get(silky_reverse("requests")) self.assertEqual(response.status_code, 403) user.is_staff = True user.save() 
- response = self.client.get(silky_reverse('requests')) + response = self.client.get(silky_reverse("requests")) self.assertEqual(response.status_code, 200) def test_custom_authorisation(self): @@ -40,17 +44,21 @@ def test_custom_authorisation(self): SilkyConfig().SILKY_AUTHORISATION = True def custom_authorisation(user): - return user.username.startswith('mike') + return user.username.startswith("mike") SilkyConfig().SILKY_PERMISSIONS = custom_authorisation - username_and_password = 'bob' # bob is an imbecile and uses the same pass as his username + username_and_password = ( + "bob" # bob is an imbecile and uses the same pass as his username + ) user = User.objects.create(username=username_and_password) user.set_password(username_and_password) user.save() - self.client.login(username=username_and_password, password=username_and_password) - response = self.client.get(silky_reverse('requests')) + self.client.login( + username=username_and_password, password=username_and_password + ) + response = self.client.get(silky_reverse("requests")) self.assertEqual(response.status_code, 403) - user.username = 'mike2' + user.username = "mike2" user.save() - response = self.client.get(silky_reverse('requests')) + response = self.client.get(silky_reverse("requests")) self.assertEqual(response.status_code, 200) diff --git a/project/tests/test_config_long_urls.py b/project/tests/test_config_long_urls.py index efaceb62..fa076fae 100644 --- a/project/tests/test_config_long_urls.py +++ b/project/tests/test_config_long_urls.py @@ -8,22 +8,22 @@ class TestLongRequestUrl(TestCase): def test_no_long_url(self): - url = '1234567890' * 19 # 190-character URL + url = "1234567890" * 19 # 190-character URL mock_request = Mock() - mock_request.headers = {'content-type': 'text/plain'} + mock_request.headers = {"content-type": "text/plain"} mock_request.GET = {} mock_request.path = url - mock_request.method = 'get' + mock_request.method = "get" request_model = RequestModelFactory(mock_request).construct_request_model() self.assertEqual(request_model.path, url) def test_long_url(self): - url = '1234567890' * 200 # 2000-character URL + url = "1234567890" * 200 # 2000-character URL mock_request = Mock() - mock_request.headers = {'content-type': 'text/plain'} + mock_request.headers = {"content-type": "text/plain"} mock_request.GET = {} - mock_request.method = 'get' + mock_request.method = "get" mock_request.path = url request_model = RequestModelFactory(mock_request).construct_request_model() - self.assertEqual(request_model.path, f'{url[:94]}...{url[1907:]}') + self.assertEqual(request_model.path, f"{url[:94]}...{url[1907:]}") self.assertEqual(len(request_model.path), 190) diff --git a/project/tests/test_config_max_body_size.py b/project/tests/test_config_max_body_size.py index a20f585a..26928d86 100644 --- a/project/tests/test_config_max_body_size.py +++ b/project/tests/test_config_max_body_size.py @@ -14,22 +14,22 @@ class TestMaxBodySizeRequest(TestCase): def test_no_max_request(self): SilkyConfig().SILKY_MAX_REQUEST_BODY_SIZE = -1 mock_request = Mock() - mock_request.headers = {'content-type': 'text/plain'} + mock_request.headers = {"content-type": "text/plain"} mock_request.GET = {} - mock_request.path = reverse('silk:requests') - mock_request.method = 'get' - mock_request.body = b'a' * 1000 # 1000 bytes? + mock_request.path = reverse("silk:requests") + mock_request.method = "get" + mock_request.body = b"a" * 1000 # 1000 bytes? 
request_model = RequestModelFactory(mock_request).construct_request_model() self.assertTrue(request_model.raw_body) def test_max_request(self): SilkyConfig().SILKY_MAX_REQUEST_BODY_SIZE = 10 # 10kb mock_request = Mock() - mock_request.headers = {'content-type': 'text/plain'} + mock_request.headers = {"content-type": "text/plain"} mock_request.GET = {} - mock_request.method = 'get' - mock_request.body = b'a' * 1024 * 100 # 100kb - mock_request.path = reverse('silk:requests') + mock_request.method = "get" + mock_request.body = b"a" * 1024 * 100 # 100kb + mock_request.path = reverse("silk:requests") request_model = RequestModelFactory(mock_request).construct_request_model() self.assertFalse(request_model.raw_body) @@ -42,8 +42,8 @@ def setUp(self): def test_no_max_response(self): SilkyConfig().SILKY_MAX_RESPONSE_BODY_SIZE = -1 mock_response = Mock() - mock_response.headers = {'content-type': 'text/plain'} - mock_response.content = b'a' * 1000 # 1000 bytes? + mock_response.headers = {"content-type": "text/plain"} + mock_response.content = b"a" * 1000 # 1000 bytes? mock_response.status_code = 200 mock_response.get = mock_response.headers.get response_model = ResponseModelFactory(mock_response).construct_response_model() @@ -52,8 +52,8 @@ def test_no_max_response(self): def test_max_response(self): SilkyConfig().SILKY_MAX_RESPONSE_BODY_SIZE = 10 # 10kb mock_response = Mock() - mock_response.headers = {'content-type': 'text/plain'} - mock_response.content = b'a' * 1024 * 100 # 100kb + mock_response.headers = {"content-type": "text/plain"} + mock_response.content = b"a" * 1024 * 100 # 100kb mock_response.status_code = 200 mock_response.get = mock_response.headers.get response_model = ResponseModelFactory(mock_response).construct_response_model() diff --git a/project/tests/test_config_meta.py b/project/tests/test_config_meta.py index fd4566b1..62e689d8 100644 --- a/project/tests/test_config_meta.py +++ b/project/tests/test_config_meta.py @@ -12,7 +12,8 @@ def fake_get_response(): def fake_response(): - return 'hello world' + return "hello world" + return fake_response @@ -23,14 +24,14 @@ def _mock_response(self): response.status_code = 200 response.queries = [] response.get = response.headers.get - response.content = '' + response.content = "" return response def _execute_request(self): delete_all_models(Request) DataCollector().configure(Request.objects.create()) response = self._mock_response() - SilkyMiddleware(fake_get_response)._process_response('', response) + SilkyMiddleware(fake_get_response)._process_response("", response) self.assertTrue(response.status_code == 200) objs = Request.objects.all() self.assertEqual(objs.count(), 1) @@ -40,9 +41,11 @@ def _execute_request(self): def test_enabled(self): SilkyConfig().SILKY_META = True r = self._execute_request() - self.assertTrue(r.meta_time is not None - or r.meta_num_queries is not None - or r.meta_time_spent_queries is not None) + self.assertTrue( + r.meta_time is not None + or r.meta_num_queries is not None + or r.meta_time_spent_queries is not None + ) def test_disabled(self): SilkyConfig().SILKY_META = False diff --git a/project/tests/test_db.py b/project/tests/test_db.py index 3157725d..8d50ba42 100644 --- a/project/tests/test_db.py +++ b/project/tests/test_db.py @@ -2,6 +2,7 @@ Test profiling of DB queries without mocking, to catch possible incompatibility """ + from django.shortcuts import reverse from django.test import Client, TestCase @@ -21,18 +22,18 @@ def setUpClass(cls): SilkyConfig().SILKY_META = False def 
test_profile_request_to_db(self): - DataCollector().configure(Request(reverse('example_app:index'))) + DataCollector().configure(Request(reverse("example_app:index"))) - with silk_profile(name='test_profile'): - resp = self.client.get(reverse('example_app:index')) + with silk_profile(name="test_profile"): + resp = self.client.get(reverse("example_app:index")) DataCollector().profiles.values() - assert len(resp.context['blinds']) == 5 + assert len(resp.context["blinds"]) == 5 def test_profile_request_to_db_with_constraints(self): - DataCollector().configure(Request(reverse('example_app:create'))) + DataCollector().configure(Request(reverse("example_app:create"))) - resp = self.client.post(reverse('example_app:create'), {'name': 'Foo'}) + resp = self.client.post(reverse("example_app:create"), {"name": "Foo"}) self.assertEqual(resp.status_code, 302) @@ -51,14 +52,14 @@ def tearDownClass(cls): SilkyConfig().SILKLY_ANALYZE_QUERIES = False def test_analyze_queries(self): - DataCollector().configure(Request(reverse('example_app:index'))) + DataCollector().configure(Request(reverse("example_app:index"))) client = Client() - with silk_profile(name='test_profile'): - resp = client.get(reverse('example_app:index')) + with silk_profile(name="test_profile"): + resp = client.get(reverse("example_app:index")) DataCollector().profiles.values() - assert len(resp.context['blinds']) == 5 + assert len(resp.context["blinds"]) == 5 class TestAnalyzeQueriesExplainParams(TestAnalyzeQueries): @@ -66,7 +67,7 @@ class TestAnalyzeQueriesExplainParams(TestAnalyzeQueries): @classmethod def setUpClass(cls): super().setUpClass() - SilkyConfig().SILKY_EXPLAIN_FLAGS = {'verbose': True} + SilkyConfig().SILKY_EXPLAIN_FLAGS = {"verbose": True} @classmethod def tearDownClass(cls): diff --git a/project/tests/test_dynamic_profiling.py b/project/tests/test_dynamic_profiling.py index f1e84107..bca15586 100644 --- a/project/tests/test_dynamic_profiling.py +++ b/project/tests/test_dynamic_profiling.py @@ -17,16 +17,16 @@ class TestGetModule(TestCase): """test for _get_module""" def test_singular(self): - module = _get_module('silk') - self.assertEqual(module.__class__.__name__, 'module') - self.assertEqual('silk', module.__name__) - self.assertTrue(hasattr(module, 'models')) + module = _get_module("silk") + self.assertEqual(module.__class__.__name__, "module") + self.assertEqual("silk", module.__name__) + self.assertTrue(hasattr(module, "models")) def test_dot(self): - module = _get_module('silk.models') - self.assertEqual(module.__class__.__name__, 'module') - self.assertEqual('silk.models', module.__name__) - self.assertTrue(hasattr(module, 'SQLQuery')) + module = _get_module("silk.models") + self.assertEqual(module.__class__.__name__, "module") + self.assertEqual("silk.models", module.__name__) + self.assertTrue(hasattr(module, "SQLQuery")) class TestGetParentModule(TestCase): @@ -54,7 +54,7 @@ def foo(): def source_file_name(): file_name = __file__ - if file_name[-1] == 'c': + if file_name[-1] == "c": file_name = file_name[:-1] return file_name @@ -66,34 +66,56 @@ def foo(_): pass # noinspection PyUnresolvedReferences - with patch.object(MyClass, 'foo', foo): - profile_function_or_method('tests.test_dynamic_profiling', 'MyClass.foo', 'test') + with patch.object(MyClass, "foo", foo): + profile_function_or_method( + "tests.test_dynamic_profiling", "MyClass.foo", "test" + ) dc = mock_data_collector() - with patch('silk.profiling.profiler.DataCollector', return_value=dc) as mock_DataCollector: + with patch( + 
"silk.profiling.profiler.DataCollector", return_value=dc + ) as mock_DataCollector: MyClass().foo() - self.assertEqual(mock_DataCollector.return_value.register_profile.call_count, 1) - call_args = mock_DataCollector.return_value.register_profile.call_args[0][0] - self.assertTrue(dict_contains({ - 'func_name': foo.__name__, - 'dynamic': True, - 'file_path': source_file_name(), - 'name': 'test', - 'line_num': foo.__code__.co_firstlineno - }, call_args)) + self.assertEqual( + mock_DataCollector.return_value.register_profile.call_count, 1 + ) + call_args = mock_DataCollector.return_value.register_profile.call_args[ + 0 + ][0] + self.assertTrue( + dict_contains( + { + "func_name": foo.__name__, + "dynamic": True, + "file_path": source_file_name(), + "name": "test", + "line_num": foo.__code__.co_firstlineno, + }, + call_args, + ) + ) def test_func_as_str(self): name = foo.__name__ line_num = foo.__code__.co_firstlineno - profile_function_or_method('tests.test_dynamic_profiling', 'foo', 'test') + profile_function_or_method("tests.test_dynamic_profiling", "foo", "test") dc = mock_data_collector() - with patch('silk.profiling.profiler.DataCollector', return_value=dc) as mock_DataCollector: + with patch( + "silk.profiling.profiler.DataCollector", return_value=dc + ) as mock_DataCollector: foo() - self.assertEqual(mock_DataCollector.return_value.register_profile.call_count, 1) + self.assertEqual( + mock_DataCollector.return_value.register_profile.call_count, 1 + ) call_args = mock_DataCollector.return_value.register_profile.call_args[0][0] - self.assertTrue(dict_contains({ - 'func_name': name, - 'dynamic': True, - 'file_path': source_file_name(), - 'name': 'test', - 'line_num': line_num - }, call_args)) + self.assertTrue( + dict_contains( + { + "func_name": name, + "dynamic": True, + "file_path": source_file_name(), + "name": "test", + "line_num": line_num, + }, + call_args, + ) + ) diff --git a/project/tests/test_encoding.py b/project/tests/test_encoding.py index 20edc389..cc057a98 100644 --- a/project/tests/test_encoding.py +++ b/project/tests/test_encoding.py @@ -5,7 +5,7 @@ from silk.model_factory import RequestModelFactory, ResponseModelFactory -HTTP_CONTENT_TYPE = 'content-type' +HTTP_CONTENT_TYPE = "content-type" class TestEncodingForRequests(TestCase): @@ -15,8 +15,8 @@ class TestEncodingForRequests(TestCase): def test_utf_plain(self): mock_request = Mock() - mock_request.headers = {HTTP_CONTENT_TYPE: 'text/plain; charset=UTF-8'} - mock_request.body = '语' + mock_request.headers = {HTTP_CONTENT_TYPE: "text/plain; charset=UTF-8"} + mock_request.body = "语" mock_request.get = mock_request.headers.get factory = RequestModelFactory(mock_request) body, raw_body = factory.body() @@ -25,8 +25,8 @@ def test_utf_plain(self): def test_plain(self): mock_request = Mock() - mock_request.headers = {HTTP_CONTENT_TYPE: 'text/plain'} - mock_request.body = 'sdfsdf' + mock_request.headers = {HTTP_CONTENT_TYPE: "text/plain"} + mock_request.body = "sdfsdf" mock_request.get = mock_request.headers.get factory = RequestModelFactory(mock_request) body, raw_body = factory.body() @@ -35,8 +35,8 @@ def test_plain(self): def test_utf_json_not_encoded(self): mock_request = Mock() - mock_request.headers = {HTTP_CONTENT_TYPE: 'application/json; charset=UTF-8'} - d = {'x': '语'} + mock_request.headers = {HTTP_CONTENT_TYPE: "application/json; charset=UTF-8"} + d = {"x": "语"} mock_request.body = json.dumps(d) mock_request.get = mock_request.headers.get factory = RequestModelFactory(mock_request) @@ -46,32 +46,32 @@ def 
test_utf_json_not_encoded(self): def test_utf_json_encoded(self): mock_request = Mock() - mock_request.headers = {HTTP_CONTENT_TYPE: 'application/json; charset=UTF-8'} - d = {'x': '语'} - mock_request.body = json.dumps(d).encode('UTF-8') + mock_request.headers = {HTTP_CONTENT_TYPE: "application/json; charset=UTF-8"} + d = {"x": "语"} + mock_request.body = json.dumps(d).encode("UTF-8") mock_request.get = mock_request.headers.get factory = RequestModelFactory(mock_request) body, raw_body = factory.body() self.assertDictEqual(json.loads(body), d) - self.assertEqual(raw_body, mock_request.body.decode('UTF-8')) + self.assertEqual(raw_body, mock_request.body.decode("UTF-8")) def test_utf_json_encoded_no_charset(self): """default to UTF-8""" mock_request = Mock() - mock_request.headers = {HTTP_CONTENT_TYPE: 'application/json'} - d = {'x': '语'} - mock_request.body = json.dumps(d).encode('UTF-8') + mock_request.headers = {HTTP_CONTENT_TYPE: "application/json"} + d = {"x": "语"} + mock_request.body = json.dumps(d).encode("UTF-8") mock_request.get = mock_request.headers.get factory = RequestModelFactory(mock_request) body, raw_body = factory.body() self.assertDictEqual(json.loads(body), d) - self.assertEqual(raw_body, mock_request.body.decode('UTF-8')) + self.assertEqual(raw_body, mock_request.body.decode("UTF-8")) def test_invalid_encoding_json(self): mock_request = Mock() - mock_request.headers = {HTTP_CONTENT_TYPE: 'application/json; charset=asdas-8'} - d = {'x': '语'} - mock_request.body = json.dumps(d).encode('UTF-8') + mock_request.headers = {HTTP_CONTENT_TYPE: "application/json; charset=asdas-8"} + d = {"x": "语"} + mock_request.body = json.dumps(d).encode("UTF-8") mock_request.get = mock_request.headers.get factory = RequestModelFactory(mock_request) body, raw_body = factory.body() @@ -86,8 +86,8 @@ class TestEncodingForResponse(TestCase): def test_utf_plain(self): mock = Mock() - mock.headers = {HTTP_CONTENT_TYPE: 'text/plain; charset=UTF-8'} - mock.content = '语' + mock.headers = {HTTP_CONTENT_TYPE: "text/plain; charset=UTF-8"} + mock.content = "语" mock.get = mock.headers.get factory = ResponseModelFactory(mock) body, content = factory.body() @@ -96,8 +96,8 @@ def test_utf_plain(self): def test_plain(self): mock = Mock() - mock.headers = {HTTP_CONTENT_TYPE: 'text/plain'} - mock.content = 'sdfsdf' + mock.headers = {HTTP_CONTENT_TYPE: "text/plain"} + mock.content = "sdfsdf" mock.get = mock.headers.get factory = ResponseModelFactory(mock) body, content = factory.body() @@ -106,8 +106,8 @@ def test_plain(self): def test_utf_json_not_encoded(self): mock = Mock() - mock.headers = {HTTP_CONTENT_TYPE: 'application/json; charset=UTF-8'} - d = {'x': '语'} + mock.headers = {HTTP_CONTENT_TYPE: "application/json; charset=UTF-8"} + d = {"x": "语"} mock.content = json.dumps(d) mock.get = mock.headers.get factory = ResponseModelFactory(mock) @@ -117,8 +117,8 @@ def test_utf_json_not_encoded(self): def test_utf_json_encoded(self): mock = Mock() - mock.headers = {HTTP_CONTENT_TYPE: 'application/json; charset=UTF-8'} - d = {'x': '语'} + mock.headers = {HTTP_CONTENT_TYPE: "application/json; charset=UTF-8"} + d = {"x": "语"} mock.content = json.dumps(d) mock.get = mock.headers.get factory = ResponseModelFactory(mock) @@ -129,8 +129,8 @@ def test_utf_json_encoded(self): def test_utf_json_encoded_no_charset(self): """default to UTF-8""" mock = Mock() - mock.headers = {HTTP_CONTENT_TYPE: 'application/json'} - d = {'x': '语'} + mock.headers = {HTTP_CONTENT_TYPE: "application/json"} + d = {"x": "语"} mock.content = 
json.dumps(d) mock.get = mock.headers.get factory = ResponseModelFactory(mock) @@ -140,8 +140,8 @@ def test_utf_json_encoded_no_charset(self): def test_invalid_encoding_json(self): mock = Mock() - mock.headers = {HTTP_CONTENT_TYPE: 'application/json; charset=asdas-8'} - d = {'x': '语'} + mock.headers = {HTTP_CONTENT_TYPE: "application/json; charset=asdas-8"} + d = {"x": "语"} mock.content = json.dumps(d) mock.get = mock.headers.get factory = ResponseModelFactory(mock) diff --git a/project/tests/test_end_points.py b/project/tests/test_end_points.py index 58e520ff..a337e321 100644 --- a/project/tests/test_end_points.py +++ b/project/tests/test_end_points.py @@ -29,145 +29,151 @@ def setUpClass(cls): mock_suite.mock_request() def test_summary(self): - response = self.client.get(silky_reverse('summary')) + response = self.client.get(silky_reverse("summary")) self.assertTrue(response.status_code == 200) def test_requests(self): - response = self.client.get(silky_reverse('requests')) + response = self.client.get(silky_reverse("requests")) self.assertTrue(response.status_code == 200) def test_request_detail_on_get_request(self): request_id = random.choice( - models.Request.objects.filter(method='GET').values_list('id', flat=True), + models.Request.objects.filter(method="GET").values_list("id", flat=True), + ) + response = self.client.get( + silky_reverse("request_detail", kwargs={"request_id": request_id}) ) - response = self.client.get(silky_reverse('request_detail', kwargs={ - 'request_id': request_id - })) self.assertEqual(response.status_code, 200) def test_request_detail_on_post_request(self): request_id = random.choice( - models.Request.objects.filter(method='POST').values_list('id', flat=True), + models.Request.objects.filter(method="POST").values_list("id", flat=True), + ) + response = self.client.get( + silky_reverse("request_detail", kwargs={"request_id": request_id}) ) - response = self.client.get(silky_reverse('request_detail', kwargs={ - 'request_id': request_id - })) self.assertEqual(response.status_code, 200) def test_request_sql(self): request_query_data = random.choice( - models.SQLQuery.objects - .values('request_id') - .filter(request_id__isnull=False) + models.SQLQuery.objects.values("request_id").filter( + request_id__isnull=False + ) + ) + request_id = request_query_data["request_id"] + response = self.client.get( + silky_reverse("request_sql", kwargs={"request_id": request_id}) ) - request_id = request_query_data['request_id'] - response = self.client.get(silky_reverse('request_sql', kwargs={'request_id': request_id})) self.assertTrue(response.status_code == 200) def test_request_sql_detail(self): kwargs = random.choice( - models.SQLQuery.objects - .annotate(sql_id=F('id')) - .values('sql_id', 'request_id') + models.SQLQuery.objects.annotate(sql_id=F("id")) + .values("sql_id", "request_id") .filter(request_id__isnull=False) ) - response = self.client.get(silky_reverse('request_sql_detail', kwargs=kwargs)) + response = self.client.get(silky_reverse("request_sql_detail", kwargs=kwargs)) self.assertTrue(response.status_code == 200) def test_raw(self): request_query_data = random.choice( - models.Request.objects - .values('id') - .filter(body__isnull=False) - ) - request_id = request_query_data['id'] - url = reverse('silk:raw', kwargs={ - 'request_id': request_id - }) + '?typ=request&subtyp=processed' + models.Request.objects.values("id").filter(body__isnull=False) + ) + request_id = request_query_data["id"] + url = ( + reverse("silk:raw", kwargs={"request_id": request_id}) + + 
"?typ=request&subtyp=processed" + ) response = self.client.get(url) code = response.status_code self.assertTrue(code == 200) def test_request_profiling(self): request_id = random.choice( - models.Profile.objects - .values('request_id') - .filter(request_id__isnull=False) + models.Profile.objects.values("request_id").filter(request_id__isnull=False) + ) + response = self.client.get( + silky_reverse("request_profiling", kwargs=request_id) ) - response = self.client.get(silky_reverse('request_profiling', kwargs=request_id)) self.assertTrue(response.status_code == 200) def test_request_profile_detail(self): kwargs = random.choice( - models.Profile.objects - .annotate(profile_id=F('id')) - .values('profile_id', 'request_id') + models.Profile.objects.annotate(profile_id=F("id")) + .values("profile_id", "request_id") .filter(request_id__isnull=False) ) - response = self.client.get(silky_reverse('request_profile_detail', kwargs=kwargs)) + response = self.client.get( + silky_reverse("request_profile_detail", kwargs=kwargs) + ) self.assertTrue(response.status_code == 200) def test_request_and_profile_sql(self): kwargs = random.choice( - models.Profile.objects - .annotate(num=Count('queries'), profile_id=F('id')) - .values('profile_id', 'request_id') + models.Profile.objects.annotate(num=Count("queries"), profile_id=F("id")) + .values("profile_id", "request_id") .filter(request_id__isnull=False, num__gt=0) ) - response = self.client.get(silky_reverse('request_and_profile_sql', kwargs=kwargs)) + response = self.client.get( + silky_reverse("request_and_profile_sql", kwargs=kwargs) + ) self.assertTrue(response.status_code == 200) def test_request_and_profile_sql_detail(self): random_profile = random.choice( - models.Profile.objects - .annotate(num=Count('queries'), profile_id=F('id')) - .values('profile_id', 'request_id') + models.Profile.objects.annotate(num=Count("queries"), profile_id=F("id")) + .values("profile_id", "request_id") .filter(request_id__isnull=False, num__gt=0) ) random_sql_query = random.choice( - models.SQLQuery.objects - .annotate(sql_id=F('id')) - .values('sql_id') - .filter(profiles__id=random_profile['profile_id']) + models.SQLQuery.objects.annotate(sql_id=F("id")) + .values("sql_id") + .filter(profiles__id=random_profile["profile_id"]) ) kwargs = {} kwargs.update(random_profile) kwargs.update(random_sql_query) - response = self.client.get(silky_reverse('request_and_profile_sql_detail', kwargs=kwargs)) + response = self.client.get( + silky_reverse("request_and_profile_sql_detail", kwargs=kwargs) + ) self.assertTrue(response.status_code == 200) def test_profile_detail(self): - profile_query_data = random.choice(models.Profile.objects.values('id')) - profile_id = profile_query_data['id'] - response = self.client.get(silky_reverse('profile_detail', kwargs={ - 'profile_id': profile_id - })) + profile_query_data = random.choice(models.Profile.objects.values("id")) + profile_id = profile_query_data["id"] + response = self.client.get( + silky_reverse("profile_detail", kwargs={"profile_id": profile_id}) + ) self.assertTrue(response.status_code == 200) def test_profile_sql(self): profile_query_data = random.choice( - models.Profile.objects - .annotate(num=Count('queries')) - .values('id') + models.Profile.objects.annotate(num=Count("queries")) + .values("id") .filter(num__gt=0) ) - profile_id = profile_query_data['id'] - response = self.client.get(silky_reverse('profile_sql', kwargs={'profile_id': profile_id})) + profile_id = profile_query_data["id"] + response = self.client.get( + 
silky_reverse("profile_sql", kwargs={"profile_id": profile_id}) + ) self.assertTrue(response.status_code == 200) def test_profile_sql_detail(self): profile_query_data = random.choice( - models.Profile.objects - .annotate(num=Count('queries')) - .values('id') + models.Profile.objects.annotate(num=Count("queries")) + .values("id") .filter(num__gt=0) ) - profile_id = profile_query_data['id'] + profile_id = profile_query_data["id"] sql_id = random.choice(models.SQLQuery.objects.filter(profiles=profile_id)).pk - response = self.client.get(silky_reverse('profile_sql_detail', kwargs={'profile_id': profile_id, - 'sql_id': sql_id})) + response = self.client.get( + silky_reverse( + "profile_sql_detail", + kwargs={"profile_id": profile_id, "sql_id": sql_id}, + ) + ) self.assertTrue(response.status_code == 200) def test_profiling(self): - response = self.client.get(silky_reverse('profiling')) + response = self.client.get(silky_reverse("profiling")) self.assertTrue(response.status_code == 200) diff --git a/project/tests/test_execute_sql.py b/project/tests/test_execute_sql.py index e84fdd62..252016a9 100644 --- a/project/tests/test_execute_sql.py +++ b/project/tests/test_execute_sql.py @@ -10,24 +10,26 @@ def mock_sql(): - mock_sql_query = Mock(spec_set=['_execute_sql', 'query', 'as_sql', 'connection']) + mock_sql_query = Mock(spec_set=["_execute_sql", "query", "as_sql", "connection"]) mock_sql_query._execute_sql = Mock() - mock_sql_query.query = NonCallableMock(spec_set=['model']) + mock_sql_query.query = NonCallableMock(spec_set=["model"]) mock_sql_query.query.model = Mock() - query_string = 'SELECT * from table_name' + query_string = "SELECT * from table_name" mock_sql_query.as_sql = Mock(return_value=(query_string, ())) mock_sql_query.connection = NonCallableMock( - spec_set=['cursor', 'features', 'ops'], + spec_set=["cursor", "features", "ops"], cursor=Mock( - spec_set=['__call__'], - return_value=NonCallableMagicMock(spec_set=['__enter__', '__exit__', 'execute']) + spec_set=["__call__"], + return_value=NonCallableMagicMock( + spec_set=["__enter__", "__exit__", "execute"] + ), ), features=NonCallableMock( - spec_set=['supports_explaining_query_execution'], - supports_explaining_query_execution=True + spec_set=["supports_explaining_query_execution"], + supports_explaining_query_execution=True, ), - ops=NonCallableMock(spec_set=['explain_query_prefix']), + ops=NonCallableMock(spec_set=["explain_query_prefix"]), ) return mock_sql_query, query_string @@ -37,10 +39,7 @@ def call_execute_sql(cls, request): DataCollector().configure(request=request) delete_all_models(SQLQuery) cls.mock_sql, cls.query_string = mock_sql() - kwargs = { - 'one': 1, - 'two': 2 - } + kwargs = {"one": 1, "two": 2} cls.args = [1, 2] cls.kwargs = kwargs execute_sql(cls.mock_sql, *cls.args, **cls.kwargs) @@ -79,7 +78,7 @@ def test_count(self): def test_query(self): query = list(DataCollector().queries.values())[0] - self.assertEqual(query['query'], self.query_string) + self.assertEqual(query["query"], self.query_string) class TestCallSilky(TestCase): @@ -89,10 +88,10 @@ def tearDown(self): def test_no_effect(self): DataCollector().configure() sql, _ = mock_sql() - sql.query.model = NonCallableMagicMock(spec_set=['__module__']) - sql.query.model.__module__ = 'silk.models' + sql.query.model = NonCallableMagicMock(spec_set=["__module__"]) + sql.query.model.__module__ = "silk.models" # No SQLQuery models should be created for silk requests for obvious reasons - with patch('silk.sql.DataCollector', return_value=Mock()) as 
mock_DataCollector: + with patch("silk.sql.DataCollector", return_value=Mock()) as mock_DataCollector: execute_sql(sql) self.assertFalse(mock_DataCollector().register_query.call_count) @@ -105,25 +104,31 @@ def _query(self): try: query = list(DataCollector().queries.values())[0] except IndexError: - self.fail('No queries created') + self.fail("No queries created") return query def test_request(self): - DataCollector().configure(request=Request.objects.create(path='/path/to/somewhere')) + DataCollector().configure( + request=Request.objects.create(path="/path/to/somewhere") + ) sql, _ = mock_sql() execute_sql(sql) query = self._query() - self.assertEqual(query['request'], DataCollector().request) + self.assertEqual(query["request"], DataCollector().request) def test_registration(self): - DataCollector().configure(request=Request.objects.create(path='/path/to/somewhere')) + DataCollector().configure( + request=Request.objects.create(path="/path/to/somewhere") + ) sql, _ = mock_sql() execute_sql(sql) query = self._query() self.assertIn(query, DataCollector().queries.values()) def test_explain(self): - DataCollector().configure(request=Request.objects.create(path='/path/to/somewhere')) + DataCollector().configure( + request=Request.objects.create(path="/path/to/somewhere") + ) sql, qs = mock_sql() prefix = "EXPLAIN" mock_cursor = sql.connection.cursor.return_value.__enter__.return_value diff --git a/project/tests/test_filters.py b/project/tests/test_filters.py index b466e829..949c5c6c 100644 --- a/project/tests/test_filters.py +++ b/project/tests/test_filters.py @@ -48,7 +48,9 @@ def test_seconds_filter(self): for r in requests: dt = r.start_time seconds = self._time_stamp(django_timezone.now()) - self._time_stamp(dt) - self.assertTrue(seconds < 6) # 6 to give a bit of leeway in case takes too long + self.assertTrue( + seconds < 6 + ) # 6 to give a bit of leeway in case takes too long def test_view_name_filter(self): requests = [mock_suite.mock_request() for _ in range(0, 10)] @@ -79,7 +81,9 @@ def test_num_queries_filter(self): def test_time_spent_queries_filter(self): requests = [mock_suite.mock_request() for _ in range(0, 10)] - time_taken = sorted(sum(q.time_taken for q in x.queries.all()) for x in requests) + time_taken = sorted( + sum(q.time_taken for q in x.queries.all()) for x in requests + ) c = time_taken[int(floor(len(time_taken) / 2))] time_taken_filter = TimeSpentOnQueriesFilter(c) query_set = models.Request.objects.all() @@ -213,7 +217,9 @@ def test_num_queries_filter(self): def test_time_spent_queries_filter(self): profiles = mock_suite.mock_profiles(n=10) - time_taken = sorted(sum(q.time_taken for q in x.queries.all()) for x in profiles) + time_taken = sorted( + sum(q.time_taken for q in x.queries.all()) for x in profiles + ) c = time_taken[int(floor(len(time_taken) / 2))] time_taken_filter = TimeSpentOnQueriesFilter(c) query_set = models.Profile.objects.all() diff --git a/project/tests/test_lib/__init__.py b/project/tests/test_lib/__init__.py index 037d9736..afb804c7 100644 --- a/project/tests/test_lib/__init__.py +++ b/project/tests/test_lib/__init__.py @@ -1 +1 @@ -__author__ = 'mtford' +__author__ = "mtford" diff --git a/project/tests/test_lib/mock_suite.py b/project/tests/test_lib/mock_suite.py index 91d6f56a..48b1971c 100644 --- a/project/tests/test_lib/mock_suite.py +++ b/project/tests/test_lib/mock_suite.py @@ -15,36 +15,53 @@ class MockSuite: """ Provides some fake data to play around with. 
Also useful for testing """ - methods = ['GET', 'POST', 'PUT', 'PATCH', 'HEAD', 'OPTIONS'] - path_components = ['path', 'to', 'somewhere', 'around', 'here', 'bobs', 'your', 'uncle'] + + methods = ["GET", "POST", "PUT", "PATCH", "HEAD", "OPTIONS"] + path_components = [ + "path", + "to", + "somewhere", + "around", + "here", + "bobs", + "your", + "uncle", + ] status_codes = [200, 201, 300, 301, 302, 403, 404, 500] - profile_names = ['slow_bit_of_code', 'terrible_dependency', 'what_on_earth_is_this_code_doing'] + profile_names = [ + "slow_bit_of_code", + "terrible_dependency", + "what_on_earth_is_this_code_doing", + ] file_path = [os.path.realpath(__file__)] - func_names = ['', '', '', 'foo', 'bar'] - view_names = ['app:blah', 'index', 'root', 'xxx:xyx'] - sql_queries = [''' + func_names = ["", "", "", "foo", "bar"] + view_names = ["app:blah", "index", "root", "xxx:xyx"] + sql_queries = [ + """ SELECT Book.title AS Title, COUNT(*) AS Authors FROM Book JOIN Book_author ON Book.isbn = Book_author.isbn GROUP BY Book.title; - ''', - ''' + """, + """ SELECT * FROM table - ''', ''' + """, + """ SELECT * FROM Book WHERE price > 100.00 ORDER BY title; - ''', ''' + """, + """ SELECT title, COUNT(*) AS Authors FROM Book NATURAL JOIN Book_author GROUP BY title; - ''', - ''' + """, + """ SELECT A.Col1, A.Col2, B.Col1,B.Col2 FROM (SELECT RealTableZ.Col1, RealTableY.Col2, RealTableY.ID AS ID FROM RealTableZ @@ -54,25 +71,24 @@ class MockSuite: ) AS B INNER JOIN A ON A.ForeignKeyY=B.ID - '''] + """, + ] - response_content_types = ['text/html', 'application/json', 'text/css'] + response_content_types = ["text/html", "application/json", "text/css"] response_content = { - 'text/html': [''], - 'text/css': ['#blah {font-weight: bold}'], - 'application/json': ['[1, 2, 3]'] - } - request_content_types = ['application/json'] - request_content = { - 'application/json': ['{"blah": 5}'] + "text/html": [""], + "text/css": ["#blah {font-weight: bold}"], + "application/json": ["[1, 2, 3]"], } + request_content_types = ["application/json"] + request_content = {"application/json": ['{"blah": 5}']} def _random_method(self): return random.choice(self.methods) def _random_path(self): num_components = random.randint(1, 5) - return '/' + '/'.join(random.sample(self.path_components, num_components)) + '/' + return "/" + "/".join(random.sample(self.path_components, num_components)) + "/" def _random_query(self): return random.choice(self.sql_queries) @@ -81,13 +97,13 @@ def mock_sql_queries(self, request=None, profile=None, n=1, as_dict=False): start_time, end_time = self._random_time() queries = [] for _ in range(0, n): - tb = ''.join(reversed(traceback.format_stack())) + tb = "".join(reversed(traceback.format_stack())) d = { - 'query': self._random_query(), - 'start_time': start_time, - 'end_time': end_time, - 'request': request, - 'traceback': tb + "query": self._random_query(), + "start_time": start_time, + "end_time": end_time, + "request": request, + "traceback": tb, } if as_dict: queries.append(d) @@ -97,7 +113,7 @@ def mock_sql_queries(self, request=None, profile=None, n=1, as_dict=False): if profile: if as_dict: for q in queries: - profile['queries'].append(q) + profile["queries"].append(q) else: profile.queries.set(queries) return queries @@ -105,17 +121,18 @@ def mock_sql_queries(self, request=None, profile=None, n=1, as_dict=False): def mock_profile(self, request=None): start_time, end_time = self._random_time() dynamic = random.choice([True, False]) - profile = Profile.objects.create(start_time=start_time, - 
end_time=end_time, - request=request, - name=random.choice(self.profile_names), - file_path=random.choice(self.file_path), - line_num=3, - func_name=random.choice(self.func_names), - dynamic=dynamic, - end_line_num=6 if dynamic else None, - exception_raised=random.choice([True, False]) - ) + profile = Profile.objects.create( + start_time=start_time, + end_time=end_time, + request=request, + name=random.choice(self.profile_names), + file_path=random.choice(self.file_path), + line_num=3, + func_name=random.choice(self.func_names), + dynamic=dynamic, + end_line_num=6 if dynamic else None, + exception_raised=random.choice([True, False]), + ) self.mock_sql_queries(profile=profile, n=random.randint(0, 10)) return profile @@ -140,26 +157,30 @@ def mock_request(self): request_body = random.choice(self.request_content[request_content_type]) time_taken = end_time - start_time time_taken = time_taken.total_seconds() - request = models.Request.objects.create(method=self._random_method(), - path=self._random_path(), - num_sql_queries=num_sql_queries, - start_time=start_time, - end_time=end_time, - view_name=random.choice(self.view_names), - time_taken=time_taken, - encoded_headers=json.dumps({'content-type': request_content_type}), - body=request_body) + request = models.Request.objects.create( + method=self._random_method(), + path=self._random_path(), + num_sql_queries=num_sql_queries, + start_time=start_time, + end_time=end_time, + view_name=random.choice(self.view_names), + time_taken=time_taken, + encoded_headers=json.dumps({"content-type": request_content_type}), + body=request_body, + ) response_content_type = random.choice(self.response_content_types) response_body = random.choice(self.response_content[response_content_type]) - models.Response.objects.create(request=request, - status_code=random.choice(self.status_codes), - encoded_headers=json.dumps({'content-type': response_content_type}), - body=response_body) + models.Response.objects.create( + request=request, + status_code=random.choice(self.status_codes), + encoded_headers=json.dumps({"content-type": response_content_type}), + body=response_body, + ) self.mock_sql_queries(request=request, n=num_sql_queries) self.mock_profiles(request, random.randint(0, 2)) return request -if __name__ == '__main__': - management.call_command('flush', interactive=False) +if __name__ == "__main__": + management.call_command("flush", interactive=False) requests = [MockSuite().mock_request() for _ in range(0, 100)] diff --git a/project/tests/test_models.py b/project/tests/test_models.py index 03dd90fe..8fa8a987 100644 --- a/project/tests/test_models.py +++ b/project/tests/test_models.py @@ -40,11 +40,14 @@ def test_uuid_is_primary_key(self): self.assertIsInstance(self.obj.id, uuid.UUID) - @freeze_time('2016-01-01 12:00:00') + @freeze_time("2016-01-01 12:00:00") def test_start_time_field_default(self): obj = RequestMinFactory.create() - self.assertEqual(obj.start_time, datetime.datetime(2016, 1, 1, 12, 0, 0, tzinfo=datetime.timezone.utc)) + self.assertEqual( + obj.start_time, + datetime.datetime(2016, 1, 1, 12, 0, 0, tzinfo=datetime.timezone.utc), + ) def test_total_meta_time_if_have_no_meta_and_queries_time(self): @@ -69,7 +72,9 @@ def test_time_spent_on_sql_queries_if_has_no_related_SQLQueries(self): self.assertEqual(self.obj.time_spent_on_sql_queries, 0) - def test_time_spent_on_sql_queries_if_has_related_SQLQueries_with_no_time_taken(self): + def test_time_spent_on_sql_queries_if_has_related_SQLQueries_with_no_time_taken( + self, + ): query = 
SQLQueryFactory() self.obj.queries.add(query) @@ -96,7 +101,7 @@ def test_headers_if_has_encoded_headers(self): self.obj.encoded_headers = '{"some-header": "some_data"}' self.assertIsInstance(self.obj.headers, models.CaseInsensitiveDictionary) - self.assertDictEqual(self.obj.headers, {'some-header': 'some_data'}) + self.assertDictEqual(self.obj.headers, {"some-header": "some_data"}) def test_content_type_if_no_headers(self): @@ -139,7 +144,7 @@ def test_force_garbage_collect(self): def test_greedy_garbage_collect(self): for x in range(3): - obj = models.Request(path='/', method='get') + obj = models.Request(path="/", method="get") obj.save() self.assertEqual(models.Request.objects.count(), 4) SilkyConfig().SILKY_MAX_RECORDED_REQUESTS_CHECK_PERCENT = 50 @@ -149,48 +154,48 @@ def test_greedy_garbage_collect(self): def test_save_if_have_no_raw_body(self): - obj = models.Request(path='/some/path/', method='get') - self.assertEqual(obj.raw_body, '') + obj = models.Request(path="/some/path/", method="get") + self.assertEqual(obj.raw_body, "") obj.save() - self.assertEqual(obj.raw_body, '') + self.assertEqual(obj.raw_body, "") def test_save_if_have_raw_body(self): - obj = models.Request(path='/some/path/', method='get', raw_body='some text') + obj = models.Request(path="/some/path/", method="get", raw_body="some text") obj.save() - self.assertEqual(obj.raw_body, 'some text') + self.assertEqual(obj.raw_body, "some text") def test_save_if_have_no_body(self): - obj = models.Request(path='/some/path/', method='get') - self.assertEqual(obj.body, '') + obj = models.Request(path="/some/path/", method="get") + self.assertEqual(obj.body, "") obj.save() - self.assertEqual(obj.body, '') + self.assertEqual(obj.body, "") def test_save_if_have_body(self): - obj = models.Request(path='/some/path/', method='get', body='some text') + obj = models.Request(path="/some/path/", method="get", body="some text") obj.save() - self.assertEqual(obj.body, 'some text') + self.assertEqual(obj.body, "some text") def test_save_if_have_no_end_time(self): - obj = models.Request(path='/some/path/', method='get') + obj = models.Request(path="/some/path/", method="get") self.assertEqual(obj.time_taken, None) obj.save() self.assertEqual(obj.time_taken, None) - @freeze_time('2016-01-01 12:00:00') + @freeze_time("2016-01-01 12:00:00") def test_save_if_have_end_time(self): date = datetime.datetime(2016, 1, 1, 12, 0, 3, tzinfo=datetime.timezone.utc) - obj = models.Request(path='/some/path/', method='get', end_time=date) + obj = models.Request(path="/some/path/", method="get", end_time=date) obj.save() self.assertEqual(obj.end_time, date) self.assertEqual(obj.time_taken, 3000.0) def test_prof_file_default_storage(self): - obj = models.Request(path='/some/path/', method='get') + obj = models.Request(path="/some/path/", method="get") self.assertEqual(obj.prof_file.storage.__class__, ProfilerResultStorage) @@ -220,7 +225,7 @@ def test_headers_if_has_encoded_headers(self): self.obj.encoded_headers = '{"some-header": "some_data"}' self.assertIsInstance(self.obj.headers, models.CaseInsensitiveDictionary) - self.assertDictEqual(self.obj.headers, {'some-header': 'some_data'}) + self.assertDictEqual(self.obj.headers, {"some-header": "some_data"}) def test_content_type_if_no_headers(self): @@ -260,14 +265,21 @@ class SQLQueryTest(TestCase): def setUp(self): self.obj = SQLQueryFactory.create() - self.end_time = datetime.datetime(2016, 1, 1, 12, 0, 5, tzinfo=datetime.timezone.utc) - self.start_time = datetime.datetime(2016, 1, 1, 12, 0, 0, 
tzinfo=datetime.timezone.utc) - - @freeze_time('2016-01-01 12:00:00') + self.end_time = datetime.datetime( + 2016, 1, 1, 12, 0, 5, tzinfo=datetime.timezone.utc + ) + self.start_time = datetime.datetime( + 2016, 1, 1, 12, 0, 0, tzinfo=datetime.timezone.utc + ) + + @freeze_time("2016-01-01 12:00:00") def test_start_time_field_default(self): obj = SQLQueryFactory.create() - self.assertEqual(obj.start_time, datetime.datetime(2016, 1, 1, 12, 0, 0, tzinfo=datetime.timezone.utc)) + self.assertEqual( + obj.start_time, + datetime.datetime(2016, 1, 1, 12, 0, 0, tzinfo=datetime.timezone.utc), + ) def test_is_m2o_related_to_request(self): @@ -292,10 +304,12 @@ def test_traceback_ln_only(self): if self.quitting: raise BdbQuit BdbQuit""" - output = ('Traceback (most recent call last):\n' - ' pass\n' - ' return self.dispatch_return(frame, arg)\n' - ' if self.quitting: raise BdbQuit') + output = ( + "Traceback (most recent call last):\n" + " pass\n" + " return self.dispatch_return(frame, arg)\n" + " if self.quitting: raise BdbQuit" + ) self.assertEqual(self.obj.traceback_ln_only, output) @@ -359,7 +373,7 @@ def test_num_joins_if_multiple_statement_in_query(self): def test_tables_involved_if_no_query(self): - self.obj.query = '' + self.obj.query = "" self.assertEqual(self.obj.tables_involved, []) @@ -367,85 +381,87 @@ def test_tables_involved_if_query_has_only_a_from_token(self): query = """SELECT * FROM Book;""" self.obj.query = query - self.assertEqual(self.obj.tables_involved, ['Book;']) + self.assertEqual(self.obj.tables_involved, ["Book;"]) def test_tables_involved_if_query_has_a_join_token(self): query = """SELECT p.id FROM Person p JOIN Address a ON p.Id = a.Person_ID;""" self.obj.query = query - self.assertEqual(self.obj.tables_involved, ['Person', 'Address']) + self.assertEqual(self.obj.tables_involved, ["Person", "Address"]) def test_tables_involved_if_query_has_an_as_token(self): - query = 'SELECT Book.title AS Title FROM Book GROUP BY Book.title;' + query = "SELECT Book.title AS Title FROM Book GROUP BY Book.title;" self.obj.query = query - self.assertEqual(self.obj.tables_involved, ['Title', 'Book']) + self.assertEqual(self.obj.tables_involved, ["Title", "Book"]) # FIXME bug, not a feature def test_tables_involved_check_with_fake_a_from_token(self): query = """SELECT * FROM Book WHERE Book.title=`EVIL FROM WITHIN`;""" self.obj.query = query - self.assertEqual(self.obj.tables_involved, ['Book', 'WITHIN`;']) + self.assertEqual(self.obj.tables_involved, ["Book", "WITHIN`;"]) # FIXME bug, not a feature def test_tables_involved_check_with_fake_a_join_token(self): query = """SELECT * FROM Book WHERE Book.title=`Luke, join the dark side!`;""" self.obj.query = query - self.assertEqual(self.obj.tables_involved, ['Book', 'the']) + self.assertEqual(self.obj.tables_involved, ["Book", "the"]) # FIXME bug, not a feature def test_tables_involved_check_with_fake_an_as_token(self): query = """SELECT * FROM Book WHERE Book.title=`AS SOON AS POSIABLE`;""" self.obj.query = query - self.assertEqual(self.obj.tables_involved, ['Book', 'POSIABLE`;']) + self.assertEqual(self.obj.tables_involved, ["Book", "POSIABLE`;"]) def test_tables_involved_if_query_has_subquery(self): - query = '''SELECT A.Col1, A.Col2, B.Col1,B.Col2 + query = """SELECT A.Col1, A.Col2, B.Col1,B.Col2 FROM (SELECT RealTableZ.Col1, RealTableY.Col2, RealTableY.ID AS ID FROM RealTableZ LEFT OUTER JOIN RealTableY ON RealTableZ.ForeignKeyY=RealTableY.ID WHERE RealTableY.Col11>14 ) AS B INNER JOIN A - ON A.ForeignKeyY=B.ID;''' + ON A.ForeignKeyY=B.ID;""" 
self.obj.query = query - self.assertEqual(self.obj.tables_involved, ['ID', 'RealTableZ', 'RealTableY', 'B', 'A']) + self.assertEqual( + self.obj.tables_involved, ["ID", "RealTableZ", "RealTableY", "B", "A"] + ) # FIXME bug, not a feature def test_tables_involved_if_query_has_django_aliase_on_column_names(self): - query = 'SELECT foo AS bar FROM some_table;' + query = "SELECT foo AS bar FROM some_table;" self.obj.query = query - self.assertEqual(self.obj.tables_involved, ['bar', 'some_table;']) + self.assertEqual(self.obj.tables_involved, ["bar", "some_table;"]) def test_tables_involved_if_query_has_update_token(self): query = """UPDATE Book SET title = 'New Title' WHERE id = 1;""" self.obj.query = query - self.assertEqual(self.obj.tables_involved, ['Book']) + self.assertEqual(self.obj.tables_involved, ["Book"]) def test_tables_involved_in_complex_update_query(self): - query = '''UPDATE Person p + query = """UPDATE Person p SET p.name = (SELECT c.name FROM Company c WHERE c.id = p.company_id), p.salary = p.salary * 1.1 FROM Department d WHERE p.department_id = d.id AND d.budget > 100000; - ''' + """ self.obj.query = query - self.assertEqual(self.obj.tables_involved, ['Person', 'Company', 'Department']) + self.assertEqual(self.obj.tables_involved, ["Person", "Company", "Department"]) def test_tables_involved_in_update_with_subquery(self): - query = '''UPDATE Employee e + query = """UPDATE Employee e SET e.bonus = (SELECT AVG(salary) FROM Employee WHERE department_id = e.department_id) WHERE e.performance = 'excellent'; - ''' + """ self.obj.query = query - self.assertEqual(self.obj.tables_involved, ['Employee', 'Employee']) + self.assertEqual(self.obj.tables_involved, ["Employee", "Employee"]) def test_save_if_no_end_and_start_time(self): @@ -453,7 +469,7 @@ def test_save_if_no_end_and_start_time(self): self.assertEqual(obj.time_taken, None) - @freeze_time('2016-01-01 12:00:00') + @freeze_time("2016-01-01 12:00:00") def test_save_if_has_end_time(self): # datetime.datetime(2016, 1, 1, 12, 0, 5, tzinfo=datetime.timezone.utc) @@ -461,7 +477,7 @@ def test_save_if_has_end_time(self): self.assertEqual(obj.time_taken, 5000.0) - @freeze_time('2016-01-01 12:00:00') + @freeze_time("2016-01-01 12:00:00") def test_save_if_has_start_time(self): obj = SQLQueryFactory.create(start_time=self.start_time) @@ -520,7 +536,7 @@ class NoPendingMigrationsTest(TestCase): def test_no_pending_migrations(self): call_command("makemigrations", "silk", "--check", "--dry-run") - @override_settings(DEFAULT_AUTO_FIELD='django.db.models.BigAutoField') + @override_settings(DEFAULT_AUTO_FIELD="django.db.models.BigAutoField") def test_check_with_overridden_default_auto_field(self): """ Test with `BigAutoField` set as `DEFAULT_AUTO_FIELD` - which is diff --git a/project/tests/test_multipart_forms.py b/project/tests/test_multipart_forms.py index f4e9a8b6..2db9684a 100644 --- a/project/tests/test_multipart_forms.py +++ b/project/tests/test_multipart_forms.py @@ -10,12 +10,15 @@ class TestMultipartForms(TestCase): def test_no_max_request(self): mock_request = Mock() - mock_request.headers = {'content-type': multipart_form} + mock_request.headers = {"content-type": multipart_form} mock_request.GET = {} - mock_request.path = reverse('silk:requests') - mock_request.method = 'post' + mock_request.path = reverse("silk:requests") + mock_request.method = "post" mock_request.body = Mock() request_model = RequestModelFactory(mock_request).construct_request_model() self.assertFalse(request_model.body) - self.assertEqual(b"Raw body not 
available for multipart_form data, Silk is not showing file uploads.", request_model.raw_body) + self.assertEqual( + b"Raw body not available for multipart_form data, Silk is not showing file uploads.", + request_model.raw_body, + ) mock_request.body.assert_not_called() diff --git a/project/tests/test_profile_dot.py b/project/tests/test_profile_dot.py index 87ea0c58..7ccfca76 100644 --- a/project/tests/test_profile_dot.py +++ b/project/tests/test_profile_dot.py @@ -29,7 +29,7 @@ def _stats_file(cls): try: with tempfile.NamedTemporaryFile(delete=False) as stats: pass - cProfile.run('1+1', stats.name) + cProfile.run("1+1", stats.name) yield stats.name finally: os.unlink(stats.name) @@ -42,7 +42,7 @@ def _stats_data(cls): and removing the temp file on exit. """ with cls._stats_file() as filename: - with open(filename, 'rb') as f: + with open(filename, "rb") as f: yield f.read() @classmethod @@ -54,6 +54,7 @@ def _profile(cls): @contextmanager def dummy(_): yield filename + return _create_profile(filename, dummy) @classmethod @@ -84,7 +85,7 @@ def test_create_dot(self): # create dot with tempfile.NamedTemporaryFile(delete=False) as dotfile: dot = _create_dot(self._profile(), 5) - dotfile.write(dot.encode('utf-8')) + dotfile.write(dot.encode("utf-8")) # verify generated dot is valid G = read_dot(dotfile.name) @@ -97,11 +98,11 @@ def test_temp_file_from_file_field(self): """ Verify that data held in a file like object is copied to a temp file. """ - dummy_data = b'dummy data' + dummy_data = b"dummy data" stream = self._mock_file(dummy_data) with _temp_file_from_file_field(stream) as filename: - with open(filename, 'rb') as f: + with open(filename, "rb") as f: self.assertEqual(f.read(), dummy_data) # file should have been removed on exit diff --git a/project/tests/test_profile_parser.py b/project/tests/test_profile_parser.py index 3f9c9ef2..5fdede7d 100644 --- a/project/tests/test_profile_parser.py +++ b/project/tests/test_profile_parser.py @@ -15,7 +15,7 @@ def test_profile_parser(self): """ with contextlib.closing(io.StringIO()) as stream: with contextlib.redirect_stdout(stream): - cProfile.run('print()') + cProfile.run("print()") stream.seek(0) actual = list(parse_profile(stream)) @@ -28,17 +28,32 @@ def test_profile_parser(self): # ["1", "0.000", "0.000", "0.000", "0.000", "{method 'disable' of '_lsprof.Profiler' objects}"], # ] - exc_header = ["ncalls", "tottime", "percall", "cumtime", "percall", "filename:lineno(function)"] + exc_header = [ + "ncalls", + "tottime", + "percall", + "cumtime", + "percall", + "filename:lineno(function)", + ] self.assertEqual(actual[0], exc_header) exc_number = re.compile(r"\d(.\d+)?") exc_module = re.compile(r"({method.*})|({built-in.*})|(<.+>:\d+\(<.+>\))") - exc_row = [exc_number, exc_number, exc_number, exc_number, exc_number, exc_module] + exc_row = [ + exc_number, + exc_number, + exc_number, + exc_number, + exc_number, + exc_module, + ] for row in actual[1:]: for text, expected_regex in zip(row, exc_row): self.assertRegex( - text, expected_regex, - msg="Expected something like {} but found {}" + text, + expected_regex, + msg="Expected something like {} but found {}", ) diff --git a/project/tests/test_response_assumptions.py b/project/tests/test_response_assumptions.py index 0500a44e..bf288529 100644 --- a/project/tests/test_response_assumptions.py +++ b/project/tests/test_response_assumptions.py @@ -7,4 +7,6 @@ class TestResponseAssumptions(TestCase): def test_headers_present_in_http_response(self): """Verify that HttpResponse has a headers or _headers 
attribute, which we use and Mock in our tests.""" django_response = HttpResponse() - self.assertTrue(hasattr(django_response, '_headers') or hasattr(django_response, 'headers')) + self.assertTrue( + hasattr(django_response, "_headers") or hasattr(django_response, "headers") + ) diff --git a/project/tests/test_sensitive_data_in_request.py b/project/tests/test_sensitive_data_in_request.py index aa0d79a7..e37dcda7 100644 --- a/project/tests/test_sensitive_data_in_request.py +++ b/project/tests/test_sensitive_data_in_request.py @@ -6,9 +6,17 @@ from silk.config import SilkyConfig from silk.model_factory import RequestModelFactory -HTTP_CONTENT_TYPE = 'content-type' +HTTP_CONTENT_TYPE = "content-type" CLEANSED = RequestModelFactory.CLEANSED_SUBSTITUTE -DEFAULT_SENSITIVE_KEYS = {'username', 'api', 'token', 'key', 'secret', 'password', 'signature'} +DEFAULT_SENSITIVE_KEYS = { + "username", + "api", + "token", + "key", + "secret", + "password", + "signature", +} DEFAULT_HIDE_COOKIES = True @@ -39,7 +47,9 @@ def test_mask_credentials_masks_sensitive_values_between_insensitive_values(self expected = f"public1=foo&password={CLEANSED}&public2=bar" self.assertEqual(expected, self._mask(body)) - def test_mask_credentials_preserves_insensitive_values_between_sensitive_values(self): + def test_mask_credentials_preserves_insensitive_values_between_sensitive_values( + self, + ): body = "password=1&foo=public&secret=2" expected = f"password={CLEANSED}&foo=public&secret={CLEANSED}" self.assertEqual(expected, self._mask(body)) @@ -86,7 +96,9 @@ def _mask(self, value): def test_mask_credentials_preserves_single_insensitive_values(self): self.assertIn("public", self._mask({"foo": "public"})) - def test_mask_credentials_preserves_insensitive_values_in_presence_of_sensitive(self): + def test_mask_credentials_preserves_insensitive_values_in_presence_of_sensitive( + self, + ): self.assertIn("public", self._mask({"password": "secret", "foo": "public"})) def test_mask_credentials_masks_sensitive_values(self): @@ -105,18 +117,23 @@ def test_mask_credentials_handles_suffixes(self): self.assertNotIn("secret", self._mask({"username-with-suffix": "secret"})) def test_mask_credentials_handles_complex_cases(self): - self.assertNotIn("secret", self._mask({ - "foo": "public", - "prefixed-uSeRname-with-suffix": "secret" - })) + self.assertNotIn( + "secret", + self._mask({"foo": "public", "prefixed-uSeRname-with-suffix": "secret"}), + ) def test_mask_credentials_in_nested_data_structures(self): - self.assertNotIn("secret", self._mask({ - "foo": "public", - "nested": { - "prefixed-uSeRname-with-suffix": "secret", - }, - })) + self.assertNotIn( + "secret", + self._mask( + { + "foo": "public", + "nested": { + "prefixed-uSeRname-with-suffix": "secret", + }, + } + ), + ) def test_mask_credentials_masks_sensitive_values_listed_in_settings(self): SilkyConfig().SILKY_SENSITIVE_KEYS = {"foo"} @@ -134,117 +151,141 @@ def tearDown(self): def test_password_in_body(self): mock_request = Mock() - mock_request.headers = {HTTP_CONTENT_TYPE: 'text/plain'} - mock_request.body = 'username=test_username&unmasked=testunmasked&password=testpassword' + mock_request.headers = {HTTP_CONTENT_TYPE: "text/plain"} + mock_request.body = ( + "username=test_username&unmasked=testunmasked&password=testpassword" + ) mock_request.get = mock_request.headers.get factory = RequestModelFactory(mock_request) body, raw_body = factory.body() - self.assertIn('testunmasked', raw_body) - self.assertNotIn('test_username', raw_body) - self.assertNotIn('testpassword', 
raw_body) - self.assertNotIn('test_username', body) - self.assertNotIn('testpassword', body) + self.assertIn("testunmasked", raw_body) + self.assertNotIn("test_username", raw_body) + self.assertNotIn("testpassword", raw_body) + self.assertNotIn("test_username", body) + self.assertNotIn("testpassword", body) def test_password_in_json(self): mock_request = Mock() - mock_request.headers = {HTTP_CONTENT_TYPE: 'application/json; charset=UTF-8'} - d = {'x': 'testunmasked', 'username': 'test_username', 'password': 'testpassword', - 'prefixed-secret': 'testsecret'} + mock_request.headers = {HTTP_CONTENT_TYPE: "application/json; charset=UTF-8"} + d = { + "x": "testunmasked", + "username": "test_username", + "password": "testpassword", + "prefixed-secret": "testsecret", + } mock_request.body = json.dumps(d) mock_request.get = mock_request.headers.get factory = RequestModelFactory(mock_request) body, raw_body = factory.body() - self.assertIn('testunmasked', raw_body) - self.assertNotIn('test_username', raw_body) - self.assertNotIn('testpassword', raw_body) - self.assertNotIn('testsecret', raw_body) - self.assertNotIn('test_username', body) - self.assertNotIn('testpassword', body) - self.assertNotIn('testsecret', body) + self.assertIn("testunmasked", raw_body) + self.assertNotIn("test_username", raw_body) + self.assertNotIn("testpassword", raw_body) + self.assertNotIn("testsecret", raw_body) + self.assertNotIn("test_username", body) + self.assertNotIn("testpassword", body) + self.assertNotIn("testsecret", body) for datum in [json.loads(body), json.loads(raw_body)]: - self.assertEqual(datum['username'], RequestModelFactory.CLEANSED_SUBSTITUTE) - self.assertEqual(datum['password'], RequestModelFactory.CLEANSED_SUBSTITUTE) - self.assertEqual(datum['prefixed-secret'], RequestModelFactory.CLEANSED_SUBSTITUTE) - self.assertEqual(datum['x'], 'testunmasked') + self.assertEqual(datum["username"], RequestModelFactory.CLEANSED_SUBSTITUTE) + self.assertEqual(datum["password"], RequestModelFactory.CLEANSED_SUBSTITUTE) + self.assertEqual( + datum["prefixed-secret"], RequestModelFactory.CLEANSED_SUBSTITUTE + ) + self.assertEqual(datum["x"], "testunmasked") def test_password_in_batched_json(self): mock_request = Mock() - mock_request.headers = {HTTP_CONTENT_TYPE: 'application/json; charset=UTF-8'} + mock_request.headers = {HTTP_CONTENT_TYPE: "application/json; charset=UTF-8"} d = [ - {'x': 'testunmasked', 'username': 'test_username', 'password': 'testpassword'}, - {'x': 'testunmasked', 'username': 'test_username', 'password': 'testpassword'} + { + "x": "testunmasked", + "username": "test_username", + "password": "testpassword", + }, + { + "x": "testunmasked", + "username": "test_username", + "password": "testpassword", + }, ] mock_request.body = json.dumps(d) mock_request.get = mock_request.headers.get factory = RequestModelFactory(mock_request) body, raw_body = factory.body() - self.assertIn('testunmasked', raw_body) - self.assertNotIn('test_username', raw_body) - self.assertNotIn('testpassword', raw_body) - self.assertNotIn('test_username', body[0]) - self.assertNotIn('testpassword', body[0]) - self.assertNotIn('test_username', body[1]) - self.assertNotIn('testpassword', body[1]) + self.assertIn("testunmasked", raw_body) + self.assertNotIn("test_username", raw_body) + self.assertNotIn("testpassword", raw_body) + self.assertNotIn("test_username", body[0]) + self.assertNotIn("testpassword", body[0]) + self.assertNotIn("test_username", body[1]) + self.assertNotIn("testpassword", body[1]) for data in [json.loads(body), 
json.loads(raw_body)]: for datum in data: - self.assertEqual(datum['username'], RequestModelFactory.CLEANSED_SUBSTITUTE) - self.assertEqual(datum['password'], RequestModelFactory.CLEANSED_SUBSTITUTE) - self.assertEqual(datum['x'], 'testunmasked') + self.assertEqual( + datum["username"], RequestModelFactory.CLEANSED_SUBSTITUTE + ) + self.assertEqual( + datum["password"], RequestModelFactory.CLEANSED_SUBSTITUTE + ) + self.assertEqual(datum["x"], "testunmasked") def test_authorization_header(self): mock_request = Mock() - mock_request.headers = {'authorization': 'secret'} - mock_request.body = '' + mock_request.headers = {"authorization": "secret"} + mock_request.body = "" mock_request.get = mock_request.headers.get factory = RequestModelFactory(mock_request) headers = factory.encoded_headers() json_headers = json.loads(headers) - self.assertIn('authorization', json_headers) - self.assertEqual(json_headers['authorization'], RequestModelFactory.CLEANSED_SUBSTITUTE) + self.assertIn("authorization", json_headers) + self.assertEqual( + json_headers["authorization"], RequestModelFactory.CLEANSED_SUBSTITUTE + ) def test_hide_cookies(self): SilkyConfig().SILKY_HIDE_COOKIES = True mock_request = Mock() - mock_request.headers = {'Cookie': 'secret'} - mock_request.body = '' + mock_request.headers = {"Cookie": "secret"} + mock_request.body = "" mock_request.get = mock_request.headers.get factory = RequestModelFactory(mock_request) headers = factory.encoded_headers() json_headers = json.loads(headers) - self.assertIn('cookie', json_headers) - self.assertEqual(json_headers['cookie'], RequestModelFactory.CLEANSED_SUBSTITUTE) + self.assertIn("cookie", json_headers) + self.assertEqual( + json_headers["cookie"], RequestModelFactory.CLEANSED_SUBSTITUTE + ) def test_no_hide_cookies(self): SilkyConfig().SILKY_HIDE_COOKIES = False mock_request = Mock() - mock_request.headers = {'Cookie': 'Cookies!!!'} - mock_request.body = '' + mock_request.headers = {"Cookie": "Cookies!!!"} + mock_request.body = "" mock_request.get = mock_request.headers.get factory = RequestModelFactory(mock_request) headers = factory.encoded_headers() json_headers = json.loads(headers) - self.assertIn('cookie', json_headers) - self.assertEqual(json_headers['cookie'], 'Cookies!!!') + self.assertIn("cookie", json_headers) + self.assertEqual(json_headers["cookie"], "Cookies!!!") def test_hide_sensitive_headers(self): SilkyConfig().SILKY_SENSITIVE_KEYS = ["foo", "bar"] mock_request = Mock() - mock_request.headers = {'FOO': 'secret', 'BAR': 'secret', 'BAZ': 'not-secret'} - mock_request.body = '' + mock_request.headers = {"FOO": "secret", "BAR": "secret", "BAZ": "not-secret"} + mock_request.body = "" mock_request.get = mock_request.headers.get factory = RequestModelFactory(mock_request) headers = factory.encoded_headers() json_headers = json.loads(headers) - self.assertIn('foo', json_headers) - self.assertIn('bar', json_headers) - self.assertIn('baz', json_headers) - self.assertEqual(json_headers['foo'], RequestModelFactory.CLEANSED_SUBSTITUTE) - self.assertEqual(json_headers['bar'], RequestModelFactory.CLEANSED_SUBSTITUTE) - self.assertEqual(json_headers['baz'], 'not-secret') + self.assertIn("foo", json_headers) + self.assertIn("bar", json_headers) + self.assertIn("baz", json_headers) + self.assertEqual(json_headers["foo"], RequestModelFactory.CLEANSED_SUBSTITUTE) + self.assertEqual(json_headers["bar"], RequestModelFactory.CLEANSED_SUBSTITUTE) + self.assertEqual(json_headers["baz"], "not-secret") diff --git a/project/tests/test_silky_middleware.py 
b/project/tests/test_silky_middleware.py index 6ebe9474..f3b8698e 100644 --- a/project/tests/test_silky_middleware.py +++ b/project/tests/test_silky_middleware.py @@ -13,7 +13,8 @@ def fake_get_response(): def fake_response(): - return 'hello world' + return "hello world" + return fake_response @@ -21,16 +22,15 @@ class TestApplyDynamicMappings(TestCase): def test_dynamic_decorator(self): middleware = SilkyMiddleware(fake_get_response) SilkyConfig().SILKY_DYNAMIC_PROFILING = [ - { - 'module': 'tests.data.dynamic', - 'function': 'foo' - } + {"module": "tests.data.dynamic", "function": "foo"} ] middleware._apply_dynamic_mappings() from .data.dynamic import foo mock = mock_data_collector() - with patch('silk.profiling.profiler.DataCollector', return_value=mock) as mock_DataCollector: + with patch( + "silk.profiling.profiler.DataCollector", return_value=mock + ) as mock_DataCollector: foo() # Should be wrapped in a decorator self.assertTrue(mock_DataCollector.return_value.register_profile.call_count) @@ -38,17 +38,19 @@ def test_dynamic_context_manager(self): middleware = SilkyMiddleware(fake_get_response) SilkyConfig().SILKY_DYNAMIC_PROFILING = [ { - 'module': 'tests.data.dynamic', - 'function': 'foo', - 'start_line': 1, - 'end_line': 2, + "module": "tests.data.dynamic", + "function": "foo", + "start_line": 1, + "end_line": 2, } ] middleware._apply_dynamic_mappings() from .data.dynamic import foo mock = mock_data_collector() - with patch('silk.profiling.profiler.DataCollector', return_value=mock) as mock_DataCollector: + with patch( + "silk.profiling.profiler.DataCollector", return_value=mock + ) as mock_DataCollector: foo() self.assertTrue(mock_DataCollector.return_value.register_profile.call_count) @@ -56,10 +58,10 @@ def test_invalid_dynamic_context_manager(self): middleware = SilkyMiddleware(fake_get_response) SilkyConfig().SILKY_DYNAMIC_PROFILING = [ { - 'module': 'tests.data.dynamic', - 'function': 'foo2', - 'start_line': 1, - 'end_line': 7, + "module": "tests.data.dynamic", + "function": "foo2", + "start_line": 1, + "end_line": 7, } ] self.assertRaises(IndexError, middleware._apply_dynamic_mappings) @@ -67,52 +69,43 @@ def test_invalid_dynamic_context_manager(self): def test_invalid_dynamic_decorator_module(self): middleware = SilkyMiddleware(fake_get_response) SilkyConfig().SILKY_DYNAMIC_PROFILING = [ - { - 'module': 'tests.data.dfsdf', - 'function': 'foo' - } + {"module": "tests.data.dfsdf", "function": "foo"} ] self.assertRaises(AttributeError, middleware._apply_dynamic_mappings) def test_invalid_dynamic_decorator_function_name(self): middleware = SilkyMiddleware(fake_get_response) SilkyConfig().SILKY_DYNAMIC_PROFILING = [ - { - 'module': 'tests.data.dynamic', - 'function': 'bar' - } + {"module": "tests.data.dynamic", "function": "bar"} ] self.assertRaises(AttributeError, middleware._apply_dynamic_mappings) def test_invalid_dynamic_mapping(self): middleware = SilkyMiddleware(fake_get_response) SilkyConfig().SILKY_DYNAMIC_PROFILING = [ - { - 'dfgdf': 'tests.data.dynamic', - 'funcgdfgtion': 'bar' - } + {"dfgdf": "tests.data.dynamic", "funcgdfgtion": "bar"} ] self.assertRaises(KeyError, middleware._apply_dynamic_mappings) def test_no_mappings(self): middleware = SilkyMiddleware(fake_get_response) - SilkyConfig().SILKY_DYNAMIC_PROFILING = [ - - ] + SilkyConfig().SILKY_DYNAMIC_PROFILING = [] middleware._apply_dynamic_mappings() # Just checking no crash def test_raise_if_authentication_is_enable_but_no_middlewares(self): SilkyConfig().SILKY_AUTHENTICATION = True - with 
self.modify_settings(MIDDLEWARE={ - 'remove': [ - 'django.contrib.sessions.middleware.SessionMiddleware', - 'django.contrib.auth.middleware.AuthenticationMiddleware', - 'django.contrib.messages.middleware.MessageMiddleware', - ], - }): + with self.modify_settings( + MIDDLEWARE={ + "remove": [ + "django.contrib.sessions.middleware.SessionMiddleware", + "django.contrib.auth.middleware.AuthenticationMiddleware", + "django.contrib.messages.middleware.MessageMiddleware", + ], + } + ): with self.assertRaisesMessage( SilkNotConfigured, - "SILKY_AUTHENTICATION can not be enabled without Session, Authentication or Message Django's middlewares" + "SILKY_AUTHENTICATION can not be enabled without Session, Authentication or Message Django's middlewares", ): SilkyMiddleware(fake_get_response) @@ -120,30 +113,28 @@ def test_raise_if_authentication_is_enable_but_no_middlewares(self): class TestShouldIntercept(TestCase): def test_should_intercept_non_silk_request(self): request = Request() - request.path = '/myapp/foo' + request.path = "/myapp/foo" should_intercept = _should_intercept(request) self.assertTrue(should_intercept) def test_should_intercept_silk_request(self): request = Request() - request.path = reverse('silk:summary') + request.path = reverse("silk:summary") should_intercept = _should_intercept(request) self.assertFalse(should_intercept) - @override_settings(ROOT_URLCONF='tests.urlconf_without_silk') + @override_settings(ROOT_URLCONF="tests.urlconf_without_silk") def test_should_intercept_without_silk_urls(self): request = Request() - request.path = '/login' + request.path = "/login" _should_intercept(request) # Just checking no crash def test_should_intercept_ignore_paths(self): - SilkyConfig().SILKY_IGNORE_PATHS = [ - '/ignorethis' - ] + SilkyConfig().SILKY_IGNORE_PATHS = ["/ignorethis"] request = Request() - request.path = '/ignorethis' + request.path = "/ignorethis" should_intercept = _should_intercept(request) self.assertFalse(should_intercept) diff --git a/project/tests/test_silky_profiler.py b/project/tests/test_silky_profiler.py index 38262ec4..a8fc005f 100644 --- a/project/tests/test_silky_profiler.py +++ b/project/tests/test_silky_profiler.py @@ -12,7 +12,7 @@ class TestProfilerRequests(TestCase): def test_context_manager_no_request(self): DataCollector().configure() - with silk_profile(name='test_profile'): + with silk_profile(name="test_profile"): sleep(0.1) self.assertFalse(DataCollector().profiles) @@ -25,17 +25,17 @@ def func(): func() profile = list(DataCollector().profiles.values())[0] - self.assertFalse(profile['request']) + self.assertFalse(profile["request"]) def test_context_manager_request(self): - DataCollector().configure(Request.objects.create(path='/to/somewhere')) - with silk_profile(name='test_profile'): + DataCollector().configure(Request.objects.create(path="/to/somewhere")) + with silk_profile(name="test_profile"): sleep(0.1) profile = list(DataCollector().profiles.values())[0] - self.assertEqual(DataCollector().request, profile['request']) + self.assertEqual(DataCollector().request, profile["request"]) def test_decorator_request(self): - DataCollector().configure(Request.objects.create(path='/to/somewhere')) + DataCollector().configure(Request.objects.create(path="/to/somewhere")) @silk_profile() def func(): @@ -43,7 +43,7 @@ def func(): func() profile = list(DataCollector().profiles.values())[0] - self.assertEqual(DataCollector().request, profile['request']) + self.assertEqual(DataCollector().request, profile["request"]) class 
TestProfilertContextManager(TestCase): @@ -52,7 +52,7 @@ def setUpClass(cls): super().setUpClass() r = Request.objects.create() DataCollector().configure(r) - with silk_profile(name='test_profile'): + with silk_profile(name="test_profile"): sleep(0.1) def test_one_object(self): @@ -60,11 +60,13 @@ def test_one_object(self): def test_name(self): profile = list(DataCollector().profiles.values())[0] - self.assertEqual(profile['name'], 'test_profile') + self.assertEqual(profile["name"], "test_profile") def test_time_taken(self): profile = list(DataCollector().profiles.values())[0] - time_taken = _time_taken(start_time=profile['start_time'], end_time=profile['end_time']) + time_taken = _time_taken( + start_time=profile["start_time"], end_time=profile["end_time"] + ) self.assertGreaterEqual(time_taken, 100) self.assertLess(time_taken, 110) @@ -86,11 +88,13 @@ def test_one_object(self): def test_name(self): profile = list(DataCollector().profiles.values())[0] - self.assertEqual(profile['name'], 'func') + self.assertEqual(profile["name"], "func") def test_time_taken(self): profile = list(DataCollector().profiles.values())[0] - time_taken = _time_taken(start_time=profile['start_time'], end_time=profile['end_time']) + time_taken = _time_taken( + start_time=profile["start_time"], end_time=profile["end_time"] + ) self.assertGreaterEqual(time_taken, 100) self.assertLess(time_taken, 115) @@ -98,12 +102,12 @@ def test_time_taken(self): class TestQueries(TestCase): def test_no_queries_before(self): DataCollector().configure(Request.objects.create()) - with silk_profile(name='test_no_queries_before_profile'): + with silk_profile(name="test_no_queries_before_profile"): mock_queries = MockSuite().mock_sql_queries(n=5, as_dict=True) DataCollector().register_query(*mock_queries) profile = list(DataCollector().profiles.values())[0] - self.assertEqual(profile['name'], 'test_no_queries_before_profile') - queries = profile['queries'] + self.assertEqual(profile["name"], "test_no_queries_before_profile") + queries = profile["queries"] self.assertEqual(len(queries), 5) for query in DataCollector().queries: self.assertIn(query, queries) @@ -113,12 +117,12 @@ def test_queries_before(self): DataCollector().configure(Request.objects.create()) DataCollector().register_query(*MockSuite().mock_sql_queries(n=2, as_dict=True)) before = [x for x in DataCollector().queries] - with silk_profile(name='test_no_queries_before_profile'): + with silk_profile(name="test_no_queries_before_profile"): mock_queries = MockSuite().mock_sql_queries(n=5, as_dict=True) DataCollector().register_query(*mock_queries) profile = list(DataCollector().profiles.values())[0] - self.assertEqual(profile['name'], 'test_no_queries_before_profile') - queries = profile['queries'] + self.assertEqual(profile["name"], "test_no_queries_before_profile") + queries = profile["queries"] self.assertEqual(len(queries), 5) for query in set(DataCollector().queries).difference(before): self.assertIn(query, queries) diff --git a/project/tests/test_view_profiling.py b/project/tests/test_view_profiling.py index 1ce21cbe..bb0da757 100644 --- a/project/tests/test_view_profiling.py +++ b/project/tests/test_view_profiling.py @@ -15,7 +15,7 @@ def test_func_names(self): func_names = ProfilingView()._get_function_names() for p in profiles: self.assertIn(p.func_name, func_names) - self.assertIn('', func_names) + self.assertIn("", func_names) def test_show(self): self.assertIn(ProfilingView.default_show, ProfilingView.show) @@ -31,15 +31,15 @@ def setUpClass(cls): cls.profiles = 
[MockSuite().mock_profile() for _ in range(0, 10)] def test_ordering(self): - results = ProfilingView()._get_objects(order_by='Recent') - self.assertSorted(results, 'start_time') + results = ProfilingView()._get_objects(order_by="Recent") + self.assertSorted(results, "start_time") def test_show(self): results = ProfilingView()._get_objects(show=5) self.assertEqual(5, len(results)) def test_func_name(self): - func_name = 'a_func_name' + func_name = "a_func_name" self.profiles[1].func_name = func_name self.profiles[1].save() results = ProfilingView()._get_objects(func_name=func_name) @@ -50,91 +50,127 @@ def assertSorted(self, objects, sort_field): for idx, r in enumerate(objects): try: nxt = objects[idx + 1] - self.assertGreaterEqual(getattr(r, sort_field), getattr(nxt, sort_field)) + self.assertGreaterEqual( + getattr(r, sort_field), getattr(nxt, sort_field) + ) except IndexError: pass class TestProfilingContext(TestCase): def test_default(self): - request = Mock(spec_set=['GET', 'session']) + request = Mock(spec_set=["GET", "session"]) request.GET = {} request.session = {} context = ProfilingView()._create_context(request) - self.assertTrue(dict_contains({ - 'show': ProfilingView.default_show, - 'order_by': ProfilingView.defualt_order_by, - 'options_show': ProfilingView.show, - 'options_order_by': ProfilingView.order_by, - 'options_func_names': ProfilingView()._get_function_names() - }, context)) - self.assertNotIn('path', context) - self.assertIn('results', context) + self.assertTrue( + dict_contains( + { + "show": ProfilingView.default_show, + "order_by": ProfilingView.defualt_order_by, + "options_show": ProfilingView.show, + "options_order_by": ProfilingView.order_by, + "options_func_names": ProfilingView()._get_function_names(), + }, + context, + ) + ) + self.assertNotIn("path", context) + self.assertIn("results", context) def test_get(self): - request = Mock(spec_set=['GET', 'session']) + request = Mock(spec_set=["GET", "session"]) request.session = {} show = 10 - func_name = 'func_name' - name = 'name' - order_by = 'Time' - request.GET = {'show': show, - 'func_name': func_name, - 'name': name, - 'order_by': order_by} + func_name = "func_name" + name = "name" + order_by = "Time" + request.GET = { + "show": show, + "func_name": func_name, + "name": name, + "order_by": order_by, + } context = ProfilingView()._create_context(request) - self.assertTrue(dict_contains({ - 'show': show, - 'order_by': order_by, - 'func_name': func_name, - 'name': name, - 'options_show': ProfilingView.show, - 'options_order_by': ProfilingView.order_by, - 'options_func_names': ProfilingView()._get_function_names() - }, context)) - self.assertIn('results', context) + self.assertTrue( + dict_contains( + { + "show": show, + "order_by": order_by, + "func_name": func_name, + "name": name, + "options_show": ProfilingView.show, + "options_order_by": ProfilingView.order_by, + "options_func_names": ProfilingView()._get_function_names(), + }, + context, + ) + ) + self.assertIn("results", context) def test_view_without_session_and_auth_middlewares(self): """ Filters are not present because there is no `session` to store them. 
""" - with self.modify_settings(MIDDLEWARE={ - 'remove': [ - 'django.contrib.sessions.middleware.SessionMiddleware', - 'django.contrib.auth.middleware.AuthenticationMiddleware', - 'django.contrib.messages.middleware.MessageMiddleware', - ], - }): + with self.modify_settings( + MIDDLEWARE={ + "remove": [ + "django.contrib.sessions.middleware.SessionMiddleware", + "django.contrib.auth.middleware.AuthenticationMiddleware", + "django.contrib.messages.middleware.MessageMiddleware", + ], + } + ): # test filters on GET show = 10 - func_name = 'func_name' - name = 'name' - order_by = 'Time' - response = self.client.get(silky_reverse('profiling'), { - 'show': show, - 'func_name': func_name, - 'name': name, - 'order_by': order_by - }) + func_name = "func_name" + name = "name" + order_by = "Time" + response = self.client.get( + silky_reverse("profiling"), + { + "show": show, + "func_name": func_name, + "name": name, + "order_by": order_by, + }, + ) context = response.context - self.assertTrue(dict_contains({ - 'show': show, - 'order_by': order_by, - 'func_name': func_name, - 'name': name, - 'options_show': ProfilingView.show, - 'options_order_by': ProfilingView.order_by, - 'options_func_names': ProfilingView()._get_function_names() - }, context)) + self.assertTrue( + dict_contains( + { + "show": show, + "order_by": order_by, + "func_name": func_name, + "name": name, + "options_show": ProfilingView.show, + "options_order_by": ProfilingView.order_by, + "options_func_names": ProfilingView()._get_function_names(), + }, + context, + ) + ) # test filters on POST - response = self.client.post(silky_reverse('profiling'), { - 'filter-overalltime-value': 100, - 'filter-overalltime-typ': 'TimeSpentOnQueriesFilter', - }) - context = response.context - self.assertTrue(dict_contains({ - 'filters': { - 'overalltime': {'typ': 'TimeSpentOnQueriesFilter', 'value': 100, 'str': 'DB Time >= 100'} + response = self.client.post( + silky_reverse("profiling"), + { + "filter-overalltime-value": 100, + "filter-overalltime-typ": "TimeSpentOnQueriesFilter", }, - }, context)) + ) + context = response.context + self.assertTrue( + dict_contains( + { + "filters": { + "overalltime": { + "typ": "TimeSpentOnQueriesFilter", + "value": 100, + "str": "DB Time >= 100", + } + }, + }, + context, + ) + ) diff --git a/project/tests/test_view_requests.py b/project/tests/test_view_requests.py index d36e93fd..d0cedb04 100644 --- a/project/tests/test_view_requests.py +++ b/project/tests/test_view_requests.py @@ -27,94 +27,141 @@ def test_order_by(self): class TestContext(TestCase): def test_default(self): - request = Mock(spec_set=['GET', 'session']) + request = Mock(spec_set=["GET", "session"]) request.session = {} request.GET = {} context = RequestsView()._create_context(request) - self.assertTrue(dict_contains({ - 'show': RequestsView.default_show, - 'order_by': RequestsView.default_order_by, - 'options_show': RequestsView.show, - 'options_order_by': RequestsView().options_order_by, - 'options_order_dir': RequestsView().options_order_dir, - }, context)) - self.assertQuerySetEqual(context['options_paths'], RequestsView()._get_paths()) - self.assertNotIn('path', context) - self.assertIn('results', context) + self.assertTrue( + dict_contains( + { + "show": RequestsView.default_show, + "order_by": RequestsView.default_order_by, + "options_show": RequestsView.show, + "options_order_by": RequestsView().options_order_by, + "options_order_dir": RequestsView().options_order_dir, + }, + context, + ) + ) + 
self.assertQuerySetEqual(context["options_paths"], RequestsView()._get_paths()) + self.assertNotIn("path", context) + self.assertIn("results", context) def test_get(self): show = 10 - path = '/path/to/somewhere/' - order_by = 'path' - response = self.client.get(silky_reverse('requests'), { - 'show': show, - 'path': path, - 'order_by': order_by, - }) + path = "/path/to/somewhere/" + order_by = "path" + response = self.client.get( + silky_reverse("requests"), + { + "show": show, + "path": path, + "order_by": order_by, + }, + ) context = response.context - self.assertTrue(dict_contains({ - 'show': show, - 'order_by': order_by, - 'path': path, - 'options_show': RequestsView.show, - 'options_order_by': RequestsView().options_order_by, - 'options_order_dir': RequestsView().options_order_dir, - }, context)) - self.assertQuerySetEqual(context['options_paths'], RequestsView()._get_paths()) - self.assertIn('results', context) + self.assertTrue( + dict_contains( + { + "show": show, + "order_by": order_by, + "path": path, + "options_show": RequestsView.show, + "options_order_by": RequestsView().options_order_by, + "options_order_dir": RequestsView().options_order_dir, + }, + context, + ) + ) + self.assertQuerySetEqual(context["options_paths"], RequestsView()._get_paths()) + self.assertIn("results", context) def test_post(self): - response = self.client.post(silky_reverse('requests'), { - 'filter-overalltime-value': 100, - 'filter-overalltime-typ': 'TimeSpentOnQueriesFilter', - }) - context = response.context - self.assertTrue(dict_contains({ - 'filters': { - 'overalltime': {'typ': 'TimeSpentOnQueriesFilter', 'value': 100, 'str': 'DB Time >= 100'} + response = self.client.post( + silky_reverse("requests"), + { + "filter-overalltime-value": 100, + "filter-overalltime-typ": "TimeSpentOnQueriesFilter", }, - }, context)) - self.assertQuerySetEqual(context['options_paths'], RequestsView()._get_paths()) - self.assertIn('results', context) + ) + context = response.context + self.assertTrue( + dict_contains( + { + "filters": { + "overalltime": { + "typ": "TimeSpentOnQueriesFilter", + "value": 100, + "str": "DB Time >= 100", + } + }, + }, + context, + ) + ) + self.assertQuerySetEqual(context["options_paths"], RequestsView()._get_paths()) + self.assertIn("results", context) def test_view_without_session_and_auth_middlewares(self): """ Filters are not present because there is no `session` to store them. 
""" - with self.modify_settings(MIDDLEWARE={ - 'remove': [ - 'django.contrib.sessions.middleware.SessionMiddleware', - 'django.contrib.auth.middleware.AuthenticationMiddleware', - 'django.contrib.messages.middleware.MessageMiddleware', - ], - }): + with self.modify_settings( + MIDDLEWARE={ + "remove": [ + "django.contrib.sessions.middleware.SessionMiddleware", + "django.contrib.auth.middleware.AuthenticationMiddleware", + "django.contrib.messages.middleware.MessageMiddleware", + ], + } + ): # test filters on GET show = 10 - path = '/path/to/somewhere/' - order_by = 'path' - response = self.client.get(silky_reverse('requests'), { - 'show': show, - 'path': path, - 'order_by': order_by, - }) + path = "/path/to/somewhere/" + order_by = "path" + response = self.client.get( + silky_reverse("requests"), + { + "show": show, + "path": path, + "order_by": order_by, + }, + ) context = response.context - self.assertTrue(dict_contains({ - 'show': show, - 'order_by': order_by, - 'path': path, - }, context)) + self.assertTrue( + dict_contains( + { + "show": show, + "order_by": order_by, + "path": path, + }, + context, + ) + ) # test filters on POST - response = self.client.post(silky_reverse('requests'), { - 'filter-overalltime-value': 100, - 'filter-overalltime-typ': 'TimeSpentOnQueriesFilter', - }) - context = response.context - self.assertTrue(dict_contains({ - 'filters': { - 'overalltime': {'typ': 'TimeSpentOnQueriesFilter', 'value': 100, 'str': 'DB Time >= 100'} + response = self.client.post( + silky_reverse("requests"), + { + "filter-overalltime-value": 100, + "filter-overalltime-typ": "TimeSpentOnQueriesFilter", }, - }, context)) + ) + context = response.context + self.assertTrue( + dict_contains( + { + "filters": { + "overalltime": { + "typ": "TimeSpentOnQueriesFilter", + "value": 100, + "str": "DB Time >= 100", + } + }, + }, + context, + ) + ) class TestGetObjects(TestCase): @@ -122,7 +169,9 @@ def assertSorted(self, objects, sort_field): for idx, r in enumerate(objects): try: nxt = objects[idx + 1] - self.assertGreaterEqual(getattr(r, sort_field), getattr(nxt, sort_field)) + self.assertGreaterEqual( + getattr(r, sort_field), getattr(nxt, sort_field) + ) except IndexError: pass @@ -134,7 +183,7 @@ def setUpClass(cls): def test_defaults(self): objects = RequestsView()._get_objects() self.assertEqual(len(objects), 25) - self.assertSorted(objects, 'start_time') + self.assertSorted(objects, "start_time") def test_show(self): objects = RequestsView()._get_objects(show=10) @@ -149,8 +198,7 @@ def test_path(self): @unittest.skip("Flaky") def test_time_spent_db_with_path(self): request = random.choice(self.requests) - query_set = RequestsView()._get_objects(order_by='db_time', - path=request.path) + query_set = RequestsView()._get_objects(order_by="db_time", path=request.path) num_results = query_set.count() self.assertTrue(num_results) for result in query_set: @@ -162,18 +210,29 @@ def assertSorted(self, objects, sort_field): for idx, r in enumerate(objects): try: nxt = objects[idx + 1] - self.assertGreaterEqual(getattr(r, sort_field), getattr(nxt, sort_field)) + self.assertGreaterEqual( + getattr(r, sort_field), getattr(nxt, sort_field) + ) except IndexError: pass def test_ordering(self): - self.assertSorted(objects=RequestsView()._get_objects(order_by='start_time'), - sort_field='start_time') - self.assertSorted(objects=RequestsView()._get_objects(order_by='path'), - sort_field='path') - self.assertSorted(objects=RequestsView()._get_objects(order_by='num_sql_queries'), - sort_field='num_sql_queries') 
- self.assertSorted(objects=RequestsView()._get_objects(order_by='time_taken'), - sort_field='time_taken') - self.assertSorted(objects=RequestsView()._get_objects(order_by='db_time'), - sort_field='db_time') + self.assertSorted( + objects=RequestsView()._get_objects(order_by="start_time"), + sort_field="start_time", + ) + self.assertSorted( + objects=RequestsView()._get_objects(order_by="path"), sort_field="path" + ) + self.assertSorted( + objects=RequestsView()._get_objects(order_by="num_sql_queries"), + sort_field="num_sql_queries", + ) + self.assertSorted( + objects=RequestsView()._get_objects(order_by="time_taken"), + sort_field="time_taken", + ) + self.assertSorted( + objects=RequestsView()._get_objects(order_by="db_time"), + sort_field="db_time", + ) diff --git a/project/tests/test_view_sql_detail.py b/project/tests/test_view_sql_detail.py index b3365659..41c806ed 100644 --- a/project/tests/test_view_sql_detail.py +++ b/project/tests/test_view_sql_detail.py @@ -23,7 +23,12 @@ def test_allowed_file_paths_nothing_specified(self): """by default we dont display any source, and it should return correctly""" request = MockSuite().mock_request() query = MockSuite().mock_sql_queries(request=request, n=1)[0] - response = self.client.get(silky_reverse('request_sql_detail', kwargs={'sql_id': query.id, 'request_id': request.id})) + response = self.client.get( + silky_reverse( + "request_sql_detail", + kwargs={"sql_id": query.id, "request_id": request.id}, + ) + ) self.assertTrue(response.status_code == 200) def test_allowed_file_paths_available_source(self): @@ -34,48 +39,58 @@ def test_allowed_file_paths_available_source(self): _, files = SQLDetailView()._urlify(tb) file_path = random.choice(files) with open(file_path) as f: - line_num = random.randint(0, len(f.read().split('\n'))) - response = self.client.get(silky_reverse('request_sql_detail', - kwargs={'sql_id': query.id, 'request_id': request.id}), - data={ - 'line_num': line_num, - 'file_path': file_path - }) + line_num = random.randint(0, len(f.read().split("\n"))) + response = self.client.get( + silky_reverse( + "request_sql_detail", + kwargs={"sql_id": query.id, "request_id": request.id}, + ), + data={"line_num": line_num, "file_path": file_path}, + ) self.assertTrue(response.status_code == 200) def test_allowed_file_paths_unavailable_source(self): """if we request to view source that is not in the traceback we should get a 403""" request = MockSuite().mock_request() query = MockSuite().mock_sql_queries(request=request, n=1)[0] - file_path = settings.TEMP_DIR + '/blah' - with open(file_path, 'w') as f: - f.write('test') - response = self.client.get(silky_reverse('request_sql_detail', - kwargs={'sql_id': query.id, 'request_id': request.id}), - data={ - 'line_num': 0, - 'file_path': file_path - }) + file_path = settings.TEMP_DIR + "/blah" + with open(file_path, "w") as f: + f.write("test") + response = self.client.get( + silky_reverse( + "request_sql_detail", + kwargs={"sql_id": query.id, "request_id": request.id}, + ), + data={"line_num": 0, "file_path": file_path}, + ) self.assertTrue(response.status_code == 403) def test_virtualenv_not_available_no_highlight(self): """if we don't have a virtualenv, there should be no code hightlighted""" request = MockSuite().mock_request() query = MockSuite().mock_sql_queries(request=request)[0] - url = silky_reverse('request_sql_detail', kwargs={'sql_id': query.id, 'request_id': request.id}) + url = silky_reverse( + "request_sql_detail", kwargs={"sql_id": query.id, "request_id": request.id} + ) with 
patch.dict(os.environ, {}, clear=True): - self.assertIsNone(os.environ.get('VIRTUAL_ENV')) + self.assertIsNone(os.environ.get("VIRTUAL_ENV")) response = self.client.get(url) self.assertEqual(response.status_code, 200) - self.assertContains(response, ' is-third-party') - self.assertNotContains(response, ' not-third-party') + self.assertContains(response, " is-third-party") + self.assertNotContains(response, " not-third-party") def test_virtualenv_hightlight(self): """if we have a virtualenv, there should be code hightlighted""" request = MockSuite().mock_request() query = MockSuite().mock_sql_queries(request=request)[0] - url = silky_reverse('request_sql_detail', kwargs={'sql_id': query.id, 'request_id': request.id}) - with patch.dict(os.environ, {'VIRTUAL_ENV': '/some/virtualenv/that/doesnt/really/exist'}, clear=True): + url = silky_reverse( + "request_sql_detail", kwargs={"sql_id": query.id, "request_id": request.id} + ) + with patch.dict( + os.environ, + {"VIRTUAL_ENV": "/some/virtualenv/that/doesnt/really/exist"}, + clear=True, + ): response = self.client.get(url) self.assertEqual(response.status_code, 200) - self.assertContains(response, ' not-third-party') + self.assertContains(response, " not-third-party") diff --git a/project/tests/test_view_summary_view.py b/project/tests/test_view_summary_view.py index 3598f1ae..b1a2e535 100644 --- a/project/tests/test_view_summary_view.py +++ b/project/tests/test_view_summary_view.py @@ -18,22 +18,36 @@ def test_view_without_session_and_auth_middlewares(self): """ Filters are not present because there is no `session` to store them. """ - with self.modify_settings(MIDDLEWARE={ - 'remove': [ - 'django.contrib.sessions.middleware.SessionMiddleware', - 'django.contrib.auth.middleware.AuthenticationMiddleware', - 'django.contrib.messages.middleware.MessageMiddleware', - ], - }): + with self.modify_settings( + MIDDLEWARE={ + "remove": [ + "django.contrib.sessions.middleware.SessionMiddleware", + "django.contrib.auth.middleware.AuthenticationMiddleware", + "django.contrib.messages.middleware.MessageMiddleware", + ], + } + ): # test filters on POST seconds = 3600 - response = self.client.post(silky_reverse('summary'), { - 'filter-seconds-value': seconds, - 'filter-seconds-typ': 'SecondsFilter', - }) + response = self.client.post( + silky_reverse("summary"), + { + "filter-seconds-value": seconds, + "filter-seconds-typ": "SecondsFilter", + }, + ) context = response.context - self.assertTrue(dict_contains({ - 'filters': { - 'seconds': {'typ': 'SecondsFilter', 'value': seconds, 'str': f'>{seconds} seconds ago'} - } - }, context)) + self.assertTrue( + dict_contains( + { + "filters": { + "seconds": { + "typ": "SecondsFilter", + "value": seconds, + "str": f">{seconds} seconds ago", + } + } + }, + context, + ) + ) diff --git a/project/tests/urlconf_without_silk.py b/project/tests/urlconf_without_silk.py index 4d8ab986..ed5bda5e 100644 --- a/project/tests/urlconf_without_silk.py +++ b/project/tests/urlconf_without_silk.py @@ -1,8 +1,5 @@ from django.urls import include, path urlpatterns = [ - path( - 'example_app/', - include('example_app.urls', namespace='example_app') - ), + path("example_app/", include("example_app.urls", namespace="example_app")), ] diff --git a/project/tests/util.py b/project/tests/util.py index 67b4cac3..57272cc5 100644 --- a/project/tests/util.py +++ b/project/tests/util.py @@ -24,7 +24,7 @@ def delete_all_models(model_class): :return: """ while model_class.objects.count(): - ids = model_class.objects.values_list('pk', flat=True)[:80] + 
ids = model_class.objects.values_list("pk", flat=True)[:80] model_class.objects.filter(pk__in=ids).delete() diff --git a/project/wsgi.py b/project/wsgi.py index d93cab75..bfc51ead 100644 --- a/project/wsgi.py +++ b/project/wsgi.py @@ -6,6 +6,7 @@ https://docs.djangoproject.com/en/2.0/howto/deployment/wsgi/ """ + import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings") diff --git a/setup.py b/setup.py index be200560..4adb8e7f 100644 --- a/setup.py +++ b/setup.py @@ -6,41 +6,41 @@ os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir))) setup( - name='django-silk', + name="django-silk", use_scm_version=True, - packages=['silk'], + packages=["silk"], include_package_data=True, - license='MIT License', - description='Silky smooth profiling for the Django Framework', - long_description=open('README.md').read(), - long_description_content_type='text/markdown', - url='https://github.com/jazzband/django-silk', - author='Michael Ford', - author_email='mtford@gmail.com', + license="MIT License", + description="Silky smooth profiling for the Django Framework", + long_description=open("README.md").read(), + long_description_content_type="text/markdown", + url="https://github.com/jazzband/django-silk", + author="Michael Ford", + author_email="mtford@gmail.com", classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Environment :: Web Environment', - 'Framework :: Django', - 'Framework :: Django :: 4.2', - 'Framework :: Django :: 5.0', - 'Framework :: Django :: 5.1', - 'Intended Audience :: Developers', - 'Operating System :: OS Independent', - 'Programming Language :: Python', - 'Programming Language :: Python :: 3.9', - 'Programming Language :: Python :: 3.10', - 'Programming Language :: Python :: 3.11', - 'Programming Language :: Python :: 3.12', - 'Programming Language :: Python :: 3.13', - 'Topic :: Internet :: WWW/HTTP', - 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', + "Development Status :: 5 - Production/Stable", + "Environment :: Web Environment", + "Framework :: Django", + "Framework :: Django :: 4.2", + "Framework :: Django :: 5.0", + "Framework :: Django :: 5.1", + "Intended Audience :: Developers", + "Operating System :: OS Independent", + "Programming Language :: Python", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Topic :: Internet :: WWW/HTTP", + "Topic :: Internet :: WWW/HTTP :: Dynamic Content", ], install_requires=[ - 'Django>=4.2', - 'sqlparse', - 'autopep8', - 'gprof2dot>=2017.09.19', + "Django>=4.2", + "sqlparse", + "autopep8", + "gprof2dot>=2017.09.19", ], - python_requires='>=3.9', - setup_requires=['setuptools_scm'], + python_requires=">=3.9", + setup_requires=["setuptools_scm"], ) diff --git a/silk/auth.py b/silk/auth.py index f878623e..5cd42729 100644 --- a/silk/auth.py +++ b/silk/auth.py @@ -14,9 +14,7 @@ def login_possibly_required(function=None, **kwargs): def permissions_possibly_required(function=None): if SilkyConfig().SILKY_AUTHORISATION: - actual_decorator = user_passes_test( - SilkyConfig().SILKY_PERMISSIONS - ) + actual_decorator = user_passes_test(SilkyConfig().SILKY_PERMISSIONS) if function: return actual_decorator(function) return actual_decorator diff --git a/silk/code_generation/__init__.py b/silk/code_generation/__init__.py index 037d9736..afb804c7 100644 --- a/silk/code_generation/__init__.py +++ b/silk/code_generation/__init__.py @@ 
-1 +1 @@ -__author__ = 'mtford' +__author__ = "mtford" diff --git a/silk/code_generation/curl.py b/silk/code_generation/curl.py index db135911..016af14c 100644 --- a/silk/code_generation/curl.py +++ b/silk/code_generation/curl.py @@ -17,30 +17,30 @@ def _curl_process_params(body, content_type, query_params): if query_params: try: query_params = urlencode( - [(k, v.encode('utf8')) for k, v in query_params.items()] + [(k, v.encode("utf8")) for k, v in query_params.items()] ) except TypeError: pass - query_params = '?' + str(query_params) - if 'json' in content_type or 'javascript' in content_type: + query_params = "?" + str(query_params) + if "json" in content_type or "javascript" in content_type: if isinstance(body, dict): body = json.dumps(body) - modifier = '-d' + modifier = "-d" # See http://curl.haxx.se/docs/manpage.html#-F # for multipart vs x-www-form-urlencoded # x-www-form-urlencoded is same way as browser, # multipart is RFC 2388 which allows file uploads. - elif 'multipart' in content_type or 'x-www-form-urlencoded' in content_type: + elif "multipart" in content_type or "x-www-form-urlencoded" in content_type: try: - body = ' '.join([f'{k}={v}' for k, v in body.items()]) + body = " ".join([f"{k}={v}" for k, v in body.items()]) except AttributeError: - modifier = '-d' + modifier = "-d" else: content_type = None - modifier = '-F' + modifier = "-F" elif body: body = str(body) - modifier = '-d' + modifier = "-d" else: modifier = None content_type = None @@ -50,7 +50,7 @@ def _curl_process_params(body, content_type, query_params): def curl_cmd(url, method=None, query_params=None, body=None, content_type=None): if not content_type: - content_type = 'text/plain' + content_type = "text/plain" modifier, body, query_params, content_type, extra = _curl_process_params( body, content_type, @@ -58,12 +58,12 @@ def curl_cmd(url, method=None, query_params=None, body=None, content_type=None): ) t = Template(curl_template) context = { - 'url': url, - 'method': method, - 'query_params': query_params, - 'body': body, - 'modifier': modifier, - 'content_type': content_type, - 'extra': extra, + "url": url, + "method": method, + "query_params": query_params, + "body": body, + "modifier": modifier, + "content_type": content_type, + "extra": extra, } - return t.render(Context(context, autoescape=False)).replace('\n', ' ') + return t.render(Context(context, autoescape=False)).replace("\n", " ") diff --git a/silk/code_generation/django_test_client.py b/silk/code_generation/django_test_client.py index cd34cd5f..978499d0 100644 --- a/silk/code_generation/django_test_client.py +++ b/silk/code_generation/django_test_client.py @@ -19,7 +19,7 @@ def _encode_query_params(query_params): query_params = urlencode(query_params) except TypeError: pass - return '?' + query_params + return "?" 
+ query_params def gen(path, method=None, query_params=None, data=None, content_type=None): @@ -28,21 +28,21 @@ def gen(path, method=None, query_params=None, data=None, content_type=None): method = method.lower() t = Template(template) context = { - 'path': path, - 'lower_case_method': method, - 'content_type': content_type, + "path": path, + "lower_case_method": method, + "content_type": content_type, } - if method == 'get': - context['data'] = query_params + if method == "get": + context["data"] = query_params else: if query_params: query_params = _encode_query_params(query_params) path += query_params if is_str_typ(data): data = "'%s'" % data - context['data'] = data - context['query_params'] = query_params + context["data"] = data + context["query_params"] = query_params return autopep8.fix_code( t.render(Context(context, autoescape=False)), - options=autopep8.parse_args(['--aggressive', '']), + options=autopep8.parse_args(["--aggressive", ""]), ) diff --git a/silk/collector.py b/silk/collector.py index de9d7a3a..d60f4b0c 100644 --- a/silk/collector.py +++ b/silk/collector.py @@ -13,18 +13,19 @@ from silk.models import _time_taken from silk.singleton import Singleton -TYP_SILK_QUERIES = 'silk_queries' -TYP_PROFILES = 'profiles' -TYP_QUERIES = 'queries' +TYP_SILK_QUERIES = "silk_queries" +TYP_PROFILES = "profiles" +TYP_QUERIES = "queries" -Logger = logging.getLogger('silk.collector') +Logger = logging.getLogger("silk.collector") def raise_middleware_error(): raise RuntimeError( - 'Silk middleware has not been installed correctly. Ordering must ensure that Silk middleware can ' - 'execute process_request and process_response. If an earlier middleware returns from either of ' - 'these methods, Silk will not have the chance to inspect the request/response objects.') + "Silk middleware has not been installed correctly. Ordering must ensure that Silk middleware can " + "execute process_request and process_response. If an earlier middleware returns from either of " + "these methods, Silk will not have the chance to inspect the request/response objects." + ) class DataCollector(metaclass=Singleton): @@ -40,12 +41,12 @@ def __init__(self): self._configure() def ensure_middleware_installed(self): - if not hasattr(self.local, 'temp_identifier'): + if not hasattr(self.local, "temp_identifier"): raise_middleware_error() @property def request(self): - return getattr(self.local, 'request', None) + return getattr(self.local, "request", None) def get_identifier(self): self.ensure_middleware_installed() @@ -64,7 +65,7 @@ def _configure(self): @property def objects(self): - return getattr(self.local, 'objects', None) + return getattr(self.local, "objects", None) @property def queries(self): @@ -78,7 +79,7 @@ def _get_objects(self, typ): objects = self.objects if objects is None: self._raise_not_configured( - 'Attempt to access %s without initialisation.' % typ + "Attempt to access %s without initialisation." 
% typ ) if typ not in objects: objects[typ] = {} @@ -98,7 +99,9 @@ def configure(self, request=None, should_profile=True): except ValueError as e: # Deal with cProfile not being allowed to run concurrently # https://github.com/jazzband/django-silk/issues/682 - Logger.error('Could not enable python profiler, %s' % str(e), exc_info=True) + Logger.error( + "Could not enable python profiler, %s" % str(e), exc_info=True + ) self.local.pythonprofiler = None def clear(self): @@ -106,7 +109,7 @@ def clear(self): self._configure() def _raise_not_configured(self, err): - raise SilkNotConfigured(err + ' Is the middleware installed correctly?') + raise SilkNotConfigured(err + " Is the middleware installed correctly?") def register_objects(self, typ, *args): self.ensure_middleware_installed() @@ -118,7 +121,7 @@ def register_objects(self, typ, *args): # called for whatever reason. Perhaps if another piece of # middleware is not playing ball. self._raise_not_configured( - 'Attempt to register object of type %s without initialisation. ' + "Attempt to register object of type %s without initialisation. " ) if typ not in objects: self.objects[typ] = {} @@ -133,34 +136,42 @@ def register_profile(self, *args): def _record_meta_profiling(self): if SilkyConfig().SILKY_META: num_queries = len(self.silk_queries) - query_time = sum(_time_taken(x['start_time'], x['end_time']) for _, x in self.silk_queries.items()) + query_time = sum( + _time_taken(x["start_time"], x["end_time"]) + for _, x in self.silk_queries.items() + ) self.request.meta_num_queries = num_queries self.request.meta_time_spent_queries = query_time def stop_python_profiler(self): - if getattr(self.local, 'pythonprofiler', None): + if getattr(self.local, "pythonprofiler", None): self.local.pythonprofiler.disable() def finalise(self): - if getattr(self.local, 'pythonprofiler', None): + if getattr(self.local, "pythonprofiler", None): s = StringIO() - ps = pstats.Stats(self.local.pythonprofiler, stream=s).sort_stats('cumulative') + ps = pstats.Stats(self.local.pythonprofiler, stream=s).sort_stats( + "cumulative" + ) ps.print_stats() profile_text = s.getvalue() profile_text = "\n".join( - profile_text.split("\n")[0:256]) # don't record too much because it can overflow the field storage size + profile_text.split("\n")[0:256] + ) # don't record too much because it can overflow the field storage size self.request.pyprofile = profile_text if SilkyConfig().SILKY_PYTHON_PROFILER_BINARY: proposed_file_name = self._get_proposed_file_name() - file_name = self.request.prof_file.storage.get_available_name(proposed_file_name) - with self.request.prof_file.storage.open(file_name, 'w+b') as f: + file_name = self.request.prof_file.storage.get_available_name( + proposed_file_name + ) + with self.request.prof_file.storage.open(file_name, "w+b") as f: marshal.dump(ps.stats, f) self.request.prof_file = f.name sql_queries = [] for identifier, query in self.queries.items(): - query['identifier'] = identifier + query["identifier"] = identifier sql_query = models.SQLQuery(**query) sql_queries += [sql_query] @@ -169,7 +180,7 @@ def finalise(self): for sql_query in sql_queries.all(): query = self.queries.get(sql_query.identifier) if query: - query['model'] = sql_query + query["model"] = sql_query for profile in self.profiles.values(): profile_query_models = [] @@ -180,17 +191,17 @@ def finalise(self): try: query = self.queries[query_temp_id] try: - profile_query_models.append(query['model']) + profile_query_models.append(query["model"]) except KeyError: raise 
SilkInternalInconsistency( - 'Profile references a query dictionary that has not ' - 'been converted into a Django model. This should ' - 'never happen, please file a bug report' + "Profile references a query dictionary that has not " + "been converted into a Django model. This should " + "never happen, please file a bug report" ) except KeyError: raise SilkInternalInconsistency( - 'Profile references a query temp_id that does not exist. ' - 'This should never happen, please file a bug report' + "Profile references a query temp_id that does not exist. " + "This should never happen, please file a bug report" ) profile = models.Profile.objects.create(**profile) if profile_query_models: @@ -224,4 +235,4 @@ def slugify_path(request_path: str) -> str: .decode("ascii") ) request_path = request_path.lower()[:50] - return re.sub(r'\W+', '_', request_path).strip('_') + return re.sub(r"\W+", "_", request_path).strip("_") diff --git a/silk/config.py b/silk/config.py index 8b237340..a258a871 100644 --- a/silk/config.py +++ b/silk/config.py @@ -11,38 +11,50 @@ def default_permissions(user): class SilkyConfig(metaclass=Singleton): defaults = { - 'SILKY_DYNAMIC_PROFILING': [], - 'SILKY_IGNORE_PATHS': [], - 'SILKY_HIDE_COOKIES': True, - 'SILKY_IGNORE_QUERIES': [], - 'SILKY_META': False, - 'SILKY_AUTHENTICATION': False, - 'SILKY_AUTHORISATION': False, - 'SILKY_PERMISSIONS': default_permissions, - 'SILKY_MAX_RECORDED_REQUESTS': 10**4, - 'SILKY_MAX_RECORDED_REQUESTS_CHECK_PERCENT': 10, - 'SILKY_MAX_REQUEST_BODY_SIZE': -1, - 'SILKY_MAX_RESPONSE_BODY_SIZE': -1, - 'SILKY_INTERCEPT_PERCENT': 100, - 'SILKY_INTERCEPT_FUNC': None, - 'SILKY_PYTHON_PROFILER': False, - 'SILKY_PYTHON_PROFILER_FUNC': None, - 'SILKY_STORAGE_CLASS': 'silk.storage.ProfilerResultStorage', - 'SILKY_PYTHON_PROFILER_EXTENDED_FILE_NAME': False, - 'SILKY_MIDDLEWARE_CLASS': 'silk.middleware.SilkyMiddleware', - 'SILKY_JSON_ENSURE_ASCII': True, - 'SILKY_ANALYZE_QUERIES': False, - 'SILKY_EXPLAIN_FLAGS': None, - 'SILKY_SENSITIVE_KEYS': {'username', 'api', 'token', 'key', 'secret', 'password', 'signature'}, - 'SILKY_DELETE_PROFILES': False + "SILKY_DYNAMIC_PROFILING": [], + "SILKY_IGNORE_PATHS": [], + "SILKY_HIDE_COOKIES": True, + "SILKY_IGNORE_QUERIES": [], + "SILKY_META": False, + "SILKY_AUTHENTICATION": False, + "SILKY_AUTHORISATION": False, + "SILKY_PERMISSIONS": default_permissions, + "SILKY_MAX_RECORDED_REQUESTS": 10**4, + "SILKY_MAX_RECORDED_REQUESTS_CHECK_PERCENT": 10, + "SILKY_MAX_REQUEST_BODY_SIZE": -1, + "SILKY_MAX_RESPONSE_BODY_SIZE": -1, + "SILKY_INTERCEPT_PERCENT": 100, + "SILKY_INTERCEPT_FUNC": None, + "SILKY_PYTHON_PROFILER": False, + "SILKY_PYTHON_PROFILER_FUNC": None, + "SILKY_STORAGE_CLASS": "silk.storage.ProfilerResultStorage", + "SILKY_PYTHON_PROFILER_EXTENDED_FILE_NAME": False, + "SILKY_MIDDLEWARE_CLASS": "silk.middleware.SilkyMiddleware", + "SILKY_JSON_ENSURE_ASCII": True, + "SILKY_ANALYZE_QUERIES": False, + "SILKY_EXPLAIN_FLAGS": None, + "SILKY_SENSITIVE_KEYS": { + "username", + "api", + "token", + "key", + "secret", + "password", + "signature", + }, + "SILKY_DELETE_PROFILES": False, } def _setup(self): from django.conf import settings - options = {option: getattr(settings, option) for option in dir(settings) if option.startswith('SILKY')} + options = { + option: getattr(settings, option) + for option in dir(settings) + if option.startswith("SILKY") + } self.attrs = copy(self.defaults) - self.attrs['SILKY_PYTHON_PROFILER_RESULT_PATH'] = settings.MEDIA_ROOT + self.attrs["SILKY_PYTHON_PROFILER_RESULT_PATH"] = settings.MEDIA_ROOT 
self.attrs.update(options) def __init__(self): diff --git a/silk/middleware.py b/silk/middleware.py index f1843777..58f078c5 100644 --- a/silk/middleware.py +++ b/silk/middleware.py @@ -16,12 +16,12 @@ from silk.profiling.profiler import silk_meta_profiler from silk.sql import execute_sql -Logger = logging.getLogger('silk.middleware') +Logger = logging.getLogger("silk.middleware") def silky_reverse(name, *args, **kwargs): try: - r = reverse('silk:%s' % name, *args, **kwargs) + r = reverse("silk:%s" % name, *args, **kwargs) except NoReverseMatch: # In case user forgets to set namespace, but also fixes Django 1.5 tests on Travis # Hopefully if user has forgotten to add namespace there are no clashes with their own @@ -31,14 +31,14 @@ def silky_reverse(name, *args, **kwargs): def get_fpath(): - return silky_reverse('summary') + return silky_reverse("summary") config = SilkyConfig() AUTH_AND_SESSION_MIDDLEWARES = [ - 'django.contrib.sessions.middleware.SessionMiddleware', - 'django.contrib.auth.middleware.AuthenticationMiddleware', - 'django.contrib.messages.middleware.MessageMiddleware', + "django.contrib.sessions.middleware.SessionMiddleware", + "django.contrib.auth.middleware.AuthenticationMiddleware", + "django.contrib.messages.middleware.MessageMiddleware", ] @@ -76,8 +76,10 @@ def __init__(self, get_response): set(AUTH_AND_SESSION_MIDDLEWARES) & set(settings.MIDDLEWARE) ): raise SilkNotConfigured( - _("SILKY_AUTHENTICATION can not be enabled without Session, " - "Authentication or Message Django's middlewares") + _( + "SILKY_AUTHENTICATION can not be enabled without Session, " + "Authentication or Message Django's middlewares" + ) ) self.get_response = get_response @@ -100,24 +102,26 @@ def __call__(self, request): def _apply_dynamic_mappings(self): dynamic_profile_configs = config.SILKY_DYNAMIC_PROFILING for conf in dynamic_profile_configs: - module = conf.get('module') - function = conf.get('function') - start_line = conf.get('start_line') - end_line = conf.get('end_line') - name = conf.get('name') + module = conf.get("module") + function = conf.get("function") + start_line = conf.get("start_line") + end_line = conf.get("end_line") + name = conf.get("name") if module and function: if start_line and end_line: # Dynamic context manager - dynamic.inject_context_manager_func(module=module, - func=function, - start_line=start_line, - end_line=end_line, - name=name) + dynamic.inject_context_manager_func( + module=module, + func=function, + start_line=start_line, + end_line=end_line, + name=name, + ) else: # Dynamic decorator - dynamic.profile_function_or_method(module=module, - func=function, - name=name) + dynamic.profile_function_or_method( + module=module, func=function, name=name + ) else: - raise KeyError('Invalid dynamic mapping %s' % conf) + raise KeyError("Invalid dynamic mapping %s" % conf) @silk_meta_profiler() def process_request(self, request): @@ -126,10 +130,10 @@ def process_request(self, request): if not _should_intercept(request): return - Logger.debug('process_request') + Logger.debug("process_request") request.silk_is_intercepted = True self._apply_dynamic_mappings() - if not hasattr(SQLCompiler, '_execute_sql'): + if not hasattr(SQLCompiler, "_execute_sql"): SQLCompiler._execute_sql = SQLCompiler.execute_sql SQLCompiler.execute_sql = execute_sql @@ -144,7 +148,7 @@ def process_request(self, request): @transaction.atomic() def _process_response(self, request, response): - Logger.debug('Process response') + Logger.debug("Process response") with silk_meta_profiler(): 
collector = DataCollector() collector.stop_python_profiler() @@ -155,30 +159,32 @@ def _process_response(self, request, response): collector.finalise() else: Logger.error( - 'No request model was available when processing response. ' - 'Did something go wrong in process_request/process_view?' - '\n' + str(request) + '\n\n' + str(response) + "No request model was available when processing response. " + "Did something go wrong in process_request/process_view?" + "\n" + str(request) + "\n\n" + str(response) ) # Need to save the data outside the silk_meta_profiler # Otherwise the meta time collected in the context manager # is not taken in account if silk_request: silk_request.save() - Logger.debug('Process response done.') + Logger.debug("Process response done.") def process_response(self, request, response): max_attempts = 2 attempts = 1 - if getattr(request, 'silk_is_intercepted', False): + if getattr(request, "silk_is_intercepted", False): while attempts <= max_attempts: if attempts > 1: - Logger.debug('Retrying _process_response; attempt %s' % attempts) + Logger.debug("Retrying _process_response; attempt %s" % attempts) try: self._process_response(request, response) break except (AttributeError, DatabaseError): if attempts >= max_attempts: - Logger.warn('Exhausted _process_response attempts; not processing request') + Logger.warn( + "Exhausted _process_response attempts; not processing request" + ) break attempts += 1 return response diff --git a/silk/migrations/0001_initial.py b/silk/migrations/0001_initial.py index 791f45b7..b0f7816d 100644 --- a/silk/migrations/0001_initial.py +++ b/silk/migrations/0001_initial.py @@ -6,81 +6,147 @@ class Migration(migrations.Migration): - dependencies = [ - ] + dependencies = [] operations = [ migrations.CreateModel( - name='Profile', + name="Profile", fields=[ - ('id', models.AutoField(serialize=False, primary_key=True, verbose_name='ID', auto_created=True)), - ('name', models.CharField(max_length=300, blank=True, default='')), - ('start_time', models.DateTimeField(default=django.utils.timezone.now)), - ('end_time', models.DateTimeField(blank=True, null=True)), - ('time_taken', models.FloatField(blank=True, null=True)), - ('file_path', models.CharField(max_length=300, blank=True, default='')), - ('line_num', models.IntegerField(blank=True, null=True)), - ('end_line_num', models.IntegerField(blank=True, null=True)), - ('func_name', models.CharField(max_length=300, blank=True, default='')), - ('exception_raised', models.BooleanField(default=False)), - ('dynamic', models.BooleanField(default=False)), + ( + "id", + models.AutoField( + serialize=False, + primary_key=True, + verbose_name="ID", + auto_created=True, + ), + ), + ("name", models.CharField(max_length=300, blank=True, default="")), + ("start_time", models.DateTimeField(default=django.utils.timezone.now)), + ("end_time", models.DateTimeField(blank=True, null=True)), + ("time_taken", models.FloatField(blank=True, null=True)), + ("file_path", models.CharField(max_length=300, blank=True, default="")), + ("line_num", models.IntegerField(blank=True, null=True)), + ("end_line_num", models.IntegerField(blank=True, null=True)), + ("func_name", models.CharField(max_length=300, blank=True, default="")), + ("exception_raised", models.BooleanField(default=False)), + ("dynamic", models.BooleanField(default=False)), ], options={ - 'abstract': False, + "abstract": False, }, ), migrations.CreateModel( - name='Request', + name="Request", fields=[ - ('id', models.CharField(max_length=36, primary_key=True, 
default=uuid.uuid1, serialize=False)), - ('path', models.CharField(db_index=True, max_length=190)), - ('query_params', models.TextField(blank=True, default='')), - ('raw_body', models.TextField(blank=True, default='')), - ('body', models.TextField(blank=True, default='')), - ('method', models.CharField(max_length=10)), - ('start_time', models.DateTimeField(db_index=True, default=django.utils.timezone.now)), - ('view_name', models.CharField(db_index=True, blank=True, default='', max_length=190, null=True)), - ('end_time', models.DateTimeField(blank=True, null=True)), - ('time_taken', models.FloatField(blank=True, null=True)), - ('encoded_headers', models.TextField(blank=True, default='')), - ('meta_time', models.FloatField(blank=True, null=True)), - ('meta_num_queries', models.IntegerField(blank=True, null=True)), - ('meta_time_spent_queries', models.FloatField(blank=True, null=True)), - ('pyprofile', models.TextField(blank=True, default='')), - ('num_sql_queries', models.IntegerField(default=0)), + ( + "id", + models.CharField( + max_length=36, + primary_key=True, + default=uuid.uuid1, + serialize=False, + ), + ), + ("path", models.CharField(db_index=True, max_length=190)), + ("query_params", models.TextField(blank=True, default="")), + ("raw_body", models.TextField(blank=True, default="")), + ("body", models.TextField(blank=True, default="")), + ("method", models.CharField(max_length=10)), + ( + "start_time", + models.DateTimeField( + db_index=True, default=django.utils.timezone.now + ), + ), + ( + "view_name", + models.CharField( + db_index=True, blank=True, default="", max_length=190, null=True + ), + ), + ("end_time", models.DateTimeField(blank=True, null=True)), + ("time_taken", models.FloatField(blank=True, null=True)), + ("encoded_headers", models.TextField(blank=True, default="")), + ("meta_time", models.FloatField(blank=True, null=True)), + ("meta_num_queries", models.IntegerField(blank=True, null=True)), + ("meta_time_spent_queries", models.FloatField(blank=True, null=True)), + ("pyprofile", models.TextField(blank=True, default="")), + ("num_sql_queries", models.IntegerField(default=0)), ], ), migrations.CreateModel( - name='Response', + name="Response", fields=[ - ('id', models.CharField(max_length=36, primary_key=True, default=uuid.uuid1, serialize=False)), - ('status_code', models.IntegerField()), - ('raw_body', models.TextField(blank=True, default='')), - ('body', models.TextField(blank=True, default='')), - ('encoded_headers', models.TextField(blank=True, default='')), - ('request', models.OneToOneField(to='silk.Request', related_name='response', on_delete=models.CASCADE)), + ( + "id", + models.CharField( + max_length=36, + primary_key=True, + default=uuid.uuid1, + serialize=False, + ), + ), + ("status_code", models.IntegerField()), + ("raw_body", models.TextField(blank=True, default="")), + ("body", models.TextField(blank=True, default="")), + ("encoded_headers", models.TextField(blank=True, default="")), + ( + "request", + models.OneToOneField( + to="silk.Request", + related_name="response", + on_delete=models.CASCADE, + ), + ), ], ), migrations.CreateModel( - name='SQLQuery', + name="SQLQuery", fields=[ - ('id', models.AutoField(serialize=False, primary_key=True, verbose_name='ID', auto_created=True)), - ('query', models.TextField()), - ('start_time', models.DateTimeField(default=django.utils.timezone.now, blank=True, null=True)), - ('end_time', models.DateTimeField(blank=True, null=True)), - ('time_taken', models.FloatField(blank=True, null=True)), - ('traceback', 
models.TextField()), - ('request', models.ForeignKey(to='silk.Request', blank=True, null=True, related_name='queries', on_delete=models.CASCADE)), + ( + "id", + models.AutoField( + serialize=False, + primary_key=True, + verbose_name="ID", + auto_created=True, + ), + ), + ("query", models.TextField()), + ( + "start_time", + models.DateTimeField( + default=django.utils.timezone.now, blank=True, null=True + ), + ), + ("end_time", models.DateTimeField(blank=True, null=True)), + ("time_taken", models.FloatField(blank=True, null=True)), + ("traceback", models.TextField()), + ( + "request", + models.ForeignKey( + to="silk.Request", + blank=True, + null=True, + related_name="queries", + on_delete=models.CASCADE, + ), + ), ], ), migrations.AddField( - model_name='profile', - name='queries', - field=models.ManyToManyField(to='silk.SQLQuery', db_index=True, related_name='profiles'), + model_name="profile", + name="queries", + field=models.ManyToManyField( + to="silk.SQLQuery", db_index=True, related_name="profiles" + ), ), migrations.AddField( - model_name='profile', - name='request', - field=models.ForeignKey(to='silk.Request', blank=True, null=True, on_delete=models.CASCADE), + model_name="profile", + name="request", + field=models.ForeignKey( + to="silk.Request", blank=True, null=True, on_delete=models.CASCADE + ), ), ] diff --git a/silk/migrations/0002_auto_update_uuid4_id_field.py b/silk/migrations/0002_auto_update_uuid4_id_field.py index ab7cd3e0..4fca4abe 100644 --- a/silk/migrations/0002_auto_update_uuid4_id_field.py +++ b/silk/migrations/0002_auto_update_uuid4_id_field.py @@ -6,18 +6,22 @@ class Migration(migrations.Migration): dependencies = [ - ('silk', '0001_initial'), + ("silk", "0001_initial"), ] operations = [ migrations.AlterField( - model_name='request', - name='id', - field=models.CharField(default=uuid.uuid4, max_length=36, serialize=False, primary_key=True), + model_name="request", + name="id", + field=models.CharField( + default=uuid.uuid4, max_length=36, serialize=False, primary_key=True + ), ), migrations.AlterField( - model_name='response', - name='id', - field=models.CharField(default=uuid.uuid4, max_length=36, serialize=False, primary_key=True), + model_name="response", + name="id", + field=models.CharField( + default=uuid.uuid4, max_length=36, serialize=False, primary_key=True + ), ), ] diff --git a/silk/migrations/0003_request_prof_file.py b/silk/migrations/0003_request_prof_file.py index 2de6306f..dbd75534 100644 --- a/silk/migrations/0003_request_prof_file.py +++ b/silk/migrations/0003_request_prof_file.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('silk', '0002_auto_update_uuid4_id_field'), + ("silk", "0002_auto_update_uuid4_id_field"), ] operations = [ migrations.AddField( - model_name='request', - name='prof_file', - field=models.FileField(null=True, upload_to=''), + model_name="request", + name="prof_file", + field=models.FileField(null=True, upload_to=""), ), ] diff --git a/silk/migrations/0004_request_prof_file_storage.py b/silk/migrations/0004_request_prof_file_storage.py index 9826794f..605e6533 100644 --- a/silk/migrations/0004_request_prof_file_storage.py +++ b/silk/migrations/0004_request_prof_file_storage.py @@ -8,13 +8,15 @@ class Migration(migrations.Migration): dependencies = [ - ('silk', '0003_request_prof_file'), + ("silk", "0003_request_prof_file"), ] operations = [ migrations.AlterField( - model_name='request', - name='prof_file', - field=models.FileField(null=True, storage=silk.models.silk_storage, upload_to=''), + 
model_name="request", + name="prof_file", + field=models.FileField( + null=True, storage=silk.models.silk_storage, upload_to="" + ), ), ] diff --git a/silk/migrations/0005_increase_request_prof_file_length.py b/silk/migrations/0005_increase_request_prof_file_length.py index d774fc11..0df4beb7 100644 --- a/silk/migrations/0005_increase_request_prof_file_length.py +++ b/silk/migrations/0005_increase_request_prof_file_length.py @@ -8,13 +8,18 @@ class Migration(migrations.Migration): dependencies = [ - ('silk', '0004_request_prof_file_storage'), + ("silk", "0004_request_prof_file_storage"), ] operations = [ migrations.AlterField( - model_name='request', - name='prof_file', - field=models.FileField(max_length=300, null=True, storage=silk.models.silk_storage, upload_to=''), + model_name="request", + name="prof_file", + field=models.FileField( + max_length=300, + null=True, + storage=silk.models.silk_storage, + upload_to="", + ), ), ] diff --git a/silk/migrations/0006_fix_request_prof_file_blank.py b/silk/migrations/0006_fix_request_prof_file_blank.py index d346051b..60187560 100644 --- a/silk/migrations/0006_fix_request_prof_file_blank.py +++ b/silk/migrations/0006_fix_request_prof_file_blank.py @@ -8,13 +8,18 @@ class Migration(migrations.Migration): dependencies = [ - ('silk', '0005_increase_request_prof_file_length'), + ("silk", "0005_increase_request_prof_file_length"), ] operations = [ migrations.AlterField( - model_name='request', - name='prof_file', - field=models.FileField(blank=True, max_length=300, storage=silk.storage.ProfilerResultStorage(), upload_to=''), + model_name="request", + name="prof_file", + field=models.FileField( + blank=True, + max_length=300, + storage=silk.storage.ProfilerResultStorage(), + upload_to="", + ), ), ] diff --git a/silk/migrations/0007_sqlquery_identifier.py b/silk/migrations/0007_sqlquery_identifier.py index ce6a33d2..7de23eb5 100644 --- a/silk/migrations/0007_sqlquery_identifier.py +++ b/silk/migrations/0007_sqlquery_identifier.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('silk', '0006_fix_request_prof_file_blank'), + ("silk", "0006_fix_request_prof_file_blank"), ] operations = [ migrations.AddField( - model_name='sqlquery', - name='identifier', + model_name="sqlquery", + name="identifier", field=models.IntegerField(default=-1), ), ] diff --git a/silk/migrations/0008_sqlquery_analysis.py b/silk/migrations/0008_sqlquery_analysis.py index bdcd33cd..619a2a5d 100644 --- a/silk/migrations/0008_sqlquery_analysis.py +++ b/silk/migrations/0008_sqlquery_analysis.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('silk', '0007_sqlquery_identifier'), + ("silk", "0007_sqlquery_identifier"), ] operations = [ migrations.AddField( - model_name='sqlquery', - name='analysis', + model_name="sqlquery", + name="analysis", field=models.TextField(blank=True, null=True), ), ] diff --git a/silk/model_factory.py b/silk/model_factory.py index a802beb4..630a196c 100644 --- a/silk/model_factory.py +++ b/silk/model_factory.py @@ -13,18 +13,19 @@ from silk.collector import DataCollector from silk.config import SilkyConfig -Logger = logging.getLogger('silk.model_factory') +Logger = logging.getLogger("silk.model_factory") -content_types_json = ['application/json', - 'application/x-javascript', - 'text/javascript', - 'text/x-javascript', - 'text/x-json'] -multipart_form = 'multipart/form-data' -content_type_form = [multipart_form, - 'application/x-www-form-urlencoded'] -content_type_html = ['text/html'] -content_type_css = 
['text/css'] +content_types_json = [ + "application/json", + "application/x-javascript", + "text/javascript", + "text/x-javascript", + "text/x-json", +] +multipart_form = "multipart/form-data" +content_type_form = [multipart_form, "application/x-www-form-urlencoded"] +content_type_html = ["text/html"] +content_type_css = ["text/css"] class DefaultEncoder(json.JSONEncoder): @@ -37,12 +38,12 @@ def _parse_content_type(content_type): """best efforts on pulling out the content type and encoding from content-type header""" char_set = None if content_type.strip(): - splt = content_type.split(';') + splt = content_type.split(";") content_type = splt[0] try: raw_char_set = splt[1].strip() - key, char_set = raw_char_set.split('=') - if key != 'charset': + key, char_set = raw_char_set.split("=") + if key != "charset": char_set = None except (IndexError, ValueError): pass @@ -51,15 +52,16 @@ def _parse_content_type(content_type): class RequestModelFactory: """Produce Request models from Django request objects""" + # String to replace on masking - CLEANSED_SUBSTITUTE = '********************' + CLEANSED_SUBSTITUTE = "********************" def __init__(self, request): super().__init__() self.request = request def content_type(self): - content_type = self.request.headers.get('content-type', '') + content_type = self.request.headers.get("content-type", "") return _parse_content_type(content_type) def encoded_headers(self): @@ -67,9 +69,9 @@ def encoded_headers(self): From Django docs (https://docs.djangoproject.com/en/2.0/ref/request-response/#httprequest-objects): """ sensitive_headers = set(map(str.lower, SilkyConfig().SILKY_SENSITIVE_KEYS)) - sensitive_headers.add('authorization') + sensitive_headers.add("authorization") if SilkyConfig().SILKY_HIDE_COOKIES: - sensitive_headers.add('cookie') + sensitive_headers.add("cookie") headers = {} for k, v in self.request.headers.items(): @@ -78,14 +80,18 @@ def encoded_headers(self): v = RequestModelFactory.CLEANSED_SUBSTITUTE headers[k] = v - return json.dumps(headers, cls=DefaultEncoder, ensure_ascii=SilkyConfig().SILKY_JSON_ENSURE_ASCII) + return json.dumps( + headers, + cls=DefaultEncoder, + ensure_ascii=SilkyConfig().SILKY_JSON_ENSURE_ASCII, + ) def _mask_credentials(self, body): """ Mask credentials of potentially sensitive info before saving to db. 
""" sensitive_keys = SilkyConfig().SILKY_SENSITIVE_KEYS - key_string = '|'.join(sensitive_keys) + key_string = "|".join(sensitive_keys) def replace_pattern_values(obj): pattern = re.compile(key_string, re.I) @@ -106,13 +112,18 @@ def replace_pattern_values(obj): try: json_body = json.loads(body) except Exception as e: - pattern = re.compile(fr'(({key_string})[^=]*)=(.*?)(&|$)', re.M | re.I) + pattern = re.compile(rf"(({key_string})[^=]*)=(.*?)(&|$)", re.M | re.I) try: - body = re.sub(pattern, f'\\1={RequestModelFactory.CLEANSED_SUBSTITUTE}\\4', body) + body = re.sub( + pattern, f"\\1={RequestModelFactory.CLEANSED_SUBSTITUTE}\\4", body + ) except Exception: - Logger.debug(f'{str(e)}') + Logger.debug(f"{str(e)}") else: - body = json.dumps(replace_pattern_values(json_body), ensure_ascii=SilkyConfig().SILKY_JSON_ENSURE_ASCII) + body = json.dumps( + replace_pattern_values(json_body), + ensure_ascii=SilkyConfig().SILKY_JSON_ENSURE_ASCII, + ) return body @@ -121,15 +132,23 @@ def _body(self, raw_body, content_type): Encode body as JSON if possible so can be used as a dictionary in generation of curl/django test client code """ - body = '' + body = "" if content_type in content_type_form: body = self.request.POST - body = json.dumps(dict(body), sort_keys=True, indent=4 - , ensure_ascii=SilkyConfig().SILKY_JSON_ENSURE_ASCII) + body = json.dumps( + dict(body), + sort_keys=True, + indent=4, + ensure_ascii=SilkyConfig().SILKY_JSON_ENSURE_ASCII, + ) elif content_type in content_types_json: try: - body = json.dumps(json.loads(raw_body), sort_keys=True, indent=4 - , ensure_ascii=SilkyConfig().SILKY_JSON_ENSURE_ASCII) + body = json.dumps( + json.loads(raw_body), + sort_keys=True, + indent=4, + ensure_ascii=SilkyConfig().SILKY_JSON_ENSURE_ASCII, + ) except Exception: body = raw_body return body @@ -138,7 +157,7 @@ def body(self): content_type, char_set = self.content_type() if content_type == multipart_form: raw_body = b"Raw body not available for multipart_form data, Silk is not showing file uploads." - body = '' + body = "" return body, raw_body try: raw_body = self.request.body @@ -155,51 +174,51 @@ def body(self): pass except LookupError: # If no encoding exists, default to UTF-8 try: - raw_body = raw_body.decode('UTF-8') + raw_body = raw_body.decode("UTF-8") except AttributeError: pass except UnicodeDecodeError: - raw_body = '' + raw_body = "" except Exception as e: Logger.error( - 'Unable to decode request body using char_set %s due to error: %s. Will ignore. Stacktrace:' + "Unable to decode request body using char_set %s due to error: %s. Will ignore. Stacktrace:" % (char_set, e) ) traceback.print_exc() else: # Default to an attempt at UTF-8 decoding. 
try: - raw_body = raw_body.decode('UTF-8') + raw_body = raw_body.decode("UTF-8") except AttributeError: pass except UnicodeDecodeError: - raw_body = '' + raw_body = "" max_size = SilkyConfig().SILKY_MAX_REQUEST_BODY_SIZE - body = '' + body = "" if raw_body: if max_size > -1: - Logger.debug('A max request size is set so checking size') + Logger.debug("A max request size is set so checking size") size = sys.getsizeof(raw_body, default=None) request_identifier = self.request.path if not size: Logger.error( - 'No way in which to get size of request body for %s, will ignore it', - request_identifier + "No way in which to get size of request body for %s, will ignore it", + request_identifier, ) elif size <= max_size: Logger.debug( - 'Request %s has body of size %d which is less than %d so will save the body' + "Request %s has body of size %d which is less than %d so will save the body" % (request_identifier, size, max_size) ) body = self._body(raw_body, content_type) else: Logger.debug( - 'Request %s has body of size %d which is greater than %d, therefore ignoring' + "Request %s has body of size %d which is greater than %d, therefore ignoring" % (request_identifier, size, max_size) ) raw_body = None else: - Logger.debug('No maximum request body size is set, continuing.') + Logger.debug("No maximum request body size is set, continuing.") body = self._body(raw_body, content_type) body = self._mask_credentials(body) raw_body = self._mask_credentials(raw_body) @@ -207,10 +226,12 @@ def body(self): def query_params(self): query_params = self.request.GET - encoded_query_params = '' + encoded_query_params = "" if query_params: query_params_dict = dict(zip(query_params.keys(), query_params.values())) - encoded_query_params = json.dumps(query_params_dict, ensure_ascii=SilkyConfig().SILKY_JSON_ENSURE_ASCII) + encoded_query_params = json.dumps( + query_params_dict, ensure_ascii=SilkyConfig().SILKY_JSON_ENSURE_ASCII + ) return encoded_query_params def view_name(self): @@ -233,14 +254,15 @@ def construct_request_model(self): method=self.request.method, query_params=query_params, view_name=view_name, - body=body) + body=body, + ) # Text fields are encoded as UTF-8 in Django and hence will try to coerce # anything to we pass to UTF-8. Some stuff like binary will fail. try: request_model.raw_body = raw_body except UnicodeDecodeError: - Logger.debug('NYI: Binary request bodies') # TODO - Logger.debug('Created new request model with pk %s' % request_model.pk) + Logger.debug("NYI: Binary request bodies") # TODO + Logger.debug("Created new request model with pk %s" % request_model.pk) return request_model @@ -253,27 +275,29 @@ def __init__(self, response): self.request = DataCollector().request def body(self): - body = '' - content_type, char_set = _parse_content_type(self.response.get('content-type', '')) - content = getattr(self.response, 'content', '') + body = "" + content_type, char_set = _parse_content_type( + self.response.get("content-type", "") + ) + content = getattr(self.response, "content", "") if content: max_body_size = SilkyConfig().SILKY_MAX_RESPONSE_BODY_SIZE if max_body_size > -1: - Logger.debug('Max size of response body defined so checking') + Logger.debug("Max size of response body defined so checking") size = sys.getsizeof(content, None) if not size: - Logger.error('Could not get size of response body. Ignoring') - content = '' + Logger.error("Could not get size of response body. 
Ignoring") + content = "" else: if size > max_body_size: - content = '' + content = "" Logger.debug( - 'Size of %d for %s is bigger than %d so ignoring response body' + "Size of %d for %s is bigger than %d so ignoring response body" % (size, self.request.path, max_body_size) ) else: Logger.debug( - 'Size of %d for %s is less than %d so saving response body' + "Size of %d for %s is less than %d so saving response body" % (size, self.request.path, max_body_size) ) if content and content_type in content_types_json: @@ -283,20 +307,25 @@ def body(self): # and json.dumps(...) in python3 content = content.decode() try: - body = json.dumps(json.loads(content), sort_keys=True, indent=4 - , ensure_ascii=SilkyConfig().SILKY_JSON_ENSURE_ASCII) + body = json.dumps( + json.loads(content), + sort_keys=True, + indent=4, + ensure_ascii=SilkyConfig().SILKY_JSON_ENSURE_ASCII, + ) except (TypeError, ValueError): Logger.warn( - 'Response to request with pk %s has content type %s but was unable to parse it' + "Response to request with pk %s has content type %s but was unable to parse it" % (self.request.pk, content_type) ) return body, content def construct_response_model(self): - assert self.request, 'Cant construct a response model if there is no request model' + assert ( + self.request + ), "Cant construct a response model if there is no request model" Logger.debug( - 'Creating response model for request model with pk %s' - % self.request.pk + "Creating response model for request model with pk %s" % self.request.pk ) b, content = self.body() headers = {} @@ -310,14 +339,16 @@ def construct_response_model(self): silky_response = models.Response( request_id=self.request.id, status_code=self.response.status_code, - encoded_headers=json.dumps(headers, ensure_ascii=SilkyConfig().SILKY_JSON_ENSURE_ASCII), - body=b + encoded_headers=json.dumps( + headers, ensure_ascii=SilkyConfig().SILKY_JSON_ENSURE_ASCII + ), + body=b, ) try: raw_body = base64.b64encode(content) except TypeError: - raw_body = base64.b64encode(content.encode('utf-8')) - silky_response.raw_body = raw_body.decode('ascii') + raw_body = base64.b64encode(content.encode("utf-8")) + silky_response.raw_body = raw_body.decode("ascii") silky_response.save() return silky_response diff --git a/silk/models.py b/silk/models.py index 140d2f3c..94709a2d 100644 --- a/silk/models.py +++ b/silk/models.py @@ -29,9 +29,10 @@ from silk.utils.profile_parser import parse_profile try: - silk_storage = storages['SILKY_STORAGE'] + silk_storage = storages["SILKY_STORAGE"] except InvalidStorageError: from django.utils.module_loading import import_string + storage_class = SilkyConfig().SILKY_STORAGE_CLASS or settings.DEFAULT_FILE_STORAGE silk_storage = import_string(storage_class)() @@ -68,28 +69,27 @@ def __init__(self, d): class Request(models.Model): id = CharField(max_length=36, default=uuid4, primary_key=True) path = CharField(max_length=190, db_index=True) - query_params = TextField(blank=True, default='') - raw_body = TextField(blank=True, default='') - body = TextField(blank=True, default='') + query_params = TextField(blank=True, default="") + raw_body = TextField(blank=True, default="") + body = TextField(blank=True, default="") method = CharField(max_length=10) start_time = DateTimeField(default=timezone.now, db_index=True) view_name = CharField( - max_length=190, db_index=True, blank=True, - default='', null=True + max_length=190, db_index=True, blank=True, default="", null=True ) end_time = DateTimeField(null=True, blank=True) time_taken = FloatField(blank=True, 
null=True) # milliseconds - encoded_headers = TextField(blank=True, default='') # stores json + encoded_headers = TextField(blank=True, default="") # stores json meta_time = FloatField(null=True, blank=True) meta_num_queries = IntegerField(null=True, blank=True) meta_time_spent_queries = FloatField(null=True, blank=True) - pyprofile = TextField(blank=True, default='') + pyprofile = TextField(blank=True, default="") prof_file = FileField(max_length=300, blank=True, storage=silk_storage) # Useful method to create shortened copies of strings without losing start and end context # Used to ensure path and view_name don't exceed 190 characters def _shorten(self, string): - return f'{string[:94]}...{string[len(string) - 93:]}' + return f"{string[:94]}...{string[len(string) - 93:]}" @property def total_meta_time(self): @@ -99,13 +99,13 @@ def total_meta_time(self): def profile_table(self): for n, columns in enumerate(parse_profile(self.pyprofile)): location = columns[-1] - if n and '{' not in location and '<' not in location: - r = re.compile(r'(?P.*\.py)\:(?P[0-9]+).*') + if n and "{" not in location and "<" not in location: + r = re.compile(r"(?P.*\.py)\:(?P[0-9]+).*") m = r.search(location) group = m.groupdict() - src = group['src'] - num = group['num'] - name = 'c%d' % n + src = group["src"] + num = group["num"] + name = "c%d" % n fmt = '{location}' rep = fmt.format(**dict(group, **locals())) yield columns[:-1] + [mark_safe(rep)] @@ -119,13 +119,13 @@ def profile_table(self): @property def time_spent_on_sql_queries(self): - """" + """ " Calculate the total time spent in milliseconds on SQL queries using Django aggregates. """ result = SQLQuery.objects.filter(request=self).aggregate( - total_time=Sum('time_taken', output_field=FloatField()) + total_time=Sum("time_taken", output_field=FloatField()) ) - return result['total_time'] or 0.0 + return result["total_time"] or 0.0 @property def headers(self): @@ -138,13 +138,13 @@ def headers(self): @property def content_type(self): - return self.headers.get('content-type', None) + return self.headers.get("content-type", None) @classmethod def garbage_collect(cls, force=False): - """ Remove Request/Responses when we are at the SILKY_MAX_RECORDED_REQUESTS limit + """Remove Request/Responses when we are at the SILKY_MAX_RECORDED_REQUESTS limit Note that multiple in-flight requests may call this at once causing a - double collection """ + double collection""" check_percent = SilkyConfig().SILKY_MAX_RECORDED_REQUESTS_CHECK_PERCENT check_percent /= 100.0 if check_percent < random.random() and not force: @@ -162,11 +162,8 @@ def garbage_collect(cls, force=False): return try: - time_cutoff = cls.objects.order_by( - '-start_time' - ).values_list( - 'start_time', - flat=True + time_cutoff = cls.objects.order_by("-start_time").values_list( + "start_time", flat=True )[target_count] except IndexError: return @@ -176,10 +173,10 @@ def garbage_collect(cls, force=False): def save(self, *args, **kwargs): # sometimes django requests return the body as 'None' if self.raw_body is None: - self.raw_body = '' + self.raw_body = "" if self.body is None: - self.body = '' + self.body = "" if self.end_time and self.start_time: interval = self.end_time - self.start_time @@ -199,17 +196,19 @@ def save(self, *args, **kwargs): class Response(models.Model): id = CharField(max_length=36, default=uuid4, primary_key=True) request = OneToOneField( - Request, related_name='response', db_index=True, + Request, + related_name="response", + db_index=True, on_delete=models.CASCADE, ) 
status_code = IntegerField() - raw_body = TextField(blank=True, default='') - body = TextField(blank=True, default='') - encoded_headers = TextField(blank=True, default='') + raw_body = TextField(blank=True, default="") + body = TextField(blank=True, default="") + encoded_headers = TextField(blank=True, default="") @property def content_type(self): - return self.headers.get('content-type', None) + return self.headers.get("content-type", None) @property def headers(self): @@ -233,7 +232,7 @@ def bulk_create(self, *args, **kwargs): if len(args): objs = args[0] else: - objs = kwargs.get('objs') + objs = kwargs.get("objs") for obj in objs: obj.prepare_save() @@ -247,8 +246,12 @@ class SQLQuery(models.Model): time_taken = FloatField(blank=True, null=True) # milliseconds identifier = IntegerField(default=-1) request = ForeignKey( - Request, related_name='queries', null=True, - blank=True, db_index=True, on_delete=models.CASCADE, + Request, + related_name="queries", + null=True, + blank=True, + db_index=True, + on_delete=models.CASCADE, ) traceback = TextField() analysis = TextField(null=True, blank=True) @@ -257,18 +260,25 @@ class SQLQuery(models.Model): # TODO docstring @property def traceback_ln_only(self): - return '\n'.join(self.traceback.split('\n')[::2]) + return "\n".join(self.traceback.split("\n")[::2]) @property def formatted_query(self): - return sqlparse.format(self.query, reindent=True, keyword_case='upper') + return sqlparse.format(self.query, reindent=True, keyword_case="upper") @property def num_joins(self): parsed_query = sqlparse.parse(self.query) count = 0 for statement in parsed_query: - count += sum(map(lambda t: t.match(sqlparse.tokens.Keyword, r'\.*join\.*', regex=True), statement.flatten())) + count += sum( + map( + lambda t: t.match( + sqlparse.tokens.Keyword, r"\.*join\.*", regex=True + ), + statement.flatten(), + ) + ) return count @property @@ -279,7 +289,7 @@ def first_keywords(self): if not statement.is_keyword: break keywords.append(statement.value) - return ' '.join(keywords) + return " ".join(keywords) @property def tables_involved(self): @@ -303,8 +313,8 @@ def tables_involved(self): ): try: _next = components[idx + 1] - if not _next.startswith('('): # Subquery - stripped = _next.strip().strip(',') + if not _next.startswith("("): # Subquery + stripped = _next.strip().strip(",") if stripped: tables.append(stripped) @@ -320,7 +330,7 @@ def prepare_save(self): if not self.pk: if self.request: self.request.num_sql_queries += 1 - self.request.save(update_fields=['num_sql_queries']) + self.request.save(update_fields=["num_sql_queries"]) @transaction.atomic() def save(self, *args, **kwargs): @@ -335,11 +345,14 @@ def delete(self, *args, **kwargs): class BaseProfile(models.Model): - name = CharField(max_length=300, blank=True, default='') + name = CharField(max_length=300, blank=True, default="") start_time = DateTimeField(default=timezone.now) end_time = DateTimeField(null=True, blank=True) request = ForeignKey( - Request, null=True, blank=True, db_index=True, + Request, + null=True, + blank=True, + db_index=True, on_delete=models.CASCADE, ) time_taken = FloatField(blank=True, null=True) # milliseconds @@ -355,12 +368,12 @@ def save(self, *args, **kwargs): class Profile(BaseProfile): - file_path = CharField(max_length=300, blank=True, default='') + file_path = CharField(max_length=300, blank=True, default="") line_num = IntegerField(null=True, blank=True) end_line_num = IntegerField(null=True, blank=True) - func_name = CharField(max_length=300, blank=True, default='') 
+ func_name = CharField(max_length=300, blank=True, default="") exception_raised = BooleanField(default=False) - queries = ManyToManyField(SQLQuery, related_name='profiles', db_index=True) + queries = ManyToManyField(SQLQuery, related_name="profiles", db_index=True) dynamic = BooleanField(default=False) @property @@ -377,6 +390,6 @@ def time_spent_on_sql_queries(self): Calculate the total time spent in milliseconds on SQL queries using Django aggregates. """ result = self.queries.aggregate( - total_time=Sum('time_taken', output_field=FloatField()) + total_time=Sum("time_taken", output_field=FloatField()) ) - return result['total_time'] or 0.0 + return result["total_time"] or 0.0 diff --git a/silk/profiling/__init__.py b/silk/profiling/__init__.py index 037d9736..afb804c7 100644 --- a/silk/profiling/__init__.py +++ b/silk/profiling/__init__.py @@ -1 +1 @@ -__author__ = 'mtford' +__author__ = "mtford" diff --git a/silk/profiling/dynamic.py b/silk/profiling/dynamic.py index a1262d12..6f65ab33 100644 --- a/silk/profiling/dynamic.py +++ b/silk/profiling/dynamic.py @@ -5,16 +5,16 @@ from silk.profiling.profiler import silk_profile -Logger = logging.getLogger('silk.profiling.dynamic') +Logger = logging.getLogger("silk.profiling.dynamic") def _get_module(module_name): """ Given a module name in form 'path.to.module' return module object for 'module'. """ - if '.' in module_name: - splt = module_name.split('.') - imp = '.'.join(splt[:-1]) + if "." in module_name: + splt = module_name.split(".") + imp = ".".join(splt[:-1]) frm = splt[-1] module = __import__(imp, globals(), locals(), [frm], 0) module = getattr(module, frm) @@ -33,8 +33,8 @@ def _get_func(module, func_name): """ cls_name = None cls = None - if '.' in func_name: - cls_name, func_name = func_name.split('.') + if "." 
in func_name: + cls_name, func_name = func_name.split(".") if cls_name: cls = getattr(module, cls_name) func = getattr(cls, func_name) @@ -57,14 +57,14 @@ def profile_function_or_method(module, func, name=None): cls, func = _get_func(module, func_name) wrapped_target = decorator(func) if cls: - setattr(cls, func_name.split('.')[-1], wrapped_target) + setattr(cls, func_name.split(".")[-1], wrapped_target) else: setattr(module, func_name, wrapped_target) def _get_parent_module(module): parent = sys.modules - splt = module.__name__.split('.') + splt = module.__name__.split(".") if len(splt) > 1: for module_name in splt[:-1]: try: @@ -76,9 +76,9 @@ def _get_parent_module(module): def _get_context_manager_source(end_line, file_path, name, start_line): inject_code = "with silk_profile('%s', _dynamic=True):\n" % name - code = 'from silk.profiling.profiler import silk_profile\n' + code = "from silk.profiling.profiler import silk_profile\n" with open(file_path) as f: - ws = '' + ws = "" for i, line in enumerate(f): if i == start_line: # Use the same amount of whitespace as the line currently occupying @@ -86,11 +86,11 @@ def _get_context_manager_source(end_line, file_path, name, start_line): try: ws = x.groups()[0] except IndexError: - ws = '' + ws = "" code += ws + inject_code - code += ws + ' ' + line + code += ws + " " + line elif start_line < i <= end_line: - code += ws + ' ' + line + code += ws + " " + line else: code += line return code @@ -104,7 +104,7 @@ def _get_ws(txt): try: fws = m.groups()[0] except AttributeError: - fws = '' + fws = "" return fws @@ -112,7 +112,7 @@ def _get_source_lines(func): source = inspect.getsourcelines(func)[0] fws = _get_ws(source[0]) for i in range(0, len(source)): - source[i] = source[i].replace(fws, '', 1) + source[i] = source[i].replace(fws, "", 1) return source @@ -123,7 +123,7 @@ def _new_func_from_source(source, func): @param func: The function whose global + local context we will use @param source: Python source code containing def statement """ - src_str = ''.join(source) + src_str = "".join(source) frames = inspect.getouterframes(inspect.currentframe()) calling_frame = frames[2][0] @@ -145,7 +145,7 @@ def _new_func_from_source(source, func): locals = calling_frame.f_locals combined = globals.copy() combined.update(locals) - Logger.debug('New src_str:\n %s' % src_str) + Logger.debug("New src_str:\n %s" % src_str) exec(src_str, combined, context) return context[func.__name__] @@ -186,12 +186,14 @@ def foo(): ws = _get_ws(source[start_line]) for i in range(start_line, end_line): try: - source[i] = ' ' + source[i] + source[i] = " " + source[i] except IndexError: - raise IndexError('Function %s does not have line %d' % (func.__name__, i)) + raise IndexError("Function %s does not have line %d" % (func.__name__, i)) source.insert(start_line, ws + "from silk.profiling.profiler import silk_profile\n") - source.insert(start_line + 1, ws + "with silk_profile('%s', _dynamic=True):\n" % name) + source.insert( + start_line + 1, ws + "with silk_profile('%s', _dynamic=True):\n" % name + ) return _new_func_from_source(source, func) diff --git a/silk/profiling/profiler.py b/silk/profiling/profiler.py index 2e71c4cc..c1bd8cb3 100644 --- a/silk/profiling/profiler.py +++ b/silk/profiling/profiler.py @@ -12,7 +12,7 @@ from silk.config import SilkyConfig from silk.models import _time_taken -logger = logging.getLogger('silk.profiling.profiler') +logger = logging.getLogger("silk.profiling.profiler") # noinspection PyPep8Naming @@ -36,15 +36,18 @@ def __exit__(self, exc_type, 
exc_val, exc_tb): end_time = timezone.now() exception_raised = exc_type is not None if exception_raised: - logger.error('Exception when performing meta profiling, dumping trace below') + logger.error( + "Exception when performing meta profiling, dumping trace below" + ) traceback.print_exception(exc_type, exc_val, exc_tb) - request = getattr(DataCollector().local, 'request', None) + request = getattr(DataCollector().local, "request", None) if request: curr = request.meta_time or 0 request.meta_time = curr + _time_taken(self.start_time, end_time) def __call__(self, target): if self._should_meta_profile: + def wrapped_target(*args, **kwargs): request = DataCollector().request if request: @@ -87,7 +90,9 @@ def __enter__(self): with silk_meta_profiler(): self._start_queries() if not self.name: - raise ValueError('silk_profile used as a context manager must have a name') + raise ValueError( + "silk_profile used as a context manager must have a name" + ) frame = inspect.currentframe() frames = inspect.getouterframes(frame) outer_frame = frames[1] @@ -95,22 +100,24 @@ def __enter__(self): line_num = outer_frame[2] request = DataCollector().request self.profile = { - 'name': self.name, - 'file_path': path, - 'line_num': line_num, - 'dynamic': self._dynamic, - 'request': request, - 'start_time': timezone.now(), + "name": self.name, + "file_path": path, + "line_num": line_num, + "dynamic": self._dynamic, + "request": request, + "start_time": timezone.now(), } else: - logger.warning('Cannot execute silk_profile as silk is not installed correctly.') + logger.warning( + "Cannot execute silk_profile as silk is not installed correctly." + ) def _finalise_queries(self): collector = DataCollector() self._end_queries() - assert self.profile, 'no profile was created' + assert self.profile, "no profile was created" diff = set(self._queries_after).difference(set(self._queries_before)) - self.profile['queries'] = diff + self.profile["queries"] = diff collector.register_profile(self.profile) # noinspection PyUnusedLocal @@ -118,22 +125,23 @@ def __exit__(self, exc_type, exc_val, exc_tb): if self._silk_installed() and self._should_profile(): with silk_meta_profiler(): exception_raised = exc_type is not None - self.profile['exception_raised'] = exception_raised - self.profile['end_time'] = timezone.now() + self.profile["exception_raised"] = exception_raised + self.profile["end_time"] = timezone.now() self._finalise_queries() def _silk_installed(self): - middlewares = getattr(settings, 'MIDDLEWARE', []) + middlewares = getattr(settings, "MIDDLEWARE", []) if not middlewares: middlewares = [] middleware_installed = SilkyConfig().SILKY_MIDDLEWARE_CLASS in middlewares - return apps.is_installed('silk') and middleware_installed + return apps.is_installed("silk") and middleware_installed def _should_profile(self): return DataCollector().request is not None def __call__(self, target): if self._silk_installed(): + def decorator(view_func): @wraps(view_func) def wrapped_target(*args, **kwargs): @@ -141,30 +149,33 @@ def wrapped_target(*args, **kwargs): try: func_code = target.__code__ except AttributeError: - raise NotImplementedError('Profile not implemented to decorate type %s' % target.__class__.__name__) + raise NotImplementedError( + "Profile not implemented to decorate type %s" + % target.__class__.__name__ + ) line_num = func_code.co_firstlineno file_path = func_code.co_filename func_name = target.__name__ if not self.name: self.name = func_name self.profile = { - 'func_name': func_name, - 'name': self.name, - 
'file_path': file_path, - 'line_num': line_num, - 'dynamic': self._dynamic, - 'start_time': timezone.now(), - 'request': DataCollector().request + "func_name": func_name, + "name": self.name, + "file_path": file_path, + "line_num": line_num, + "dynamic": self._dynamic, + "start_time": timezone.now(), + "request": DataCollector().request, } self._start_queries() try: result = target(*args, **kwargs) except Exception: - self.profile['exception_raised'] = True + self.profile["exception_raised"] = True raise finally: with silk_meta_profiler(): - self.profile['end_time'] = timezone.now() + self.profile["end_time"] = timezone.now() self._finalise_queries() return result @@ -172,7 +183,9 @@ def wrapped_target(*args, **kwargs): return decorator(target) else: - logger.warning('Cannot execute silk_profile as silk is not installed correctly.') + logger.warning( + "Cannot execute silk_profile as silk is not installed correctly." + ) return target def distinct_queries(self): @@ -184,5 +197,5 @@ def blah(): time.sleep(1) -if __name__ == '__main__': +if __name__ == "__main__": blah() diff --git a/silk/request_filters.py b/silk/request_filters.py index 547272da..1fd6b413 100644 --- a/silk/request_filters.py +++ b/silk/request_filters.py @@ -1,6 +1,7 @@ """ Django queryset filters used by the requests view """ + import logging from datetime import datetime, timedelta @@ -10,7 +11,7 @@ from silk.profiling.dynamic import _get_module from silk.templatetags.silk_filters import _silk_date_time -logger = logging.getLogger('silk.request_filters') +logger = logging.getLogger("silk.request_filters") class FilterValidationError(Exception): @@ -31,13 +32,13 @@ def serialisable_value(self): return self.value def as_dict(self): - return {'typ': self.typ, 'value': self.serialisable_value, 'str': str(self)} + return {"typ": self.typ, "value": self.serialisable_value, "str": str(self)} @staticmethod def from_dict(d): - typ = d['typ'] + typ = d["typ"] filter_class = globals()[typ] - val = d.get('value', None) + val = d.get("value", None) return filter_class(val) def contribute_to_query_set(self, query_set): @@ -65,7 +66,7 @@ def __init__(self, n): super().__init__() def __str__(self): - return '>%d seconds ago' % self.value + return ">%d seconds ago" % self.value def _parse(dt, fmt): @@ -75,14 +76,14 @@ def _parse(dt, fmt): dt = datetime.strptime(dt, fmt) except TypeError: if not isinstance(dt, datetime): - raise FilterValidationError('Must be a datetime object') + raise FilterValidationError("Must be a datetime object") except ValueError as e: raise FilterValidationError(e) return dt class BeforeDateFilter(BaseFilter): - fmt = '%Y/%m/%d %H:%M' + fmt = "%Y/%m/%d %H:%M" def __init__(self, dt): value = _parse(dt, self.fmt) @@ -93,11 +94,11 @@ def serialisable_value(self): return self.value.strftime(self.fmt) def __str__(self): - return '<%s' % _silk_date_time(self.value) + return "<%s" % _silk_date_time(self.value) class AfterDateFilter(BaseFilter): - fmt = '%Y/%m/%d %H:%M' + fmt = "%Y/%m/%d %H:%M" def __init__(self, dt): value = _parse(dt, self.fmt) @@ -108,7 +109,7 @@ def serialisable_value(self): return self.value.strftime(self.fmt) def __str__(self): - return '>%s' % _silk_date_time(self.value) + return ">%s" % _silk_date_time(self.value) class ViewNameFilter(BaseFilter): @@ -119,7 +120,7 @@ def __init__(self, view_name): super().__init__(value, view_name=view_name) def __str__(self): - return 'View == %s' % self.value + return "View == %s" % self.value class PathFilter(BaseFilter): @@ -130,7 +131,7 @@ def __init__(self, 
path): super().__init__(value, path=path) def __str__(self): - return 'Path == %s' % self.value + return "Path == %s" % self.value class NameFilter(BaseFilter): @@ -139,7 +140,7 @@ def __init__(self, name): super().__init__(value, name=name) def __str__(self): - return 'name == %s' % self.value + return "name == %s" % self.value class FunctionNameFilter(BaseFilter): @@ -148,7 +149,7 @@ def __init__(self, func_name): super().__init__(value, func_name=func_name) def __str__(self): - return 'func_name == %s' % self.value + return "func_name == %s" % self.value class NumQueriesFilter(BaseFilter): @@ -160,10 +161,10 @@ def __init__(self, n): super().__init__(value, num_queries__gte=n) def __str__(self): - return '#queries >= %s' % self.value + return "#queries >= %s" % self.value def contribute_to_query_set(self, query_set): - return query_set.annotate(num_queries=Count('queries')) + return query_set.annotate(num_queries=Count("queries")) class TimeSpentOnQueriesFilter(BaseFilter): @@ -175,10 +176,10 @@ def __init__(self, n): super().__init__(value, db_time__gte=n) def __str__(self): - return 'DB Time >= %s' % self.value + return "DB Time >= %s" % self.value def contribute_to_query_set(self, query_set): - return query_set.annotate(db_time=Sum('queries__time_taken')) + return query_set.annotate(db_time=Sum("queries__time_taken")) class OverallTimeFilter(BaseFilter): @@ -190,7 +191,7 @@ def __init__(self, n): super().__init__(value, time_taken__gte=n) def __str__(self): - return 'Time >= %s' % self.value + return "Time >= %s" % self.value class StatusCodeFilter(BaseFilter): @@ -210,8 +211,8 @@ def __init__(self, value): def filters_from_request(request): raw_filters = {} for key in request.POST: - splt = key.split('-') - if splt[0].startswith('filter'): + splt = key.split("-") + if splt[0].startswith("filter"): ident = splt[1] typ = splt[2] if ident not in raw_filters: @@ -219,16 +220,18 @@ def filters_from_request(request): raw_filters[ident][typ] = request.POST[key] filters = {} for ident, raw_filter in raw_filters.items(): - value = raw_filter.get('value', '') + value = raw_filter.get("value", "") if value.strip(): - typ = raw_filter['typ'] - module = _get_module('silk.request_filters') + typ = raw_filter["typ"] + module = _get_module("silk.request_filters") filter_class = getattr(module, typ) try: f = filter_class(value) filters[ident] = f except FilterValidationError: - logger.warning(f'Validation error when processing filter {typ}({value})') + logger.warning( + f"Validation error when processing filter {typ}({value})" + ) return filters @@ -237,11 +240,11 @@ def __init__(self, filters_key): self.key = filters_key def save(self, request, filters): - if hasattr(request, 'session'): + if hasattr(request, "session"): request.session[self.key] = filters request.silk_filters = filters def get(self, request): - if hasattr(request, 'session'): + if hasattr(request, "session"): return request.session.get(self.key, {}) return request.silk_filters diff --git a/silk/singleton.py b/silk/singleton.py index 9e2ec2b6..8b015478 100644 --- a/silk/singleton.py +++ b/silk/singleton.py @@ -1,4 +1,4 @@ -__author__ = 'mtford' +__author__ = "mtford" class Singleton(type, metaclass=object): diff --git a/silk/sql.py b/silk/sql.py index cc54276f..26ed2f0a 100644 --- a/silk/sql.py +++ b/silk/sql.py @@ -8,7 +8,7 @@ from silk.collector import DataCollector from silk.config import SilkyConfig -Logger = logging.getLogger('silk.sql') +Logger = logging.getLogger("silk.sql") def _should_wrap(sql_query): @@ -24,7 +24,7 @@ def 
_should_wrap(sql_query):
 def _unpack_explanation(result):
     for row in result:
         if not isinstance(row, str):
-            yield ' '.join(str(c) for c in row)
+            yield " ".join(str(c) for c in row)
         else:
             yield row
 
@@ -43,7 +43,7 @@ def _explain_query(connection, q, params):
             Logger.warning(
                 "Database does not support analyzing queries with provided params. %s."
                 "SILKY_ANALYZE_QUERIES option will be ignored",
-                error_str
+                error_str,
             )
             prefix = connection.ops.explain_query_prefix()
         else:
@@ -57,7 +57,7 @@ def _explain_query(connection, q, params):
         with connection.cursor() as cur:
             cur.execute(prefixed_query, params)
             result = _unpack_explanation(cur.fetchall())
-            return '\n'.join(result)
+            return "\n".join(result)
     return None
 
 
@@ -72,28 +72,24 @@ def execute_sql(self, *args, **kwargs):
         try:
             result_type = args[0]
         except IndexError:
-            result_type = kwargs.get('result_type', 'multi')
-        if result_type == 'multi':
+            result_type = kwargs.get("result_type", "multi")
+        if result_type == "multi":
             return iter([])
         else:
             return
     sql_query = q % tuple(force_str(param) for param in params)
     if _should_wrap(sql_query):
-        tb = ''.join(reversed(traceback.format_stack()))
-        query_dict = {
-            'query': sql_query,
-            'start_time': timezone.now(),
-            'traceback': tb
-        }
+        tb = "".join(reversed(traceback.format_stack()))
+        query_dict = {"query": sql_query, "start_time": timezone.now(), "traceback": tb}
         try:
             return self._execute_sql(*args, **kwargs)
         finally:
-            query_dict['end_time'] = timezone.now()
+            query_dict["end_time"] = timezone.now()
             request = DataCollector().request
             if request:
-                query_dict['request'] = request
-                if getattr(self.query.model, '__module__', '') != 'silk.models':
-                    query_dict['analysis'] = _explain_query(self.connection, q, params)
+                query_dict["request"] = request
+                if getattr(self.query.model, "__module__", "") != "silk.models":
+                    query_dict["analysis"] = _explain_query(self.connection, q, params)
                 DataCollector().register_query(query_dict)
             else:
                 DataCollector().register_silk_query(query_dict)
diff --git a/silk/storage.py b/silk/storage.py
index 4c870f6a..650fddf8 100644
--- a/silk/storage.py
+++ b/silk/storage.py
@@ -7,7 +7,6 @@ class ProfilerResultStorage(FileSystemStorage):
     # the default storage will only store under MEDIA_ROOT, so we must define our own.
def __init__(self): super().__init__( - location=SilkyConfig().SILKY_PYTHON_PROFILER_RESULT_PATH, - base_url='' + location=SilkyConfig().SILKY_PYTHON_PROFILER_RESULT_PATH, base_url="" ) self.base_url = None diff --git a/silk/templatetags/__init__.py b/silk/templatetags/__init__.py index 037d9736..afb804c7 100644 --- a/silk/templatetags/__init__.py +++ b/silk/templatetags/__init__.py @@ -1 +1 @@ -__author__ = 'mtford' +__author__ = "mtford" diff --git a/silk/templatetags/silk_filters.py b/silk/templatetags/silk_filters.py index ecf20afc..905c23dd 100644 --- a/silk/templatetags/silk_filters.py +++ b/silk/templatetags/silk_filters.py @@ -22,8 +22,8 @@ def _esc_func(autoescape): @stringfilter def spacify(value, autoescape=None): esc = _esc_func(autoescape) - val = esc(value).replace(' ', " ") - val = val.replace('\t', ' ') + val = esc(value).replace(" ", " ") + val = val.replace("\t", " ") return mark_safe(val) @@ -32,11 +32,13 @@ def _urlify(str): m = r.search(str) while m: group = m.groupdict() - src = group['src'] - num = group['num'] - start = m.start('src') - end = m.end('src') - rep = '{src}'.format(src=src, num=num) + src = group["src"] + num = group["num"] + start = m.start("src") + end = m.end("src") + rep = '{src}'.format( + src=src, num=num + ) str = str[:start] + rep + str[end:] m = r.search(str) return str @@ -48,20 +50,20 @@ def hash(h, key): def _process_microseconds(dt_strftime): - splt = dt_strftime.split('.') + splt = dt_strftime.split(".") micro = splt[-1] - time = '.'.join(splt[0:-1]) - micro = '%.3f' % float('0.' + micro) + time = ".".join(splt[0:-1]) + micro = "%.3f" % float("0." + micro) return time + micro[1:] def _silk_date_time(dt): today = timezone.now().date() if dt.date() == today: - dt_strftime = dt.strftime('%H:%M:%S.%f') + dt_strftime = dt.strftime("%H:%M:%S.%f") return _process_microseconds(dt_strftime) else: - return _process_microseconds(dt.strftime('%Y.%m.%d %H:%M.%f')) + return _process_microseconds(dt.strftime("%Y.%m.%d %H:%M.%f")) @register.filter(expects_localtime=True) @@ -84,7 +86,7 @@ def filepath_urlify(value, autoescape=None): def body_filter(value): print(value) if len(value) > 20: - return 'Too big!' + return "Too big!" 
else: return value diff --git a/silk/templatetags/silk_inclusion.py b/silk/templatetags/silk_inclusion.py index 21322bf3..f01edce2 100644 --- a/silk/templatetags/silk_inclusion.py +++ b/silk/templatetags/silk_inclusion.py @@ -4,46 +4,45 @@ def request_summary(silk_request): - return {'silk_request': silk_request} + return {"silk_request": silk_request} def request_summary_row(silk_request): - return {'silk_request': silk_request} + return {"silk_request": silk_request} def request_menu(request, silk_request): - return {'request': request, - 'silk_request': silk_request} + return {"request": request, "silk_request": silk_request} def root_menu(request): - return {'request': request} + return {"request": request} def profile_menu(request, profile, silk_request=None): - context = {'request': request, 'profile': profile} + context = {"request": request, "profile": profile} if silk_request: - context['silk_request'] = silk_request + context["silk_request"] = silk_request return context def profile_summary(profile): - return {'profile': profile} + return {"profile": profile} def heading(text): - return {'text': text} + return {"text": text} def code(lines, actual_line): - return {'code': lines, 'actual_line': [x.strip() for x in actual_line]} + return {"code": lines, "actual_line": [x.strip() for x in actual_line]} -register.inclusion_tag('silk/inclusion/request_summary.html')(request_summary) -register.inclusion_tag('silk/inclusion/request_summary_row.html')(request_summary_row) -register.inclusion_tag('silk/inclusion/profile_summary.html')(profile_summary) -register.inclusion_tag('silk/inclusion/code.html')(code) -register.inclusion_tag('silk/inclusion/request_menu.html')(request_menu) -register.inclusion_tag('silk/inclusion/profile_menu.html')(profile_menu) -register.inclusion_tag('silk/inclusion/root_menu.html')(root_menu) -register.inclusion_tag('silk/inclusion/heading.html')(heading) +register.inclusion_tag("silk/inclusion/request_summary.html")(request_summary) +register.inclusion_tag("silk/inclusion/request_summary_row.html")(request_summary_row) +register.inclusion_tag("silk/inclusion/profile_summary.html")(profile_summary) +register.inclusion_tag("silk/inclusion/code.html")(code) +register.inclusion_tag("silk/inclusion/request_menu.html")(request_menu) +register.inclusion_tag("silk/inclusion/profile_menu.html")(profile_menu) +register.inclusion_tag("silk/inclusion/root_menu.html")(root_menu) +register.inclusion_tag("silk/inclusion/heading.html")(heading) diff --git a/silk/templatetags/silk_nav.py b/silk/templatetags/silk_nav.py index 59cd088e..48eeadeb 100644 --- a/silk/templatetags/silk_nav.py +++ b/silk/templatetags/silk_nav.py @@ -9,7 +9,7 @@ def navactive(request, urls, *args, **kwargs): path = request.path urls = [reverse(url, args=args) for url in urls.split()] if path in urls: - cls = kwargs.get('class', None) + cls = kwargs.get("class", None) if not cls: cls = "menu-item-selected" return cls diff --git a/silk/urls.py b/silk/urls.py index c23f8d12..8b1bdb76 100644 --- a/silk/urls.py +++ b/silk/urls.py @@ -13,80 +13,80 @@ from silk.views.sql_detail import SQLDetailView from silk.views.summary import SummaryView -app_name = 'silk' +app_name = "silk" urlpatterns = [ - path(route='', view=SummaryView.as_view(), name='summary'), - path(route='requests/', view=RequestsView.as_view(), name='requests'), + path(route="", view=SummaryView.as_view(), name="summary"), + path(route="requests/", view=RequestsView.as_view(), name="requests"), path( - route='request//', + route="request//", 
view=RequestView.as_view(), - name='request_detail', + name="request_detail", ), path( - route='request//sql/', + route="request//sql/", view=SQLView.as_view(), - name='request_sql', + name="request_sql", ), path( - route='request//sql//', + route="request//sql//", view=SQLDetailView.as_view(), - name='request_sql_detail', + name="request_sql_detail", ), path( - route='request//raw/', + route="request//raw/", view=Raw.as_view(), - name='raw', + name="raw", ), path( - route='request//pyprofile/', + route="request//pyprofile/", view=ProfileDownloadView.as_view(), - name='request_profile_download', + name="request_profile_download", ), path( - route='request//json/', + route="request//json/", view=ProfileDotView.as_view(), - name='request_profile_dot', + name="request_profile_dot", ), path( - route='request//profiling/', + route="request//profiling/", view=ProfilingView.as_view(), - name='request_profiling', + name="request_profiling", ), path( - route='request//profile//', + route="request//profile//", view=ProfilingDetailView.as_view(), - name='request_profile_detail', + name="request_profile_detail", ), path( - route='request//profile//sql/', + route="request//profile//sql/", view=SQLView.as_view(), - name='request_and_profile_sql', + name="request_and_profile_sql", ), path( - route='request//profile//sql//', + route="request//profile//sql//", view=SQLDetailView.as_view(), - name='request_and_profile_sql_detail', + name="request_and_profile_sql_detail", ), path( - route='profile//', + route="profile//", view=ProfilingDetailView.as_view(), - name='profile_detail', + name="profile_detail", ), path( - route='profile//sql/', + route="profile//sql/", view=SQLView.as_view(), - name='profile_sql', + name="profile_sql", ), path( - route='profile//sql//', + route="profile//sql//", view=SQLDetailView.as_view(), - name='profile_sql_detail', + name="profile_sql_detail", ), - path(route='profiling/', view=ProfilingView.as_view(), name='profiling'), - path(route='cleardb/', view=ClearDBView.as_view(), name='cleardb'), + path(route="profiling/", view=ProfilingView.as_view(), name="profiling"), + path(route="cleardb/", view=ClearDBView.as_view(), name="cleardb"), path( - route='request//cprofile/', + route="request//cprofile/", view=CProfileView.as_view(), - name='cprofile', + name="cprofile", ), ] diff --git a/silk/utils/__init__.py b/silk/utils/__init__.py index 037d9736..afb804c7 100644 --- a/silk/utils/__init__.py +++ b/silk/utils/__init__.py @@ -1 +1 @@ -__author__ = 'mtford' +__author__ = "mtford" diff --git a/silk/utils/data_deletion.py b/silk/utils/data_deletion.py index 5806f6c3..4eb2eb26 100644 --- a/silk/utils/data_deletion.py +++ b/silk/utils/data_deletion.py @@ -3,16 +3,16 @@ def delete_model(model): - engine = settings.DATABASES[model.objects.db]['ENGINE'] + engine = settings.DATABASES[model.objects.db]["ENGINE"] table = model._meta.db_table - if 'mysql' in engine or 'postgresql' in engine: + if "mysql" in engine or "postgresql" in engine: # Use "TRUNCATE" on the table with connections[model.objects.db].cursor() as cursor: - if 'mysql' in engine: + if "mysql" in engine: cursor.execute("SET FOREIGN_KEY_CHECKS=0;") cursor.execute(f"TRUNCATE TABLE {table}") cursor.execute("SET FOREIGN_KEY_CHECKS=1;") - elif 'postgres' in engine: + elif "postgres" in engine: cursor.execute(f"ALTER TABLE {table} DISABLE TRIGGER USER;") cursor.execute(f"TRUNCATE TABLE {table} CASCADE") cursor.execute(f"ALTER TABLE {table} ENABLE TRIGGER USER;") @@ -21,8 +21,7 @@ def delete_model(model): # Manually delete rows 
because sqlite does not support TRUNCATE and # oracle doesn't provide good support for disabling foreign key checks while True: - items_to_delete = list( - model.objects.values_list('pk', flat=True).all()[:800]) + items_to_delete = list(model.objects.values_list("pk", flat=True).all()[:800]) if not items_to_delete: break model.objects.filter(pk__in=items_to_delete).delete() diff --git a/silk/utils/pagination.py b/silk/utils/pagination.py index 9a1a046d..365b3530 100644 --- a/silk/utils/pagination.py +++ b/silk/utils/pagination.py @@ -1,11 +1,11 @@ from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator -__author__ = 'mtford' +__author__ = "mtford" def _page(request, query_set): paginator = Paginator(query_set, 200) - page_number = request.GET.get('page') + page_number = request.GET.get("page") try: page = paginator.page(page_number) except PageNotAnInteger: diff --git a/silk/utils/profile_parser.py b/silk/utils/profile_parser.py index 13fb6073..0dfe4939 100644 --- a/silk/utils/profile_parser.py +++ b/silk/utils/profile_parser.py @@ -1,6 +1,6 @@ import re -_pattern = re.compile(' +') +_pattern = re.compile(" +") def parse_profile(output): @@ -8,12 +8,12 @@ def parse_profile(output): Parse the output of cProfile to a list of tuples. """ if isinstance(output, str): - output = output.split('\n') + output = output.split("\n") for i, line in enumerate(output): # ignore n function calls, total time and ordered by and empty lines line = line.strip() if i > 3 and line: columns = _pattern.split(line)[0:] - function = ' '.join(columns[5:]) + function = " ".join(columns[5:]) columns = columns[:5] + [function] yield columns diff --git a/silk/views/__init__.py b/silk/views/__init__.py index 037d9736..afb804c7 100644 --- a/silk/views/__init__.py +++ b/silk/views/__init__.py @@ -1 +1 @@ -__author__ = 'mtford' +__author__ = "mtford" diff --git a/silk/views/clear_db.py b/silk/views/clear_db.py index 893ee29b..24790791 100644 --- a/silk/views/clear_db.py +++ b/silk/views/clear_db.py @@ -18,19 +18,21 @@ class ClearDBView(View): @method_decorator(login_possibly_required) @method_decorator(permissions_possibly_required) def get(self, request, *_, **kwargs): - return render(request, 'silk/clear_db.html') + return render(request, "silk/clear_db.html") @method_decorator(login_possibly_required) @method_decorator(permissions_possibly_required) def post(self, request, *_, **kwargs): context = {} - if 'clear_all' in request.POST: + if "clear_all" in request.POST: delete_model(Profile) delete_model(SQLQuery) delete_model(Response) delete_model(Request) - tables = ['Response', 'SQLQuery', 'Profile', 'Request'] - context['msg'] = 'Cleared data for following silk tables: {}'.format(', '.join(tables)) + tables = ["Response", "SQLQuery", "Profile", "Request"] + context["msg"] = "Cleared data for following silk tables: {}".format( + ", ".join(tables) + ) if SilkyConfig().SILKY_DELETE_PROFILES: dir = SilkyConfig().SILKY_PYTHON_PROFILER_RESULT_PATH @@ -40,6 +42,6 @@ def post(self, request, *_, **kwargs): shutil.rmtree(path) except OSError: os.remove(path) - context['msg'] += '\nDeleted all profiles from the directory.' + context["msg"] += "\nDeleted all profiles from the directory." 
-        return render(request, 'silk/clear_db.html', context=context)
+        return render(request, "silk/clear_db.html", context=context)
diff --git a/silk/views/code.py b/silk/views/code.py
index 5cec4cc1..d7450f14 100644
--- a/silk/views/code.py
+++ b/silk/views/code.py
@@ -1,6 +1,6 @@
 from silk.config import SilkyConfig
 
-__author__ = 'mtford'
+__author__ = "mtford"
 
 
 def _code(file_path, line_num, end_line_num=None):
@@ -9,31 +9,31 @@ def _code(file_path, line_num, end_line_num=None):
         end_line_num = line_num
     end_line_num = int(end_line_num)
     actual_line = []
-    lines = ''
-    with open(file_path, encoding='utf-8') as f:
+    lines = ""
+    with open(file_path, encoding="utf-8") as f:
         r = range(max(0, line_num - 10), line_num + 10)
         for i, line in enumerate(f):
             if i in r:
                 lines += line
             if i + 1 in range(line_num, end_line_num + 1):
                 actual_line.append(line)
-    code = lines.split('\n')
+    code = lines.split("\n")
     return actual_line, code
 
 
-def _code_context(file_path, line_num, end_line_num=None, prefix=''):
+def _code_context(file_path, line_num, end_line_num=None, prefix=""):
     actual_line, code = _code(file_path, line_num, end_line_num)
     return {
-        prefix + 'code': code,
-        prefix + 'file_path': file_path,
-        prefix + 'line_num': line_num,
-        prefix + 'actual_line': actual_line
+        prefix + "code": code,
+        prefix + "file_path": file_path,
+        prefix + "line_num": line_num,
+        prefix + "actual_line": actual_line,
     }
 
 
-def _code_context_from_request(request, end_line_num=None, prefix=''):
-    file_path = request.GET.get('file_path')
-    line_num = request.GET.get('line_num')
+def _code_context_from_request(request, end_line_num=None, prefix=""):
+    file_path = request.GET.get("file_path")
+    line_num = request.GET.get("line_num")
     result = {}
     if file_path is not None and line_num is not None:
         result = _code_context(file_path, line_num, end_line_num, prefix)
diff --git a/silk/views/cprofile.py b/silk/views/cprofile.py
index 2cbdd50b..c84c2066 100644
--- a/silk/views/cprofile.py
+++ b/silk/views/cprofile.py
@@ -11,10 +11,8 @@ class CProfileView(View):
     @method_decorator(login_possibly_required)
     @method_decorator(permissions_possibly_required)
     def get(self, request, *_, **kwargs):
-        request_id = kwargs['request_id']
+        request_id = kwargs["request_id"]
         silk_request = Request.objects.get(pk=request_id)
-        context = {
-            'silk_request': silk_request,
-            'request': request}
+        context = {"silk_request": silk_request, "request": request}
 
-        return render(request, 'silk/cprofile.html', context)
+        return render(request, "silk/cprofile.html", context)
diff --git a/silk/views/profile_detail.py b/silk/views/profile_detail.py
index 73d0f714..0138001b 100644
--- a/silk/views/profile_detail.py
+++ b/silk/views/profile_detail.py
@@ -12,32 +12,30 @@ class ProfilingDetailView(View):
     @method_decorator(login_possibly_required)
     @method_decorator(permissions_possibly_required)
     def get(self, request, *_, **kwargs):
-        profile_id = kwargs['profile_id']
-        context = {
-            'request': request
-        }
+        profile_id = kwargs["profile_id"]
+        context = {"request": request}
         profile = Profile.objects.get(pk=profile_id)
         file_path = profile.file_path
         line_num = profile.line_num
-        context['pos'] = pos = int(request.GET.get('pos', 0))
+        context["pos"] = pos = int(request.GET.get("pos", 0))
         if pos:
-            context.update(_code_context_from_request(request, prefix='pyprofile_'))
+            context.update(_code_context_from_request(request, prefix="pyprofile_"))
 
-        context['profile'] = profile
-        context['line_num'] = file_path
-        context['file_path'] = line_num
-        context['file_column'] = 5
+
context["profile"] = profile + context["line_num"] = file_path + context["file_path"] = line_num + context["file_column"] = 5 if profile.request: - context['silk_request'] = profile.request + context["silk_request"] = profile.request if file_path and line_num: try: context.update(_code_context(file_path, line_num, profile.end_line_num)) except OSError as e: if e.errno == 2: - context['code_error'] = e.filename + ' does not exist.' + context["code_error"] = e.filename + " does not exist." else: raise e - return render(request, 'silk/profile_detail.html', context) + return render(request, "silk/profile_detail.html", context) diff --git a/silk/views/profile_dot.py b/silk/views/profile_dot.py index c0d32072..503fb36d 100644 --- a/silk/views/profile_dot.py +++ b/silk/views/profile_dot.py @@ -22,7 +22,7 @@ mincolor=(0.18, 0.51, 0.53), maxcolor=(0.03, 0.49, 0.50), gamma=1.5, - fontname='FiraSans', + fontname="FiraSans", minfontsize=6.0, maxfontsize=6.0, ) @@ -69,8 +69,12 @@ class ProfileDotView(View): @method_decorator(login_possibly_required) @method_decorator(permissions_possibly_required) def get(self, request, request_id): - silk_request = get_object_or_404(Request, pk=request_id, prof_file__isnull=False) - cutoff = float(request.GET.get('cutoff', '') or 5) + silk_request = get_object_or_404( + Request, pk=request_id, prof_file__isnull=False + ) + cutoff = float(request.GET.get("cutoff", "") or 5) profile = _create_profile(silk_request.prof_file) result = dict(dot=_create_dot(profile, cutoff)) - return HttpResponse(json.dumps(result).encode('utf-8'), content_type='application/json') + return HttpResponse( + json.dumps(result).encode("utf-8"), content_type="application/json" + ) diff --git a/silk/views/profile_download.py b/silk/views/profile_download.py index af71c459..f8e82c9b 100644 --- a/silk/views/profile_download.py +++ b/silk/views/profile_download.py @@ -12,7 +12,11 @@ class ProfileDownloadView(View): @method_decorator(login_possibly_required) @method_decorator(permissions_possibly_required) def get(self, request, request_id): - silk_request = get_object_or_404(Request, pk=request_id, prof_file__isnull=False) + silk_request = get_object_or_404( + Request, pk=request_id, prof_file__isnull=False + ) response = FileResponse(silk_request.prof_file) - response['Content-Disposition'] = f'attachment; filename="{silk_request.prof_file.name}"' + response["Content-Disposition"] = ( + f'attachment; filename="{silk_request.prof_file.name}"' + ) return response diff --git a/silk/views/profiling.py b/silk/views/profiling.py index 4dd96ad7..9e55859f 100644 --- a/silk/views/profiling.py +++ b/silk/views/profiling.py @@ -12,14 +12,16 @@ class ProfilingView(View): show = [5, 10, 25, 100, 250] default_show = 25 - order_by = ['Recent', - 'Name', - 'Function Name', - 'Num. Queries', - 'Time', - 'Time on queries'] - defualt_order_by = 'Recent' - session_key_profile_filters = 'session_key_profile_filters' + order_by = [ + "Recent", + "Name", + "Function Name", + "Num. 
Queries", + "Time", + "Time on queries", + ] + defualt_order_by = "Recent" + session_key_profile_filters = "session_key_profile_filters" filters_manager = FiltersManager(session_key_profile_filters) def __init__(self, **kwargs): @@ -33,18 +35,26 @@ def _get_distinct_values(self, field, silk_request): function_names = [x[field] for x in query_set.values(field).distinct()] # Ensure top, default option is '' try: - function_names.remove('') + function_names.remove("") except ValueError: pass - return [''] + function_names + return [""] + function_names def _get_function_names(self, silk_request=None): - return self._get_distinct_values('func_name', silk_request) + return self._get_distinct_values("func_name", silk_request) def _get_names(self, silk_request=None): - return self._get_distinct_values('name', silk_request) + return self._get_distinct_values("name", silk_request) - def _get_objects(self, show=None, order_by=None, name=None, func_name=None, silk_request=None, filters=None): + def _get_objects( + self, + show=None, + order_by=None, + name=None, + func_name=None, + silk_request=None, + filters=None, + ): if not filters: filters = [] if not show: @@ -56,18 +66,22 @@ def _get_objects(self, show=None, order_by=None, name=None, func_name=None, silk query_set = manager.all() if not order_by: order_by = self.defualt_order_by - if order_by == 'Recent': - query_set = query_set.order_by('-start_time') - elif order_by == 'Name': - query_set = query_set.order_by('-name') - elif order_by == 'Function Name': - query_set = query_set.order_by('-func_name') - elif order_by == 'Num. Queries': - query_set = query_set.annotate(num_queries=Count('queries')).order_by('-num_queries') - elif order_by == 'Time': - query_set = query_set.order_by('-time_taken') - elif order_by == 'Time on queries': - query_set = query_set.annotate(db_time=Sum('queries__time_taken')).order_by('-db_time') + if order_by == "Recent": + query_set = query_set.order_by("-start_time") + elif order_by == "Name": + query_set = query_set.order_by("-name") + elif order_by == "Function Name": + query_set = query_set.order_by("-func_name") + elif order_by == "Num. 
Queries": + query_set = query_set.annotate(num_queries=Count("queries")).order_by( + "-num_queries" + ) + elif order_by == "Time": + query_set = query_set.order_by("-time_taken") + elif order_by == "Time on queries": + query_set = query_set.annotate(db_time=Sum("queries__time_taken")).order_by( + "-db_time" + ) elif order_by: raise RuntimeError('Unknown order_by: "%s"' % order_by) if func_name: @@ -80,49 +94,55 @@ def _get_objects(self, show=None, order_by=None, name=None, func_name=None, silk return list(query_set[:show]) def _create_context(self, request, *args, **kwargs): - request_id = kwargs.get('request_id') + request_id = kwargs.get("request_id") if request_id: silk_request = Request.objects.get(pk=request_id) else: silk_request = None - show = request.GET.get('show', self.default_show) - order_by = request.GET.get('order_by', self.defualt_order_by) + show = request.GET.get("show", self.default_show) + order_by = request.GET.get("order_by", self.defualt_order_by) if show: show = int(show) - func_name = request.GET.get('func_name', None) - name = request.GET.get('name', None) + func_name = request.GET.get("func_name", None) + name = request.GET.get("name", None) filters = self.filters_manager.get(request) context = { - 'show': show, - 'order_by': order_by, - 'request': request, - 'func_name': func_name, - 'options_show': self.show, - 'options_order_by': self.order_by, - 'options_func_names': self._get_function_names(silk_request), - 'options_names': self._get_names(silk_request), - 'filters': filters + "show": show, + "order_by": order_by, + "request": request, + "func_name": func_name, + "options_show": self.show, + "options_order_by": self.order_by, + "options_func_names": self._get_function_names(silk_request), + "options_names": self._get_names(silk_request), + "filters": filters, } context.update(csrf(request)) if silk_request: - context['silk_request'] = silk_request + context["silk_request"] = silk_request if func_name: - context['func_name'] = func_name + context["func_name"] = func_name if name: - context['name'] = name - objs = self._get_objects(show=show, - order_by=order_by, - func_name=func_name, - silk_request=silk_request, - name=name, - filters=[BaseFilter.from_dict(x) for _, x in filters.items()]) - context['results'] = objs + context["name"] = name + objs = self._get_objects( + show=show, + order_by=order_by, + func_name=func_name, + silk_request=silk_request, + name=name, + filters=[BaseFilter.from_dict(x) for _, x in filters.items()], + ) + context["results"] = objs return context @method_decorator(login_possibly_required) @method_decorator(permissions_possibly_required) def get(self, request, *args, **kwargs): - return render(request, 'silk/profiling.html', self._create_context(request, *args, **kwargs)) + return render( + request, + "silk/profiling.html", + self._create_context(request, *args, **kwargs), + ) @method_decorator(login_possibly_required) @method_decorator(permissions_possibly_required) @@ -130,4 +150,4 @@ def post(self, request): filters = filters_from_request(request) filters_as_dict = {ident: f.as_dict() for ident, f in filters.items()} self.filters_manager.save(request, filters_as_dict) - return render(request, 'silk/profiling.html', self._create_context(request)) + return render(request, "silk/profiling.html", self._create_context(request)) diff --git a/silk/views/raw.py b/silk/views/raw.py index 87d05571..bf55dbd5 100644 --- a/silk/views/raw.py +++ b/silk/views/raw.py @@ -8,7 +8,7 @@ from silk.auth import login_possibly_required, 
permissions_possibly_required from silk.models import Request -Logger = logging.getLogger('silk.views.raw') +Logger = logging.getLogger("silk.views.raw") class Raw(View): @@ -16,18 +16,20 @@ class Raw(View): @method_decorator(login_possibly_required) @method_decorator(permissions_possibly_required) def get(self, request, request_id): - typ = request.GET.get('typ', None) - subtyp = request.GET.get('subtyp', None) + typ = request.GET.get("typ", None) + subtyp = request.GET.get("subtyp", None) body = None if typ and subtyp: silk_request = Request.objects.get(pk=request_id) - if typ == 'request': - body = silk_request.raw_body if subtyp == 'raw' else silk_request.body - elif typ == 'response': + if typ == "request": + body = silk_request.raw_body if subtyp == "raw" else silk_request.body + elif typ == "response": Logger.debug(silk_request.response.raw_body_decoded) - body = silk_request.response.raw_body_decoded if subtyp == 'raw' else silk_request.response.body - return render(request, 'silk/raw.html', { - 'body': body - }) + body = ( + silk_request.response.raw_body_decoded + if subtyp == "raw" + else silk_request.response.body + ) + return render(request, "silk/raw.html", {"body": body}) else: - return HttpResponse(content='Bad Request', status=400) + return HttpResponse(content="Bad Request", status=400) diff --git a/silk/views/request_detail.py b/silk/views/request_detail.py index 6baa3adb..623ddbb2 100644 --- a/silk/views/request_detail.py +++ b/silk/views/request_detail.py @@ -25,18 +25,26 @@ def get(self, request, request_id): except (ValueError, TypeError): pass context = { - 'silk_request': silk_request, - 'curl': curl_cmd(url=request.build_absolute_uri(silk_request.path), - method=silk_request.method, - query_params=query_params, - body=body, - content_type=silk_request.content_type), - 'query_params': json.dumps(query_params, sort_keys=True, indent=4) if query_params else None, - 'client': gen(path=silk_request.path, - method=silk_request.method, - query_params=query_params, - data=body, - content_type=silk_request.content_type), - 'request': request + "silk_request": silk_request, + "curl": curl_cmd( + url=request.build_absolute_uri(silk_request.path), + method=silk_request.method, + query_params=query_params, + body=body, + content_type=silk_request.content_type, + ), + "query_params": ( + json.dumps(query_params, sort_keys=True, indent=4) + if query_params + else None + ), + "client": gen( + path=silk_request.path, + method=silk_request.method, + query_params=query_params, + data=body, + content_type=silk_request.content_type, + ), + "request": request, } - return render(request, 'silk/request.html', context) + return render(request, "silk/request.html", context) diff --git a/silk/views/requests.py b/silk/views/requests.py index a0a93d67..a0fcf0a4 100644 --- a/silk/views/requests.py +++ b/silk/views/requests.py @@ -8,7 +8,7 @@ from silk.models import Request, Response from silk.request_filters import BaseFilter, FiltersManager, filters_from_request -__author__ = 'mtford' +__author__ = "mtford" class RequestsView(View): @@ -16,98 +16,80 @@ class RequestsView(View): default_show = 25 order_by = { - 'start_time': { - 'label': 'Recent', - 'additional_query_filter': None + "start_time": {"label": "Recent", "additional_query_filter": None}, + "path": {"label": "Path", "additional_query_filter": None}, + "num_sql_queries": {"label": "Num. 
Queries", "additional_query_filter": None}, + "time_taken": { + "label": "Time", + "additional_query_filter": lambda x: x.filter(time_taken__gte=0), }, - 'path': { - 'label': 'Path', - 'additional_query_filter': None + "db_time": { + "label": "Time on queries", + "additional_query_filter": lambda x: x.annotate( + db_time=Sum("queries__time_taken") + ).filter(db_time__gte=0), }, - 'num_sql_queries': { - 'label': 'Num. Queries', - 'additional_query_filter': None - }, - 'time_taken': { - 'label': 'Time', - 'additional_query_filter': lambda x: x.filter(time_taken__gte=0) - }, - 'db_time': { - 'label': 'Time on queries', - 'additional_query_filter': lambda x: x.annotate(db_time=Sum('queries__time_taken')) - .filter(db_time__gte=0) - }, - } - order_dir = { - 'ASC': { - 'label': 'Ascending' - }, - 'DESC': { - 'label': 'Descending' - } - } - view_style = { - 'card': { - 'label': 'Cards' - }, - 'row': { - 'label': 'Rows' - } } - default_order_by = 'start_time' - default_order_dir = 'DESC' - default_view_style = 'card' + order_dir = {"ASC": {"label": "Ascending"}, "DESC": {"label": "Descending"}} + view_style = {"card": {"label": "Cards"}, "row": {"label": "Rows"}} + default_order_by = "start_time" + default_order_dir = "DESC" + default_view_style = "card" - session_key_request_filters = 'request_filters' + session_key_request_filters = "request_filters" filters_manager = FiltersManager(session_key_request_filters) @property def options_order_by(self): - return [{'value': x, 'label': self.order_by[x]['label']} for x in self.order_by.keys()] + return [ + {"value": x, "label": self.order_by[x]["label"]} + for x in self.order_by.keys() + ] @property def options_order_dir(self): - return [{'value': x, 'label': self.order_dir[x]['label']} for x in self.order_dir.keys()] + return [ + {"value": x, "label": self.order_dir[x]["label"]} + for x in self.order_dir.keys() + ] @property def options_view_style(self): - return [{'value': x, 'label': self.view_style[x]['label']} for x in self.view_style.keys()] + return [ + {"value": x, "label": self.view_style[x]["label"]} + for x in self.view_style.keys() + ] def _get_paths(self): - return Request.objects.values_list( - 'path', - flat=True - ).order_by( - 'path' - ).distinct() + return ( + Request.objects.values_list("path", flat=True).order_by("path").distinct() + ) def _get_views(self): - return Request.objects.values_list( - 'view_name', - flat=True - ).exclude( - view_name='' - ).order_by( - 'view_name' - ).distinct() + return ( + Request.objects.values_list("view_name", flat=True) + .exclude(view_name="") + .order_by("view_name") + .distinct() + ) def _get_status_codes(self): - return Response.objects.values_list( - 'status_code', - flat=True - ).order_by( - 'status_code' - ).distinct() + return ( + Response.objects.values_list("status_code", flat=True) + .order_by("status_code") + .distinct() + ) def _get_methods(self): - return Request.objects.values_list( - 'method', - flat=True - ).order_by( - 'method' - ).distinct() - - def _get_objects(self, show=None, order_by=None, order_dir=None, path=None, filters=None): + return ( + Request.objects.values_list("method", flat=True) + .order_by("method") + .distinct() + ) + + def _get_objects( + self, show=None, order_by=None, order_dir=None, path=None, filters=None + ): if not filters: filters = [] if not show: @@ -120,9 +102,11 @@ def _get_objects(self, show=None, order_by=None, order_dir=None, path=None, filt if order_by not in self.order_by.keys(): raise RuntimeError('Unknown order_by: "%s"' % order_by) ob = 
self.order_by[order_by]
-        if ob['additional_query_filter'] is not None:
-            query_set = ob['additional_query_filter'](query_set)
-        query_set = query_set.order_by('{}{}'.format('-' if order_dir == 'DESC' else '', order_by))
+        if ob["additional_query_filter"] is not None:
+            query_set = ob["additional_query_filter"](query_set)
+        query_set = query_set.order_by(
+            "{}{}".format("-" if order_dir == "DESC" else "", order_by)
+        )
         if path:
             query_set = query_set.filter(path=path)
         for f in filters:
@@ -132,35 +116,40 @@ def _get_objects(self, show=None, order_by=None, order_dir=None, path=None, filt
 
     def _create_context(self, request):
         raw_filters = self.filters_manager.get(request).copy()
-        show = raw_filters.pop('show', self.default_show)
-        order_by = raw_filters.pop('order_by', self.default_order_by)
-        order_dir = raw_filters.pop('order_dir', self.default_order_dir)
-        view_style = raw_filters.pop('view_style', self.default_view_style)
+        show = raw_filters.pop("show", self.default_show)
+        order_by = raw_filters.pop("order_by", self.default_order_by)
+        order_dir = raw_filters.pop("order_dir", self.default_order_dir)
+        view_style = raw_filters.pop("view_style", self.default_view_style)
         if show:
             show = int(show)
-        path = request.GET.get('path', None)
+        path = request.GET.get("path", None)
         context = {
-            'show': show,
-            'order_by': order_by,
-            'order_dir': order_dir,
-            'view_style': view_style,
-            'request': request,
-            'options_show': self.show,
-            'options_order_by': self.options_order_by,
-            'options_order_dir': self.options_order_dir,
-            'options_view_style': self.options_view_style,
-            'options_paths': self._get_paths(),
-            'options_status_codes': self._get_status_codes(),
-            'options_methods': self._get_methods(),
-            'view_names': self._get_views(),
-            'filters': raw_filters,
+            "show": show,
+            "order_by": order_by,
+            "order_dir": order_dir,
+            "view_style": view_style,
+            "request": request,
+            "options_show": self.show,
+            "options_order_by": self.options_order_by,
+            "options_order_dir": self.options_order_dir,
+            "options_view_style": self.options_view_style,
+            "options_paths": self._get_paths(),
+            "options_status_codes": self._get_status_codes(),
+            "options_methods": self._get_methods(),
+            "view_names": self._get_views(),
+            "filters": raw_filters,
         }
         context.update(csrf(request))
         if path:
-            context['path'] = path
-        context['results'] = self._get_objects(show, order_by, order_dir, path,
-                                               filters=[BaseFilter.from_dict(x) for _, x in raw_filters.items()])
+            context["path"] = path
+        context["results"] = self._get_objects(
+            show,
+            order_by,
+            order_dir,
+            path,
+            filters=[BaseFilter.from_dict(x) for _, x in raw_filters.items()],
+        )
         return context
 
     @method_decorator(login_possibly_required)
@@ -172,10 +161,14 @@ def get(self, request):
             # filters from previous session
             **self.filters_manager.get(request),
             # new filters from GET, overriding old
-            **{k: v for k, v in request.GET.items() if k in ['show', 'order_by', 'order_dir', 'view_style']},
+            **{
+                k: v
+                for k, v in request.GET.items()
+                if k in ["show", "order_by", "order_dir", "view_style"]
+            },
         }
         self.filters_manager.save(request, filters)
-        return render(request, 'silk/requests.html', self._create_context(request))
+        return render(request, "silk/requests.html", self._create_context(request))
 
     @method_decorator(login_possibly_required)
     @method_decorator(permissions_possibly_required)
@@ -183,9 +176,15 @@ def post(self, request):
         previous_session = self.filters_manager.get(request)
         filters = {
             # filters from previous session but only GET values
-            **{k: v for k, v in previous_session.items() if k in ['show', 'order_by', 'order_dir', 'view_style']},
+            **{
+                k: v
+                for k, v in previous_session.items()
+                if k in ["show", "order_by", "order_dir", "view_style"]
+            },
             # new filters from POST, overriding old
-            **{ident: f.as_dict() for ident, f in filters_from_request(request).items()},
+            **{
+                ident: f.as_dict() for ident, f in filters_from_request(request).items()
+            },
         }
         self.filters_manager.save(request, filters)
-        return render(request, 'silk/requests.html', self._create_context(request))
+        return render(request, "silk/requests.html", self._create_context(request))
diff --git a/silk/views/sql.py b/silk/views/sql.py
index f3a69072..cc5a97e3 100644
--- a/silk/views/sql.py
+++ b/silk/views/sql.py
@@ -6,7 +6,7 @@
 from silk.models import Profile, Request, SQLQuery
 from silk.utils.pagination import _page
 
-__author__ = 'mtford'
+__author__ = "mtford"
 
 
 class SQLView(View):
@@ -14,24 +14,26 @@ class SQLView(View):
     @method_decorator(login_possibly_required)
     @method_decorator(permissions_possibly_required)
     def get(self, request, *_, **kwargs):
-        request_id = kwargs.get('request_id')
-        profile_id = kwargs.get('profile_id')
+        request_id = kwargs.get("request_id")
+        profile_id = kwargs.get("profile_id")
         context = {
-            'request': request,
+            "request": request,
         }
         if request_id:
             silk_request = Request.objects.get(id=request_id)
-            query_set = SQLQuery.objects.filter(request=silk_request).order_by('-start_time')
+            query_set = SQLQuery.objects.filter(request=silk_request).order_by(
+                "-start_time"
+            )
             for q in query_set:
                 q.start_time_relative = q.start_time - silk_request.start_time
             page = _page(request, query_set)
-            context['silk_request'] = silk_request
+            context["silk_request"] = silk_request
         if profile_id:
             p = Profile.objects.get(id=profile_id)
-            page = _page(request, p.queries.order_by('-start_time').all())
-            context['profile'] = p
+            page = _page(request, p.queries.order_by("-start_time").all())
+            context["profile"] = p
         if not (request_id or profile_id):
-            raise KeyError('no profile_id or request_id')
+            raise KeyError("no profile_id or request_id")
         # noinspection PyUnboundLocalVariable
-        context['items'] = page
-        return render(request, 'silk/sql.html', context)
+        context["items"] = page
+        return render(request, "silk/sql.html", context)
diff --git a/silk/views/sql_detail.py b/silk/views/sql_detail.py
index 4d1d5d5a..64af951a 100644
--- a/silk/views/sql_detail.py
+++ b/silk/views/sql_detail.py
@@ -20,16 +20,16 @@ def _urlify(self, str):
         n = 1
         while m:
             group = m.groupdict()
-            src = group['src']
+            src = group["src"]
             files.append(src)
-            num = group['num']
-            start = m.start('src')
-            end = m.end('src')
+            num = group["num"]
+            start = m.start("src")
+            end = m.end("src")
             rep = '<a name={name} href="?pos={pos}&file_path={src}&line_num={num}#here" class="path">{src}</a>'.format(
                 pos=n,
                 src=src,
                 num=num,
-                name='c%d' % n,
+                name="c%d" % n,
             )
             str = str[:start] + rep + str[end:]
             m = r.search(str)
@@ -39,34 +39,34 @@ def get(self, request, *_, **kwargs):
-        sql_id = kwargs.get('sql_id', None)
-        request_id = kwargs.get('request_id', None)
-        profile_id = kwargs.get('profile_id', None)
+        sql_id = kwargs.get("sql_id", None)
+        request_id = kwargs.get("request_id", None)
+        profile_id = kwargs.get("profile_id", None)
         sql_query = SQLQuery.objects.get(pk=sql_id)
-        pos = int(request.GET.get('pos', 0))
-        file_path = request.GET.get('file_path', '')
-        line_num = int(request.GET.get('line_num', 0))
+        pos = int(request.GET.get("pos", 0))
+        file_path = request.GET.get("file_path", "")
+        line_num = int(request.GET.get("line_num", 0))
         tb = sql_query.traceback_ln_only
         analysis = sql_query.analysis
         str, files = self._urlify(tb)
         if file_path and file_path not in files:
             raise PermissionDenied
-        tb = [mark_safe(x) for x in str.split('\n')]
+        tb = [mark_safe(x) for x in str.split("\n")]
         context = {
-            'sql_query': sql_query,
-            'traceback': tb,
-            'pos': pos,
-            'line_num': line_num,
-            'file_path': file_path,
-            'analysis': analysis,
-            'virtualenv_path': os.environ.get('VIRTUAL_ENV') or '',
+            "sql_query": sql_query,
+            "traceback": tb,
+            "pos": pos,
+            "line_num": line_num,
+            "file_path": file_path,
+            "analysis": analysis,
+            "virtualenv_path": os.environ.get("VIRTUAL_ENV") or "",
         }
         if request_id:
-            context['silk_request'] = Request.objects.get(pk=request_id)
+            context["silk_request"] = Request.objects.get(pk=request_id)
         if profile_id:
-            context['profile'] = Profile.objects.get(pk=int(profile_id))
+            context["profile"] = Profile.objects.get(pk=int(profile_id))
         if pos and file_path and line_num:
             actual_line, code = _code(file_path, line_num)
-            context['code'] = code
-            context['actual_line'] = actual_line
-        return render(request, 'silk/sql_detail.html', context)
+            context["code"] = code
+            context["actual_line"] = actual_line
+        return render(request, "silk/sql_detail.html", context)
diff --git a/silk/views/summary.py b/silk/views/summary.py
index 52651f60..ee2d45c5 100644
--- a/silk/views/summary.py
+++ b/silk/views/summary.py
@@ -10,45 +10,87 @@
 
 
 class SummaryView(View):
-    filters_key = 'summary_filters'
+    filters_key = "summary_filters"
     filters_manager = FiltersManager(filters_key)
 
     def _avg_num_queries(self, filters):
-        queries__aggregate = models.Request.objects.filter(*filters).annotate(num_queries=Count('queries')).aggregate(num=Avg('num_queries'))
-        return queries__aggregate['num']
+        queries__aggregate = (
+            models.Request.objects.filter(*filters)
+            .annotate(num_queries=Count("queries"))
+            .aggregate(num=Avg("num_queries"))
+        )
+        return queries__aggregate["num"]
 
     def _avg_time_spent_on_queries(self, filters):
-        taken__aggregate = models.Request.objects.filter(*filters).annotate(time_spent=Sum('queries__time_taken')).aggregate(num=Avg('time_spent'))
-        return taken__aggregate['num']
+        taken__aggregate = (
+            models.Request.objects.filter(*filters)
+            .annotate(time_spent=Sum("queries__time_taken"))
+            .aggregate(num=Avg("time_spent"))
+        )
+        return taken__aggregate["num"]
 
     def _avg_overall_time(self, filters):
-        taken__aggregate = models.Request.objects.filter(*filters).annotate(time_spent=Sum('time_taken')).aggregate(num=Avg('time_spent'))
-        return taken__aggregate['num']
+        taken__aggregate = (
+            models.Request.objects.filter(*filters)
+            .annotate(time_spent=Sum("time_taken"))
+            .aggregate(num=Avg("time_spent"))
+        )
+        return taken__aggregate["num"]
 
     # TODO: Find a more efficient way to do this. Currently has to go to DB num. views + 1 times and is prob quite expensive
     def _longest_query_by_view(self, filters):
-        values_list = models.Request.objects.filter(*filters).values_list("view_name").annotate(max=Max('time_taken')).filter(max__isnull=False).order_by('-max')[:5]
+        values_list = (
+            models.Request.objects.filter(*filters)
+            .values_list("view_name")
+            .annotate(max=Max("time_taken"))
+            .filter(max__isnull=False)
+            .order_by("-max")[:5]
+        )
         requests = []
         for view_name, _ in values_list:
-            request = models.Request.objects.filter(view_name=view_name, *filters).filter(time_taken__isnull=False).order_by('-time_taken')[0]
+            request = (
+                models.Request.objects.filter(view_name=view_name, *filters)
+                .filter(time_taken__isnull=False)
+                .order_by("-time_taken")[0]
+            )
             requests.append(request)
         return sorted(requests, key=lambda item: item.time_taken, reverse=True)
 
     def _time_spent_in_db_by_view(self, filters):
-        values_list = models.Request.objects.filter(*filters).values_list('view_name').annotate(t=Sum('queries__time_taken')).filter(t__gte=0).order_by('-t')[:5]
+        values_list = (
+            models.Request.objects.filter(*filters)
+            .values_list("view_name")
+            .annotate(t=Sum("queries__time_taken"))
+            .filter(t__gte=0)
+            .order_by("-t")[:5]
+        )
         requests = []
         for view, _ in values_list:
-            r = models.Request.objects.filter(view_name=view, *filters).annotate(t=Sum('queries__time_taken')).filter(t__isnull=False).order_by('-t')[0]
+            r = (
+                models.Request.objects.filter(view_name=view, *filters)
+                .annotate(t=Sum("queries__time_taken"))
+                .filter(t__isnull=False)
+                .order_by("-t")[0]
+            )
             requests.append(r)
         return sorted(requests, key=lambda item: item.t, reverse=True)
 
     def _num_queries_by_view(self, filters):
-        queryset = models.Request.objects.filter(*filters).values_list('view_name').annotate(t=Count('queries')).order_by('-t')[:5]
+        queryset = (
+            models.Request.objects.filter(*filters)
+            .values_list("view_name")
+            .annotate(t=Count("queries"))
+            .order_by("-t")[:5]
+        )
         views = [r[0] for r in queryset[:6]]
         requests = []
         for view in views:
             try:
-                r = models.Request.objects.filter(view_name=view, *filters).annotate(t=Count('queries')).order_by('-t')[0]
+                r = (
+                    models.Request.objects.filter(view_name=view, *filters)
+                    .annotate(t=Count("queries"))
+                    .order_by("-t")[0]
+                )
                 requests.append(r)
             except IndexError:
                 pass
@@ -56,19 +98,21 @@ def _num_queries_by_view(self, filters):
 
     def _create_context(self, request):
         raw_filters = self.filters_manager.get(request)
-        filters = [BaseFilter.from_dict(filter_d) for _, filter_d in raw_filters.items()]
+        filters = [
+            BaseFilter.from_dict(filter_d) for _, filter_d in raw_filters.items()
+        ]
         avg_overall_time = self._avg_num_queries(filters)
         c = {
-            'request': request,
-            'num_requests': models.Request.objects.filter(*filters).count(),
-            'num_profiles': models.Profile.objects.filter(*filters).count(),
-            'avg_num_queries': avg_overall_time,
-            'avg_time_spent_on_queries': self._avg_time_spent_on_queries(filters),
-            'avg_overall_time': self._avg_overall_time(filters),
-            'longest_queries_by_view': self._longest_query_by_view(filters),
-            'most_time_spent_in_db': self._time_spent_in_db_by_view(filters),
-            'most_queries': self._num_queries_by_view(filters),
-            'filters': raw_filters
+            "request": request,
+            "num_requests": models.Request.objects.filter(*filters).count(),
+            "num_profiles": models.Profile.objects.filter(*filters).count(),
+            "avg_num_queries": avg_overall_time,
+            "avg_time_spent_on_queries": self._avg_time_spent_on_queries(filters),
+            "avg_overall_time": self._avg_overall_time(filters),
+            "longest_queries_by_view": self._longest_query_by_view(filters),
+            "most_time_spent_in_db": self._time_spent_in_db_by_view(filters),
+            "most_queries": self._num_queries_by_view(filters),
+            "filters": raw_filters,
         }
         c.update(csrf(request))
         return c
@@ -77,11 +121,13 @@ def _create_context(self, request):
     @method_decorator(permissions_possibly_required)
     def get(self, request):
         c = self._create_context(request)
-        return render(request, 'silk/summary.html', c)
+        return render(request, "silk/summary.html", c)
 
     @method_decorator(login_possibly_required)
     @method_decorator(permissions_possibly_required)
     def post(self, request):
-        filters = {ident: f.as_dict() for ident, f in filters_from_request(request).items()}
+        filters = {
+            ident: f.as_dict() for ident, f in filters_from_request(request).items()
+        }
         self.filters_manager.save(request, filters)
-        return render(request, 'silk/summary.html', self._create_context(request))
+        return render(request, "silk/summary.html", self._create_context(request))