mirror of https://git.linux-kernel.at/oliver/ivatar.git (synced 2025-11-15 04:28:03 +00:00)
Enhance performance tests
@@ -158,21 +158,24 @@ performance_tests_dev:
   image: python:3.11-alpine
   only:
     - devel
+  when: on_success # Run automatically after successful deployment verification
   variables:
     DEV_URL: "https://dev.libravatar.org"
+    PYTHONUNBUFFERED: 1
   before_script:
     - apk add --no-cache curl
-    - pip install requests
+    - pip install requests Pillow prettytable pyLibravatar dnspython py3dns
   script:
     - echo "Running performance tests against dev.libravatar.org..."
-    - python3 scripts/performance_tests.py --base-url $DEV_URL --concurrent-users 5 --output performance_dev.json
+    - python3 scripts/performance_tests.py --base-url $DEV_URL --concurrent-users 5 --avatar-threshold 2500 --response-threshold 2500 --p95-threshold 5000 --ignore-cache-warnings --output performance_dev.json
   artifacts:
     paths:
       - performance_dev.json
     expire_in: 7 days
   allow_failure: true # Don't fail deployment on performance issues
   needs:
-    - verify_dev_deployment # Run after deployment verification
+    - job: verify_dev_deployment
+      artifacts: false # Run after deployment verification succeeds

 # Performance testing against production server (master branch only)
 performance_tests_prod:
@@ -180,12 +183,13 @@ performance_tests_prod:
   image: python:3.11-alpine
   only:
     - master
-  when: manual # Manual trigger to avoid impacting production unnecessarily
+  when: on_success # Run automatically after successful deployment verification
   variables:
     PROD_URL: "https://libravatar.org"
+    PYTHONUNBUFFERED: 1
   before_script:
     - apk add --no-cache curl
-    - pip install requests
+    - pip install requests Pillow prettytable pyLibravatar dnspython py3dns
   script:
     - echo "Running performance tests against libravatar.org..."
     - python3 scripts/performance_tests.py --base-url $PROD_URL --concurrent-users 3 --output performance_prod.json
@@ -195,7 +199,30 @@ performance_tests_prod:
     expire_in: 30 days # Keep production results longer
   allow_failure: true # Don't fail deployment on performance issues
   needs:
-    - verify_prod_deployment # Run after deployment verification
+    - job: verify_prod_deployment
+      artifacts: false # Run after deployment verification succeeds

+# Manual performance testing against production (for on-demand testing)
+performance_tests_prod_manual:
+  stage: deploy
+  image: python:3.11-alpine
+  only:
+    - master
+  when: manual # Manual trigger for on-demand performance testing
+  variables:
+    PROD_URL: "https://libravatar.org"
+    PYTHONUNBUFFERED: 1
+  before_script:
+    - apk add --no-cache curl
+    - pip install requests Pillow prettytable pyLibravatar dnspython py3dns
+  script:
+    - echo "Running manual performance tests against libravatar.org..."
+    - python3 scripts/performance_tests.py --base-url $PROD_URL --concurrent-users 5 --output performance_prod_manual.json
+  artifacts:
+    paths:
+      - performance_prod_manual.json
+    expire_in: 30 days
+  allow_failure: true
+
 # Deployment verification jobs
 verify_dev_deployment:
@@ -207,8 +234,9 @@ verify_dev_deployment:
     DEV_URL: "https://dev.libravatar.org"
     MAX_RETRIES: 30
     RETRY_DELAY: 60
+    PYTHONUNBUFFERED: 1
   before_script:
-    - apk add --no-cache curl
+    - apk add --no-cache curl git
     - pip install Pillow
   script:
     - echo "Waiting for dev.libravatar.org deployment to complete..."
@@ -225,8 +253,9 @@ verify_prod_deployment:
     PROD_URL: "https://libravatar.org"
     MAX_RETRIES: 10
     RETRY_DELAY: 30
+    PYTHONUNBUFFERED: 1
   before_script:
-    - apk add --no-cache curl
+    - apk add --no-cache curl git
     - pip install Pillow
   script:
     - echo "Verifying production deployment..."
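The dev job now pins explicit limits (2500 ms average, 5000 ms p95) and ignores cache warnings, while the production jobs pass no threshold flags and fall back to whatever defaults scripts/performance_tests.py defines. The script itself is not part of this diff; the following is only a sketch of the command-line surface the jobs above rely on, assuming a conventional argparse layout (defaults, help texts and the result structure are illustrative assumptions):

#!/usr/bin/env python3
"""Illustrative skeleton of the CLI used by the jobs above.

Only the flag names are taken from the pipeline definition; defaults,
help texts and the result structure are assumptions for this sketch.
"""
import argparse
import json


def parse_args() -> argparse.Namespace:
    parser = argparse.ArgumentParser(description="ivatar performance tests (sketch)")
    parser.add_argument("--base-url", required=True, help="instance under test")
    parser.add_argument("--concurrent-users", type=int, default=5)
    parser.add_argument("--avatar-threshold", type=int, default=1000, help="ms, assumed default")
    parser.add_argument("--response-threshold", type=int, default=1000, help="ms, assumed default")
    parser.add_argument("--p95-threshold", type=int, default=2000, help="ms, assumed default")
    parser.add_argument("--ignore-cache-warnings", action="store_true")
    parser.add_argument("--output", help="write JSON results to this path")
    return parser.parse_args()


def main() -> int:
    args = parse_args()
    # A real run would fire concurrent requests and collect timings here;
    # the sketch only shows how thresholds and the output artifact are wired.
    results = {"base_url": args.base_url, "concurrent_users": args.concurrent_users}
    if args.output:
        with open(args.output, "w") as handle:
            json.dump(results, handle, indent=2)
    return 0


if __name__ == "__main__":
    raise SystemExit(main())

Because the jobs set allow_failure: true, a breached threshold surfaces as a pipeline warning rather than a failed deployment; the dev job relaxes the limits and silences cache warnings, while the production jobs keep the script's defaults.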
@@ -4,16 +4,20 @@ repos:
     hooks:
       - id: check-useless-excludes
   - repo: https://github.com/pre-commit/mirrors-prettier
-    rev: v3.0.0-alpha.4
+    rev: v4.0.0-alpha.8
     hooks:
       - id: prettier
         files: \.(css|js|md|markdown|json)
   - repo: https://github.com/python/black
-    rev: 22.12.0
+    rev: 25.9.0
     hooks:
       - id: black
+  - repo: https://github.com/asottile/pyupgrade
+    rev: v3.21.0
+    hooks:
+      - id: pyupgrade
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.4.0
+    rev: v6.0.0
     hooks:
       - id: check-added-large-files
       - id: check-ast
@@ -28,7 +32,6 @@ repos:
         args:
           - --unsafe
       - id: end-of-file-fixer
-      - id: fix-encoding-pragma
      - id: forbid-new-submodules
       - id: no-commit-to-branch
         args:
@@ -38,7 +41,7 @@ repos:
       - id: sort-simple-yaml
       - id: trailing-whitespace
   - repo: https://github.com/PyCQA/flake8
-    rev: 6.0.0
+    rev: 7.3.0
     hooks:
       - id: flake8
   - repo: local
@@ -57,7 +60,7 @@ repos:
         types:
           - shell
   - repo: https://github.com/asottile/blacken-docs
-    rev: v1.12.1
+    rev: 1.20.0
     hooks:
       - id: blacken-docs
   # YASpeller does not seem to work anymore
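Most of the hook bumps simply track upstream releases; the functional changes are the new pyupgrade hook and the dropped fix-encoding-pragma hook, which together match the clean-ups that make up the rest of this commit: every "# -*- coding: utf-8 -*-" pragma is removed (UTF-8 is already the Python 3 default) and printf-style formatting gives way to str.format(). A small, self-contained sketch of that second rewrite, with illustrative names rather than ivatar's own:

def format_examples(user: str, tld: str) -> list[str]:
    """All three spellings build the same string; the commit moves the code
    base from the first to the second (names here are illustrative)."""
    return [
        "%s@%s.org" % (user, tld),  # printf-style, removed by this commit
        "{}@{}.org".format(user, tld),  # str.format, the new spelling
        f"{user}@{tld}.org",  # f-string equivalent
    ]


if __name__ == "__main__":
    assert len(set(format_examples("alice", "example"))) == 1

The f-string spelling would be equivalent; this commit standardises on str.format().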
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 Configuration overrides for settings.py
 """
@@ -1,3 +1,2 @@
-# -*- coding: utf-8 -*-
 # Test configuration to verify LOGS_DIR override
 LOGS_DIR = "/tmp/ivatar_test_logs"
@@ -1,5 +1,4 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
 """
 Import the whole libravatar export
 """
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 Module init
 """
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 Default: useful variables for the base page templates.
 """
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 File upload security utilities for ivatar
 """
@@ -1,5 +1,5 @@
-# -*- coding: utf-8 -*-
 """
 Module init
 """
+
 app_label = __name__  # pylint: disable=invalid-name
@@ -1,7 +1,7 @@
-# -*- coding: utf-8 -*-
 """
 Register models in admin
 """
+
 from django.contrib import admin

 from .models import Photo, ConfirmedEmail, UnconfirmedEmail
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 from social_core.backends.open_id_connect import OpenIdConnectAuth

 from ivatar.ivataraccount.models import ConfirmedEmail, Photo
@@ -1,7 +1,7 @@
-# -*- coding: utf-8 -*-
 """
 Classes for our ivatar.ivataraccount.forms
 """
+
 from urllib.parse import urlsplit, urlunsplit

 from django import forms
@@ -1,7 +1,7 @@
-# -*- coding: utf-8 -*-
 """
 Helper method to fetch Gravatar image
 """
+
 from ssl import SSLError
 from urllib.request import HTTPError, URLError
 from ivatar.utils import urlopen
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 2.0.5 on 2018-05-07 07:13

 from django.conf import settings
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 2.0.5 on 2018-05-07 07:23

 from django.db import migrations, models
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 2.0.5 on 2018-05-08 06:37

 import datetime
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 2.0.5 on 2018-05-08 07:42

 from django.db import migrations, models
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 2.0.5 on 2018-05-22 11:55

 from django.db import migrations, models
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 2.0.6 on 2018-06-26 14:45

 from django.db import migrations, models
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 2.0.6 on 2018-06-27 06:24

 from django.db import migrations, models
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # pylint: disable=invalid-name,missing-docstring
 # Generated by Django 2.0.6 on 2018-07-04 12:32

@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 2.0.6 on 2018-07-05 11:52

 from django.db import migrations, models
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 2.0.6 on 2018-07-05 12:01

 from django.db import migrations, models
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 2.1.3 on 2018-11-07 15:50

 from django.db import migrations, models
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 2.1.3 on 2018-11-07 17:32

 from django.db import migrations, models
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 2.1.3 on 2018-12-03 14:21

 from django.db import migrations, models
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 2.1.5 on 2019-02-18 16:02

 from django.db import migrations
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 3.0.3 on 2020-02-25 09:34

 from django.db import migrations, models
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 3.1.7 on 2021-04-13 09:04

 from django.db import migrations, models
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 3.2.3 on 2021-05-28 13:14

 from django.db import migrations, models
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 5.0 on 2024-05-31 15:00

 from django.db import migrations, models
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 5.1.5 on 2025-01-27 10:54

 from django.db import migrations, models
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 5.1.5 on 2025-01-27 13:33

 from django.db import migrations, models
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Generated manually for performance optimization

 from typing import Any, List, Tuple, Optional
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 Our models for ivatar.ivataraccount
 """
@@ -404,7 +403,7 @@ class ConfirmedEmail(BaseAccountModel):
                logger.debug("Successfully cleaned up cached page: %s" % cache_key)
            except Exception as exc:
                logger.warning(
-                    "Failed to clean up cached page %s: %s" % (cache_key, exc)
+                    "Failed to clean up cached page {}: {}".format(cache_key, exc)
                )

        # Invalidate Bluesky avatar URL cache if bluesky_handle changed
@@ -455,9 +454,7 @@ class UnconfirmedEmail(BaseAccountModel):
            + self.user.username.encode("utf-8")  # pylint: disable=no-member
        )  # pylint: disable=no-member
        self.verification_key = hash_object.hexdigest()
-        super(UnconfirmedEmail, self).save(
-            force_insert, force_update, using, update_fields
-        )
+        super().save(force_insert, force_update, using, update_fields)

    def send_confirmation_mail(self, url=SECURE_BASE_URL):
        """
@@ -602,7 +599,7 @@ class ConfirmedOpenId(BaseAccountModel):
                logger.debug("Successfully cleaned up cached page: %s" % cache_key)
            except Exception as exc:
                logger.warning(
-                    "Failed to clean up cached page %s: %s" % (cache_key, exc)
+                    "Failed to clean up cached page {}: {}".format(cache_key, exc)
                )

        # Invalidate Bluesky avatar URL cache if bluesky_handle exists
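The save() rewrite above is behaviour-preserving: in Python 3 the zero-argument super() resolves the class and instance from the enclosing method, so naming UnconfirmedEmail explicitly adds nothing. A minimal standalone illustration (class names are generic, not ivatar's models):

class Base:
    def save(self, *args, **kwargs):
        return ("saved", args, kwargs)


class Explicit(Base):
    def save(self, *args, **kwargs):
        # Old spelling: repeats the class name, easy to get wrong after renames.
        return super(Explicit, self).save(*args, **kwargs)


class ZeroArg(Base):
    def save(self, *args, **kwargs):
        # New spelling: identical MRO lookup, no repeated class name.
        return super().save(*args, **kwargs)


if __name__ == "__main__":
    assert Explicit().save(1, using=None) == ZeroArg().save(1, using=None)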
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 Reading libravatar export
 """
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 from unittest import mock

 from django.test import TestCase
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 Test our views in ivatar.ivataraccount.views and ivatar.views
 """
@@ -53,9 +52,9 @@ class Tester(TestCase):  # pylint: disable=too-many-public-methods
    user = None
    username = random_string()
    password = random_string()
-    email = "%s@%s.org" % (username, random_string())
+    email = "{}@{}.org".format(username, random_string())
    # Dunno why random tld doesn't work, but I'm too lazy now to investigate
-    openid = "http://%s.%s.%s/" % (username, random_string(), "org")
+    openid = "http://{}.{}.{}/".format(username, random_string(), "org")
    first_name = random_string()
    last_name = random_string()

@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 Test our views in ivatar.ivataraccount.views and ivatar.views
 """
@@ -37,9 +36,9 @@ class Tester(TestCase):  # pylint: disable=too-many-public-methods
    user = None
    username = random_string()
    password = random_string()
-    email = "%s@%s.%s" % (username, random_string(), random_string(2))
+    email = "{}@{}.{}".format(username, random_string(), random_string(2))
    # Dunno why random tld doesn't work, but I'm too lazy now to investigate
-    openid = "http://%s.%s.%s/" % (username, random_string(), "org")
+    openid = "http://{}.{}.{}/".format(username, random_string(), "org")
    first_name = random_string()
    last_name = random_string()
    bsky_test_account = "libravatar.org"
@@ -1,7 +1,7 @@
-# -*- coding: utf-8 -*-
 """
 URLs for ivatar.ivataraccount
 """
+
 from django.urls import path, re_path

 from django.contrib.auth.views import LogoutView
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 View classes for ivatar/ivataraccount/
 """
@@ -140,7 +139,7 @@ class PasswordSetView(SuccessMessageMixin, FormView):
    success_url = reverse_lazy("profile")

    def get_form_kwargs(self):
-        kwargs = super(PasswordSetView, self).get_form_kwargs()
+        kwargs = super().get_form_kwargs()
        kwargs["user"] = self.request.user
        return kwargs

@@ -712,7 +711,9 @@ class RemoveUnconfirmedOpenIDView(View):
            )
            openid.delete()
            messages.success(request, _("ID removed"))
-        except self.model.DoesNotExist:  # pragma: no cover pylint: disable=no-member,line-too-long
+        except (
+            self.model.DoesNotExist
+        ):  # pragma: no cover pylint: disable=no-member,line-too-long
            messages.error(request, _("ID does not exist"))
        return HttpResponseRedirect(reverse_lazy("profile"))

@@ -766,7 +767,9 @@ class RedirectOpenIDView(View):
            unconfirmed = self.model.objects.get(  # pylint: disable=no-member
                user=request.user, id=kwargs["openid_id"]
            )
-        except self.model.DoesNotExist:  # pragma: no cover pylint: disable=no-member,line-too-long
+        except (
+            self.model.DoesNotExist
+        ):  # pragma: no cover pylint: disable=no-member,line-too-long
            messages.error(request, _("ID does not exist"))
        return HttpResponseRedirect(reverse_lazy("profile"))

@@ -1321,7 +1324,7 @@ class ExportView(SuccessMessageMixin, TemplateView):
        def xml_account(user):
            escaped_username = saxutils.quoteattr(user.username)
            escaped_password = saxutils.quoteattr(user.password)
-            return " <account username=%s password=%s/>\n" % (
+            return " <account username={} password={}/>\n".format(
                escaped_username,
                escaped_password,
            )
@@ -1387,8 +1390,8 @@ class ExportView(SuccessMessageMixin, TemplateView):
        bytesobj.seek(0)

        response = HttpResponse(content_type="application/gzip")
-        response[
-            "Content-Disposition"
-        ] = f'attachment; filename="libravatar-export_{user.username}.xml.gz"'
+        response["Content-Disposition"] = (
+            f'attachment; filename="libravatar-export_{user.username}.xml.gz"'
+        )
        response.write(bytesobj.read())
        return response
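The remaining views.py hunks match the newer black style pulled in above and are pure formatting: a long except clause gains parentheses around the exception class, and the long Content-Disposition assignment wraps its right-hand side in parentheses instead of splitting the subscript. Both forms parse to exactly the same thing, as this small sketch with generic names shows:

class MissingThing(Exception):
    """Stand-in for a model's DoesNotExist exception."""


def lookup(found: bool) -> str:
    try:
        if not found:
            raise MissingThing()
        return "ok"
    except (
        MissingThing
    ):  # parenthesised form: identical semantics, shorter lines
        return "missing"


headers: dict[str, str] = {}
# Newer black prefers wrapping the right-hand side in parentheses rather than
# splitting the subscript across lines; both assign the same value.
headers["Content-Disposition"] = (
    'attachment; filename="libravatar-export_example.xml.gz"'
)

if __name__ == "__main__":
    assert lookup(False) == "missing" and lookup(True) == "ok"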
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 Middleware classes
 """
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 OpenTelemetry configuration for ivatar project.

@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 OpenTelemetry middleware and custom instrumentation for ivatar.

@@ -94,9 +93,9 @@ class OpenTelemetryMiddleware(MiddlewareMixin):
            span.set_attributes(
                {
                    "http.status_code": response.status_code,
-                    "http.response_size": len(response.content)
-                    if hasattr(response, "content")
-                    else 0,
+                    "http.response_size": (
+                        len(response.content) if hasattr(response, "content") else 0
+                    ),
                    "http.request.duration": duration,
                }
            )
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 Django settings for ivatar project.
 """
ivatar/static/css/bootstrap.min.css (vendored, 50 changed lines)
@@ -260,7 +260,8 @@ th {
 @font-face {
   font-family: "Glyphicons Halflings";
   src: url(../fonts/glyphicons-halflings-regular.eot);
-  src: url(../fonts/glyphicons-halflings-regular.eot?#iefix)
+  src:
+    url(../fonts/glyphicons-halflings-regular.eot?#iefix)
       format("embedded-opentype"),
     url(../fonts/glyphicons-halflings-regular.woff2) format("woff2"),
     url(../fonts/glyphicons-halflings-regular.woff) format("woff"),
@@ -2651,17 +2652,24 @@ output {
   border-radius: 4px;
   -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075);
   box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075);
-  -webkit-transition: border-color ease-in-out 0.15s,
+  -webkit-transition:
+    border-color ease-in-out 0.15s,
     -webkit-box-shadow ease-in-out 0.15s;
-  -o-transition: border-color ease-in-out 0.15s, box-shadow ease-in-out 0.15s;
-  transition: border-color ease-in-out 0.15s, box-shadow ease-in-out 0.15s;
+  -o-transition:
+    border-color ease-in-out 0.15s,
+    box-shadow ease-in-out 0.15s;
+  transition:
+    border-color ease-in-out 0.15s,
+    box-shadow ease-in-out 0.15s;
 }
 .form-control:focus {
   border-color: #66afe9;
   outline: 0;
-  -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075),
+  -webkit-box-shadow:
+    inset 0 1px 1px rgba(0, 0, 0, 0.075),
     0 0 8px rgba(102, 175, 233, 0.6);
-  box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075),
+  box-shadow:
+    inset 0 1px 1px rgba(0, 0, 0, 0.075),
     0 0 8px rgba(102, 175, 233, 0.6);
 }
 .form-control::-moz-placeholder {
@@ -2923,8 +2931,12 @@ textarea.input-lg {
 }
 .has-success .form-control:focus {
   border-color: #2b542c;
-  -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #67b168;
-  box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #67b168;
+  -webkit-box-shadow:
+    inset 0 1px 1px rgba(0, 0, 0, 0.075),
+    0 0 6px #67b168;
+  box-shadow:
+    inset 0 1px 1px rgba(0, 0, 0, 0.075),
+    0 0 6px #67b168;
 }
 .has-success .input-group-addon {
   color: #3c763d;
@@ -2953,8 +2965,12 @@ textarea.input-lg {
 }
 .has-warning .form-control:focus {
   border-color: #66512c;
-  -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #c0a16b;
-  box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #c0a16b;
+  -webkit-box-shadow:
+    inset 0 1px 1px rgba(0, 0, 0, 0.075),
+    0 0 6px #c0a16b;
+  box-shadow:
+    inset 0 1px 1px rgba(0, 0, 0, 0.075),
+    0 0 6px #c0a16b;
 }
 .has-warning .input-group-addon {
   color: #8a6d3b;
@@ -2983,8 +2999,12 @@ textarea.input-lg {
 }
 .has-error .form-control:focus {
   border-color: #843534;
-  -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #ce8483;
-  box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #ce8483;
+  -webkit-box-shadow:
+    inset 0 1px 1px rgba(0, 0, 0, 0.075),
+    0 0 6px #ce8483;
+  box-shadow:
+    inset 0 1px 1px rgba(0, 0, 0, 0.075),
+    0 0 6px #ce8483;
 }
 .has-error .input-group-addon {
   color: #a94442;
@@ -4470,9 +4490,11 @@ textarea.input-group-sm > .input-group-btn > .btn {
   margin-left: -15px;
   border-top: 1px solid transparent;
   border-bottom: 1px solid transparent;
-  -webkit-box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.1),
+  -webkit-box-shadow:
+    inset 0 1px 0 rgba(255, 255, 255, 0.1),
     0 1px 0 rgba(255, 255, 255, 0.1);
-  box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.1),
+  box-shadow:
+    inset 0 1px 0 rgba(255, 255, 255, 0.1),
     0 1px 0 rgba(255, 255, 255, 0.1);
 }
 @media (min-width: 768px) {
@@ -4,7 +4,9 @@
   font-style: normal;
   font-weight: 300;
   src: url("../fonts/lato-v15-latin-300.eot"); /* IE9 Compat Modes */
-  src: local("Lato Light"), local("Lato-Light"),
+  src:
+    local("Lato Light"),
+    local("Lato-Light"),
     url("../fonts/LatoLatin-Light.eot?#iefix") format("embedded-opentype"),
     /* IE6-IE8 */ url("../fonts/LatoLatin-Light.woff2") format("woff2"),
     /* Super Modern Browsers */ url("../fonts/LatoLatin-Light.woff")
@@ -19,7 +21,9 @@
   font-style: normal;
   font-weight: 400;
   src: url("../fonts/lato-v15-latin-regular.eot"); /* IE9 Compat Modes */
-  src: local("Lato Regular"), local("Lato-Regular"),
+  src:
+    local("Lato Regular"),
+    local("Lato-Regular"),
     url("../fonts/LatoLatin-Regular.eot?#iefix") format("embedded-opentype"),
     /* IE6-IE8 */ url("../fonts/LatoLatin-Regular.woff2") format("woff2"),
     /* Super Modern Browsers */ url("../fonts/LatoLatin-Regular.woff")
@@ -35,7 +39,9 @@
   font-style: normal;
   font-weight: 700;
   src: url("../fonts/lato-v15-latin-700.eot"); /* IE9 Compat Modes */
-  src: local("Lato Bold"), local("Lato-Bold"),
+  src:
+    local("Lato Bold"),
+    local("Lato-Bold"),
     url("../fonts/LatoLatin-Bold.eot?#iefix") format("embedded-opentype"),
     /* IE6-IE8 */ url("../fonts/LatoLatin-Bold.woff2") format("woff2"),
     /* Super Modern Browsers */ url("../fonts/LatoLatin-Bold.woff")
@@ -50,7 +56,9 @@
   font-style: normal;
   font-weight: 400;
   src: url("../fonts/open-sans-v16-latin-regular.eot"); /* IE9 Compat Modes */
-  src: local("Open Sans Regular"), local("OpenSans-Regular"),
+  src:
+    local("Open Sans Regular"),
+    local("OpenSans-Regular"),
     url("../fonts/open-sans-v16-latin-regular.eot?#iefix")
       format("embedded-opentype"),
     /* IE6-IE8 */ url("../fonts/open-sans-v16-latin-regular.woff2")
ivatar/static/js/bootstrap.min.js (vendored, 32 changed lines)

Formatting-only re-wrap from the updated prettier hook: the hunks at @@ -199,8 +199,8 @@, @@ -278,8 +278,8 @@, @@ -1106,12 +1106,12 @@, @@ -1233,10 +1233,10 @@ and @@ -1308,8 +1308,8 @@ re-indent nested ternary chains in the minified source; the old and new lines differ only in leading whitespace.
ivatar/static/js/cropper.min.js (vendored, 131 changed lines)

Formatting-only changes from the updated prettier hook: the hunks at @@ -11,9 +11,10 @@, @@ -48,10 +49,14 @@, @@ -107,17 +112,17 @@, @@ -304,10 +309,10 @@, @@ -316,9 +321,9 @@, @@ -336,15 +341,15 @@, @@ -478,8 +483,8 @@, @@ -572,8 +577,8 @@, @@ -611,11 +616,11 @@, @@ -770,8 +775,8 @@, @@ -1011,8 +1016,8 @@, @@ -1176,8 +1181,8 @@, @@ -1196,8 +1201,8 @@, @@ -1216,8 +1221,8 @@, @@ -1236,8 +1241,8 @@, @@ -1458,9 +1463,9 @@, @@ -1777,13 +1782,13 @@ and @@ -2151,12 +2156,12 @@ re-wrap and re-indent long ternary chains and call arguments in the minified source (line breaks, indentation and trailing commas only; no logic changes).
Further JavaScript hunks at @@ -115,13 +115,13 @@ and @@ -463,13 +463,13 @@ apply the same prettier re-indentation to the drag-handler ternaries (options.disabled / startDragMode); the old and new lines differ only in leading whitespace.
349
ivatar/static/js/jquery-3.7.1.min.js
vendored
349
ivatar/static/js/jquery-3.7.1.min.js
vendored
@@ -56,8 +56,8 @@
|
|||||||
return null == e
|
return null == e
|
||||||
? e + ""
|
? e + ""
|
||||||
: "object" == typeof e || "function" == typeof e
|
: "object" == typeof e || "function" == typeof e
|
||||||
? n[i.call(e)] || "object"
|
? n[i.call(e)] || "object"
|
||||||
: typeof e;
|
: typeof e;
|
||||||
}
|
}
|
||||||
var t = "3.7.1",
|
var t = "3.7.1",
|
||||||
l = /HTML$/i,
|
l = /HTML$/i,
|
||||||
@@ -90,8 +90,8 @@
|
|||||||
return null == e
|
return null == e
|
||||||
? ae.call(this)
|
? ae.call(this)
|
||||||
: e < 0
|
: e < 0
|
||||||
? this[e + this.length]
|
? this[e + this.length]
|
||||||
: this[e];
|
: this[e];
|
||||||
},
|
},
|
||||||
pushStack: function (e) {
|
pushStack: function (e) {
|
||||||
var t = ce.merge(this.constructor(), e);
|
var t = ce.merge(this.constructor(), e);
|
||||||
@@ -172,8 +172,8 @@
|
|||||||
i && !Array.isArray(n)
|
i && !Array.isArray(n)
|
||||||
? []
|
? []
|
||||||
: i || ce.isPlainObject(n)
|
: i || ce.isPlainObject(n)
|
||||||
? n
|
? n
|
||||||
: {}),
|
: {}),
|
||||||
(i = !1),
|
(i = !1),
|
||||||
(a[t] = ce.extend(l, o, r)))
|
(a[t] = ce.extend(l, o, r)))
|
||||||
: void 0 !== r && (a[t] = r));
|
: void 0 !== r && (a[t] = r));
|
||||||
@@ -222,10 +222,10 @@
|
|||||||
return 1 === i || 11 === i
|
return 1 === i || 11 === i
|
||||||
? e.textContent
|
? e.textContent
|
||||||
: 9 === i
|
: 9 === i
|
||||||
? e.documentElement.textContent
|
? e.documentElement.textContent
|
||||||
: 3 === i || 4 === i
|
: 3 === i || 4 === i
|
||||||
? e.nodeValue
|
? e.nodeValue
|
||||||
: n;
|
: n;
|
||||||
},
|
},
|
||||||
makeArray: function (e, t) {
|
makeArray: function (e, t) {
|
||||||
var n = t || [];
|
var n = t || [];
|
||||||
@@ -666,13 +666,13 @@
|
|||||||
? e === T || (e.ownerDocument == ye && I.contains(ye, e))
|
? e === T || (e.ownerDocument == ye && I.contains(ye, e))
|
||||||
? -1
|
? -1
|
||||||
: t === T || (t.ownerDocument == ye && I.contains(ye, t))
|
: t === T || (t.ownerDocument == ye && I.contains(ye, t))
|
||||||
? 1
|
? 1
|
||||||
: o
|
: o
|
||||||
? se.call(o, e) - se.call(o, t)
|
? se.call(o, e) - se.call(o, t)
|
||||||
: 0
|
: 0
|
||||||
: 4 & n
|
: 4 & n
|
||||||
? -1
|
? -1
|
||||||
: 1)
|
: 1)
|
||||||
);
|
);
|
||||||
})),
|
})),
|
||||||
T
|
T
|
||||||
@@ -813,17 +813,19 @@
|
|||||||
"=" === r
|
"=" === r
|
||||||
? t === i
|
? t === i
|
||||||
: "!=" === r
|
: "!=" === r
|
||||||
? t !== i
|
? t !== i
|
||||||
: "^=" === r
|
: "^=" === r
|
||||||
? i && 0 === t.indexOf(i)
|
? i && 0 === t.indexOf(i)
|
||||||
: "*=" === r
|
: "*=" === r
|
||||||
? i && -1 < t.indexOf(i)
|
? i && -1 < t.indexOf(i)
|
||||||
: "$=" === r
|
[Bundled minified jQuery (vendored static asset): the remaining hunks, @@ -894,18 +896,18 @@ through @@ -5290,17 +5295,17 @@, appear to contain only formatter line re-wrapping of the minified code; no functional changes.]
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 Test various other parts of ivatar/libravatar in order
 to increase the overall test coverage. Test in here, didn't

@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 Tests for file upload security enhancements
 """

@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 Tests for OpenTelemetry integration in ivatar.

@@ -1,7 +1,7 @@
-# -*- coding: utf-8 -*-
 """
 Test our views in ivatar.ivataraccount.views and ivatar.views
 """
+
 # pylint: disable=too-many-lines
 import os
 import django
@@ -25,9 +25,9 @@ class Tester(TestCase): # pylint: disable=too-many-public-methods
     user = None
     username = random_string()
     password = random_string()
-    email = "%s@%s.%s" % (username, random_string(), random_string(2))
+    email = "{}@{}.{}".format(username, random_string(), random_string(2))
     # Dunno why random tld doesn't work, but I'm too lazy now to investigate
-    openid = "http://%s.%s.%s/" % (username, random_string(), "org")
+    openid = "http://{}.{}.{}/".format(username, random_string(), "org")

     def login(self):
         """
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 Test our utils from ivatar.utils
 """

@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 Test our views in ivatar.ivataraccount.views and ivatar.views
 """
@@ -31,9 +30,9 @@ class Tester(TestCase): # pylint: disable=too-many-public-methods
     user = None
     username = random_string()
     password = random_string()
-    email = "%s@%s.%s" % (username, random_string(), random_string(2))
+    email = "{}@{}.{}".format(username, random_string(), random_string(2))
     # Dunno why random tld doesn't work, but I'm too lazy now to investigate
-    openid = "http://%s.%s.%s/" % (username, random_string(), "org")
+    openid = "http://{}.{}.{}/".format(username, random_string(), "org")

     def login(self):
         """

@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 Test our StatsView in ivatar.views
 """
@@ -1,7 +1,7 @@
-# -*- coding: utf-8 -*-
 """
 Unit tests for WSGI
 """
+
 import unittest

 import os

@@ -1,7 +1,7 @@
-# -*- coding: utf-8 -*-
 """
 Classes for our ivatar.tools.forms
 """
+
 from django import forms
 from django.utils.translation import gettext_lazy as _
 from django.core.exceptions import ValidationError
@@ -1,7 +1,7 @@
-# -*- coding: utf-8 -*-
 """
 Test our views in ivatar.ivataraccount.views and ivatar.views
 """
+
 # pylint: disable=too-many-lines
 import os
 import django
@@ -28,9 +28,9 @@ class Tester(TestCase): # pylint: disable=too-many-public-methods
     user = None
     username = random_string()
     password = random_string()
-    email = "%s@%s.%s" % (username, random_string(), random_string(2))
+    email = "{}@{}.{}".format(username, random_string(), random_string(2))
     # Dunno why random tld doesn't work, but I'm too lazy now to investigate
-    openid = "http://%s.%s.%s/" % (username, random_string(), "org")
+    openid = "http://{}.{}.{}/".format(username, random_string(), "org")

     def login(self):
         """

@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 ivatar/tools URL configuration
 """

@@ -1,7 +1,7 @@
-# -*- coding: utf-8 -*-
 """
 View classes for ivatar/tools/
 """
+
 from socket import inet_ntop, AF_INET6
 import hashlib
 import random

@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 ivatar URL configuration
 """
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 Simple module providing reusable random_string function
 """
@@ -179,6 +178,16 @@ def random_string(length=10):
     )


+def generate_random_email():
+    """
+    Generate a random email address using the same pattern as test_views.py
+    """
+    username = random_string()
+    domain = random_string()
+    tld = random_string(2)
+    return f"{username}@{domain}.{tld}"
+
+
 def random_ip_address():
     """
     Return a random IP address (IPv4)
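A quick illustration (not part of the diff) of what the new helper returns, assuming the random_string() shown above (lowercase letters and digits, default length 10, two-character TLD):

# Illustration only: exercising the new helper from ivatar.utils.
from ivatar.utils import generate_random_email, random_string

email = generate_random_email()
# e.g. "q3k9x2m1aa@7hw0plz4sn.fr" -- the exact value is random on every call
local_part, _, rest = email.partition("@")
assert len(local_part) == len(random_string())  # both default to 10 characters
assert rest.count(".") == 1                     # domain plus a 2-char TLD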
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 views under /
 """
@@ -762,9 +761,9 @@ class StatsView(TemplateView, JsonResponse):
             )

         retval["photo_size_stats"] = {
-            "average_size_bytes": round(avg_size_bytes, 2)
-            if avg_size_bytes
-            else 0,
+            "average_size_bytes": (
+                round(avg_size_bytes, 2) if avg_size_bytes else 0
+            ),
             "average_size_kb": avg_size_kb,
             "average_size_mb": avg_size_mb,
             "total_photos_analyzed": photo_count,
@@ -839,7 +838,7 @@ def _get_git_info_from_files():
         if not path.exists(head_file):
             return None

-        with open(head_file, "r") as f:
+        with open(head_file) as f:
             head_content = f.read().strip()

         # Parse HEAD content
@@ -851,7 +850,7 @@ def _get_git_info_from_files():
             # Read the commit hash from the ref
             ref_file = path.join(git_dir, branch_ref)
             if path.exists(ref_file):
-                with open(ref_file, "r") as f:
+                with open(ref_file) as f:
                     commit_hash = f.read().strip()
             else:
                 return None
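For readers unfamiliar with the file-based lookup this helper performs, a minimal self-contained sketch of the same idea follows; the paths and parsing details here are assumptions for illustration, not the project's exact implementation:

# Illustration: resolve HEAD to a commit hash by reading .git directly,
# without shelling out to git.
from os import path


def read_head_commit(git_dir=".git"):
    """Return the commit hash HEAD points to, or None if unreadable."""
    head_file = path.join(git_dir, "HEAD")
    if not path.exists(head_file):
        return None
    with open(head_file) as f:
        head_content = f.read().strip()
    if head_content.startswith("ref: "):
        ref_file = path.join(git_dir, head_content[5:])
        if not path.exists(ref_file):
            return None
        with open(ref_file) as f:
            return f.read().strip()
    return head_content  # detached HEAD already stores the hash itself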
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 WSGI config for ivatar project.

@@ -1,5 +1,4 @@
 #!/usr/bin/env python3
-# -*- coding: utf-8 -*-

 import urllib.request
 import sys

@@ -1,5 +1,4 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
 import os
 import sys

@@ -35,6 +35,7 @@ opentelemetry-instrumentation-urllib3>=0.42b0
 opentelemetry-sdk>=1.20.0
 Pillow
 pip
+prettytable
 prometheus-client>=0.20.0
 psycopg2-binary
 py3dns
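prettytable is pulled in because the reworked performance script renders its per-style results as a table. A minimal sketch of the API surface it relies on; the column names match the script further below, the rows here are made up:

# Minimal PrettyTable sketch -- rows are illustrative only.
from prettytable import PrettyTable

table = PrettyTable()
table.field_names = ["Avatar Style", "Size", "Time (ms)"]
table.align["Avatar Style"] = "l"   # left-align the style column
table.align["Time (ms)"] = "r"      # right-align the numeric column
table.add_row(["identicon", 80, "12.34"])
table.add_row(["identicon", 256, "15.01"])
print(table)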
@@ -1,5 +1,4 @@
 #!/usr/bin/env python3
-# -*- coding: utf-8 -*-
 """
 Libravatar Deployment Verification Script

@@ -18,6 +17,7 @@ Usage:

 import argparse
 import json
+import os
 import random
 import ssl
 import subprocess
@@ -54,12 +54,19 @@ class Colors:


 def colored_print(message: str, color: str = Colors.NC) -> None:
-    """Print a colored message."""
-    print(f"{color}{message}{Colors.NC}")
+    """Print a colored message with immediate flush."""
+    print(f"{color}{message}{Colors.NC}", flush=True)


 def get_current_commit_hash() -> Optional[str]:
-    """Get the current commit hash from git."""
+    """Get the current commit hash from git or CI environment."""
+    # First try GitLab CI environment variable (most reliable in CI)
+    ci_commit = os.environ.get("CI_COMMIT_SHA")
+    if ci_commit:
+        colored_print(f"Using CI commit hash: {ci_commit}", Colors.BLUE)
+        return ci_commit
+
+    # Fallback to git command
     try:
         result = subprocess.run(
             ["git", "rev-parse", "HEAD"],
@@ -67,8 +74,11 @@ def get_current_commit_hash() -> Optional[str]:
             text=True,
             check=True,
         )
-        return result.stdout.strip()
+        commit_hash = result.stdout.strip()
+        colored_print(f"Using git commit hash: {commit_hash}", Colors.BLUE)
+        return commit_hash
     except (subprocess.CalledProcessError, FileNotFoundError):
+        colored_print("Could not determine current commit hash", Colors.RED)
         return None

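Because the CI side can hold a full 40-character hash while the deployed site may report a short one, the verification logic further down accepts prefix matches on the first eight characters. A minimal sketch of that rule, with made-up hash values:

# Sketch of the prefix-tolerant match used during deployment verification.
def same_commit(expected: str, deployed: str) -> bool:
    """True when either hash is a prefix-equivalent of the other."""
    return (
        deployed == expected
        or deployed.startswith(expected[:8])
        or expected.startswith(deployed[:8])
    )


assert same_commit("0123456789abcdef0123456789abcdef01234567", "01234567")
assert not same_commit("0123456789abcdef0123456789abcdef01234567", "89abcdef")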
@@ -82,16 +92,44 @@ def is_commit_newer_or_equal(commit1: str, commit2: str) -> Optional[bool]:
         None if comparison fails
     """
     try:
-        # Use git merge-base to check if commit1 is reachable from commit2
-        # If commit1 is newer or equal, it should be reachable from commit2
-        subprocess.run(
-            ["git", "merge-base", "--is-ancestor", commit2, commit1],
-            capture_output=True,
-            check=True,
-        )
-        return True
+        # First try to get commit timestamps for comparison
+        try:
+            result1 = subprocess.run(
+                ["git", "show", "-s", "--format=%ct", commit1],
+                capture_output=True,
+                text=True,
+                check=True,
+            )
+            result2 = subprocess.run(
+                ["git", "show", "-s", "--format=%ct", commit2],
+                capture_output=True,
+                text=True,
+                check=True,
+            )
+
+            timestamp1 = int(result1.stdout.strip())
+            timestamp2 = int(result2.stdout.strip())
+
+            colored_print(f"Commit {commit1[:8]} timestamp: {timestamp1}", Colors.BLUE)
+            colored_print(f"Commit {commit2[:8]} timestamp: {timestamp2}", Colors.BLUE)
+
+            # commit1 is newer if it has a later timestamp
+            return timestamp1 >= timestamp2
+
+        except (subprocess.CalledProcessError, ValueError):
+            # Fallback to merge-base if timestamp comparison fails
+            colored_print("Timestamp comparison failed, trying merge-base", Colors.YELLOW)
+
+            # Use git merge-base to check if commit2 is ancestor of commit1
+            subprocess.run(
+                ["git", "merge-base", "--is-ancestor", commit2, commit1],
+                capture_output=True,
+                check=True,
+            )
+            return True
+
     except subprocess.CalledProcessError:
-        # If the above fails, try the reverse - check if commit2 is newer
+        # If the above fails, try the reverse
         try:
             subprocess.run(
                 ["git", "merge-base", "--is-ancestor", commit1, commit2],
@@ -100,8 +138,11 @@ def is_commit_newer_or_equal(commit1: str, commit2: str) -> Optional[bool]:
             )
             return False
         except subprocess.CalledProcessError:
-            # If both fail, we can't determine the relationship
+            colored_print("Git comparison failed - shallow clone or missing commits", Colors.YELLOW)
             return None
+    except Exception as e:
+        colored_print(f"Git comparison error: {e}", Colors.RED)
+        return None


 def make_request(
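The timestamp comparison above can be reproduced in isolation with the same git plumbing. A self-contained sketch, meant to be run inside a clone that actually contains both commits:

# Standalone sketch of the committer-timestamp comparison used above.
import subprocess


def commit_timestamp(ref: str) -> int:
    """Return the committer timestamp (seconds since epoch) of a commit."""
    out = subprocess.run(
        ["git", "show", "-s", "--format=%ct", ref],
        capture_output=True, text=True, check=True,
    )
    return int(out.stdout.strip())


def newer_or_equal(commit1: str, commit2: str) -> bool:
    # commit1 counts as "newer or equal" when its timestamp is >= commit2's
    return commit_timestamp(commit1) >= commit_timestamp(commit2)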
@@ -346,17 +387,30 @@ def test_deployment(
|
|||||||
|
|
||||||
# Check if we're looking for a specific version and compare
|
# Check if we're looking for a specific version and compare
|
||||||
current_commit = get_current_commit_hash()
|
current_commit = get_current_commit_hash()
|
||||||
|
version_ok = True
|
||||||
|
|
||||||
if current_commit and deployed_commit != "Unknown":
|
if current_commit and deployed_commit != "Unknown":
|
||||||
|
colored_print(f"Expected commit: {current_commit[:8]}...", Colors.BLUE)
|
||||||
|
colored_print(f"Deployed commit: {deployed_commit[:8]}...", Colors.BLUE)
|
||||||
|
|
||||||
if deployed_commit == current_commit:
|
if deployed_commit == current_commit:
|
||||||
colored_print(
|
colored_print(
|
||||||
"✅ Exact version match - deployment is up to date!",
|
"✅ Exact version match - deployment is up to date!",
|
||||||
Colors.GREEN,
|
Colors.GREEN,
|
||||||
)
|
)
|
||||||
|
elif deployed_commit.startswith(current_commit[:8]) or current_commit.startswith(deployed_commit[:8]):
|
||||||
|
# Handle case where we have short vs long commit hashes
|
||||||
|
colored_print(
|
||||||
|
"✅ Version match (short hash) - deployment is up to date!",
|
||||||
|
Colors.GREEN,
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
# Check if deployed version is newer
|
# Check if deployed version is newer using git
|
||||||
comparison = is_commit_newer_or_equal(
|
comparison = is_commit_newer_or_equal(
|
||||||
deployed_commit, current_commit
|
deployed_commit, current_commit
|
||||||
)
|
)
|
||||||
|
colored_print(f"Commit comparison result: {comparison}", Colors.BLUE)
|
||||||
|
|
||||||
if comparison is True:
|
if comparison is True:
|
||||||
colored_print(
|
colored_print(
|
||||||
"ℹ️ Note: A newer version is already deployed (this is fine!)",
|
"ℹ️ Note: A newer version is already deployed (this is fine!)",
|
||||||
@@ -364,43 +418,66 @@ def test_deployment(
|
|||||||
)
|
)
|
||||||
elif comparison is False:
|
elif comparison is False:
|
||||||
colored_print(
|
colored_print(
|
||||||
"⚠️ Warning: Deployed version appears to be older than expected",
|
f"⚠️ Deployed version ({deployed_commit[:8]}) is older than expected ({current_commit[:8]})",
|
||||||
Colors.YELLOW,
|
Colors.YELLOW,
|
||||||
)
|
)
|
||||||
else:
|
|
||||||
colored_print(
|
colored_print(
|
||||||
"⚠️ Warning: Could not determine version relationship",
|
f"Waiting for deployment to update... (attempt {attempt}/{max_retries})",
|
||||||
|
Colors.BLUE,
|
||||||
|
)
|
||||||
|
version_ok = False
|
||||||
|
else:
|
||||||
|
# Git comparison failed - use simple string comparison as fallback
|
||||||
|
colored_print(
|
||||||
|
"⚠️ Git comparison failed - using string comparison fallback",
|
||||||
Colors.YELLOW,
|
Colors.YELLOW,
|
||||||
)
|
)
|
||||||
|
|
||||||
# Run functionality tests
|
# If commits are different, assume we need to wait
|
||||||
colored_print("Running basic functionality tests...", Colors.YELLOW)
|
# This is safer than proceeding with wrong version
|
||||||
|
colored_print(
|
||||||
# Test avatar redirect
|
f"⚠️ Deployed version ({deployed_commit[:8]}) differs from expected ({current_commit[:8]})",
|
||||||
if test_avatar_redirect(base_url):
|
Colors.YELLOW,
|
||||||
colored_print("✅ Invalid avatar redirects correctly", Colors.GREEN)
|
)
|
||||||
|
colored_print(
|
||||||
|
f"Waiting for deployment to update... (attempt {attempt}/{max_retries})",
|
||||||
|
Colors.BLUE,
|
||||||
|
)
|
||||||
|
version_ok = False
|
||||||
|
|
||||||
|
# Only proceed with functionality tests if version is correct
|
||||||
|
if not version_ok:
|
||||||
|
# Version is not correct, skip tests and retry
|
||||||
|
pass # Will continue to retry logic below
|
||||||
else:
|
else:
|
||||||
colored_print("❌ Invalid avatar redirect failed", Colors.RED)
|
# Run functionality tests
|
||||||
return False
|
colored_print("Running basic functionality tests...", Colors.YELLOW)
|
||||||
|
|
||||||
# Test avatar sizing
|
# Test avatar redirect
|
||||||
if test_avatar_sizing(base_url):
|
if test_avatar_redirect(base_url):
|
||||||
pass # Success messages are printed within the function
|
colored_print("✅ Invalid avatar redirects correctly", Colors.GREEN)
|
||||||
else:
|
else:
|
||||||
return False
|
colored_print("❌ Invalid avatar redirect failed", Colors.RED)
|
||||||
|
return False
|
||||||
|
|
||||||
# Test stats endpoint
|
# Test avatar sizing
|
||||||
if test_stats_endpoint(base_url):
|
if test_avatar_sizing(base_url):
|
||||||
colored_print("✅ Stats endpoint working", Colors.GREEN)
|
pass # Success messages are printed within the function
|
||||||
else:
|
else:
|
||||||
colored_print("❌ Stats endpoint failed", Colors.RED)
|
return False
|
||||||
return False
|
|
||||||
|
|
||||||
colored_print(
|
# Test stats endpoint
|
||||||
f"🎉 {name} deployment verification completed successfully!",
|
if test_stats_endpoint(base_url):
|
||||||
Colors.GREEN,
|
colored_print("✅ Stats endpoint working", Colors.GREEN)
|
||||||
)
|
else:
|
||||||
return True
|
colored_print("❌ Stats endpoint failed", Colors.RED)
|
||||||
|
return False
|
||||||
|
|
||||||
|
colored_print(
|
||||||
|
f"🎉 {name} deployment verification completed successfully!",
|
||||||
|
Colors.GREEN,
|
||||||
|
)
|
||||||
|
return True
|
||||||
else:
|
else:
|
||||||
colored_print(f"{name} site not responding yet...", Colors.YELLOW)
|
colored_print(f"{name} site not responding yet...", Colors.YELLOW)
|
||||||
|
|
||||||
@@ -408,7 +485,11 @@ def test_deployment(
|
|||||||
colored_print(
|
colored_print(
|
||||||
f"Waiting {retry_delay} seconds before next attempt...", Colors.BLUE
|
f"Waiting {retry_delay} seconds before next attempt...", Colors.BLUE
|
||||||
)
|
)
|
||||||
time.sleep(retry_delay)
|
# Show progress during wait
|
||||||
|
for remaining in range(retry_delay, 0, -1):
|
||||||
|
print(f"\r⏳ Retrying in {remaining:2d} seconds...", end="", flush=True)
|
||||||
|
time.sleep(1)
|
||||||
|
print("\r" + " " * 30 + "\r", end="", flush=True) # Clear the line
|
||||||
|
|
||||||
colored_print(
|
colored_print(
|
||||||
f"❌ FAILED: {name} deployment verification timed out after {max_retries} attempts",
|
f"❌ FAILED: {name} deployment verification timed out after {max_retries} attempts",
|
||||||
|
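The in-place countdown added to the retry loop boils down to carriage-return rewrites of a single output line. A minimal sketch, with an arbitrary delay value:

# Minimal sketch of the countdown shown during deployment retries.
import time

retry_delay = 5  # seconds; arbitrary for the illustration
for remaining in range(retry_delay, 0, -1):
    print(f"\r⏳ Retrying in {remaining:2d} seconds...", end="", flush=True)
    time.sleep(1)
print("\r" + " " * 30 + "\r", end="", flush=True)  # clear the countdown line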
|||||||
@@ -1,5 +1,4 @@
|
|||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
Performance testing script for Libravatar CI/CD pipeline
|
Performance testing script for Libravatar CI/CD pipeline
|
||||||
|
|
||||||
@@ -12,13 +11,41 @@ import sys
|
|||||||
import time
|
import time
|
||||||
import statistics
|
import statistics
|
||||||
import hashlib
|
import hashlib
|
||||||
|
import random
|
||||||
|
import string
|
||||||
|
from typing import Dict, List, Any, Optional, Tuple
|
||||||
|
|
||||||
# Add project root to path
|
# Add project root to path
|
||||||
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||||
|
|
||||||
|
from libravatar import libravatar_url
|
||||||
|
from urllib.parse import urlsplit
|
||||||
|
from prettytable import PrettyTable
|
||||||
|
|
||||||
|
|
||||||
|
def random_string(length=10):
|
||||||
|
"""Return some random string with default length 10"""
|
||||||
|
return "".join(
|
||||||
|
random.SystemRandom().choice(string.ascii_lowercase + string.digits)
|
||||||
|
for _ in range(length)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# Try to import Django utilities for local testing, fallback to local implementation
|
||||||
|
try:
|
||||||
|
from ivatar.utils import generate_random_email
|
||||||
|
except ImportError:
|
||||||
|
# Use local version for external testing
|
||||||
|
def generate_random_email():
|
||||||
|
"""Generate a random email address using the same pattern as test_views.py"""
|
||||||
|
username = random_string()
|
||||||
|
domain = random_string()
|
||||||
|
tld = random_string(2)
|
||||||
|
return f"{username}@{domain}.{tld}"
|
||||||
|
|
||||||
|
|
||||||
# Django setup - only for local testing
|
# Django setup - only for local testing
|
||||||
def setup_django():
|
def setup_django() -> None:
|
||||||
"""Setup Django for local testing"""
|
"""Setup Django for local testing"""
|
||||||
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ivatar.settings")
|
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ivatar.settings")
|
||||||
import django
|
import django
|
||||||
@@ -29,19 +56,32 @@ def setup_django():
|
|||||||
class PerformanceTestRunner:
|
class PerformanceTestRunner:
|
||||||
"""Main performance test runner"""
|
"""Main performance test runner"""
|
||||||
|
|
||||||
|
# Define all avatar styles and sizes to test
|
||||||
|
AVATAR_STYLES: List[str] = [
|
||||||
|
"identicon",
|
||||||
|
"monsterid",
|
||||||
|
"robohash",
|
||||||
|
"pagan",
|
||||||
|
"retro",
|
||||||
|
"wavatar",
|
||||||
|
"mm",
|
||||||
|
"mmng",
|
||||||
|
]
|
||||||
|
AVATAR_SIZES: List[int] = [80, 256]
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
base_url="http://localhost:8000",
|
base_url: str = "http://localhost:8000",
|
||||||
concurrent_users=10,
|
concurrent_users: int = 10,
|
||||||
test_cache=True,
|
test_cache: bool = True,
|
||||||
remote_testing=False,
|
remote_testing: bool = False,
|
||||||
):
|
) -> None:
|
||||||
self.base_url = base_url
|
self.base_url: str = base_url
|
||||||
self.concurrent_users = concurrent_users
|
self.concurrent_users: int = concurrent_users
|
||||||
self.test_cache = test_cache
|
self.test_cache: bool = test_cache
|
||||||
self.remote_testing = remote_testing
|
self.remote_testing: bool = remote_testing
|
||||||
self.client = None
|
self.client: Optional[Any] = None # Django test client
|
||||||
self.results = {}
|
self.results: Dict[str, Any] = {}
|
||||||
|
|
||||||
# Determine if we're testing locally or remotely
|
# Determine if we're testing locally or remotely
|
||||||
if remote_testing or not base_url.startswith("http://localhost"):
|
if remote_testing or not base_url.startswith("http://localhost"):
|
||||||
@@ -55,7 +95,7 @@ class PerformanceTestRunner:
|
|||||||
|
|
||||||
self.client = Client()
|
self.client = Client()
|
||||||
|
|
||||||
def setup_test_data(self):
|
def setup_test_data(self) -> None:
|
||||||
"""Create test data for performance tests"""
|
"""Create test data for performance tests"""
|
||||||
print("Setting up test data...")
|
print("Setting up test data...")
|
||||||
|
|
||||||
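The reworked script stops hashing emails by hand and lets pyLibravatar build the avatar URL, then keeps only the path and query so the request can be pointed at whichever server is under test. A short sketch of that step; the email, size and base URL are examples:

# Sketch: build an avatar path with pyLibravatar, then re-target it.
from libravatar import libravatar_url
from urllib.parse import urlsplit

full_url = libravatar_url(email="someone@example.com", size=80, default="identicon")
parts = urlsplit(full_url)
url_path = f"{parts.path}?{parts.query}"        # path plus query string, e.g. "/avatar/<hash>?..."
request_url = f"http://localhost:8000{url_path}"  # prepend the server under test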
@@ -79,52 +119,249 @@ class PerformanceTestRunner:
|
|||||||
|
|
||||||
print(f"Created {len(test_emails)} test users and emails")
|
print(f"Created {len(test_emails)} test users and emails")
|
||||||
|
|
||||||
def test_avatar_generation_performance(self):
|
def _generate_test_cases(self) -> List[Dict[str, Any]]:
|
||||||
"""Test avatar generation performance"""
|
"""Generate test cases for all avatar styles and sizes"""
|
||||||
print("\n=== Avatar Generation Performance Test ===")
|
test_cases = []
|
||||||
|
for style in self.AVATAR_STYLES:
|
||||||
|
for size in self.AVATAR_SIZES:
|
||||||
|
test_cases.append({"default": style, "size": size})
|
||||||
|
return test_cases
|
||||||
|
|
||||||
# Test different avatar types and sizes
|
def _test_single_avatar_request(
|
||||||
test_cases = [
|
self, case: Dict[str, Any], email: str, use_requests: bool = False
|
||||||
{"default": "identicon", "size": 80},
|
) -> Dict[str, Any]:
|
||||||
{"default": "monsterid", "size": 80},
|
"""Test a single avatar request - shared logic for local and remote testing"""
|
||||||
{"default": "robohash", "size": 80},
|
# Use libravatar library to generate the URL
|
||||||
{"default": "identicon", "size": 256},
|
full_url = libravatar_url(
|
||||||
{"default": "monsterid", "size": 256},
|
email=email, size=case["size"], default=case["default"]
|
||||||
]
|
)
|
||||||
|
|
||||||
results = []
|
# Extract path and query from the full URL
|
||||||
|
urlobj = urlsplit(full_url)
|
||||||
|
url_path = f"{urlobj.path}?{urlobj.query}"
|
||||||
|
|
||||||
for case in test_cases:
|
start_time = time.time()
|
||||||
# Generate test hash
|
|
||||||
test_email = "perftest@example.com"
|
|
||||||
email_hash = hashlib.md5(test_email.encode()).hexdigest()
|
|
||||||
|
|
||||||
# Build URL
|
if use_requests:
|
||||||
url = f"/avatar/{email_hash}"
|
# Remote testing with requests
|
||||||
params = {"d": case["default"], "s": case["size"]}
|
import requests
|
||||||
|
|
||||||
# Time the request
|
url = f"{self.base_url}{url_path}"
|
||||||
start_time = time.time()
|
try:
|
||||||
response = self.client.get(url, params)
|
response = requests.get(url, timeout=10)
|
||||||
end_time = time.time()
|
end_time = time.time()
|
||||||
|
duration = (end_time - start_time) * 1000
|
||||||
|
|
||||||
duration = (end_time - start_time) * 1000 # Convert to ms
|
# Determine cache status from response headers
|
||||||
|
cache_detail = response.headers.get("x-cache-detail", "").lower()
|
||||||
|
age = response.headers.get("age", "0")
|
||||||
|
cache_status = "unknown"
|
||||||
|
|
||||||
results.append(
|
if "cache hit" in cache_detail or int(age) > 0:
|
||||||
{
|
cache_status = "hit"
|
||||||
|
elif "cache miss" in cache_detail or age == "0":
|
||||||
|
cache_status = "miss"
|
||||||
|
|
||||||
|
return {
|
||||||
"test": f"{case['default']}_{case['size']}px",
|
"test": f"{case['default']}_{case['size']}px",
|
||||||
"duration_ms": duration,
|
"duration_ms": duration,
|
||||||
"status_code": response.status_code,
|
"status_code": response.status_code,
|
||||||
"content_length": len(response.content) if response.content else 0,
|
"content_length": len(response.content) if response.content else 0,
|
||||||
|
"success": response.status_code == 200,
|
||||||
|
"cache_status": cache_status,
|
||||||
|
"cache_detail": cache_detail,
|
||||||
|
"age": age,
|
||||||
|
"full_url": full_url,
|
||||||
|
"email": email,
|
||||||
}
|
}
|
||||||
)
|
except Exception as e:
|
||||||
|
end_time = time.time()
|
||||||
|
duration = (end_time - start_time) * 1000
|
||||||
|
return {
|
||||||
|
"test": f"{case['default']}_{case['size']}px",
|
||||||
|
"duration_ms": duration,
|
||||||
|
"status_code": 0,
|
||||||
|
"success": False,
|
||||||
|
"error": str(e),
|
||||||
|
"cache_status": "error",
|
||||||
|
"full_url": full_url,
|
||||||
|
"email": email,
|
||||||
|
}
|
||||||
|
else:
|
||||||
|
# Local testing with Django test client
|
||||||
|
if self.client is None:
|
||||||
|
raise RuntimeError("Django test client not initialized")
|
||||||
|
response = self.client.get(url_path, follow=True)
|
||||||
|
end_time = time.time()
|
||||||
|
duration = (end_time - start_time) * 1000
|
||||||
|
|
||||||
print(f" {case['default']} ({case['size']}px): {duration:.2f}ms")
|
# Check for cache information in response headers
|
||||||
|
cache_status = "unknown"
|
||||||
|
if hasattr(response, "get") and callable(getattr(response, "get", None)):
|
||||||
|
cache_control = response.get("Cache-Control", "")
|
||||||
|
age = response.get("Age", "0")
|
||||||
|
if age and int(age) > 0:
|
||||||
|
cache_status = "hit"
|
||||||
|
elif "no-cache" in cache_control:
|
||||||
|
cache_status = "miss"
|
||||||
|
else:
|
||||||
|
cache_status = "miss" # Default assumption for first generation
|
||||||
|
|
||||||
|
# Handle content length for different response types
|
||||||
|
content_length = 0
|
||||||
|
if hasattr(response, "content"):
|
||||||
|
content_length = len(response.content) if response.content else 0
|
||||||
|
elif hasattr(response, "streaming_content"):
|
||||||
|
# For FileResponse, we can't easily get content length without consuming the stream
|
||||||
|
content_length = 1 # Just indicate there's content
|
||||||
|
|
||||||
|
return {
|
||||||
|
"test": f"{case['default']}_{case['size']}px",
|
||||||
|
"duration_ms": duration,
|
||||||
|
"status_code": response.status_code,
|
||||||
|
"content_length": content_length,
|
||||||
|
"cache_status": cache_status,
|
||||||
|
"success": response.status_code == 200,
|
||||||
|
"full_url": full_url,
|
||||||
|
"email": email,
|
||||||
|
}
|
||||||
|
|
||||||
|
def _display_avatar_results(self, results: List[Dict[str, Any]]) -> None:
|
||||||
|
"""Display avatar test results using prettytable for perfect alignment"""
|
||||||
|
# Group results by avatar style
|
||||||
|
style_results: Dict[str, List[Dict[str, Any]]] = {}
|
||||||
|
for result in results:
|
||||||
|
style = result["test"].split("_")[0] # Extract style from test name
|
||||||
|
if style not in style_results:
|
||||||
|
style_results[style] = []
|
||||||
|
style_results[style].append(result)
|
||||||
|
|
||||||
|
# Create table
|
||||||
|
table = PrettyTable()
|
||||||
|
table.field_names = ["Avatar Style", "Size", "Time (ms)", "Status", "Cache"]
|
||||||
|
table.align["Avatar Style"] = "l"
|
||||||
|
table.align["Size"] = "r"
|
||||||
|
table.align["Time (ms)"] = "r"
|
||||||
|
table.align["Status"] = "c"
|
||||||
|
table.align["Cache"] = "c"
|
||||||
|
|
||||||
|
# Add data to table
|
||||||
|
styles_with_data = [
|
||||||
|
style for style in self.AVATAR_STYLES if style in style_results
|
||||||
|
]
|
||||||
|
|
||||||
|
for i, style in enumerate(styles_with_data):
|
||||||
|
style_data = style_results[style]
|
||||||
|
successful_results = [r for r in style_data if r.get("success", True)]
|
||||||
|
failed_results = [r for r in style_data if not r.get("success", True)]
|
||||||
|
|
||||||
|
if successful_results:
|
||||||
|
# Calculate average
|
||||||
|
avg_duration = statistics.mean(
|
||||||
|
[r["duration_ms"] for r in successful_results]
|
||||||
|
)
|
||||||
|
|
||||||
|
# Determine overall cache status
|
||||||
|
cache_statuses = [
|
||||||
|
r["cache_status"]
|
||||||
|
for r in successful_results
|
||||||
|
if r["cache_status"] != "unknown"
|
||||||
|
]
|
||||||
|
if not cache_statuses:
|
||||||
|
cache_summary = "unknown"
|
||||||
|
elif all(status == "hit" for status in cache_statuses):
|
||||||
|
cache_summary = "hit"
|
||||||
|
elif all(status == "miss" for status in cache_statuses):
|
||||||
|
cache_summary = "miss"
|
||||||
|
else:
|
||||||
|
cache_summary = "mixed"
|
||||||
|
|
||||||
|
# Determine status icon for average line
|
||||||
|
if len(failed_results) == 0:
|
||||||
|
avg_status_icon = "✅" # All successful
|
||||||
|
elif len(successful_results) == 0:
|
||||||
|
avg_status_icon = "❌" # All failed
|
||||||
|
else:
|
||||||
|
avg_status_icon = "⚠️" # Mixed results
|
||||||
|
|
||||||
|
# Add average row
|
||||||
|
table.add_row(
|
||||||
|
[
|
||||||
|
f"{style} (avg)",
|
||||||
|
"",
|
||||||
|
f"{avg_duration:.2f}",
|
||||||
|
avg_status_icon,
|
||||||
|
cache_summary,
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
# Add individual size rows
|
||||||
|
for result in style_data:
|
||||||
|
size = result["test"].split("_")[1] # Extract size from test name
|
||||||
|
status_icon = "✅" if result.get("success", True) else "❌"
|
||||||
|
cache_status = result["cache_status"]
|
||||||
|
|
||||||
|
if result.get("success", True):
|
||||||
|
table.add_row(
|
||||||
|
[
|
||||||
|
"",
|
||||||
|
size,
|
||||||
|
f"{result['duration_ms']:.2f}",
|
||||||
|
status_icon,
|
||||||
|
cache_status,
|
||||||
|
]
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
error_msg = result.get("error", "Failed")
|
||||||
|
table.add_row(["", size, error_msg, status_icon, cache_status])
|
||||||
|
else:
|
||||||
|
# All requests failed
|
||||||
|
table.add_row([f"{style} (avg)", "", "Failed", "❌", "error"])
|
||||||
|
for result in style_data:
|
||||||
|
size = result["test"].split("_")[1]
|
||||||
|
error_msg = result.get("error", "Failed")
|
||||||
|
table.add_row(["", size, error_msg, "❌", "error"])
|
||||||
|
|
||||||
|
# Add divider line between styles (except after the last style)
|
||||||
|
if i < len(styles_with_data) - 1:
|
||||||
|
table.add_row(["-" * 15, "-" * 5, "-" * 9, "-" * 6, "-" * 5])
|
||||||
|
|
||||||
|
print(table)
|
||||||
|
|
||||||
|
def test_avatar_generation_performance(self) -> None:
|
||||||
|
"""Test avatar generation performance"""
|
||||||
|
print("\n=== Avatar Generation Performance Test ===")
|
||||||
|
|
||||||
|
# Generate test cases for all avatar styles and sizes
|
||||||
|
test_cases = self._generate_test_cases()
|
||||||
|
results = []
|
||||||
|
|
||||||
|
# Generate random email for testing
|
||||||
|
test_email = generate_random_email()
|
||||||
|
print(f" Testing with email: {test_email}")
|
||||||
|
|
||||||
|
for case in test_cases:
|
||||||
|
result = self._test_single_avatar_request(
|
||||||
|
case, test_email, use_requests=False
|
||||||
|
)
|
||||||
|
results.append(result)
|
||||||
|
|
||||||
|
# Show example URL from first result
|
||||||
|
if results:
|
||||||
|
print(f" Example URL: {results[0]['full_url']}")
|
||||||
|
|
||||||
|
# Display results grouped by style
|
||||||
|
self._display_avatar_results(results)
|
||||||
|
|
||||||
# Calculate statistics
|
# Calculate statistics
|
||||||
durations = [r["duration_ms"] for r in results]
|
successful_results = [r for r in results if r.get("success", True)]
|
||||||
avg_duration = statistics.mean(durations)
|
if successful_results:
|
||||||
max_duration = max(durations)
|
durations = [r["duration_ms"] for r in successful_results]
|
||||||
|
avg_duration = statistics.mean(durations)
|
||||||
|
max_duration = max(durations)
|
||||||
|
else:
|
||||||
|
avg_duration = 0
|
||||||
|
max_duration = 0
|
||||||
|
|
||||||
print(f"\n Average: {avg_duration:.2f}ms")
|
print(f"\n Average: {avg_duration:.2f}ms")
|
||||||
print(f" Maximum: {max_duration:.2f}ms")
|
print(f" Maximum: {max_duration:.2f}ms")
|
||||||
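The remote test path infers cache behaviour from response headers. A condensed sketch of that check; the header names follow the ones read in the script, the URL is an example:

# Sketch: classify a response as cache hit or miss from its headers.
import requests

response = requests.get("http://localhost:8000/avatar/example", timeout=10)
cache_detail = response.headers.get("x-cache-detail", "").lower()
age = response.headers.get("age", "0")

if "cache hit" in cache_detail or int(age) > 0:
    cache_status = "hit"     # served from an intermediate cache
elif "cache miss" in cache_detail or age == "0":
    cache_status = "miss"    # generated fresh by the application
else:
    cache_status = "unknown"
print(cache_status)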
@@ -143,7 +380,7 @@ class PerformanceTestRunner:
|
|||||||
"results": results,
|
"results": results,
|
||||||
}
|
}
|
||||||
|
|
||||||
def test_concurrent_load(self):
|
def test_concurrent_load(self, response_threshold: int = 1000, p95_threshold: int = 2000) -> None:
|
||||||
"""Test concurrent load handling"""
|
"""Test concurrent load handling"""
|
||||||
print("\n=== Concurrent Load Test ===")
|
print("\n=== Concurrent Load Test ===")
|
||||||
|
|
||||||
@@ -160,6 +397,11 @@ class PerformanceTestRunner:
|
|||||||
successful_requests = [r for r in results if r["success"]]
|
successful_requests = [r for r in results if r["success"]]
|
||||||
failed_requests = [r for r in results if not r["success"]]
|
failed_requests = [r for r in results if not r["success"]]
|
||||||
|
|
||||||
|
# Analyze cache performance
|
||||||
|
cache_hits = [r for r in results if r.get("cache_status") == "hit"]
|
||||||
|
cache_misses = [r for r in results if r.get("cache_status") == "miss"]
|
||||||
|
cache_errors = [r for r in results if r.get("cache_status") == "error"]
|
||||||
|
|
||||||
total_duration = (
|
total_duration = (
|
||||||
sum(r["duration_ms"] for r in results) / 1000
|
sum(r["duration_ms"] for r in results) / 1000
|
||||||
) # Convert to seconds
|
) # Convert to seconds
|
||||||
@@ -168,6 +410,20 @@ class PerformanceTestRunner:
|
|||||||
print(f" Successful requests: {len(successful_requests)}/{num_requests}")
|
print(f" Successful requests: {len(successful_requests)}/{num_requests}")
|
||||||
print(f" Failed requests: {len(failed_requests)}")
|
print(f" Failed requests: {len(failed_requests)}")
|
||||||
|
|
||||||
|
# Show cache statistics if available
|
||||||
|
if cache_hits or cache_misses:
|
||||||
|
print(f" Cache hits: {len(cache_hits)}")
|
||||||
|
print(f" Cache misses: {len(cache_misses)}")
|
||||||
|
if cache_errors:
|
||||||
|
print(f" Cache errors: {len(cache_errors)}")
|
||||||
|
|
||||||
|
cache_hit_rate = (
|
||||||
|
len(cache_hits) / (len(cache_hits) + len(cache_misses)) * 100
|
||||||
|
if (cache_hits or cache_misses)
|
||||||
|
else 0
|
||||||
|
)
|
||||||
|
print(f" Cache hit rate: {cache_hit_rate:.1f}%")
|
||||||
|
|
||||||
if successful_requests:
|
if successful_requests:
|
||||||
durations = [r["duration_ms"] for r in successful_requests]
|
durations = [r["duration_ms"] for r in successful_requests]
|
||||||
avg_duration = statistics.mean(durations)
|
avg_duration = statistics.mean(durations)
|
||||||
@@ -192,10 +448,10 @@ class PerformanceTestRunner:
|
|||||||
# Performance evaluation
|
# Performance evaluation
|
||||||
if len(failed_requests) > 0:
|
if len(failed_requests) > 0:
|
||||||
print(" ⚠️ WARNING: Some operations failed under load")
|
print(" ⚠️ WARNING: Some operations failed under load")
|
||||||
elif p95_duration > 2000: # 2 seconds
|
elif p95_duration > p95_threshold:
|
||||||
print(" ⚠️ WARNING: 95th percentile response time exceeds 2s")
|
print(f" ⚠️ WARNING: 95th percentile response time exceeds {p95_threshold}ms")
|
||||||
elif avg_duration > 1000: # 1 second
|
elif avg_duration > response_threshold:
|
||||||
print(" ⚠️ CAUTION: Average response time exceeds 1s under load")
|
print(f" ⚠️ CAUTION: Average response time exceeds {response_threshold}ms under load")
|
||||||
else:
|
else:
|
||||||
print(" ✅ Load handling is good")
|
print(" ✅ Load handling is good")
|
||||||
else:
|
else:
|
||||||
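The load-test warnings hinge on an average and a 95th-percentile latency compared against the new configurable thresholds. A small sketch of that computation; the durations are made-up samples and the threshold values mirror the defaults of the response_threshold and p95_threshold parameters above:

# Sketch: derive average and p95 latency, then apply thresholds.
import statistics

durations_ms = [120.0, 150.0, 180.0, 240.0, 900.0]        # made-up samples
avg_duration = statistics.mean(durations_ms)
p95_duration = statistics.quantiles(durations_ms, n=100)[94]  # 95th percentile

response_threshold = 1000  # ms
p95_threshold = 2000       # ms
if p95_duration > p95_threshold:
    print("p95 latency above threshold")
elif avg_duration > response_threshold:
    print("average latency above threshold")
else:
    print("load handling looks good")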
@@ -212,29 +468,51 @@ class PerformanceTestRunner:
|
|||||||
"requests_per_second": (
|
"requests_per_second": (
|
||||||
len(successful_requests) / total_duration if total_duration > 0 else 0
|
len(successful_requests) / total_duration if total_duration > 0 else 0
|
||||||
),
|
),
|
||||||
|
"cache_hits": len(cache_hits),
|
||||||
|
"cache_misses": len(cache_misses),
|
||||||
|
"cache_errors": len(cache_errors),
|
||||||
|
"cache_hit_rate": (
|
||||||
|
len(cache_hits) / (len(cache_hits) + len(cache_misses)) * 100
|
||||||
|
if (cache_hits or cache_misses)
|
||||||
|
else 0
|
||||||
|
),
|
||||||
}
|
}
|
||||||
|
|
||||||
def _test_remote_concurrent_load(self, num_requests):
|
def _test_remote_concurrent_load(self, num_requests: int) -> List[Dict[str, Any]]:
|
||||||
"""Test concurrent load against remote server"""
|
"""Test concurrent load against remote server"""
|
||||||
import requests # noqa: F401
|
import requests # noqa: F401
|
||||||
from concurrent.futures import ThreadPoolExecutor, as_completed
|
from concurrent.futures import ThreadPoolExecutor, as_completed
|
||||||
|
|
||||||
def make_remote_request(thread_id):
|
def make_remote_request(thread_id):
|
||||||
test_email = f"perftest{thread_id % 10}@example.com"
|
test_email = generate_random_email()
|
||||||
email_hash = hashlib.md5(test_email.encode()).hexdigest()
|
|
||||||
url = f"{self.base_url}/avatar/{email_hash}"
|
# Use libravatar library to generate the URL
|
||||||
params = {"d": "identicon", "s": 80}
|
full_url = libravatar_url(email=test_email, size=80, default="identicon")
|
||||||
|
urlobj = urlsplit(full_url)
|
||||||
|
url_path = f"{urlobj.path}?{urlobj.query}"
|
||||||
|
url = f"{self.base_url}{url_path}"
|
||||||
|
|
||||||
start_time = time.time()
|
start_time = time.time()
|
||||||
try:
|
try:
|
||||||
response = requests.get(url, params=params, timeout=10)
|
response = requests.get(url, timeout=10)
|
||||||
end_time = time.time()
|
end_time = time.time()
|
||||||
|
|
||||||
|
# Determine cache status
|
||||||
|
cache_detail = response.headers.get("x-cache-detail", "").lower()
|
||||||
|
age = response.headers.get("age", "0")
|
||||||
|
cache_status = "unknown"
|
||||||
|
|
||||||
|
if "cache hit" in cache_detail or int(age) > 0:
|
||||||
|
cache_status = "hit"
|
||||||
|
elif "cache miss" in cache_detail or age == "0":
|
||||||
|
cache_status = "miss"
|
||||||
|
|
||||||
return {
|
return {
|
||||||
"thread_id": thread_id,
|
"thread_id": thread_id,
|
||||||
"duration_ms": (end_time - start_time) * 1000,
|
"duration_ms": (end_time - start_time) * 1000,
|
||||||
"status_code": response.status_code,
|
"status_code": response.status_code,
|
||||||
"success": response.status_code == 200,
|
"success": response.status_code == 200,
|
||||||
|
"cache_status": cache_status,
|
||||||
}
|
}
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
end_time = time.time()
|
end_time = time.time()
|
||||||
@@ -243,6 +521,7 @@ class PerformanceTestRunner:
|
|||||||
"duration_ms": (end_time - start_time) * 1000,
|
"duration_ms": (end_time - start_time) * 1000,
|
||||||
"success": False,
|
"success": False,
|
||||||
"error": str(e),
|
"error": str(e),
|
||||||
|
"cache_status": "error",
|
||||||
}
|
}
|
||||||
|
|
||||||
results = []
|
results = []
|
||||||
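The remote load test fans requests out with a thread pool and gathers results as they finish. A minimal self-contained sketch of that pattern; the worker count, request count and URL are examples:

# Sketch: issue N requests concurrently and collect per-request timings.
import time
from concurrent.futures import ThreadPoolExecutor, as_completed

import requests


def timed_get(url: str) -> float:
    start = time.time()
    requests.get(url, timeout=10)
    return (time.time() - start) * 1000  # duration in ms


url = "http://localhost:8000/avatar/example?d=identicon&s=80"
with ThreadPoolExecutor(max_workers=5) as pool:
    futures = [pool.submit(timed_get, url) for _ in range(20)]
    durations = [f.result() for f in as_completed(futures)]
print(f"{len(durations)} requests, slowest {max(durations):.1f}ms")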
@@ -260,7 +539,7 @@ class PerformanceTestRunner:
|
|||||||
|
|
||||||
return results
|
return results
|
||||||
|
|
||||||
def _test_local_concurrent_load(self, num_requests):
|
def _test_local_concurrent_load(self, num_requests: int) -> List[Dict[str, Any]]:
|
||||||
"""Test concurrent load locally using avatar generation functions"""
|
"""Test concurrent load locally using avatar generation functions"""
|
||||||
results = []
|
results = []
|
||||||
|
|
||||||
@@ -269,7 +548,7 @@ class PerformanceTestRunner:
|
|||||||
import Identicon
|
import Identicon
|
||||||
|
|
||||||
for i in range(num_requests):
|
for i in range(num_requests):
|
||||||
test_email = f"perftest{i % 10}@example.com"
|
test_email = generate_random_email()
|
||||||
email_hash = hashlib.md5(test_email.encode()).hexdigest()
|
email_hash = hashlib.md5(test_email.encode()).hexdigest()
|
||||||
|
|
||||||
request_start = time.time()
|
request_start = time.time()
|
||||||
@@ -283,6 +562,7 @@ class PerformanceTestRunner:
|
|||||||
"thread_id": i,
|
"thread_id": i,
|
||||||
"duration_ms": (request_end - request_start) * 1000,
|
"duration_ms": (request_end - request_start) * 1000,
|
||||||
"success": len(identicon_data) > 0,
|
"success": len(identicon_data) > 0,
|
||||||
|
"cache_status": "miss", # Direct generation is always a cache miss
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
@@ -293,6 +573,7 @@ class PerformanceTestRunner:
|
|||||||
"duration_ms": (request_end - request_start) * 1000,
|
"duration_ms": (request_end - request_start) * 1000,
|
||||||
"success": False,
|
"success": False,
|
||||||
"error": str(e),
|
"error": str(e),
|
||||||
|
"cache_status": "error",
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -314,6 +595,7 @@ class PerformanceTestRunner:
|
|||||||
"thread_id": i,
|
"thread_id": i,
|
||||||
"duration_ms": (request_end - request_start) * 1000,
|
"duration_ms": (request_end - request_start) * 1000,
|
||||||
"success": True,
|
"success": True,
|
||||||
|
"cache_status": "n/a", # Database queries don't use image cache
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
@@ -324,12 +606,13 @@ class PerformanceTestRunner:
|
|||||||
"duration_ms": (request_end - request_start) * 1000,
|
"duration_ms": (request_end - request_start) * 1000,
|
||||||
"success": False,
|
"success": False,
|
||||||
"error": str(e),
|
"error": str(e),
|
||||||
|
"cache_status": "error",
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
return results
|
return results
|
||||||
|
|
||||||
def test_database_performance(self):
|
def test_database_performance(self) -> None:
|
||||||
"""Test database query performance"""
|
"""Test database query performance"""
|
||||||
print("\n=== Database Performance Test ===")
|
print("\n=== Database Performance Test ===")
|
||||||
|
|
||||||
@@ -378,7 +661,7 @@ class PerformanceTestRunner:
|
|||||||
else:
|
else:
|
||||||
print(f" ✅ Database query count is reasonable ({query_count} queries)")
|
print(f" ✅ Database query count is reasonable ({query_count} queries)")
|
||||||
|
|
||||||
def test_cache_performance(self):
|
def test_cache_performance(self) -> None:
|
||||||
"""Test caching effectiveness"""
|
"""Test caching effectiveness"""
|
||||||
if not self.test_cache:
|
if not self.test_cache:
|
||||||
print("\n=== Cache Performance Test ===")
|
print("\n=== Cache Performance Test ===")
|
||||||
@@ -387,18 +670,17 @@ class PerformanceTestRunner:
|
|||||||
|
|
||||||
print("\n=== Cache Performance Test ===")
|
print("\n=== Cache Performance Test ===")
|
||||||
|
|
||||||
# Use an actual email address that exists in the system
|
# Generate a random email address for cache testing
|
||||||
test_email = "dev@libravatar.org"
|
test_email = generate_random_email()
|
||||||
email_hash = hashlib.md5(test_email.encode()).hexdigest()
|
|
||||||
print(f" Testing with: {test_email}")
|
print(f" Testing with: {test_email}")
|
||||||
|
|
||||||
if self.remote_testing:
|
if self.remote_testing:
|
||||||
first_duration, second_duration = self._test_remote_cache_performance(
|
first_duration, second_duration = self._test_remote_cache_performance(
|
||||||
email_hash
|
test_email
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
first_duration, second_duration = self._test_local_cache_performance(
|
first_duration, second_duration = self._test_local_cache_performance(
|
||||||
email_hash
|
test_email
|
||||||
)
|
)
|
||||||
|
|
||||||
print(f" First request: {first_duration:.2f}ms")
|
print(f" First request: {first_duration:.2f}ms")
|
||||||
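Several hunks above replace fixed perftest addresses with generate_random_email(), whose definition lies outside this excerpt. A plausible stand-in, purely illustrative and not the script's actual implementation, would be:

# Hypothetical stand-in for the generate_random_email() helper referenced above;
# the real definition is elsewhere in the script and may differ.
import random
import string


def generate_random_email() -> str:
    # Random local part so repeated runs don't keep hitting the same cached avatar.
    local_part = "".join(random.choices(string.ascii_lowercase + string.digits, k=12))
    return f"{local_part}@example.com"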
@@ -453,16 +735,19 @@ class PerformanceTestRunner:
             "cache_headers": getattr(self, "cache_info", {}),
         }
 
-    def _test_remote_cache_performance(self, email_hash):
+    def _test_remote_cache_performance(self, email: str) -> Tuple[float, float]:
         """Test cache performance against remote server"""
         import requests
 
-        url = f"{self.base_url}/avatar/{email_hash}"
-        params = {"d": "identicon", "s": 80}
+        # Use libravatar library to generate the URL
+        full_url = libravatar_url(email=email, size=80, default="identicon")
+        urlobj = urlsplit(full_url)
+        url_path = f"{urlobj.path}?{urlobj.query}"
+        url = f"{self.base_url}{url_path}"
 
         # First request (should be cache miss or fresh)
         start_time = time.time()
-        response1 = requests.get(url, params=params, timeout=10)
+        response1 = requests.get(url, timeout=10)
         first_duration = (time.time() - start_time) * 1000
 
         # Check first request headers
@@ -480,7 +765,7 @@ class PerformanceTestRunner:
 
         # Second request (should be cache hit)
         start_time = time.time()
-        response2 = requests.get(url, params=params, timeout=10)
+        response2 = requests.get(url, timeout=10)
         second_duration = (time.time() - start_time) * 1000
 
         # Check second request headers
@@ -525,24 +810,28 @@ class PerformanceTestRunner:
 
         return first_duration, second_duration
 
-    def _test_local_cache_performance(self, email_hash):
+    def _test_local_cache_performance(self, email: str) -> Tuple[float, float]:
         """Test cache performance locally"""
-        url = f"/avatar/{email_hash}"
-        params = {"d": "identicon", "s": 80}
+        # Use libravatar library to generate the URL
+        full_url = libravatar_url(email=email, size=80, default="identicon")
+        urlobj = urlsplit(full_url)
+        url_path = f"{urlobj.path}?{urlobj.query}"
 
         # First request (cache miss)
         start_time = time.time()
-        self.client.get(url, params)
+        if self.client:
+            self.client.get(url_path)
         first_duration = (time.time() - start_time) * 1000
 
         # Second request (should be cache hit)
         start_time = time.time()
-        self.client.get(url, params)
+        if self.client:
+            self.client.get(url_path)
         second_duration = (time.time() - start_time) * 1000
 
         return first_duration, second_duration
 
-    def run_all_tests(self):
+    def run_all_tests(self, avatar_threshold: int = 1000, response_threshold: int = 1000, p95_threshold: int = 2000, ignore_cache_warnings: bool = False) -> Optional[Dict[str, Any]]:
         """Run all performance tests"""
         print("Starting Libravatar Performance Tests")
         print("=" * 50)
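Both cache probes above follow the same pattern: fetch the same avatar URL twice, compare wall-clock timings, and inspect the Age / x-cache-detail response headers. A self-contained illustration of that pattern follows; the helper name check_cache_warmup is assumed and is not part of the commit.

# Illustrative sketch: time two consecutive fetches of the same avatar URL and
# use the second/first comparison plus the Age header as a rough cache signal.
import time

import requests


def check_cache_warmup(url: str) -> dict:
    timings = []
    age_headers = []
    for _ in range(2):
        start = time.time()
        response = requests.get(url, timeout=10)
        timings.append((time.time() - start) * 1000.0)  # milliseconds
        age_headers.append(response.headers.get("age", "0"))
    first, second = timings
    return {
        "first_ms": first,
        "second_ms": second,
        # A warmed cache usually answers the repeat request faster and/or
        # reports a non-zero Age on the second response.
        "likely_cached": second < first or age_headers[1] != "0",
    }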
@@ -557,14 +846,14 @@ class PerformanceTestRunner:
             # Run tests based on mode
             if self.remote_testing:
                 print("🌐 Running remote server tests...")
-                self.test_remote_avatar_performance()
+                self.test_remote_avatar_performance(response_threshold)
             else:
                 print("🏠 Running local tests...")
                 self.test_avatar_generation_performance()
                 self.test_database_performance()
 
             # Always test concurrent load
-            self.test_concurrent_load()
+            self.test_concurrent_load(response_threshold, p95_threshold)
 
             # Test cache performance if enabled
             self.test_cache_performance()
@@ -576,7 +865,7 @@ class PerformanceTestRunner:
             print(f"Performance tests completed in {total_duration:.2f}s")
 
             # Overall assessment
-            self.assess_overall_performance()
+            self.assess_overall_performance(avatar_threshold, response_threshold, p95_threshold, ignore_cache_warnings)
 
             return self.results
 
@@ -584,68 +873,30 @@ class PerformanceTestRunner:
             print(f"Performance test failed: {e}")
             return None
 
-    def test_remote_avatar_performance(self):
+    def test_remote_avatar_performance(self, response_threshold: int = 1000) -> None:
         """Test avatar generation performance on remote server"""
         print("\n=== Remote Avatar Performance Test ===")
 
-        import requests
-
-        # Test different avatar types and sizes
-        test_cases = [
-            {"default": "identicon", "size": 80},
-            {"default": "monsterid", "size": 80},
-            {"default": "robohash", "size": 80},
-            {"default": "identicon", "size": 256},
-            {"default": "monsterid", "size": 256},
-        ]
+        # Generate test cases for all avatar styles and sizes
+        test_cases = self._generate_test_cases()
 
         results = []
 
+        # Generate random email for testing
+        test_email = generate_random_email()
+        print(f"  Testing with email: {test_email}")
 
         for case in test_cases:
-            # Generate test hash
-            test_email = "perftest@example.com"
-            email_hash = hashlib.md5(test_email.encode()).hexdigest()
+            result = self._test_single_avatar_request(
+                case, test_email, use_requests=True
+            )
+            results.append(result)
 
-            # Build URL
-            url = f"{self.base_url}/avatar/{email_hash}"
-            params = {"d": case["default"], "s": case["size"]}
+        # Show example URL from first result
+        if results:
+            print(f"  Example URL: {results[0]['full_url']}")
 
-            # Time the request
-            start_time = time.time()
-            try:
-                response = requests.get(url, params=params, timeout=10)
-                end_time = time.time()
-
-                duration = (end_time - start_time) * 1000  # Convert to ms
-
-                results.append(
-                    {
-                        "test": f"{case['default']}_{case['size']}px",
-                        "duration_ms": duration,
-                        "status_code": response.status_code,
-                        "content_length": (
-                            len(response.content) if response.content else 0
-                        ),
-                        "success": response.status_code == 200,
-                    }
-                )
-
-                status = "✅" if response.status_code == 200 else "❌"
-                print(
-                    f"  {case['default']} ({case['size']}px): {duration:.2f}ms {status}"
-                )
-
-            except Exception as e:
-                print(f"  {case['default']} ({case['size']}px): ❌ Failed - {e}")
-                results.append(
-                    {
-                        "test": f"{case['default']}_{case['size']}px",
-                        "duration_ms": 0,
-                        "status_code": 0,
-                        "success": False,
-                        "error": str(e),
-                    }
-                )
+        # Display results grouped by style
+        self._display_avatar_results(results)
 
         # Calculate statistics for successful requests
         successful_results = [r for r in results if r["success"]]
@@ -659,10 +910,10 @@ class PerformanceTestRunner:
             print(f"  Success rate: {len(successful_results)}/{len(results)}")
 
             # Performance thresholds for remote testing
-            if avg_duration > 2000:  # 2 seconds
-                print("  ⚠️  WARNING: Average response time exceeds 2s")
-            elif avg_duration > 1000:  # 1 second
-                print("  ⚠️  CAUTION: Average response time exceeds 1s")
+            if avg_duration > (response_threshold * 2):  # 2x threshold for warning
+                print(f"  ⚠️  WARNING: Average response time exceeds {response_threshold * 2}ms")
+            elif avg_duration > response_threshold:
+                print(f"  ⚠️  CAUTION: Average response time exceeds {response_threshold}ms")
             else:
                 print("  ✅ Remote avatar performance is good")
         else:
@@ -677,7 +928,7 @@ class PerformanceTestRunner:
             "success_rate": len(successful_results) / len(results) if results else 0,
         }
 
-    def assess_overall_performance(self):
+    def assess_overall_performance(self, avatar_threshold: int = 1000, response_threshold: int = 1000, p95_threshold: int = 2000, ignore_cache_warnings: bool = False) -> bool:
         """Provide overall performance assessment"""
         print("\n=== OVERALL PERFORMANCE ASSESSMENT ===")
 
@@ -686,8 +937,8 @@ class PerformanceTestRunner:
         # Check avatar generation
         if "avatar_generation" in self.results:
             avg_gen = self.results["avatar_generation"]["average_ms"]
-            if avg_gen > 1000:
-                warnings.append(f"Avatar generation is slow ({avg_gen:.0f}ms average)")
+            if avg_gen > avatar_threshold:
+                warnings.append(f"Avatar generation is slow ({avg_gen:.0f}ms average, threshold: {avatar_threshold}ms)")
 
         # Check concurrent load
         if "concurrent_load" in self.results:
@@ -696,7 +947,7 @@ class PerformanceTestRunner:
                 warnings.append(f"{failed} requests failed under concurrent load")
 
         # Check cache performance
-        if "cache_performance" in self.results:
+        if "cache_performance" in self.results and not ignore_cache_warnings:
             cache_working = self.results["cache_performance"].get(
                 "cache_working", False
             )
@@ -722,7 +973,7 @@ class PerformanceTestRunner:
         return len(warnings) > 0
 
 
-def main():
+def main() -> Optional[Dict[str, Any]]:
     """Main entry point"""
     import argparse
 
@@ -749,6 +1000,29 @@ def main():
         action="store_true",
         help="Force remote testing mode (auto-detected for non-localhost URLs)",
     )
+    parser.add_argument(
+        "--avatar-threshold",
+        type=int,
+        default=1000,
+        help="Avatar generation threshold in ms (default: 1000ms, use 2500 for dev environments)",
+    )
+    parser.add_argument(
+        "--response-threshold",
+        type=int,
+        default=1000,
+        help="Response time threshold in ms (default: 1000ms, use 2500 for dev environments)",
+    )
+    parser.add_argument(
+        "--p95-threshold",
+        type=int,
+        default=2000,
+        help="95th percentile threshold in ms (default: 2000ms, use 5000 for dev environments)",
+    )
+    parser.add_argument(
+        "--ignore-cache-warnings",
+        action="store_true",
+        help="Don't fail on cache performance warnings (useful for dev environments)",
+    )
 
     args = parser.parse_args()
 
@@ -765,7 +1039,7 @@ def main():
         remote_testing=remote_testing,
     )
 
-    results = runner.run_all_tests()
+    results = runner.run_all_tests(args.avatar_threshold, args.response_threshold, args.p95_threshold, args.ignore_cache_warnings)
 
     if args.output and results:
         import json
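With the new arguments wired through main(), the thresholds can also be driven programmatically. The sketch below mirrors what the argparse additions feed into run_all_tests(); the module path and constructor keywords are assumptions based on the hunks above, not a verified API.

# Illustrative sketch: looser thresholds for a slow environment, mirroring the
# new CLI flags (--avatar-threshold, --response-threshold, --p95-threshold,
# --ignore-cache-warnings). Module path and constructor kwargs are assumed.
from performance_tests import PerformanceTestRunner  # assumed module path

runner = PerformanceTestRunner(base_url="http://localhost:8000", remote_testing=True)
results = runner.run_all_tests(
    avatar_threshold=2500,       # ms
    response_threshold=2500,     # ms
    p95_threshold=5000,          # ms
    ignore_cache_warnings=True,  # don't fail the run on cache-miss noise
)
if results:
    print(results.get("cache_performance", {}))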
@@ -1,5 +1,4 @@
 #!/usr/bin/env python3
-# -*- coding: utf-8 -*-
 """
 Run tests with OpenTelemetry instrumentation and export enabled, plus coverage measurement.
 This script is designed to be used with 'coverage run' command.