Mirror of https://git.linux-kernel.at/oliver/ivatar.git (synced 2025-11-15 12:38:03 +00:00)
Additional logging of gravatar fetches and ensure we don't send d=None if default hasn't been set; Reformat with black
ivatar/views.py (384 changed lines)
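
Beyond the black reformat, the functional change in this commit is how the redirect URL to the gravatarproxy view is built: the default query parameter is only appended when the caller actually supplied one, and the URL falls back to default=404 instead of carrying the literal string "None". A minimal sketch of that guard; the helper name and the "/gravatarproxy/" prefix are illustrative only, the real view builds the path with Django's reverse_lazy("gravatarproxy", ...) as shown in the diff below:

def build_gravatarproxy_url(digest, size, default=None):
    # Hypothetical helper; mirrors the guard added in AvatarImageView.get().
    url = "/gravatarproxy/%s?s=%i" % (digest, size)
    # Ensure we do not convert None to the string 'None'.
    if default:
        url += "&default=%s" % default
    else:
        # Ask Gravatar for a 404 so the proxy can detect a missing avatar
        # and fall back to our own default handling.
        url += "&default=404"
    return url


print(build_gravatarproxy_url("0" * 32, 80))           # ...?s=80&default=404
print(build_gravatarproxy_url("0" * 32, 80, "retro"))  # ...?s=80&default=retro
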
@@ -1,6 +1,7 @@
-'''
+# -*- coding: utf-8 -*-
+"""
 views under /
-'''
+"""
 from io import BytesIO
 from os import path
 import hashlib
@@ -28,24 +29,24 @@ from robohash import Robohash
 from ivatar.settings import AVATAR_MAX_SIZE, JPEG_QUALITY, DEFAULT_AVATAR_SIZE
 from ivatar.settings import CACHE_RESPONSE
 from ivatar.settings import CACHE_IMAGES_MAX_AGE
-from . ivataraccount.models import ConfirmedEmail, ConfirmedOpenId
-from . ivataraccount.models import pil_format, file_format
-from . utils import mm_ng
+from .ivataraccount.models import ConfirmedEmail, ConfirmedOpenId
+from .ivataraccount.models import pil_format, file_format
+from .utils import mm_ng
 
 URL_TIMEOUT = 5  # in seconds
 
 
 def get_size(request, size=DEFAULT_AVATAR_SIZE):
-    '''
+    """
     Get size from the URL arguments
-    '''
+    """
     sizetemp = None
-    if 's' in request.GET:
-        sizetemp = request.GET['s']
-    if 'size' in request.GET:
-        sizetemp = request.GET['size']
+    if "s" in request.GET:
+        sizetemp = request.GET["s"]
+    if "size" in request.GET:
+        sizetemp = request.GET["size"]
     if sizetemp:
-        if sizetemp != '' and sizetemp is not None and sizetemp != '0':
+        if sizetemp != "" and sizetemp is not None and sizetemp != "0":
             try:
                 if int(sizetemp) > 0:
                     size = int(sizetemp)
@@ -60,39 +61,54 @@ def get_size(request, size=DEFAULT_AVATAR_SIZE):
 
 
 class CachingHttpResponse(HttpResponse):
-    '''
+    """
     Handle caching of response
-    '''
-    def __init__(self, uri, content=b'', content_type=None, status=200,  # pylint: disable=too-many-arguments
-                 reason=None, charset=None):
+    """
+
+    def __init__(
+        self,
+        uri,
+        content=b"",
+        content_type=None,
+        status=200,  # pylint: disable=too-many-arguments
+        reason=None,
+        charset=None,
+    ):
         if CACHE_RESPONSE:
-            caches['filesystem'].set(uri, {
-                'content': content,
-                'content_type': content_type,
-                'status': status,
-                'reason': reason,
-                'charset': charset
-            })
+            caches["filesystem"].set(
+                uri,
+                {
+                    "content": content,
+                    "content_type": content_type,
+                    "status": status,
+                    "reason": reason,
+                    "charset": charset,
+                },
+            )
         super().__init__(content, content_type, status, reason, charset)
 
+
 class AvatarImageView(TemplateView):
-    '''
+    """
     View to return (binary) image, based on OpenID/Email (both by digest)
-    '''
+    """
+
     # TODO: Do cache resize images!! Memcached?
 
     def options(self, request, *args, **kwargs):
-        response = HttpResponse("", content_type='text/plain')
-        response['Allow'] = "404 mm mp retro pagan wavatar monsterid robohash identicon"
+        response = HttpResponse("", content_type="text/plain")
+        response["Allow"] = "404 mm mp retro pagan wavatar monsterid robohash identicon"
         return response
 
-    def get(self, request, *args, **kwargs):  # pylint: disable=too-many-branches,too-many-statements,too-many-locals,too-many-return-statements
-        '''
+    def get(
+        self, request, *args, **kwargs
+    ):  # pylint: disable=too-many-branches,too-many-statements,too-many-locals,too-many-return-statements
+        """
         Override get from parent class
-        '''
+        """
         model = ConfirmedEmail
         size = get_size(request)
-        imgformat = 'png'
+        imgformat = "png"
         obj = None
         default = None
         forcedefault = False
@@ -102,65 +118,69 @@ class AvatarImageView(TemplateView):
 
         # Check the cache first
         if CACHE_RESPONSE:
-            centry = caches['filesystem'].get(uri)
+            centry = caches["filesystem"].get(uri)
             if centry:
                 # For DEBUG purpose only print('Cached entry for %s' % uri)
                 return HttpResponse(
-                    centry['content'],
-                    content_type=centry['content_type'],
-                    status=centry['status'],
-                    reason=centry['reason'],
-                    charset=centry['charset'])
+                    centry["content"],
+                    content_type=centry["content_type"],
+                    status=centry["status"],
+                    reason=centry["reason"],
+                    charset=centry["charset"],
+                )
 
         # In case no digest at all is provided, return to home page
-        if 'digest' not in kwargs:
-            return HttpResponseRedirect(reverse_lazy('home'))
+        if "digest" not in kwargs:
+            return HttpResponseRedirect(reverse_lazy("home"))
 
-        if 'd' in request.GET:
-            default = request.GET['d']
-        if 'default' in request.GET:
-            default = request.GET['default']
+        if "d" in request.GET:
+            default = request.GET["d"]
+        if "default" in request.GET:
+            default = request.GET["default"]
 
-        if 'f' in request.GET:
-            if request.GET['f'] == 'y':
+        if "f" in request.GET:
+            if request.GET["f"] == "y":
                 forcedefault = True
-        if 'forcedefault' in request.GET:
-            if request.GET['forcedefault'] == 'y':
+        if "forcedefault" in request.GET:
+            if request.GET["forcedefault"] == "y":
                 forcedefault = True
 
-        if 'gravatarredirect' in request.GET:
-            if request.GET['gravatarredirect'] == 'y':
+        if "gravatarredirect" in request.GET:
+            if request.GET["gravatarredirect"] == "y":
                 gravatarredirect = True
 
-        if 'gravatarproxy' in request.GET:
-            if request.GET['gravatarproxy'] == 'n':
+        if "gravatarproxy" in request.GET:
+            if request.GET["gravatarproxy"] == "n":
                 gravatarproxy = False
 
         try:
-            obj = model.objects.get(digest=kwargs['digest'])
+            obj = model.objects.get(digest=kwargs["digest"])
         except ObjectDoesNotExist:
             try:
-                obj = model.objects.get(digest_sha256=kwargs['digest'])
+                obj = model.objects.get(digest_sha256=kwargs["digest"])
             except ObjectDoesNotExist:
                 model = ConfirmedOpenId
                 try:
-                    d = kwargs['digest']  # pylint: disable=invalid-name
+                    d = kwargs["digest"]  # pylint: disable=invalid-name
                     # OpenID is tricky. http vs. https, versus trailing slash or not
                     # However, some users eventually have added their variations already
                     # and therfore we need to use filter() and first()
                     obj = model.objects.filter(
-                        Q(digest=d) |
-                        Q(alt_digest1=d) |
-                        Q(alt_digest2=d) |
-                        Q(alt_digest3=d)).first()
-                except:  # pylint: disable=bare-except
+                        Q(digest=d)
+                        | Q(alt_digest1=d)
+                        | Q(alt_digest2=d)
+                        | Q(alt_digest3=d)
+                    ).first()
+                except Exception:  # pylint: disable=bare-except
                     pass
 
-
         # If that mail/openid doesn't exist, or has no photo linked to it
         if not obj or not obj.photo or forcedefault:
-            gravatar_url = 'https://secure.gravatar.com/avatar/' + kwargs['digest'] \
-                + '?s=%i' % size
+            gravatar_url = (
+                "https://secure.gravatar.com/avatar/"
+                + kwargs["digest"]
+                + "?s=%i" % size
+            )
 
             # If we have redirection to Gravatar enabled, this overrides all
             # default= settings, except forcedefault!
@@ -169,119 +189,117 @@ class AvatarImageView(TemplateView):
 
             # Request to proxy Gravatar image - only if not forcedefault
             if gravatarproxy and not forcedefault:
-                url = reverse_lazy('gravatarproxy', args=[kwargs['digest']]) \
-                    + '?s=%i' % size + '&default=%s' % default
+                url = (
+                    reverse_lazy("gravatarproxy", args=[kwargs["digest"]])
+                    + "?s=%i" % size
+                )
+                # Ensure we do not convert None to string 'None'
+                if default:
+                    url += "&default=%s" % default
+                else:
+                    url += "&default=404"
                 return HttpResponseRedirect(url)
 
             # Return the default URL, as specified, or 404 Not Found, if default=404
             if default:
                 # Proxy to gravatar to generate wavatar - lazy me
-                if str(default) == 'wavatar':
-                    url = reverse_lazy('gravatarproxy', args=[kwargs['digest']]) \
-                        + '?s=%i' % size + '&default=%s&f=y' % default
+                if str(default) == "wavatar":
+                    url = (
+                        reverse_lazy("gravatarproxy", args=[kwargs["digest"]])
+                        + "?s=%i" % size
+                        + "&default=%s&f=y" % default
+                    )
                     return HttpResponseRedirect(url)
 
                 if str(default) == str(404):
-                    return HttpResponseNotFound(_('<h1>Image not found</h1>'))
+                    return HttpResponseNotFound(_("<h1>Image not found</h1>"))
 
-                if str(default) == 'monsterid':
-                    monsterdata = BuildMonster(seed=kwargs['digest'], size=(size, size))
+                if str(default) == "monsterid":
+                    monsterdata = BuildMonster(seed=kwargs["digest"], size=(size, size))
                     data = BytesIO()
-                    monsterdata.save(data, 'PNG', quality=JPEG_QUALITY)
+                    monsterdata.save(data, "PNG", quality=JPEG_QUALITY)
                     data.seek(0)
-                    response = CachingHttpResponse(
-                        uri,
-                        data,
-                        content_type='image/png')
-                    response['Cache-Control'] = 'max-age=%i' % CACHE_IMAGES_MAX_AGE
+                    response = CachingHttpResponse(uri, data, content_type="image/png")
+                    response["Cache-Control"] = "max-age=%i" % CACHE_IMAGES_MAX_AGE
                     return response
 
-                if str(default) == 'robohash':
-                    roboset = 'any'
-                    if request.GET.get('robohash'):
-                        roboset = request.GET.get('robohash')
-                    robohash = Robohash(kwargs['digest'])
+                if str(default) == "robohash":
+                    roboset = "any"
+                    if request.GET.get("robohash"):
+                        roboset = request.GET.get("robohash")
+                    robohash = Robohash(kwargs["digest"])
                     robohash.assemble(roboset=roboset, sizex=size, sizey=size)
                     data = BytesIO()
-                    robohash.img.save(data, format='png')
+                    robohash.img.save(data, format="png")
                     data.seek(0)
-                    response = CachingHttpResponse(
-                        uri,
-                        data,
-                        content_type='image/png')
-                    response['Cache-Control'] = 'max-age=%i' % CACHE_IMAGES_MAX_AGE
+                    response = CachingHttpResponse(uri, data, content_type="image/png")
+                    response["Cache-Control"] = "max-age=%i" % CACHE_IMAGES_MAX_AGE
                     return response
 
-                if str(default) == 'retro':
-                    identicon = Identicon.render(kwargs['digest'])
+                if str(default) == "retro":
+                    identicon = Identicon.render(kwargs["digest"])
                     data = BytesIO()
                     img = Image.open(BytesIO(identicon))
                     img = img.resize((size, size), Image.ANTIALIAS)
-                    img.save(data, 'PNG', quality=JPEG_QUALITY)
+                    img.save(data, "PNG", quality=JPEG_QUALITY)
                     data.seek(0)
-                    response = CachingHttpResponse(
-                        uri,
-                        data,
-                        content_type='image/png')
-                    response['Cache-Control'] = 'max-age=%i' % CACHE_IMAGES_MAX_AGE
+                    response = CachingHttpResponse(uri, data, content_type="image/png")
+                    response["Cache-Control"] = "max-age=%i" % CACHE_IMAGES_MAX_AGE
                     return response
 
-                if str(default) == 'pagan':
-                    paganobj = pagan.Avatar(kwargs['digest'])
+                if str(default) == "pagan":
+                    paganobj = pagan.Avatar(kwargs["digest"])
                     data = BytesIO()
                     img = paganobj.img.resize((size, size), Image.ANTIALIAS)
-                    img.save(data, 'PNG', quality=JPEG_QUALITY)
+                    img.save(data, "PNG", quality=JPEG_QUALITY)
                     data.seek(0)
-                    response = CachingHttpResponse(
-                        uri,
-                        data,
-                        content_type='image/png')
-                    response['Cache-Control'] = 'max-age=%i' % CACHE_IMAGES_MAX_AGE
+                    response = CachingHttpResponse(uri, data, content_type="image/png")
+                    response["Cache-Control"] = "max-age=%i" % CACHE_IMAGES_MAX_AGE
                     return response
 
-                if str(default) == 'identicon':
+                if str(default) == "identicon":
                     p = Pydenticon5()  # pylint: disable=invalid-name
                     # In order to make use of the whole 32 bytes digest, we need to redigest them.
-                    newdigest = hashlib.md5(bytes(kwargs['digest'], 'utf-8')).hexdigest()
+                    newdigest = hashlib.md5(
+                        bytes(kwargs["digest"], "utf-8")
+                    ).hexdigest()
                     img = p.draw(newdigest, size, 0)
                     data = BytesIO()
-                    img.save(data, 'PNG', quality=JPEG_QUALITY)
+                    img.save(data, "PNG", quality=JPEG_QUALITY)
                     data.seek(0)
-                    response = CachingHttpResponse(
-                        uri,
-                        data,
-                        content_type='image/png')
-                    response['Cache-Control'] = 'max-age=%i' % CACHE_IMAGES_MAX_AGE
+                    response = CachingHttpResponse(uri, data, content_type="image/png")
+                    response["Cache-Control"] = "max-age=%i" % CACHE_IMAGES_MAX_AGE
                     return response
 
-                if str(default) == 'mmng':
-                    mmngimg = mm_ng(idhash=kwargs['digest'], size=size)
+                if str(default) == "mmng":
+                    mmngimg = mm_ng(idhash=kwargs["digest"], size=size)
                     data = BytesIO()
-                    mmngimg.save(data, 'PNG', quality=JPEG_QUALITY)
+                    mmngimg.save(data, "PNG", quality=JPEG_QUALITY)
                     data.seek(0)
-                    response = CachingHttpResponse(
-                        uri,
-                        data,
-                        content_type='image/png')
-                    response['Cache-Control'] = 'max-age=%i' % CACHE_IMAGES_MAX_AGE
+                    response = CachingHttpResponse(uri, data, content_type="image/png")
+                    response["Cache-Control"] = "max-age=%i" % CACHE_IMAGES_MAX_AGE
                     return response
 
-                if str(default) == 'mm' or str(default) == 'mp':
+                if str(default) == "mm" or str(default) == "mp":
                     # If mm is explicitly given, we need to catch that
-                    static_img = path.join('static', 'img', 'mm', '%s%s' % (str(size), '.png'))
+                    static_img = path.join(
+                        "static", "img", "mm", "%s%s" % (str(size), ".png")
+                    )
                     if not path.isfile(static_img):
                         # We trust this exists!!!
-                        static_img = path.join('static', 'img', 'mm', '512.png')
+                        static_img = path.join("static", "img", "mm", "512.png")
                     # We trust static/ is mapped to /static/
-                    return HttpResponseRedirect('/' + static_img)
+                    return HttpResponseRedirect("/" + static_img)
                 return HttpResponseRedirect(default)
 
-            static_img = path.join('static', 'img', 'nobody', '%s%s' % (str(size), '.png'))
+            static_img = path.join(
+                "static", "img", "nobody", "%s%s" % (str(size), ".png")
+            )
             if not path.isfile(static_img):
                 # We trust this exists!!!
-                static_img = path.join('static', 'img', 'nobody', '512.png')
+                static_img = path.join("static", "img", "nobody", "512.png")
             # We trust static/ is mapped to /static/
-            return HttpResponseRedirect('/' + static_img)
+            return HttpResponseRedirect("/" + static_img)
 
         imgformat = obj.photo.format
         photodata = Image.open(BytesIO(obj.photo.data))
@@ -298,31 +316,35 @@ class AvatarImageView(TemplateView):
         obj.photo.save()
         obj.access_count += 1
         obj.save()
-        if imgformat == 'jpg':
-            imgformat = 'jpeg'
-        response = CachingHttpResponse(
-            uri,
-            data,
-            content_type='image/%s' % imgformat)
-        response['Cache-Control'] = 'max-age=%i' % CACHE_IMAGES_MAX_AGE
+        if imgformat == "jpg":
+            imgformat = "jpeg"
+        response = CachingHttpResponse(uri, data, content_type="image/%s" % imgformat)
+        response["Cache-Control"] = "max-age=%i" % CACHE_IMAGES_MAX_AGE
         return response
 
+
 class GravatarProxyView(View):
-    '''
+    """
     Proxy request to Gravatar and return the image from there
-    '''
+    """
+
     # TODO: Do cache images!! Memcached?
 
-    def get(self, request, *args, **kwargs):  # pylint: disable=too-many-branches,too-many-statements,too-many-locals,no-self-use,unused-argument,too-many-return-statements
-        '''
+    def get(
+        self, request, *args, **kwargs
+    ):  # pylint: disable=too-many-branches,too-many-statements,too-many-locals,no-self-use,unused-argument,too-many-return-statements
+        """
         Override get from parent class
-        '''
+        """
+
         def redir_default(default=None):
-            url = reverse_lazy(
-                'avatar_view',
-                args=[kwargs['digest']]) + '?s=%i' % size + '&forcedefault=y'
+            url = (
+                reverse_lazy("avatar_view", args=[kwargs["digest"]])
+                + "?s=%i" % size
+                + "&forcedefault=y"
+            )
             if default is not None:
-                url += '&default=%s' % default
+                url += "&default=%s" % default
             return HttpResponseRedirect(url)
 
         size = get_size(request)
@@ -330,70 +352,75 @@ class GravatarProxyView(View):
         default = None
 
         try:
-            if str(request.GET['default']) != 'None':
-                default = request.GET['default']
-        except:  # pylint: disable=bare-except
+            if str(request.GET["default"]) != "None":
+                default = request.GET["default"]
+        except Exception:  # pylint: disable=bare-except
            pass
 
-        if str(default) != 'wavatar':
+        if str(default) != "wavatar":
             # This part is special/hackish
             # Check if the image returned by Gravatar is their default image, if so,
             # redirect to our default instead.
-            gravatar_test_url = 'https://secure.gravatar.com/avatar/' + kwargs['digest'] \
-                + '?s=%i' % 50
-            if cache.get(gravatar_test_url) == 'default':
+            gravatar_test_url = (
+                "https://secure.gravatar.com/avatar/" + kwargs["digest"] + "?s=%i" % 50
+            )
+            if cache.get(gravatar_test_url) == "default":
                 # DEBUG only
                 # print("Cached Gravatar response: Default.")
                 return redir_default(default)
             try:
                 testdata = urlopen(gravatar_test_url, timeout=URL_TIMEOUT)
                 data = BytesIO(testdata.read())
-                if hashlib.md5(data.read()).hexdigest() == '71bc262d627971d13fe6f3180b93062a':
-                    cache.set(gravatar_test_url, 'default', 60)
+                if (
+                    hashlib.md5(data.read()).hexdigest()
+                    == "71bc262d627971d13fe6f3180b93062a"
+                ):
+                    cache.set(gravatar_test_url, "default", 60)
                     return redir_default(default)
             except Exception as exc:  # pylint: disable=broad-except
-                print('Gravatar test url fetch failed: %s' % exc)
+                print("Gravatar test url fetch failed: %s" % exc)
 
-        gravatar_url = 'https://secure.gravatar.com/avatar/' + kwargs['digest'] \
-            + '?s=%i' % size + '&d=%s' % default
+        gravatar_url = (
+            "https://secure.gravatar.com/avatar/"
+            + kwargs["digest"]
+            + "?s=%i" % size
+            + "&d=%s" % default
+        )
 
         try:
-            if cache.get(gravatar_url) == 'err':
-                print('Cached Gravatar fetch failed with URL error')
+            if cache.get(gravatar_url) == "err":
+                print("Cached Gravatar fetch failed with URL error: %s" % gravatar_url)
                 return redir_default(default)
 
             gravatarimagedata = urlopen(gravatar_url, timeout=URL_TIMEOUT)
         except HTTPError as exc:
             if exc.code != 404 and exc.code != 503:
                 print(
-                    'Gravatar fetch failed with an unexpected %s HTTP error' %
-                    exc.code)
-                cache.set(gravatar_url, 'err', 30)
+                    "Gravatar fetch failed with an unexpected %s HTTP error: %s"
+                    % (exc.code, gravatar_url)
+                )
+                cache.set(gravatar_url, "err", 30)
             return redir_default(default)
         except URLError as exc:
-            print(
-                'Gravatar fetch failed with URL error: %s' %
-                exc.reason)
-            cache.set(gravatar_url, 'err', 30)
+            print("Gravatar fetch failed with URL error: %s" % exc.reason)
+            cache.set(gravatar_url, "err", 30)
             return redir_default(default)
         except SSLError as exc:
-            print(
-                'Gravatar fetch failed with SSL error: %s' %
-                exc.reason)
-            cache.set(gravatar_url, 'err', 30)
+            print("Gravatar fetch failed with SSL error: %s" % exc.reason)
+            cache.set(gravatar_url, "err", 30)
             return redir_default(default)
         try:
             data = BytesIO(gravatarimagedata.read())
             img = Image.open(data)
             data.seek(0)
             response = HttpResponse(
-                data.read(),
-                content_type='image/%s' % file_format(img.format))
-            response['Cache-Control'] = 'max-age=%i' % CACHE_IMAGES_MAX_AGE
+                data.read(), content_type="image/%s" % file_format(img.format)
+            )
+            response["Cache-Control"] = "max-age=%i" % CACHE_IMAGES_MAX_AGE
             return response
 
         except ValueError as exc:
-            print('Value error: %s' % exc)
+            print("Value error: %s" % exc)
             return redir_default(default)
 
         # We shouldn't reach this point... But make sure we do something
@@ -401,14 +428,17 @@ class GravatarProxyView(View):
 
 
 class StatsView(TemplateView, JsonResponse):
-    '''
+    """
     Return stats
-    '''
-    def get(self, request, *args, **kwargs):  # pylint: disable=too-many-branches,too-many-statements,too-many-locals,no-self-use,unused-argument,too-many-return-statements
+    """
+
+    def get(
+        self, request, *args, **kwargs
+    ):  # pylint: disable=too-many-branches,too-many-statements,too-many-locals,no-self-use,unused-argument,too-many-return-statements
         retval = {
-            'users': User.objects.all().count(),
-            'mails': ConfirmedEmail.objects.all().count(),
-            'openids': ConfirmedOpenId.objects.all().count(),  # pylint: disable=no-member
+            "users": User.objects.all().count(),
+            "mails": ConfirmedEmail.objects.all().count(),
+            "openids": ConfirmedOpenId.objects.all().count(),  # pylint: disable=no-member
         }
 
         return JsonResponse(retval)
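
The commit also expands the logging around Gravatar fetches in GravatarProxyView: the cached-failure path and the unexpected-HTTP-error path now include the Gravatar URL that failed, not just the error itself. The surrounding logic keeps short-lived negative cache entries: "default" for 60 seconds when Gravatar answers with its own stock image (detected by comparing the MD5 of a 50-pixel test fetch against a known digest), and "err" for 30 seconds when a fetch fails. A hedged sketch of the default-image detection step, assuming a Django-style cache object with get()/set(); the function name is illustrative and not the actual view code:

import hashlib
from io import BytesIO
from urllib.error import HTTPError, URLError
from urllib.request import urlopen

# Both values below are taken from the diff above.
GRAVATAR_DEFAULT_MD5 = "71bc262d627971d13fe6f3180b93062a"
URL_TIMEOUT = 5  # in seconds


def gravatar_has_custom_avatar(digest, cache):
    """Return False if Gravatar only serves its default image for this digest."""
    test_url = "https://secure.gravatar.com/avatar/" + digest + "?s=%i" % 50
    if cache.get(test_url) == "default":
        return False  # negative result cached for 60 seconds
    try:
        data = BytesIO(urlopen(test_url, timeout=URL_TIMEOUT).read())
        if hashlib.md5(data.read()).hexdigest() == GRAVATAR_DEFAULT_MD5:
            cache.set(test_url, "default", 60)
            return False
    except (HTTPError, URLError) as exc:
        print("Gravatar test url fetch failed: %s" % exc)
    return True

Any object exposing get() and set(key, value, timeout), such as django.core.cache.cache, works as the cache argument; the real view redirects to its own default handling when this check reports Gravatar's stock image, and otherwise proxies the full-size image.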