Mirror of https://git.linux-kernel.at/oliver/ivatar.git (synced 2025-11-15 20:48:02 +00:00)
Additional logging of gravatar fetches and ensure we don't send d=None, if default hasn't been set; Reformat with black
ivatar/views.py (384 changed lines)
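
Beyond the black reformatting, the functional change is small: the Gravatar proxy redirect only carries a default= parameter when one was actually supplied (falling back to default=404 rather than passing the literal string "None"), and the fetch-failure log lines now include the URL that failed. The snippet below is a condensed, illustrative sketch of that URL-building guard, not code from the commit; the helper name build_proxy_url and the hard-coded /gravatarproxy/ path are assumptions, since ivatar/views.py builds the URL inline via reverse_lazy("gravatarproxy", ...).

    # Illustrative sketch only: the d=None guard added in AvatarImageView.get().
    # build_proxy_url and the literal path are hypothetical; the real code
    # resolves the route with reverse_lazy("gravatarproxy", args=[digest]).
    def build_proxy_url(digest, size, default=None):
        url = "/gravatarproxy/%s?s=%i" % (digest, size)
        # Ensure we do not convert None to the string 'None'
        if default:
            url += "&default=%s" % default
        else:
            url += "&default=404"
        return url

    # build_proxy_url("abc123", 80)          -> "/gravatarproxy/abc123?s=80&default=404"
    # build_proxy_url("abc123", 80, "retro") -> "/gravatarproxy/abc123?s=80&default=retro"
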
@@ -1,6 +1,7 @@
-'''
+# -*- coding: utf-8 -*-
+"""
 views under /
-'''
+"""
 from io import BytesIO
 from os import path
 import hashlib
@@ -28,24 +29,24 @@ from robohash import Robohash
 from ivatar.settings import AVATAR_MAX_SIZE, JPEG_QUALITY, DEFAULT_AVATAR_SIZE
 from ivatar.settings import CACHE_RESPONSE
 from ivatar.settings import CACHE_IMAGES_MAX_AGE
-from . ivataraccount.models import ConfirmedEmail, ConfirmedOpenId
-from . ivataraccount.models import pil_format, file_format
-from . utils import mm_ng
+from .ivataraccount.models import ConfirmedEmail, ConfirmedOpenId
+from .ivataraccount.models import pil_format, file_format
+from .utils import mm_ng
 
 URL_TIMEOUT = 5  # in seconds
 
 
 def get_size(request, size=DEFAULT_AVATAR_SIZE):
-    '''
+    """
     Get size from the URL arguments
-    '''
+    """
     sizetemp = None
-    if 's' in request.GET:
-        sizetemp = request.GET['s']
-    if 'size' in request.GET:
-        sizetemp = request.GET['size']
+    if "s" in request.GET:
+        sizetemp = request.GET["s"]
+    if "size" in request.GET:
+        sizetemp = request.GET["size"]
     if sizetemp:
-        if sizetemp != '' and sizetemp is not None and sizetemp != '0':
+        if sizetemp != "" and sizetemp is not None and sizetemp != "0":
             try:
                 if int(sizetemp) > 0:
                     size = int(sizetemp)
@@ -60,39 +61,54 @@ def get_size(request, size=DEFAULT_AVATAR_SIZE):
 
 
 class CachingHttpResponse(HttpResponse):
-    '''
+    """
     Handle caching of response
-    '''
-    def __init__(self, uri, content=b'', content_type=None, status=200,  # pylint: disable=too-many-arguments
-                 reason=None, charset=None):
+    """
+
+    def __init__(
+        self,
+        uri,
+        content=b"",
+        content_type=None,
+        status=200,  # pylint: disable=too-many-arguments
+        reason=None,
+        charset=None,
+    ):
         if CACHE_RESPONSE:
-            caches['filesystem'].set(uri, {
-                'content': content,
-                'content_type': content_type,
-                'status': status,
-                'reason': reason,
-                'charset': charset
-            })
+            caches["filesystem"].set(
+                uri,
+                {
+                    "content": content,
+                    "content_type": content_type,
+                    "status": status,
+                    "reason": reason,
+                    "charset": charset,
+                },
+            )
         super().__init__(content, content_type, status, reason, charset)
 
 
 class AvatarImageView(TemplateView):
-    '''
+    """
     View to return (binary) image, based on OpenID/Email (both by digest)
-    '''
+    """
+
     # TODO: Do cache resize images!! Memcached?
+
     def options(self, request, *args, **kwargs):
-        response = HttpResponse("", content_type='text/plain')
-        response['Allow'] = "404 mm mp retro pagan wavatar monsterid robohash identicon"
+        response = HttpResponse("", content_type="text/plain")
+        response["Allow"] = "404 mm mp retro pagan wavatar monsterid robohash identicon"
         return response
 
-    def get(self, request, *args, **kwargs):  # pylint: disable=too-many-branches,too-many-statements,too-many-locals,too-many-return-statements
-        '''
+    def get(
+        self, request, *args, **kwargs
+    ):  # pylint: disable=too-many-branches,too-many-statements,too-many-locals,too-many-return-statements
+        """
         Override get from parent class
-        '''
+        """
         model = ConfirmedEmail
         size = get_size(request)
-        imgformat = 'png'
+        imgformat = "png"
         obj = None
         default = None
         forcedefault = False
@@ -102,65 +118,69 @@ class AvatarImageView(TemplateView):
 
         # Check the cache first
         if CACHE_RESPONSE:
-            centry = caches['filesystem'].get(uri)
+            centry = caches["filesystem"].get(uri)
             if centry:
                 # For DEBUG purpose only print('Cached entry for %s' % uri)
                 return HttpResponse(
-                    centry['content'],
-                    content_type=centry['content_type'],
-                    status=centry['status'],
-                    reason=centry['reason'],
-                    charset=centry['charset'])
+                    centry["content"],
+                    content_type=centry["content_type"],
+                    status=centry["status"],
+                    reason=centry["reason"],
+                    charset=centry["charset"],
+                )
 
         # In case no digest at all is provided, return to home page
-        if 'digest' not in kwargs:
-            return HttpResponseRedirect(reverse_lazy('home'))
+        if "digest" not in kwargs:
+            return HttpResponseRedirect(reverse_lazy("home"))
 
-        if 'd' in request.GET:
-            default = request.GET['d']
-        if 'default' in request.GET:
-            default = request.GET['default']
+        if "d" in request.GET:
+            default = request.GET["d"]
+        if "default" in request.GET:
+            default = request.GET["default"]
 
-        if 'f' in request.GET:
-            if request.GET['f'] == 'y':
+        if "f" in request.GET:
+            if request.GET["f"] == "y":
                 forcedefault = True
-        if 'forcedefault' in request.GET:
-            if request.GET['forcedefault'] == 'y':
+        if "forcedefault" in request.GET:
+            if request.GET["forcedefault"] == "y":
                 forcedefault = True
 
-        if 'gravatarredirect' in request.GET:
-            if request.GET['gravatarredirect'] == 'y':
+        if "gravatarredirect" in request.GET:
+            if request.GET["gravatarredirect"] == "y":
                 gravatarredirect = True
 
-        if 'gravatarproxy' in request.GET:
-            if request.GET['gravatarproxy'] == 'n':
+        if "gravatarproxy" in request.GET:
+            if request.GET["gravatarproxy"] == "n":
                 gravatarproxy = False
 
         try:
-            obj = model.objects.get(digest=kwargs['digest'])
+            obj = model.objects.get(digest=kwargs["digest"])
         except ObjectDoesNotExist:
             try:
-                obj = model.objects.get(digest_sha256=kwargs['digest'])
+                obj = model.objects.get(digest_sha256=kwargs["digest"])
             except ObjectDoesNotExist:
                 model = ConfirmedOpenId
                 try:
-                    d = kwargs['digest']  # pylint: disable=invalid-name
+                    d = kwargs["digest"]  # pylint: disable=invalid-name
                     # OpenID is tricky. http vs. https, versus trailing slash or not
                     # However, some users eventually have added their variations already
                     # and therfore we need to use filter() and first()
                     obj = model.objects.filter(
-                        Q(digest=d) |
-                        Q(alt_digest1=d) |
-                        Q(alt_digest2=d) |
-                        Q(alt_digest3=d)).first()
-                except:  # pylint: disable=bare-except
+                        Q(digest=d)
+                        | Q(alt_digest1=d)
+                        | Q(alt_digest2=d)
+                        | Q(alt_digest3=d)
+                    ).first()
+                except Exception:  # pylint: disable=bare-except
                     pass
 
-
         # If that mail/openid doesn't exist, or has no photo linked to it
         if not obj or not obj.photo or forcedefault:
-            gravatar_url = 'https://secure.gravatar.com/avatar/' + kwargs['digest'] \
-                + '?s=%i' % size
+            gravatar_url = (
+                "https://secure.gravatar.com/avatar/"
+                + kwargs["digest"]
+                + "?s=%i" % size
+            )
 
             # If we have redirection to Gravatar enabled, this overrides all
             # default= settings, except forcedefault!
@@ -169,119 +189,117 @@ class AvatarImageView(TemplateView):
 
             # Request to proxy Gravatar image - only if not forcedefault
             if gravatarproxy and not forcedefault:
-                url = reverse_lazy('gravatarproxy', args=[kwargs['digest']]) \
-                    + '?s=%i' % size + '&default=%s' % default
+                url = (
+                    reverse_lazy("gravatarproxy", args=[kwargs["digest"]])
+                    + "?s=%i" % size
+                )
+                # Ensure we do not convert None to string 'None'
+                if default:
+                    url += "&default=%s" % default
+                else:
+                    url += "&default=404"
                 return HttpResponseRedirect(url)
 
             # Return the default URL, as specified, or 404 Not Found, if default=404
             if default:
                 # Proxy to gravatar to generate wavatar - lazy me
-                if str(default) == 'wavatar':
-                    url = reverse_lazy('gravatarproxy', args=[kwargs['digest']]) \
-                        + '?s=%i' % size + '&default=%s&f=y' % default
+                if str(default) == "wavatar":
+                    url = (
+                        reverse_lazy("gravatarproxy", args=[kwargs["digest"]])
+                        + "?s=%i" % size
+                        + "&default=%s&f=y" % default
+                    )
                     return HttpResponseRedirect(url)
 
                 if str(default) == str(404):
-                    return HttpResponseNotFound(_('<h1>Image not found</h1>'))
+                    return HttpResponseNotFound(_("<h1>Image not found</h1>"))
 
-                if str(default) == 'monsterid':
-                    monsterdata = BuildMonster(seed=kwargs['digest'], size=(size, size))
+                if str(default) == "monsterid":
+                    monsterdata = BuildMonster(seed=kwargs["digest"], size=(size, size))
                     data = BytesIO()
-                    monsterdata.save(data, 'PNG', quality=JPEG_QUALITY)
+                    monsterdata.save(data, "PNG", quality=JPEG_QUALITY)
                     data.seek(0)
-                    response = CachingHttpResponse(
-                        uri,
-                        data,
-                        content_type='image/png')
-                    response['Cache-Control'] = 'max-age=%i' % CACHE_IMAGES_MAX_AGE
+                    response = CachingHttpResponse(uri, data, content_type="image/png")
+                    response["Cache-Control"] = "max-age=%i" % CACHE_IMAGES_MAX_AGE
                     return response
 
-                if str(default) == 'robohash':
-                    roboset = 'any'
-                    if request.GET.get('robohash'):
-                        roboset = request.GET.get('robohash')
-                    robohash = Robohash(kwargs['digest'])
+                if str(default) == "robohash":
+                    roboset = "any"
+                    if request.GET.get("robohash"):
+                        roboset = request.GET.get("robohash")
+                    robohash = Robohash(kwargs["digest"])
                     robohash.assemble(roboset=roboset, sizex=size, sizey=size)
                     data = BytesIO()
-                    robohash.img.save(data, format='png')
+                    robohash.img.save(data, format="png")
                     data.seek(0)
-                    response = CachingHttpResponse(
-                        uri,
-                        data,
-                        content_type='image/png')
-                    response['Cache-Control'] = 'max-age=%i' % CACHE_IMAGES_MAX_AGE
+                    response = CachingHttpResponse(uri, data, content_type="image/png")
+                    response["Cache-Control"] = "max-age=%i" % CACHE_IMAGES_MAX_AGE
                     return response
 
-                if str(default) == 'retro':
-                    identicon = Identicon.render(kwargs['digest'])
+                if str(default) == "retro":
+                    identicon = Identicon.render(kwargs["digest"])
                     data = BytesIO()
                     img = Image.open(BytesIO(identicon))
                     img = img.resize((size, size), Image.ANTIALIAS)
-                    img.save(data, 'PNG', quality=JPEG_QUALITY)
+                    img.save(data, "PNG", quality=JPEG_QUALITY)
                     data.seek(0)
-                    response = CachingHttpResponse(
-                        uri,
-                        data,
-                        content_type='image/png')
-                    response['Cache-Control'] = 'max-age=%i' % CACHE_IMAGES_MAX_AGE
+                    response = CachingHttpResponse(uri, data, content_type="image/png")
+                    response["Cache-Control"] = "max-age=%i" % CACHE_IMAGES_MAX_AGE
                     return response
 
-                if str(default) == 'pagan':
-                    paganobj = pagan.Avatar(kwargs['digest'])
+                if str(default) == "pagan":
+                    paganobj = pagan.Avatar(kwargs["digest"])
                     data = BytesIO()
                     img = paganobj.img.resize((size, size), Image.ANTIALIAS)
-                    img.save(data, 'PNG', quality=JPEG_QUALITY)
+                    img.save(data, "PNG", quality=JPEG_QUALITY)
                     data.seek(0)
-                    response = CachingHttpResponse(
-                        uri,
-                        data,
-                        content_type='image/png')
-                    response['Cache-Control'] = 'max-age=%i' % CACHE_IMAGES_MAX_AGE
+                    response = CachingHttpResponse(uri, data, content_type="image/png")
+                    response["Cache-Control"] = "max-age=%i" % CACHE_IMAGES_MAX_AGE
                     return response
 
-                if str(default) == 'identicon':
+                if str(default) == "identicon":
                     p = Pydenticon5()  # pylint: disable=invalid-name
                     # In order to make use of the whole 32 bytes digest, we need to redigest them.
-                    newdigest = hashlib.md5(bytes(kwargs['digest'], 'utf-8')).hexdigest()
+                    newdigest = hashlib.md5(
+                        bytes(kwargs["digest"], "utf-8")
+                    ).hexdigest()
                     img = p.draw(newdigest, size, 0)
                     data = BytesIO()
-                    img.save(data, 'PNG', quality=JPEG_QUALITY)
+                    img.save(data, "PNG", quality=JPEG_QUALITY)
                     data.seek(0)
-                    response = CachingHttpResponse(
-                        uri,
-                        data,
-                        content_type='image/png')
-                    response['Cache-Control'] = 'max-age=%i' % CACHE_IMAGES_MAX_AGE
+                    response = CachingHttpResponse(uri, data, content_type="image/png")
+                    response["Cache-Control"] = "max-age=%i" % CACHE_IMAGES_MAX_AGE
                     return response
 
-                if str(default) == 'mmng':
-                    mmngimg = mm_ng(idhash=kwargs['digest'], size=size)
+                if str(default) == "mmng":
+                    mmngimg = mm_ng(idhash=kwargs["digest"], size=size)
                     data = BytesIO()
-                    mmngimg.save(data, 'PNG', quality=JPEG_QUALITY)
+                    mmngimg.save(data, "PNG", quality=JPEG_QUALITY)
                     data.seek(0)
-                    response = CachingHttpResponse(
-                        uri,
-                        data,
-                        content_type='image/png')
-                    response['Cache-Control'] = 'max-age=%i' % CACHE_IMAGES_MAX_AGE
+                    response = CachingHttpResponse(uri, data, content_type="image/png")
+                    response["Cache-Control"] = "max-age=%i" % CACHE_IMAGES_MAX_AGE
                     return response
 
-                if str(default) == 'mm' or str(default) == 'mp':
+                if str(default) == "mm" or str(default) == "mp":
                     # If mm is explicitly given, we need to catch that
-                    static_img = path.join('static', 'img', 'mm', '%s%s' % (str(size), '.png'))
+                    static_img = path.join(
+                        "static", "img", "mm", "%s%s" % (str(size), ".png")
+                    )
                     if not path.isfile(static_img):
                         # We trust this exists!!!
-                        static_img = path.join('static', 'img', 'mm', '512.png')
+                        static_img = path.join("static", "img", "mm", "512.png")
                     # We trust static/ is mapped to /static/
-                    return HttpResponseRedirect('/' + static_img)
+                    return HttpResponseRedirect("/" + static_img)
                 return HttpResponseRedirect(default)
 
-            static_img = path.join('static', 'img', 'nobody', '%s%s' % (str(size), '.png'))
+            static_img = path.join(
+                "static", "img", "nobody", "%s%s" % (str(size), ".png")
+            )
             if not path.isfile(static_img):
                 # We trust this exists!!!
-                static_img = path.join('static', 'img', 'nobody', '512.png')
+                static_img = path.join("static", "img", "nobody", "512.png")
             # We trust static/ is mapped to /static/
-            return HttpResponseRedirect('/' + static_img)
+            return HttpResponseRedirect("/" + static_img)
 
         imgformat = obj.photo.format
         photodata = Image.open(BytesIO(obj.photo.data))
@@ -298,31 +316,35 @@ class AvatarImageView(TemplateView):
         obj.photo.save()
         obj.access_count += 1
         obj.save()
-        if imgformat == 'jpg':
-            imgformat = 'jpeg'
-        response = CachingHttpResponse(
-            uri,
-            data,
-            content_type='image/%s' % imgformat)
-        response['Cache-Control'] = 'max-age=%i' % CACHE_IMAGES_MAX_AGE
+        if imgformat == "jpg":
+            imgformat = "jpeg"
+        response = CachingHttpResponse(uri, data, content_type="image/%s" % imgformat)
+        response["Cache-Control"] = "max-age=%i" % CACHE_IMAGES_MAX_AGE
         return response
 
 
 class GravatarProxyView(View):
-    '''
+    """
     Proxy request to Gravatar and return the image from there
-    '''
+    """
+
     # TODO: Do cache images!! Memcached?
+
-    def get(self, request, *args, **kwargs):  # pylint: disable=too-many-branches,too-many-statements,too-many-locals,no-self-use,unused-argument,too-many-return-statements
-        '''
+    def get(
+        self, request, *args, **kwargs
+    ):  # pylint: disable=too-many-branches,too-many-statements,too-many-locals,no-self-use,unused-argument,too-many-return-statements
+        """
         Override get from parent class
-        '''
+        """
+
         def redir_default(default=None):
-            url = reverse_lazy(
-                'avatar_view',
-                args=[kwargs['digest']]) + '?s=%i' % size + '&forcedefault=y'
+            url = (
+                reverse_lazy("avatar_view", args=[kwargs["digest"]])
+                + "?s=%i" % size
+                + "&forcedefault=y"
+            )
             if default is not None:
-                url += '&default=%s' % default
+                url += "&default=%s" % default
             return HttpResponseRedirect(url)
 
         size = get_size(request)
@@ -330,70 +352,75 @@ class GravatarProxyView(View):
         default = None
 
         try:
-            if str(request.GET['default']) != 'None':
-                default = request.GET['default']
-        except:  # pylint: disable=bare-except
+            if str(request.GET["default"]) != "None":
+                default = request.GET["default"]
+        except Exception:  # pylint: disable=bare-except
             pass
 
-        if str(default) != 'wavatar':
+        if str(default) != "wavatar":
             # This part is special/hackish
             # Check if the image returned by Gravatar is their default image, if so,
             # redirect to our default instead.
-            gravatar_test_url = 'https://secure.gravatar.com/avatar/' + kwargs['digest'] \
-                + '?s=%i' % 50
-            if cache.get(gravatar_test_url) == 'default':
+            gravatar_test_url = (
+                "https://secure.gravatar.com/avatar/" + kwargs["digest"] + "?s=%i" % 50
+            )
+            if cache.get(gravatar_test_url) == "default":
                 # DEBUG only
                 # print("Cached Gravatar response: Default.")
                 return redir_default(default)
             try:
                 testdata = urlopen(gravatar_test_url, timeout=URL_TIMEOUT)
                 data = BytesIO(testdata.read())
-                if hashlib.md5(data.read()).hexdigest() == '71bc262d627971d13fe6f3180b93062a':
-                    cache.set(gravatar_test_url, 'default', 60)
+                if (
+                    hashlib.md5(data.read()).hexdigest()
+                    == "71bc262d627971d13fe6f3180b93062a"
+                ):
+                    cache.set(gravatar_test_url, "default", 60)
                     return redir_default(default)
             except Exception as exc:  # pylint: disable=broad-except
-                print('Gravatar test url fetch failed: %s' % exc)
+                print("Gravatar test url fetch failed: %s" % exc)
 
-        gravatar_url = 'https://secure.gravatar.com/avatar/' + kwargs['digest'] \
-            + '?s=%i' % size + '&d=%s' % default
+        gravatar_url = (
+            "https://secure.gravatar.com/avatar/"
+            + kwargs["digest"]
+            + "?s=%i" % size
+            + "&d=%s" % default
+        )
 
         try:
-            if cache.get(gravatar_url) == 'err':
-                print('Cached Gravatar fetch failed with URL error')
+            if cache.get(gravatar_url) == "err":
+                print("Cached Gravatar fetch failed with URL error: %s" % gravatar_url)
                 return redir_default(default)
 
             gravatarimagedata = urlopen(gravatar_url, timeout=URL_TIMEOUT)
         except HTTPError as exc:
             if exc.code != 404 and exc.code != 503:
                 print(
-                    'Gravatar fetch failed with an unexpected %s HTTP error' %
-                    exc.code)
-            cache.set(gravatar_url, 'err', 30)
+                    "Gravatar fetch failed with an unexpected %s HTTP error: %s"
+                    % (exc.code, gravatar_url)
+                )
+            cache.set(gravatar_url, "err", 30)
             return redir_default(default)
         except URLError as exc:
-            print(
-                'Gravatar fetch failed with URL error: %s' %
-                exc.reason)
-            cache.set(gravatar_url, 'err', 30)
+            print("Gravatar fetch failed with URL error: %s" % exc.reason)
+            cache.set(gravatar_url, "err", 30)
             return redir_default(default)
         except SSLError as exc:
-            print(
-                'Gravatar fetch failed with SSL error: %s' %
-                exc.reason)
-            cache.set(gravatar_url, 'err', 30)
+            print("Gravatar fetch failed with SSL error: %s" % exc.reason)
+            cache.set(gravatar_url, "err", 30)
             return redir_default(default)
         try:
             data = BytesIO(gravatarimagedata.read())
             img = Image.open(data)
             data.seek(0)
             response = HttpResponse(
-                data.read(),
-                content_type='image/%s' % file_format(img.format))
-            response['Cache-Control'] = 'max-age=%i' % CACHE_IMAGES_MAX_AGE
+                data.read(), content_type="image/%s" % file_format(img.format)
+            )
+            response["Cache-Control"] = "max-age=%i" % CACHE_IMAGES_MAX_AGE
             return response
 
         except ValueError as exc:
-            print('Value error: %s' % exc)
+            print("Value error: %s" % exc)
             return redir_default(default)
 
         # We shouldn't reach this point... But make sure we do something
@@ -401,14 +428,17 @@ class GravatarProxyView(View):
 
 
 class StatsView(TemplateView, JsonResponse):
-    '''
+    """
     Return stats
-    '''
-    def get(self, request, *args, **kwargs):  # pylint: disable=too-many-branches,too-many-statements,too-many-locals,no-self-use,unused-argument,too-many-return-statements
+    """
+
+    def get(
+        self, request, *args, **kwargs
+    ):  # pylint: disable=too-many-branches,too-many-statements,too-many-locals,no-self-use,unused-argument,too-many-return-statements
         retval = {
-            'users': User.objects.all().count(),
-            'mails': ConfirmedEmail.objects.all().count(),
-            'openids': ConfirmedOpenId.objects.all().count(),  # pylint: disable=no-member
+            "users": User.objects.all().count(),
+            "mails": ConfirmedEmail.objects.all().count(),
+            "openids": ConfirmedOpenId.objects.all().count(),  # pylint: disable=no-member
         }
 
         return JsonResponse(retval)