Skip to content

Commit

Permalink
Merge branch 'release/1.3.1'
Browse files Browse the repository at this point in the history
  • Loading branch information
pmayer committed Jan 26, 2023
2 parents 6a234ae + 1f4caa9 commit 6c2a032
Show file tree
Hide file tree
Showing 8 changed files with 168 additions and 58 deletions.
4 changes: 3 additions & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -1,8 +1,10 @@
# pre-commit configuration: install both commit hooks and the
# conventional-commit-message check by default.
default_install_hook_types: [pre-commit, commit-msg]
default_language_version:
  python: python3
# Generated/third-party files that hooks must not touch.
exclude: (/migrations/|manage.py|docs/source/conf.py)
repos:
  - repo: https://github.com/base-angewandte/pre-commit-hooks
    # Pinned release tag (the scrape also contained the superseded
    # moving tag `py3.7`; only the pinned rev is kept).
    rev: 1.1.1-py3.7
    hooks:
      - id: base-hooks
      - id: base-commit-msg-hooks
13 changes: 13 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,18 @@
# Changelog

## 1.3.1

### Added

- Added `all` parameter to `/api/v1/user/{id}/data/` to be able to also return entries in which the user isn't a contributor

### Changed

- **BREAKING**: Updated pre-commit configuration to also enforce the use of conventional commit messages
- **BREAKING**: Changed redirect response from 301 to 308
- **BREAKING**: Default value for `data` is now an empty dict
- Install exiftool via github instead of sourceforge in docker image

## 1.3

### Added
Expand Down
15 changes: 9 additions & 6 deletions src/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -18,16 +18,19 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
webp \
&& rm -rf /var/lib/apt/lists/*

# Needed so the pipefail option below makes the wget|jq pipeline fail loudly.
SHELL ["/bin/bash", "-o", "pipefail", "-c"]

# Install the latest exiftool release from its GitHub mirror (the GitHub
# tarball unpacks into exiftool-<version>, hence EXIFTOOL_FILENAME).
# hadolint ignore=DL3003
RUN EXIFTOOL_VERSION="$(wget -qO- https://api.github.com/repos/exiftool/exiftool/tags | jq -r '.[0].name')" \
    EXIFTOOL_FILENAME=exiftool-"${EXIFTOOL_VERSION}" \
    && wget --progress=dot:giga https://github.com/exiftool/exiftool/archive/refs/tags/"${EXIFTOOL_VERSION}".tar.gz -O "${EXIFTOOL_FILENAME}".tar.gz \
    && tar xvf "${EXIFTOOL_FILENAME}".tar.gz \
    && cd "${EXIFTOOL_FILENAME}"/ \
    && perl Makefile.PL \
    && make test \
    && make install \
    && cd .. \
    && rm -rf "${EXIFTOOL_FILENAME}"*

# hadolint ignore=DL3059
RUN mkdir /logs
Expand Down
41 changes: 41 additions & 0 deletions src/api/__init__.py
Original file line number Diff line number Diff line change
@@ -1 +1,42 @@
import logging

from rest_framework.exceptions import APIException
from rest_framework.views import exception_handler

default_app_config = 'api.apps.ApiConfig'

logger = logging.getLogger(__name__)


class PermanentRedirect(APIException):
    """API exception signalling that a resource lives under another pk.

    ``to`` carries the replacement pk; the project's exception handler
    translates this into a 308 response with a ``Location`` header.
    """

    # TODO: in the current version rest_framework.status has a
    # status.HTTP_307_TEMPORARY_REDIRECT, but it is missing a
    # status.HTTP_308_PERMANENT_REDIRECT, which is available in a newer
    # version of rest_framework. Update this, as soon as rest_framework
    # is updated.
    # status_code = status.HTTP_308_PERMANENT_REDIRECT
    status_code = 308
    default_detail = 'This resource has moved'
    default_code = 'permanent_redirect'

    def __init__(self, detail=None, to=None):
        # ``to`` is effectively required; keyword-with-default only so the
        # (detail, to) order matches APIException's signature.
        if to is None:
            raise TypeError("PermanentRedirect is missing required argument 'to'")
        self.to = to
        super().__init__(self.default_detail if detail is None else detail)


def portfolio_exception_handler(exc, context):
    """DRF exception handler that enriches ``PermanentRedirect`` responses.

    Delegates to REST framework's default handler first to obtain the
    standard error response, then — for ``PermanentRedirect`` — rewrites
    the request path so clients learn the resource's new location.
    """
    response = exception_handler(exc, context)

    if isinstance(exc, PermanentRedirect):
        # Swap the stale pk in the requested path for the redirect target.
        stale_pk = context['kwargs']['pk']
        new_path = context['request']._request.path.replace(stale_pk, exc.to)
        response.data['to'] = new_path
        # TODO: update to response.headers['Location'] once django is updated to >= 3.2
        response['Location'] = new_path

    return response
125 changes: 78 additions & 47 deletions src/api/views.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import json
import operator
from functools import reduce

Expand Down Expand Up @@ -40,6 +41,7 @@
from media_server.models import get_media_for_entry, update_media_order_for_entry
from media_server.utils import get_free_space_for_user

from . import PermanentRedirect
from .mixins import CountModelMixin, CreateListMixin
from .serializers.entry import EntrySerializer
from .serializers.relation import RelationSerializer
Expand Down Expand Up @@ -164,7 +166,7 @@ def retrieve(self, request, *args, **kwargs):
except Http404 as nfe:
reverse_pk = kwargs.get('pk', '')[::-1]
if self.get_queryset().filter(pk=reverse_pk).exists():
return Response(reverse_pk, status=301)
raise PermanentRedirect(to=reverse_pk) from nfe
else:
raise nfe
serializer = self.get_serializer(instance)
Expand Down Expand Up @@ -235,7 +237,14 @@ def media_order(self, request, pk=None, *args, **kwargs):
@action(detail=False, filter_backends=[], pagination_class=None)
def types(self, request, *args, **kwargs):
    """Return all distinct entry types, sorted by their localized label.

    Entries whose ``type`` is NULL or an empty dict are excluded so the
    label lookup in the sort key cannot operate on empty values.
    (The scrape also contained the superseded one-line queryset without
    the empty-dict exclusion; only the post-change version is kept.)
    """
    language = get_language() or 'en'
    content = (
        self.get_queryset()
        .exclude(type__isnull=True)
        .exclude(type={})
        .values_list('type', flat=True)
        .distinct()
        .order_by()  # clear default ordering so distinct() applies to the values alone
    )
    return Response(sorted(content, key=lambda x: x.get('label', {}).get(language, '').lower()))

def get_queryset(self):
Expand Down Expand Up @@ -337,10 +346,21 @@ def user_information(request, *args, **kwargs):
operation_id='api_v1_user_data',
responses={
200: openapi.Response(''),
400: openapi.Response('Bad Request'),
403: openapi.Response('Access not allowed'),
404: openapi.Response('User not found'),
},
manual_parameters=[authorization_header_paramter, language_header_parameter],
manual_parameters=[
authorization_header_paramter,
language_header_parameter,
openapi.Parameter(
'all',
openapi.IN_QUERY,
required=False,
type=openapi.TYPE_BOOLEAN,
default=False,
),
],
)
@api_view(['GET'])
@authentication_classes((TokenAuthentication,))
Expand All @@ -360,7 +380,7 @@ def entry_to_data(entry):
'id': entry.pk,
'title': entry.title,
'subtitle': entry.subtitle or None,
'type': entry.type.get('label').get(lang),
'type': entry.type.get('label').get(lang) if entry.type else None,
'role': entry.owner_role_display,
'location': entry.location_display,
'year': entry.year_display,
Expand All @@ -374,34 +394,46 @@ def to_data_dict(label, data, sort=True):
'data': data,
}

published_entries_query = Entry.objects.filter(owner=user, published=True, type__isnull=False,).filter(
Q(data__contains={'architecture': [{'source': user.username}]})
| Q(data__contains={'authors': [{'source': user.username}]})
| Q(data__contains={'artists': [{'source': user.username}]})
| Q(data__contains={'winners': [{'source': user.username}]})
| Q(data__contains={'granted_by': [{'source': user.username}]})
| Q(data__contains={'jury': [{'source': user.username}]})
| Q(data__contains={'music': [{'source': user.username}]})
| Q(data__contains={'conductors': [{'source': user.username}]})
| Q(data__contains={'composition': [{'source': user.username}]})
| Q(data__contains={'organisers': [{'source': user.username}]})
| Q(data__contains={'lecturers': [{'source': user.username}]})
| Q(data__contains={'design': [{'source': user.username}]})
| Q(data__contains={'commissions': [{'source': user.username}]})
| Q(data__contains={'editors': [{'source': user.username}]})
| Q(data__contains={'publishers': [{'source': user.username}]})
| Q(data__contains={'curators': [{'source': user.username}]})
| Q(data__contains={'fellow_scholar': [{'source': user.username}]})
| Q(data__contains={'funding': [{'source': user.username}]})
| Q(data__contains={'organisations': [{'source': user.username}]})
| Q(data__contains={'project_lead': [{'source': user.username}]})
| Q(data__contains={'project_partnership': [{'source': user.username}]})
| Q(data__contains={'software_developers': [{'source': user.username}]})
| Q(data__contains={'directors': [{'source': user.username}]})
| Q(data__contains={'contributors': [{'source': user.username}]})
)
try:
all_parameter = json.loads(request.query_params.get('all', 'false'))
except json.JSONDecodeError as e:
raise exceptions.ParseError() from e

published_entries_query = Entry.objects.filter(owner=user, published=True)

if not all_parameter:
published_entries_query = (
published_entries_query.exclude(type__isnull=True)
.exclude(type={})
.filter(
Q(data__contains={'architecture': [{'source': user.username}]})
| Q(data__contains={'authors': [{'source': user.username}]})
| Q(data__contains={'artists': [{'source': user.username}]})
| Q(data__contains={'winners': [{'source': user.username}]})
| Q(data__contains={'granted_by': [{'source': user.username}]})
| Q(data__contains={'jury': [{'source': user.username}]})
| Q(data__contains={'music': [{'source': user.username}]})
| Q(data__contains={'conductors': [{'source': user.username}]})
| Q(data__contains={'composition': [{'source': user.username}]})
| Q(data__contains={'organisers': [{'source': user.username}]})
| Q(data__contains={'lecturers': [{'source': user.username}]})
| Q(data__contains={'design': [{'source': user.username}]})
| Q(data__contains={'commissions': [{'source': user.username}]})
| Q(data__contains={'editors': [{'source': user.username}]})
| Q(data__contains={'publishers': [{'source': user.username}]})
| Q(data__contains={'curators': [{'source': user.username}]})
| Q(data__contains={'fellow_scholar': [{'source': user.username}]})
| Q(data__contains={'funding': [{'source': user.username}]})
| Q(data__contains={'organisations': [{'source': user.username}]})
| Q(data__contains={'project_lead': [{'source': user.username}]})
| Q(data__contains={'project_partnership': [{'source': user.username}]})
| Q(data__contains={'software_developers': [{'source': user.username}]})
| Q(data__contains={'directors': [{'source': user.username}]})
| Q(data__contains={'contributors': [{'source': user.username}]})
)
)

cache_key = f'user_data__{pk}_{lang}'
cache_key = f'user_data__{pk}_{lang}_{all_parameter}'

cache_time, entries_count, usr_data = cache.get(cache_key, (None, None, None))

Expand Down Expand Up @@ -574,7 +606,7 @@ def to_data_dict(label, data, sort=True):
published_entries = published_entries_query.order_by('title')

for e in published_entries:
entry_type = e.type.get('source')
entry_type = e.type.get('source') if e.type else None

if entry_type in DOCUMENT_TYPES:
e_data = document_schema.load(e.data).data
Expand Down Expand Up @@ -921,7 +953,7 @@ def to_data_dict(label, data, sort=True):
videos_data.append(entry_to_data(e))
# General Activites
else:
general_activities_data.append(e)
general_activities_data.append(entry_to_data(e))

# Publications
publications_data = []
Expand Down Expand Up @@ -1021,6 +1053,17 @@ def get_media_for_entry_public(entry):
return media


def get_entry_data(entry):
    """Build the public API representation of a published entry.

    Combines the entry's display data with its public media, its published
    relations in both directions, and its showroom id.
    """
    data = entry.data_display
    data['media'] = get_media_for_entry_public(entry.pk)
    parents = entry.related_to.filter(published=True)
    targets = entry.relations.filter(published=True)
    data['relations'] = {
        'parents': [{'id': rel.pk, 'title': rel.title} for rel in parents],
        'to': [{'id': rel.pk, 'title': rel.title} for rel in targets],
    }
    data['showroom_id'] = entry.showroom_id
    return data


@swagger_auto_schema(
methods=['get'],
operation_id='api_v1_user_entry_data',
Expand All @@ -1047,13 +1090,7 @@ def user_entry_data(request, pk=None, entry=None, *args, **kwargs):
except Entry.DoesNotExist as e:
raise exceptions.NotFound(_('Entry does not exist')) from e

ret = e.data_display
ret['media'] = get_media_for_entry_public(entry)
ret['relations'] = {
'parents': [{'id': r.pk, 'title': r.title} for r in e.related_to.filter(published=True)],
'to': [{'id': r.pk, 'title': r.title} for r in e.relations.filter(published=True)],
}

ret = get_entry_data(e)
return Response(ret)


Expand All @@ -1076,13 +1113,7 @@ def entry_data(request, pk=None, *args, **kwargs):
except Entry.DoesNotExist as e:
raise exceptions.NotFound(_('Entry does not exist')) from e

ret = e.data_display
ret['media'] = get_media_for_entry_public(pk)
ret['relations'] = {
'parents': [{'id': r.pk, 'title': r.title} for r in e.related_to.filter(published=True)],
'to': [{'id': r.pk, 'title': r.title} for r in e.relations.filter(published=True)],
}

ret = get_entry_data(e)
return Response(ret)


Expand Down
19 changes: 19 additions & 0 deletions src/core/migrations/0019_auto_20221121_1114.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
# Generated by Django 2.2.28 on 2022-11-21 10:14

import django.contrib.postgres.fields.jsonb
from django.db import migrations


class Migration(migrations.Migration):
    """Make ``Entry.data`` non-null with an empty-dict default.

    Mirrors the model change in ``core/models.py`` where ``data`` was
    switched from ``JSONField(blank=True, null=True)`` to
    ``JSONField(default=dict)``.
    """

    dependencies = [
        ('core', '0018_auto_20220422_0809'),
    ]

    operations = [
        migrations.AlterField(
            model_name='entry',
            name='data',
            # default=dict (the callable) yields a fresh empty dict per row.
            field=django.contrib.postgres.fields.jsonb.JSONField(default=dict),
        ),
    ]
8 changes: 4 additions & 4 deletions src/core/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ class Entry(AbstractBaseModel):
)
texts = JSONField(verbose_name=get_preflabel_lazy('text'), validators=[validate_texts], blank=True, null=True)
published = models.BooleanField(default=False)
data = JSONField(blank=True, null=True)
data = JSONField(default=dict)
relations = models.ManyToManyField('self', through='Relation', symmetrical=False, related_name='related_to')

reference = models.CharField(max_length=255, blank=True, null=True, default=None)
Expand All @@ -54,23 +54,23 @@ def icon(self):

@property
def location_display(self):
    """Human-readable location of this entry, or None.

    Delegates formatting to the schema matching ``type['source']``;
    returns None when the entry has no type, no known schema, or no data.
    (The scrape contained both the old unguarded condition and the new
    null-safe one; only the null-safe version is kept.)
    """
    # ``type`` may be None or empty, so guard before calling .get() on it.
    if self.type and self.type.get('source'):
        schema = get_schema(self.type['source'])
        data = self.data
        if schema and data:
            return schema().location_display(data)

@property
def owner_role_display(self):
    """Display string for the owner's role in this entry, or None.

    Resolved via the schema for ``type['source']``; returns None when the
    entry has no type, no known schema, or no data. (The scrape contained
    both the old unguarded condition and the new null-safe one; only the
    null-safe version is kept.)
    """
    # ``type`` may be None or empty, so guard before calling .get() on it.
    if self.type and self.type.get('source'):
        schema = get_schema(self.type['source'])
        data = self.data
        if schema and data:
            return schema().role_display(data, self.owner.username)

@property
def year_display(self):
if self.type.get('source'):
if self.type and self.type.get('source'):
schema = get_schema(self.type['source'])
data = self.data
if schema and data:
Expand Down
1 change: 1 addition & 0 deletions src/portfolio/settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -379,6 +379,7 @@
'DEFAULT_VERSIONING_CLASS': 'rest_framework.versioning.URLPathVersioning',
'DEFAULT_VERSION': 'v1',
'ORDERING_PARAM': 'sort',
'EXCEPTION_HANDLER': 'api.portfolio_exception_handler',
}

SWAGGER_SETTINGS = {'SECURITY_DEFINITIONS': {}}
Expand Down

0 comments on commit 6c2a032

Please sign in to comment.