import json
from datetime import timedelta

import stripe
from django.conf import settings
from django.http import HttpResponse, HttpResponseNotAllowed, HttpResponseNotFound, HttpResponseBadRequest, FileResponse
from django.shortcuts import get_object_or_404, redirect
from django.urls import reverse
from django.utils import timezone
from django.utils.text import slugify
from rest_framework import viewsets, permissions
from rest_framework.decorators import action
from rest_framework.response import Response

from .models import Habitat, HabitatImage, Plant, EcologicalDistrictLayer, SoilOrder, Zone, Questionnaire, ActivationKey, ActivationKeySet
from .serializers import HabitatImageSerializer, HabitatSerializer, PlantSerializer, SoilOrderSerializer, EcologicalDistrictLayerSerializer, AddressSerializer, ZoneSerializer, QuestionnaireSerializer
from .filters import *
from .wms_utils import get_address_from_coordinates, search_address
from .resource_generation_utils import generate_csv, get_filter_values, serialize_plants_queryset, create_planting_guide_pdf, PLANTING_GUIDE_PDF_FILENAME, CSV_FILENAME, storage
from .redis import redis_client


class PlantViewSet(viewsets.ModelViewSet):
    """ Filtered viewset for plants.
    """
    queryset = Plant.objects.all()
    serializer_class = PlantSerializer

    def get_queryset(self):
        """ Filtering plant query set by query parameters in the URL.

        (May want to eventually use django filters to break up the logic...)
        """
        return get_filtered_plants(self.request)


class SoilOrderViewSet(viewsets.ModelViewSet):
    """ Filtered viewset for soil details.
    """
    serializer_class = SoilOrderSerializer

    def get_queryset(self):
        """ Filtering soil order query set by coordinate parameters in the URL.
        """
        coordinates = self.request.query_params.get('coordinates')
        if coordinates is not None:
            return soil_order_coordinate_filter(coordinates)

        return SoilOrder.objects.all()


class EcologicalDistrictViewSet(viewsets.ModelViewSet):
    """ Filtered viewset for ecological district/region details.
    """
    serializer_class = EcologicalDistrictLayerSerializer

    def get_queryset(self):
        """ Filtering ecological district/region query set by coordinate parameters in the URL.
        """
        coordinates = self.request.query_params.get('coordinates')
        if coordinates is not None:
            return ecological_district_coordinate_filter(coordinates)

        return EcologicalDistrictLayer.objects.all()


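# Illustrative requests for the two coordinate-filtered viewsets above. The paths and the
# exact coordinate string format are assumptions (they depend on the project's router
# registration and on the coordinate filters in .filters):
#   GET /api/soil-orders/?coordinates=<x>,<y>            -> soil orders at that point
#   GET /api/ecological-districts/?coordinates=<x>,<y>   -> ecological districts at that point
# Without a `coordinates` parameter both viewsets return their full querysets.

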
class LINZPropertyViewSet(viewsets.ViewSet):
    """ Viewset for LINZ property details: address search and address lookup by coordinates.
    """

    def list(self, request):
        coordinates = self.request.query_params.get('coordinates')
        address = self.request.query_params.get('search')

        if address is not None:
            results = search_address(address)
            return Response(results)
        elif coordinates is not None:
            address_data = get_address_from_coordinates(coordinates)
            serializer = AddressSerializer(address_data)
            return Response(serializer.data)
        else:
            return HttpResponseBadRequest("No parameters given.")


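# Illustrative requests for LINZPropertyViewSet above; the path is an assumption that depends
# on how the viewset is registered with the router:
#   GET /api/linz-properties/?search=12+Example+Street   -> address search results
#   GET /api/linz-properties/?coordinates=<x>,<y>        -> serialized address for that point
#   GET /api/linz-properties/                            -> 400 "No parameters given."

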
class AuckCHCHRegionInformation(viewsets.ViewSet):
    """ Viewset reporting whether a coordinate falls inside the Auckland or Christchurch regions.
    """

    def list(self, request):
        coordinates = self.request.query_params.get('coordinates')
        if coordinates is not None:
            in_chch = is_in_christchurch(coordinates)
            in_auckland = is_in_auckland(coordinates)
            region_details = {"in_chch": in_chch, "in_auckland": in_auckland}
            return Response(region_details)
        else:
            return HttpResponseBadRequest("No coordinate given.")


class HabitatViewSet(viewsets.ModelViewSet):
    """ Viewset for all habitats.
    """
    serializer_class = HabitatSerializer
    queryset = Habitat.objects.all()


class ZoneViewSet(viewsets.ModelViewSet):
    """ Viewset for all zones.
    """
    serializer_class = ZoneSerializer
    queryset = Zone.objects.all()


class HabitatImageViewSet(viewsets.ViewSet):
    """ Viewset for a habitat image.
    """

    def list(self, request):
        queryset = HabitatImage.objects.all()
        serializer = HabitatImageSerializer(queryset, many=True)
        return Response(serializer.data)

    def retrieve(self, request, pk=None):
        queryset = HabitatImage.objects.all()
        habitat_image = get_object_or_404(queryset, pk=pk)
        serializer = HabitatImageSerializer(habitat_image)
        return Response(serializer.data)


class CSVDownloadView(viewsets.ViewSet):
    """ Viewset for downloading a CSV plant list and filters.
    """

    def list(self, request, *args, **kwargs):
        filtered_plants = get_filtered_plants(request)
        plant_data = serialize_plants_queryset(filtered_plants)
        filename = f"plants_{slugify(timezone.now())}.csv"

        generate_csv(plant_data, filename)

        return FileResponse(
            storage.open(filename, 'rb'),
            filename='plants.csv',
            content_type='text/csv',
        )


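# Note: CSVDownloadView above writes the generated CSV to the shared `storage` backend under a
# timestamped name, then streams that file back with the fixed download name 'plants.csv'.
# PDFDownloadView below follows the same pattern for the planting guide PDF.

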
class PDFDownloadView(viewsets.ViewSet):
    """ Viewset for downloading a PDF planting guide with appended filter and plant list info.
    """

    def list(self, request, *args, **kwargs):
        filter_data = get_filter_values(request.query_params)
        filtered_plants = get_filtered_plants(request)
        plant_data = serialize_plants_queryset(filtered_plants)
        filename = f"planting_guide_{slugify(timezone.now())}.pdf"

        create_planting_guide_pdf(filter_data, plant_data, filename)

        return FileResponse(
            storage.open(filename, 'rb'),
            filename=PLANTING_GUIDE_PDF_FILENAME,
            content_type='application/pdf',
        )


class QuestionnaireViewSet(viewsets.ModelViewSet):
    serializer_class = QuestionnaireSerializer
    queryset = Questionnaire.objects.all()
    http_method_names = ("post",)
    permission_classes = [permissions.AllowAny]


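# QuestionnaireViewSet above is effectively write-only: only POST is exposed and AllowAny is set,
# so anonymous visitors can submit questionnaire responses but cannot list or retrieve them.

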
def validate_key(request):
    """Checks if a given key value is valid"""
    if request.method == "GET":
        data = request.GET
    elif request.method != "POST":
        # HttpResponseNotAllowed requires the list of permitted methods.
        return HttpResponseNotAllowed(["GET", "POST"])
    else:
        try:
            # Accept either form-encoded or JSON POST bodies.
            data = request.POST or json.loads(request.body)
        except json.JSONDecodeError as e:
            return HttpResponseBadRequest(e)

    key = data.get("key")

    if not key:
        return HttpResponseBadRequest("'key' not specified")

    try:
        if ActivationKey.objects.get(key=key).remaining_activations > 0:
            return HttpResponse()
    except ActivationKey.DoesNotExist:
        pass

    return HttpResponseNotFound()


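# validate_key above answers with bare status codes rather than a JSON body. Illustrative calls
# (the path is an assumption that depends on the URL conf):
#   GET  /validate-key/?key=<key>          -> 200 if the key exists with activations remaining
#   POST /validate-key/ {"key": "<key>"}   -> same check, accepting form-encoded or JSON data
#   No key supplied -> 400; unknown or exhausted key -> 404.

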
def activate_key(request):
    """Adds a single activation to a given key if a Stripe payment has succeeded"""
    redirect_url = "/apply"

    try:
        key = request.GET['key']
        # Atomically fetch and remove the pending Stripe session id so a key can only be
        # activated once; .decode() raises AttributeError if the key is unknown or has expired.
        stripe_session_id = redis_client.getdel(key).decode()
        stripe_session = stripe.checkout.Session.retrieve(stripe_session_id)
        status = stripe_session.payment_status
    except (KeyError, AttributeError):
        return redirect(redirect_url)

    match status:
        case "paid":
            ActivationKey.objects.create(
                key=key,
                key_set=ActivationKeySet.objects.get_or_create(name="Stripe", size=0)[0],
            )
            redirect_url += "?key=" + key
        case "open":
            stripe.checkout.Session.expire(stripe_session_id)

    return redirect(redirect_url)


def purchase_key(request):
    """Generate a prospective key and redirect to the Stripe payment portal"""
    stripe.api_key = settings.STRIPE_API_KEY

    key = ActivationKey.key_default()
    redirect_url = request.build_absolute_uri(reverse(activate_key)) + f"?key={key}"
    stripe_session = stripe.checkout.Session.create(
        line_items=[
            {
                "price": settings.STRIPE_PRICE_ID,
                "quantity": 1,
            },
        ],
        automatic_tax={'enabled': True},
        invoice_creation={
            'enabled': True,
            'invoice_data': {
                'description': f'Your activation key is {key}',
                'rendering_options': {'amount_tax_display': 'include_inclusive_tax'},
                'footer': 'BioSphere Capital Limited',
            },
        },
        mode='payment',
        success_url=redirect_url,
        cancel_url=redirect_url,
    )
    redis_client.setex(key, timedelta(hours=8), stripe_session.id)

    return redirect(stripe_session.url)


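# Overall Stripe flow implemented above: purchase_key generates a prospective activation key,
# creates a Stripe Checkout session, stores the session id in Redis under that key for 8 hours,
# and redirects the user to Stripe. Stripe then returns the user to activate_key (as both
# success_url and cancel_url), which pops the session id from Redis and creates the ActivationKey
# record only when the session's payment_status is "paid"; an "open" session is expired instead.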