diff --git a/.env_example b/.env_example index 5a2e327..4f5acf1 100644 --- a/.env_example +++ b/.env_example @@ -5,6 +5,8 @@ DJANGO_SECRET_KEY= PROJECT_FULL_NAME=gigamaps PROJECT_SHORT_NAME=gigamaps +GIGAMAPS_LOG_LEVEL = DEBUG + SUPPORT_EMAIL_ID=giga@mail.unicef.org SUPPORT_PHONE_NUMBER=1234567890 @@ -60,7 +62,6 @@ CACHE_CONTROL_MAX_AGE_FOR_FE=14400 API_KEY_ADMIN_DASHBOARD_URL=http://localhost:9500/admin/api-keys - EMAIL_URL= SERVER_EMAIL_SIGNATURE=Gigamaps SERVER_EMAIL=gigamaps@mail.unicef.org diff --git a/.flake8 b/.flake8 index c788e06..245c5bc 100644 --- a/.flake8 +++ b/.flake8 @@ -1,7 +1,7 @@ [flake8] max-line-length = 120 max-complexity = 30 -exclude = proco_data_migrations, migrations, config/settings, manage.py, Exceptions.py, venv, dailycheckapp_contact, realtime_dailycheckapp, realtime_unicef +exclude = proco_data_migrations, migrations, config/settings, manage.py, Exceptions.py, venv ignore = ; PyFlakes errors ; F405 name may be undefined, or defined from star imports: module diff --git a/Pipfile b/Pipfile index 732dd18..9b6f67d 100644 --- a/Pipfile +++ b/Pipfile @@ -79,6 +79,8 @@ djangorestframework-jwt = "==1.11.0" jsonfield = "==2.0.2" azure-search-documents = "==11.3.0" django-prometheus = "==2.2.0" +celery-redbeat = "==2.2.0" +flower = "==2.0.1" [requires] python_version = "3.8" diff --git a/Pipfile.lock b/Pipfile.lock index 176140e..98416a0 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -676,6 +676,14 @@ "index": "pypi", "version": "==3.0.0" }, + "flower": { + "hashes": [ + "sha256:5ab717b979530770c16afb48b50d2a98d23c3e9fe39851dcf6bc4d01845a02a0", + "sha256:9db2c621eeefbc844c8dd88be64aef61e84e2deb29b271e02ab2b5b9f01068e2" + ], + "index": "pypi", + "version": "==2.0.1" + }, "futures": { "hashes": [ "sha256:3a44f286998ae64f0cc083682fcfec16c406134a81a589a5de445d7bb7c2751b", @@ -725,6 +733,14 @@ "index": "pypi", "version": "==21.2.0" }, + "humanize": { + "hashes": [ + "sha256:06b6eb0293e4b85e8d385397c5868926820db32b9b654b932f57fa41c23c9978", + "sha256:39e7ccb96923e732b5c2e27aeaa3b10a8dfeeba3eb965ba7b74a3eb0e30040a6" + ], + "markers": "python_version >= '3.8'", + "version": "==4.10.0" + }, "idna": { "hashes": [ "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca", @@ -1357,6 +1373,23 @@ "markers": "python_version >= '3.8'", "version": "==3.2.0" }, + "tornado": { + "hashes": [ + "sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8", + "sha256:25486eb223babe3eed4b8aecbac33b37e3dd6d776bc730ca14e1bf93888b979f", + "sha256:454db8a7ecfcf2ff6042dde58404164d969b6f5d58b926da15e6b23817950fc4", + "sha256:613bf4ddf5c7a95509218b149b555621497a6cc0d46ac341b30bd9ec19eac7f3", + "sha256:6d5ce3437e18a2b66fbadb183c1d3364fb03f2be71299e7d10dbeeb69f4b2a14", + "sha256:8ae50a504a740365267b2a8d1a90c9fbc86b780a39170feca9bcc1787ff80842", + "sha256:92d3ab53183d8c50f8204a51e6f91d18a15d5ef261e84d452800d4ff6fc504e9", + "sha256:a02a08cc7a9314b006f653ce40483b9b3c12cda222d6a46d4ac63bb6c9057698", + "sha256:b24b8982ed444378d7f21d563f4180a2de31ced9d8d84443907a0a64da2072e7", + "sha256:d9a566c40b89757c9aa8e6f032bcdb8ca8795d7c1a9762910c722b1635c9de4d", + "sha256:e2e20b9113cd7293f164dc46fffb13535266e713cdb87bd2d15ddb336e96cfc4" + ], + "markers": "python_version >= '3.8'", + "version": "==6.4.1" + }, "traitlets": { "hashes": [ "sha256:f14949d23829023013c47df20b4a76ccd1a85effb786dc060f34de7948361b33", diff --git a/celery-dev.sh b/celery-dev.sh new file mode 100755 index 0000000..7ef115f --- /dev/null +++ b/celery-dev.sh @@ -0,0 +1,21 @@ +#!/usr/bin/env bash +set -ex + +# export 
environment variables to make them available in ssh session +for var in $(compgen -e); do + echo "export $var=${!var}" >> /etc/profile +done + +export FLASK_APP=hello.py +pipenv run python -m flask run --host 0.0.0.0 --port 8000 & + +if $ENABLED_FLOWER_METRICS; then + echo "Starting worker ..." + pipenv run celery --app=proco.taskapp worker --concurrency=2 --time-limit=300 --soft-time-limit=300 $* & + + echo "Starting flower ..." + pipenv run celery --app=proco.taskapp flower +else + echo "Starting worker ..." + pipenv run celery --app=proco.taskapp worker --concurrency=2 --time-limit=300 --soft-time-limit=300 $* +fi diff --git a/celery.sh b/celery.sh old mode 100644 new mode 100755 index bf8bd65..7072c5d --- a/celery.sh +++ b/celery.sh @@ -14,4 +14,14 @@ pipenv run python -m flask run --host 0.0.0.0 --port 8000 & # pipenv run celery -A proco.taskapp worker $* # --logfile=/code/celeryd-%n.log --loglevel=DEBUG -pipenv run celery --app=proco.taskapp worker --concurrency=3 --time-limit=300 --soft-time-limit=60 $* + +if $ENABLED_FLOWER_METRICS; then + echo "Starting worker ..." + pipenv run celery --app=proco.taskapp worker --concurrency=3 --time-limit=300 --soft-time-limit=60 $* & + + echo "Starting flower ..." + pipenv run celery --app=proco.taskapp flower +else + echo "Starting worker ..." + pipenv run celery --app=proco.taskapp worker --concurrency=3 --time-limit=300 --soft-time-limit=60 $* +fi diff --git a/celerybeat.sh b/celerybeat.sh index b5d7867..e4d346e 100644 --- a/celerybeat.sh +++ b/celerybeat.sh @@ -14,4 +14,4 @@ pipenv run python -m flask run --host 0.0.0.0 --port 8000 & # pipenv run celery -A proco.taskapp beat $* # --logfile=/code/celeryd-%n.log --loglevel=DEBUG -pipenv run celery --app=proco.taskapp beat $* +pipenv run celery --app=proco.taskapp beat --scheduler=redbeat.RedBeatScheduler $* diff --git a/config/admin.py b/config/admin.py deleted file mode 100644 index e71b782..0000000 --- a/config/admin.py +++ /dev/null @@ -1,28 +0,0 @@ -from django.conf.urls import url -from django.contrib import admin, messages -from django.shortcuts import redirect -from django.utils.translation import ugettext as _ - -from proco.utils.cache import cache_manager -from proco.utils.tasks import update_all_cached_values - - -class CustomAdminSite(admin.AdminSite): - site_header = _('Project Connect') - site_title = _('Project Connect') - index_title = _('Welcome to Project Connect') - index_templates = 'admin/index.html' - - def get_urls(self): - urls = super().get_urls() - urls += [ - url(r'^invalidate-cache/$', self.admin_view(self.invalidate_cache), name='admin_invalidate_cache'), - ] - return urls - - def invalidate_cache(self, request): - cache_manager.invalidate() - update_all_cached_values.delay() - - messages.success(request, 'Cache invalidation started. 
Maps will be updated in a few minutes.') - return redirect('admin:index') diff --git a/config/apps.py b/config/apps.py deleted file mode 100644 index 5cf7c6e..0000000 --- a/config/apps.py +++ /dev/null @@ -1,5 +0,0 @@ -from django.contrib.admin.apps import AdminConfig - - -class CustomAdminConfig(AdminConfig): - default_site = 'config.admin.CustomAdminSite' diff --git a/config/settings/base.py b/config/settings/base.py index d2ea53a..1c2637c 100644 --- a/config/settings/base.py +++ b/config/settings/base.py @@ -1,3 +1,6 @@ +import json +import os +import sys import warnings import environ @@ -34,7 +37,7 @@ # -------------------------------------------------------------------------- DJANGO_APPS = [ - 'config.apps.CustomAdminConfig', + 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', @@ -73,12 +76,9 @@ 'proco.locations', 'proco.connection_statistics', 'proco.contact', - 'proco.dailycheckapp_contact', 'proco.background', - 'proco.realtime_unicef', - 'proco.realtime_dailycheckapp', 'proco.proco_data_migrations', - 'proco.data_sources' + 'proco.data_sources', ] INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS @@ -342,7 +342,6 @@ 'schools', 'background', 'contact', - 'dailycheckapp_contact', 'accounts', 'data_sources', ) @@ -350,7 +349,6 @@ RANDOM_SCHOOLS_DEFAULT_AMOUNT = env('RANDOM_SCHOOLS_DEFAULT_AMOUNT', default=20000) CONTACT_MANAGERS = env.list('CONTACT_MANAGERS', default=['test@test.test']) -DAILYCHECKAPP_CONTACT_MANAGERS = env.list('DAILYCHECKAPP_CONTACT_MANAGERS', default=['test@test.test']) CONSTANCE_REDIS_CONNECTION = env('REDIS_URL', default='redis://localhost:6379/0') CONSTANCE_ADDITIONAL_FIELDS = { @@ -361,7 +359,6 @@ } CONSTANCE_CONFIG = { 'CONTACT_EMAIL': (env('CONTACT_EMAIL', default=''), 'Email to receive contact messages', 'email_input'), - 'DAILYCHECKAPP_CONTACT_EMAIL': ('', 'Email to receive dailycheckapp_contact messages', 'email_input'), } # Cache control headers @@ -436,4 +433,48 @@ INVALIDATE_CACHE_HARD = env('INVALIDATE_CACHE_HARD', default='false') +with open(os.path.join(BASE_DIR, 'proco', 'core', 'resources', 'filters.json')) as filters_json_file: + FILTERS_DATA = json.load(filters_json_file) + # DATABASE_ROUTERS = ["proco.utils.read_db_router.StandbyRouter"] + +GIGAMAPS_LOG_LEVEL = env('GIGAMAPS_LOG_LEVEL', default='INFO') + +# LOGGING +LOGGING = { + 'version': 1, + 'disable_existing_loggers': False, + 'filters': { + 'require_debug_false': { + '()': 'django.utils.log.RequireDebugFalse', + }, + 'require_debug_true': { + '()': 'django.utils.log.RequireDebugTrue', + }, + 'hostname_filter': { + '()': 'proco.core.filters.HostInfoFilter', + }, + }, + 'formatters': { + 'verbose': { + 'format': '%(hostname)s %(hostip)s %(asctime)s %(levelname)s %(pathname)s %(process)d ' + '%(processName)s %(thread)d: %(message)s' + }, + }, + 'handlers': { + 'console': { + 'level': GIGAMAPS_LOG_LEVEL, + 'class': 'logging.StreamHandler', + 'formatter': 'verbose', + 'stream': sys.stderr, + 'filters': ['hostname_filter'], + }, + }, + 'loggers': { + 'gigamaps': { + 'level': GIGAMAPS_LOG_LEVEL, + 'handlers': ['console'], + 'filters': ['hostname_filter'], + }, + }, +} diff --git a/config/settings/dev.py b/config/settings/dev.py index ec2eb03..e2a4970 100644 --- a/config/settings/dev.py +++ b/config/settings/dev.py @@ -28,11 +28,8 @@ try: DATABASES['read_only_database'] = env.db_url(var='READ_ONLY_DATABASE_URL') - DATABASES['realtime'] = env.db_url(var='REALTIME_DATABASE_URL') - DATABASES['dailycheckapp_realtime'] = 
env.db_url(var='REALTIME_DAILYCHECKAPP_DATABASE_URL') except ImproperlyConfigured: - DATABASES['realtime'] = DATABASES['default'] - DATABASES['dailycheckapp_realtime'] = DATABASES['default'] + pass # Email settings # -------------------------------------------------------------------------- diff --git a/config/settings/dev_test.py b/config/settings/dev_test.py new file mode 100644 index 0000000..eed8304 --- /dev/null +++ b/config/settings/dev_test.py @@ -0,0 +1,63 @@ +from kombu import Exchange, Queue # NOQA + +from config.settings.base import * # noqa: F403 + +# Pytest speed improvements configuration +# Disable debugging for test case execution +DEBUG = False +TEMPLATES[0]['OPTIONS']['debug'] = DEBUG + +SECRET_KEY = env('SECRET_KEY', default='test_key') + +ALLOWED_HOSTS = ['*'] +INTERNAL_IPS = [] + +ADMINS = ( + ('Dev Email', env('DEV_ADMIN_EMAIL', default='admin@localhost')), +) +MANAGERS = ADMINS + + +# Database +# https://docs.djangoproject.com/en/1.9/ref/settings/#databases +# -------------------------------------------------------------------------- + +DATABASES = { + 'default': env.db(default='postgis://localhost/proco'), +} + +DATABASES['default']['CONN_MAX_AGE'] = 1000 + +# Email settings +# -------------------------------------------------------------------------- + +# DEFAULT_FROM_EMAIL = 'noreply@example.com' +# SERVER_EMAIL = DEFAULT_FROM_EMAIL +# EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend' + +if CELERY_ENABLED: + MAILING_USE_CELERY = False + +INTERNAL_IPS = ('127.0.0.1',) + +# Sentry config +# ------------- + +SENTRY_ENABLED = False + + +# Mapbox +# -------------- + +MAPBOX_KEY = env('MAPBOX_KEY', default='') + +ANYMAIL['DEBUG_API_REQUESTS'] = False + +ENABLE_AZURE_COGNITIVE_SEARCH = False + +AZURE_CONFIG['COGNITIVE_SEARCH'] = { + 'SEARCH_ENDPOINT': env('SEARCH_ENDPOINT', default='test.endpoint'), + 'SEARCH_API_KEY': env('SEARCH_API_KEY', default='testsearchapikey'), + 'COUNTRY_INDEX_NAME': env('COUNTRY_INDEX_NAME', default='giga_countries'), + 'SCHOOL_INDEX_NAME': env('SCHOOL_INDEX_NAME', default='giga_schools'), +} diff --git a/config/settings/prod.py b/config/settings/prod.py index 03efcd0..f2d29da 100644 --- a/config/settings/prod.py +++ b/config/settings/prod.py @@ -23,8 +23,6 @@ DATABASES = { 'default': env.db(), 'read_only_database': env.db_url(var='READ_ONLY_DATABASE_URL'), - 'realtime': env.db_url(var='REALTIME_DATABASE_URL'), - 'dailycheckapp_realtime': env.db_url(var='REALTIME_DAILYCHECKAPP_DATABASE_URL'), } # Template diff --git a/config/urls.py b/config/urls.py index 7cef051..f2f02e5 100644 --- a/config/urls.py +++ b/config/urls.py @@ -40,7 +40,6 @@ def trigger_error(request): path('statistics/', include('proco.connection_statistics.api_urls')), path('contact/', include('proco.contact.api_urls')), path('about_us/', include('proco.about_us.api_urls')), - path('dailycheckapp_contact/', include('proco.dailycheckapp_contact.api_urls')), path('accounts/', include('proco.accounts.api_urls')), path('sources/', include('proco.data_sources.api_urls')), ])), diff --git a/docker/docker-compose.dev.yml b/docker/docker-compose.dev.yml index 30d9dac..d417af9 100644 --- a/docker/docker-compose.dev.yml +++ b/docker/docker-compose.dev.yml @@ -64,7 +64,7 @@ services: build: context: .. 
dockerfile: docker/Dockerfile-dev - command: bash -c "sleep 10 && pipenv run celery --app=proco.taskapp beat --loglevel=DEBUG" + command: bash -c "sleep 10 && pipenv run celery --app=proco.taskapp beat --scheduler=redbeat.RedBeatScheduler --loglevel=DEBUG" environment: DJANGO_SETTINGS_MODULE: config.settings.dev DATABASE_URL: postgis://test:test@db/proco @@ -82,15 +82,22 @@ services: build: context: .. dockerfile: docker/Dockerfile-dev - command: bash -c "sleep 10 && pipenv run celery --app=proco.taskapp worker --loglevel=DEBUG --time-limit=300 --concurrency=2 --soft-time-limit=300" + command: bash -c "sleep 10 && /code/celery-dev.sh --loglevel=DEBUG --task-events" environment: DJANGO_SETTINGS_MODULE: config.settings.dev DATABASE_URL: postgis://test:test@db/proco -# CELERY_BROKER_URL: amqp://rabbitmq:rabbitmq@rabbitmq/ REDIS_URL: redis://redis:6379/0 + CELERY_BROKER_URL: redis://redis:6379/1 + CELERY_RESULT_BACKEND_URL: redis://redis:6379/2 + ENABLED_FLOWER_METRICS: true + FLOWER_BASIC_AUTH: hz195KIQ2Kvu8S9knla7lDZXIVX35mvj:TCynWCrvV5pCPpfjArcMtm40P39Od3FJ + FLOWER_PORT: 6543 + FLOWER_DEBUG: false volumes: - "..:/code" depends_on: - db # - rabbitmq - redis + ports: + - "6543:6543" diff --git a/giga-maps-backend-release-2.0.11.zip b/giga-maps-backend-release-2.0.11.zip new file mode 100644 index 0000000..1a47e2d Binary files /dev/null and b/giga-maps-backend-release-2.0.11.zip differ diff --git a/proco/about_us/api.py b/proco/about_us/api.py index 88e8882..7b0ec99 100644 --- a/proco/about_us/api.py +++ b/proco/about_us/api.py @@ -2,7 +2,6 @@ from rest_framework import status as rest_status from rest_framework import viewsets -from rest_framework.decorators import permission_classes from rest_framework.filters import SearchFilter from rest_framework.response import Response @@ -73,7 +72,7 @@ def destroy(self, request): if SliderImage.objects.filter(id__in=request.data['id']).exists(): image_data = SliderImage.objects.filter(id__in=request.data['id']) if image_data: - action_log(request, image_data, 3, "image deleted", self.model, + action_log(request, image_data, 3, 'image deleted', self.model, field_name='title') image_data.delete() return Response(status=rest_status.HTTP_200_OK, data=delete_succ_mess) @@ -92,7 +91,6 @@ class AboutUsAPIView(viewsets.ViewSet): ) def list(self, request, *args, **kwargs): - # queryset = super(AboutUsAPIView, self).get_queryset() try: about_us = AboutUs.objects.filter(status=True).values() list_data = [] @@ -146,8 +144,10 @@ def update(self, request, *args, **kwargs): try: list_data = [] change_data = [] + about_us_obj_list = [] for item in request.data: about_us = AboutUs.objects.get(pk=item['id']) + about_us_obj_list.append(about_us) data = AboutUsSerializer(instance=about_us, data=item, partial=True, context={'request': request}) @@ -162,11 +162,11 @@ def update(self, request, *args, **kwargs): if len(change_data) > 0: change_data = list(itertools.chain(*change_data)) change_data = list(set(change_data)) - remove_item = ["created", "modified"] + remove_item = ['created', 'modified'] for field in remove_item: if field in change_data: change_data.remove(field) - action_log(request, [about_us], 2, change_data, self.model, field_name='title') + action_log(request, about_us_obj_list, 2, change_data, self.model, field_name='title') return Response(list_data) except AboutUs.DoesNotExist: return Response(data=error_mess, status=rest_status.HTTP_502_BAD_GATEWAY) @@ -177,7 +177,7 @@ def destroy(self, request, *args, **kwargs): if 
AboutUs.objects.filter(id__in=request.data['id']).exists(): about_us = AboutUs.objects.filter(id__in=request.data['id']) if about_us: - action_log(request, about_us, 3, "About Us deleted", self.model, + action_log(request, about_us, 3, 'About Us deleted', self.model, field_name='title') about_us.delete() return Response(status=rest_status.HTTP_200_OK, data=delete_succ_mess) diff --git a/proco/about_us/tests/test_api.py b/proco/about_us/tests/test_api.py index 458a35c..1928a74 100644 --- a/proco/about_us/tests/test_api.py +++ b/proco/about_us/tests/test_api.py @@ -10,11 +10,10 @@ class SlideImageAPITestCase(TestAPIViewSetMixin, TestCase): base_view = 'about_us:' - databases = {'read_only_database', 'default'} + databases = {'default', } @classmethod def setUpTestData(cls): - # self.databases = 'default' cls.email = 'test@test.com' cls.password = 'SomeRandomPass96' cls.user = test_utilities.setup_admin_user_by_role() @@ -70,7 +69,7 @@ def test_slide_destroy(self): class AboutUsAPITestCase(TestAPIViewSetMixin, TestCase): base_view = 'about_us:' - databases = {'default', 'read_only_database'} + databases = {'default', } def setUp(self): self.email = 'test@test.com' diff --git a/proco/accounts/api.py b/proco/accounts/api.py index 288c00b..6613590 100644 --- a/proco/accounts/api.py +++ b/proco/accounts/api.py @@ -1,13 +1,13 @@ import copy import json +import logging from datetime import timedelta +from math import floor, ceil from django.conf import settings from django.contrib.admin.models import LogEntry -from django.db.models import ( - Case, Value, When -) -from django.db.models import IntegerField +from django.db.models import Case, F, IntegerField, Value, When, Min, Max +from django.db.models import Q from django.shortcuts import get_object_or_404 from django.utils.decorators import method_decorator from django.views.decorators.cache import cache_control @@ -31,11 +31,15 @@ from proco.core.viewsets import BaseModelViewSet from proco.custom_auth import models as auth_models from proco.locations.models import Country +from proco.schools.models import School from proco.utils import dates as date_utilities from proco.utils.cache import cache_manager from proco.utils.filters import NullsAlwaysLastOrderingFilter +from proco.utils.mixins import CachedListMixin from proco.utils.tasks import update_all_cached_values +logger = logging.getLogger('gigamaps.' 
+ __name__) + class APIsListAPIView(BaseModelViewSet): """ @@ -143,7 +147,6 @@ def update_serializer_context(self, context): if api_instance is not None: context['api_instance'] = api_instance return context - # raise accounts_exceptions.InvalidAPIError() def perform_destroy(self, instance): """ @@ -195,6 +198,7 @@ def put(self, request, *args, **kwargs): request_user = self.request.user queryset = accounts_models.APIKey.objects.all().filter( + Q(user=request_user) | Q(has_write_access=True), api__deleted__isnull=True, api_id=request.data.get('api_id'), api_key=request.data.get('api_key'), @@ -202,12 +206,9 @@ def put(self, request, *args, **kwargs): valid_to__gte=core_utilities.get_current_datetime_object().date(), ) - if queryset.filter(user=request_user).exists(): - return Response(status=rest_status.HTTP_200_OK) - elif queryset.filter(has_write_access=True).exists(): + if queryset.exists(): return Response(status=rest_status.HTTP_200_OK) - - return Response(status=rest_status.HTTP_404_NOT_FOUND, data={"detail": "Please enter valid api key."}) + return Response(status=rest_status.HTTP_404_NOT_FOUND, data={'detail': 'Please enter valid api key.'}) class NotificationViewSet(BaseModelViewSet): @@ -298,6 +299,111 @@ def get(self, request, *args, **kwargs): return Response(data=static_data) +class AdvancedFiltersViewSet(APIView): + base_auth_permissions = ( + permissions.AllowAny, + ) + + CACHE_KEY = 'cache' + CACHE_KEY_PREFIX = 'ADVANCE_FILTERS_JSON' + + def get_cache_key(self): + params = dict(self.request.query_params) + params.pop(self.CACHE_KEY, None) + return '{0}_{1}'.format(self.CACHE_KEY_PREFIX, + '_'.join(map(lambda x: '{0}_{1}'.format(x[0], x[1]), sorted(params.items()))), ) + + def get(self, request, *args, **kwargs): + use_cached_data = self.request.query_params.get(self.CACHE_KEY, 'on').lower() in ['on', 'true'] + cache_key = self.get_cache_key() + + response_data = None + if use_cached_data: + response_data = cache_manager.get(cache_key) + + if not response_data: + filters = copy.deepcopy(settings.FILTERS_DATA) + + for filter_json in filters: + parameter_table = filter_json['parameter']['table'] + parameter_field = filter_json['parameter']['field'] + + last_weekly_status_field = 'last_weekly_status__{}'.format(parameter_field) + + active_countries_list = [] + + # Populate the active countries list + active_countries_sql_filter = filter_json.get('active_countries_filter', None) + if active_countries_sql_filter: + country_qs = School.objects.all() + if parameter_table == 'school_static': + country_qs = country_qs.select_related('last_weekly_status').annotate(**{ + parameter_table + '_' + parameter_field: F(last_weekly_status_field) + }) + + active_countries_list = list(country_qs.extra( + where=[active_countries_sql_filter], + ).order_by('country_id').values_list('country_id', flat=True).distinct('country_id')) + + if len(active_countries_list) > 0: + filter_json['active_countries_list'] = active_countries_list + + del filter_json['active_countries_filter'] + + if filter_json['type'] == 'range': + select_qs = School.objects.all() + if len(active_countries_list) > 0: + select_qs = select_qs.filter(country_id__in=active_countries_list) + + if parameter_table == 'school_static': + select_qs = select_qs.select_related('last_weekly_status').values('country_id').annotate( + min_value=Min(F(last_weekly_status_field)), + max_value=Max(F(last_weekly_status_field)), + ) + else: + select_qs = select_qs.values('country_id').annotate( + min_value=Min(parameter_field), + 
max_value=Max(parameter_field), + ) + + min_max_result_country_wise = list( + select_qs.values('country_id', 'min_value', 'max_value').order_by('country_id').distinct()) + + active_countries_range = filter_json['active_countries_range'] + + for min_max_result in min_max_result_country_wise: + country_id = min_max_result.pop('country_id') + country_range_json = active_countries_range.get(country_id, copy.deepcopy( + active_countries_range['default'])) + min_max_result['min_value'] = floor(min_max_result['min_value']) + min_max_result['max_value'] = ceil(min_max_result['max_value']) + + if 'downcast_aggr_str' in filter_json: + downcast_eval = filter_json['downcast_aggr_str'] + min_max_result['min_value'] = floor( + eval(downcast_eval.format(val=min_max_result['min_value']))) + min_max_result['max_value'] = ceil( + eval(downcast_eval.format(val=min_max_result['max_value']))) + + country_range_json.update(**min_max_result) + + country_range_json['min_place_holder'] = 'Min ({})'.format(min_max_result['min_value']) + country_range_json['max_place_holder'] = 'Max ({})'.format(min_max_result['max_value']) + active_countries_range[country_id] = country_range_json + + filter_json['active_countries_range'] = active_countries_range + + response_data = { + 'count': len(settings.FILTERS_DATA), + 'results': filters, + } + request_path = remove_query_param(request.get_full_path(), 'cache') + cache_manager.set(cache_key, response_data, request_path=request_path, + soft_timeout=settings.CACHE_CONTROL_MAX_AGE) + + return Response(data=response_data) + + class DataSourceViewSet(BaseModelViewSet): model = accounts_models.DataSource serializer_class = serializers.DataSourceListSerializer @@ -563,7 +669,6 @@ def get_static_map_query(self, kwargs): for title, values_and_label in legend_configs.items(): values = list(filter(lambda val: val if not core_utilities.is_blank_string(val) else None, values_and_label.get('values', []))) - # label = values_and_label.get('labels', title).strip() if len(values) > 0: is_sql_value = 'SQL:' in values[0] @@ -619,7 +724,6 @@ def get(self, request, *args, **kwargs): } global_benchmark = data_layer_instance.global_benchmark.get('value') - # benchmark_unit = data_layer_instance.global_benchmark.get('unit') benchmark_base = str(parameter_col.get('base_benchmark', 1)) data_layer_qs = statistics_models.SchoolDailyStatus.objects.all() @@ -675,7 +779,15 @@ def get(self, request, *args, **kwargs): return Response(data=response) -class PublishedDataLayersViewSet(BaseModelViewSet): +class PublishedDataLayersViewSet(CachedListMixin, BaseModelViewSet): + """ + PublishedDataLayersViewSet + Cache Attr: + Auto Cache: Not required + Call Cache: Yes + """ + LIST_CACHE_KEY_PREFIX = 'PUBLISHED_LAYERS_LIST' + model = accounts_models.DataLayer serializer_class = serializers.DataLayersListSerializer @@ -786,7 +898,7 @@ def update_kwargs(self, country_ids, layer_instance): self.kwargs['admin1_ids'] = [a_id.strip() for a_id in query_params['admin1_id__in'].split(',')] if 'school_id' in query_param_keys: - self.kwargs['school_ids'] = [query_params['school_id']] + self.kwargs['school_ids'] = [str(query_params['school_id']).strip()] elif 'school_id__in' in query_param_keys: self.kwargs['school_ids'] = [s_id.strip() for s_id in query_params['school_id__in'].split(',')] @@ -796,6 +908,11 @@ def update_kwargs(self, country_ids, layer_instance): self.kwargs['convert_unit'] = layer_instance.global_benchmark.get('convert_unit', 'mbps') self.kwargs['is_reverse'] = layer_instance.is_reverse + 
self.kwargs['school_filters'] = core_utilities.get_filter_sql( + self.request, 'schools', 'schools_school') + self.kwargs['school_static_filters'] = core_utilities.get_filter_sql( + self.request, 'school_static', 'connection_statistics_schoolweeklystatus') + def get_benchmark_value(self, data_layer_instance): benchmark_val = data_layer_instance.global_benchmark.get('value') benchmark_unit = data_layer_instance.global_benchmark.get('unit') @@ -856,6 +973,7 @@ def get_info_query(self): AND (t."date" BETWEEN '{start_date}' AND '{end_date}') AND t."live_data_source" IN ({live_source_types}) ) + {school_weekly_join} WHERE ( "schools_school"."deleted" IS NULL AND "connection_statistics_schoolrealtimeregistration"."deleted" IS NULL @@ -863,6 +981,7 @@ def get_info_query(self): {country_condition} {admin1_condition} {school_condition} + {school_weekly_condition} AND "connection_statistics_schoolrealtimeregistration"."rt_registered" = True AND "connection_statistics_schoolrealtimeregistration"."rt_registration_date"::date <= '{end_date}') GROUP BY "schools_school"."id" @@ -875,6 +994,8 @@ def get_info_query(self): kwargs['country_condition'] = '' kwargs['admin1_condition'] = '' kwargs['school_condition'] = '' + kwargs['school_weekly_join'] = '' + kwargs['school_weekly_condition'] = '' kwargs['case_conditions'] = """ COUNT(DISTINCT CASE WHEN t.field_avg > {benchmark_value} THEN t.school_id ELSE NULL END) AS "good", @@ -903,69 +1024,74 @@ def get_info_query(self): ','.join([str(admin1_id) for admin1_id in kwargs['admin1_ids']]) ) - if len(kwargs.get('school_ids', [])) > 0: - kwargs['school_condition'] = 'AND "schools_school"."id" IN ({0})'.format( - ','.join([str(school_id) for school_id in kwargs['school_ids']]) - ) + if len(kwargs['school_filters']) > 0: + kwargs['school_condition'] = ' AND ' + kwargs['school_filters'] + + if len(kwargs['school_static_filters']) > 0: + kwargs['school_weekly_join'] = """ + LEFT OUTER JOIN "connection_statistics_schoolweeklystatus" + ON "schools_school"."last_weekly_status_id" = "connection_statistics_schoolweeklystatus"."id" + """ + kwargs['school_weekly_condition'] = ' AND ' + kwargs['school_static_filters'] return query.format(**kwargs) def get_school_view_info_query(self): query = """ - SELECT DISTINCT s."id", - s."name", - s."external_id", - s."giga_id_school", + SELECT DISTINCT schools_school."id", + schools_school."name", + schools_school."external_id", + schools_school."giga_id_school", CASE WHEN srr."rt_registered" = True THEN true ELSE false END AS is_data_synced, - s."admin1_id", + schools_school."admin1_id", adm1_metadata."name" AS admin1_name, adm1_metadata."giga_id_admin" AS admin1_code, adm1_metadata."description_ui_label" AS admin1_description_ui_label, - s."admin2_id", + schools_school."admin2_id", adm2_metadata."name" AS admin2_name, adm2_metadata."giga_id_admin" AS admin2_code, adm2_metadata."description_ui_label" AS admin2_description_ui_label, - s."country_id", + schools_school."country_id", c."name" AS country_name, - ST_AsGeoJSON(ST_Transform(s."geopoint", 4326)) AS geopoint, - s."environment", - s."education_level", + ST_AsGeoJSON(ST_Transform(schools_school."geopoint", 4326)) AS geopoint, + schools_school."environment", + schools_school."education_level", ROUND(AVG(sds."{col_name}"::numeric), 2) AS "live_avg", - CASE WHEN s.connectivity_status IN ('good', 'moderate') THEN 'connected' - WHEN s.connectivity_status = 'no' THEN 'not_connected' ELSE 'unknown' END as connectivity_status, + CASE WHEN schools_school.connectivity_status IN ('good', 
'moderate') THEN 'connected' + WHEN schools_school.connectivity_status = 'no' THEN 'not_connected' ELSE 'unknown' END as connectivity_status, CASE WHEN srr."rt_registered" = True AND srr."rt_registration_date"::date <= '{end_date}' THEN true ELSE false END AS is_rt_connected, {case_conditions} - FROM "schools_school" s - INNER JOIN public.locations_country c ON c."id" = s."country_id" + FROM "schools_school" schools_school + INNER JOIN public.locations_country c ON c."id" = schools_school."country_id" AND c."deleted" IS NULL - AND s."deleted" IS NULL + AND schools_school."deleted" IS NULL LEFT JOIN public.locations_countryadminmetadata AS adm1_metadata - ON adm1_metadata."id" = s.admin1_id + ON adm1_metadata."id" = schools_school.admin1_id AND adm1_metadata."layer_name" = 'adm1' AND adm1_metadata."deleted" IS NULL LEFT JOIN public.locations_countryadminmetadata AS adm2_metadata - ON adm2_metadata."id" = s.admin2_id + ON adm2_metadata."id" = schools_school.admin2_id AND adm2_metadata."layer_name" = 'adm2' AND adm2_metadata."deleted" IS NULL LEFT JOIN "connection_statistics_schoolrealtimeregistration" AS srr - ON s."id" = srr."school_id" + ON schools_school."id" = srr."school_id" AND srr."deleted" IS NULL LEFT OUTER JOIN "connection_statistics_schooldailystatus" sds - ON s."id" = sds."school_id" + ON schools_school."id" = sds."school_id" AND sds."deleted" IS NULL AND (sds."date" BETWEEN '{start_date}' AND '{end_date}') AND sds."live_data_source" IN ({live_source_types}) - WHERE {school_condition} - GROUP BY s."id", srr."rt_registered", srr."rt_registration_date", + WHERE "schools_school"."id" IN ({ids}) + GROUP BY schools_school."id", srr."rt_registered", srr."rt_registration_date", adm1_metadata."name", adm1_metadata."description_ui_label", adm2_metadata."name", adm2_metadata."description_ui_label", c."name", adm1_metadata."giga_id_admin", adm2_metadata."giga_id_admin" - ORDER BY s."id" ASC + ORDER BY schools_school."id" ASC """ kwargs = copy.deepcopy(self.kwargs) - kwargs['school_condition'] = '' + kwargs['ids'] = ','.join(kwargs['school_ids']) kwargs['case_conditions'] = """ CASE @@ -986,30 +1112,21 @@ def get_school_view_info_query(self): ELSE 'unknown' END AS live_avg_connectivity """.format(**kwargs) - if len(kwargs.get('school_ids', [])) > 0: - kwargs['school_condition'] = 's."id" IN ({0})'.format( - ','.join([str(school_id) for school_id in kwargs['school_ids']]) - ) - return query.format(**kwargs) def get_school_view_statistics_info_query(self): query = """ SELECT sws.* - FROM "schools_school" s - INNER JOIN public.connection_statistics_schoolweeklystatus sws on sws."id" = s."last_weekly_status_id" - WHERE s."deleted" IS NULL AND sws."deleted" IS NULL {school_condition} - """ - - kwargs = copy.deepcopy(self.kwargs) - kwargs['school_condition'] = '' - - if len(kwargs.get('school_ids', [])) > 0: - kwargs['school_condition'] = 'AND s."id" IN ({0})'.format( - ','.join([str(school_id) for school_id in kwargs['school_ids']]) - ) + FROM "schools_school" + INNER JOIN connection_statistics_schoolweeklystatus sws + ON sws."id" = "schools_school"."last_weekly_status_id" + WHERE + "schools_school"."deleted" IS NULL + AND sws."deleted" IS NULL + AND "schools_school"."id" IN ({ids}) + """.format(ids=','.join(self.kwargs['school_ids'])) - return query.format(**kwargs) + return query def get_avg_query(self, **kwargs): query = """ @@ -1028,10 +1145,12 @@ def get_avg_query(self, **kwargs): AND t."live_data_source" IN ({live_source_types}) AND t."deleted" IS NULL ) + {school_weekly_join} WHERE ( 
{country_condition} {admin1_condition} {school_condition} + {school_weekly_condition} "connection_statistics_schoolrealtimeregistration"."rt_registered" = True AND "connection_statistics_schoolrealtimeregistration"."rt_registration_date"::date <= '{end_date}' AND t."{col_name}" IS NOT NULL) @@ -1044,6 +1163,8 @@ def get_avg_query(self, **kwargs): kwargs['school_condition'] = '' kwargs['school_selection'] = '' kwargs['school_group_by'] = '' + kwargs['school_weekly_join'] = '' + kwargs['school_weekly_condition'] = '' if len(kwargs.get('country_ids', [])) > 0: kwargs['country_condition'] = '"schools_school"."country_id" IN ({0}) AND'.format( @@ -1056,12 +1177,20 @@ def get_avg_query(self, **kwargs): ) if len(kwargs.get('school_ids', [])) > 0: - kwargs['school_condition'] = '"schools_school"."id" IN ({0}) AND'.format( - ','.join([str(school_id) for school_id in kwargs['school_ids']]) - ) + kwargs['school_condition'] = '"schools_school"."id" IN ({0}) AND '.format(','.join(kwargs['school_ids'])) kwargs['school_selection'] = '"schools_school"."id", ' kwargs['school_group_by'] = ', "schools_school"."id"' + if len(kwargs['school_filters']) > 0: + kwargs['school_condition'] += kwargs['school_filters'] + ' AND ' + + if len(kwargs['school_static_filters']) > 0: + kwargs['school_weekly_join'] = """ + LEFT OUTER JOIN "connection_statistics_schoolweeklystatus" + ON "schools_school"."last_weekly_status_id" = "connection_statistics_schoolweeklystatus"."id" + """ + kwargs['school_weekly_condition'] = kwargs['school_static_filters'] + ' AND ' + return query.format(**kwargs) def generate_graph_data(self): @@ -1092,7 +1221,6 @@ def generate_graph_data(self): graph_data_per_school[school_id] = copy.deepcopy(graph_data) all_positive_speeds_per_school[school_id] = [] - # Update the graph_data with actual values if they exist # Update the graph_data with actual values if they exist for daily_avg_data in data: school_id = str(daily_avg_data['id']) @@ -1115,7 +1243,6 @@ def generate_graph_data(self): all_positive_speeds = [] # Update the graph_data with actual values if they exist - # Update the graph_data with actual values if they exist for daily_avg_data in data: formatted_date = date_utilities.format_date(daily_avg_data['date']) for entry in graph_data: @@ -1133,13 +1260,16 @@ def generate_graph_data(self): def get_static_info_query(self, query_labels): query = """ SELECT {label_case_statements} - COUNT(DISTINCT CASE WHEN sws."{col_name}" IS NOT NULL THEN s."id" ELSE NULL END) AS "total_schools" - FROM schools_school AS s - LEFT JOIN connection_statistics_schoolweeklystatus sws ON s.last_weekly_status_id = sws.id - WHERE s."deleted" IS NULL AND sws."deleted" IS NULL + COUNT(DISTINCT CASE WHEN sws."{col_name}" IS NOT NULL THEN "schools_school"."id" ELSE NULL END) + AS "total_schools" + FROM "schools_school" + {school_weekly_join} + LEFT JOIN connection_statistics_schoolweeklystatus sws ON "schools_school"."last_weekly_status_id" = sws."id" + WHERE "schools_school"."deleted" IS NULL AND sws."deleted" IS NULL {country_condition} {admin1_condition} {school_condition} + {school_weekly_condition} """ kwargs = copy.deepcopy(self.kwargs) @@ -1147,21 +1277,28 @@ def get_static_info_query(self, query_labels): kwargs['country_condition'] = '' kwargs['admin1_condition'] = '' kwargs['school_condition'] = '' + kwargs['school_weekly_join'] = '' + kwargs['school_weekly_condition'] = '' if len(kwargs.get('country_ids', [])) > 0: - kwargs['country_condition'] = ' AND s."country_id" IN ({0})'.format( + kwargs['country_condition'] 
= ' AND "schools_school"."country_id" IN ({0})'.format( ','.join([str(country_id) for country_id in kwargs['country_ids']]) ) if len(kwargs.get('admin1_ids', [])) > 0: - kwargs['admin1_condition'] = ' AND s."admin1_id" IN ({0})'.format( + kwargs['admin1_condition'] = ' AND "schools_school"."admin1_id" IN ({0})'.format( ','.join([str(admin1_id) for admin1_id in kwargs['admin1_ids']]) ) - if len(kwargs.get('school_ids', [])) > 0: - kwargs['school_condition'] = ' AND s."id" IN ({0})'.format( - ','.join([str(school_id) for school_id in kwargs['school_ids']]) - ) + if len(kwargs['school_filters']) > 0: + kwargs['school_condition'] = ' AND ' + kwargs['school_filters'] + + if len(kwargs['school_static_filters']) > 0: + kwargs['school_weekly_join'] = """ + LEFT OUTER JOIN "connection_statistics_schoolweeklystatus" + ON "schools_school"."last_weekly_status_id" = "connection_statistics_schoolweeklystatus"."id" + """ + kwargs['school_weekly_condition'] = ' AND ' + kwargs['school_static_filters'] legend_configs = kwargs['legend_configs'] label_cases = [] @@ -1182,17 +1319,15 @@ def get_static_info_query(self, query_labels): col_name=kwargs['col_name'], ) label_cases.append( - 'COUNT(DISTINCT CASE WHEN {sql} THEN s."id" ELSE NULL END) AS "{label}",'.format( + 'COUNT(DISTINCT CASE WHEN {sql} THEN schools_school."id" ELSE NULL END) AS "{label}",'.format( sql=sql_statement, - col_name=kwargs['col_name'], label=label, - value=','.join(["'" + str(v).lower() + "'" for v in values]) )) else: values_l.extend(values) if parameter_col_type == 'str': label_cases.append( - 'COUNT(DISTINCT CASE WHEN LOWER(sws."{col_name}") IN ({value}) THEN s."id" ELSE NULL END) ' + 'COUNT(DISTINCT CASE WHEN LOWER(sws."{col_name}") IN ({value}) THEN schools_school."id" ELSE NULL END) ' 'AS "{label}",'.format( col_name=kwargs['col_name'], label=label, @@ -1200,7 +1335,7 @@ def get_static_info_query(self, query_labels): )) elif parameter_col_type == 'int': label_cases.append( - 'COUNT(DISTINCT CASE WHEN sws."{col_name}" IN ({value}) THEN s."id" ELSE NULL END) ' + 'COUNT(DISTINCT CASE WHEN sws."{col_name}" IN ({value}) THEN schools_school."id" ELSE NULL END) ' 'AS "{label}",'.format( col_name=kwargs['col_name'], label=label, @@ -1209,7 +1344,7 @@ def get_static_info_query(self, query_labels): else: if is_sql_value: label_cases.append( - 'COUNT(DISTINCT CASE WHEN sws."{col_name}" IS NULL THEN s."id" ELSE NULL END) AS "{label}",'.format( + 'COUNT(DISTINCT CASE WHEN sws."{col_name}" IS NULL THEN schools_school."id" ELSE NULL END) AS "{label}",'.format( col_name=kwargs['col_name'], label=label, )) @@ -1217,7 +1352,7 @@ def get_static_info_query(self, query_labels): values = set(values_l) if parameter_col_type == 'str': label_cases.append( - 'COUNT(DISTINCT CASE WHEN LOWER(sws."{col_name}") NOT IN ({value}) THEN s."id" ELSE NULL END) ' + 'COUNT(DISTINCT CASE WHEN LOWER(sws."{col_name}") NOT IN ({value}) THEN schools_school."id" ELSE NULL END) ' 'AS "{label}",'.format( col_name=kwargs['col_name'], label=label, @@ -1225,7 +1360,7 @@ def get_static_info_query(self, query_labels): )) elif parameter_col_type == 'int': label_cases.append( - 'COUNT(DISTINCT CASE WHEN sws."{col_name}" NOT IN ({value}) THEN s."id" ELSE NULL END) ' + 'COUNT(DISTINCT CASE WHEN sws."{col_name}" NOT IN ({value}) THEN schools_school."id" ELSE NULL END) ' 'AS "{label}",'.format( col_name=kwargs['col_name'], label=label, @@ -1238,49 +1373,44 @@ def get_static_info_query(self, query_labels): def get_static_school_view_info_query(self): query = """ - SELECT s."id", - 
s."name", - s."external_id", - s."giga_id_school", - s."country_id", + SELECT schools_school."id", + schools_school."name", + schools_school."external_id", + schools_school."giga_id_school", + schools_school."country_id", c."name" AS country_name, - s."admin1_id", + schools_school."admin1_id", adm1_metadata."name" AS admin1_name, adm1_metadata."giga_id_admin" AS admin1_code, adm1_metadata."description_ui_label" AS admin1_description_ui_label, - s."admin2_id", + schools_school."admin2_id", adm2_metadata."name" AS admin2_name, adm2_metadata."giga_id_admin" AS admin2_code, adm2_metadata."description_ui_label" AS admin2_description_ui_label, - s."environment", - s."education_level", + schools_school."environment", + schools_school."education_level", sws."{col_name}" AS field_value, {label_case_statements} - ST_AsGeoJSON(ST_Transform(s."geopoint", 4326)) AS geopoint, - CASE WHEN s.connectivity_status IN ('good', 'moderate') THEN 'connected' - WHEN s.connectivity_status = 'no' THEN 'not_connected' ELSE 'unknown' END as connectivity_status - FROM schools_school AS s - INNER JOIN public.locations_country c ON c.id = s.country_id + ST_AsGeoJSON(ST_Transform(schools_school."geopoint", 4326)) AS geopoint, + CASE WHEN schools_school.connectivity_status IN ('good', 'moderate') THEN 'connected' + WHEN schools_school.connectivity_status = 'no' THEN 'not_connected' ELSE 'unknown' END as connectivity_status + FROM "schools_school" + INNER JOIN locations_country c ON c.id = schools_school.country_id AND c."deleted" IS NULL - LEFT JOIN public.locations_countryadminmetadata AS adm1_metadata - ON adm1_metadata."id" = s.admin1_id + LEFT JOIN locations_countryadminmetadata AS adm1_metadata + ON adm1_metadata."id" = schools_school.admin1_id AND adm1_metadata."layer_name" = 'adm1' AND adm1_metadata."deleted" IS NULL - LEFT JOIN public.locations_countryadminmetadata AS adm2_metadata - ON adm2_metadata."id" = s.admin2_id + LEFT JOIN locations_countryadminmetadata AS adm2_metadata + ON adm2_metadata."id" = schools_school.admin2_id AND adm2_metadata."layer_name" = 'adm2' AND adm2_metadata."deleted" IS NULL - LEFT JOIN connection_statistics_schoolweeklystatus sws ON s.last_weekly_status_id = sws.id - {school_condition} + LEFT JOIN connection_statistics_schoolweeklystatus sws ON schools_school.last_weekly_status_id = sws.id + WHERE "schools_school"."id" IN ({ids}) """ kwargs = copy.deepcopy(self.kwargs) - kwargs['school_condition'] = '' - - if len(kwargs.get('school_ids', [])) > 0: - kwargs['school_condition'] = ' WHERE s."id" IN ({0})'.format( - ','.join([str(school_id) for school_id in kwargs['school_ids']]) - ) + kwargs['ids'] = ','.join(kwargs['school_ids']) legend_configs = kwargs['legend_configs'] label_cases = [] @@ -1400,34 +1530,33 @@ def get(self, request, *args, **kwargs): if len(info_panel_school_list) > 0: for info_panel_school in info_panel_school_list: - school_id = info_panel_school['id'] info_panel_school['geopoint'] = json.loads(info_panel_school['geopoint']) info_panel_school['statistics'] = list(filter( - lambda s: s['school_id'] == school_id, statistics))[-1] + lambda s: s['school_id'] == info_panel_school['id'], statistics))[-1] - live_avg = (round(sum(positive_speeds[str(school_id)]) / len( - positive_speeds[str(school_id)]), 2) if len( - positive_speeds[str(school_id)]) > 0 else 0) + live_avg = (round(sum(positive_speeds[str(info_panel_school['id'])]) / len( + positive_speeds[str(info_panel_school['id'])]), 2) if len( + positive_speeds[str(info_panel_school['id'])]) > 0 else 0) 
info_panel_school['live_avg'] = live_avg - info_panel_school['graph_data'] = graph_data[str(school_id)] + info_panel_school['graph_data'] = graph_data[str(info_panel_school['id'])] response = info_panel_school_list else: + is_data_synced_qs = SchoolWeeklyStatus.objects.filter( + school__realtime_registration_status__rt_registered=True, + ) + + if len(self.kwargs['school_filters']) > 0: + is_data_synced_qs = is_data_synced_qs.extra(where=[self.kwargs['school_filters']]) + + if len(self.kwargs['school_static_filters']) > 0: + is_data_synced_qs = is_data_synced_qs.extra(where=[self.kwargs['school_static_filters']]) + if len(self.kwargs.get('admin1_ids', [])) > 0: - is_data_synced = SchoolWeeklyStatus.objects.filter( - school__admin1_id__in=self.kwargs['admin1_ids'], - school__realtime_registration_status__rt_registered=True, - ).exists() + is_data_synced_qs = is_data_synced_qs.filter(school__admin1_id__in=self.kwargs['admin1_ids']) elif len(self.kwargs.get('country_ids', [])) > 0: - is_data_synced = SchoolWeeklyStatus.objects.filter( - school__country_id__in=self.kwargs['country_ids'], - school__realtime_registration_status__rt_registered=True, - ).exists() - else: - is_data_synced = SchoolWeeklyStatus.objects.filter( - school__realtime_registration_status__rt_registered=True, - ).exists() + is_data_synced_qs = is_data_synced_qs.filter(school__country_id__in=self.kwargs['country_ids']) query_response = db_utilities.sql_to_response(self.get_info_query(), label=self.__class__.__name__)[ -1] @@ -1465,7 +1594,7 @@ def get(self, request, *args, **kwargs): 'no_internet': query_response['bad'], 'unknown': query_response['unknown'], }, - 'is_data_synced': is_data_synced, + 'is_data_synced': is_data_synced_qs.exists(), 'live_avg': live_avg, 'live_avg_connectivity': live_avg_connectivity, 'graph_data': graph_data, @@ -1538,6 +1667,7 @@ def get_live_map_query(self, env, request): END as connectivity_status FROM schools_school INNER JOIN bounds ON ST_Intersects("schools_school".geopoint, ST_Transform(bounds.geom, 4326)) + {school_weekly_join} LEFT JOIN ( SELECT "schools_school"."id" AS school_id, AVG(t."{col_name}") AS "field_avg" @@ -1550,6 +1680,7 @@ def get_live_map_query(self, env, request): AND (t."date" BETWEEN '{start_date}' AND '{end_date}') AND t."live_data_source" IN ({live_source_types}) ) + {school_weekly_join} WHERE ( "schools_school"."deleted" IS NULL AND "connection_statistics_schoolrealtimeregistration"."deleted" IS NULL @@ -1557,6 +1688,7 @@ def get_live_map_query(self, env, request): {country_condition} {admin1_condition} {school_condition} + {school_weekly_condition} AND "connection_statistics_schoolrealtimeregistration"."rt_registered" = True AND "connection_statistics_schoolrealtimeregistration"."rt_registration_date"::date <= '{end_date}') GROUP BY "schools_school"."id" @@ -1570,6 +1702,7 @@ def get_live_map_query(self, env, request): {country_outer_condition} {admin1_outer_condition} {school_outer_condition} + {school_weekly_condition} {random_order} {limit_condition} ) @@ -1586,6 +1719,9 @@ def get_live_map_query(self, env, request): kwargs['admin1_outer_condition'] = '' kwargs['school_outer_condition'] = '' + kwargs['school_weekly_join'] = '' + kwargs['school_weekly_condition'] = '' + kwargs['env'] = self.envelope_to_bounds_sql(env) kwargs['limit_condition'] = '' @@ -1638,6 +1774,17 @@ def get_live_map_query(self, env, request): ','.join([str(school_id) for school_id in kwargs['school_ids']]) ) + if len(kwargs['school_filters']) > 0: + kwargs['school_condition'] += ' AND ' + 
kwargs['school_filters'] + kwargs['school_outer_condition'] += ' AND ' + kwargs['school_filters'] + + if len(kwargs['school_static_filters']) > 0: + kwargs['school_weekly_join'] = """ + LEFT OUTER JOIN "connection_statistics_schoolweeklystatus" + ON "schools_school"."last_weekly_status_id" = "connection_statistics_schoolweeklystatus"."id" + """ + kwargs['school_weekly_condition'] = ' AND ' + kwargs['school_static_filters'] + if add_random_condition: kwargs['limit_condition'] = 'LIMIT ' + request.query_params.get('limit', '50000') kwargs['random_order'] = 'ORDER BY random()' if int(request.query_params.get('z', '0')) == 2 else '' @@ -1657,22 +1804,24 @@ def get_static_map_query(self, env, request): {env}::box2d AS b2d ), mvtgeom AS ( - SELECT DISTINCT ST_AsMVTGeom(ST_Transform(s.geopoint, 3857), bounds.b2d) AS geom, - s.id, + SELECT DISTINCT ST_AsMVTGeom(ST_Transform(schools_school.geopoint, 3857), bounds.b2d) AS geom, + schools_school.id, sws."{col_name}" AS field_value, - CASE WHEN s.connectivity_status IN ('good', 'moderate') THEN 'connected' - WHEN s.connectivity_status = 'no' THEN 'not_connected' + CASE WHEN schools_school.connectivity_status IN ('good', 'moderate') THEN 'connected' + WHEN schools_school.connectivity_status = 'no' THEN 'not_connected' ELSE 'unknown' END as connectivity_status, {label_case_statements} - FROM schools_school s - INNER JOIN bounds ON ST_Intersects(s.geopoint, ST_Transform(bounds.geom, 4326)) - LEFT JOIN connection_statistics_schoolweeklystatus sws ON s.last_weekly_status_id = sws.id - WHERE s."deleted" IS NULL + FROM schools_school + INNER JOIN bounds ON ST_Intersects(schools_school.geopoint, ST_Transform(bounds.geom, 4326)) + {school_weekly_join} + LEFT JOIN connection_statistics_schoolweeklystatus sws ON schools_school.last_weekly_status_id = sws.id + WHERE schools_school."deleted" IS NULL AND sws."deleted" IS NULL {country_condition} {admin1_condition} {school_condition} + {school_weekly_condition} {random_order} {limit_condition} ) @@ -1685,6 +1834,9 @@ def get_static_map_query(self, env, request): kwargs['admin1_condition'] = '' kwargs['school_condition'] = '' + kwargs['school_weekly_join'] = '' + kwargs['school_weekly_condition'] = '' + kwargs['env'] = self.envelope_to_bounds_sql(env) kwargs['limit_condition'] = '' @@ -1694,22 +1846,32 @@ def get_static_map_query(self, env, request): if len(kwargs.get('country_ids', [])) > 0: add_random_condition = False - kwargs['country_condition'] = 'AND s."country_id" IN ({0})'.format( + kwargs['country_condition'] = 'AND schools_school."country_id" IN ({0})'.format( ','.join([str(country_id) for country_id in kwargs['country_ids']]) ) if len(kwargs.get('admin1_ids', [])) > 0: add_random_condition = False - kwargs['admin1_condition'] = 'AND s."admin1_id" IN ({0})'.format( + kwargs['admin1_condition'] = 'AND schools_school."admin1_id" IN ({0})'.format( ','.join([str(admin1_id) for admin1_id in kwargs['admin1_ids']]) ) if len(kwargs.get('school_ids', [])) > 0: add_random_condition = False - kwargs['school_condition'] = 'AND s."id" IN ({0})'.format( + kwargs['school_condition'] = 'AND schools_school."id" IN ({0})'.format( ','.join([str(school_id) for school_id in kwargs['school_ids']]) ) + if len(kwargs['school_filters']) > 0: + kwargs['school_condition'] += ' AND ' + kwargs['school_filters'] + + if len(kwargs['school_static_filters']) > 0: + kwargs['school_weekly_join'] = """ + LEFT OUTER JOIN "connection_statistics_schoolweeklystatus" + ON "schools_school"."last_weekly_status_id" = 
"connection_statistics_schoolweeklystatus"."id" + """ + kwargs['school_weekly_condition'] = ' AND ' + kwargs['school_static_filters'] + legend_configs = kwargs['legend_configs'] label_cases = [] values_l = [] @@ -1717,7 +1879,6 @@ def get_static_map_query(self, env, request): for title, values_and_label in legend_configs.items(): values = list(filter(lambda val: val if not core_utilities.is_blank_string(val) else None, values_and_label.get('values', []))) - # label = values_and_label.get('labels', title).strip() if len(values) > 0: is_sql_value = 'SQL:' in values[0] @@ -1775,11 +1936,10 @@ def get(self, request, *args, **kwargs): parameter_col = data_sources.first().data_source_column parameter_column_name = str(parameter_col['name']) - # parameter_column_unit = str(parameter_col.get('unit', '')).lower() base_benchmark = str(parameter_col.get('base_benchmark', 1)) self.update_kwargs(country_ids, data_layer_instance) - benchmark_value, benchmark_unit = self.get_benchmark_value(data_layer_instance) + benchmark_value, _ = self.get_benchmark_value(data_layer_instance) if data_layer_instance.type == accounts_models.DataLayer.LAYER_TYPE_LIVE: self.kwargs.update({ @@ -1803,7 +1963,7 @@ def get(self, request, *args, **kwargs): try: return self.generate_tile(request) except Exception as ex: - print('Exception occurred for school connectivity tiles endpoint: {}'.format(ex)) + logger.error('Exception occurred for school connectivity tiles endpoint: {}'.format(ex)) return Response({'error': 'An error occurred while processing the request'}, status=500) @@ -1960,5 +2120,5 @@ def get(self, request, *args, **kwargs): try: return self.generate_tile(request) except Exception as ex: - print('Exception occurred for school connectivity tiles endpoint: {0}'.format(ex)) + logger.error('Exception occurred for school connectivity tiles endpoint: {0}'.format(ex)) return Response({'error': 'An error occurred while processing the request'}, status=500) diff --git a/proco/accounts/api_urls.py b/proco/accounts/api_urls.py index eafe9cd..8041c8c 100644 --- a/proco/accounts/api_urls.py +++ b/proco/accounts/api_urls.py @@ -30,6 +30,7 @@ path('invalidate-cache/', api.InvalidateCache.as_view(), name='admin-invalidate-cache'), path('app_configs/', api.AppStaticConfigurationsViewSet.as_view(), name='get-app-static-configurations'), + path('advanced_filters/', api.AdvancedFiltersViewSet.as_view(), name='list-advanced-filters'), path('data_sources/', api.DataSourceViewSet.as_view({ 'get': 'list', 'post': 'create', @@ -65,7 +66,7 @@ 'get': 'list', }), name='list-published-data-layers'), - path('recent_action_log/', api.LogActionViewSet.as_view({'get': 'list', }), name='recent_action_log'), + path('recent_action_log/', api.LogActionViewSet.as_view({'get': 'list', }), name='list-recent-action-log'), path('time-players/v2/', api.TimePlayerViewSet.as_view(), name='get-time-player-data-v2'), ] diff --git a/proco/accounts/config.py b/proco/accounts/config.py index c0c85f8..6fd07f0 100644 --- a/proco/accounts/config.py +++ b/proco/accounts/config.py @@ -1,9 +1,9 @@ class AppConfig(object): @property - def active_api_key_count_for_single_api_limit(self): - """API Key count limit""" - return 1 + def valid_name_pattern(self): + """Regex to validate names""" + return r'[a-zA-Z0-9-\' _()]*$' @property def public_api_key_generation_email_subject_format(self): diff --git a/proco/accounts/exceptions.py b/proco/accounts/exceptions.py index b5ed4f2..230eb91 100644 --- a/proco/accounts/exceptions.py +++ b/proco/accounts/exceptions.py @@ -224,9 
+224,15 @@ class InvalidDataLayerNameError(BaseInvalidValidationError): code = 'invalid_data_layer_name' -class DuplicateDataLayerNameError(BaseInvalidValidationError): - message = _("Data Layer with name '{name}' already exists.") - code = 'duplicate_data_layer_name' +class InvalidDataLayerCodeError(BaseInvalidValidationError): + message = _('Invalid Data Layer code.') + description = _('Provide valid data layer code') + code = 'invalid_data_layer_code' + + +class DuplicateDataLayerCodeError(BaseInvalidValidationError): + message = _("Data Layer with code '{code}' already exists.") + code = 'duplicate_data_layer_code' class InvalidCountryNameOrCodeError(BaseInvalidValidationError): diff --git a/proco/accounts/migrations/0014_added_data_layer_code_field.py b/proco/accounts/migrations/0014_added_data_layer_code_field.py new file mode 100644 index 0000000..d493375 --- /dev/null +++ b/proco/accounts/migrations/0014_added_data_layer_code_field.py @@ -0,0 +1,23 @@ +# Generated by Django 2.2.28 on 2024-06-03 10:45 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('accounts', '0013_added_indexes_on_fields'), + ] + + operations = [ + migrations.AddField( + model_name='datalayer', + name='code', + field=models.CharField(db_index=True, default='UNKNOWN', max_length=255, verbose_name='Layer Code'), + ), + migrations.AddField( + model_name='historicaldatalayer', + name='code', + field=models.CharField(db_index=True, default='UNKNOWN', max_length=255, verbose_name='Layer Code'), + ), + ] diff --git a/proco/accounts/migrations/0015_deleted_unused_historical_models.py b/proco/accounts/migrations/0015_deleted_unused_historical_models.py new file mode 100644 index 0000000..7843b9d --- /dev/null +++ b/proco/accounts/migrations/0015_deleted_unused_historical_models.py @@ -0,0 +1,62 @@ +# Generated by Django 2.2.28 on 2024-07-09 08:43 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('accounts', '0014_added_data_layer_code_field'), + ] + + operations = [ + migrations.RemoveField( + model_name='historicaldatalayercountryrelationship', + name='country', + ), + migrations.RemoveField( + model_name='historicaldatalayercountryrelationship', + name='created_by', + ), + migrations.RemoveField( + model_name='historicaldatalayercountryrelationship', + name='data_layer', + ), + migrations.RemoveField( + model_name='historicaldatalayercountryrelationship', + name='history_user', + ), + migrations.RemoveField( + model_name='historicaldatalayercountryrelationship', + name='last_modified_by', + ), + migrations.RemoveField( + model_name='historicaldatalayerdatasourcerelationship', + name='created_by', + ), + migrations.RemoveField( + model_name='historicaldatalayerdatasourcerelationship', + name='data_layer', + ), + migrations.RemoveField( + model_name='historicaldatalayerdatasourcerelationship', + name='data_source', + ), + migrations.RemoveField( + model_name='historicaldatalayerdatasourcerelationship', + name='history_user', + ), + migrations.RemoveField( + model_name='historicaldatalayerdatasourcerelationship', + name='last_modified_by', + ), + migrations.DeleteModel( + name='HistoricalAPIKeyCountryRelationship', + ), + migrations.DeleteModel( + name='HistoricalDataLayerCountryRelationship', + ), + migrations.DeleteModel( + name='HistoricalDataLayerDataSourceRelationship', + ), + ] diff --git a/proco/accounts/models.py b/proco/accounts/models.py index 8d01fc4..9e57db9 100644 --- a/proco/accounts/models.py 
+++ b/proco/accounts/models.py @@ -122,7 +122,7 @@ class Meta: ordering = ['last_modified_at'] -class APIKeyCountryRelationship(core_models.BaseModel): +class APIKeyCountryRelationship(core_models.BaseModelMixin): """ APIKeyCountryRelationship This model is used to store the Api Key and Country relationship. @@ -183,7 +183,6 @@ class Message(core_models.BaseModel): retry_count = models.IntegerField(default=0) is_sent = models.BooleanField(default=False) is_read = models.BooleanField(default=False) - # external_email = models.EmailField(null=True) subject_text = models.TextField(null=True) message_text = models.TextField() @@ -314,6 +313,14 @@ class DataLayer(core_models.BaseModel): icon = models.TextField(null=True, blank=True) # Unique + code = models.CharField( + max_length=255, + null=False, + verbose_name='Layer Code', + default='UNKNOWN', + db_index=True, + ) + name = models.CharField( max_length=255, null=False, @@ -360,8 +367,12 @@ class DataLayer(core_models.BaseModel): class Meta: ordering = ['last_modified_at'] + def save(self, **kwargs): + self.code = str(self.code).upper() + super().save(**kwargs) + -class DataLayerDataSourceRelationship(core_models.BaseModel): +class DataLayerDataSourceRelationship(core_models.BaseModelMixin): """ DataLayerDataSourceRelationship This model is used to store the Data Layer and Data Source relationship. @@ -376,7 +387,7 @@ class Meta: ordering = ['last_modified_at'] -class DataLayerCountryRelationship(core_models.BaseModel): +class DataLayerCountryRelationship(core_models.BaseModelMixin): """ DataLayerCountryRelationship This model is used to store the Data Layer and Country relationship. diff --git a/proco/accounts/serializers.py b/proco/accounts/serializers.py index 0b6629f..6d8ab22 100644 --- a/proco/accounts/serializers.py +++ b/proco/accounts/serializers.py @@ -128,15 +128,6 @@ def get_date_range_filter_applicable(self, api_instance): return False def apply_api_key_filters(self, filters): - """ - filters = { - 'country_id': '144', - 'start_date': '22-09-2023', - 'end_date': '28-09-2023', - 'is_weekly': 'true', - 'is_export': 'true', - } - """ return filters if isinstance(filters, dict) > 0 else {} def get_download_url(self, api_instance): @@ -157,8 +148,10 @@ def get_download_url(self, api_instance): def get_report_title(self, api_instance): report_file_name = api_instance.report_title - if (api_instance.category == accounts_models.API.API_CATEGORY_PUBLIC and - core_utilities.is_blank_string(report_file_name)): + if ( + api_instance.category == accounts_models.API.API_CATEGORY_PUBLIC and + core_utilities.is_blank_string(report_file_name) + ): report_file_name = str('_'.join([api_instance.name, api_instance.category, '{dt}'])) return report_file_name.format( @@ -361,9 +354,11 @@ def _get_status_by_api_category(self): # If API key is created for a Public API, then update status as APPROVED # If API key is created by Admin/Superuser, then also mark it as APPROVED - if api_instance and api_instance.category == accounts_models.API.API_CATEGORY_PUBLIC: - return accounts_models.APIKey.APPROVED - elif core_utilities.is_superuser(request_user): + if ( + (api_instance and api_instance.category == accounts_models.API.API_CATEGORY_PUBLIC) or + core_utilities.is_superuser(request_user) or + request_user.permissions.get(auth_models.RolePermission.CAN_APPROVE_REJECT_API_KEY, False) + ): return accounts_models.APIKey.APPROVED return accounts_models.APIKey.INITIATED @@ -648,7 +643,7 @@ def _validate_has_active_extension_request(self, api_key_instance): ) if 
has_active_request: message_kwargs = { - 'msg': f'Invalid API Key Extension Request as an active request already logged' + 'msg': 'Invalid API Key Extension Request as an active request already logged' } raise accounts_exceptions.InvalidAPIKeyExtensionError(message_kwargs=message_kwargs) @@ -665,7 +660,7 @@ def validate_extension_valid_to(self, extension_valid_to): ): return extension_valid_to message_kwargs = { - 'msg': f'Invalid API Key Extension Request Date as only 365 days extension is allowed from the current date' + 'msg': 'Invalid API Key Extension Request Date as only 365 days extension is allowed from the current date' } raise accounts_exceptions.InvalidAPIKeyExtensionError(message_kwargs=message_kwargs) @@ -808,8 +803,6 @@ def validate_recipient(self, recipients): for email in recipients: validate_email(email) return recipients - - print('Invalid Email Id email: {0}'.format(str(recipients))) raise accounts_exceptions.InvalidEmailId() # For SMS notification elif message_type == accounts_models.Message.TYPE_SMS: @@ -821,8 +814,6 @@ def validate_recipient(self, recipients): ): raise accounts_exceptions.InvalidPhoneNumberError() return recipients - - print('Invalid phone nos: {0}'.format(str(recipients))) raise accounts_exceptions.InvalidPhoneNumberError() elif message_type == accounts_models.Message.TYPE_NOTIFICATION: if isinstance(recipients, list): @@ -834,8 +825,6 @@ def validate_recipient(self, recipients): ): raise accounts_exceptions.InvalidUserIdError() return recipients - - print('Invalid user ids: {0}'.format(str(recipients))) raise accounts_exceptions.InvalidPhoneNumberError() return recipients @@ -856,10 +845,6 @@ def to_internal_value(self, data): data['template'] = data['template'] \ if 'template' in data \ else account_config.standard_email_template_name - # For SMS notification - elif message_type == accounts_models.Message.TYPE_SMS: - # TODO: Implement SMS notification - pass return super().to_internal_value(data) def create(self, validated_data): @@ -874,11 +859,6 @@ def create(self, validated_data): if isinstance(response, int): validated_data['is_sent'] = True - # if its SMS notification, send the SMS over <> service and update the status - elif message_type == accounts_models.Message.TYPE_SMS: - # TODO: Implement SMS notification - pass - # if it's just an application level notification, create the message instance else: validated_data['is_sent'] = True @@ -947,22 +927,12 @@ class BaseDataSourceCRUDSerializer(serializers.ModelSerializer): column_config = serializers.JSONField() def validate_name(self, name): - if re.match(r'[a-zA-Z0-9-\' _()]*$', name): + if re.match(account_config.valid_name_pattern, name): if accounts_models.DataSource.objects.filter(name=name).exists(): raise accounts_exceptions.DuplicateDataSourceNameError(message_kwargs={'name': name}) return name raise accounts_exceptions.InvalidDataSourceNameError() - # def validate_request_config(self, request_config): - # if isinstance(request_config, dict): - # if ( - # not core_utilities.is_blank_string(request_config.get('url', None)) and - # not core_utilities.is_blank_string(request_config.get('method', None)) and - # request_config.get('method').lower() in ['get', 'post'] - # ): - # return request_config - # raise accounts_exceptions.InvalidDataSourceRequestConfigError() - def validate_column_config(self, column_config): if isinstance(column_config, dict) and len(column_config) > 0: column_config = [column_config] @@ -1052,22 +1022,25 @@ class Meta: } def validate_name(self, name): - if 
re.match(r'[a-zA-Z0-9-\' _()]*$', name): + if re.match(account_config.valid_name_pattern, name): if name != self.instance.name and accounts_models.DataSource.objects.filter(name=name).exists(): raise accounts_exceptions.DuplicateDataSourceNameError(message_kwargs={'name': name}) return name raise accounts_exceptions.DuplicateDataSourceNameError(message_kwargs={'name': name}) def validate_status(self, status): - if status in [accounts_models.DataSource.DATA_SOURCE_STATUS_DRAFT, - accounts_models.DataSource.DATA_SOURCE_STATUS_READY_TO_PUBLISH]: - if self.instance.status in [accounts_models.DataSource.DATA_SOURCE_STATUS_DRAFT, - accounts_models.DataSource.DATA_SOURCE_STATUS_READY_TO_PUBLISH]: - return status - elif status == accounts_models.DataSource.DATA_SOURCE_STATUS_DISABLED: - if self.instance.status == accounts_models.DataSource.DATA_SOURCE_STATUS_PUBLISHED: - return status - + if ( + ( + status in [accounts_models.DataSource.DATA_SOURCE_STATUS_DRAFT, + accounts_models.DataSource.DATA_SOURCE_STATUS_READY_TO_PUBLISH] and + self.instance.status in [accounts_models.DataSource.DATA_SOURCE_STATUS_DRAFT, + accounts_models.DataSource.DATA_SOURCE_STATUS_READY_TO_PUBLISH] + ) or + ( + status == accounts_models.DataSource.DATA_SOURCE_STATUS_DISABLED and + self.instance.status == accounts_models.DataSource.DATA_SOURCE_STATUS_PUBLISHED) + ): + return status raise accounts_exceptions.InvalidDataSourceStatusUpdateError() @@ -1140,6 +1113,7 @@ class Meta: read_only_fields = fields = ( 'id', 'icon', + 'code', 'name', 'description', 'version', @@ -1262,12 +1236,22 @@ def create(self, validated_data): class BaseDataLayerCRUDSerializer(serializers.ModelSerializer): def validate_name(self, name): - if re.match(r'[a-zA-Z0-9-\' _()]*$', name): - if accounts_models.DataLayer.objects.filter(name=name).exists(): - raise accounts_exceptions.DuplicateDataLayerNameError(message_kwargs={'name': name}) + if re.match(account_config.valid_name_pattern, name): return name raise accounts_exceptions.InvalidDataLayerNameError() + def validate_code(self, code): + if re.match(r'[A-Z0-9-\' _]*$', code): + # If its Existing layer, then code should match. Else raise error + # If its new Layer, then code should be unique. 
Else raise error + if ( + (self.instance and code != self.instance.code) or + (not self.instance and accounts_models.DataLayer.objects.filter(code=code).exists()) + ): + raise accounts_exceptions.DuplicateDataLayerCodeError(message_kwargs={'code': code}) + return code + raise accounts_exceptions.InvalidDataLayerCodeError() + def validate_applicable_countries(self, applicable_countries): """ Validate if the given countries are present in our proco DB @@ -1295,7 +1279,8 @@ def validate_applicable_countries(self, applicable_countries): code_lower=Lower('code'), name_lower=Lower('name') ).filter( - Q(name_lower=country_name_or_code.lower()) | Q(code_lower=country_name_or_code.lower()) + Q(name_lower=str(country_name_or_code).lower()) | Q( + code_lower=str(country_name_or_code).lower()) ).last() else: country_instance = locations_models.Country.objects.filter(id=country_data).last() @@ -1344,8 +1329,7 @@ def validate_data_source_column(self, data_source_column): applicable_cols = [col['name'] for col in data_source_column_configs if col.get('is_parameter', False)] if isinstance(data_source_column, list): - # TODO: Remove this dependency - # message='Can not have 2 column for 1 data source' + # Remove this dependency raise accounts_exceptions.InvalidDataSourceColumnForDataLayerError() elif isinstance(data_source_column, dict) and len(data_source_column) > 0: name = data_source_column.get('name') @@ -1374,10 +1358,6 @@ class CreateDataLayersSerializer(BaseDataLayerCRUDSerializer): legend_configs = serializers.JSONField(required=False) data_sources_list = serializers.JSONField() - # serializers.PrimaryKeyRelatedField( - # many=True, - # queryset=accounts_models.DataSource.objects.all() - # ) data_source_column = serializers.JSONField() class Meta: @@ -1391,6 +1371,7 @@ class Meta: fields = read_only_fields + ( 'icon', + 'code', 'name', 'description', 'version', @@ -1407,6 +1388,7 @@ class Meta: extra_kwargs = { 'icon': {'required': True}, + # 'code': {'required': True}, 'name': {'required': True}, 'type': {'required': True}, 'data_sources_list': {'required': True}, @@ -1419,6 +1401,11 @@ def validate_status(self, status): return status raise accounts_exceptions.InvalidDataLayerStatusError() + def to_internal_value(self, data): + if not data.get('code') and data.get('name'): + data['code'] = core_utilities.normalize_str(str(data.get('name'))).upper() + return super().to_internal_value(data) + def create(self, validated_data): """ create @@ -1484,6 +1471,7 @@ class Meta: ) fields = read_only_fields + ( + 'code', 'icon', 'name', 'description', @@ -1503,13 +1491,6 @@ class Meta: 'status': {'required': True}, } - def validate_name(self, name): - if re.match(r'[a-zA-Z0-9-\' _()]*$', name): - if name != self.instance.name and accounts_models.DataLayer.objects.filter(name=name).exists(): - raise accounts_exceptions.DuplicateDataLayerNameError(message_kwargs={'name': name}) - return name - raise accounts_exceptions.InvalidDataLayerNameError() - def validate_status(self, status): if status in [accounts_models.DataLayer.LAYER_STATUS_DRAFT, accounts_models.DataLayer.LAYER_STATUS_READY_TO_PUBLISH]: @@ -1543,15 +1524,11 @@ def _validate_user(self, instance): # else: check # 3. If necessary, the Publisher can edit the details and then approve the changes to the data layer. 
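+        # Allow the update when the requesting user created the data layer or holds the
+        # CAN_PUBLISH_DATA_LAYER permission; all other users fall through to
+        # InvalidUserOnDataLayerUpdateError below.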
- if request_user == instance.created_by: - return True - - user_is_publisher = len(get_user_emails_for_permissions( - [auth_models.RolePermission.CAN_PUBLISH_DATA_LAYER], - ids_to_filter=[request_user.id] - )) > 0 - - if user_is_publisher: + if ( + request_user == instance.created_by or + len(get_user_emails_for_permissions([auth_models.RolePermission.CAN_PUBLISH_DATA_LAYER], + ids_to_filter=[request_user.id])) > 0 + ): return True raise accounts_exceptions.InvalidUserOnDataLayerUpdateError() @@ -1623,6 +1600,7 @@ class Meta: 'created', 'last_modified_at', 'icon', + 'code', 'name', 'description', 'version', @@ -1646,14 +1624,11 @@ def validate_status(self, status): status == accounts_models.DataLayer.LAYER_STATUS_PUBLISHED and self.instance.status in [accounts_models.DataLayer.LAYER_STATUS_READY_TO_PUBLISH, accounts_models.DataLayer.LAYER_STATUS_DISABLED] - ): - return status - elif ( + ) or ( status == accounts_models.DataLayer.LAYER_STATUS_DISABLED and self.instance.status == accounts_models.DataLayer.LAYER_STATUS_PUBLISHED ): return status - raise accounts_exceptions.InvalidDataLayerStatusUpdateError() def update(self, instance, validated_data): @@ -1719,18 +1694,15 @@ def get_object_data(self, instance): instance.object_id, ) return self.make_url(request.build_absolute_uri(url_name)) - return def get_section_type(self, instance): if instance.content_type: return apps.get_model(instance.content_type.app_label, instance.content_type.model)._meta.verbose_name.title() - return def get_content_type(self, instance): if instance.content_type: return instance.content_type.app_label - return def get_action_flag(self, instance): if instance.action_flag == 1: diff --git a/proco/accounts/tests/test_api.py b/proco/accounts/tests/test_api.py index 68f3d70..56854fa 100755 --- a/proco/accounts/tests/test_api.py +++ b/proco/accounts/tests/test_api.py @@ -1,5 +1,6 @@ import os from collections import OrderedDict +from datetime import timedelta from django.conf import settings from django.core.cache import cache @@ -9,23 +10,27 @@ from rest_framework import status from proco.accounts import models as accounts_models +from proco.accounts.tests import test_utils as accounts_test_utilities +from proco.core import utils as core_utilities from proco.custom_auth.tests import test_utils as test_utilities from proco.locations.tests.factories import CountryFactory +from proco.schools.tests.factories import SchoolFactory from proco.utils.tests import TestAPIViewSetMixin def accounts_url(url_params, query_param, view_name='list-or-create-api-keys'): url = reverse('accounts:' + view_name, args=url_params) - view_info = resolve(url).func + view = resolve(url) + view_info = view.func if len(query_param) > 0: query_params = '?' 
+ '&'.join([key + '=' + str(val) for key, val in query_param.items()]) url += query_params - return url, view_info + return url, view, view_info class APIsApiTestCase(TestAPIViewSetMixin, TestCase): - databases = ['default'] + databases = ['default', ] @classmethod def setUpTestData(cls): @@ -43,7 +48,7 @@ def setUp(self): super().setUp() def test_list_apis_all(self): - url, view = accounts_url((), {}, view_name='list-apis') + url, _, view = accounts_url((), {}, view_name='list-apis') response = self.forced_auth_req('get', url, user=self.user, view=view) @@ -56,7 +61,7 @@ def test_list_apis_all(self): self.assertEqual(len(response_data['results']), 5) def test_list_apis_filter_on_code(self): - url, view = accounts_url((), { + url, _, view = accounts_url((), { 'code': 'DAILY_CHECK_APP' }, view_name='list-apis') @@ -71,7 +76,7 @@ def test_list_apis_filter_on_code(self): self.assertEqual(len(response_data['results']), 1) def test_list_apis_filter_on_category_public(self): - url, view = accounts_url((), { + url, _, view = accounts_url((), { 'category': 'public' }, view_name='list-apis') @@ -86,7 +91,7 @@ def test_list_apis_filter_on_category_public(self): self.assertEqual(len(response_data['results']), 2) def test_list_apis_filter_on_category_private(self): - url, view = accounts_url((), { + url, _, view = accounts_url((), { 'category': 'private' }, view_name='list-apis') @@ -102,7 +107,7 @@ def test_list_apis_filter_on_category_private(self): class APIKeysApiTestCase(TestAPIViewSetMixin, TestCase): - databases = ['default'] + databases = ['default', ] @classmethod def setUpTestData(cls): @@ -116,14 +121,14 @@ def setUpTestData(cls): cls.admin_user = test_utilities.setup_admin_user_by_role() cls.read_only_user = test_utilities.setup_read_only_user_by_role() - cls.country_one = CountryFactory() + cls.country = CountryFactory() def setUp(self): cache.clear() super().setUp() def test_list_api_keys_all_for_logged_in_user(self): - url, view = accounts_url((), {}) + url, _, view = accounts_url((), {}) response = self.forced_auth_req('get', url, user=self.admin_user, view=view) @@ -136,7 +141,7 @@ def test_list_api_keys_all_for_logged_in_user(self): self.assertEqual(len(response_data['results']), 0) def test_list_api_keys_all_for_read_only_user(self): - url, view = accounts_url((), {}) + url, _, view = accounts_url((), {}) response = self.forced_auth_req('get', url, user=self.read_only_user, view=view) @@ -149,14 +154,14 @@ def test_list_api_keys_all_for_read_only_user(self): self.assertEqual(len(response_data['results']), 0) def test_list_api_keys_all_for_non_logged_in_user(self): - url, view = accounts_url((), {}) + url, _, view = accounts_url((), {}) response = self.forced_auth_req('get', url, user=None, view=view) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_create_api_keys_for_admin_for_public_api(self): - url, view = accounts_url((), {}) + url, _, view = accounts_url((), {}) response = self.forced_auth_req( 'post', @@ -171,7 +176,7 @@ def test_create_api_keys_for_admin_for_public_api(self): self.assertEqual(response.status_code, status.HTTP_201_CREATED) def test_validate_api_keys_for_admin_for_public_api(self): - url, view = accounts_url((), {}) + url, _, view = accounts_url((), {}) response = self.forced_auth_req( 'post', @@ -188,7 +193,7 @@ def test_validate_api_keys_for_admin_for_public_api(self): response_data = response.data api_key = response_data['api_key'] - url, view = accounts_url((), {}, view_name='validate-an-api-key') + url, _, view = 
accounts_url((), {}, view_name='validate-an-api-key') get_response = self.forced_auth_req( 'put', @@ -202,8 +207,40 @@ def test_validate_api_keys_for_admin_for_public_api(self): self.assertEqual(get_response.status_code, status.HTTP_200_OK) + def test_validate_invalid_api_key_for_admin_for_public_api(self): + url, _, view = accounts_url((), {}) + + response = self.forced_auth_req( + 'post', + url, + user=self.admin_user, + view=view, + data={ + 'api': accounts_models.API.objects.get(code='COUNTRY').id, + } + ) + + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + + response_data = response.data + api_key = response_data['api_key'] + + url, _, view = accounts_url((), {}, view_name='validate-an-api-key') + + get_response = self.forced_auth_req( + 'put', + url, + user=self.admin_user, + data={ + 'api_id': accounts_models.API.objects.get(code='COUNTRY').id, + 'api_key': api_key + 'abc', + } + ) + + self.assertEqual(get_response.status_code, status.HTTP_404_NOT_FOUND) + def test_create_api_keys_for_admin_for_private_api(self): - url, view = accounts_url((), {}) + url, _, view = accounts_url((), {}) response = self.forced_auth_req( 'post', @@ -212,14 +249,14 @@ def test_create_api_keys_for_admin_for_private_api(self): view=view, data={ 'api': accounts_models.API.objects.get(code='DAILY_CHECK_APP').id, - 'active_countries_list': [self.country_one.id, ] + 'active_countries_list': [self.country.id, ] } ) self.assertEqual(response.status_code, status.HTTP_201_CREATED) def test_create_api_keys_for_read_only_user_for_public_api(self): - url, view = accounts_url((), {}) + url, _, view = accounts_url((), {}) response = self.forced_auth_req( 'post', @@ -228,14 +265,114 @@ def test_create_api_keys_for_read_only_user_for_public_api(self): view=view, data={ 'api': accounts_models.API.objects.get(code='COUNTRY').id, - 'active_countries_list': [self.country_one.id, ] + 'active_countries_list': [self.country.id, ] } ) self.assertEqual(response.status_code, status.HTTP_201_CREATED) def test_create_api_keys_for_read_only_user_for_private_api(self): - url, view = accounts_url((), {}) + url, _, view = accounts_url((), {}) + + response = self.forced_auth_req( + 'post', + url, + user=self.read_only_user, + view=view, + data={ + 'api': accounts_models.API.objects.get(code='DAILY_CHECK_APP').id, + 'active_countries_list': [self.country.id, ] + } + ) + + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + + def test_approve_api_key_for_read_only_user_for_private_api_by_admin(self): + url, _, view = accounts_url((), {}) + + response = self.forced_auth_req( + 'post', + url, + user=self.read_only_user, + view=view, + data={ + 'api': accounts_models.API.objects.get(code='DAILY_CHECK_APP').id, + 'active_countries_list': [self.country.id, ] + } + ) + + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + + response_data = response.data + + api_key_id = response_data['id'] + + url, _, view = accounts_url((api_key_id,), {}, + view_name='update-and-delete-api-key') + + put_response = self.forced_auth_req( + 'put', + url, + user=self.admin_user, + data={ + 'status': accounts_models.APIKey.APPROVED, + 'valid_to': core_utilities.get_current_datetime_object().date() + timedelta(days=30), + } + ) + + self.assertEqual(put_response.status_code, status.HTTP_200_OK) + + def test_create_api_key_extension_request_for_private_api(self): + url, _, view = accounts_url((), {}) + + response = self.forced_auth_req( + 'post', + url, + user=self.read_only_user, + view=view, + data={ + 'api': 
accounts_models.API.objects.get(code='DAILY_CHECK_APP').id, + 'active_countries_list': [self.country.id, ] + } + ) + + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + + response_data = response.data + + api_key_id = response_data['id'] + + url, _, view = accounts_url((api_key_id,), {}, + view_name='update-and-delete-api-key') + + put_response = self.forced_auth_req( + 'put', + url, + user=self.admin_user, + data={ + 'status': accounts_models.APIKey.APPROVED, + 'valid_to': core_utilities.get_current_datetime_object().date() + timedelta(days=30), + } + ) + + self.assertEqual(put_response.status_code, status.HTTP_200_OK) + + url, _, view = accounts_url((api_key_id,), {}, + view_name='request-api-key-extension') + + put_response = self.forced_auth_req( + 'put', + url, + user=self.read_only_user, + data={ + 'extension_valid_to': core_utilities.get_current_datetime_object().date() + timedelta(days=60), + } + ) + + self.assertEqual(put_response.status_code, status.HTTP_200_OK) + + def test_approve_api_key_extension_request_for_private_api_by_admin(self): + url, _, view = accounts_url((), {}) response = self.forced_auth_req( 'post', @@ -244,14 +381,62 @@ def test_create_api_keys_for_read_only_user_for_private_api(self): view=view, data={ 'api': accounts_models.API.objects.get(code='DAILY_CHECK_APP').id, - 'active_countries_list': [self.country_one.id, ] + 'active_countries_list': [self.country.id, ] } ) self.assertEqual(response.status_code, status.HTTP_201_CREATED) + response_data = response.data + + api_key_id = response_data['id'] + + url, _, view = accounts_url((api_key_id,), {}, + view_name='update-and-delete-api-key') + + put_response = self.forced_auth_req( + 'put', + url, + user=self.admin_user, + data={ + 'status': accounts_models.APIKey.APPROVED, + 'valid_to': core_utilities.get_current_datetime_object().date() + timedelta(days=30), + } + ) + + self.assertEqual(put_response.status_code, status.HTTP_200_OK) + + url, _, view = accounts_url((api_key_id,), {}, + view_name='request-api-key-extension') + + put_response = self.forced_auth_req( + 'put', + url, + user=self.read_only_user, + data={ + 'extension_valid_to': core_utilities.get_current_datetime_object().date() + timedelta(days=60), + } + ) + + self.assertEqual(put_response.status_code, status.HTTP_200_OK) + + url, _, view = accounts_url((api_key_id,), {}, + view_name='update-and-delete-api-key') + + put_response = self.forced_auth_req( + 'put', + url, + user=self.admin_user, + data={ + 'extension_status': accounts_models.APIKey.APPROVED, + 'extension_valid_to': core_utilities.get_current_datetime_object().date() + timedelta(days=100), + } + ) + + self.assertEqual(put_response.status_code, status.HTTP_200_OK) + def test_delete_api_key_by_admin(self): - url, view = accounts_url((), {}) + url, _, view = accounts_url((), {}) response = self.forced_auth_req( 'post', @@ -269,8 +454,8 @@ def test_delete_api_key_by_admin(self): api_key_id = response_data['id'] - url, view = accounts_url((api_key_id,), {}, - view_name='update-and-delete-api-key') + url, _, view = accounts_url((api_key_id,), {}, + view_name='update-and-delete-api-key') delete_response = self.forced_auth_req( 'delete', @@ -281,7 +466,7 @@ def test_delete_api_key_by_admin(self): self.assertEqual(delete_response.status_code, status.HTTP_204_NO_CONTENT) def test_delete_api_key_by_read_only_user(self): - url, view = accounts_url((), {}) + url, _, view = accounts_url((), {}) response = self.forced_auth_req( 'post', @@ -299,8 +484,8 @@ def 
test_delete_api_key_by_read_only_user(self): api_key_id = response_data['id'] - url, view = accounts_url((api_key_id,), {}, - view_name='update-and-delete-api-key') + url, _, view = accounts_url((api_key_id,), {}, + view_name='update-and-delete-api-key') delete_response = self.forced_auth_req( 'delete', @@ -308,11 +493,11 @@ def test_delete_api_key_by_read_only_user(self): user=self.read_only_user, ) - self.assertEqual(delete_response.status_code, status.HTTP_403_FORBIDDEN) + self.assertEqual(delete_response.status_code, status.HTTP_204_NO_CONTENT) class NotificationsApiTestCase(TestAPIViewSetMixin, TestCase): - databases = ['default'] + databases = ['default', ] @classmethod def setUpTestData(cls): @@ -324,7 +509,7 @@ def setUp(self): super().setUp() def test_list_for_admin_user(self): - url, view = accounts_url((), {}, view_name='list-send-notifications') + url, _, view = accounts_url((), {}, view_name='list-send-notifications') response = self.forced_auth_req('get', url, user=self.admin_user, view=view) @@ -337,21 +522,21 @@ def test_list_for_admin_user(self): self.assertEqual(len(response_data['results']), 0) def test_list_for_real_only_user(self): - url, view = accounts_url((), {}, view_name='list-send-notifications') + url, _, view = accounts_url((), {}, view_name='list-send-notifications') response = self.forced_auth_req('get', url, user=self.read_only_user, view=view) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_list_for_non_logged_in_user(self): - url, view = accounts_url((), {}, view_name='list-send-notifications') + url, _, view = accounts_url((), {}, view_name='list-send-notifications') response = self.forced_auth_req('get', url, user=None, view=view) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_create_notification_by_admin(self): - url, view = accounts_url((), {}, view_name='list-send-notifications') + url, _, view = accounts_url((), {}, view_name='list-send-notifications') response = self.forced_auth_req( 'post', @@ -369,7 +554,7 @@ def test_create_notification_by_admin(self): self.assertEqual(response.status_code, status.HTTP_201_CREATED) def test_create_notification_by_admin_single_recipient(self): - url, view = accounts_url((), {}, view_name='list-send-notifications') + url, _, view = accounts_url((), {}, view_name='list-send-notifications') response = self.forced_auth_req( 'post', @@ -387,7 +572,7 @@ def test_create_notification_by_admin_single_recipient(self): self.assertEqual(response.status_code, status.HTTP_201_CREATED) def test_create_notification_by_admin_invalid_recipient(self): - url, view = accounts_url((), {}, view_name='list-send-notifications') + url, _, view = accounts_url((), {}, view_name='list-send-notifications') response = self.forced_auth_req( 'post', @@ -406,12 +591,12 @@ def test_create_notification_by_admin_invalid_recipient(self): class AppStaticConfigurationsApiTestCase(TestAPIViewSetMixin, TestCase): - databases = ['default'] + databases = ['default', ] def test_get(self): - url, view = accounts_url((), {}, view_name='get-app-static-configurations') + url, _, view = accounts_url((), {}, view_name='get-app-static-configurations') - response = self.forced_auth_req('get', url, view=view) + response = self.forced_auth_req('get', url, _, view=view) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -422,21 +607,122 @@ def test_get(self): class TimePlayerApiTestCase(TestAPIViewSetMixin, TestCase): - databases = ['default'] + databases = ['default', ] + + @classmethod + def 
setUpTestData(cls): + args = ['--delete_data_sources', '--update_data_sources', '--update_data_layers'] + call_command('load_system_data_layers', *args) + + cls.admin_user = test_utilities.setup_admin_user_by_role() + cls.read_only_user = test_utilities.setup_read_only_user_by_role() def test_get_invalid_layer_id(self): - url, view = accounts_url((), { + url, _, view = accounts_url((), { 'layer_id': 123, 'country_id': 123, }, view_name='get-time-player-data-v2') - response = self.forced_auth_req('get', url, view=view) + response = self.forced_auth_req('get', url, _, view=view) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) + def test_for_live_layer(self): + pcdc_data_source = accounts_models.DataSource.objects.filter( + data_source_type=accounts_models.DataSource.DATA_SOURCE_TYPE_DAILY_CHECK_APP, + ).first() + + url, _, view = accounts_url((), {}, view_name='list-or-create-data-layers') + + response = self.forced_auth_req( + 'post', + url, + user=self.admin_user, + view=view, + data={ + 'icon': '', + 'name': 'Test data layer 3', + 'description': 'Test data layer 3 description', + 'version': '1.0.0', + 'type': accounts_models.DataLayer.LAYER_TYPE_LIVE, + 'data_sources_list': [pcdc_data_source.id, ], + 'data_source_column': pcdc_data_source.column_config[0], + 'global_benchmark': { + 'value': '20000000', + 'unit': 'bps', + 'convert_unit': 'mbps' + }, + 'is_reverse': False, + 'legend_configs': { + 'good': { + 'values': [], + 'labels': 'Good' + }, + 'moderate': { + 'values': [], + 'labels': 'Moderate' + }, + 'bad': { + 'values': [], + 'labels': 'Bad' + }, + 'unknown': { + 'values': [], + 'labels': 'Unknown' + } + } + } + ) + + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + + response_data = response.data + + layer_id = response_data['id'] + + url, _, view = accounts_url((layer_id,), {}, + view_name='update-or-delete-data-layer') + + put_response = self.forced_auth_req( + 'put', + url, + user=self.admin_user, + data={ + 'status': accounts_models.DataLayer.LAYER_STATUS_READY_TO_PUBLISH, + } + ) + + self.assertEqual(put_response.status_code, status.HTTP_200_OK) + + url, _, view = accounts_url((layer_id,), {}, + view_name='publish-data-layer') + + put_response = self.forced_auth_req( + 'put', + url, + user=self.admin_user, + data={ + 'status': accounts_models.DataLayer.LAYER_STATUS_PUBLISHED, + } + ) + + self.assertEqual(put_response.status_code, status.HTTP_200_OK) + + url, _, view = accounts_url((), { + 'layer_id': layer_id, + 'country_id': 123, + 'z': '2', + 'x': '1', + 'y': '2.mvt', + }, view_name='get-time-player-data-v2') + + response = self.forced_auth_req('get', url, _, view=view) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + class DataSourceApiTestCase(TestAPIViewSetMixin, TestCase): - databases = ['default'] + databases = ['default', ] @classmethod def setUpTestData(cls): @@ -450,7 +736,7 @@ def setUp(self): super().setUp() def test_list_data_sources_all(self): - url, view = accounts_url((), {}, view_name='list-or-create-data-sources') + url, _, view = accounts_url((), {}, view_name='list-or-create-data-sources') response = self.forced_auth_req('get', url, user=self.user, view=view) @@ -463,7 +749,7 @@ def test_list_data_sources_all(self): self.assertEqual(len(response_data['results']), 3) def test_list_data_sources_filter_on_status_published(self): - url, view = accounts_url((), { + url, _, view = accounts_url((), { 'status': 'PUBLISHED' }, view_name='list-or-create-data-sources') @@ -478,7 +764,7 @@ def 
test_list_data_sources_filter_on_status_published(self): self.assertEqual(len(response_data['results']), 3) def test_list_data_sources_filter_on_status_draft(self): - url, view = accounts_url((), { + url, _, view = accounts_url((), { 'status': 'DRAFT' }, view_name='list-or-create-data-sources') @@ -493,7 +779,7 @@ def test_list_data_sources_filter_on_status_draft(self): self.assertEqual(len(response_data['results']), 0) def test_list_data_sources_filter_on_status_published_without_auth(self): - url, view = accounts_url((), { + url, _, view = accounts_url((), { 'status': 'PUBLISHED' }, view_name='list-or-create-data-sources') @@ -502,7 +788,7 @@ def test_list_data_sources_filter_on_status_published_without_auth(self): self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_delete_data_source_by_admin(self): - url, view = accounts_url((), {}, view_name='list-or-create-data-sources') + url, _, view = accounts_url((), {}, view_name='list-or-create-data-sources') response = self.forced_auth_req( 'post', @@ -526,8 +812,8 @@ def test_delete_data_source_by_admin(self): source_id = response_data['id'] - url, view = accounts_url((source_id,), {}, - view_name='update-or-delete-data-source') + url, _, view = accounts_url((source_id,), {}, + view_name='update-or-delete-data-source') delete_response = self.forced_auth_req( 'delete', @@ -538,7 +824,7 @@ def test_delete_data_source_by_admin(self): self.assertEqual(delete_response.status_code, status.HTTP_204_NO_CONTENT) def test_publish_data_source_by_admin(self): - url, view = accounts_url((), {}, view_name='list-or-create-data-sources') + url, _, view = accounts_url((), {}, view_name='list-or-create-data-sources') response = self.forced_auth_req( 'post', @@ -562,8 +848,8 @@ def test_publish_data_source_by_admin(self): source_id = response_data['id'] - url, view = accounts_url((source_id,), {}, - view_name='publish-data-source') + url, _, view = accounts_url((source_id,), {}, + view_name='publish-data-source') put_response = self.forced_auth_req( 'put', @@ -578,7 +864,7 @@ def test_publish_data_source_by_admin(self): class DataLayerApiTestCase(TestAPIViewSetMixin, TestCase): - databases = ['default'] + databases = ['default', ] @classmethod def setUpTestData(cls): @@ -593,7 +879,7 @@ def setUp(self): super().setUp() def test_list_data_layers_all(self): - url, view = accounts_url((), {}, view_name='list-or-create-data-layers') + url, _, view = accounts_url((), {}, view_name='list-or-create-data-layers') response = self.forced_auth_req('get', url, user=self.admin_user, view=view) @@ -606,7 +892,7 @@ def test_list_data_layers_all(self): self.assertEqual(len(response_data['results']), 2) def test_list_data_layers_filter_on_status_published(self): - url, view = accounts_url((), { + url, _, view = accounts_url((), { 'status': 'PUBLISHED' }, view_name='list-or-create-data-layers') @@ -621,7 +907,7 @@ def test_list_data_layers_filter_on_status_published(self): self.assertEqual(len(response_data['results']), 2) def test_list_published_data_layers_for_admin(self): - url, view = accounts_url(('PUBLISHED',), { + url, _, view = accounts_url(('PUBLISHED',), { }, view_name='list-published-data-layers') response = self.forced_auth_req('get', url, user=self.admin_user, view=view) @@ -635,7 +921,7 @@ def test_list_published_data_layers_for_admin(self): self.assertEqual(len(response_data['results']), 2) def test_list_published_data_layers_without_auth(self): - url, view = accounts_url(('PUBLISHED',), { + url, _, view = accounts_url(('PUBLISHED',), { 
}, view_name='list-published-data-layers') response = self.forced_auth_req('get', url, user=None, view=view) @@ -649,7 +935,7 @@ def test_list_published_data_layers_without_auth(self): self.assertEqual(len(response_data['results']), 2) def test_list_published_data_layers_for_country(self): - url, view = accounts_url(('PUBLISHED',), { + url, _, view = accounts_url(('PUBLISHED',), { 'country_id': 123456789, }, view_name='list-published-data-layers') @@ -664,7 +950,7 @@ def test_list_published_data_layers_for_country(self): self.assertEqual(len(response_data['results']), 0) def test_list_data_layers_filter_on_status_draft(self): - url, view = accounts_url((), { + url, _, view = accounts_url((), { 'status': 'DRAFT' }, view_name='list-or-create-data-layers') @@ -679,7 +965,7 @@ def test_list_data_layers_filter_on_status_draft(self): self.assertEqual(len(response_data['results']), 0) def test_list_data_layers_filter_on_status_published_without_auth(self): - url, view = accounts_url((), { + url, _, view = accounts_url((), { 'status': 'PUBLISHED' }, view_name='list-or-create-data-layers') @@ -692,7 +978,7 @@ def test_create_data_layer_by_admin(self): data_source_type=accounts_models.DataSource.DATA_SOURCE_TYPE_DAILY_CHECK_APP, ).first() - url, view = accounts_url((), {}, view_name='list-or-create-data-layers') + url, _, view = accounts_url((), {}, view_name='list-or-create-data-layers') response = self.forced_auth_req( 'post', @@ -707,6 +993,30 @@ def test_create_data_layer_by_admin(self): 'type': accounts_models.DataLayer.LAYER_TYPE_LIVE, 'data_sources_list': [pcdc_data_source.id, ], 'data_source_column': pcdc_data_source.column_config[0], + 'global_benchmark': { + 'value': '20000000', + 'unit': 'bps', + 'convert_unit': 'mbps' + }, + 'is_reverse': False, + 'legend_configs': { + 'good': { + 'values': [], + 'labels': 'Good' + }, + 'moderate': { + 'values': [], + 'labels': 'Moderate' + }, + 'bad': { + 'values': [], + 'labels': 'Bad' + }, + 'unknown': { + 'values': [], + 'labels': 'Unknown' + } + } } ) @@ -717,7 +1027,7 @@ def test_publish_in_draft_data_layer_by_admin(self): data_source_type=accounts_models.DataSource.DATA_SOURCE_TYPE_DAILY_CHECK_APP, ).first() - url, view = accounts_url((), {}, view_name='list-or-create-data-layers') + url, _, view = accounts_url((), {}, view_name='list-or-create-data-layers') response = self.forced_auth_req( 'post', @@ -732,6 +1042,30 @@ def test_publish_in_draft_data_layer_by_admin(self): 'type': accounts_models.DataLayer.LAYER_TYPE_LIVE, 'data_sources_list': [pcdc_data_source.id, ], 'data_source_column': pcdc_data_source.column_config[0], + 'global_benchmark': { + 'value': '20000000', + 'unit': 'bps', + 'convert_unit': 'mbps' + }, + 'is_reverse': False, + 'legend_configs': { + 'good': { + 'values': [], + 'labels': 'Good' + }, + 'moderate': { + 'values': [], + 'labels': 'Moderate' + }, + 'bad': { + 'values': [], + 'labels': 'Bad' + }, + 'unknown': { + 'values': [], + 'labels': 'Unknown' + } + } } ) @@ -741,8 +1075,8 @@ def test_publish_in_draft_data_layer_by_admin(self): layer_id = response_data['id'] - url, view = accounts_url((layer_id,), {}, - view_name='publish-data-layer') + url, _, view = accounts_url((layer_id,), {}, + view_name='publish-data-layer') put_response = self.forced_auth_req( 'put', @@ -760,7 +1094,7 @@ def test_delete_in_draft_data_layer_by_admin(self): data_source_type=accounts_models.DataSource.DATA_SOURCE_TYPE_DAILY_CHECK_APP, ).first() - url, view = accounts_url((), {}, view_name='list-or-create-data-layers') + url, _, view = 
accounts_url((), {}, view_name='list-or-create-data-layers') response = self.forced_auth_req( 'post', @@ -775,6 +1109,30 @@ def test_delete_in_draft_data_layer_by_admin(self): 'type': accounts_models.DataLayer.LAYER_TYPE_LIVE, 'data_sources_list': [pcdc_data_source.id, ], 'data_source_column': pcdc_data_source.column_config[0], + 'global_benchmark': { + 'value': '20000000', + 'unit': 'bps', + 'convert_unit': 'mbps' + }, + 'is_reverse': False, + 'legend_configs': { + 'good': { + 'values': [], + 'labels': 'Good' + }, + 'moderate': { + 'values': [], + 'labels': 'Moderate' + }, + 'bad': { + 'values': [], + 'labels': 'Bad' + }, + 'unknown': { + 'values': [], + 'labels': 'Unknown' + } + } } ) @@ -784,8 +1142,8 @@ def test_delete_in_draft_data_layer_by_admin(self): layer_id = response_data['id'] - url, view = accounts_url((layer_id,), {}, - view_name='update-or-delete-data-layer') + url, _, view = accounts_url((layer_id,), {}, + view_name='update-or-delete-data-layer') put_response = self.forced_auth_req( 'delete', @@ -800,7 +1158,7 @@ def test_publish_in_ready_data_layer_by_admin(self): data_source_type=accounts_models.DataSource.DATA_SOURCE_TYPE_DAILY_CHECK_APP, ).first() - url, view = accounts_url((), {}, view_name='list-or-create-data-layers') + url, _, view = accounts_url((), {}, view_name='list-or-create-data-layers') response = self.forced_auth_req( 'post', @@ -815,6 +1173,30 @@ def test_publish_in_ready_data_layer_by_admin(self): 'type': accounts_models.DataLayer.LAYER_TYPE_LIVE, 'data_sources_list': [pcdc_data_source.id, ], 'data_source_column': pcdc_data_source.column_config[0], + 'global_benchmark': { + 'value': '20000000', + 'unit': 'bps', + 'convert_unit': 'mbps' + }, + 'is_reverse': False, + 'legend_configs': { + 'good': { + 'values': [], + 'labels': 'Good' + }, + 'moderate': { + 'values': [], + 'labels': 'Moderate' + }, + 'bad': { + 'values': [], + 'labels': 'Bad' + }, + 'unknown': { + 'values': [], + 'labels': 'Unknown' + } + } } ) @@ -824,8 +1206,8 @@ def test_publish_in_ready_data_layer_by_admin(self): layer_id = response_data['id'] - url, view = accounts_url((layer_id,), {}, - view_name='update-or-delete-data-layer') + url, _, view = accounts_url((layer_id,), {}, + view_name='update-or-delete-data-layer') put_response = self.forced_auth_req( 'put', @@ -838,8 +1220,8 @@ def test_publish_in_ready_data_layer_by_admin(self): self.assertEqual(put_response.status_code, status.HTTP_200_OK) - url, view = accounts_url((layer_id,), {}, - view_name='publish-data-layer') + url, _, view = accounts_url((layer_id,), {}, + view_name='publish-data-layer') put_response = self.forced_auth_req( 'put', @@ -857,7 +1239,7 @@ def test_preview_pcdc_data_layer_by_admin(self): data_source_type=accounts_models.DataSource.DATA_SOURCE_TYPE_DAILY_CHECK_APP, ).first() - url, view = accounts_url((), {}, view_name='list-or-create-data-layers') + url, _, view = accounts_url((), {}, view_name='list-or-create-data-layers') response = self.forced_auth_req( 'post', @@ -872,6 +1254,30 @@ def test_preview_pcdc_data_layer_by_admin(self): 'type': accounts_models.DataLayer.LAYER_TYPE_LIVE, 'data_sources_list': [pcdc_data_source.id, ], 'data_source_column': pcdc_data_source.column_config[0], + 'global_benchmark': { + 'value': '20000000', + 'unit': 'bps', + 'convert_unit': 'mbps' + }, + 'is_reverse': False, + 'legend_configs': { + 'good': { + 'values': [], + 'labels': 'Good' + }, + 'moderate': { + 'values': [], + 'labels': 'Moderate' + }, + 'bad': { + 'values': [], + 'labels': 'Bad' + }, + 'unknown': { + 'values': [], + 
'labels': 'Unknown' + } + } } ) @@ -881,8 +1287,8 @@ def test_preview_pcdc_data_layer_by_admin(self): layer_id = response_data['id'] - url, view = accounts_url((layer_id,), {}, - view_name='preview-data-layer') + url, _, view = accounts_url((layer_id,), {}, + view_name='preview-data-layer') put_response = self.forced_auth_req( 'get', @@ -897,7 +1303,7 @@ def test_preview_qos_data_layer_by_admin(self): data_source_type=accounts_models.DataSource.DATA_SOURCE_TYPE_QOS, ).first() - url, view = accounts_url((), {}, view_name='list-or-create-data-layers') + url, _, view = accounts_url((), {}, view_name='list-or-create-data-layers') response = self.forced_auth_req( 'post', @@ -912,6 +1318,30 @@ def test_preview_qos_data_layer_by_admin(self): 'type': accounts_models.DataLayer.LAYER_TYPE_LIVE, 'data_sources_list': [qos_data_source.id, ], 'data_source_column': qos_data_source.column_config[0], + 'global_benchmark': { + 'value': '20000000', + 'unit': 'bps', + 'convert_unit': 'mbps' + }, + 'is_reverse': False, + 'legend_configs': { + 'good': { + 'values': [], + 'labels': 'Good' + }, + 'moderate': { + 'values': [], + 'labels': 'Moderate' + }, + 'bad': { + 'values': [], + 'labels': 'Bad' + }, + 'unknown': { + 'values': [], + 'labels': 'Unknown' + } + } } ) @@ -921,8 +1351,8 @@ def test_preview_qos_data_layer_by_admin(self): layer_id = response_data['id'] - url, view = accounts_url((layer_id,), {}, - view_name='preview-data-layer') + url, _, view = accounts_url((layer_id,), {}, + view_name='preview-data-layer') put_response = self.forced_auth_req( 'get', @@ -933,27 +1363,14 @@ def test_preview_qos_data_layer_by_admin(self): self.assertEqual(put_response.status_code, status.HTTP_200_OK) def test_preview_static_data_layer_by_admin(self): - master_data_source = accounts_models.DataSource.objects.filter( - data_source_type=accounts_models.DataSource.DATA_SOURCE_TYPE_SCHOOL_MASTER, - ).first() - - url, view = accounts_url((), {}, view_name='list-or-create-data-layers') + url, _, view = accounts_url((), {}, view_name='list-or-create-data-layers') response = self.forced_auth_req( 'post', url, user=self.admin_user, view=view, - data={ - 'icon': '', - 'name': 'Test data layer', - 'description': 'Test data layer description', - 'version': '1.0.0', - 'type': accounts_models.DataLayer.LAYER_TYPE_STATIC, - 'data_sources_list': [master_data_source.id, ], - 'data_source_column': master_data_source.column_config[0], - 'legend_configs': {}, - } + data=accounts_test_utilities.static_coverage_layer_data() ) self.assertEqual(response.status_code, status.HTTP_201_CREATED) @@ -962,8 +1379,8 @@ def test_preview_static_data_layer_by_admin(self): layer_id = response_data['id'] - url, view = accounts_url((layer_id,), {}, - view_name='preview-data-layer') + url, _, view = accounts_url((layer_id,), {}, + view_name='preview-data-layer') put_response = self.forced_auth_req( 'get', @@ -972,3 +1389,639 @@ def test_preview_static_data_layer_by_admin(self): ) self.assertEqual(put_response.status_code, status.HTTP_200_OK) + + +class AdvanceFiltersApiTestCase(TestAPIViewSetMixin, TestCase): + databases = ['default', ] + + def setUp(self): + cache.clear() + super().setUp() + + def test_list_advance_filters(self): + url, _, view = accounts_url((), {}, view_name='list-advanced-filters') + + response = self.forced_auth_req('get', url, _, view=view) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + response_data = response.data + + self.assertEqual(type(response_data), dict) + self.assertTrue(response_data['count'] > 0) + 
self.assertTrue(len(response_data['results']) > 0) + + +class LogActionApiTestCase(TestAPIViewSetMixin, TestCase): + databases = ['default', ] + + @classmethod + def setUpTestData(cls): + cls.admin_user = test_utilities.setup_admin_user_by_role() + cls.read_only_user = test_utilities.setup_read_only_user_by_role() + + def test_list_for_admin_user(self): + url, _, view = accounts_url((), {}, view_name='list-recent-action-log') + + response = self.forced_auth_req('get', url, user=self.admin_user, view=view) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + response_data = response.data + + self.assertEqual(type(response_data), dict) + self.assertEqual(response_data['count'], 0) + self.assertEqual(len(response_data['results']), 0) + + def test_list_for_readonly_user(self): + url, _, view = accounts_url((), {}, view_name='list-recent-action-log') + + response = self.forced_auth_req('get', url, user=self.read_only_user, view=view) + + self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) + + +class DataLayerMapApiTestCase(TestAPIViewSetMixin, TestCase): + databases = ['default', ] + + @classmethod + def setUpTestData(cls): + args = ['--delete_data_sources', '--update_data_sources', '--update_data_layers'] + call_command('load_system_data_layers', *args) + + cls.admin_user = test_utilities.setup_admin_user_by_role() + cls.read_only_user = test_utilities.setup_read_only_user_by_role() + + cls.country = CountryFactory() + + def setUp(self): + cache.clear() + super().setUp() + + def test_static_data_layer_map_country_view(self): + url, _, view = accounts_url((), {}, view_name='list-or-create-data-layers') + + response = self.forced_auth_req( + 'post', + url, + user=self.admin_user, + view=view, + data=accounts_test_utilities.static_coverage_layer_data() + ) + + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + + response_data = response.data + + layer_id = response_data['id'] + + url, _, view = accounts_url((layer_id,), {}, + view_name='update-or-delete-data-layer') + + put_response = self.forced_auth_req( + 'put', + url, + user=self.admin_user, + data={ + 'status': accounts_models.DataLayer.LAYER_STATUS_READY_TO_PUBLISH, + } + ) + + self.assertEqual(put_response.status_code, status.HTTP_200_OK) + + url, _, view = accounts_url((layer_id,), {}, + view_name='publish-data-layer') + + put_response = self.forced_auth_req( + 'put', + url, + user=self.admin_user, + data={ + 'status': accounts_models.DataLayer.LAYER_STATUS_PUBLISHED, + } + ) + + self.assertEqual(put_response.status_code, status.HTTP_200_OK) + + url, view, view_info = accounts_url( + (layer_id,), + { + 'country_id': self.country.id, + 'z': '8', + 'x': '82', + 'y': '114.mvt' + }, + view_name='map-data-layer' + ) + + response = self.forced_auth_req( + 'get', + url, + view=view, + view_info=view_info, + ) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + def test_static_data_layer_map_school_view(self): + url, _, view = accounts_url((), {}, view_name='list-or-create-data-layers') + + response = self.forced_auth_req( + 'post', + url, + user=self.admin_user, + view=view, + data=accounts_test_utilities.static_coverage_layer_data() + ) + + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + + response_data = response.data + + layer_id = response_data['id'] + + url, _, view = accounts_url((layer_id,), {}, + view_name='update-or-delete-data-layer') + + put_response = self.forced_auth_req( + 'put', + url, + user=self.admin_user, + data={ + 'status': 
accounts_models.DataLayer.LAYER_STATUS_READY_TO_PUBLISH, + } + ) + + self.assertEqual(put_response.status_code, status.HTTP_200_OK) + + url, _, view = accounts_url((layer_id,), {}, + view_name='publish-data-layer') + + put_response = self.forced_auth_req( + 'put', + url, + user=self.admin_user, + data={ + 'status': accounts_models.DataLayer.LAYER_STATUS_PUBLISHED, + } + ) + + self.assertEqual(put_response.status_code, status.HTTP_200_OK) + + url, view, view_info = accounts_url( + (layer_id,), + { + 'country_id': self.country.id, + 'school_id': '1234567', + 'z': '8', + 'x': '82', + 'y': '114.mvt' + }, + view_name='map-data-layer' + ) + + response = self.forced_auth_req( + 'get', + url, + view=view, + view_info=view_info, + ) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + def test_live_data_layer_map_country_view(self): + url, _, view = accounts_url((), {}, view_name='list-or-create-data-layers') + + response = self.forced_auth_req( + 'post', + url, + user=self.admin_user, + view=view, + data=accounts_test_utilities.live_download_layer_data_pcdc(), + ) + + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + + response_data = response.data + + layer_id = response_data['id'] + + url, _, view = accounts_url((layer_id,), {}, + view_name='update-or-delete-data-layer') + + put_response = self.forced_auth_req( + 'put', + url, + user=self.admin_user, + data={ + 'status': accounts_models.DataLayer.LAYER_STATUS_READY_TO_PUBLISH, + } + ) + + self.assertEqual(put_response.status_code, status.HTTP_200_OK) + + url, _, view = accounts_url((layer_id,), {}, + view_name='publish-data-layer') + + put_response = self.forced_auth_req( + 'put', + url, + user=self.admin_user, + data={ + 'status': accounts_models.DataLayer.LAYER_STATUS_PUBLISHED, + } + ) + + self.assertEqual(put_response.status_code, status.HTTP_200_OK) + + url, view, view_info = accounts_url( + (layer_id,), + { + 'country_id': self.country.id, + 'benchmark': 'global', + 'start_date': '24-06-2024', + 'end_date': '30-06-2024', + 'is_weekly': 'true', + 'z': '8', + 'x': '82', + 'y': '114.mvt' + }, + view_name='map-data-layer' + ) + + response = self.forced_auth_req( + 'get', + url, + view=view, + view_info=view_info, + ) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + def test_live_data_layer_map_school_view(self): + url, _, view = accounts_url((), {}, view_name='list-or-create-data-layers') + + response = self.forced_auth_req( + 'post', + url, + user=self.admin_user, + view=view, + data=accounts_test_utilities.live_download_layer_data_pcdc(), + ) + + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + + response_data = response.data + + layer_id = response_data['id'] + + url, _, view = accounts_url((layer_id,), {}, + view_name='update-or-delete-data-layer') + + put_response = self.forced_auth_req( + 'put', + url, + user=self.admin_user, + data={ + 'status': accounts_models.DataLayer.LAYER_STATUS_READY_TO_PUBLISH, + } + ) + + self.assertEqual(put_response.status_code, status.HTTP_200_OK) + + url, _, view = accounts_url((layer_id,), {}, + view_name='publish-data-layer') + + put_response = self.forced_auth_req( + 'put', + url, + user=self.admin_user, + data={ + 'status': accounts_models.DataLayer.LAYER_STATUS_PUBLISHED, + } + ) + + self.assertEqual(put_response.status_code, status.HTTP_200_OK) + + url, view, view_info = accounts_url( + (layer_id,), + { + 'country_id': self.country.id, + 'school_id': '1234568', + 'benchmark': 'global', + 'start_date': '24-06-2024', + 'end_date': 
'30-06-2024', + 'is_weekly': 'true', + 'z': '8', + 'x': '82', + 'y': '114.mvt' + }, + view_name='map-data-layer' + ) + + response = self.forced_auth_req( + 'get', + url, + view=view, + view_info=view_info, + ) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + +class DataLayerInfoApiTestCase(TestAPIViewSetMixin, TestCase): + databases = ['default', ] + + @classmethod + def setUpTestData(cls): + args = ['--delete_data_sources', '--update_data_sources', '--update_data_layers'] + call_command('load_system_data_layers', *args) + + cls.admin_user = test_utilities.setup_admin_user_by_role() + cls.read_only_user = test_utilities.setup_read_only_user_by_role() + + cls.country = CountryFactory() + cls.school = SchoolFactory() + + def setUp(self): + cache.clear() + super().setUp() + + def test_static_data_layer_map_country_view(self): + url, _, view = accounts_url((), {}, view_name='list-or-create-data-layers') + + response = self.forced_auth_req( + 'post', + url, + user=self.admin_user, + view=view, + data=accounts_test_utilities.static_coverage_layer_data() + ) + + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + + response_data = response.data + + layer_id = response_data['id'] + + url, _, view = accounts_url((layer_id,), {}, + view_name='update-or-delete-data-layer') + + put_response = self.forced_auth_req( + 'put', + url, + user=self.admin_user, + data={ + 'status': accounts_models.DataLayer.LAYER_STATUS_READY_TO_PUBLISH, + } + ) + + self.assertEqual(put_response.status_code, status.HTTP_200_OK) + + url, _, view = accounts_url((layer_id,), {}, + view_name='publish-data-layer') + + put_response = self.forced_auth_req( + 'put', + url, + user=self.admin_user, + data={ + 'status': accounts_models.DataLayer.LAYER_STATUS_PUBLISHED, + } + ) + + self.assertEqual(put_response.status_code, status.HTTP_200_OK) + + url, view, view_info = accounts_url( + (layer_id,), + { + 'country_id': self.country.id, + }, + view_name='info-data-layer' + ) + + response = self.forced_auth_req( + 'get', + url, + view=view, + view_info=view_info, + ) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + def test_static_data_layer_map_school_view(self): + url, _, view = accounts_url((), {}, view_name='list-or-create-data-layers') + + response = self.forced_auth_req( + 'post', + url, + user=self.admin_user, + view=view, + data=accounts_test_utilities.static_coverage_layer_data() + ) + + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + + response_data = response.data + + layer_id = response_data['id'] + + url, _, view = accounts_url((layer_id,), {}, + view_name='update-or-delete-data-layer') + + put_response = self.forced_auth_req( + 'put', + url, + user=self.admin_user, + data={ + 'status': accounts_models.DataLayer.LAYER_STATUS_READY_TO_PUBLISH, + } + ) + + self.assertEqual(put_response.status_code, status.HTTP_200_OK) + + url, _, view = accounts_url((layer_id,), {}, + view_name='publish-data-layer') + + put_response = self.forced_auth_req( + 'put', + url, + user=self.admin_user, + data={ + 'status': accounts_models.DataLayer.LAYER_STATUS_PUBLISHED, + } + ) + + self.assertEqual(put_response.status_code, status.HTTP_200_OK) + + url, view, view_info = accounts_url( + (layer_id,), + { + 'country_id': self.country.id, + 'school_id': '1234567', + }, + view_name='info-data-layer' + ) + + response = self.forced_auth_req( + 'get', + url, + view=view, + view_info=view_info, + ) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + def 
test_live_data_layer_map_country_view(self): + url, _, view = accounts_url((), {}, view_name='list-or-create-data-layers') + + response = self.forced_auth_req( + 'post', + url, + user=self.admin_user, + view=view, + data=accounts_test_utilities.live_download_layer_data_pcdc(), + ) + + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + + response_data = response.data + + layer_id = response_data['id'] + + url, _, view = accounts_url((layer_id,), {}, + view_name='update-or-delete-data-layer') + + put_response = self.forced_auth_req( + 'put', + url, + user=self.admin_user, + data={ + 'status': accounts_models.DataLayer.LAYER_STATUS_READY_TO_PUBLISH, + } + ) + + self.assertEqual(put_response.status_code, status.HTTP_200_OK) + + url, _, view = accounts_url((layer_id,), {}, + view_name='publish-data-layer') + + put_response = self.forced_auth_req( + 'put', + url, + user=self.admin_user, + data={ + 'status': accounts_models.DataLayer.LAYER_STATUS_PUBLISHED, + } + ) + + self.assertEqual(put_response.status_code, status.HTTP_200_OK) + + url, view, view_info = accounts_url( + (layer_id,), + { + 'country_id': self.country.id, + 'benchmark': 'global', + 'start_date': '24-06-2024', + 'end_date': '30-06-2024', + 'is_weekly': 'true', + }, + view_name='info-data-layer' + ) + + response = self.forced_auth_req( + 'get', + url, + view=view, + view_info=view_info, + ) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + def test_live_data_layer_map_school_view(self): + url, _, view = accounts_url((), {}, view_name='list-or-create-data-layers') + + response = self.forced_auth_req( + 'post', + url, + user=self.admin_user, + view=view, + data=accounts_test_utilities.live_download_layer_data_pcdc(), + ) + + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + + response_data = response.data + + layer_id = response_data['id'] + + url, _, view = accounts_url((layer_id,), {}, + view_name='update-or-delete-data-layer') + + put_response = self.forced_auth_req( + 'put', + url, + user=self.admin_user, + data={ + 'status': accounts_models.DataLayer.LAYER_STATUS_READY_TO_PUBLISH, + } + ) + + self.assertEqual(put_response.status_code, status.HTTP_200_OK) + + url, _, view = accounts_url((layer_id,), {}, + view_name='publish-data-layer') + + put_response = self.forced_auth_req( + 'put', + url, + user=self.admin_user, + data={ + 'status': accounts_models.DataLayer.LAYER_STATUS_PUBLISHED, + } + ) + + self.assertEqual(put_response.status_code, status.HTTP_200_OK) + + url, view, view_info = accounts_url( + (layer_id,), + { + 'country_id': self.country.id, + 'school_id': '123456', + 'benchmark': 'global', + 'start_date': '24-06-2024', + 'end_date': '30-06-2024', + 'is_weekly': 'true', + }, + view_name='info-data-layer' + ) + + response = self.forced_auth_req( + 'get', + url, + view=view, + view_info=view_info, + ) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + +class InvalidateCacheApiTestCase(TestAPIViewSetMixin, TestCase): + @classmethod + def setUpTestData(cls): + cls.admin_user = test_utilities.setup_admin_user_by_role() + cls.read_only_user = test_utilities.setup_read_only_user_by_role() + + def test_hard_cache_clean_for_admin(self): + url, view, view_info = accounts_url((), {'hard': 'true'}, view_name='admin-invalidate-cache') + + response = self.forced_auth_req('get', url, user=self.admin_user, view=view, view_info=view_info) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + def test_soft_cache_clean_for_admin(self): + url, view, view_info = 
accounts_url((), {'hard': 'false'}, view_name='admin-invalidate-cache')
+
+        response = self.forced_auth_req('get', url, user=self.admin_user, view=view, view_info=view_info)
+
+        self.assertEqual(response.status_code, status.HTTP_200_OK)
diff --git a/proco/accounts/tests/test_utils.py b/proco/accounts/tests/test_utils.py
new file mode 100644
index 0000000..6ddadfc
--- /dev/null
+++ b/proco/accounts/tests/test_utils.py
@@ -0,0 +1,94 @@
+from proco.accounts import models as accounts_models
+
+
+def static_coverage_layer_data():
+    master_data_source = accounts_models.DataSource.objects.filter(
+        data_source_type=accounts_models.DataSource.DATA_SOURCE_TYPE_SCHOOL_MASTER,
+    ).first()
+    return {
+        "icon": "icon",
+        "code": "CELLULAR COVERAGE",
+        "name": "Cellular Coverage",
+        "description": "Mobile coverage in the area",
+        "version": "V 1.0",
+        "type": "STATIC",
+        "category": "COVERAGE",
+        "applicable_countries": [],
+        "legend_configs": {
+            "good": {
+                "values": [
+                    "5G",
+                    "4G"
+                ],
+                "labels": "3G & above"
+            },
+            "moderate": {
+                "values": [
+                    "3G",
+                    "2G"
+                ],
+                "labels": "2G"
+            },
+            "bad": {
+                "values": [
+                    "no"
+                ],
+                "labels": "No Coverage"
+            },
+            "unknown": {
+                "values": [],
+                "labels": "Unknown"
+            }
+        },
+        "data_sources_list": [master_data_source.id, ],
+        "data_source_column": master_data_source.column_config[0],
+    }
+
+
+def live_download_layer_data_pcdc():
+    pcdc_data_source = accounts_models.DataSource.objects.filter(
+        data_source_type=accounts_models.DataSource.DATA_SOURCE_TYPE_DAILY_CHECK_APP,
+    ).first()
+
+    return {
+        "icon": "icon",
+        "code": "DOWNLOAD PCDC",
+        "name": "Download - PCDC",
+        "description": "pcdc download speed",
+        "type": "LIVE",
+        "category": "CONNECTIVITY",
+        "applicable_countries": [],
+        "global_benchmark": {
+            "value": "20000000",
+            "unit": "bps",
+            "convert_unit": "mbps"
+        },
+        "legend_configs": {
+            "good": {
+                "values": [],
+                "labels": "Good"
+            },
+            "moderate": {
+                "values": [],
+                "labels": "Moderate"
+            },
+            "bad": {
+                "values": [],
+                "labels": "Bad"
+            },
+            "unknown": {
+                "values": [],
+                "labels": "Unknown"
+            }
+        },
+        "is_reverse": False,
+        "data_sources_list": [pcdc_data_source.id, ],
+        "data_source_column": pcdc_data_source.column_config[0],
+        "benchmark_metadata": {
+            "benchmark_value": "20000000",
+            "benchmark_unit": "bps",
+            "base_benchmark": "1000000",
+            "parameter_column_unit": "bps",
+            "round_unit_value": "{val} / (1000 * 1000)"
+        },
+    }
diff --git a/proco/accounts/utils.py b/proco/accounts/utils.py
index 72c9c04..1bacb07 100644
--- a/proco/accounts/utils.py
+++ b/proco/accounts/utils.py
@@ -1,3 +1,4 @@
+import logging
 import re
 
 from anymail.message import AnymailMessage
@@ -10,6 +11,8 @@
 from proco.accounts.config import app_config as config
 from proco.core import utils as core_utilities
 
+logger = logging.getLogger('gigamaps.' + __name__)
+
 
 def send_standard_email(user, data):
     """
@@ -22,7 +25,7 @@
         core_utilities.is_blank_string(settings.ANYMAIL.get('MAILJET_API_KEY')) or
         core_utilities.is_blank_string(settings.ANYMAIL.get('MAILJET_SECRET_KEY'))
     ):
-        print('ERROR: MailJet creds are not configured to send the email. Hence email notification is disabled.')
+        logger.error('MailJet creds are not configured to send the email. Hence email notification is disabled.')
         return
 
     data.update({
@@ -40,7 +43,7 @@
         to=[user.email],
     )
     mail.content_subtype = 'html'
-    print('Sending standard message over email')
+    logger.debug('Sending standard message over email')
     mail.send()
 
 
@@ -50,7 +53,7 @@
         core_utilities.is_blank_string(settings.ANYMAIL.get('MAILJET_API_KEY')) or
         core_utilities.is_blank_string(settings.ANYMAIL.get('MAILJET_SECRET_KEY'))
     ):
-        print('ERROR: MailJet creds are not configured to send the email. Hence email notification is disabled.')
+        logger.error('MailJet creds are not configured to send the email. Hence email notification is disabled.')
        return
 
     kwargs.update({
@@ -70,15 +73,12 @@
         bcc=bcc
     )
     mail.content_subtype = 'html'
-    print('Sending message over email')
+    logger.debug('Sending message over email')
     response = mail.send(fail_silently=fail_silently)
     return response
 
 
 class BaseTileGenerator:
-    # def __init__(self, table_config):
-    #     self.table_config = table_config
-
     def path_to_tile(self, request):
         path = "/" + request.query_params.get('z') + "/" + request.query_params.get(
             'x') + "/" + request.query_params.get('y')
@@ -133,11 +133,10 @@
                 if not cur:
                     return Response({"error": f"sql query failed: {sql}"}, status=404)
                 return cur.fetchone()[0]
-            except Exception as error:
+            except Exception:
                 return Response({"error": "An error occurred while executing SQL query"}, status=500)
 
     def generate_tile(self, request):
-        # start_time = time.time()
         tile = self.path_to_tile(request)
         if not (tile and self.tile_is_valid(tile)):
             return Response({"error": "Invalid tile path"}, status=400)
@@ -146,7 +145,7 @@
 
         sql = self.envelope_to_sql(env, request)
 
-        print(sql.replace('\n', ''))
+        logger.debug(sql.replace('\n', ''))
         pbf = self.sql_to_pbf(sql)
 
         if isinstance(pbf, memoryview):
diff --git a/proco/assets/admin/styles/admin.css b/proco/assets/admin/styles/admin.css
deleted file mode 100644
index 0681192..0000000
--- a/proco/assets/admin/styles/admin.css
+++ /dev/null
@@ -1,56 +0,0 @@
-ul.action-list {
-  padding-left: 20px;
-}
-
-ul.action-list li {
-  list-style-type: none;
-}
-ul.action-list li a {
-  font-weight: 600;
-}
-ul.action-list li a i {
-  padding-right: 5px;
-}
-
-.admin-actions {
-  background: #f8f8f8;
-  display: block;
-  float: left;
-  position: relative;
-}
-
-#content-related {
-  margin-top: 150px;
-}
-
-.admin-actions h2 {
-  padding: 16px;
-  margin-bottom: 16px;
-  border-bottom: 1px solid #eaeaea;
-  font-size: 18px;
-  color: #333;
-  font-weight: 400;
-}
-
-@media (min-width: 1025px) {
-.admin-actions {
-  margin-right: -300px;
-  width: 260px;
-  margin-left: 40px;
-}
-}
-
-@media (max-width: 1024px) and (min-width: 767px) {
-.admin-actions {
-  margin-left: 30px;
-  margin-right: -300px;
-  width: 260px;
-}
-}
-
-@media (max-width: 767px) {
-.admin-actions {
-  width: 100%;
-  margin-bottom: 20px;
-}
-}
diff --git a/proco/assets/test.txt b/proco/assets/test.txt
deleted file mode 100644
index 84362ca..0000000
--- a/proco/assets/test.txt
+++ /dev/null
@@ -1 +0,0 @@
-Test file
\ No newline at end of file
diff --git a/proco/assets/vendor/fontawesome/css/all.css b/proco/assets/vendor/fontawesome/css/all.css
deleted file mode 100644
index 934c5ca..0000000
--- a/proco/assets/vendor/fontawesome/css/all.css
+++ /dev/null
@@ -1,4586 +0,0 @@
-/*!
- * Font Awesome Free 5.14.0 by @fontawesome - https://fontawesome.com - * License - https://fontawesome.com/license/free (Icons: CC BY 4.0, Fonts: SIL OFL 1.1, Code: MIT License) - */ -.fa, -.fas, -.far, -.fal, -.fad, -.fab { - -moz-osx-font-smoothing: grayscale; - -webkit-font-smoothing: antialiased; - display: inline-block; - font-style: normal; - font-variant: normal; - text-rendering: auto; - line-height: 1; } - -.fa-lg { - font-size: 1.33333em; - line-height: 0.75em; - vertical-align: -.0667em; } - -.fa-xs { - font-size: .75em; } - -.fa-sm { - font-size: .875em; } - -.fa-1x { - font-size: 1em; } - -.fa-2x { - font-size: 2em; } - -.fa-3x { - font-size: 3em; } - -.fa-4x { - font-size: 4em; } - -.fa-5x { - font-size: 5em; } - -.fa-6x { - font-size: 6em; } - -.fa-7x { - font-size: 7em; } - -.fa-8x { - font-size: 8em; } - -.fa-9x { - font-size: 9em; } - -.fa-10x { - font-size: 10em; } - -.fa-fw { - text-align: center; - width: 1.25em; } - -.fa-ul { - list-style-type: none; - margin-left: 2.5em; - padding-left: 0; } - .fa-ul > li { - position: relative; } - -.fa-li { - left: -2em; - position: absolute; - text-align: center; - width: 2em; - line-height: inherit; } - -.fa-border { - border: solid 0.08em #eee; - border-radius: .1em; - padding: .2em .25em .15em; } - -.fa-pull-left { - float: left; } - -.fa-pull-right { - float: right; } - -.fa.fa-pull-left, -.fas.fa-pull-left, -.far.fa-pull-left, -.fal.fa-pull-left, -.fab.fa-pull-left { - margin-right: .3em; } - -.fa.fa-pull-right, -.fas.fa-pull-right, -.far.fa-pull-right, -.fal.fa-pull-right, -.fab.fa-pull-right { - margin-left: .3em; } - -.fa-spin { - -webkit-animation: fa-spin 2s infinite linear; - animation: fa-spin 2s infinite linear; } - -.fa-pulse { - -webkit-animation: fa-spin 1s infinite steps(8); - animation: fa-spin 1s infinite steps(8); } - -@-webkit-keyframes fa-spin { - 0% { - -webkit-transform: rotate(0deg); - transform: rotate(0deg); } - 100% { - -webkit-transform: rotate(360deg); - transform: rotate(360deg); } } - -@keyframes fa-spin { - 0% { - -webkit-transform: rotate(0deg); - transform: rotate(0deg); } - 100% { - -webkit-transform: rotate(360deg); - transform: rotate(360deg); } } - -.fa-rotate-90 { - -ms-filter: "progid:DXImageTransform.Microsoft.BasicImage(rotation=1)"; - -webkit-transform: rotate(90deg); - transform: rotate(90deg); } - -.fa-rotate-180 { - -ms-filter: "progid:DXImageTransform.Microsoft.BasicImage(rotation=2)"; - -webkit-transform: rotate(180deg); - transform: rotate(180deg); } - -.fa-rotate-270 { - -ms-filter: "progid:DXImageTransform.Microsoft.BasicImage(rotation=3)"; - -webkit-transform: rotate(270deg); - transform: rotate(270deg); } - -.fa-flip-horizontal { - -ms-filter: "progid:DXImageTransform.Microsoft.BasicImage(rotation=0, mirror=1)"; - -webkit-transform: scale(-1, 1); - transform: scale(-1, 1); } - -.fa-flip-vertical { - -ms-filter: "progid:DXImageTransform.Microsoft.BasicImage(rotation=2, mirror=1)"; - -webkit-transform: scale(1, -1); - transform: scale(1, -1); } - -.fa-flip-both, .fa-flip-horizontal.fa-flip-vertical { - -ms-filter: "progid:DXImageTransform.Microsoft.BasicImage(rotation=2, mirror=1)"; - -webkit-transform: scale(-1, -1); - transform: scale(-1, -1); } - -:root .fa-rotate-90, -:root .fa-rotate-180, -:root .fa-rotate-270, -:root .fa-flip-horizontal, -:root .fa-flip-vertical, -:root .fa-flip-both { - -webkit-filter: none; - filter: none; } - -.fa-stack { - display: inline-block; - height: 2em; - line-height: 2em; - position: relative; - vertical-align: middle; - width: 2.5em; } - 
-.fa-stack-1x, -.fa-stack-2x { - left: 0; - position: absolute; - text-align: center; - width: 100%; } - -.fa-stack-1x { - line-height: inherit; } - -.fa-stack-2x { - font-size: 2em; } - -.fa-inverse { - color: #fff; } - -/* Font Awesome uses the Unicode Private Use Area (PUA) to ensure screen -readers do not read off random characters that represent icons */ -.fa-500px:before { - content: "\f26e"; } - -.fa-accessible-icon:before { - content: "\f368"; } - -.fa-accusoft:before { - content: "\f369"; } - -.fa-acquisitions-incorporated:before { - content: "\f6af"; } - -.fa-ad:before { - content: "\f641"; } - -.fa-address-book:before { - content: "\f2b9"; } - -.fa-address-card:before { - content: "\f2bb"; } - -.fa-adjust:before { - content: "\f042"; } - -.fa-adn:before { - content: "\f170"; } - -.fa-adobe:before { - content: "\f778"; } - -.fa-adversal:before { - content: "\f36a"; } - -.fa-affiliatetheme:before { - content: "\f36b"; } - -.fa-air-freshener:before { - content: "\f5d0"; } - -.fa-airbnb:before { - content: "\f834"; } - -.fa-algolia:before { - content: "\f36c"; } - -.fa-align-center:before { - content: "\f037"; } - -.fa-align-justify:before { - content: "\f039"; } - -.fa-align-left:before { - content: "\f036"; } - -.fa-align-right:before { - content: "\f038"; } - -.fa-alipay:before { - content: "\f642"; } - -.fa-allergies:before { - content: "\f461"; } - -.fa-amazon:before { - content: "\f270"; } - -.fa-amazon-pay:before { - content: "\f42c"; } - -.fa-ambulance:before { - content: "\f0f9"; } - -.fa-american-sign-language-interpreting:before { - content: "\f2a3"; } - -.fa-amilia:before { - content: "\f36d"; } - -.fa-anchor:before { - content: "\f13d"; } - -.fa-android:before { - content: "\f17b"; } - -.fa-angellist:before { - content: "\f209"; } - -.fa-angle-double-down:before { - content: "\f103"; } - -.fa-angle-double-left:before { - content: "\f100"; } - -.fa-angle-double-right:before { - content: "\f101"; } - -.fa-angle-double-up:before { - content: "\f102"; } - -.fa-angle-down:before { - content: "\f107"; } - -.fa-angle-left:before { - content: "\f104"; } - -.fa-angle-right:before { - content: "\f105"; } - -.fa-angle-up:before { - content: "\f106"; } - -.fa-angry:before { - content: "\f556"; } - -.fa-angrycreative:before { - content: "\f36e"; } - -.fa-angular:before { - content: "\f420"; } - -.fa-ankh:before { - content: "\f644"; } - -.fa-app-store:before { - content: "\f36f"; } - -.fa-app-store-ios:before { - content: "\f370"; } - -.fa-apper:before { - content: "\f371"; } - -.fa-apple:before { - content: "\f179"; } - -.fa-apple-alt:before { - content: "\f5d1"; } - -.fa-apple-pay:before { - content: "\f415"; } - -.fa-archive:before { - content: "\f187"; } - -.fa-archway:before { - content: "\f557"; } - -.fa-arrow-alt-circle-down:before { - content: "\f358"; } - -.fa-arrow-alt-circle-left:before { - content: "\f359"; } - -.fa-arrow-alt-circle-right:before { - content: "\f35a"; } - -.fa-arrow-alt-circle-up:before { - content: "\f35b"; } - -.fa-arrow-circle-down:before { - content: "\f0ab"; } - -.fa-arrow-circle-left:before { - content: "\f0a8"; } - -.fa-arrow-circle-right:before { - content: "\f0a9"; } - -.fa-arrow-circle-up:before { - content: "\f0aa"; } - -.fa-arrow-down:before { - content: "\f063"; } - -.fa-arrow-left:before { - content: "\f060"; } - -.fa-arrow-right:before { - content: "\f061"; } - -.fa-arrow-up:before { - content: "\f062"; } - -.fa-arrows-alt:before { - content: "\f0b2"; } - -.fa-arrows-alt-h:before { - content: "\f337"; } - -.fa-arrows-alt-v:before { - 
content: "\f338"; } - -.fa-artstation:before { - content: "\f77a"; } - -.fa-assistive-listening-systems:before { - content: "\f2a2"; } - -.fa-asterisk:before { - content: "\f069"; } - -.fa-asymmetrik:before { - content: "\f372"; } - -.fa-at:before { - content: "\f1fa"; } - -.fa-atlas:before { - content: "\f558"; } - -.fa-atlassian:before { - content: "\f77b"; } - -.fa-atom:before { - content: "\f5d2"; } - -.fa-audible:before { - content: "\f373"; } - -.fa-audio-description:before { - content: "\f29e"; } - -.fa-autoprefixer:before { - content: "\f41c"; } - -.fa-avianex:before { - content: "\f374"; } - -.fa-aviato:before { - content: "\f421"; } - -.fa-award:before { - content: "\f559"; } - -.fa-aws:before { - content: "\f375"; } - -.fa-baby:before { - content: "\f77c"; } - -.fa-baby-carriage:before { - content: "\f77d"; } - -.fa-backspace:before { - content: "\f55a"; } - -.fa-backward:before { - content: "\f04a"; } - -.fa-bacon:before { - content: "\f7e5"; } - -.fa-bacteria:before { - content: "\e059"; } - -.fa-bacterium:before { - content: "\e05a"; } - -.fa-bahai:before { - content: "\f666"; } - -.fa-balance-scale:before { - content: "\f24e"; } - -.fa-balance-scale-left:before { - content: "\f515"; } - -.fa-balance-scale-right:before { - content: "\f516"; } - -.fa-ban:before { - content: "\f05e"; } - -.fa-band-aid:before { - content: "\f462"; } - -.fa-bandcamp:before { - content: "\f2d5"; } - -.fa-barcode:before { - content: "\f02a"; } - -.fa-bars:before { - content: "\f0c9"; } - -.fa-baseball-ball:before { - content: "\f433"; } - -.fa-basketball-ball:before { - content: "\f434"; } - -.fa-bath:before { - content: "\f2cd"; } - -.fa-battery-empty:before { - content: "\f244"; } - -.fa-battery-full:before { - content: "\f240"; } - -.fa-battery-half:before { - content: "\f242"; } - -.fa-battery-quarter:before { - content: "\f243"; } - -.fa-battery-three-quarters:before { - content: "\f241"; } - -.fa-battle-net:before { - content: "\f835"; } - -.fa-bed:before { - content: "\f236"; } - -.fa-beer:before { - content: "\f0fc"; } - -.fa-behance:before { - content: "\f1b4"; } - -.fa-behance-square:before { - content: "\f1b5"; } - -.fa-bell:before { - content: "\f0f3"; } - -.fa-bell-slash:before { - content: "\f1f6"; } - -.fa-bezier-curve:before { - content: "\f55b"; } - -.fa-bible:before { - content: "\f647"; } - -.fa-bicycle:before { - content: "\f206"; } - -.fa-biking:before { - content: "\f84a"; } - -.fa-bimobject:before { - content: "\f378"; } - -.fa-binoculars:before { - content: "\f1e5"; } - -.fa-biohazard:before { - content: "\f780"; } - -.fa-birthday-cake:before { - content: "\f1fd"; } - -.fa-bitbucket:before { - content: "\f171"; } - -.fa-bitcoin:before { - content: "\f379"; } - -.fa-bity:before { - content: "\f37a"; } - -.fa-black-tie:before { - content: "\f27e"; } - -.fa-blackberry:before { - content: "\f37b"; } - -.fa-blender:before { - content: "\f517"; } - -.fa-blender-phone:before { - content: "\f6b6"; } - -.fa-blind:before { - content: "\f29d"; } - -.fa-blog:before { - content: "\f781"; } - -.fa-blogger:before { - content: "\f37c"; } - -.fa-blogger-b:before { - content: "\f37d"; } - -.fa-bluetooth:before { - content: "\f293"; } - -.fa-bluetooth-b:before { - content: "\f294"; } - -.fa-bold:before { - content: "\f032"; } - -.fa-bolt:before { - content: "\f0e7"; } - -.fa-bomb:before { - content: "\f1e2"; } - -.fa-bone:before { - content: "\f5d7"; } - -.fa-bong:before { - content: "\f55c"; } - -.fa-book:before { - content: "\f02d"; } - -.fa-book-dead:before { - content: "\f6b7"; } - 
-.fa-book-medical:before { - content: "\f7e6"; } - -.fa-book-open:before { - content: "\f518"; } - -.fa-book-reader:before { - content: "\f5da"; } - -.fa-bookmark:before { - content: "\f02e"; } - -.fa-bootstrap:before { - content: "\f836"; } - -.fa-border-all:before { - content: "\f84c"; } - -.fa-border-none:before { - content: "\f850"; } - -.fa-border-style:before { - content: "\f853"; } - -.fa-bowling-ball:before { - content: "\f436"; } - -.fa-box:before { - content: "\f466"; } - -.fa-box-open:before { - content: "\f49e"; } - -.fa-box-tissue:before { - content: "\e05b"; } - -.fa-boxes:before { - content: "\f468"; } - -.fa-braille:before { - content: "\f2a1"; } - -.fa-brain:before { - content: "\f5dc"; } - -.fa-bread-slice:before { - content: "\f7ec"; } - -.fa-briefcase:before { - content: "\f0b1"; } - -.fa-briefcase-medical:before { - content: "\f469"; } - -.fa-broadcast-tower:before { - content: "\f519"; } - -.fa-broom:before { - content: "\f51a"; } - -.fa-brush:before { - content: "\f55d"; } - -.fa-btc:before { - content: "\f15a"; } - -.fa-buffer:before { - content: "\f837"; } - -.fa-bug:before { - content: "\f188"; } - -.fa-building:before { - content: "\f1ad"; } - -.fa-bullhorn:before { - content: "\f0a1"; } - -.fa-bullseye:before { - content: "\f140"; } - -.fa-burn:before { - content: "\f46a"; } - -.fa-buromobelexperte:before { - content: "\f37f"; } - -.fa-bus:before { - content: "\f207"; } - -.fa-bus-alt:before { - content: "\f55e"; } - -.fa-business-time:before { - content: "\f64a"; } - -.fa-buy-n-large:before { - content: "\f8a6"; } - -.fa-buysellads:before { - content: "\f20d"; } - -.fa-calculator:before { - content: "\f1ec"; } - -.fa-calendar:before { - content: "\f133"; } - -.fa-calendar-alt:before { - content: "\f073"; } - -.fa-calendar-check:before { - content: "\f274"; } - -.fa-calendar-day:before { - content: "\f783"; } - -.fa-calendar-minus:before { - content: "\f272"; } - -.fa-calendar-plus:before { - content: "\f271"; } - -.fa-calendar-times:before { - content: "\f273"; } - -.fa-calendar-week:before { - content: "\f784"; } - -.fa-camera:before { - content: "\f030"; } - -.fa-camera-retro:before { - content: "\f083"; } - -.fa-campground:before { - content: "\f6bb"; } - -.fa-canadian-maple-leaf:before { - content: "\f785"; } - -.fa-candy-cane:before { - content: "\f786"; } - -.fa-cannabis:before { - content: "\f55f"; } - -.fa-capsules:before { - content: "\f46b"; } - -.fa-car:before { - content: "\f1b9"; } - -.fa-car-alt:before { - content: "\f5de"; } - -.fa-car-battery:before { - content: "\f5df"; } - -.fa-car-crash:before { - content: "\f5e1"; } - -.fa-car-side:before { - content: "\f5e4"; } - -.fa-caravan:before { - content: "\f8ff"; } - -.fa-caret-down:before { - content: "\f0d7"; } - -.fa-caret-left:before { - content: "\f0d9"; } - -.fa-caret-right:before { - content: "\f0da"; } - -.fa-caret-square-down:before { - content: "\f150"; } - -.fa-caret-square-left:before { - content: "\f191"; } - -.fa-caret-square-right:before { - content: "\f152"; } - -.fa-caret-square-up:before { - content: "\f151"; } - -.fa-caret-up:before { - content: "\f0d8"; } - -.fa-carrot:before { - content: "\f787"; } - -.fa-cart-arrow-down:before { - content: "\f218"; } - -.fa-cart-plus:before { - content: "\f217"; } - -.fa-cash-register:before { - content: "\f788"; } - -.fa-cat:before { - content: "\f6be"; } - -.fa-cc-amazon-pay:before { - content: "\f42d"; } - -.fa-cc-amex:before { - content: "\f1f3"; } - -.fa-cc-apple-pay:before { - content: "\f416"; } - -.fa-cc-diners-club:before { - content: 
"\f24c"; } - -.fa-cc-discover:before { - content: "\f1f2"; } - -.fa-cc-jcb:before { - content: "\f24b"; } - -.fa-cc-mastercard:before { - content: "\f1f1"; } - -.fa-cc-paypal:before { - content: "\f1f4"; } - -.fa-cc-stripe:before { - content: "\f1f5"; } - -.fa-cc-visa:before { - content: "\f1f0"; } - -.fa-centercode:before { - content: "\f380"; } - -.fa-centos:before { - content: "\f789"; } - -.fa-certificate:before { - content: "\f0a3"; } - -.fa-chair:before { - content: "\f6c0"; } - -.fa-chalkboard:before { - content: "\f51b"; } - -.fa-chalkboard-teacher:before { - content: "\f51c"; } - -.fa-charging-station:before { - content: "\f5e7"; } - -.fa-chart-area:before { - content: "\f1fe"; } - -.fa-chart-bar:before { - content: "\f080"; } - -.fa-chart-line:before { - content: "\f201"; } - -.fa-chart-pie:before { - content: "\f200"; } - -.fa-check:before { - content: "\f00c"; } - -.fa-check-circle:before { - content: "\f058"; } - -.fa-check-double:before { - content: "\f560"; } - -.fa-check-square:before { - content: "\f14a"; } - -.fa-cheese:before { - content: "\f7ef"; } - -.fa-chess:before { - content: "\f439"; } - -.fa-chess-bishop:before { - content: "\f43a"; } - -.fa-chess-board:before { - content: "\f43c"; } - -.fa-chess-king:before { - content: "\f43f"; } - -.fa-chess-knight:before { - content: "\f441"; } - -.fa-chess-pawn:before { - content: "\f443"; } - -.fa-chess-queen:before { - content: "\f445"; } - -.fa-chess-rook:before { - content: "\f447"; } - -.fa-chevron-circle-down:before { - content: "\f13a"; } - -.fa-chevron-circle-left:before { - content: "\f137"; } - -.fa-chevron-circle-right:before { - content: "\f138"; } - -.fa-chevron-circle-up:before { - content: "\f139"; } - -.fa-chevron-down:before { - content: "\f078"; } - -.fa-chevron-left:before { - content: "\f053"; } - -.fa-chevron-right:before { - content: "\f054"; } - -.fa-chevron-up:before { - content: "\f077"; } - -.fa-child:before { - content: "\f1ae"; } - -.fa-chrome:before { - content: "\f268"; } - -.fa-chromecast:before { - content: "\f838"; } - -.fa-church:before { - content: "\f51d"; } - -.fa-circle:before { - content: "\f111"; } - -.fa-circle-notch:before { - content: "\f1ce"; } - -.fa-city:before { - content: "\f64f"; } - -.fa-clinic-medical:before { - content: "\f7f2"; } - -.fa-clipboard:before { - content: "\f328"; } - -.fa-clipboard-check:before { - content: "\f46c"; } - -.fa-clipboard-list:before { - content: "\f46d"; } - -.fa-clock:before { - content: "\f017"; } - -.fa-clone:before { - content: "\f24d"; } - -.fa-closed-captioning:before { - content: "\f20a"; } - -.fa-cloud:before { - content: "\f0c2"; } - -.fa-cloud-download-alt:before { - content: "\f381"; } - -.fa-cloud-meatball:before { - content: "\f73b"; } - -.fa-cloud-moon:before { - content: "\f6c3"; } - -.fa-cloud-moon-rain:before { - content: "\f73c"; } - -.fa-cloud-rain:before { - content: "\f73d"; } - -.fa-cloud-showers-heavy:before { - content: "\f740"; } - -.fa-cloud-sun:before { - content: "\f6c4"; } - -.fa-cloud-sun-rain:before { - content: "\f743"; } - -.fa-cloud-upload-alt:before { - content: "\f382"; } - -.fa-cloudscale:before { - content: "\f383"; } - -.fa-cloudsmith:before { - content: "\f384"; } - -.fa-cloudversify:before { - content: "\f385"; } - -.fa-cocktail:before { - content: "\f561"; } - -.fa-code:before { - content: "\f121"; } - -.fa-code-branch:before { - content: "\f126"; } - -.fa-codepen:before { - content: "\f1cb"; } - -.fa-codiepie:before { - content: "\f284"; } - -.fa-coffee:before { - content: "\f0f4"; } - -.fa-cog:before { - 
content: "\f013"; } - -.fa-cogs:before { - content: "\f085"; } - -.fa-coins:before { - content: "\f51e"; } - -.fa-columns:before { - content: "\f0db"; } - -.fa-comment:before { - content: "\f075"; } - -.fa-comment-alt:before { - content: "\f27a"; } - -.fa-comment-dollar:before { - content: "\f651"; } - -.fa-comment-dots:before { - content: "\f4ad"; } - -.fa-comment-medical:before { - content: "\f7f5"; } - -.fa-comment-slash:before { - content: "\f4b3"; } - -.fa-comments:before { - content: "\f086"; } - -.fa-comments-dollar:before { - content: "\f653"; } - -.fa-compact-disc:before { - content: "\f51f"; } - -.fa-compass:before { - content: "\f14e"; } - -.fa-compress:before { - content: "\f066"; } - -.fa-compress-alt:before { - content: "\f422"; } - -.fa-compress-arrows-alt:before { - content: "\f78c"; } - -.fa-concierge-bell:before { - content: "\f562"; } - -.fa-confluence:before { - content: "\f78d"; } - -.fa-connectdevelop:before { - content: "\f20e"; } - -.fa-contao:before { - content: "\f26d"; } - -.fa-cookie:before { - content: "\f563"; } - -.fa-cookie-bite:before { - content: "\f564"; } - -.fa-copy:before { - content: "\f0c5"; } - -.fa-copyright:before { - content: "\f1f9"; } - -.fa-cotton-bureau:before { - content: "\f89e"; } - -.fa-couch:before { - content: "\f4b8"; } - -.fa-cpanel:before { - content: "\f388"; } - -.fa-creative-commons:before { - content: "\f25e"; } - -.fa-creative-commons-by:before { - content: "\f4e7"; } - -.fa-creative-commons-nc:before { - content: "\f4e8"; } - -.fa-creative-commons-nc-eu:before { - content: "\f4e9"; } - -.fa-creative-commons-nc-jp:before { - content: "\f4ea"; } - -.fa-creative-commons-nd:before { - content: "\f4eb"; } - -.fa-creative-commons-pd:before { - content: "\f4ec"; } - -.fa-creative-commons-pd-alt:before { - content: "\f4ed"; } - -.fa-creative-commons-remix:before { - content: "\f4ee"; } - -.fa-creative-commons-sa:before { - content: "\f4ef"; } - -.fa-creative-commons-sampling:before { - content: "\f4f0"; } - -.fa-creative-commons-sampling-plus:before { - content: "\f4f1"; } - -.fa-creative-commons-share:before { - content: "\f4f2"; } - -.fa-creative-commons-zero:before { - content: "\f4f3"; } - -.fa-credit-card:before { - content: "\f09d"; } - -.fa-critical-role:before { - content: "\f6c9"; } - -.fa-crop:before { - content: "\f125"; } - -.fa-crop-alt:before { - content: "\f565"; } - -.fa-cross:before { - content: "\f654"; } - -.fa-crosshairs:before { - content: "\f05b"; } - -.fa-crow:before { - content: "\f520"; } - -.fa-crown:before { - content: "\f521"; } - -.fa-crutch:before { - content: "\f7f7"; } - -.fa-css3:before { - content: "\f13c"; } - -.fa-css3-alt:before { - content: "\f38b"; } - -.fa-cube:before { - content: "\f1b2"; } - -.fa-cubes:before { - content: "\f1b3"; } - -.fa-cut:before { - content: "\f0c4"; } - -.fa-cuttlefish:before { - content: "\f38c"; } - -.fa-d-and-d:before { - content: "\f38d"; } - -.fa-d-and-d-beyond:before { - content: "\f6ca"; } - -.fa-dailymotion:before { - content: "\e052"; } - -.fa-dashcube:before { - content: "\f210"; } - -.fa-database:before { - content: "\f1c0"; } - -.fa-deaf:before { - content: "\f2a4"; } - -.fa-deezer:before { - content: "\e077"; } - -.fa-delicious:before { - content: "\f1a5"; } - -.fa-democrat:before { - content: "\f747"; } - -.fa-deploydog:before { - content: "\f38e"; } - -.fa-deskpro:before { - content: "\f38f"; } - -.fa-desktop:before { - content: "\f108"; } - -.fa-dev:before { - content: "\f6cc"; } - -.fa-deviantart:before { - content: "\f1bd"; } - -.fa-dharmachakra:before { 
- content: "\f655"; } - -.fa-dhl:before { - content: "\f790"; } - -.fa-diagnoses:before { - content: "\f470"; } - -.fa-diaspora:before { - content: "\f791"; } - -.fa-dice:before { - content: "\f522"; } - -.fa-dice-d20:before { - content: "\f6cf"; } - -.fa-dice-d6:before { - content: "\f6d1"; } - -.fa-dice-five:before { - content: "\f523"; } - -.fa-dice-four:before { - content: "\f524"; } - -.fa-dice-one:before { - content: "\f525"; } - -.fa-dice-six:before { - content: "\f526"; } - -.fa-dice-three:before { - content: "\f527"; } - -.fa-dice-two:before { - content: "\f528"; } - -.fa-digg:before { - content: "\f1a6"; } - -.fa-digital-ocean:before { - content: "\f391"; } - -.fa-digital-tachograph:before { - content: "\f566"; } - -.fa-directions:before { - content: "\f5eb"; } - -.fa-discord:before { - content: "\f392"; } - -.fa-discourse:before { - content: "\f393"; } - -.fa-disease:before { - content: "\f7fa"; } - -.fa-divide:before { - content: "\f529"; } - -.fa-dizzy:before { - content: "\f567"; } - -.fa-dna:before { - content: "\f471"; } - -.fa-dochub:before { - content: "\f394"; } - -.fa-docker:before { - content: "\f395"; } - -.fa-dog:before { - content: "\f6d3"; } - -.fa-dollar-sign:before { - content: "\f155"; } - -.fa-dolly:before { - content: "\f472"; } - -.fa-dolly-flatbed:before { - content: "\f474"; } - -.fa-donate:before { - content: "\f4b9"; } - -.fa-door-closed:before { - content: "\f52a"; } - -.fa-door-open:before { - content: "\f52b"; } - -.fa-dot-circle:before { - content: "\f192"; } - -.fa-dove:before { - content: "\f4ba"; } - -.fa-download:before { - content: "\f019"; } - -.fa-draft2digital:before { - content: "\f396"; } - -.fa-drafting-compass:before { - content: "\f568"; } - -.fa-dragon:before { - content: "\f6d5"; } - -.fa-draw-polygon:before { - content: "\f5ee"; } - -.fa-dribbble:before { - content: "\f17d"; } - -.fa-dribbble-square:before { - content: "\f397"; } - -.fa-dropbox:before { - content: "\f16b"; } - -.fa-drum:before { - content: "\f569"; } - -.fa-drum-steelpan:before { - content: "\f56a"; } - -.fa-drumstick-bite:before { - content: "\f6d7"; } - -.fa-drupal:before { - content: "\f1a9"; } - -.fa-dumbbell:before { - content: "\f44b"; } - -.fa-dumpster:before { - content: "\f793"; } - -.fa-dumpster-fire:before { - content: "\f794"; } - -.fa-dungeon:before { - content: "\f6d9"; } - -.fa-dyalog:before { - content: "\f399"; } - -.fa-earlybirds:before { - content: "\f39a"; } - -.fa-ebay:before { - content: "\f4f4"; } - -.fa-edge:before { - content: "\f282"; } - -.fa-edge-legacy:before { - content: "\e078"; } - -.fa-edit:before { - content: "\f044"; } - -.fa-egg:before { - content: "\f7fb"; } - -.fa-eject:before { - content: "\f052"; } - -.fa-elementor:before { - content: "\f430"; } - -.fa-ellipsis-h:before { - content: "\f141"; } - -.fa-ellipsis-v:before { - content: "\f142"; } - -.fa-ello:before { - content: "\f5f1"; } - -.fa-ember:before { - content: "\f423"; } - -.fa-empire:before { - content: "\f1d1"; } - -.fa-envelope:before { - content: "\f0e0"; } - -.fa-envelope-open:before { - content: "\f2b6"; } - -.fa-envelope-open-text:before { - content: "\f658"; } - -.fa-envelope-square:before { - content: "\f199"; } - -.fa-envira:before { - content: "\f299"; } - -.fa-equals:before { - content: "\f52c"; } - -.fa-eraser:before { - content: "\f12d"; } - -.fa-erlang:before { - content: "\f39d"; } - -.fa-ethereum:before { - content: "\f42e"; } - -.fa-ethernet:before { - content: "\f796"; } - -.fa-etsy:before { - content: "\f2d7"; } - -.fa-euro-sign:before { - content: 
"\f153"; } - -.fa-evernote:before { - content: "\f839"; } - -.fa-exchange-alt:before { - content: "\f362"; } - -.fa-exclamation:before { - content: "\f12a"; } - -.fa-exclamation-circle:before { - content: "\f06a"; } - -.fa-exclamation-triangle:before { - content: "\f071"; } - -.fa-expand:before { - content: "\f065"; } - -.fa-expand-alt:before { - content: "\f424"; } - -.fa-expand-arrows-alt:before { - content: "\f31e"; } - -.fa-expeditedssl:before { - content: "\f23e"; } - -.fa-external-link-alt:before { - content: "\f35d"; } - -.fa-external-link-square-alt:before { - content: "\f360"; } - -.fa-eye:before { - content: "\f06e"; } - -.fa-eye-dropper:before { - content: "\f1fb"; } - -.fa-eye-slash:before { - content: "\f070"; } - -.fa-facebook:before { - content: "\f09a"; } - -.fa-facebook-f:before { - content: "\f39e"; } - -.fa-facebook-messenger:before { - content: "\f39f"; } - -.fa-facebook-square:before { - content: "\f082"; } - -.fa-fan:before { - content: "\f863"; } - -.fa-fantasy-flight-games:before { - content: "\f6dc"; } - -.fa-fast-backward:before { - content: "\f049"; } - -.fa-fast-forward:before { - content: "\f050"; } - -.fa-faucet:before { - content: "\e005"; } - -.fa-fax:before { - content: "\f1ac"; } - -.fa-feather:before { - content: "\f52d"; } - -.fa-feather-alt:before { - content: "\f56b"; } - -.fa-fedex:before { - content: "\f797"; } - -.fa-fedora:before { - content: "\f798"; } - -.fa-female:before { - content: "\f182"; } - -.fa-fighter-jet:before { - content: "\f0fb"; } - -.fa-figma:before { - content: "\f799"; } - -.fa-file:before { - content: "\f15b"; } - -.fa-file-alt:before { - content: "\f15c"; } - -.fa-file-archive:before { - content: "\f1c6"; } - -.fa-file-audio:before { - content: "\f1c7"; } - -.fa-file-code:before { - content: "\f1c9"; } - -.fa-file-contract:before { - content: "\f56c"; } - -.fa-file-csv:before { - content: "\f6dd"; } - -.fa-file-download:before { - content: "\f56d"; } - -.fa-file-excel:before { - content: "\f1c3"; } - -.fa-file-export:before { - content: "\f56e"; } - -.fa-file-image:before { - content: "\f1c5"; } - -.fa-file-import:before { - content: "\f56f"; } - -.fa-file-invoice:before { - content: "\f570"; } - -.fa-file-invoice-dollar:before { - content: "\f571"; } - -.fa-file-medical:before { - content: "\f477"; } - -.fa-file-medical-alt:before { - content: "\f478"; } - -.fa-file-pdf:before { - content: "\f1c1"; } - -.fa-file-powerpoint:before { - content: "\f1c4"; } - -.fa-file-prescription:before { - content: "\f572"; } - -.fa-file-signature:before { - content: "\f573"; } - -.fa-file-upload:before { - content: "\f574"; } - -.fa-file-video:before { - content: "\f1c8"; } - -.fa-file-word:before { - content: "\f1c2"; } - -.fa-fill:before { - content: "\f575"; } - -.fa-fill-drip:before { - content: "\f576"; } - -.fa-film:before { - content: "\f008"; } - -.fa-filter:before { - content: "\f0b0"; } - -.fa-fingerprint:before { - content: "\f577"; } - -.fa-fire:before { - content: "\f06d"; } - -.fa-fire-alt:before { - content: "\f7e4"; } - -.fa-fire-extinguisher:before { - content: "\f134"; } - -.fa-firefox:before { - content: "\f269"; } - -.fa-firefox-browser:before { - content: "\e007"; } - -.fa-first-aid:before { - content: "\f479"; } - -.fa-first-order:before { - content: "\f2b0"; } - -.fa-first-order-alt:before { - content: "\f50a"; } - -.fa-firstdraft:before { - content: "\f3a1"; } - -.fa-fish:before { - content: "\f578"; } - -.fa-fist-raised:before { - content: "\f6de"; } - -.fa-flag:before { - content: "\f024"; } - 
-.fa-flag-checkered:before { - content: "\f11e"; } - -.fa-flag-usa:before { - content: "\f74d"; } - -.fa-flask:before { - content: "\f0c3"; } - -.fa-flickr:before { - content: "\f16e"; } - -.fa-flipboard:before { - content: "\f44d"; } - -.fa-flushed:before { - content: "\f579"; } - -.fa-fly:before { - content: "\f417"; } - -.fa-folder:before { - content: "\f07b"; } - -.fa-folder-minus:before { - content: "\f65d"; } - -.fa-folder-open:before { - content: "\f07c"; } - -.fa-folder-plus:before { - content: "\f65e"; } - -.fa-font:before { - content: "\f031"; } - -.fa-font-awesome:before { - content: "\f2b4"; } - -.fa-font-awesome-alt:before { - content: "\f35c"; } - -.fa-font-awesome-flag:before { - content: "\f425"; } - -.fa-font-awesome-logo-full:before { - content: "\f4e6"; } - -.fa-fonticons:before { - content: "\f280"; } - -.fa-fonticons-fi:before { - content: "\f3a2"; } - -.fa-football-ball:before { - content: "\f44e"; } - -.fa-fort-awesome:before { - content: "\f286"; } - -.fa-fort-awesome-alt:before { - content: "\f3a3"; } - -.fa-forumbee:before { - content: "\f211"; } - -.fa-forward:before { - content: "\f04e"; } - -.fa-foursquare:before { - content: "\f180"; } - -.fa-free-code-camp:before { - content: "\f2c5"; } - -.fa-freebsd:before { - content: "\f3a4"; } - -.fa-frog:before { - content: "\f52e"; } - -.fa-frown:before { - content: "\f119"; } - -.fa-frown-open:before { - content: "\f57a"; } - -.fa-fulcrum:before { - content: "\f50b"; } - -.fa-funnel-dollar:before { - content: "\f662"; } - -.fa-futbol:before { - content: "\f1e3"; } - -.fa-galactic-republic:before { - content: "\f50c"; } - -.fa-galactic-senate:before { - content: "\f50d"; } - -.fa-gamepad:before { - content: "\f11b"; } - -.fa-gas-pump:before { - content: "\f52f"; } - -.fa-gavel:before { - content: "\f0e3"; } - -.fa-gem:before { - content: "\f3a5"; } - -.fa-genderless:before { - content: "\f22d"; } - -.fa-get-pocket:before { - content: "\f265"; } - -.fa-gg:before { - content: "\f260"; } - -.fa-gg-circle:before { - content: "\f261"; } - -.fa-ghost:before { - content: "\f6e2"; } - -.fa-gift:before { - content: "\f06b"; } - -.fa-gifts:before { - content: "\f79c"; } - -.fa-git:before { - content: "\f1d3"; } - -.fa-git-alt:before { - content: "\f841"; } - -.fa-git-square:before { - content: "\f1d2"; } - -.fa-github:before { - content: "\f09b"; } - -.fa-github-alt:before { - content: "\f113"; } - -.fa-github-square:before { - content: "\f092"; } - -.fa-gitkraken:before { - content: "\f3a6"; } - -.fa-gitlab:before { - content: "\f296"; } - -.fa-gitter:before { - content: "\f426"; } - -.fa-glass-cheers:before { - content: "\f79f"; } - -.fa-glass-martini:before { - content: "\f000"; } - -.fa-glass-martini-alt:before { - content: "\f57b"; } - -.fa-glass-whiskey:before { - content: "\f7a0"; } - -.fa-glasses:before { - content: "\f530"; } - -.fa-glide:before { - content: "\f2a5"; } - -.fa-glide-g:before { - content: "\f2a6"; } - -.fa-globe:before { - content: "\f0ac"; } - -.fa-globe-africa:before { - content: "\f57c"; } - -.fa-globe-americas:before { - content: "\f57d"; } - -.fa-globe-asia:before { - content: "\f57e"; } - -.fa-globe-europe:before { - content: "\f7a2"; } - -.fa-gofore:before { - content: "\f3a7"; } - -.fa-golf-ball:before { - content: "\f450"; } - -.fa-goodreads:before { - content: "\f3a8"; } - -.fa-goodreads-g:before { - content: "\f3a9"; } - -.fa-google:before { - content: "\f1a0"; } - -.fa-google-drive:before { - content: "\f3aa"; } - -.fa-google-pay:before { - content: "\e079"; } - -.fa-google-play:before { - 
content: "\f3ab"; } - -.fa-google-plus:before { - content: "\f2b3"; } - -.fa-google-plus-g:before { - content: "\f0d5"; } - -.fa-google-plus-square:before { - content: "\f0d4"; } - -.fa-google-wallet:before { - content: "\f1ee"; } - -.fa-gopuram:before { - content: "\f664"; } - -.fa-graduation-cap:before { - content: "\f19d"; } - -.fa-gratipay:before { - content: "\f184"; } - -.fa-grav:before { - content: "\f2d6"; } - -.fa-greater-than:before { - content: "\f531"; } - -.fa-greater-than-equal:before { - content: "\f532"; } - -.fa-grimace:before { - content: "\f57f"; } - -.fa-grin:before { - content: "\f580"; } - -.fa-grin-alt:before { - content: "\f581"; } - -.fa-grin-beam:before { - content: "\f582"; } - -.fa-grin-beam-sweat:before { - content: "\f583"; } - -.fa-grin-hearts:before { - content: "\f584"; } - -.fa-grin-squint:before { - content: "\f585"; } - -.fa-grin-squint-tears:before { - content: "\f586"; } - -.fa-grin-stars:before { - content: "\f587"; } - -.fa-grin-tears:before { - content: "\f588"; } - -.fa-grin-tongue:before { - content: "\f589"; } - -.fa-grin-tongue-squint:before { - content: "\f58a"; } - -.fa-grin-tongue-wink:before { - content: "\f58b"; } - -.fa-grin-wink:before { - content: "\f58c"; } - -.fa-grip-horizontal:before { - content: "\f58d"; } - -.fa-grip-lines:before { - content: "\f7a4"; } - -.fa-grip-lines-vertical:before { - content: "\f7a5"; } - -.fa-grip-vertical:before { - content: "\f58e"; } - -.fa-gripfire:before { - content: "\f3ac"; } - -.fa-grunt:before { - content: "\f3ad"; } - -.fa-guitar:before { - content: "\f7a6"; } - -.fa-gulp:before { - content: "\f3ae"; } - -.fa-h-square:before { - content: "\f0fd"; } - -.fa-hacker-news:before { - content: "\f1d4"; } - -.fa-hacker-news-square:before { - content: "\f3af"; } - -.fa-hackerrank:before { - content: "\f5f7"; } - -.fa-hamburger:before { - content: "\f805"; } - -.fa-hammer:before { - content: "\f6e3"; } - -.fa-hamsa:before { - content: "\f665"; } - -.fa-hand-holding:before { - content: "\f4bd"; } - -.fa-hand-holding-heart:before { - content: "\f4be"; } - -.fa-hand-holding-medical:before { - content: "\e05c"; } - -.fa-hand-holding-usd:before { - content: "\f4c0"; } - -.fa-hand-holding-water:before { - content: "\f4c1"; } - -.fa-hand-lizard:before { - content: "\f258"; } - -.fa-hand-middle-finger:before { - content: "\f806"; } - -.fa-hand-paper:before { - content: "\f256"; } - -.fa-hand-peace:before { - content: "\f25b"; } - -.fa-hand-point-down:before { - content: "\f0a7"; } - -.fa-hand-point-left:before { - content: "\f0a5"; } - -.fa-hand-point-right:before { - content: "\f0a4"; } - -.fa-hand-point-up:before { - content: "\f0a6"; } - -.fa-hand-pointer:before { - content: "\f25a"; } - -.fa-hand-rock:before { - content: "\f255"; } - -.fa-hand-scissors:before { - content: "\f257"; } - -.fa-hand-sparkles:before { - content: "\e05d"; } - -.fa-hand-spock:before { - content: "\f259"; } - -.fa-hands:before { - content: "\f4c2"; } - -.fa-hands-helping:before { - content: "\f4c4"; } - -.fa-hands-wash:before { - content: "\e05e"; } - -.fa-handshake:before { - content: "\f2b5"; } - -.fa-handshake-alt-slash:before { - content: "\e05f"; } - -.fa-handshake-slash:before { - content: "\e060"; } - -.fa-hanukiah:before { - content: "\f6e6"; } - -.fa-hard-hat:before { - content: "\f807"; } - -.fa-hashtag:before { - content: "\f292"; } - -.fa-hat-cowboy:before { - content: "\f8c0"; } - -.fa-hat-cowboy-side:before { - content: "\f8c1"; } - -.fa-hat-wizard:before { - content: "\f6e8"; } - -.fa-hdd:before { - content: "\f0a0"; } - 
-.fa-head-side-cough:before { - content: "\e061"; } - -.fa-head-side-cough-slash:before { - content: "\e062"; } - -.fa-head-side-mask:before { - content: "\e063"; } - -.fa-head-side-virus:before { - content: "\e064"; } - -.fa-heading:before { - content: "\f1dc"; } - -.fa-headphones:before { - content: "\f025"; } - -.fa-headphones-alt:before { - content: "\f58f"; } - -.fa-headset:before { - content: "\f590"; } - -.fa-heart:before { - content: "\f004"; } - -.fa-heart-broken:before { - content: "\f7a9"; } - -.fa-heartbeat:before { - content: "\f21e"; } - -.fa-helicopter:before { - content: "\f533"; } - -.fa-highlighter:before { - content: "\f591"; } - -.fa-hiking:before { - content: "\f6ec"; } - -.fa-hippo:before { - content: "\f6ed"; } - -.fa-hips:before { - content: "\f452"; } - -.fa-hire-a-helper:before { - content: "\f3b0"; } - -.fa-history:before { - content: "\f1da"; } - -.fa-hockey-puck:before { - content: "\f453"; } - -.fa-holly-berry:before { - content: "\f7aa"; } - -.fa-home:before { - content: "\f015"; } - -.fa-hooli:before { - content: "\f427"; } - -.fa-hornbill:before { - content: "\f592"; } - -.fa-horse:before { - content: "\f6f0"; } - -.fa-horse-head:before { - content: "\f7ab"; } - -.fa-hospital:before { - content: "\f0f8"; } - -.fa-hospital-alt:before { - content: "\f47d"; } - -.fa-hospital-symbol:before { - content: "\f47e"; } - -.fa-hospital-user:before { - content: "\f80d"; } - -.fa-hot-tub:before { - content: "\f593"; } - -.fa-hotdog:before { - content: "\f80f"; } - -.fa-hotel:before { - content: "\f594"; } - -.fa-hotjar:before { - content: "\f3b1"; } - -.fa-hourglass:before { - content: "\f254"; } - -.fa-hourglass-end:before { - content: "\f253"; } - -.fa-hourglass-half:before { - content: "\f252"; } - -.fa-hourglass-start:before { - content: "\f251"; } - -.fa-house-damage:before { - content: "\f6f1"; } - -.fa-house-user:before { - content: "\e065"; } - -.fa-houzz:before { - content: "\f27c"; } - -.fa-hryvnia:before { - content: "\f6f2"; } - -.fa-html5:before { - content: "\f13b"; } - -.fa-hubspot:before { - content: "\f3b2"; } - -.fa-i-cursor:before { - content: "\f246"; } - -.fa-ice-cream:before { - content: "\f810"; } - -.fa-icicles:before { - content: "\f7ad"; } - -.fa-icons:before { - content: "\f86d"; } - -.fa-id-badge:before { - content: "\f2c1"; } - -.fa-id-card:before { - content: "\f2c2"; } - -.fa-id-card-alt:before { - content: "\f47f"; } - -.fa-ideal:before { - content: "\e013"; } - -.fa-igloo:before { - content: "\f7ae"; } - -.fa-image:before { - content: "\f03e"; } - -.fa-images:before { - content: "\f302"; } - -.fa-imdb:before { - content: "\f2d8"; } - -.fa-inbox:before { - content: "\f01c"; } - -.fa-indent:before { - content: "\f03c"; } - -.fa-industry:before { - content: "\f275"; } - -.fa-infinity:before { - content: "\f534"; } - -.fa-info:before { - content: "\f129"; } - -.fa-info-circle:before { - content: "\f05a"; } - -.fa-instagram:before { - content: "\f16d"; } - -.fa-instagram-square:before { - content: "\e055"; } - -.fa-intercom:before { - content: "\f7af"; } - -.fa-internet-explorer:before { - content: "\f26b"; } - -.fa-invision:before { - content: "\f7b0"; } - -.fa-ioxhost:before { - content: "\f208"; } - -.fa-italic:before { - content: "\f033"; } - -.fa-itch-io:before { - content: "\f83a"; } - -.fa-itunes:before { - content: "\f3b4"; } - -.fa-itunes-note:before { - content: "\f3b5"; } - -.fa-java:before { - content: "\f4e4"; } - -.fa-jedi:before { - content: "\f669"; } - -.fa-jedi-order:before { - content: "\f50e"; } - -.fa-jenkins:before { - 
content: "\f3b6"; } - -.fa-jira:before { - content: "\f7b1"; } - -.fa-joget:before { - content: "\f3b7"; } - -.fa-joint:before { - content: "\f595"; } - -.fa-joomla:before { - content: "\f1aa"; } - -.fa-journal-whills:before { - content: "\f66a"; } - -.fa-js:before { - content: "\f3b8"; } - -.fa-js-square:before { - content: "\f3b9"; } - -.fa-jsfiddle:before { - content: "\f1cc"; } - -.fa-kaaba:before { - content: "\f66b"; } - -.fa-kaggle:before { - content: "\f5fa"; } - -.fa-key:before { - content: "\f084"; } - -.fa-keybase:before { - content: "\f4f5"; } - -.fa-keyboard:before { - content: "\f11c"; } - -.fa-keycdn:before { - content: "\f3ba"; } - -.fa-khanda:before { - content: "\f66d"; } - -.fa-kickstarter:before { - content: "\f3bb"; } - -.fa-kickstarter-k:before { - content: "\f3bc"; } - -.fa-kiss:before { - content: "\f596"; } - -.fa-kiss-beam:before { - content: "\f597"; } - -.fa-kiss-wink-heart:before { - content: "\f598"; } - -.fa-kiwi-bird:before { - content: "\f535"; } - -.fa-korvue:before { - content: "\f42f"; } - -.fa-landmark:before { - content: "\f66f"; } - -.fa-language:before { - content: "\f1ab"; } - -.fa-laptop:before { - content: "\f109"; } - -.fa-laptop-code:before { - content: "\f5fc"; } - -.fa-laptop-house:before { - content: "\e066"; } - -.fa-laptop-medical:before { - content: "\f812"; } - -.fa-laravel:before { - content: "\f3bd"; } - -.fa-lastfm:before { - content: "\f202"; } - -.fa-lastfm-square:before { - content: "\f203"; } - -.fa-laugh:before { - content: "\f599"; } - -.fa-laugh-beam:before { - content: "\f59a"; } - -.fa-laugh-squint:before { - content: "\f59b"; } - -.fa-laugh-wink:before { - content: "\f59c"; } - -.fa-layer-group:before { - content: "\f5fd"; } - -.fa-leaf:before { - content: "\f06c"; } - -.fa-leanpub:before { - content: "\f212"; } - -.fa-lemon:before { - content: "\f094"; } - -.fa-less:before { - content: "\f41d"; } - -.fa-less-than:before { - content: "\f536"; } - -.fa-less-than-equal:before { - content: "\f537"; } - -.fa-level-down-alt:before { - content: "\f3be"; } - -.fa-level-up-alt:before { - content: "\f3bf"; } - -.fa-life-ring:before { - content: "\f1cd"; } - -.fa-lightbulb:before { - content: "\f0eb"; } - -.fa-line:before { - content: "\f3c0"; } - -.fa-link:before { - content: "\f0c1"; } - -.fa-linkedin:before { - content: "\f08c"; } - -.fa-linkedin-in:before { - content: "\f0e1"; } - -.fa-linode:before { - content: "\f2b8"; } - -.fa-linux:before { - content: "\f17c"; } - -.fa-lira-sign:before { - content: "\f195"; } - -.fa-list:before { - content: "\f03a"; } - -.fa-list-alt:before { - content: "\f022"; } - -.fa-list-ol:before { - content: "\f0cb"; } - -.fa-list-ul:before { - content: "\f0ca"; } - -.fa-location-arrow:before { - content: "\f124"; } - -.fa-lock:before { - content: "\f023"; } - -.fa-lock-open:before { - content: "\f3c1"; } - -.fa-long-arrow-alt-down:before { - content: "\f309"; } - -.fa-long-arrow-alt-left:before { - content: "\f30a"; } - -.fa-long-arrow-alt-right:before { - content: "\f30b"; } - -.fa-long-arrow-alt-up:before { - content: "\f30c"; } - -.fa-low-vision:before { - content: "\f2a8"; } - -.fa-luggage-cart:before { - content: "\f59d"; } - -.fa-lungs:before { - content: "\f604"; } - -.fa-lungs-virus:before { - content: "\e067"; } - -.fa-lyft:before { - content: "\f3c3"; } - -.fa-magento:before { - content: "\f3c4"; } - -.fa-magic:before { - content: "\f0d0"; } - -.fa-magnet:before { - content: "\f076"; } - -.fa-mail-bulk:before { - content: "\f674"; } - -.fa-mailchimp:before { - content: "\f59e"; } - 
[Remainder of the deleted vendor FontAwesome 5 stylesheet: icon codepoint rules from .fa-male through .fa-zhihu, the .sr-only / .sr-only-focusable helpers, and the @font-face declarations backing the .fab, .far, .fa and .fas classes.]

[Also deleted: the vendor webfont files under proco/assets/vendor/fontawesome/webfonts/: fa-brands-400, fa-regular-400 and fa-solid-900 in .eot, .ttf, .woff and .woff2 (binary) plus their .svg sources (FontForge-generated glyph data, omitted here).]
diff --git a/proco/background/admin.py b/proco/background/admin.py
deleted file mode 100644
index 88ce0d6..0000000
--- a/proco/background/admin.py
+++ /dev/null
@@ -1,11 +0,0 @@
-from django.contrib import admin
-
-from proco.background.models import BackgroundTask
-
-
-@admin.register(BackgroundTask)
-class BackgroundTaskAdmin(admin.ModelAdmin):
-    readonly_fields = ('task_id', 'status', 'created_at', 'log', 'completed_at')
-    change_form_template = 'admin/background/backgroud_task_change.html'
-    list_display = ('task_id', 'status', 'created_at', 'completed_at')
-    search_fields = ('task_id', 'log')
diff --git a/proco/background/api.py b/proco/background/api.py
index fefe6eb..6414adf 100644
--- a/proco/background/api.py
+++ b/proco/background/api.py
@@ -11,6 +11,7 @@
 from proco.background.models import BackgroundTask
 from proco.background.serializers import BackgroundTaskSerializer, BackgroundTaskHistorySerializer
 from proco.core import permissions as core_permissions
+from proco.core import utils as core_utilities
 from proco.core.viewsets import BaseModelViewSet
 from proco.utils.error_message import delete_succ_mess
 from proco.utils.filters import NullsAlwaysLastOrderingFilter
@@ -33,7 +34,7 @@ class BackgroundTaskViewSet(BaseModelViewSet):
     ordering_field_names = ['-created_at', '-completed_at']
     apply_query_pagination = True

-    search_fields = ('task_id', 'log',)
+    search_fields
= ('task_id', 'log', 'name', 'description', 'status') filterset_fields = { 'task_id': ['exact', 'in'], 'log': ['exact', 'in'], @@ -45,12 +46,15 @@ def destroy(self, request, *args, **kwargs): try: ids = request.data['task_id'] if isinstance(ids, list) and len(ids) > 0: - queryset = self.model.objects.filter(task_id__in=ids, ) - if queryset.exists(): - action_log(request, queryset, 3, 'Background task deleted', self.model, 'task_id') - queryset.delete() + task_qs = self.model.objects.filter(task_id__in=ids, ) + if task_qs.exists(): + action_log(request, task_qs, 3, 'Background task deleted', self.model, 'name') + task_qs.update( + deleted=core_utilities.get_current_datetime_object(), + deleted_by=core_utilities.get_current_user(request=request), + ) return Response(status=rest_status.HTTP_200_OK, data=delete_succ_mess) - raise ValidationError('{0} value missing in database: {1}'.format('id', ids)) + raise ValidationError('{0} value missing in database: {1}'.format('task_id', ids)) except KeyError as ex: return Response(['Required key {0} missing in the request body'.format(ex)], status=status.HTTP_400_BAD_REQUEST) diff --git a/proco/background/api_urls.py b/proco/background/api_urls.py index 835814e..16cb07c 100644 --- a/proco/background/api_urls.py +++ b/proco/background/api_urls.py @@ -1,4 +1,5 @@ -from django.urls import include, path +from django.urls import path + from proco.background import api app_name = 'background' @@ -7,12 +8,12 @@ path('backgroundtask/', api.BackgroundTaskViewSet.as_view({ 'get': 'list', 'delete': 'destroy', - }), name='list_or_destroy_backgroundtask'), + }), name='list-destroy-backgroundtask'), path('backgroundtask//', api.BackgroundTaskViewSet.as_view({ 'get': 'retrieve', - }), name='update_or_retrieve_backgroundtask'), + }), name='update-retrieve-backgroundtask'), path('backgroundtask//history', api.BackgroundTaskHistoryViewSet.as_view({ 'get': 'list', - }), name='background_task_history'), + }), name='background-task-history'), ] diff --git a/proco/background/migrations/0002_added_name_descritpion_soft_delete_fields.py b/proco/background/migrations/0002_added_name_descritpion_soft_delete_fields.py new file mode 100755 index 0000000..14e66c1 --- /dev/null +++ b/proco/background/migrations/0002_added_name_descritpion_soft_delete_fields.py @@ -0,0 +1,48 @@ +# Generated by Django 2.2.28 on 2024-06-04 12:39 + +from django.db import migrations, models +import proco.core.models + + +class Migration(migrations.Migration): + dependencies = [ + ('background', '0001_initial'), + ] + + operations = [ + migrations.AddField( + model_name='backgroundtask', + name='deleted', + field=proco.core.models.CustomDateTimeField(blank=True, null=True), + ), + migrations.AddField( + model_name='backgroundtask', + name='description', + field=models.CharField(blank=True, db_index=True, max_length=255, null=True, + verbose_name='Task Readable Name'), + ), + migrations.AddField( + model_name='backgroundtask', + name='name', + field=models.CharField(db_index=True, max_length=255, null=True, verbose_name='Task Unique Name'), + ), + migrations.AlterField( + model_name='backgroundtask', + name='completed_at', + field=proco.core.models.CustomDateTimeField(blank=True, null=True), + ), + migrations.AlterField( + model_name='backgroundtask', + name='created_at', + field=proco.core.models.CustomDateTimeField(blank=True, null=True), + ), + migrations.RunSQL( + sql="UPDATE background_backgroundtask SET name = task_id", + reverse_sql=migrations.RunSQL.noop + ), + migrations.AlterField( + 
model_name='backgroundtask', + name='name', + field=models.CharField(db_index=True, max_length=255, verbose_name='Task Unique Name'), + ), + ] diff --git a/proco/background/migrations/0003_added_deleted_bu_field.py b/proco/background/migrations/0003_added_deleted_bu_field.py new file mode 100755 index 0000000..185d276 --- /dev/null +++ b/proco/background/migrations/0003_added_deleted_bu_field.py @@ -0,0 +1,22 @@ +# Generated by Django 2.2.28 on 2024-06-04 12:53 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + dependencies = [ + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ('background', '0002_added_name_descritpion_soft_delete_fields'), + ] + + operations = [ + migrations.AddField( + model_name='backgroundtask', + name='deleted_by', + field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, + related_name='deleted_backgroundtasks', to=settings.AUTH_USER_MODEL, + verbose_name='Deleted By'), + ), + ] diff --git a/proco/background/migrations/0004_added_unique_constraints.py b/proco/background/migrations/0004_added_unique_constraints.py new file mode 100755 index 0000000..ac80382 --- /dev/null +++ b/proco/background/migrations/0004_added_unique_constraints.py @@ -0,0 +1,22 @@ +# Generated by Django 2.2.28 on 2024-06-05 07:07 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ('background', '0003_added_deleted_bu_field'), + ] + + operations = [ + migrations.AddConstraint( + model_name='backgroundtask', + constraint=models.UniqueConstraint(fields=('name', 'status', 'deleted'), + name='background_task_unique_with_deleted'), + ), + migrations.AddConstraint( + model_name='backgroundtask', + constraint=models.UniqueConstraint(condition=models.Q(deleted=None), fields=('name', 'status'), + name='background_task_unique_without_deleted'), + ), + ] diff --git a/proco/background/models.py b/proco/background/models.py index bdaa6a7..47efb1a 100644 --- a/proco/background/models.py +++ b/proco/background/models.py @@ -1,10 +1,15 @@ from django.conf import settings from django.db import models -from django.utils import timezone +from django.db.models import Q +from django.db.models.constraints import UniqueConstraint from django.utils.translation import ugettext as _ - from model_utils import Choices +from proco.core import models as core_models +from proco.core import utils as core_utilities +from proco.core.managers import BaseManager +from proco.utils import dates as date_utilities + class BackgroundTask(models.Model): STATUSES = Choices( @@ -14,22 +19,56 @@ class BackgroundTask(models.Model): PROCESS_STATUSES = [STATUSES.running] task_id = models.CharField(max_length=50, primary_key=True) + created_at = core_models.CustomDateTimeField(null=True, blank=True) + completed_at = core_models.CustomDateTimeField(null=True, blank=True) + + name = models.CharField( + max_length=255, + null=False, + verbose_name='Task Unique Name', + db_index=True, + ) + description = models.CharField( + max_length=255, + null=True, + blank=True, + verbose_name='Task Readable Name', + db_index=True, + ) + status = models.CharField(default=STATUSES.running, choices=STATUSES, max_length=10) log = models.TextField() - created_at = models.DateTimeField(auto_now_add=True) - completed_at = models.DateTimeField(null=True) - objects = models.Manager() + deleted = core_models.CustomDateTimeField(null=True, blank=True) + 
deleted_by = models.ForeignKey( + settings.AUTH_USER_MODEL, + blank=True, + null=True, + related_name='deleted_%(class)ss', + on_delete=models.DO_NOTHING, + verbose_name='Deleted By' + ) + + objects = BaseManager() class Meta: verbose_name = _('Background Task') + constraints = [ + UniqueConstraint(fields=['name', 'status', 'deleted'], + name='background_task_unique_with_deleted'), + UniqueConstraint(fields=['name', 'status'], + condition=Q(deleted=None), + name='background_task_unique_without_deleted'), + ] def __str__(self): - return f'{self.task_id} {self.status}' + return f'Task: {self.name}, Status: {self.status}' def info(self, text: str): if self.log: self.log += '\n' - self.log += '{0}: {1}'.format(timezone.now().strftime(settings.DATETIME_FORMAT), text) + self.log += '{0}: {1}'.format( + date_utilities.format_datetime(core_utilities.get_current_datetime_object(), frmt='%d-%m-%Y %H:%M:%S'), + text) self.save() diff --git a/proco/background/tests/test_api.py b/proco/background/tests/test_api.py index cc8301c..4b52d63 100644 --- a/proco/background/tests/test_api.py +++ b/proco/background/tests/test_api.py @@ -1,45 +1,43 @@ -import traceback +import uuid +from datetime import datetime +import pytz +from django.core.cache import cache from django.test import TestCase from django.urls import reverse from rest_framework import status -from django.core.cache import cache -from datetime import datetime -import pytz, uuid from proco.background.models import BackgroundTask -from proco.custom_auth import models as auth_models +from proco.custom_auth.tests import test_utils as test_utilities from proco.utils.tests import TestAPIViewSetMixin class BackgroundTaskTestCase(TestAPIViewSetMixin, TestCase): base_view = 'background:' - databases = {'read_only_database', 'default'} + databases = {'default', } @classmethod def setUpTestData(cls): - # self.databases = 'default' - cls.email = 'test@test.com' - cls.password = 'SomeRandomPass96' - cls.user = auth_models.ApplicationUser.objects.create_user(username=cls.email, password=cls.password) + cls.user = test_utilities.setup_admin_user_by_role() - cls.role = auth_models.Role.objects.create(name='Admin', category='system') - cls.role_permission = auth_models.UserRoleRelationship.objects.create(user=cls.user, role=cls.role) - - cls.data = {'task_id': str(uuid.uuid4()), 'status': 'running', 'log': "", - 'completed_at': datetime.now(pytz.timezone('Africa/Lagos'))} + cls.data = { + 'task_id': str(uuid.uuid4()), + 'status': 'running', + 'log': '', + 'completed_at': datetime.now(pytz.timezone('Africa/Lagos')) + } cls.task = BackgroundTask.objects.create(**cls.data) - cls.delete_data = {"task_id": [cls.task.task_id]} + cls.delete_data = {'task_id': [cls.task.task_id]} def setUp(self): cache.clear() super().setUp() - def test_retrive(self): + def test_retrieve(self): response = self.forced_auth_req( 'get', - reverse(self.base_view + "update_or_retrieve_backgroundtask", args=(self.task.task_id,)), + reverse(self.base_view + 'update-retrieve-backgroundtask', args=(self.task.task_id,)), user=self.user, ) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -48,33 +46,33 @@ def test_retrive(self): def test_destroy(self): response = self.forced_auth_req( 'delete', - reverse(self.base_view + "list_or_destroy_backgroundtask"), + reverse(self.base_view + 'list-destroy-backgroundtask'), data=self.delete_data, user=self.user, ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertNotEqual(response.status_code, status.HTTP_502_BAD_GATEWAY) 
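
Aside: the soft-delete behaviour exercised by test_destroy above can be summarised with a short sketch. This is illustrative only and not part of the patch; it assumes that BaseManager (from proco.core.managers) excludes rows whose deleted timestamp is set, which this diff does not show.

from django.db import models


class SoftDeleteManager(models.Manager):
    # Stand-in for the assumed behaviour of proco.core.managers.BaseManager:
    # the default queryset hides rows that have already been soft-deleted.
    def get_queryset(self):
        return super().get_queryset().filter(deleted__isnull=True)


def soft_delete(task_qs, request_user, now):
    # Mirrors the new BackgroundTaskViewSet.destroy(): rows are stamped with a
    # deletion time and the deleting user instead of being removed from the table.
    return task_qs.update(deleted=now, deleted_by=request_user)

With a manager like this in place, BackgroundTask.objects queries would no longer return soft-deleted tasks, while the rows remain in the table for auditing.
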
class BackgroundTaskHistoryTestCase(TestAPIViewSetMixin, TestCase): base_view = 'background:' - databases = {'default', 'read_only_database'} + databases = {'default', } def setUp(self): - self.email = 'test@test.com' - self.password = 'SomeRandomPass96' - self.user = auth_models.ApplicationUser.objects.create_user(username=self.email, password=self.password) - - self.data = {'task_id': '8303395e-e8c0-4e72-afb8-35f3a53d01d7', 'status': 'running', 'log': "", - 'completed_at': '2024-03-11 06:50:12'} + self.user = test_utilities.setup_admin_user_by_role() + + self.data = { + 'task_id': '8303395e-e8c0-4e72-afb8-35f3a53d01d7', + 'status': 'running', + 'log': '', + 'completed_at': '2024-03-11 06:50:12' + } self.task = BackgroundTask.objects.create(**self.data) return super().setUp() def test_list(self): response = self.forced_auth_req( 'get', - reverse(self.base_view + "background_task_history", args=(self.task.task_id,)), + reverse(self.base_view + 'background-task-history', args=(self.task.task_id,)), user=self.user) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertNotEqual(response.status_code, status.HTTP_502_BAD_GATEWAY) diff --git a/proco/background/tests/test_task.py b/proco/background/tests/test_task.py index d55b260..3e4b825 100644 --- a/proco/background/tests/test_task.py +++ b/proco/background/tests/test_task.py @@ -1,13 +1,13 @@ -from typing import List -from django.utils import timezone -from celery import current_task -from proco.background.models import BackgroundTask -from proco.locations.models import Country +import uuid from datetime import datetime + import pytz -import uuid, random -from django.test import TestCase from django.core.cache import cache +from django.test import TestCase +from django.utils import timezone + +from proco.background.models import BackgroundTask +from proco.locations.models import Country class BackgroundCeleryTaskTestCase(TestCase): diff --git a/proco/background/utils.py b/proco/background/utils.py new file mode 100644 index 0000000..a237b06 --- /dev/null +++ b/proco/background/utils.py @@ -0,0 +1,35 @@ +from datetime import timedelta + +from proco.background.models import BackgroundTask +from proco.core.utils import get_current_datetime_object + + +def task_on_start(task_id, unique_name, description, check_previous=False): + try: + task = BackgroundTask.objects.filter(name=unique_name).first() + if task: + return + else: + if check_previous and BackgroundTask.objects.filter( + description=description, + created_at__gte=get_current_datetime_object() - timedelta(hours=12), + status=BackgroundTask.STATUSES.running, + ).exists(): + return + else: + task = BackgroundTask.objects.create( + task_id=task_id, + name=unique_name, + description=description, + created_at=get_current_datetime_object(), + status=BackgroundTask.STATUSES.running, + ) + return task + except: + return + + +def task_on_complete(task): + task.status = BackgroundTask.STATUSES.completed + task.completed_at = get_current_datetime_object() + task.save() diff --git a/proco/connection_statistics/admin.py b/proco/connection_statistics/admin.py deleted file mode 100644 index e22f0c9..0000000 --- a/proco/connection_statistics/admin.py +++ /dev/null @@ -1,113 +0,0 @@ -from django.contrib import admin - -from proco.connection_statistics.models import ( - CountryDailyStatus, - CountryWeeklyStatus, - RealTimeConnectivity, - SchoolDailyStatus, - SchoolWeeklyStatus, -) -from proco.locations.filters import CountryFilterList, SchoolCountryFilterList -from proco.utils.admin 
import CountryNameDisplayAdminMixin, SchoolNameDisplayAdminMixin - - -@admin.register(CountryWeeklyStatus) -class CountryWeeklyStatusAdmin(CountryNameDisplayAdminMixin, admin.ModelAdmin): - list_display = ('get_country_name', 'year', 'week', 'integration_status', 'connectivity_speed', 'schools_total', - 'schools_connected', 'schools_connectivity_unknown', 'schools_connectivity_no', - 'schools_connectivity_moderate', 'schools_connectivity_good') - list_filter = ('integration_status', CountryFilterList) - list_select_related = ('country',) - search_fields = ('country__name', 'year', 'week') - ordering = ('-id',) - readonly_fields = ('year', 'week', 'integration_status') - - def get_queryset(self, request): - qs = super().get_queryset(request) - if not request.user.is_superuser: - qs = qs.filter(country__in=request.user.countries_available.all()) - return qs.defer( - 'country__geometry', - 'country__geometry_simplified', - ) - - def has_change_permission(self, request, obj=None): - perm = super().has_change_permission(request, obj) - if not request.user.is_superuser and obj: - perm = obj.country in request.user.countries_available.all() - return perm - - -@admin.register(SchoolWeeklyStatus) -class SchoolWeeklyStatusAdmin(SchoolNameDisplayAdminMixin, admin.ModelAdmin): - list_display = ('get_school_name', 'year', 'week', 'connectivity_speed', - 'connectivity_latency', 'num_students', 'num_teachers') - list_filter = (SchoolCountryFilterList,) - list_select_related = ('school',) - search_fields = ('school__name', 'year', 'week') - ordering = ('-id',) - readonly_fields = ('year', 'week') - raw_id_fields = ('school',) - - def get_queryset(self, request): - qs = super().get_queryset(request) - if not request.user.is_superuser: - qs = qs.filter(school__country__in=request.user.countries_available.all()) - return qs - - -@admin.register(CountryDailyStatus) -class CountryDailyStatusAdmin(CountryNameDisplayAdminMixin, admin.ModelAdmin): - list_display = ('get_country_name', 'date', 'connectivity_speed', 'connectivity_latency') - list_select_related = ('country',) - list_filter = (CountryFilterList,) - search_fields = ('country__name',) - ordering = ('-id',) - date_hierarchy = 'date' - raw_id_fields = ('country',) - - def get_queryset(self, request): - qs = super().get_queryset(request) - if not request.user.is_superuser: - qs = qs.filter(country__in=request.user.countries_available.all()) - return qs.defer( - 'country__geometry', - 'country__geometry_simplified', - ) - - -@admin.register(SchoolDailyStatus) -class SchoolDailyStatusAdmin(SchoolNameDisplayAdminMixin, admin.ModelAdmin): - list_display = ('get_school_name', 'date', 'connectivity_speed', 'connectivity_latency') - list_select_related = ('school',) - search_fields = ('school__name',) - list_filter = (SchoolCountryFilterList,) - ordering = ('-id',) - raw_id_fields = ('school',) - show_full_result_count = False - - def get_queryset(self, request): - qs = super().get_queryset(request) - if not request.user.is_superuser: - qs = qs.filter(school__country__in=request.user.countries_available.all()) - return qs - - -@admin.register(RealTimeConnectivity) -class RealTimeConnectivityAdmin(admin.ModelAdmin): - list_display = ('id', 'created', 'connectivity_speed', 'connectivity_latency') - ordering = ('-id',) - readonly_fields = ('school', 'created', 'modified') - show_full_result_count = False - - def has_add_permission(self, request): - return False - - def has_delete_permission(self, request, obj=None): - return False - - def 
has_change_permission(self, request, obj=None): - return False - - def has_view_permission(self, request, obj=None): - return True diff --git a/proco/connection_statistics/aggregations.py b/proco/connection_statistics/aggregations.py index a2e61d4..97fff28 100644 --- a/proco/connection_statistics/aggregations.py +++ b/proco/connection_statistics/aggregations.py @@ -31,9 +31,11 @@ def aggregate_connectivity_by_availability(qs): def aggregate_coverage_by_types(qs): return qs.aggregate(**{ ColorMapSchema.GOOD: Count('school', filter=Q( - coverage_type__in=[SchoolWeeklyStatus.COVERAGE_4G, SchoolWeeklyStatus.COVERAGE_3G], + coverage_type__in=[SchoolWeeklyStatus.COVERAGE_5G, SchoolWeeklyStatus.COVERAGE_4G], + )), + ColorMapSchema.MODERATE: Count('school', filter=Q( + coverage_type__in=[SchoolWeeklyStatus.COVERAGE_3G, SchoolWeeklyStatus.COVERAGE_2G], )), - ColorMapSchema.MODERATE: Count('school', filter=Q(coverage_type=SchoolWeeklyStatus.COVERAGE_2G)), ColorMapSchema.NO: Count('school', filter=Q(coverage_type=SchoolWeeklyStatus.COVERAGE_NO)), ColorMapSchema.UNKNOWN: Count('school', filter=Q(coverage_type=SchoolWeeklyStatus.COVERAGE_UNKNOWN)), }) diff --git a/proco/connection_statistics/api.py b/proco/connection_statistics/api.py index 556d16b..5fac616 100644 --- a/proco/connection_statistics/api.py +++ b/proco/connection_statistics/api.py @@ -33,9 +33,9 @@ from proco.connection_statistics.utils import get_benchmark_value_for_default_download_layer from proco.core import db_utils as db_utilities from proco.core import permissions as core_permissions -from proco.core.utils import is_blank_string, get_current_datetime_object +from proco.core import utils as core_utilities from proco.core.viewsets import BaseModelViewSet -from proco.locations.models import Country, CountryAdminMetadata +from proco.locations.models import Country from proco.schools.models import School from proco.utils import dates as date_utilities from proco.utils.cache import cache_manager @@ -117,6 +117,20 @@ def calculate_global_statistic(self): 'all_countries', 'schools_with_connectivity_status_mapped', 'countries_with_connectivity_status_mapped').order_by() + school_filters = core_utilities.get_filter_sql(self.request, 'schools', 'schools_school') + if len(school_filters) > 0: + school_connectivity_status_qry = school_connectivity_status_qry.extra(where=[school_filters]) + + school_static_filters = core_utilities.get_filter_sql(self.request, 'school_static', + 'connection_statistics_schoolweeklystatus') + if len(school_static_filters) > 0: + school_connectivity_status_qry = school_connectivity_status_qry.annotate( + total_weekly_schools=Count('last_weekly_status__school_id', distinct=True), + ).values('connected', 'not_connected', 'unknown', 'total_schools', + 'all_countries', 'schools_with_connectivity_status_mapped', + 'countries_with_connectivity_status_mapped', 'total_weekly_schools') + school_connectivity_status_qry = school_connectivity_status_qry.extra(where=[school_static_filters]) + giga_connectivity_benchmark, giga_connectivity_benchmark_unit = get_benchmark_value_for_default_download_layer( 'global', None) @@ -141,338 +155,6 @@ def calculate_global_statistic(self): } -# @method_decorator([cache_control(public=True, max_age=settings.CACHE_CONTROL_MAX_AGE_FOR_FE)], name='dispatch') -# class ConnectivityStatsAPIVIEW(APIView): -# permission_classes = (AllowAny,) -# model = SchoolWeeklyStatus -# queryset = model.objects.all() -# serializer_class = statistics_serializers.SchoolWeeklyStatusSerializer -# 
schools_daily_status_qs = SchoolDailyStatus.objects.all() -# global_giga_benchmark = 20000000 # this will be a dynamic value from the admin panel -# -# def get(self, request, *args, **kwargs): -# -# request_path = remove_query_param(request.get_full_path(), 'cache') -# is_weekly = request.query_params.get('is_weekly', 'true') == 'true' -# -# start_date = date_utilities.to_date(self.request.query_params.get('start_date'), -# default=datetime.combine(datetime.now(), time.min)) -# end_date = date_utilities.to_date(self.request.query_params.get('end_date'), -# default=datetime.combine(datetime.now(), time.min)) -# -# month_number = date_utilities.get_month_from_date(start_date) -# year_number = date_utilities.get_year_from_date(start_date) -# -# if is_weekly: -# # If is_weekly == True, then pick the week number based on start_date -# week_number = date_utilities.get_week_from_date(start_date) -# else: -# # If is_weekly == False, then: -# # 1. Collect dates on all sundays of the given month and year -# # 2. Get the week numbers for all sundays and look into SchoolWeeklyStatus table for which last week number -# # data was created in the given month of the year. And pick this week number -# dates_on_all_sundays = date_utilities.all_days_of_a_month(year_number, month_number, -# day_name='sunday').keys() -# week_numbers_for_month = [date_utilities.get_week_from_date(date) for date in dates_on_all_sundays] -# week_number = self.queryset.filter(year=year_number, week__in=week_numbers_for_month -# ).order_by('-week').values_list('week', flat=True).first() -# -# if not week_number: -# # If for any week of the month data is not available then pick last week number -# week_number = week_numbers_for_month[-1] -# -# indicator = request.query_params.get('indicator', '').lower() -# -# if indicator == 'download': -# cache_key = f'{week_number}{year_number}WEEKLY_DOWNLOAD_CONNECTIVITY_STATS' \ -# if is_weekly else f'{month_number}{year_number}MONTHLY_DOWNLOAD_CONNECTIVITY_STATS' -# -# data = None # cache_manager.get(cache_key) -# -# if not data: -# data = self.calculate_download_connectivity_statistic(start_date, end_date, week_number, year_number) -# cache_manager.set(cache_key, data, request_path=request_path) -# -# elif indicator == 'uptime': -# cache_key = f'{week_number}{year_number}WEEKLY_UPTIME_CONNECTIVITY_STATS' \ -# if is_weekly else f'{month_number}{year_number}MONTHLY_UPTIME_CONNECTIVITY_STATS' -# -# data = None # cache_manager.get(cache_key) -# -# if not data: -# data = { -# 'live_avg': 28, -# 'no_of_schools_measure': 60000, -# 'school_with_realtime_data': 16000, -# 'real_time_connected_schools': { -# 'good': 4000, -# 'moderate': 4000, -# 'no_internet': 4000, -# 'unknown': 4000 -# }, -# 'graph_data': generate_static_graph_data(start_date, end_date, indicator) -# } -# -# cache_manager.set(cache_key, data, request_path=request_path) -# -# else: -# cache_key = f'{week_number}{year_number}WEEKLY_LATENCY_CONNECTIVITY_STATS' \ -# if is_weekly else f'{month_number}{year_number}MONTHLY_LATENCY_CONNECTIVITY_STATS' -# -# data = None # cache_manager.get(cache_key) -# if not data: -# data = { -# 'live_avg': 28, -# 'no_of_schools_measure': 60000, -# 'school_with_realtime_data': 16000, -# 'real_time_connected_schools': { -# 'good': 4000, -# 'moderate': 4000, -# 'no_internet': 4000, -# 'unknown': 4000 -# }, -# 'graph_data': generate_static_graph_data(start_date, end_date, indicator) -# } -# -# cache_manager.set(cache_key, data, request_path=request_path) -# -# return Response(data=data) -# -# def 
calculate_download_connectivity_statistic(self, start_date, end_date, week_number, year_number): -# -# benchmark = self.request.query_params.get('benchmark', 'global') -# speed_benchmark = GigaGlobalBenchmark.connectivity_speed.value.get( -# 'value', statuses_schema.CONNECTIVITY_SPEED_FOR_GOOD_CONNECTIVITY_STATUS -# ) -# if benchmark == 'national': -# speed_benchmark = statuses_schema.CONNECTIVITY_SPEED_FOR_GOOD_CONNECTIVITY_STATUS -# -# weekly_queryset = self.queryset.filter(week=week_number, year=year_number).annotate( -# dummy_group_by=Value(1)).values('dummy_group_by').annotate( -# good=Count(Case(When(connectivity_speed__gt=speed_benchmark, then='school')), distinct=True), -# moderate=Count(Case(When(connectivity_speed__lte=speed_benchmark, connectivity_speed__gt=1000000, -# then='school')), distinct=True), -# bad=Count(Case(When(connectivity_speed__lte=1000000, then='school')), distinct=True), -# unknown=Count(Case(When(connectivity_speed__isnull=True, then='school')), distinct=True), -# school_with_realtime_data=Count(Case(When(connectivity_speed__isnull=False, then='school')), distinct=True), -# no_of_schools_measure=Count('school', distinct=True), -# ).values('good', 'moderate', 'bad', 'unknown', 'school_with_realtime_data', 'no_of_schools_measure').order_by() -# -# real_time_connected_schools = { -# 'good': weekly_queryset[0]['good'], -# 'moderate': weekly_queryset[0]['moderate'], -# 'no_internet': weekly_queryset[0]['bad'], -# 'unknown': weekly_queryset[0]['unknown'], -# } -# -# graph_data, positive_speeds = self.generate_graph_data(start_date, end_date) -# live_avg = round(sum(positive_speeds) / len(positive_speeds), 2) if len(positive_speeds) > 0 else 0 -# -# data = { -# 'live_avg': live_avg, -# 'no_of_schools_measure': weekly_queryset[0]['no_of_schools_measure'], -# 'school_with_realtime_data': weekly_queryset[0]['school_with_realtime_data'], -# 'real_time_connected_schools': real_time_connected_schools, -# 'graph_data': graph_data -# } -# -# return data -# -# def generate_graph_data(self, start_date, end_date): -# # Get all the school ids from SchoolWeeklyStatus model for the given start_date and end_date -# # school_ids = self.queryset.filter(date__range=[start_date, end_date]).values('school').distinct() -# # Get the daily connectivity_speed for the given school ids from SchoolDailyStatus model -# avg_daily_connectivity_speed = self.schools_daily_status_qs.filter(date__range=[start_date, end_date]).values( -# 'date').annotate(avg_speed=Avg('connectivity_speed')).order_by('date') -# -# # Generate the graph data in the desired format -# graph_data = [] -# current_date = start_date -# -# while current_date <= end_date: -# graph_data.append({ -# 'group': 'Download speed', -# 'key': date_utilities.format_date(current_date), -# 'value': None # Default value, will be updated later if data exists for the date -# }) -# current_date += timedelta(days=1) -# -# all_positive_speeds = [] -# -# # Update the graph_data with actual values if they exist -# for daily_avg_data in avg_daily_connectivity_speed: -# formatted_date = date_utilities.format_date(daily_avg_data['date']) -# for entry in graph_data: -# if entry['key'] == formatted_date: -# try: -# rounded_speed = 0 -# if daily_avg_data['avg_speed'] is not None: -# rounded_speed = round(daily_avg_data['avg_speed'] / 1000000, 2) -# entry['value'] = rounded_speed -# all_positive_speeds.append(rounded_speed) -# except (KeyError, TypeError): -# pass -# -# return graph_data, all_positive_speeds - - -# def 
generate_static_graph_data(current_date, end_date, indicator): -# data = [] -# -# while current_date <= end_date: -# data.append({ -# 'group': indicator, -# 'key': date_utilities.format_date(current_date), -# 'value': random.randint(2, 50) -# }) -# -# current_date += timedelta(days=1) -# -# return data - - -# class CountryWeekStatsAPIView(RetrieveAPIView): -# permission_classes = (AllowAny,) -# -# model = SchoolWeeklyStatus -# queryset = model.objects.all() -# -# country_daily_status_qs = CountryDailyStatus.objects.all() -# -# def get(self, *args, **kwargs): -# country_id = self.request.query_params.get('country_id', None) -# get_object_or_404(Country.objects.defer('geometry', 'geometry_simplified', ), id=country_id, ) -# -# is_weekly = self.request.query_params.get('is_weekly', 'true') == 'true' -# start_date = date_utilities.to_date(self.request.query_params.get('start_date'), -# default=datetime.combine(datetime.now(), time.min)) -# end_date = date_utilities.to_date(self.request.query_params.get('end_date'), -# default=datetime.combine(datetime.now(), time.min)) -# -# month_number = date_utilities.get_month_from_date(start_date) -# year_number = date_utilities.get_year_from_date(start_date) -# -# if is_weekly: -# # If is_weekly == True, then pick the week number based on start_date -# week_number = date_utilities.get_week_from_date(start_date) -# else: -# # If is_weekly == False, then: -# # 1. Collect dates on all sundays of the given month and year -# # 2. Get the week numbers for all sundays and look into SchoolWeeklyStatus table for which last week number -# # data was created in the given month of the year. And pick this week number -# dates_on_all_sundays = date_utilities.all_days_of_a_month(year_number, month_number, -# day_name='sunday').keys() -# week_numbers_for_month = [date_utilities.get_week_from_date(date) for date in dates_on_all_sundays] -# week_number = self.queryset.filter(year=year_number, week__in=week_numbers_for_month, -# ).order_by('-week').values_list('week', flat=True).first() -# -# if not week_number: -# # If for any week of the month data is not available then pick last week number -# week_number = week_numbers_for_month[-1] -# -# indicator = self.request.query_params.get('indicator', '').lower() -# -# if indicator == 'download': -# data = self.calculate_country_download_indicator(start_date, end_date, week_number, year_number, country_id) -# else: -# data = { -# 'live_avg': 28, -# 'no_of_schools_measure': 60000, -# 'school_with_realtime_data': 1600000, -# 'real_time_connected_schools': { -# 'good': 400000, -# 'moderate': 400000, -# 'no_internet': 300000, -# 'unknown': 500000 -# }, -# 'graph_data': generate_static_graph_data(start_date, end_date, indicator), -# 'is_data_synced': True, -# } -# -# return Response(data=data) -# -# def calculate_country_download_indicator(self, start_date, end_date, week_number, year_number, country_id): -# benchmark = self.request.query_params.get('benchmark', 'global') -# speed_benchmark = GigaGlobalBenchmark.connectivity_speed.value.get( -# 'value', statuses_schema.CONNECTIVITY_SPEED_FOR_GOOD_CONNECTIVITY_STATUS -# ) -# if benchmark == 'national': -# speed_benchmark = statuses_schema.CONNECTIVITY_SPEED_FOR_GOOD_CONNECTIVITY_STATUS -# -# weekly_queryset = self.queryset.filter( -# school__country_id=country_id, week=week_number, year=year_number).annotate( -# dummy_group_by=Value(1)).values('dummy_group_by').annotate( -# good=Count(Case(When(connectivity_speed__gt=speed_benchmark, then='school')), distinct=True), -# 
moderate=Count(Case(When(connectivity_speed__lte=speed_benchmark, connectivity_speed__gt=1000000, -# then='school')), distinct=True), -# bad=Count(Case(When(connectivity_speed__lte=1000000, then='school')), distinct=True), -# unknown=Count(Case(When(connectivity_speed__isnull=True, then='school')), distinct=True), -# school_with_realtime_data=Count(Case(When(connectivity_speed__isnull=False, then='school')), distinct=True), -# no_of_schools_measure=Count('school', distinct=True), -# ).values('good', 'moderate', 'bad', 'unknown', 'school_with_realtime_data', 'no_of_schools_measure').order_by() -# -# real_time_connected_schools = { -# 'good': weekly_queryset[0]['good'], -# 'moderate': weekly_queryset[0]['moderate'], -# 'no_internet': weekly_queryset[0]['bad'], -# 'unknown': weekly_queryset[0]['unknown'], -# } -# -# graph_data, positive_speeds = self.generate_country_graph_data(start_date, end_date, country_id) -# -# # live_avg = country_instance.get('connectivity_speed', 0) -# live_avg = round(sum(positive_speeds) / len(positive_speeds), 2) if len(positive_speeds) > 0 else 0 -# -# is_data_synced = self.queryset.filter( -# school__country_id=country_id, -# school__realtime_registration_status__rt_registered=True, -# ).exists() -# -# return { -# 'live_avg': live_avg, -# 'no_of_schools_measure': weekly_queryset[0]['no_of_schools_measure'], -# 'school_with_realtime_data': weekly_queryset[0]['school_with_realtime_data'], -# 'real_time_connected_schools': real_time_connected_schools, -# 'graph_data': graph_data, -# 'is_data_synced': is_data_synced, -# } -# -# def generate_country_graph_data(self, start_date, end_date, country_id): -# # Get the daily connectivity_speed for the given country from CountryDailyStatus model -# daily_connectivity_speed = self.country_daily_status_qs.filter(country_id=country_id, -# date__range=[start_date, end_date]) -# -# # Generate the graph data in the desired format -# graph_data = [] -# current_date = start_date -# -# while current_date <= end_date: -# graph_data.append({ -# 'group': 'Download speed', -# 'key': date_utilities.format_date(current_date), -# 'value': None # Default value, will be updated later if data exists for the date -# }) -# current_date += timedelta(days=1) -# -# all_positive_speeds = [] -# -# # Update the graph_data with actual values if they exist -# for daily_data in daily_connectivity_speed: -# formatted_date = date_utilities.format_date(daily_data.date) -# for entry in graph_data: -# if entry['key'] == formatted_date: -# try: -# rounded_speed = 0 -# if daily_data.connectivity_speed is not None: -# rounded_speed = round(daily_data.connectivity_speed / 1000000, 2) -# entry['value'] = rounded_speed -# all_positive_speeds.append(rounded_speed) -# except (KeyError, TypeError): -# pass -# -# return graph_data, all_positive_speeds - - class CountryDailyStatsListAPIView(ListAPIView): model = CountryDailyStatus queryset = model.objects.all() @@ -514,58 +196,6 @@ def get_queryset(self): return queryset.filter(school_id=self.kwargs['school_id']) -@method_decorator([cache_control(public=True, max_age=settings.CACHE_CONTROL_MAX_AGE_FOR_FE)], name='dispatch') -class CoverageStatsAPIVIEW(APIView): - permission_classes = (AllowAny,) - - CACHE_KEY = 'cache' - CACHE_KEY_PREFIX = 'COVERAGE_STATS' - - def get_cache_key(self): - params = dict(self.request.query_params) - params.pop(self.CACHE_KEY, None) - return '{0}_{1}'.format(self.CACHE_KEY_PREFIX, - '_'.join(map(lambda x: '{0}_{1}'.format(x[0], x[1]), sorted(params.items()))), ) - - def get(self, request, 
*args, **kwargs): - use_cached_data = self.request.query_params.get(self.CACHE_KEY, 'on').lower() in ['on', 'true'] - request_path = remove_query_param(request.get_full_path(), 'cache') - cache_key = self.get_cache_key() - - data = None - if use_cached_data: - data = cache_manager.get(cache_key) - - if not data: - # Query the School table to get the coverage data - # Get the total number of schools with coverage data - # Get the count of schools falling under different coverage types - school_coverage_type_qry = School.objects.all().annotate( - dummy_group_by=Value(1)).values('dummy_group_by').annotate( - g_4_5=Count(Case(When(coverage_type__in=['5g', '4g'], then='id')), distinct=True), - g_2_3=Count(Case(When(coverage_type__in=['3g', '2g'], then='id')), distinct=True), - no_coverage=Count(Case(When(coverage_type='no', then='id')), distinct=True), - unknown=Count(Case(When(coverage_type__in=['unknown', None], then='id')), distinct=True), - total_coverage_schools=Count(Case(When(coverage_type__isnull=False, then='id')), distinct=True), - ).values('g_4_5', 'g_2_3', 'no_coverage', 'unknown', 'total_coverage_schools').order_by() - - coverage_data = { - '5g_4g': school_coverage_type_qry[0]['g_4_5'], - '3g_2g': school_coverage_type_qry[0]['g_2_3'], - 'no_coverage': school_coverage_type_qry[0]['no_coverage'], - 'unknown': school_coverage_type_qry[0]['unknown'], - } - - data = { - 'total_coverage_schools': school_coverage_type_qry[0]['total_coverage_schools'], - 'coverage_schools': coverage_data, - } - - cache_manager.set(cache_key, data, request_path=request_path, soft_timeout=settings.CACHE_CONTROL_MAX_AGE) - - return Response(data=data) - - class SchoolConnectivityStatsListAPIView(ListAPIView): model = School queryset = model.objects.all().select_related('last_weekly_status') @@ -600,10 +230,9 @@ def update_kwargs(self): time.min)) school_ids = self.request.query_params.get('school_ids', '') - if not is_blank_string(school_ids): + if not core_utilities.is_blank_string(school_ids): school_ids = [int(school_id.strip()) for school_id in school_ids.split(',')] else: - # TODO: If School_ID not provided, then limit the output rows to 3 schools (P3 - Low priority) school_ids = [34554] self.kwargs.update({ @@ -658,7 +287,7 @@ def get_queryset(self): benchmark = self.request.query_params.get('benchmark', 'global') country_id = self.kwargs['country_id'] - speed_benchmark, speed_benchmark_unit = get_benchmark_value_for_default_download_layer(benchmark, country_id) + speed_benchmark, _ = get_benchmark_value_for_default_download_layer(benchmark, country_id) self.kwargs['speed_benchmark'] = speed_benchmark school_status_in_given_week_qry = SchoolWeeklyStatus.objects.filter( @@ -735,10 +364,9 @@ class SchoolCoverageStatsListAPIView(ListAPIView): def update_kwargs(self): school_ids = self.request.query_params.get('school_ids', '') - if not is_blank_string(school_ids): + if not core_utilities.is_blank_string(school_ids): school_ids = [int(school_id.strip()) for school_id in school_ids.split(',')] else: - # TODO: If School_ID not provided, then limit the output rows to 3 schools (P3 - Low priority) school_ids = [34554] self.kwargs.update({ @@ -764,57 +392,6 @@ def list(self, request, *args, **kwargs): return Response(serializer.data) -@method_decorator([cache_control(public=True, max_age=settings.CACHE_CONTROL_MAX_AGE_FOR_FE)], name='dispatch') -class CountryCoverageStatsAPIView(APIView): - permission_classes = (AllowAny,) - - CACHE_KEY = 'cache' - CACHE_KEY_PREFIX = 'COUNTRY_COVERAGE_STATS' - - def 
get_cache_key(self): - params = dict(self.request.query_params) - params.pop(self.CACHE_KEY, None) - return '{0}_{1}'.format(self.CACHE_KEY_PREFIX, - '_'.join(map(lambda x: '{0}_{1}'.format(x[0], x[1]), sorted(params.items()))), ) - - def get(self, request, *args, **kwargs): - use_cached_data = self.request.query_params.get(self.CACHE_KEY, 'on').lower() in ['on', 'true'] - request_path = remove_query_param(request.get_full_path(), self.CACHE_KEY) - cache_key = self.get_cache_key() - - data = None - if use_cached_data: - data = cache_manager.get(cache_key) - - if not data: - country_id = self.request.query_params.get('country_id', None) - - school_coverage_type_qry = School.objects.filter(country_id=country_id).annotate( - dummy_group_by=Value(1)).values('dummy_group_by').annotate( - g_4_5=Count(Case(When(coverage_type__in=['5g', '4g'], then='id')), distinct=True), - g_2_3=Count(Case(When(coverage_type__in=['3g', '2g'], then='id')), distinct=True), - no_coverage=Count(Case(When(coverage_type='no', then='id')), distinct=True), - unknown=Count(Case(When(coverage_type__in=['unknown', None], then='id')), distinct=True), - total_coverage_schools=Count(Case(When(coverage_type__isnull=False, then='id')), distinct=True), - ).values('g_4_5', 'g_2_3', 'no_coverage', 'unknown', 'total_coverage_schools').order_by() - - coverage_data = { - '5g_4g': school_coverage_type_qry[0]['g_4_5'], - '3g_2g': school_coverage_type_qry[0]['g_2_3'], - 'no_coverage': school_coverage_type_qry[0]['no_coverage'], - 'unknown': school_coverage_type_qry[0]['unknown'], - } - - data = { - 'total_coverage_schools': school_coverage_type_qry[0]['total_coverage_schools'], - 'coverage_schools': coverage_data, - } - - cache_manager.set(cache_key, data, request_path=request_path, soft_timeout=settings.CACHE_CONTROL_MAX_AGE) - - return Response(data=data) - - @method_decorator([cache_control(public=True, max_age=settings.CACHE_CONTROL_MAX_AGE_FOR_FE)], name='dispatch') class ConnectivityAPIView(APIView): permission_classes = (AllowAny,) @@ -825,6 +402,9 @@ class ConnectivityAPIView(APIView): CACHE_KEY = 'cache' CACHE_KEY_PREFIX = 'CONNECTIVITY_STATS' + school_filters = [] + school_static_filters = [] + def get_cache_key(self): params = dict(self.request.query_params) params.pop(self.CACHE_KEY, None) @@ -841,16 +421,16 @@ def get(self, request, *args, **kwargs): data = cache_manager.get(cache_key) if not data: + self.school_filters = core_utilities.get_filter_sql(self.request, 'schools', 'schools_school') + self.school_static_filters = core_utilities.get_filter_sql(self.request, 'school_static', + 'connection_statistics_schoolweeklystatus') + country_id = self.request.query_params.get('country_id', None) if country_id: - get_object_or_404(Country.objects.defer('geometry', 'geometry_simplified', ), id=country_id) self.queryset = self.queryset.filter(country_id=country_id) admin1_id = self.request.query_params.get('admin1_id', None) if admin1_id: - get_object_or_404(CountryAdminMetadata.objects.filter( - layer_name=CountryAdminMetadata.LAYER_NAME_ADMIN1, - ), id=admin1_id) self.queryset = self.queryset.filter(admin1_id=admin1_id) is_weekly = self.request.query_params.get('is_weekly', 'true') == 'true' @@ -890,7 +470,7 @@ def calculate_country_download_data(self, start_date, end_date, week_number, yea benchmark = self.request.query_params.get('benchmark', 'global') country_id = self.request.query_params.get('country_id', None) - speed_benchmark, speed_benchmark_unit = get_benchmark_value_for_default_download_layer(benchmark, country_id) + 
speed_benchmark, _ = get_benchmark_value_for_default_download_layer(benchmark, country_id) weekly_queryset = self.queryset.annotate( t=FilteredRelation( @@ -914,16 +494,28 @@ def calculate_country_download_data(self, start_date, end_date, week_number, yea ).values('good', 'moderate', 'bad', 'unknown', 'school_with_realtime_data', 'no_of_schools_measure', 'countries_with_realtime_data').order_by() + if len(self.school_filters) > 0: + weekly_queryset = weekly_queryset.extra(where=[self.school_filters]) + + if len(self.school_static_filters) > 0: + school_static_filters = core_utilities.get_filter_sql(self.request, 'school_static', 'T5') + weekly_queryset = weekly_queryset.annotate( + total_weekly_schools=Count('last_weekly_status__school_id', distinct=True), + ).values( + 'good', 'moderate', 'bad', 'unknown', 'school_with_realtime_data', + 'no_of_schools_measure', 'countries_with_realtime_data', 'total_weekly_schools' + ).extra(where=[school_static_filters]) + + weekly_status = list(weekly_queryset)[0] real_time_connected_schools = { - 'good': weekly_queryset[0]['good'], - 'moderate': weekly_queryset[0]['moderate'], - 'no_internet': weekly_queryset[0]['bad'], - 'unknown': weekly_queryset[0]['unknown'], + 'good': weekly_status['good'], + 'moderate': weekly_status['moderate'], + 'no_internet': weekly_status['bad'], + 'unknown': weekly_status['unknown'], } graph_data, positive_speeds = self.generate_country_graph_data(start_date, end_date) - # live_avg = country_instance.get('connectivity_speed', 0) live_avg = round(sum(positive_speeds) / len(positive_speeds), 2) if len(positive_speeds) > 0 else 0 live_avg_connectivity = 'unknown' @@ -940,30 +532,30 @@ def calculate_country_download_data(self, start_date, end_date, week_number, yea country_id = self.request.query_params.get('country_id', None) admin1_id = self.request.query_params.get('admin1_id', None) + is_data_synced_qs = SchoolWeeklyStatus.objects.filter( + school__realtime_registration_status__rt_registered=True, + ) + + if len(self.school_filters) > 0: + is_data_synced_qs = is_data_synced_qs.extra(where=[self.school_filters]) + + if len(self.school_static_filters) > 0: + is_data_synced_qs = is_data_synced_qs.extra(where=[self.school_static_filters]) + if admin1_id: - is_data_synced = SchoolWeeklyStatus.objects.filter( - school__admin1_id=admin1_id, - school__realtime_registration_status__rt_registered=True, - ).exists() - elif country_id: - is_data_synced = SchoolWeeklyStatus.objects.filter( - school__country_id=country_id, - school__realtime_registration_status__rt_registered=True, - ).exists() - else: - is_data_synced = SchoolWeeklyStatus.objects.filter( - school__realtime_registration_status__rt_registered=True, - ).exists() + is_data_synced_qs = is_data_synced_qs.filter(school__admin1_id=admin1_id) + if country_id: + is_data_synced_qs = is_data_synced_qs.filter(school__country_id=country_id) return { 'live_avg': live_avg, 'live_avg_connectivity': live_avg_connectivity, - 'no_of_schools_measure': weekly_queryset[0]['no_of_schools_measure'], - 'school_with_realtime_data': weekly_queryset[0]['school_with_realtime_data'], - 'countries_with_realtime_data': weekly_queryset[0]['countries_with_realtime_data'], + 'no_of_schools_measure': weekly_status['no_of_schools_measure'], + 'school_with_realtime_data': weekly_status['school_with_realtime_data'], + 'countries_with_realtime_data': weekly_status['countries_with_realtime_data'], 'real_time_connected_schools': real_time_connected_schools, 'graph_data': graph_data, - 'is_data_synced': 
is_data_synced, + 'is_data_synced': is_data_synced_qs.exists(), 'benchmark_metadata': { 'benchmark_value': str(speed_benchmark), 'benchmark_unit': "bps", @@ -984,6 +576,15 @@ def generate_country_graph_data(self, start_date, end_date): avg_speed=Avg('daily_status__connectivity_speed'), ).order_by('daily_status__date') + if len(self.school_filters) > 0: + avg_daily_connectivity_speed = avg_daily_connectivity_speed.extra(where=[self.school_filters]) + + if len(self.school_static_filters) > 0: + avg_daily_connectivity_speed = avg_daily_connectivity_speed.annotate( + total_weekly_schools=Count('last_weekly_status__school_id', distinct=True), + ) + avg_daily_connectivity_speed = avg_daily_connectivity_speed.extra(where=[self.school_static_filters]) + # Generate the graph data in the desired format graph_data = [] current_date = start_date @@ -997,8 +598,6 @@ def generate_country_graph_data(self, start_date, end_date): current_date += timedelta(days=1) all_positive_speeds = [] - - # Update the graph_data with actual values if they exist # Update the graph_data with actual values if they exist for daily_avg_data in avg_daily_connectivity_speed: formatted_date = date_utilities.format_date(daily_avg_data['daily_status__date']) @@ -1036,6 +635,9 @@ class CoverageAPIView(APIView): 'id': ['exact', 'in'], } + school_filters = [] + school_static_filters = [] + def get_cache_key(self): params = dict(self.request.query_params) params.pop(self.CACHE_KEY, None) @@ -1065,6 +667,10 @@ def get(self, request, *args, **kwargs): data = cache_manager.get(cache_key) if not data: + self.school_filters = core_utilities.get_filter_sql(self.request, 'schools', 'schools_school') + self.school_static_filters = core_utilities.get_filter_sql(self.request, 'school_static', + 'connection_statistics_schoolweeklystatus') + # Query the School table to get the coverage data # Get the total number of schools with coverage data # Get the count of schools falling under different coverage types @@ -1079,15 +685,26 @@ def get(self, request, *args, **kwargs): total_coverage_schools=Count(Case(When(coverage_type__isnull=False, then='id')), distinct=True), ).values('g_4_5', 'g_2_3', 'no_coverage', 'unknown', 'total_coverage_schools').order_by() + if len(self.school_filters) > 0: + school_coverage_type_qry = school_coverage_type_qry.extra(where=[self.school_filters]) + + if len(self.school_static_filters) > 0: + school_coverage_type_qry = school_coverage_type_qry.annotate( + total_weekly_schools=Count('last_weekly_status__school_id', distinct=True), + ).values( + 'g_4_5', 'g_2_3', 'no_coverage', 'unknown', 'total_coverage_schools', 'total_weekly_schools' + ).extra(where=[self.school_static_filters]) + + school_coverage_status = list(school_coverage_type_qry)[0] coverage_data = { - '5g_4g': school_coverage_type_qry[0]['g_4_5'], - '3g_2g': school_coverage_type_qry[0]['g_2_3'], - 'no_coverage': school_coverage_type_qry[0]['no_coverage'], - 'unknown': school_coverage_type_qry[0]['unknown'], + '5g_4g': school_coverage_status['g_4_5'], + '3g_2g': school_coverage_status['g_2_3'], + 'no_coverage': school_coverage_status['no_coverage'], + 'unknown': school_coverage_status['unknown'], } data = { - 'total_schools': school_coverage_type_qry[0]['total_coverage_schools'], + 'total_schools': school_coverage_status['total_coverage_schools'], 'connected_schools': coverage_data, } @@ -1127,23 +744,18 @@ def get(self, request, *args, **kwargs): country_id = self.request.query_params.get('country_id', None) if country_id: - 
get_object_or_404(Country.objects.defer('geometry', 'geometry_simplified', ), id=country_id) self.queryset = self.queryset.filter(school__country_id=country_id) admin1_id = self.request.query_params.get('admin1_id', None) if admin1_id: - get_object_or_404(CountryAdminMetadata.objects.filter( - layer_name=CountryAdminMetadata.LAYER_NAME_ADMIN1, - ), id=admin1_id) self.queryset = self.queryset.filter(school__admin1_id=admin1_id) school_id = self.request.query_params.get('school_id', None) if school_id: - get_object_or_404(School.objects.all(), id=school_id) self.queryset = self.queryset.filter(school=school_id) school_ids = self.request.query_params.get('school_ids', '') - if not is_blank_string(school_ids): + if not core_utilities.is_blank_string(school_ids): school_ids = [int(school_id.strip()) for school_id in school_ids.split(',')] self.queryset = self.queryset.filter(school__in=school_ids) @@ -1174,7 +786,7 @@ def get(self, request, *args, **kwargs): live_data_source__in=live_data_sources, ).filter(**{parameter_column_name + '__isnull': False}) - today_date = get_current_datetime_object().date() + today_date = core_utilities.get_current_datetime_object().date() monday_date = today_date - timedelta(days=today_date.weekday()) latest_daily_entry = self.queryset.filter( @@ -1207,7 +819,6 @@ def get(self, request, *args, **kwargs): return Response(data=static_data) -# @method_decorator([cache_control(public=True, max_age=settings.CACHE_CONTROL_MAX_AGE_FOR_FE)], name='dispatch') class CountrySummaryAPIViewSet(BaseModelViewSet): model = CountryWeeklyStatus serializer_class = statistics_serializers.CountryWeeklyStatusSerializer @@ -1378,10 +989,11 @@ def retrieve(self, request, pk): try: country_daily_status = CountryDailyStatus.objects.get(id=pk) if country_daily_status: - serializer = statistics_serializers.CountryDailyStatusUpdateRetrieveSerializer(country_daily_status, - partial=True, - context={ - 'request': request}, ) + serializer = statistics_serializers.CountryDailyStatusUpdateRetrieveSerializer( + country_daily_status, + partial=True, + context={'request': request}, + ) return Response(serializer.data) return Response(status=rest_status.HTTP_404_NOT_FOUND, data=error_mess) except CountryDailyStatus.DoesNotExist: @@ -1424,7 +1036,10 @@ class SchoolSummaryAPIViewSet(BaseModelViewSet): ordering_field_names = ['-year', '-week', 'school__name'] apply_query_pagination = True - search_fields = ('=school__id', 'school__name', 'year', 'week',) + search_fields = ( + '=school__id', 'school__name', '=school__giga_id_school', '=school__external_id', + 'year', 'week', + ) filterset_fields = { 'school_id': ['exact', 'in'], 'year': ['exact', 'in'], @@ -1535,7 +1150,9 @@ class SchoolDailyConnectivitySummaryAPIViewSet(BaseModelViewSet): ordering_field_names = ['-date', 'school__name', ] apply_query_pagination = True - search_fields = ('=school__id', 'school__name',) + search_fields = ( + '=school__id', 'school__name', '=school__giga_id_school', '=school__external_id', + ) filterset_fields = { 'school_id': ['exact', 'in'], } @@ -1562,11 +1179,6 @@ def get_queryset(self): if len(countries_ids) > 0: school_data = School.objects.filter(country_id__in=countries_ids).values_list('id', flat=True).distinct() - # limit = len(school_data) - 1 - # if Country.objects.filter(code='BR').exists(): - # if Country.objects.get(code='BR').id in countries_ids: - # limit = 10000 - # print([school_data[i:i + 2] for i in range(0, len(school_data), 2)]) queryset = queryset.filter(school_id__in=school_data) return queryset @@ 
-1602,10 +1214,11 @@ def retrieve(self, request, pk): try: school_daily_status = SchoolDailyStatus.objects.get(id=pk) if school_daily_status: - serializer = statistics_serializers.SchoolDailyStatusUpdateRetriveSerializer(school_daily_status, - partial=True, - context={ - 'request': request}, ) + serializer = statistics_serializers.SchoolDailyStatusUpdateRetriveSerializer( + school_daily_status, + partial=True, + context={'request': request}, + ) return Response(serializer.data) return Response(status=rest_status.HTTP_404_NOT_FOUND, data=error_mess) except SchoolDailyStatus.DoesNotExist: @@ -1655,7 +1268,8 @@ def get_live_query(self, **kwargs): ELSE 'unknown' END AS field_status, CASE WHEN rt_status.rt_registered = True - AND EXTRACT(YEAR FROM CAST(rt_status.rt_registration_date AS DATE)) <= EXTRACT(YEAR FROM CAST(t.date AS DATE)) + AND EXTRACT(YEAR FROM CAST(rt_status.rt_registration_date AS DATE)) <= + EXTRACT(YEAR FROM CAST(t.date AS DATE)) THEN True ELSE False END as is_rt_connected FROM schools_school AS s INNER JOIN connection_statistics_schooldailystatus t ON s.id = t.school_id diff --git a/proco/connection_statistics/api_urls.py b/proco/connection_statistics/api_urls.py index 71893c2..4f8326a 100644 --- a/proco/connection_statistics/api_urls.py +++ b/proco/connection_statistics/api_urls.py @@ -15,6 +15,7 @@ path('coverage/', api.CoverageAPIView.as_view(), name='global-coverage-stat'), path('countrycoverage/', api.CoverageAPIView.as_view(), name='country-coverage-stat'), path('schoolcoverage/', api.SchoolCoverageStatsListAPIView.as_view(), name='school-coverage-stat'), + path( 'country//daily-stat/', api.CountryDailyStatsListAPIView.as_view(), @@ -26,38 +27,38 @@ 'get': 'list', 'post': 'create', 'delete': 'destroy', - }), name='list_or_create_destroy_countryweeklystatus'), + }), name='list-create-destroy-countryweeklystatus'), path('countryweeklystatus//', api.CountrySummaryAPIViewSet.as_view({ 'put': 'update', 'get': 'retrieve', - }), name='update_or_retrieve_countryweeklystatus'), + }), name='update-retrieve-countryweeklystatus'), path('countrydailystatus/', api.CountryDailyConnectivitySummaryAPIViewSet.as_view({ 'get': 'list', 'post': 'create', 'delete': 'destroy', - }), name='list_or_create_destroy_countrydailystatus'), + }), name='list-create-destroy-countrydailystatus'), path('countrydailystatus//', api.CountryDailyConnectivitySummaryAPIViewSet.as_view({ 'put': 'update', 'get': 'retrieve', - }), name='update_or_retrieve_countrydailystatus'), + }), name='update-retrieve-countrydailystatus'), path('schoolweeklystatus/', api.SchoolSummaryAPIViewSet.as_view({ 'get': 'list', 'post': 'create', 'delete': 'destroy', - }), name='list_or_create_destroy_schoolweeklystatus'), + }), name='list-create-destroy-schoolweeklystatus'), path('schoolweeklystatus//', api.SchoolSummaryAPIViewSet.as_view({ 'put': 'update', 'get': 'retrieve', - }), name='update_or_retrieve_schoolweeklystatus'), + }), name='update-retrieve-schoolweeklystatus'), path('schooldailystatus/', api.SchoolDailyConnectivitySummaryAPIViewSet.as_view({ 'get': 'list', 'post': 'create', 'delete': 'destroy', - }), name='list_or_create_destroy_schooldailystatus'), + }), name='list-create-destroy-schooldailystatus'), path('schooldailystatus//', api.SchoolDailyConnectivitySummaryAPIViewSet.as_view({ 'put': 'update', 'get': 'retrieve', - }), name='update_or_retrieve_schooldailystatus'), + }), name='update-retrieve-schooldailystatus'), ] diff --git a/proco/connection_statistics/migrations/0063_added_5g_choice_in_coverage_type.py 
b/proco/connection_statistics/migrations/0063_added_5g_choice_in_coverage_type.py
new file mode 100755
index 0000000..fefb143
--- /dev/null
+++ b/proco/connection_statistics/migrations/0063_added_5g_choice_in_coverage_type.py
@@ -0,0 +1,19 @@
+# Generated by Django 2.2.28 on 2024-06-14 09:01
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ('connection_statistics', '0062_updated_max_allowed_values_num_students_field'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='schoolweeklystatus',
+            name='coverage_type',
+            field=models.CharField(
+                choices=[('unknown', 'Unknown'), ('no', 'No'), ('2g', '2G'), ('3g', '3G'), ('4g', '4G'), ('5g', '5G')],
+                default='unknown', max_length=8),
+        ),
+    ]
diff --git a/proco/connection_statistics/models.py b/proco/connection_statistics/models.py
index 8069e86..ffcfd5b 100644
--- a/proco/connection_statistics/models.py
+++ b/proco/connection_statistics/models.py
@@ -1,7 +1,6 @@
 from django.core.validators import MaxValueValidator, MinValueValidator
 from django.db import models
 from django.db.models import Q
-# from proco.utils.models import ApproxQuerySet
 from django.db.models.constraints import UniqueConstraint
 from django.utils import timezone
 from django.utils.translation import ugettext as _
@@ -12,7 +11,6 @@
 from proco.connection_statistics.config import app_config as statistics_configs
 from proco.core import models as core_models
 from proco.core.managers import BaseManager
-from proco.core.models import CustomDateTimeField
 from proco.locations.models import Country
 from proco.schools.constants import statuses_schema
 from proco.schools.models import School
@@ -26,7 +24,8 @@ class ConnectivityStatistics(models.Model):
     connectivity_latency = models.FloatField(help_text=_('ms'), blank=True, null=True, default=None)

     connectivity_speed_probe = models.PositiveIntegerField(help_text=_('bps'), blank=True, null=True, default=None)
-    connectivity_upload_speed_probe = models.PositiveIntegerField(help_text=_('bps'), blank=True, null=True, default=None)
+    connectivity_upload_speed_probe = models.PositiveIntegerField(help_text=_('bps'),
+                                                                  blank=True, null=True, default=None)

     connectivity_latency_probe = models.FloatField(help_text=_('ms'), blank=True, null=True, default=None)

@@ -41,7 +40,7 @@ class ConnectivityStatistics(models.Model):
         default=statistics_configs.UNKNOWN_SOURCE,
     )

-    deleted = CustomDateTimeField(db_index=True, null=True, blank=True)
+    deleted = core_models.CustomDateTimeField(db_index=True, null=True, blank=True)

     class Meta:
         abstract = True
@@ -161,18 +160,20 @@ def delete(self, *args, **kwargs):


 class SchoolWeeklyStatus(ConnectivityStatistics, TimeStampedModel, models.Model):
-    # unable to use choives as should be (COVERAGE_TYPES.4g), because digit goes first
+    # unable to use choices as should be (COVERAGE_TYPES.4g), because digit goes first
     COVERAGE_UNKNOWN = 'unknown'
     COVERAGE_NO = 'no'
     COVERAGE_2G = '2g'
     COVERAGE_3G = '3g'
     COVERAGE_4G = '4g'
+    COVERAGE_5G = '5g'
     COVERAGE_TYPES = Choices(
         (COVERAGE_UNKNOWN, _('Unknown')),
         (COVERAGE_NO, _('No')),
         (COVERAGE_2G, _('2G')),
         (COVERAGE_3G, _('3G')),
         (COVERAGE_4G, _('4G')),
+        (COVERAGE_5G, _('5G')),
     )

     school = models.ForeignKey(School, related_name='weekly_status', on_delete=models.CASCADE)
@@ -307,7 +308,6 @@ class SchoolDailyStatus(ConnectivityStatistics, TimeStampedModel, models.Model):

     school = models.ForeignKey(School, related_name='daily_status', on_delete=models.CASCADE)
     date = models.DateField()
-    # objects = 
ApproxQuerySet.as_manager() objects = BaseManager() class Meta: @@ -339,7 +339,6 @@ def delete(self, *args, **kwargs): class RealTimeConnectivity(ConnectivityStatistics, TimeStampedModel, models.Model): school = models.ForeignKey(School, related_name='realtime_status', on_delete=models.CASCADE) - # objects = ApproxQuerySet.as_manager() objects = BaseManager() class Meta: diff --git a/proco/connection_statistics/serializers.py b/proco/connection_statistics/serializers.py index 537c367..d69703e 100644 --- a/proco/connection_statistics/serializers.py +++ b/proco/connection_statistics/serializers.py @@ -356,9 +356,6 @@ class Meta(CountryDailyStatus.Meta): model = CountryDailyStatus fields = ('id', 'country_name', 'date', 'connectivity_speed', 'connectivity_latency',) - # def get_country_name(self, instance): - # return instance.country.name - class DetailCountryDailyStatusSerializer(CountryDailyStatusSerializer): class Meta(CountryWeeklyStatus.Meta): @@ -417,9 +414,6 @@ class Meta(SchoolDailyStatus.Meta): model = SchoolDailyStatus fields = ('id', 'school_name', 'date', 'connectivity_speed', 'connectivity_latency',) - # def get_school_name(self, instance): - # return instance.school.name - class DetailSchoolDailyStatusSerializer(SchoolDailyStatusSerializer): class Meta(SchoolDailyStatus.Meta): diff --git a/proco/connection_statistics/tests/factories.py b/proco/connection_statistics/tests/factories.py index 4a98236..2f03cc2 100644 --- a/proco/connection_statistics/tests/factories.py +++ b/proco/connection_statistics/tests/factories.py @@ -1,9 +1,11 @@ +import datetime from datetime import date from factory import SubFactory from factory import django as django_factory from factory import fuzzy +from proco.connection_statistics.config import app_config as statistics_configs from proco.connection_statistics.models import ( CountryDailyStatus, CountryWeeklyStatus, @@ -17,8 +19,14 @@ class RealTimeConnectivityFactory(django_factory.DjangoModelFactory): school = SubFactory(SchoolFactory) + + created = fuzzy.FuzzyDateTime(datetime.datetime(year=1970, month=1, day=1, tzinfo=datetime.timezone.utc)) + connectivity_speed = fuzzy.FuzzyInteger(1, 1000000) - connectivity_latency = fuzzy.FuzzyInteger(1, 100) + connectivity_upload_speed = fuzzy.FuzzyInteger(1, 1000000) + connectivity_latency = fuzzy.FuzzyFloat(0.0, 1000.0) + + live_data_source = fuzzy.FuzzyChoice(dict(statistics_configs.LIVE_DATA_SOURCE_CHOICES).keys()) class Meta: model = RealTimeConnectivity @@ -27,8 +35,12 @@ class Meta: class CountryDailyStatusFactory(django_factory.DjangoModelFactory): country = SubFactory(CountryFactory) date = fuzzy.FuzzyDate(date(year=1970, month=1, day=1)) + connectivity_speed = fuzzy.FuzzyInteger(1, 1000000) - connectivity_latency = fuzzy.FuzzyInteger(1, 100) + connectivity_upload_speed = fuzzy.FuzzyInteger(1, 1000000) + connectivity_latency = fuzzy.FuzzyFloat(0.0, 1000.0) + + live_data_source = fuzzy.FuzzyChoice(dict(statistics_configs.LIVE_DATA_SOURCE_CHOICES).keys()) class Meta: model = CountryDailyStatus @@ -37,8 +49,12 @@ class Meta: class SchoolDailyStatusFactory(django_factory.DjangoModelFactory): school = SubFactory(SchoolFactory) date = fuzzy.FuzzyDate(date(year=1970, month=1, day=1)) + connectivity_speed = fuzzy.FuzzyInteger(1, 1000000) - connectivity_latency = fuzzy.FuzzyInteger(1, 100) + connectivity_upload_speed = fuzzy.FuzzyInteger(1, 1000000) + connectivity_latency = fuzzy.FuzzyFloat(0.0, 1000.0) + + live_data_source = fuzzy.FuzzyChoice(dict(statistics_configs.LIVE_DATA_SOURCE_CHOICES).keys()) class 
Meta: model = SchoolDailyStatus @@ -53,7 +69,11 @@ class CountryWeeklyStatusFactory(django_factory.DjangoModelFactory): schools_connectivity_no = fuzzy.FuzzyInteger(0, 1000) schools_connectivity_moderate = fuzzy.FuzzyInteger(0, 1000) schools_connectivity_good = fuzzy.FuzzyInteger(0, 1000) + connectivity_speed = fuzzy.FuzzyInteger(1, 1000000) + connectivity_upload_speed = fuzzy.FuzzyInteger(1, 1000000) + connectivity_latency = fuzzy.FuzzyFloat(0.0, 1000.0) + integration_status = fuzzy.FuzzyChoice(dict(CountryWeeklyStatus.INTEGRATION_STATUS_TYPES).keys()) avg_distance_school = fuzzy.FuzzyFloat(0.0, 1000.0) schools_coverage_good = fuzzy.FuzzyInteger(0, 1000) @@ -72,7 +92,8 @@ class SchoolWeeklyStatusFactory(django_factory.DjangoModelFactory): connectivity_type = fuzzy.FuzzyText(length=64) connectivity_speed = fuzzy.FuzzyInteger(1, 1000000) - connectivity_latency = fuzzy.FuzzyInteger(1, 100) + connectivity_upload_speed = fuzzy.FuzzyInteger(1, 1000000) + connectivity_latency = fuzzy.FuzzyFloat(0.0, 1000.0) class Meta: model = SchoolWeeklyStatus diff --git a/proco/connection_statistics/tests/test_aggregates.py b/proco/connection_statistics/tests/test_aggregates.py index ef7cb24..85b678b 100644 --- a/proco/connection_statistics/tests/test_aggregates.py +++ b/proco/connection_statistics/tests/test_aggregates.py @@ -28,15 +28,18 @@ class AggregateConnectivityDataTestCase(TestCase): - databases = ['default', 'read_only_database'] + databases = ['default', ] + @classmethod def setUpTestData(cls): - # cls.databases = ['default'] - # cls.databases = ['default', 'read_only_database'] cls.country = CountryFactory() cls.school = SchoolFactory(country=cls.country) - RealTimeConnectivityFactory(school=cls.school, connectivity_speed=4000000) - RealTimeConnectivityFactory(school=cls.school, connectivity_speed=6000000) + cls.today_datetime = datetime.now(tz=timezone.utc) + + RealTimeConnectivityFactory(school=cls.school, connectivity_speed=4000000, created=cls.today_datetime, + live_data_source='DAILY_CHECK_APP_MLAB') + RealTimeConnectivityFactory(school=cls.school, connectivity_speed=6000000, created=cls.today_datetime, + live_data_source='DAILY_CHECK_APP_MLAB') def test_aggregate_real_time_data_to_school_daily_status(self): aggregate_real_time_data_to_school_daily_status(self.country, timezone.now().date()) @@ -50,28 +53,33 @@ def test_aggregate_real_time_data_to_country_daily_status(self): self.assertEqual(CountryDailyStatus.objects.first().connectivity_speed, 5000000) def test_aggregate_real_time_yesterday_data(self): - yesterday_status = SchoolDailyStatusFactory(school=self.school, date=timezone.now().date() - timedelta(days=1)) + yesterday = timezone.now() - timedelta(days=1) + yesterday_status = SchoolDailyStatusFactory(school=self.school, date=yesterday.date(), + live_data_source='DAILY_CHECK_APP_MLAB') RealTimeConnectivityFactory( - school=self.school, connectivity_speed=3000000, created=timezone.now() - timedelta(days=1), + school=self.school, connectivity_speed=3000000, created=yesterday, + live_data_source='DAILY_CHECK_APP_MLAB' ) - finalize_previous_day_data(None, self.country.id, timezone.now().date()) + finalize_previous_day_data(None, self.country.id, yesterday.date()) yesterday_status.refresh_from_db() - # self.assertEqual(yesterday_status.connectivity_speed, 3000000) - # self.assertEqual(self.country.daily_status.get(date=yesterday_status.date).connectivity_speed, 3000000) + self.assertEqual(yesterday_status.connectivity_speed, 3000000) + 
self.assertEqual(self.country.daily_status.get(date=yesterday_status.date).connectivity_speed, 3000000) def test_aggregate_school_daily_to_country_daily(self): today = datetime.now().date() - SchoolDailyStatusFactory(school__country=self.country, connectivity_speed=4000000, date=today) - SchoolDailyStatusFactory(school__country=self.country, connectivity_speed=6000000, date=today) + SchoolDailyStatusFactory(school__country=self.country, connectivity_speed=4000000, date=today, + live_data_source='DAILY_CHECK_APP_MLAB') + SchoolDailyStatusFactory(school__country=self.country, connectivity_speed=6000000, date=today, + live_data_source='DAILY_CHECK_APP_MLAB') aggregate_school_daily_to_country_daily(self.country, timezone.now().date()) self.assertEqual(CountryDailyStatus.objects.get(country=self.country, date=today).connectivity_speed, 5000000) def test_aggregate_country_daily_status_to_country_weekly_status(self): today = datetime.now().date() - CountryDailyStatusFactory(country=self.country, date=today) + CountryDailyStatusFactory(country=self.country, date=today, live_data_source='DAILY_CHECK_APP_MLAB') SchoolWeeklyStatusFactory( school__country=self.country, connectivity=True, connectivity_speed=4000000, @@ -98,23 +106,29 @@ def test_aggregate_country_daily_status_to_country_weekly_status(self): CountryWeeklyStatus.COVERAGE_TYPES_AVAILABILITY.coverage_availability) def test_aggregate_school_daily_status_to_school_weekly_status(self): - today = datetime.now().date() - SchoolDailyStatusFactory(school=self.school, connectivity_speed=4000000, date=today - timedelta(days=1)) - SchoolDailyStatusFactory(school=self.school, connectivity_speed=6000000, date=today) + date = datetime.now().date() - timedelta(days=6) + monday_date = date - timedelta(days=date.weekday()) + tuesday_date = monday_date + timedelta(days=1) + + SchoolDailyStatusFactory(school=self.school, connectivity_speed=4000000, date=monday_date, + live_data_source='DAILY_CHECK_APP_MLAB') + SchoolDailyStatusFactory(school=self.school, connectivity_speed=6000000, date=tuesday_date, + live_data_source='DAILY_CHECK_APP_MLAB') self.school.last_weekly_status = None self.school.save() - aggregate_school_daily_status_to_school_weekly_status(self.country, today) + aggregate_school_daily_status_to_school_weekly_status(self.country, tuesday_date) self.school.refresh_from_db() self.assertNotEqual(self.school.last_weekly_status, None) self.assertEqual(SchoolWeeklyStatus.objects.count(), 1) - # self.assertEqual(SchoolWeeklyStatus.objects.last().connectivity_speed, 6000000) + self.assertEqual(SchoolWeeklyStatus.objects.last().connectivity_speed, 5000000) self.assertEqual(SchoolWeeklyStatus.objects.last().connectivity, True) def test_aggregate_school_daily_status_to_school_weekly_status_connectivity_unknown(self): # daily status is too old, so it wouldn't be involved into country calculations today = datetime.now().date() - SchoolDailyStatusFactory(school=self.school, connectivity_speed=None, date=today - timedelta(days=8)) + SchoolDailyStatusFactory(school=self.school, connectivity_speed=None, date=today - timedelta(days=8), + live_data_source='DAILY_CHECK_APP_MLAB') SchoolWeeklyStatusFactory( school=self.school, week=get_current_week(), year=get_current_year(), connectivity=None, ) @@ -125,7 +139,8 @@ def test_aggregate_school_daily_status_to_school_weekly_status_connectivity_unkn def test_aggregate_school_daily_status_to_school_weekly_status_connectivity_no(self): today = datetime.now().date() - SchoolDailyStatusFactory(school=self.school, 
connectivity_speed=None, date=today - timedelta(days=8)) + SchoolDailyStatusFactory(school=self.school, connectivity_speed=None, date=today - timedelta(days=8), + live_data_source='DAILY_CHECK_APP_MLAB') SchoolWeeklyStatusFactory( school=self.school, week=get_current_week(), year=get_current_year(), connectivity=False, ) diff --git a/proco/connection_statistics/tests/test_api.py b/proco/connection_statistics/tests/test_api.py index d803086..57fb0b2 100755 --- a/proco/connection_statistics/tests/test_api.py +++ b/proco/connection_statistics/tests/test_api.py @@ -2,13 +2,14 @@ from datetime import datetime, timedelta from django.core.cache import cache +from django.core.management import call_command from django.test import TestCase from django.urls import resolve, reverse from isoweek import Week from rest_framework import exceptions as rest_exceptions from rest_framework import status -from rest_framework.test import APITestCase +from proco.accounts import models as accounts_models from proco.connection_statistics.models import CountryWeeklyStatus from proco.connection_statistics.tests.factories import ( CountryDailyStatusFactory, @@ -16,25 +17,50 @@ SchoolDailyStatusFactory, SchoolWeeklyStatusFactory, ) -from proco.custom_auth import models as auth_models -from proco.locations.tests.factories import CountryFactory +from proco.custom_auth.tests import test_utils as test_utilities +from proco.locations.tests.factories import CountryFactory, Admin1Factory from proco.schools.tests.factories import SchoolFactory from proco.utils.dates import format_date, get_first_date_of_month, get_last_date_of_month from proco.utils.tests import TestAPIViewSetMixin +def statistics_url(url_params, query_param, view_name='global-stat'): + url = reverse('connection_statistics:' + view_name, args=url_params) + view = resolve(url) + view_info = view.func + + if len(query_param) > 0: + query_params = '?' + '&'.join([key + '=' + str(val) for key, val in query_param.items()]) + url += query_params + return url, view, view_info + + +def accounts_url(url_params, query_param, view_name='list-or-create-api-keys'): + url = reverse('accounts:' + view_name, args=url_params) + view = resolve(url) + view_info = view.func + + if len(query_param) > 0: + query_params = '?' 
+ '&'.join([key + '=' + str(val) for key, val in query_param.items()]) + url += query_params + return url, view, view_info + + class GlobalStatisticsApiTestCase(TestAPIViewSetMixin, TestCase): - databases = ['default', 'read_only_database'] + databases = ['default', ] @classmethod def setUpTestData(cls): cls.country_one = CountryFactory() + cls.school_one = SchoolFactory(country=cls.country_one, location__country=cls.country_one, geopoint=None) cls.school_two = SchoolFactory(country=cls.country_one, location__country=cls.country_one) + SchoolWeeklyStatusFactory(school=cls.school_one, connectivity=True) SchoolWeeklyStatusFactory(school=cls.school_two, connectivity=False) CountryWeeklyStatusFactory(country=cls.country_one, integration_status=CountryWeeklyStatus.REALTIME_MAPPED, year=datetime.now().year + 1, schools_connectivity_no=1) + cls.cws = CountryWeeklyStatusFactory(integration_status=CountryWeeklyStatus.STATIC_MAPPED, schools_connectivity_no=0, year=datetime.now().year + 2) @@ -44,9 +70,11 @@ def setUp(self): super().setUp() def test_global_stats(self): + url, _, view = statistics_url((), {}) + response = self.forced_auth_req( 'get', - reverse('connection_statistics:global-stat'), + url, ) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -56,58 +84,13 @@ def test_global_stats(self): ['connected', 'not_connected', 'unknown']) def test_global_stats_queries(self): - with self.assertNumQueries(7): + url, _, view = statistics_url((), {}) + + with self.assertNumQueries(2): self.forced_auth_req( 'get', - reverse('connection_statistics:global-stat'), + url, ) - # TODO: Test only when caching is enabled - # with self.assertNumQueries(0): - # self.forced_auth_req( - # 'get', - # reverse('connection_statistics:global-stat'), - # ) - - -# -# class CountryWeekStatsApiTestCase(TestAPIViewSetMixin, TestCase): -# @classmethod -# def setUpTestData(cls): -# cls.country_one = CountryFactory() -# cls.country_two = CountryFactory() -# cls.stat_one = CountryWeeklyStatusFactory(country=cls.country_one) -# cls.stat_two = CountryWeeklyStatusFactory(country=cls.country_two) -# -# def test_country_weekly_stats(self): -# response = self.forced_auth_req( -# 'get', -# reverse('connection_statistics:country-weekly-stat', kwargs={ -# 'country_code': self.stat_one.country.code.lower(), -# 'year': self.stat_one.year, -# 'week': self.stat_one.week, -# }), -# ) -# self.assertEqual(response.status_code, status.HTTP_200_OK) -# self.assertEqual(response.data['schools_total'], self.stat_one.schools_total) -# self.assertEqual(response.data['avg_distance_school'], self.stat_one.avg_distance_school) -# self.assertEqual(response.data['schools_connected'], self.stat_one.schools_connected) -# self.assertEqual(response.data['schools_connectivity_unknown'], self.stat_one.schools_connectivity_unknown) -# self.assertEqual(response.data['schools_connectivity_moderate'], self.stat_one.schools_connectivity_moderate) -# self.assertEqual(response.data['schools_connectivity_good'], self.stat_one.schools_connectivity_good) -# self.assertEqual(response.data['schools_connectivity_no'], self.stat_one.schools_connectivity_no) -# self.assertEqual(response.data['integration_status'], self.stat_one.integration_status) -# -# def test_country_weekly_stats_queries(self): -# code = self.stat_one.country.code.lower() -# with self.assertNumQueries(2): -# self.forced_auth_req( -# 'get', -# reverse('connection_statistics:country-weekly-stat', kwargs={ -# 'country_code': code, -# 'year': self.stat_one.year, -# 'week': self.stat_one.week, -# 
}), -# ) class CountryDailyStatsApiTestCase(TestAPIViewSetMixin, TestCase): @@ -122,34 +105,54 @@ def setUpTestData(cls): CountryDailyStatusFactory(country=cls.country_two) - def test_country_weekly_stats(self): - response = self.forced_auth_req( - 'get', - reverse('connection_statistics:country-daily-stat', kwargs={ - 'country_code': self.country_one.code.lower(), - }), - ) + def test_country_daily_stats(self): + url, _, view = statistics_url((self.country_one.code.lower(),), {}, view_name='country-daily-stat') + response = self.forced_auth_req('get', url) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data['count'], self.country_one_stats_number) - response = self.forced_auth_req( - 'get', - reverse('connection_statistics:country-daily-stat', kwargs={ - 'country_code': self.country_two.code.lower(), - }), - ) + url, _, view = statistics_url((self.country_two.code.lower(),), {}, view_name='country-daily-stat') + response = self.forced_auth_req('get', url) + self.assertEqual(response.data['count'], 1) - def test_country_weekly_stats_queries(self): - code = self.country_one.code.lower() + def test_country_daily_stats_queries(self): + url, _, view = statistics_url((self.country_one.code.lower(),), {}, view_name='country-daily-stat') with self.assertNumQueries(2): - self.forced_auth_req( - 'get', - reverse('connection_statistics:country-daily-stat', kwargs={ - 'country_code': code, - }), - ) + self.forced_auth_req('get', url) + + +class SchoolDailyStatsApiTestCase(TestAPIViewSetMixin, TestCase): + @classmethod + def setUpTestData(cls): + cls.country = CountryFactory() + + cls.school_one = SchoolFactory() + cls.school_two = SchoolFactory() + + cls.school_one_stats_number = random.SystemRandom().randint(a=5, b=25) + for _i in range(cls.school_one_stats_number): + SchoolDailyStatusFactory(school=cls.school_one) + + SchoolDailyStatusFactory(school=cls.school_two) + + def test_school_daily_stats(self): + url, _, view = statistics_url((self.school_one.id,), {}, view_name='school-daily-stat') + response = self.forced_auth_req('get', url) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data['count'], self.school_one_stats_number) + + url, _, view = statistics_url((self.school_two.id,), {}, view_name='school-daily-stat') + response = self.forced_auth_req('get', url) + + self.assertEqual(response.data['count'], 1) + + def test_school_daily_stats_queries(self): + url, _, view = statistics_url((self.school_one.id,), {}, view_name='school-daily-stat') + with self.assertNumQueries(1): + self.forced_auth_req('get', url) class SchoolCoverageStatApiTestCase(TestAPIViewSetMixin, TestCase): @@ -157,9 +160,11 @@ class SchoolCoverageStatApiTestCase(TestAPIViewSetMixin, TestCase): @classmethod def setUpTestData(cls): cls.country = CountryFactory() + cls.school_one = SchoolFactory(country=cls.country, location__country=cls.country) cls.school_two = SchoolFactory(country=cls.country, location__country=cls.country) cls.school_three = SchoolFactory(country=cls.country, location__country=cls.country) + cls.school_weekly_one = SchoolWeeklyStatusFactory( school=cls.school_one, connectivity=True, connectivity_speed=3 * (10 ** 6), @@ -175,6 +180,7 @@ def setUpTestData(cls): connectivity=None, connectivity_speed=None, coverage_availability=None, coverage_type='unknown', ) + cls.school_one.last_weekly_status = cls.school_weekly_one cls.school_one.save() cls.school_two.last_weekly_status = cls.school_weekly_two @@ -186,53 +192,41 @@ def 
setUp(self): cache.clear() super().setUp() - def list_school_coverage_url(self, url_params, query_param): - view_name = 'connection_statistics:school-coverage-stat' - url = reverse(view_name, args=url_params) - view_info = resolve(url).func - - if len(query_param) > 0: - query_params = '?' + '&'.join([key + '=' + str(val) for key, val in query_param.items()]) - url += query_params - return url, view_info - def test_school_coverage_stat_school_list(self): - url, view = self.list_school_coverage_url((), { + url, _, view = statistics_url((), { 'country_id': self.country.id, 'school_ids': ','.join([str(self.school_one.id), str(self.school_two.id), str(self.school_three.id)]), - }) + }, view_name='school-coverage-stat') - response = self.forced_auth_req('get', url, user=None, view=view) + response = self.forced_auth_req('get', url, view=view) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(len(response.data), 3) def test_school_coverage_stat_without_school_id(self): - url, view = self.list_school_coverage_url((), { - 'country_id': self.country.id, - }) + url, _, view = statistics_url((), {'country_id': self.country.id}, view_name='school-coverage-stat') - response = self.forced_auth_req('get', url, user=None, view=view) + response = self.forced_auth_req('get', url, view=view) self.assertEqual(response.status_code, status.HTTP_200_OK) # TODO: Change it once that hard coded school id is removed self.assertEqual(len(response.data), 0) def test_school_coverage_stat_without_country_id(self): - url, view = self.list_school_coverage_url((), {}) + url, _, view = statistics_url((), {}, view_name='school-coverage-stat') - response = self.forced_auth_req('get', url, user=None, view=view) + response = self.forced_auth_req('get', url, view=view) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(len(response.data), 0) def test_school_coverage_stat_for_one_school(self): - url, view = self.list_school_coverage_url((), { + url, _, view = statistics_url((), { 'country_id': self.country.id, 'school_ids': str(self.school_one.id), - }) + }, view_name='school-coverage-stat') - response = self.forced_auth_req('get', url, user=None, view=view) + response = self.forced_auth_req('get', url, view=view) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(len(response.data), 1) @@ -247,12 +241,12 @@ def test_school_coverage_stat_for_one_school(self): self.assertEqual(school_data['country_name'], self.country.name) def test_school_coverage_stat_for_one_school_statistics(self): - url, view = self.list_school_coverage_url((), { + url, _, view = statistics_url((), { 'country_id': self.country.id, 'school_ids': str(self.school_one.id), - }) + }, view_name='school-coverage-stat') - response = self.forced_auth_req('get', url, user=None, view=view) + response = self.forced_auth_req('get', url, view=view) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(len(response.data), 1) @@ -266,12 +260,12 @@ def test_school_coverage_stat_for_one_school_statistics(self): self.assertEqual(school_statistics_data['connectivity_speed'], round(3 * (10 ** 6) / 1000000, 2)) def test_school_coverage_stat_for_coverage_type_choices(self): - url, view = self.list_school_coverage_url((), { + url, _, view = statistics_url((), { 'country_id': self.country.id, 'school_ids': ','.join([str(self.school_one.id), str(self.school_two.id), str(self.school_three.id)]), - }) + }, view_name='school-coverage-stat') - response = self.forced_auth_req('get', url, 
user=None, view=view) + response = self.forced_auth_req('get', url, view=view) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -293,12 +287,12 @@ def test_school_coverage_stat_for_one_school_when_school_weekly_status_not_avail """ school_four = SchoolFactory(country=self.country, location__country=self.country) - url, view = self.list_school_coverage_url((), { + url, _, view = statistics_url((), { 'country_id': self.country.id, 'school_ids': school_four.id, - }) + }, view_name='school-coverage-stat') - response = self.forced_auth_req('get', url, user=None, view=view) + response = self.forced_auth_req('get', url, view=view) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(len(response.data), 1) @@ -314,12 +308,12 @@ def test_school_coverage_stat_for_connectivity_status(self): self.country.last_weekly_status.connectivity_availability = connectivity_availability self.country.last_weekly_status.save() - url, view = self.list_school_coverage_url((), { + url, _, view = statistics_url((), { 'country_id': self.country.id, 'school_ids': ','.join([str(self.school_one.id), str(self.school_two.id), str(self.school_three.id)]), - }) + }, view_name='school-coverage-stat') - response = self.forced_auth_req('get', url, user=None, view=view) + response = self.forced_auth_req('get', url, view=view) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data[0]['statistics']['connectivity_status'], 'unknown') @@ -331,12 +325,12 @@ def test_school_coverage_stat_for_connectivity_status_when_connectivity_availabi self.country.last_weekly_status.connectivity_availability = connectivity_availability self.country.last_weekly_status.save() - url, view = self.list_school_coverage_url((), { + url, _, view = statistics_url((), { 'country_id': self.country.id, 'school_ids': ','.join([str(self.school_one.id), str(self.school_two.id), str(self.school_three.id)]), - }) + }, view_name='school-coverage-stat') - response = self.forced_auth_req('get', url, user=None, view=view) + response = self.forced_auth_req('get', url, view=view) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data[0]['statistics']['connectivity_status'], 'unknown') @@ -347,12 +341,12 @@ def test_school_coverage_stat_for_connectivity_status_when_country_weekly_status self.country.last_weekly_status = None self.country.save() - url, view = self.list_school_coverage_url((), { + url, _, view = statistics_url((), { 'country_id': self.country.id, 'school_ids': ','.join([str(self.school_one.id), str(self.school_two.id), str(self.school_three.id)]), - }) + }, view_name='school-coverage-stat') - response = self.forced_auth_req('get', url, user=None, view=view) + response = self.forced_auth_req('get', url, view=view) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -361,15 +355,18 @@ def test_school_coverage_stat_for_connectivity_status_when_country_weekly_status self.assertEqual(response.data[2]['statistics']['connectivity_status'], 'unknown') -class SchoolConnectivityStatApiTestCase(TestAPIViewSetMixin, TestCase): +class ConnectivityStatApiTestCase(TestAPIViewSetMixin, TestCase): @classmethod def setUpTestData(cls): cls.country = CountryFactory() + cls.country_two = CountryFactory() - cls.school_one = SchoolFactory(country=cls.country, location__country=cls.country) - cls.school_two = SchoolFactory(country=cls.country, location__country=cls.country) - cls.school_three = SchoolFactory(country=cls.country, location__country=cls.country) + 
cls.admin1_one = Admin1Factory(country=cls.country, layer_name='adm1') + + cls.school_one = SchoolFactory(country=cls.country, location__country=cls.country, admin1=cls.admin1_one) + cls.school_two = SchoolFactory(country=cls.country, location__country=cls.country, admin1=cls.admin1_one) + cls.school_three = SchoolFactory(country=cls.country, location__country=cls.country, admin1=cls.admin1_one) cls.school_weekly_one = SchoolWeeklyStatusFactory( school=cls.school_one, @@ -401,19 +398,9 @@ def setUp(self): cache.clear() super().setUp() - def list_school_connectivity_stat_url(self, url_params, query_param): - view_name = 'connection_statistics:school-connectivity-stat' - url = reverse(view_name, args=url_params) - view_info = resolve(url).func - - if len(query_param) > 0: - query_params = '?' + '&'.join([key + '=' + str(val) for key, val in query_param.items()]) - url += query_params - return url, view_info - - def test_school_download_connectivity_stat_school_list(self): + def test_country_download_connectivity_stat(self): """ - test_school_download_connectivity_stat_school_list + test_country_download_connectivity_stat Positive test case for weekly data. Expected: HTTP_200_OK - List of data for all 3 schools @@ -421,66 +408,86 @@ def test_school_download_connectivity_stat_school_list(self): today = datetime.now().date() date_7_days_back = today - timedelta(days=6) - url, view = self.list_school_connectivity_stat_url((), { + url, _, view = statistics_url((), { 'country_id': self.country.id, - 'school_ids': ','.join([str(self.school_one.id), str(self.school_two.id), str(self.school_three.id)]), 'start_date': format_date(date_7_days_back), 'end_date': format_date(today), 'is_weekly': 'true', - }) + }, view_name='country-connectivity-stat') - response = self.forced_auth_req('get', url, user=None, view=view) + response = self.forced_auth_req('get', url, view=view) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(len(response.data), 3) - def test_school_download_connectivity_stat_without_school_id(self): + response_data = response.data + self.assertIn('live_avg', response_data) + self.assertIn('no_of_schools_measure', response_data) + self.assertIn('school_with_realtime_data', response_data) + self.assertIn('is_data_synced', response_data) + self.assertIn('graph_data', response_data) + self.assertIn('real_time_connected_schools', response_data) + + def test_admin1_download_connectivity_stat_monthly(self): """ - test_school_download_connectivity_stat_without_school_id - Negative test case for weekly data without passing the school id in url query parameters. + test_admin1_download_connectivity_stat_monthly + Positive test case for monthly data. - Expected: HTTP_200_OK - As of now it will return no data as we have hard coded the school id as 34554 - in API View. But with changes in API it will return the list of schools from the country. 
+ Expected: HTTP_200_OK - List of data for all 3 schools """ today = datetime.now().date() - date_7_days_back = today - timedelta(days=6) + start_date = get_first_date_of_month(today.year, today.month) + end_date = get_last_date_of_month(today.year, today.month) - url, view = self.list_school_connectivity_stat_url((), { + url, _, view = statistics_url((), { 'country_id': self.country.id, - 'start_date': format_date(date_7_days_back), - 'end_date': format_date(today), - 'is_weekly': 'true', - }) + 'admin1_id': self.admin1_one.id, + 'start_date': format_date(start_date), + 'end_date': format_date(end_date), + 'is_weekly': 'false', + }, view_name='global-connectivity-stat') - response = self.forced_auth_req('get', url, user=None, view=view) + response = self.forced_auth_req('get', url, view=view) self.assertEqual(response.status_code, status.HTTP_200_OK) - # TODO: Change it once that hard coded school id is removed - self.assertEqual(len(response.data), 0) - def test_school_download_connectivity_stat_without_country_id(self): + response_data = response.data + self.assertIn('live_avg', response_data) + self.assertIn('no_of_schools_measure', response_data) + self.assertIn('school_with_realtime_data', response_data) + self.assertIn('is_data_synced', response_data) + self.assertIn('graph_data', response_data) + self.assertIn('real_time_connected_schools', response_data) + + def test_country_download_connectivity_stat_without_country_id(self): """ - test_school_download_connectivity_stat_without_country_id - Negative test case for weekly data without passing the country id in url query parameters. + test_school_download_connectivity_stat_without_school_id + Negative test case for weekly data without passing the school id in url query parameters. - Expected: HTTP_404_NOT_FOUND - Country ID is a mandatory field. + Expected: HTTP_200_OK - As of now it will return no data as we have hard coded the school id as 34554 + in API View. But with changes in API it will return the list of schools from the country. """ today = datetime.now().date() date_7_days_back = today - timedelta(days=6) - url, view = self.list_school_connectivity_stat_url((), { - 'school_ids': ','.join([str(self.school_one.id), str(self.school_two.id), str(self.school_three.id)]), + url, _, view = statistics_url((), { 'start_date': format_date(date_7_days_back), 'end_date': format_date(today), 'is_weekly': 'true', - }) + }, view_name='country-connectivity-stat') - response = self.forced_auth_req('get', url, user=None, view=view) + response = self.forced_auth_req('get', url, view=view) - self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(response.data.get('detail').code, rest_exceptions.NotFound.default_code) + self.assertEqual(response.status_code, status.HTTP_200_OK) - def test_school_download_connectivity_stat_for_one_school_without_daily(self): + response_data = response.data + self.assertIn('live_avg', response_data) + self.assertIn('no_of_schools_measure', response_data) + self.assertIn('school_with_realtime_data', response_data) + self.assertIn('is_data_synced', response_data) + self.assertIn('graph_data', response_data) + self.assertIn('real_time_connected_schools', response_data) + + def test_country_download_connectivity_stat_for_one_country_without_daily(self): """ test_school_download_connectivity_stat_for_one_school_without_daily Positive test case for weekly data for 1 school and School Daily records are also not available. 
@@ -492,33 +499,19 @@ def test_school_download_connectivity_stat_for_one_school_without_daily(self): today = datetime.now().date() date_7_days_back = today - timedelta(days=6) - url, view = self.list_school_connectivity_stat_url((), { - 'country_id': self.country.id, - 'school_ids': self.school_one.id, + url, _, view = statistics_url((), { + 'country_id': self.country_two.id, 'start_date': format_date(date_7_days_back), 'end_date': format_date(today), 'is_weekly': 'true', - }) + }, view_name='country-connectivity-stat') - response = self.forced_auth_req('get', url, user=None, view=view) + response = self.forced_auth_req('get', url, view=view) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(len(response.data), 1) - - school_data = response.data[0] - - self.assertIn('name', school_data) - self.assertIn('connectivity_speed', school_data['statistics']) - self.assertIn('connectivity_status', school_data['statistics']) - - self.assertEqual(school_data['name'], self.school_one.name) - # INFO: As we have not created any School daily data, it will return 0 for download - self.assertIsNone(school_data['statistics']['connectivity_speed']) - self.assertEqual(school_data['statistics']['connectivity_status'], 'unknown') - - self.assertEqual(len(school_data['graph_data']), 7) + self.assertEqual(response.data['no_of_schools_measure'], 0) - def test_school_download_connectivity_stat_for_one_school_statistics(self): + def test_country_download_connectivity_stat_for_one_country_statistics(self): """ test_school_download_connectivity_stat_for_one_school_statistics Positive test case to test the statistics JSON for weekly data for 1 school and @@ -533,71 +526,19 @@ def test_school_download_connectivity_stat_for_one_school_statistics(self): today = datetime.now().date() date_7_days_back = today - timedelta(days=6) - url, view = self.list_school_connectivity_stat_url((), { - 'country_id': self.country.id, - 'school_ids': self.school_one.id, - 'start_date': format_date(date_7_days_back), - 'end_date': format_date(today), - 'is_weekly': 'true', - }) - - response = self.forced_auth_req('get', url, user=None, view=view) - - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(len(response.data), 1) - - school_data = response.data[0] - self.assertIn('statistics', school_data) - self.assertIsNone(school_data['statistics']['connectivity_speed']) - - self.assertEqual(school_data['statistics']['connectivity_status'], 'unknown') - self.assertIsNone(school_data['statistics']['connectivity_speed']) - - def test_school_download_connectivity_stat_for_connectivity_status_choices(self): - """ - test_school_download_connectivity_stat_for_connectivity_status_choices - Positive test case to test the connectivity_status field for weekly data for 3 schools and - School Daily records only present for school_two. - - Expected: HTTP_200_OK - 3 school data with filled statistics json and graph_data json. - 1. school_one.connectivity_status == connected, as country.last_weekly_status.connectivity_availability == - connectivity and school_one.last_weekly_status.connectivity=True - - 2. school_two.connectivity_status == not_connected, as country.last_weekly_status.connectivity_availability == - connectivity and school_one.last_weekly_status.connectivity=False - - 3. 
school_three.connectivity_status == unknown, as country.last_weekly_status.connectivity_availability == - connectivity and school_one.last_weekly_status.connectivity=None - """ - - connectivity_availability = CountryWeeklyStatus.CONNECTIVITY_TYPES_AVAILABILITY.connectivity - self.country.last_weekly_status.connectivity_availability = connectivity_availability - self.country.last_weekly_status.save() - - today = datetime.now().date() - date_7_days_back = today - timedelta(days=6) - - url, view = self.list_school_connectivity_stat_url((), { + url, _, view = statistics_url((), { 'country_id': self.country.id, - 'school_ids': ','.join([str(self.school_one.id), str(self.school_two.id), str(self.school_three.id)]), 'start_date': format_date(date_7_days_back), 'end_date': format_date(today), 'is_weekly': 'true', - }) - - response = self.forced_auth_req('get', url, user=None, view=view) + }, view_name='country-connectivity-stat') - self.assertEqual(response.status_code, status.HTTP_200_OK) - - self.assertIn('connectivity_status', response.data[0]) - self.assertIn('connectivity_status', response.data[1]) - self.assertIn('connectivity_status', response.data[2]) + with self.assertNumQueries(4): + response = self.forced_auth_req('get', url, view=view) - # self.assertEqual(response.data[0]['connectivity_status'], 'connected') - # self.assertEqual(response.data[1]['connectivity_status'], 'not_connected') - # self.assertEqual(response.data[2]['connectivity_status'], 'unknown') + self.assertEqual(response.status_code, status.HTTP_200_OK) - def test_school_download_connectivity_stat_for_one_school_graph_data_when_school_daily_status_not_available(self): + def test_country_download_connectivity_stat_for_one_school_graph_data_when_school_daily_status_not_available(self): """ test_school_download_connectivity_stat_for_one_school_graph_data_when_school_daily_status_not_available Positive test case to test the graph_data JSON for weekly data for 1 school when @@ -609,337 +550,1363 @@ def test_school_download_connectivity_stat_for_one_school_graph_data_when_school today = datetime.now().date() date_7_days_back = today - timedelta(days=6) - url, view = self.list_school_connectivity_stat_url((), { + url, _, view = statistics_url((), { 'country_id': self.country.id, - 'school_ids': self.school_one.id, 'start_date': format_date(date_7_days_back), 'end_date': format_date(today), 'is_weekly': 'true', - }) + }, view_name='country-connectivity-stat') - response = self.forced_auth_req('get', url, user=None, view=view) + response = self.forced_auth_req('get', url, view=view) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(len(response.data), 1) - school_data = response.data[0] - self.assertIn('graph_data', school_data) - self.assertEqual(len(school_data['graph_data']), 7) - self.assertIsNone(school_data['statistics']['connectivity_speed']) + response_data = response.data + self.assertIn('graph_data', response_data) + self.assertEqual(len(response_data['graph_data']), 7) - graph_data = school_data['graph_data'] + graph_data = response_data['graph_data'] for data in graph_data: self.assertIsNone(data['value']) - def test_school_download_connectivity_stat_for_one_school_graph_data_when_school_daily_status_available(self): + def test_country_download_connectivity_stat_for_global_benchmark(self): """ - test_school_download_connectivity_stat_for_one_school_graph_data_when_school_daily_status_available - Positive test case to test the graph_data JSON for weekly data for 1 school when - School Daily 
records are available. + test_country_download_connectivity_stat + Positive test case for country weekly data. - Expected: HTTP_200_OK - 1 school data with filled statistics json and filled graph_data json. - Connectivity_speed == 4, as for download speed is calculated based on graph data aggregation. + Expected: HTTP_200_OK - List of data for given country id """ - today = datetime.now().date() - date_7_days_back = today - timedelta(days=6) + date = Week(self.school_weekly_one.year, self.school_weekly_one.week).monday() + start_date = date - timedelta(days=1) + end_date = start_date + timedelta(days=6) - url, view = self.list_school_connectivity_stat_url((), { + url, _, view = statistics_url((), { 'country_id': self.country.id, - 'school_ids': self.school_two.id, - 'start_date': format_date(date_7_days_back), - 'end_date': format_date(today), + 'start_date': format_date(start_date), + 'end_date': format_date(end_date), 'is_weekly': 'true', - }) - - response = self.forced_auth_req('get', url, user=None, view=view) + 'benchmark': 'global' + }, view_name='country-connectivity-stat') + response = self.forced_auth_req('get', url, view=view) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(len(response.data), 1) - - school_data = response.data[0] - self.assertIn('graph_data', school_data) - self.assertEqual(len(school_data['graph_data']), 7) - self.assertEqual(school_data['statistics']['connectivity_speed'], 4) - graph_data = school_data['graph_data'] - for data in graph_data: - if data['key'] == format_date((today - timedelta(days=1))): - self.assertEqual(data['value'], 4) - else: - self.assertIsNone(data['value']) - - def test_school_download_connectivity_stat_for_one_school_when_school_weekly_status_not_available(self): - """ - test_school_download_connectivity_stat_for_one_school_when_school_weekly_status_not_available - Positive test case to test the connectivity_status field for weekly data for 1 school when - School Weekly Status records are not available. + response_data = response.data + self.assertEqual(type(response_data), dict) - Expected: HTTP_200_OK - 1 school data with empty statistics json and filled graph_data json. - Connectivity_speed == 0, as for download speed is calculated based on graph data aggregation. 
- """ - school_four = SchoolFactory(country=self.country, location__country=self.country) + self.assertIn('live_avg', response_data) + self.assertIn('school_with_realtime_data', response_data) + self.assertIn('is_data_synced', response_data) + self.assertIn('graph_data', response_data) + self.assertIn('real_time_connected_schools', response_data) - today = datetime.now().date() - date_7_days_back = today - timedelta(days=6) + def test_country_download_connectivity_stat_for_invalid_date_range(self): + date = Week(2023, 56).monday() + start_date = date - timedelta(days=1) + end_date = start_date + timedelta(days=6) - url, view = self.list_school_connectivity_stat_url((), { + url, _, view = statistics_url((), { 'country_id': self.country.id, - 'school_ids': school_four.id, - 'start_date': format_date(date_7_days_back), - 'end_date': format_date(today), + 'start_date': format_date(start_date), + 'end_date': format_date(end_date), 'is_weekly': 'true', - }) + 'benchmark': 'global' + }, view_name='country-connectivity-stat') - response = self.forced_auth_req('get', url, user=None, view=view) + response = self.forced_auth_req('get', url, view=view) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(len(response.data), 1) - school_data = response.data[0] - self.assertIn('graph_data', school_data) - self.assertEqual(len(school_data['graph_data']), 7) - # self.assertEqual(school_data['statistics']['connectivity_speed'], 0) - self.assertEqual(school_data['connectivity_status'], 'unknown') + def test_country_download_connectivity_stat_for_missing_country_id(self): + date = Week(self.school_weekly_one.year, self.school_weekly_one.week).monday() + start_date = date - timedelta(days=1) + end_date = start_date + timedelta(days=6) - self.assertEqual(len(school_data['statistics']), 0) + url, _, view = statistics_url((), { + 'start_date': format_date(start_date), + 'end_date': format_date(end_date), + 'is_weekly': 'true', + 'benchmark': 'global' + }, view_name='country-connectivity-stat') - for data in school_data['graph_data']: - self.assertIsNone(data['value']) + response = self.forced_auth_req('get', url, view=view) - def test_school_latency_connectivity_stat_school_list(self): - """ - test_school_latency_connectivity_stat_school_list - Positive test case for weekly data for latency. 
+ self.assertEqual(response.status_code, status.HTTP_200_OK) - Expected: HTTP_200_OK - List of data for all 3 schools - """ - today = datetime.now().date() - date_7_days_back = today - timedelta(days=6) + def test_country_download_connectivity_stat_for_national_benchmark(self): + date = Week(self.school_weekly_one.year, self.school_weekly_one.week).monday() + start_date = date - timedelta(days=1) + end_date = start_date + timedelta(days=6) - url, view = self.list_school_connectivity_stat_url((), { + url, _, view = statistics_url((), { 'country_id': self.country.id, - 'school_ids': ','.join([str(self.school_one.id), str(self.school_two.id), str(self.school_three.id)]), - 'start_date': format_date(date_7_days_back), - 'end_date': format_date(today), + 'start_date': format_date(start_date), + 'end_date': format_date(end_date), 'is_weekly': 'true', - }) + 'benchmark': 'national', + }, view_name='country-connectivity-stat') - response = self.forced_auth_req('get', url, user=None, view=view) + response = self.forced_auth_req('get', url, view=view) + self.assertEqual(response.status_code, status.HTTP_200_OK) + + response_data = response.data + self.assertEqual(type(response_data), dict) + + self.assertIn('live_avg', response_data) + self.assertIn('school_with_realtime_data', response_data) + self.assertIn('is_data_synced', response_data) + self.assertIn('graph_data', response_data) + self.assertIn('real_time_connected_schools', response_data) + + +class SchoolConnectivityStatApiTestCase(TestAPIViewSetMixin, TestCase): + + @classmethod + def setUpTestData(cls): + cls.country = CountryFactory() + + cls.school_one = SchoolFactory(country=cls.country, location__country=cls.country) + cls.school_two = SchoolFactory(country=cls.country, location__country=cls.country) + cls.school_three = SchoolFactory(country=cls.country, location__country=cls.country) + + cls.school_weekly_one = SchoolWeeklyStatusFactory( + school=cls.school_one, + connectivity=True, connectivity_speed=3 * (10 ** 6), + coverage_availability=True, coverage_type='3g', + ) + cls.school_weekly_two = SchoolWeeklyStatusFactory( + school=cls.school_one, + connectivity=False, connectivity_speed=None, + coverage_availability=False, coverage_type='no', + ) + cls.school_weekly_three = SchoolWeeklyStatusFactory( + school=cls.school_one, + connectivity=None, connectivity_speed=None, + coverage_availability=None, coverage_type='unknown', + ) + cls.school_one.last_weekly_status = cls.school_weekly_one + cls.school_one.save() + cls.school_two.last_weekly_status = cls.school_weekly_two + cls.school_two.save() + cls.school_three.last_weekly_status = cls.school_weekly_three + cls.school_three.save() + + cls.school_daily_two = SchoolDailyStatusFactory(school=cls.school_two, + date=datetime.now().date() - timedelta(days=1), + connectivity_speed=4000000) + + def setUp(self): + cache.clear() + super().setUp() + + def test_school_download_connectivity_stat_school_list(self): + """ + test_school_download_connectivity_stat_school_list + Positive test case for weekly data. 
+ + Expected: HTTP_200_OK - List of data for all 3 schools + """ + today = datetime.now().date() + date_7_days_back = today - timedelta(days=6) + + url, _, view = statistics_url((), { + 'country_id': self.country.id, + 'school_ids': ','.join([str(self.school_one.id), str(self.school_two.id), str(self.school_three.id)]), + 'start_date': format_date(date_7_days_back), + 'end_date': format_date(today), + 'is_weekly': 'true', + }, view_name='school-connectivity-stat') + + response = self.forced_auth_req('get', url, view=view) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(len(response.data), 3) + + def test_school_download_connectivity_stat_school_list_for_month(self): + """ + test_school_download_connectivity_stat_school_list + Positive test case for weekly data. + + Expected: HTTP_200_OK - List of data for all 3 schools + """ + today = datetime.now().date() + start_date = get_first_date_of_month(today.year, today.month) + end_date = get_last_date_of_month(today.year, today.month) + + url, _, view = statistics_url((), { + 'country_id': self.country.id, + 'school_ids': ','.join([str(self.school_one.id), str(self.school_two.id), str(self.school_three.id)]), + 'start_date': format_date(start_date), + 'end_date': format_date(end_date), + 'is_weekly': 'false', + }, view_name='school-connectivity-stat') + + response = self.forced_auth_req('get', url, view=view) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(len(response.data), 3) + + def test_school_download_connectivity_stat_without_school_id(self): + """ + test_school_download_connectivity_stat_without_school_id + Negative test case for weekly data without passing the school id in url query parameters. + + Expected: HTTP_200_OK - As of now it will return no data as we have hard coded the school id as 34554 + in API View. But with changes in API it will return the list of schools from the country. + """ + today = datetime.now().date() + date_7_days_back = today - timedelta(days=6) + + url, _, view = statistics_url((), { + 'country_id': self.country.id, + 'start_date': format_date(date_7_days_back), + 'end_date': format_date(today), + 'is_weekly': 'true', + }, view_name='school-connectivity-stat') + + response = self.forced_auth_req('get', url, view=view) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + # TODO: Change it once that hard coded school id is removed + self.assertEqual(len(response.data), 0) + + def test_school_download_connectivity_stat_without_country_id(self): + """ + test_school_download_connectivity_stat_without_country_id + Negative test case for weekly data without passing the country id in url query parameters. + + Expected: HTTP_404_NOT_FOUND - Country ID is a mandatory field. 
+ """ + today = datetime.now().date() + date_7_days_back = today - timedelta(days=6) + + url, _, view = statistics_url((), { + 'school_ids': ','.join([str(self.school_one.id), str(self.school_two.id), str(self.school_three.id)]), + 'start_date': format_date(date_7_days_back), + 'end_date': format_date(today), + 'is_weekly': 'true', + }, view_name='school-connectivity-stat') + + response = self.forced_auth_req('get', url, view=view) + + self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) + self.assertEqual(response.data.get('detail').code, rest_exceptions.NotFound.default_code) + + def test_school_download_connectivity_stat_for_one_school_without_daily(self): + """ + test_school_download_connectivity_stat_for_one_school_without_daily + Positive test case for weekly data for 1 school and School Daily records are also not available. + + Expected: HTTP_200_OK - 1 school data with graph_data json with value as null. + Connectivity_speed == 0, as for download speed is calculated based on graph data aggregation. + connectivity_status == unknown, as country.last_weekly_status.connectivity_availability == no_connectivity + """ + today = datetime.now().date() + date_7_days_back = today - timedelta(days=6) + + url, _, view = statistics_url((), { + 'country_id': self.country.id, + 'school_ids': self.school_one.id, + 'start_date': format_date(date_7_days_back), + 'end_date': format_date(today), + 'is_weekly': 'true', + }, view_name='school-connectivity-stat') + + response = self.forced_auth_req('get', url, view=view) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(len(response.data), 1) + + school_data = response.data[0] + + self.assertIn('name', school_data) + self.assertIn('connectivity_speed', school_data['statistics']) + self.assertIn('connectivity_status', school_data['statistics']) + + self.assertEqual(school_data['name'], self.school_one.name) + # INFO: As we have not created any School daily data, it will return 0 for download + self.assertIsNone(school_data['statistics']['connectivity_speed']) + self.assertEqual(school_data['statistics']['connectivity_status'], 'unknown') + + self.assertEqual(len(school_data['graph_data']), 7) + + def test_school_download_connectivity_stat_for_one_school_statistics(self): + """ + test_school_download_connectivity_stat_for_one_school_statistics + Positive test case to test the statistics JSON for weekly data for 1 school and + School Daily records are also not available. + + Expected: HTTP_200_OK - 1 school data with filled statistics json and graph_data json with value as null. + Connectivity_speed == 0, as for download speed is calculated based on graph data aggregation. 
+ connectivity_status == unknown, as country.last_weekly_status.connectivity_availability == no_connectivity + + statistics.connectivity_speed == round(3 * (10 ** 6) / 1000000, 2), as speed is picked from SchoolWeeklyStatus + """ + today = datetime.now().date() + date_7_days_back = today - timedelta(days=6) + + url, _, view = statistics_url((), { + 'country_id': self.country.id, + 'school_ids': self.school_one.id, + 'start_date': format_date(date_7_days_back), + 'end_date': format_date(today), + 'is_weekly': 'true', + }, view_name='school-connectivity-stat') + + response = self.forced_auth_req('get', url, view=view) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(len(response.data), 1) + + school_data = response.data[0] + self.assertIn('statistics', school_data) + self.assertIsNone(school_data['statistics']['connectivity_speed']) + + self.assertEqual(school_data['statistics']['connectivity_status'], 'unknown') + self.assertIsNone(school_data['statistics']['connectivity_speed']) + + def test_school_download_connectivity_stat_for_connectivity_status_choices(self): + """ + test_school_download_connectivity_stat_for_connectivity_status_choices + Positive test case to test the connectivity_status field for weekly data for 3 schools and + School Daily records only present for school_two. + + Expected: HTTP_200_OK - 3 school data with filled statistics json and graph_data json. + 1. school_one.connectivity_status == connected, as country.last_weekly_status.connectivity_availability == + connectivity and school_one.last_weekly_status.connectivity=True + + 2. school_two.connectivity_status == not_connected, as country.last_weekly_status.connectivity_availability == + connectivity and school_one.last_weekly_status.connectivity=False + + 3. school_three.connectivity_status == unknown, as country.last_weekly_status.connectivity_availability == + connectivity and school_one.last_weekly_status.connectivity=None + """ + + connectivity_availability = CountryWeeklyStatus.CONNECTIVITY_TYPES_AVAILABILITY.connectivity + self.country.last_weekly_status.connectivity_availability = connectivity_availability + self.country.last_weekly_status.save() + + today = datetime.now().date() + date_7_days_back = today - timedelta(days=6) + + url, _, view = statistics_url((), { + 'country_id': self.country.id, + 'school_ids': ','.join([str(self.school_one.id), str(self.school_two.id), str(self.school_three.id)]), + 'start_date': format_date(date_7_days_back), + 'end_date': format_date(today), + 'is_weekly': 'true', + }, view_name='school-connectivity-stat') + + response = self.forced_auth_req('get', url, view=view) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + self.assertIn('connectivity_status', response.data[0]) + self.assertIn('connectivity_status', response.data[1]) + self.assertIn('connectivity_status', response.data[2]) + + # self.assertEqual(response.data[0]['connectivity_status'], 'connected') + # self.assertEqual(response.data[1]['connectivity_status'], 'not_connected') + # self.assertEqual(response.data[2]['connectivity_status'], 'unknown') + + def test_school_download_connectivity_stat_for_one_school_graph_data_when_school_daily_status_not_available(self): + """ + test_school_download_connectivity_stat_for_one_school_graph_data_when_school_daily_status_not_available + Positive test case to test the graph_data JSON for weekly data for 1 school when + School Daily records are not available. 
+ + Expected: HTTP_200_OK - 1 school data with filled statistics json and filled graph_data json with null values. + Connectivity_speed == 0, as for download speed is calculated based on graph data aggregation. + """ + today = datetime.now().date() + date_7_days_back = today - timedelta(days=6) + + url, _, view = statistics_url((), { + 'country_id': self.country.id, + 'school_ids': self.school_one.id, + 'start_date': format_date(date_7_days_back), + 'end_date': format_date(today), + 'is_weekly': 'true', + }, view_name='school-connectivity-stat') + + response = self.forced_auth_req('get', url, view=view) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(len(response.data), 1) + + school_data = response.data[0] + self.assertIn('graph_data', school_data) + self.assertEqual(len(school_data['graph_data']), 7) + self.assertIsNone(school_data['statistics']['connectivity_speed']) + + graph_data = school_data['graph_data'] + for data in graph_data: + self.assertIsNone(data['value']) + + def test_school_download_connectivity_stat_for_one_school_graph_data_when_school_daily_status_available(self): + """ + test_school_download_connectivity_stat_for_one_school_graph_data_when_school_daily_status_available + Positive test case to test the graph_data JSON for weekly data for 1 school when + School Daily records are available. + + Expected: HTTP_200_OK - 1 school data with filled statistics json and filled graph_data json. + Connectivity_speed == 4, as for download speed is calculated based on graph data aggregation. + """ + today = datetime.now().date() + date_7_days_back = today - timedelta(days=6) + + url, _, view = statistics_url((), { + 'country_id': self.country.id, + 'school_ids': self.school_two.id, + 'start_date': format_date(date_7_days_back), + 'end_date': format_date(today), + 'is_weekly': 'true', + }, view_name='school-connectivity-stat') + + response = self.forced_auth_req('get', url, view=view) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(len(response.data), 1) + + school_data = response.data[0] + self.assertIn('graph_data', school_data) + self.assertEqual(len(school_data['graph_data']), 7) + self.assertEqual(school_data['statistics']['connectivity_speed'], 4) + + graph_data = school_data['graph_data'] + for data in graph_data: + if data['key'] == format_date((today - timedelta(days=1))): + self.assertEqual(data['value'], 4) + else: + self.assertIsNone(data['value']) + + def test_school_download_connectivity_stat_for_one_school_when_school_weekly_status_not_available(self): + """ + test_school_download_connectivity_stat_for_one_school_when_school_weekly_status_not_available + Positive test case to test the connectivity_status field for weekly data for 1 school when + School Weekly Status records are not available. + + Expected: HTTP_200_OK - 1 school data with empty statistics json and filled graph_data json. + Connectivity_speed == 0, as for download speed is calculated based on graph data aggregation. 
+ """ + school_four = SchoolFactory(country=self.country, location__country=self.country) + + today = datetime.now().date() + date_7_days_back = today - timedelta(days=6) + + url, _, view = statistics_url((), { + 'country_id': self.country.id, + 'school_ids': school_four.id, + 'start_date': format_date(date_7_days_back), + 'end_date': format_date(today), + 'is_weekly': 'true', + }, view_name='school-connectivity-stat') + + response = self.forced_auth_req('get', url, view=view) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(len(response.data), 1) + + school_data = response.data[0] + self.assertIn('graph_data', school_data) + self.assertEqual(len(school_data['graph_data']), 7) + # self.assertEqual(school_data['statistics']['connectivity_speed'], 0) + self.assertEqual(school_data['connectivity_status'], 'unknown') + + self.assertEqual(len(school_data['statistics']), 0) + + for data in school_data['graph_data']: + self.assertIsNone(data['value']) + + def test_school_latency_connectivity_stat_school_list(self): + """ + test_school_latency_connectivity_stat_school_list + Positive test case for weekly data for latency. + + Expected: HTTP_200_OK - List of data for all 3 schools + """ + today = datetime.now().date() + date_7_days_back = today - timedelta(days=6) + + url, _, view = statistics_url((), { + 'country_id': self.country.id, + 'school_ids': ','.join([str(self.school_one.id), str(self.school_two.id), str(self.school_three.id)]), + 'start_date': format_date(date_7_days_back), + 'end_date': format_date(today), + 'is_weekly': 'true', + }, view_name='school-connectivity-stat') + + response = self.forced_auth_req('get', url, view=view) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(len(response.data), 3) + + # self.assertEqual(response.data[0]['statistics']['connectivity_speed'], 0) + # self.assertEqual(response.data[1]['statistics']['connectivity_speed'], 0) + # self.assertEqual(response.data[2]['statistics']['connectivity_speed'], 0) + + +class CountryCoverageStatsAPITestCase(TestAPIViewSetMixin, TestCase): + @classmethod + def setUpTestData(cls): + cls.country_one = CountryFactory() + cls.country_two = CountryFactory() + + cls.stat_one = CountryWeeklyStatusFactory(country=cls.country_one) + cls.stat_two = CountryWeeklyStatusFactory(country=cls.country_two) + + def setUp(self): + cache.clear() + super().setUp() + + def test_get_country_coverage_stats(self): + url, _, view = statistics_url((), {'country_id': self.country_one.id}, view_name='country-coverage-stat') + + response = self.forced_auth_req('get', url, view=view) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data['total_schools'], self.stat_one.schools_total) + # self.assertEqual(response.data['connected_schools']['5g_4g'], self.stat_one.schools_coverage_good) + # self.assertEqual(response.data['connected_schools']['3g_2g'], self.stat_one.schools_coverage_moderate) + # self.assertEqual(response.data['connected_schools']['no_coverage'], self.stat_one.schools_coverage_no) + # self.assertEqual(response.data['connected_schools']['unknown'], self.stat_one.schools_coverage_unknown) + + def test_get_country_coverage_stats_no_data(self): + url, _, view = statistics_url((), {'country_id': 999}, view_name='country-coverage-stat') + response = self.forced_auth_req('get', url, view=view) + + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + + def test_get_country_coverage_stats_cached(self): + url, _, view = 
statistics_url((), {'country_id': self.country_one.id}, view_name='country-coverage-stat') + + # Call the API to cache the data + with self.assertNumQueries(2): + self.forced_auth_req('get', url, view=view) + + with self.assertNumQueries(0): + self.forced_auth_req('get', url, view=view) + + def test_get_country_coverage_stats_no_cache(self): + url = reverse('connection_statistics:country-coverage-stat') + query_params = {'country_id': self.country_one.id} + # Call the API without caching + with self.assertNumQueries(3): + response = self.client.get(url, query_params, HTTP_CACHE_CONTROL='no-cache') + self.assertEqual(response.status_code, status.HTTP_200_OK) + + +class ConnectivityConfigurationsAPITestCase(TestAPIViewSetMixin, TestCase): + @classmethod + def setUpTestData(cls): + cls.country_one = CountryFactory() + cls.country_two = CountryFactory() + + cls.admin1_one = Admin1Factory(country=cls.country_one, layer_name='adm1') + + cls.school_one = SchoolFactory(country=cls.country_one, admin1=cls.admin1_one) + cls.school_two = SchoolFactory(country=cls.country_one) + cls.school_three = SchoolFactory(country=cls.country_one) + + cls.stat_one = SchoolDailyStatusFactory(school=cls.school_one, live_data_source='DAILY_CHECK_APP_MLAB') + cls.stat_two = SchoolDailyStatusFactory(school=cls.school_two, live_data_source='QOS') + + args = ['--delete_data_sources', '--update_data_sources', '--update_data_layers'] + call_command('load_system_data_layers', *args) + + def setUp(self): + cache.clear() + super().setUp() + + def test_global_latest_configurations(self): + url, _, view = statistics_url((), {}, view_name='get-latest-week-and-month') + + with self.assertNumQueries(2): + response = self.forced_auth_req('get', url, view=view) + self.assertEqual(response.status_code, status.HTTP_200_OK) + + response_data = response.data + self.assertIn('week', response_data) + self.assertIn('month', response_data) + self.assertIn('years', response_data) + + with self.assertNumQueries(0): + self.forced_auth_req('get', url, view=view) + + def test_country_with_schools_latest_configurations(self): + url, _, view = statistics_url((), {'country_id': self.country_one.id}, view_name='get-latest-week-and-month') + + with self.assertNumQueries(2): + response = self.forced_auth_req('get', url, view=view) + self.assertEqual(response.status_code, status.HTTP_200_OK) + + response_data = response.data + self.assertIn('week', response_data) + self.assertIn('month', response_data) + self.assertIn('years', response_data) + + with self.assertNumQueries(0): + self.forced_auth_req('get', url, view=view) + + def test_country_with_schools_latest_configurations_for_live_layer(self): + layer = accounts_models.DataLayer.objects.filter( + type=accounts_models.DataLayer.LAYER_TYPE_LIVE, + category=accounts_models.DataLayer.LAYER_CATEGORY_CONNECTIVITY, + status=accounts_models.DataLayer.LAYER_STATUS_PUBLISHED, + created_by__isnull=True, + ).first() + url, _, view = statistics_url((), {'country_id': self.country_one.id, 'layer_id': layer.id}, + view_name='get-latest-week-and-month') + + with self.assertNumQueries(6): + response = self.forced_auth_req('get', url, view=view) + self.assertEqual(response.status_code, status.HTTP_200_OK) + + response_data = response.data + self.assertIn('week', response_data) + self.assertIn('month', response_data) + self.assertIn('years', response_data) + + def test_country_without_schools_latest_configurations(self): + url, _, view = statistics_url((), {'country_id': self.country_two.id}, 
view_name='get-latest-week-and-month') + + with self.assertNumQueries(1): + response = self.forced_auth_req('get', url, view=view) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(len(response.data), 0) + + with self.assertNumQueries(1): + self.forced_auth_req('get', url, view=view) + + def test_admin1_latest_configurations(self): + url, _, view = statistics_url((), {'admin1_id': self.admin1_one.id}, view_name='get-latest-week-and-month') + + with self.assertNumQueries(2): + response = self.forced_auth_req('get', url, view=view) + self.assertEqual(response.status_code, status.HTTP_200_OK) + response_data = response.data + self.assertIn('week', response_data) + self.assertIn('month', response_data) + self.assertIn('years', response_data) + + with self.assertNumQueries(0): + self.forced_auth_req('get', url, view=view) + + def test_school_latest_configurations(self): + url, _, view = statistics_url((), {'school_id': self.school_one.id}, view_name='get-latest-week-and-month') + + with self.assertNumQueries(2): + response = self.forced_auth_req('get', url, view=view) + self.assertEqual(response.status_code, status.HTTP_200_OK) + + response_data = response.data + self.assertIn('week', response_data) + self.assertIn('month', response_data) + self.assertIn('years', response_data) + + with self.assertNumQueries(0): + self.forced_auth_req('get', url, view=view) + + def test_schools_latest_configurations(self): + url, _, view = statistics_url((), { + 'school_ids': ','.join([str(s) for s in [self.school_one.id, self.school_two.id]]) + }, view_name='get-latest-week-and-month') + + with self.assertNumQueries(2): + response = self.forced_auth_req('get', url, view=view) + self.assertEqual(response.status_code, status.HTTP_200_OK) + + response_data = response.data + self.assertIn('week', response_data) + self.assertIn('month', response_data) + self.assertIn('years', response_data) + + with self.assertNumQueries(0): + self.forced_auth_req('get', url, view=view) + + +class CountrySummaryAPIViewSetAPITestCase(TestAPIViewSetMixin, TestCase): + databases = ['default', ] + + @classmethod + def setUpTestData(cls): + cls.country_one = CountryFactory() + cls.country_two = CountryFactory() + + cls.stat_one = CountryWeeklyStatusFactory( + country=cls.country_one, + integration_status=CountryWeeklyStatus.REALTIME_MAPPED, + year=datetime.now().year - 1, + week=12, + schools_connectivity_no=1 + ) + cls.stat_two = CountryWeeklyStatusFactory( + country=cls.country_one, + integration_status=CountryWeeklyStatus.REALTIME_MAPPED, + year=datetime.now().year - 1, + week=13, + schools_connectivity_no=1 + ) + + cls.stat_three = CountryWeeklyStatusFactory( + country=cls.country_two, + integration_status=CountryWeeklyStatus.REALTIME_MAPPED, + year=datetime.now().year - 1, + week=12, + schools_connectivity_no=1 + ) + + cls.user = test_utilities.setup_admin_user_by_role() + + def setUp(self): + cache.clear() + super().setUp() + + def test_list(self): + url, _, view = statistics_url((), {}, view_name='list-create-destroy-countryweeklystatus') + + response = self.forced_auth_req('get', url, user=self.user, view=view) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + response_data = response.data + self.assertEqual(type(response_data), dict) + # 3 records as we created manually in setup, 2 for each country with latest year and latest week + self.assertEqual(response_data['count'], 5) + self.assertEqual(len(response_data['results']), 5) + + def test_country_id_filter(self): + url, _, view = 
statistics_url((), {'country_id': self.country_one.id}, + view_name='list-create-destroy-countryweeklystatus') + + response = self.forced_auth_req('get', url, user=self.user, view=view) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + response_data = response.data + self.assertEqual(type(response_data), dict) + # 2 records as we created manually in setup, 1 for country with latest year and latest week + self.assertEqual(response_data['count'], 3) + self.assertEqual(len(response_data['results']), 3) + + def test_year_week_filter(self): + url, _, view = statistics_url((), {'year': datetime.now().year - 1, 'week': 12}, + view_name='list-create-destroy-countryweeklystatus') + + response = self.forced_auth_req('get', url, user=self.user, view=view) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + response_data = response.data + self.assertEqual(type(response_data), dict) + # 2 records as we created manually in setup + self.assertEqual(response_data['count'], 2) + self.assertEqual(len(response_data['results']), 2) + + def test_search(self): + url, _, view = statistics_url((), {'search': self.country_one.name}, + view_name='list-create-destroy-countryweeklystatus') + + response = self.forced_auth_req('get', url, user=self.user, view=view) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + response_data = response.data + self.assertEqual(type(response_data), dict) + # 2 records as we created manually in setup, 1 for country with latest year and latest week + self.assertEqual(response_data['count'], 3) + self.assertEqual(len(response_data['results']), 3) + + def test_retrieve(self): + url, view, view_info = statistics_url((self.stat_one.id,), {}, + view_name='update-retrieve-countryweeklystatus') + + response = self.forced_auth_req('get', url, user=self.user, view=view, view_info=view_info, ) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + response_data = response.data + self.assertEqual(response_data['id'], self.stat_one.id) + self.assertEqual(response_data['connectivity_speed'], self.stat_one.connectivity_speed) + self.assertEqual(response_data['year'], self.stat_one.year) + self.assertEqual(response_data['week'], self.stat_one.week) + self.assertEqual(response_data['integration_status'], self.stat_one.integration_status) + + def test_retrieve_wrong_id(self): + url, view, view_info = statistics_url((1234546,), {}, + view_name='update-retrieve-countryweeklystatus') + + response = self.forced_auth_req('get', url, user=self.user, view=view, view_info=view_info, ) + + self.assertEqual(response.status_code, status.HTTP_502_BAD_GATEWAY) + + def test_update(self): + url, _, view = statistics_url((self.stat_two.id,), {}, + view_name='update-retrieve-countryweeklystatus') + put_response = self.forced_auth_req( + 'put', + url, + user=self.user, + data={ + "id": self.stat_two.id, + "created": self.stat_two.created, + "modified": self.stat_two.modified, + "connectivity_speed": self.stat_two.connectivity_speed, + "connectivity_upload_speed": self.stat_two.connectivity_upload_speed, + "year": self.stat_two.year, + "week": self.stat_two.week, + "date": self.stat_two.date, + "integration_status": CountryWeeklyStatus.STATIC_MAPPED, + "country": self.stat_two.country.id + } + ) + + self.assertEqual(put_response.status_code, status.HTTP_200_OK) + + def test_update_wrong_id(self): + url, _, view = statistics_url((123434567,), {}, + view_name='update-retrieve-countryweeklystatus') + put_response = self.forced_auth_req( + 'put', + url, + 
user=self.user, + data={ + "id": self.stat_two.id, + "created": self.stat_two.created, + "modified": self.stat_two.modified, + "connectivity_speed": self.stat_two.connectivity_speed, + "connectivity_upload_speed": self.stat_two.connectivity_upload_speed, + "year": self.stat_two.year, + "week": self.stat_two.week, + "date": self.stat_two.date, + "integration_status": CountryWeeklyStatus.STATIC_MAPPED, + "country": self.stat_two.country.id + } + ) + + self.assertEqual(put_response.status_code, status.HTTP_502_BAD_GATEWAY) + + def test_update_invalid_data(self): + url, _, view = statistics_url((self.stat_two.id,), {}, + view_name='update-retrieve-countryweeklystatus') + put_response = self.forced_auth_req( + 'put', + url, + user=self.user, + data={ + "id": self.stat_two.id, + "created": self.stat_two.created, + "modified": self.stat_two.modified, + "connectivity_speed": self.stat_two.connectivity_speed, + "connectivity_upload_speed": self.stat_two.connectivity_upload_speed, + "year": self.stat_two.year, + "week": self.stat_two.week, + "date": self.stat_two.date, + "integration_status": 8, + "country": self.stat_two.country.id + } + ) + + self.assertEqual(put_response.status_code, status.HTTP_400_BAD_REQUEST) + + def test_delete(self): + url, _, view = statistics_url((), {}, view_name='list-create-destroy-countryweeklystatus') + + response = self.forced_auth_req( + 'delete', + url, + data={'id': [self.stat_two.id]}, + user=self.user, + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + + def test_delete_without_ids(self): + url, _, view = statistics_url((), {}, view_name='list-create-destroy-countryweeklystatus') + + response = self.forced_auth_req( + 'delete', + url, + user=self.user, + ) + self.assertEqual(response.status_code, status.HTTP_502_BAD_GATEWAY) + + def test_delete_wrong_ids(self): + url, _, view = statistics_url((), {}, view_name='list-create-destroy-countryweeklystatus') + + response = self.forced_auth_req( + 'delete', + url, + data={'id': [12345432]}, + user=self.user, + ) + self.assertEqual(response.status_code, status.HTTP_502_BAD_GATEWAY) + + +class CountryDailyConnectivitySummaryAPIViewSetAPITestCase(TestAPIViewSetMixin, TestCase): + databases = ['default', ] + + @classmethod + def setUpTestData(cls): + cls.country_one = CountryFactory() + cls.country_two = CountryFactory() + + today = datetime.now().date() + + cls.stat_one = CountryDailyStatusFactory( + country=cls.country_one, + date=today, + live_data_source='DAILY_CHECK_APP_MLAB' + ) + cls.stat_two = CountryDailyStatusFactory( + country=cls.country_one, + date=today, + live_data_source='QOS' + ) + + cls.stat_three = CountryDailyStatusFactory( + country=cls.country_two, + date=today, + live_data_source='DAILY_CHECK_APP_MLAB' + ) + + cls.user = test_utilities.setup_admin_user_by_role() + + def setUp(self): + cache.clear() + super().setUp() + + def test_list(self): + url, _, view = statistics_url((), {}, view_name='list-create-destroy-countrydailystatus') + + response = self.forced_auth_req('get', url, user=self.user, view=view) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + response_data = response.data + self.assertEqual(type(response_data), dict) + # 3 records as we created manually in setup + self.assertEqual(response_data['count'], 3) + self.assertEqual(len(response_data['results']), 3) + + def test_country_id_filter(self): + url, _, view = statistics_url((), {'country_id': self.country_one.id}, + view_name='list-create-destroy-countrydailystatus') + + response = 
self.forced_auth_req('get', url, user=self.user, view=view) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(len(response.data), 3) - # self.assertEqual(response.data[0]['statistics']['connectivity_speed'], 0) - # self.assertEqual(response.data[1]['statistics']['connectivity_speed'], 0) - # self.assertEqual(response.data[2]['statistics']['connectivity_speed'], 0) + response_data = response.data + self.assertEqual(type(response_data), dict) + # 2 records as we created manually in setup + self.assertEqual(response_data['count'], 2) + self.assertEqual(len(response_data['results']), 2) + + def test_search(self): + url, _, view = statistics_url((), {'search': self.country_one.name}, + view_name='list-create-destroy-countrydailystatus') + + response = self.forced_auth_req('get', url, user=self.user, view=view) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + response_data = response.data + self.assertEqual(type(response_data), dict) + # 2 records as we created manually in setup + self.assertEqual(response_data['count'], 2) + self.assertEqual(len(response_data['results']), 2) + + def test_retrieve(self): + url, view, view_info = statistics_url((self.stat_one.id,), {}, + view_name='update-retrieve-countrydailystatus') + + response = self.forced_auth_req('get', url, user=self.user, view=view, view_info=view_info, ) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + response_data = response.data + self.assertEqual(response_data['id'], self.stat_one.id) + self.assertEqual(response_data['connectivity_speed'], self.stat_one.connectivity_speed) + self.assertEqual(response_data['date'], format_date(self.stat_one.date)) + + def test_retrieve_wrong_id(self): + url, view, view_info = statistics_url((1234546,), {}, + view_name='update-retrieve-countrydailystatus') + + response = self.forced_auth_req('get', url, user=self.user, view=view, view_info=view_info, ) + + self.assertEqual(response.status_code, status.HTTP_502_BAD_GATEWAY) + + def test_update(self): + url, _, view = statistics_url((self.stat_two.id,), {}, + view_name='update-retrieve-countrydailystatus') + put_response = self.forced_auth_req( + 'put', + url, + user=self.user, + data={ + "id": self.stat_two.id, + "created": self.stat_two.created, + "modified": self.stat_two.modified, + "connectivity_speed": 10000000, + "connectivity_upload_speed": self.stat_two.connectivity_upload_speed, + "date": self.stat_two.date, + "country": self.stat_two.country.id + } + ) + + self.assertEqual(put_response.status_code, status.HTTP_200_OK) + + def test_update_wrong_id(self): + url, _, view = statistics_url((123434567,), {}, + view_name='update-retrieve-countrydailystatus') + put_response = self.forced_auth_req( + 'put', + url, + user=self.user, + data={ + "id": self.stat_two.id, + "created": self.stat_two.created, + "modified": self.stat_two.modified, + "connectivity_speed": self.stat_two.connectivity_speed, + "connectivity_upload_speed": self.stat_two.connectivity_upload_speed, + "date": self.stat_two.date, + "country": self.stat_two.country.id + } + ) + + self.assertEqual(put_response.status_code, status.HTTP_502_BAD_GATEWAY) + + def test_update_invalid_data(self): + url, _, view = statistics_url((self.stat_two.id,), {}, + view_name='update-retrieve-countrydailystatus') + put_response = self.forced_auth_req( + 'put', + url, + user=self.user, + data={ + "id": self.stat_two.id, + "created": self.stat_two.created, + "modified": self.stat_two.modified, + "connectivity_speed": 234.123, + 
"connectivity_upload_speed": self.stat_two.connectivity_upload_speed, + "date": self.stat_two.date, + "country": self.stat_two.country.id + } + ) + + self.assertEqual(put_response.status_code, status.HTTP_400_BAD_REQUEST) + + def test_delete(self): + url, _, view = statistics_url((), {}, view_name='list-create-destroy-countrydailystatus') + + response = self.forced_auth_req( + 'delete', + url, + data={'id': [self.stat_two.id]}, + user=self.user, + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + + def test_delete_without_ids(self): + url, _, view = statistics_url((), {}, view_name='list-create-destroy-countrydailystatus') + + response = self.forced_auth_req( + 'delete', + url, + user=self.user, + ) + self.assertEqual(response.status_code, status.HTTP_502_BAD_GATEWAY) + + def test_delete_wrong_ids(self): + url, _, view = statistics_url((), {}, view_name='list-create-destroy-countrydailystatus') + + response = self.forced_auth_req( + 'delete', + url, + data={'id': [12345432]}, + user=self.user, + ) + self.assertEqual(response.status_code, status.HTTP_502_BAD_GATEWAY) -class CountryWeekStatsApiTestCase(TestAPIViewSetMixin, TestCase): +class SchoolSummaryAPIViewSetAPITestCase(TestAPIViewSetMixin, TestCase): + databases = ['default', ] @classmethod def setUpTestData(cls): - cls.country_one = CountryFactory() - cls.country_two = CountryFactory() - - cls.stat_one = CountryWeeklyStatusFactory(country=cls.country_one) - cls.stat_two = CountryWeeklyStatusFactory(country=cls.country_two) + cls.country = CountryFactory() - cls.country_one_daily = CountryDailyStatusFactory(country=cls.country_one, - date=Week(cls.stat_one.year, cls.stat_one.week).monday()) + cls.school_one = SchoolFactory(country=cls.country, location__country=cls.country, geopoint=None) + cls.school_two = SchoolFactory(country=cls.country, location__country=cls.country) - cls.email = 'test@test.com' - cls.password = 'SomeRandomPass96' - cls.user = auth_models.ApplicationUser.objects.create_user(username=cls.email, password=cls.password) + cls.stat_one = SchoolWeeklyStatusFactory( + school=cls.school_one, + connectivity=True, + year=datetime.now().year - 1, + week=12, + ) + cls.stat_two = SchoolWeeklyStatusFactory( + school=cls.school_one, + connectivity=False, + year=datetime.now().year - 1, + week=13, + ) + cls.stat_three = SchoolWeeklyStatusFactory( + school=cls.school_two, + connectivity=True, + year=datetime.now().year - 1, + week=12, + ) - cls.role = auth_models.Role.objects.create(name='Admin', category='system') - cls.role_permission = auth_models.UserRoleRelationship.objects.create(user=cls.user, role=cls.role) + cls.user = test_utilities.setup_admin_user_by_role() def setUp(self): cache.clear() super().setUp() - def country_connectivity_stat_url(self, url_params, query_param): - view_name = 'connection_statistics:list_or_create_destroy_countryweeklystatus' - url = reverse(view_name, args=url_params) - view_info = resolve(url).func + def test_list(self): + url, _, view = statistics_url((), {}, view_name='list-create-destroy-schoolweeklystatus') - if len(query_param) > 0: - query_params = '?' + '&'.join([key + '=' + str(val) for key, val in query_param.items()]) - url += query_params - return url, view_info + response = self.forced_auth_req('get', url, user=self.user, view=view) - def test_country_download_connectivity_stat(self): - """ - test_country_download_connectivity_stat - Positive test case for country weekly data. 
+ self.assertEqual(response.status_code, status.HTTP_200_OK) - Expected: HTTP_200_OK - List of data for given country id - """ - date = Week(self.stat_one.year, self.stat_one.week).monday() - start_date = date - timedelta(days=1) - end_date = start_date + timedelta(days=6) + response_data = response.data + self.assertEqual(type(response_data), dict) + # 3 records as we created manually in setup + self.assertEqual(response_data['count'], 3) + self.assertEqual(len(response_data['results']), 3) - url, view = self.country_connectivity_stat_url((), { - 'country_id': self.country_one.id, - 'start_date': format_date(start_date), - 'end_date': format_date(end_date), - 'is_weekly': 'true', - 'benchmark': 'global' - }) + def test_school_id_filter(self): + url, _, view = statistics_url((), {'school_id': self.school_one.id}, + view_name='list-create-destroy-schoolweeklystatus') response = self.forced_auth_req('get', url, user=self.user, view=view) - # print(response.data) + self.assertEqual(response.status_code, status.HTTP_200_OK) response_data = response.data self.assertEqual(type(response_data), dict) + # 2 records as we created manually in setup + self.assertEqual(response_data['count'], 2) + self.assertEqual(len(response_data['results']), 2) - # self.assertIn('live_avg', response_data) - # self.assertIn('schools_total', response_data['results']) - # self.assertIn('school_with_realtime_data', response_data) - # self.assertIn('is_data_synced', response_data) - # self.assertIn('graph_data', response_data) - # self.assertIn('real_time_connected_schools', response_data) - - def test_country_download_connectivity_stat_data(self): - date = Week(self.stat_one.year, self.stat_one.week).monday() - start_date = date - timedelta(days=1) - end_date = start_date + timedelta(days=6) - - url, view = self.country_connectivity_stat_url((), { - 'country_id': self.country_one.id, - 'start_date': format_date(start_date), - 'end_date': format_date(end_date), - 'is_weekly': 'true', - 'benchmark': 'global' - }) + def test_year_week_filter(self): + url, _, view = statistics_url((), {'year': datetime.now().year - 1, 'week': 12}, + view_name='list-create-destroy-schoolweeklystatus') response = self.forced_auth_req('get', url, user=self.user, view=view) - # print(response.data) + self.assertEqual(response.status_code, status.HTTP_200_OK) - # self.assertEqual(response.data[0]['schools_total'], self.stat_one.schools_total) - # self.assertEqual(response.data[0]['school_with_realtime_data'], self.stat_one.schools_connected) - def test_country_download_connectivity_stat_for_invalid_country_id(self): - date = Week(self.stat_one.year, self.stat_one.week).monday() - start_date = date - timedelta(days=1) - end_date = start_date + timedelta(days=6) + response_data = response.data + self.assertEqual(type(response_data), dict) + # 2 records as we created manually in setup + self.assertEqual(response_data['count'], 2) + self.assertEqual(len(response_data['results']), 2) - url, view = self.country_connectivity_stat_url((), { - 'country_id': 123456, - 'start_date': format_date(start_date), - 'end_date': format_date(end_date), - 'is_weekly': 'true', - 'benchmark': 'global' - }) + def test_search(self): + url, _, view = statistics_url((), {'search': self.school_one.name}, + view_name='list-create-destroy-schoolweeklystatus') response = self.forced_auth_req('get', url, user=self.user, view=view) - # print(response.data) - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - def 
test_country_download_connectivity_stat_for_invalid_date_range(self): - date = Week(2023, 56).monday() - start_date = date - timedelta(days=1) - end_date = start_date + timedelta(days=6) + self.assertEqual(response.status_code, status.HTTP_200_OK) - url, view = self.country_connectivity_stat_url((), { - 'country_id': self.country_one.id, - 'start_date': format_date(start_date), - 'end_date': format_date(end_date), - 'is_weekly': 'true', - 'benchmark': 'global' - }) + response_data = response.data + self.assertEqual(type(response_data), dict) + # 2 records as we created manually in setup + self.assertEqual(response_data['count'], 2) + self.assertEqual(len(response_data['results']), 2) - response = self.forced_auth_req('get', url, user=self.user, view=view) + def test_retrieve(self): + url, view, view_info = statistics_url((self.stat_one.id,), {}, + view_name='update-retrieve-schoolweeklystatus') + + response = self.forced_auth_req('get', url, user=self.user, view=view, view_info=view_info, ) self.assertEqual(response.status_code, status.HTTP_200_OK) - def test_country_download_connectivity_stat_for_missing_country_id(self): - date = Week(self.stat_one.year, self.stat_one.week).monday() - start_date = date - timedelta(days=1) - end_date = start_date + timedelta(days=6) + response_data = response.data + self.assertEqual(response_data['id'], self.stat_one.id) + self.assertEqual(response_data['connectivity_speed'], self.stat_one.connectivity_speed) + self.assertEqual(response_data['year'], self.stat_one.year) + self.assertEqual(response_data['week'], self.stat_one.week) + + def test_retrieve_wrong_id(self): + url, view, view_info = statistics_url((1234546,), {}, + view_name='update-retrieve-schoolweeklystatus') + + response = self.forced_auth_req('get', url, user=self.user, view=view, view_info=view_info, ) + + self.assertEqual(response.status_code, status.HTTP_502_BAD_GATEWAY) + + def test_update(self): + url, _, view = statistics_url((self.stat_two.id,), {}, + view_name='update-retrieve-schoolweeklystatus') + put_response = self.forced_auth_req( + 'put', + url, + user=self.user, + data={ + "id": self.stat_two.id, + "created": self.stat_two.created, + "modified": self.stat_two.modified, + "connectivity_speed": self.stat_two.connectivity_speed, + "connectivity_upload_speed": self.stat_two.connectivity_upload_speed, + "year": self.stat_two.year, + "week": self.stat_two.week, + "date": self.stat_two.date, + "school": self.school_one.id + } + ) - url, view = self.country_connectivity_stat_url((), { - 'start_date': format_date(start_date), - 'end_date': format_date(end_date), - 'is_weekly': 'true', - 'benchmark': 'global' - }) + self.assertEqual(put_response.status_code, status.HTTP_200_OK) + + def test_update_wrong_id(self): + url, _, view = statistics_url((123434567,), {}, + view_name='update-retrieve-schoolweeklystatus') + put_response = self.forced_auth_req( + 'put', + url, + user=self.user, + data={ + "id": self.stat_two.id, + "created": self.stat_two.created, + "modified": self.stat_two.modified, + "connectivity_speed": self.stat_two.connectivity_speed, + "connectivity_upload_speed": self.stat_two.connectivity_upload_speed, + "year": self.stat_two.year, + "week": self.stat_two.week, + "date": self.stat_two.date, + "school": self.school_two.id + } + ) - response = self.forced_auth_req('get', url, user=self.user, view=view) + self.assertEqual(put_response.status_code, status.HTTP_502_BAD_GATEWAY) + + def test_update_invalid_data(self): + url, _, view = statistics_url((self.stat_two.id,), {}, + 
view_name='update-retrieve-schoolweeklystatus') + put_response = self.forced_auth_req( + 'put', + url, + user=self.user, + data={ + "id": self.stat_two.id, + "created": self.stat_two.created, + "modified": self.stat_two.modified, + "connectivity_speed": self.stat_two.connectivity_speed, + "connectivity_upload_speed": self.stat_two.connectivity_upload_speed, + "year": self.stat_two.year, + "week": self.stat_two.week, + "date": self.stat_two.date, + 'coverage_type': '7g', + "school": self.school_one.id + } + ) + + self.assertEqual(put_response.status_code, status.HTTP_400_BAD_REQUEST) + def test_delete(self): + url, _, view = statistics_url((), {}, view_name='list-create-destroy-schoolweeklystatus') + + response = self.forced_auth_req( + 'delete', + url, + data={'id': [self.stat_two.id]}, + user=self.user, + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - def test_country_download_connectivity_stat_for_national_benchmark(self): - date = Week(self.stat_one.year, self.stat_one.week).monday() - start_date = date - timedelta(days=1) - end_date = start_date + timedelta(days=6) + def test_delete_without_ids(self): + url, _, view = statistics_url((), {}, view_name='list-create-destroy-schoolweeklystatus') - url, view = self.country_connectivity_stat_url((), { - 'country_id': self.country_one.id, - 'start_date': format_date(start_date), - 'end_date': format_date(end_date), - 'is_weekly': 'true', - 'benchmark': 'national', - }) + response = self.forced_auth_req( + 'delete', + url, + user=self.user, + ) + self.assertEqual(response.status_code, status.HTTP_502_BAD_GATEWAY) + + def test_delete_wrong_ids(self): + url, _, view = statistics_url((), {}, view_name='list-create-destroy-schoolweeklystatus') + + response = self.forced_auth_req( + 'delete', + url, + data={'id': [12345432]}, + user=self.user, + ) + self.assertEqual(response.status_code, status.HTTP_502_BAD_GATEWAY) + + +class SchoolDailyConnectivitySummaryAPIViewSetAPITestCase(TestAPIViewSetMixin, TestCase): + databases = ['default', ] + + @classmethod + def setUpTestData(cls): + cls.country = CountryFactory() + + cls.school_one = SchoolFactory(country=cls.country, location__country=cls.country, geopoint=None) + cls.school_two = SchoolFactory(country=cls.country, location__country=cls.country) + + today = datetime.now().date() + + cls.stat_one = SchoolDailyStatusFactory( + school=cls.school_one, + date=today, + live_data_source='DAILY_CHECK_APP_MLAB' + ) + cls.stat_two = SchoolDailyStatusFactory( + school=cls.school_one, + date=today, + live_data_source='QOS' + ) + + cls.stat_three = SchoolDailyStatusFactory( + school=cls.school_two, + date=today, + live_data_source='DAILY_CHECK_APP_MLAB' + ) + + cls.user = test_utilities.setup_admin_user_by_role() + + def setUp(self): + cache.clear() + super().setUp() + + def test_list(self): + url, _, view = statistics_url((), {}, view_name='list-create-destroy-schooldailystatus') response = self.forced_auth_req('get', url, user=self.user, view=view) - # print(response.data) + self.assertEqual(response.status_code, status.HTTP_200_OK) - # self.assertEqual(response.data['schools_total'], self.stat_one.schools_total) - - # self.assertEqual(response.data['real_time_connected_schools']['good'], - # self.stat_one.schools_connectivity_good) - # self.assertEqual(response.data['real_time_connected_schools']['moderate'], - # self.stat_one.schools_connectivity_moderate) - # self.assertEqual(response.data['real_time_connected_schools']['no_internet'], - # self.stat_one.schools_connectivity_no) - # 
self.assertEqual(response.data['real_time_connected_schools']['unknown'], - # self.stat_one.schools_connectivity_unknown) - - def test_country_uptime_connectivity_stat(self): - date = Week(self.stat_one.year, self.stat_one.week).monday() - start_date = date - timedelta(days=1) - end_date = start_date + timedelta(days=6) - url, view = self.country_connectivity_stat_url((), { - 'country_id': self.country_one.id, - 'start_date': format_date(start_date), - 'end_date': format_date(end_date), - 'is_weekly': 'true', - 'benchmark': 'global' - }) + response_data = response.data + self.assertEqual(type(response_data), dict) + # 3 records as we created manually in setup + self.assertEqual(response_data['count'], 3) + self.assertEqual(len(response_data['results']), 3) + + def test_school_id_filter(self): + url, _, view = statistics_url((), {'school_id': self.school_one.id}, + view_name='list-create-destroy-schooldailystatus') response = self.forced_auth_req('get', url, user=self.user, view=view) self.assertEqual(response.status_code, status.HTTP_200_OK) response_data = response.data - # self.assertIn('live_avg', response_data) - # self.assertIn('schools_total', response_data['results']) - # self.assertIn('school_with_realtime_data', response_data) - # self.assertIn('is_data_synced', response_data) - # self.assertIn('graph_data', response_data) - # self.assertIn('real_time_connected_schools', response_data) - - def test_country_download_connectivity_stat_monthly(self): - """ - test_country_download_connectivity_stat_monthly - Positive test case for country weekly data. - - Expected: HTTP_200_OK - List of data for given country id - """ - date = Week(self.stat_one.year, self.stat_one.week).monday() - start_date = get_first_date_of_month(date.year, date.month) - end_date = get_last_date_of_month(date.year, date.month) + self.assertEqual(type(response_data), dict) + # 2 records as we created manually in setup + self.assertEqual(response_data['count'], 2) + self.assertEqual(len(response_data['results']), 2) - url, view = self.country_connectivity_stat_url((), { - 'country_id': self.country_one.id, - 'start_date': format_date(start_date), - 'end_date': format_date(end_date), - 'is_weekly': 'false', - 'benchmark': 'global' - }) + def test_search(self): + url, _, view = statistics_url((), {'search': self.school_one.name}, + view_name='list-create-destroy-schooldailystatus') response = self.forced_auth_req('get', url, user=self.user, view=view) @@ -947,82 +1914,234 @@ def test_country_download_connectivity_stat_monthly(self): response_data = response.data self.assertEqual(type(response_data), dict) + # 2 records as we created manually in setup + self.assertEqual(response_data['count'], 2) + self.assertEqual(len(response_data['results']), 2) - # self.assertIn('live_avg', response_data) - # self.assertIn('schools_total', response_data['results']) - # self.assertIn('school_with_realtime_data', response_data) - # self.assertIn('is_data_synced', response_data) - # self.assertIn('graph_data', response_data) - # self.assertIn('real_time_connected_schools', response_data) + def test_retrieve(self): + url, view, view_info = statistics_url((self.stat_one.id,), {}, + view_name='update-retrieve-schooldailystatus') - def test_country_download_connectivity_stat_monthly_invalid_country_id(self): - """ - test_country_download_connectivity_stat - Positive test case for country weekly data. 
+ response = self.forced_auth_req('get', url, user=self.user, view=view, view_info=view_info, ) - Expected: HTTP_200_OK - List of data for given country id - """ - date = Week(self.stat_one.year, self.stat_one.week).monday() - start_date = get_first_date_of_month(date.year, date.month) - end_date = get_last_date_of_month(date.year, date.month) + self.assertEqual(response.status_code, status.HTTP_200_OK) - url, view = self.country_connectivity_stat_url((), { - 'country_id': 123456, - 'start_date': format_date(start_date), - 'end_date': format_date(end_date), - 'is_weekly': 'false', - 'benchmark': 'global' - }) + response_data = response.data + self.assertEqual(response_data['id'], self.stat_one.id) + self.assertEqual(response_data['connectivity_speed'], self.stat_one.connectivity_speed) + self.assertEqual(response_data['date'], format_date(self.stat_one.date)) + + def test_retrieve_wrong_id(self): + url, view, view_info = statistics_url((1234546,), {}, + view_name='update-retrieve-schooldailystatus') + + response = self.forced_auth_req('get', url, user=self.user, view=view, view_info=view_info, ) + + self.assertEqual(response.status_code, status.HTTP_502_BAD_GATEWAY) + + def test_update(self): + url, _, view = statistics_url((self.stat_two.id,), {}, + view_name='update-retrieve-schooldailystatus') + put_response = self.forced_auth_req( + 'put', + url, + user=self.user, + data={ + "id": self.stat_two.id, + "created": self.stat_two.created, + "modified": self.stat_two.modified, + "connectivity_speed": 10000000, + "connectivity_upload_speed": self.stat_two.connectivity_upload_speed, + "date": self.stat_two.date, + "school": self.stat_two.school.id + } + ) - response = self.forced_auth_req('get', url, user=self.user, view=view) + self.assertEqual(put_response.status_code, status.HTTP_200_OK) + + def test_update_wrong_id(self): + url, _, view = statistics_url((123434567,), {}, + view_name='update-retrieve-schooldailystatus') + put_response = self.forced_auth_req( + 'put', + url, + user=self.user, + data={ + "id": self.stat_two.id, + "created": self.stat_two.created, + "modified": self.stat_two.modified, + "connectivity_speed": self.stat_two.connectivity_speed, + "connectivity_upload_speed": self.stat_two.connectivity_upload_speed, + "date": self.stat_two.date, + "school": self.stat_two.school.id + } + ) - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertEqual(put_response.status_code, status.HTTP_502_BAD_GATEWAY) + + def test_update_invalid_data(self): + url, _, view = statistics_url((self.stat_two.id,), {}, + view_name='update-retrieve-schooldailystatus') + put_response = self.forced_auth_req( + 'put', + url, + user=self.user, + data={ + "id": self.stat_two.id, + "created": self.stat_two.created, + "modified": self.stat_two.modified, + "connectivity_speed": 234.123, + "connectivity_upload_speed": self.stat_two.connectivity_upload_speed, + "date": self.stat_two.date, + "school": self.stat_two.school.id + } + ) + + self.assertEqual(put_response.status_code, status.HTTP_400_BAD_REQUEST) + + def test_delete(self): + url, _, view = statistics_url((), {}, view_name='list-create-destroy-schooldailystatus') + + response = self.forced_auth_req( + 'delete', + url, + data={'id': [self.stat_two.id]}, + user=self.user, + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + + def test_delete_without_ids(self): + url, _, view = statistics_url((), {}, view_name='list-create-destroy-schooldailystatus') + + response = self.forced_auth_req( + 'delete', + url, + 
user=self.user, + ) + self.assertEqual(response.status_code, status.HTTP_502_BAD_GATEWAY) + + def test_delete_wrong_ids(self): + url, _, view = statistics_url((), {}, view_name='list-create-destroy-schooldailystatus') + response = self.forced_auth_req( + 'delete', + url, + data={'id': [12345432]}, + user=self.user, + ) + self.assertEqual(response.status_code, status.HTTP_502_BAD_GATEWAY) + + +class TimePlayerApiTestCase(TestAPIViewSetMixin, TestCase): + databases = ['default', ] -class CountryCoverageStatsAPITestCase(APITestCase): @classmethod def setUpTestData(cls): - cls.country_one = CountryFactory() - cls.country_two = CountryFactory() - cls.stat_one = CountryWeeklyStatusFactory(country=cls.country_one) - cls.stat_two = CountryWeeklyStatusFactory(country=cls.country_two) + args = ['--delete_data_sources', '--update_data_sources', '--update_data_layers'] + call_command('load_system_data_layers', *args) - def setUp(self): - cache.clear() - super().setUp() + cls.admin_user = test_utilities.setup_admin_user_by_role() + cls.read_only_user = test_utilities.setup_read_only_user_by_role() - def test_get_country_coverage_stats(self): - url = reverse('connection_statistics:country-coverage-stat') - query_params = {'country_id': self.country_one.id} - response = self.client.get(url, query_params) - # print(response.data) - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.data['total_schools'], self.stat_one.schools_total) - # self.assertEqual(response.data['connected_schools']['5g_4g'], self.stat_one.schools_coverage_good) - # self.assertEqual(response.data['connected_schools']['3g_2g'], self.stat_one.schools_coverage_moderate) - # self.assertEqual(response.data['connected_schools']['no_coverage'], self.stat_one.schools_coverage_no) - # self.assertEqual(response.data['connected_schools']['unknown'], self.stat_one.schools_coverage_unknown) + def test_get_invalid_layer_id(self): + url, _, view = statistics_url((), { + 'layer_id': 123, + 'country_id': 123, + }, view_name='get-time-player-data') - def test_get_country_coverage_stats_no_data(self): - url = reverse('connection_statistics:country-coverage-stat') - query_params = {'country_id': 999} # Assuming this country ID does not exist - response = self.client.get(url, query_params) - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + response = self.forced_auth_req('get', url, _, view=view) - def test_get_country_coverage_stats_cached(self): - url = reverse('connection_statistics:country-coverage-stat') - query_params = {'country_id': self.country_one.id} - # Call the API to cache the data - with self.assertNumQueries(6): - self.client.get(url, query_params) + self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - with self.assertNumQueries(0): - self.client.get(url, query_params) + def test_for_live_layer(self): + pcdc_data_source = accounts_models.DataSource.objects.filter( + data_source_type=accounts_models.DataSource.DATA_SOURCE_TYPE_DAILY_CHECK_APP, + ).first() - def test_get_country_coverage_stats_no_cache(self): - url = reverse('connection_statistics:country-coverage-stat') - query_params = {'country_id': self.country_one.id} - # Call the API without caching - with self.assertNumQueries(6): - response = self.client.get(url, query_params, HTTP_CACHE_CONTROL='no-cache') - self.assertEqual(response.status_code, status.HTTP_200_OK) + url, _, view = accounts_url((), {}, view_name='list-or-create-data-layers') + + response = self.forced_auth_req( + 'post', + url, + 
user=self.admin_user, + view=view, + data={ + 'icon': '', + 'name': 'Test data layer 3', + 'description': 'Test data layer 3 description', + 'version': '1.0.0', + 'type': accounts_models.DataLayer.LAYER_TYPE_LIVE, + 'data_sources_list': [pcdc_data_source.id, ], + 'data_source_column': pcdc_data_source.column_config[0], + 'global_benchmark': { + 'value': '20000000', + 'unit': 'bps', + 'convert_unit': 'mbps' + }, + 'is_reverse': False, + 'legend_configs': { + 'good': { + 'values': [], + 'labels': 'Good' + }, + 'moderate': { + 'values': [], + 'labels': 'Moderate' + }, + 'bad': { + 'values': [], + 'labels': 'Bad' + }, + 'unknown': { + 'values': [], + 'labels': 'Unknown' + } + } + } + ) + + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + + response_data = response.data + + layer_id = response_data['id'] + + url, _, view = accounts_url((layer_id,), {}, + view_name='update-or-delete-data-layer') + + put_response = self.forced_auth_req( + 'put', + url, + user=self.admin_user, + data={ + 'status': accounts_models.DataLayer.LAYER_STATUS_READY_TO_PUBLISH, + } + ) + + self.assertEqual(put_response.status_code, status.HTTP_200_OK) + + url, _, view = accounts_url((layer_id,), {}, + view_name='publish-data-layer') + + put_response = self.forced_auth_req( + 'put', + url, + user=self.admin_user, + data={ + 'status': accounts_models.DataLayer.LAYER_STATUS_PUBLISHED, + } + ) + + self.assertEqual(put_response.status_code, status.HTTP_200_OK) + + url, _, view = statistics_url((), { + 'layer_id': layer_id, + 'country_id': 123, + 'z': '2', + 'x': '1', + 'y': '2.mvt', + }, view_name='get-time-player-data') + + response = self.forced_auth_req('get', url, _, view=view) + + self.assertEqual(response.status_code, status.HTTP_200_OK) diff --git a/proco/connection_statistics/tests/test_models.py b/proco/connection_statistics/tests/test_models.py index cef208b..6c2d310 100644 --- a/proco/connection_statistics/tests/test_models.py +++ b/proco/connection_statistics/tests/test_models.py @@ -51,7 +51,7 @@ def setUpTestData(cls): cls.school_weekly = SchoolWeeklyStatusFactory( school=cls.school, connectivity=True, connectivity_speed=3 * (10 ** 6), - coverage_availability=True, coverage_type='3g', + coverage_availability=True, coverage_type='4g', ) cls.school.last_weekly_status = cls.school_weekly diff --git a/proco/connection_statistics/utils.py b/proco/connection_statistics/utils.py index 052e703..e32e94e 100644 --- a/proco/connection_statistics/utils.py +++ b/proco/connection_statistics/utils.py @@ -336,7 +336,8 @@ def update_country_weekly_status(country: Country, date): country_status.schools_coverage_moderate = coverage_stats[ColorMapSchema.MODERATE] country_status.schools_coverage_no = coverage_stats[ColorMapSchema.NO] - schools_coverage_known = country_status.schools_coverage_good + country_status.schools_coverage_moderate + country_status.schools_coverage_no + schools_coverage_known = (country_status.schools_coverage_good + country_status.schools_coverage_moderate + + country_status.schools_coverage_no) country_status.schools_coverage_unknown = country_status.schools_total - schools_coverage_known # calculate speed & latency where available @@ -384,8 +385,10 @@ def update_country_weekly_status(country: Country, date): ]): country_status.integration_status = CountryWeeklyStatus.STATIC_MAPPED - if country_status.integration_status == CountryWeeklyStatus.STATIC_MAPPED \ - and country_status.connectivity_availability == connectivity_types.realtime_speed: + if ( + country_status.integration_status == 
CountryWeeklyStatus.STATIC_MAPPED and + country_status.connectivity_availability == connectivity_types.realtime_speed + ): country_status.integration_status = CountryWeeklyStatus.REALTIME_MAPPED country_status.avg_distance_school = country.calculate_avg_distance_school() @@ -411,7 +414,6 @@ def update_country_data_source_by_csv_filename(imported_file): def get_benchmark_value_for_default_download_layer(benchmark, country_id): data_layer_instance = DataLayer.objects.filter( - # name__icontains='download', type=DataLayer.LAYER_TYPE_LIVE, category=DataLayer.LAYER_CATEGORY_CONNECTIVITY, status=DataLayer.LAYER_STATUS_PUBLISHED, @@ -427,17 +429,16 @@ def get_benchmark_value_for_default_download_layer(benchmark, country_id): benchmark_val = data_layer_instance.global_benchmark.get('value') benchmark_unit = data_layer_instance.global_benchmark.get('unit') - if benchmark == 'national': - if country_id: - benchmark_metadata = Country.objects.all().filter( - id=country_id, - benchmark_metadata__isnull=False, - ).order_by('id').values_list('benchmark_metadata', flat=True).first() - - if benchmark_metadata and len(benchmark_metadata) > 0: - benchmark_metadata = json.loads(benchmark_metadata) - all_live_layers = benchmark_metadata.get('live_layer', {}) - if len(all_live_layers) > 0 and str(data_layer_instance.id) in (all_live_layers.keys()): - benchmark_val = all_live_layers[str(data_layer_instance.id)] + if benchmark == 'national' and country_id: + benchmark_metadata = Country.objects.all().filter( + id=country_id, + benchmark_metadata__isnull=False, + ).order_by('id').values_list('benchmark_metadata', flat=True).first() + + if benchmark_metadata and len(benchmark_metadata) > 0: + benchmark_metadata = json.loads(benchmark_metadata) + all_live_layers = benchmark_metadata.get('live_layer', {}) + if len(all_live_layers) > 0 and str(data_layer_instance.id) in (all_live_layers.keys()): + benchmark_val = all_live_layers[str(data_layer_instance.id)] return convert_to_int(str(benchmark_val), default='20000000'), benchmark_unit diff --git a/proco/contact/tests/test_api.py b/proco/contact/tests/test_api.py index 1c53a56..32344ba 100644 --- a/proco/contact/tests/test_api.py +++ b/proco/contact/tests/test_api.py @@ -10,11 +10,10 @@ class ContactAPITestCase(TestAPIViewSetMixin, TestCase): base_view = 'contact:' - databases = {'read_only_database', 'default'} + databases = {'default', } @classmethod def setUpTestData(cls): - # self.databases = 'default' cls.email = 'test@test.com' cls.password = 'SomeRandomPass96' cls.user = auth_models.ApplicationUser.objects.create_user(username=cls.email, password=cls.password) diff --git a/proco/core/config.py b/proco/core/config.py index 144d207..410330b 100644 --- a/proco/core/config.py +++ b/proco/core/config.py @@ -1,4 +1,9 @@ """ Config file to specify application configurations used in the PROCO app""" +import json +import os +from django.conf import settings + +FILTERS_FIELDS = None class AppConfig(object): @@ -46,5 +51,21 @@ def mobile_number_length(self): """Length of valid mobile number""" return 10 + @property + def get_giga_filter_fields(self): + global FILTERS_FIELDS + if FILTERS_FIELDS is None: + filter_fields = {} + filters_data = settings.FILTERS_DATA + for data in filters_data: + parameter = data['parameter'] + table_filters = filter_fields.get(parameter['table'], []) + table_filters.append(parameter['field'] + '__' + parameter['filter']) + if data.get('include_none_filter', False): + table_filters.append(parameter['field'] + '__none_' + parameter['filter']) + filter_fields[parameter['table']] = table_filters + FILTERS_FIELDS = filter_fields + return FILTERS_FIELDS + app_config = AppConfig()
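The new get_giga_filter_fields helper simply regroups settings.FILTERS_DATA by table name and caches the result in the module-level FILTERS_FIELDS. A minimal sketch of the mapping it produces, using a made-up FILTERS_DATA entry (the real entries live in the Django settings and are not part of this patch):

# Hypothetical settings.FILTERS_DATA entry, shown only to illustrate the transformation.
FILTERS_DATA = [
    {
        'parameter': {'table': 'schools', 'field': 'connectivity_speed', 'filter': 'range'},
        'include_none_filter': True,
    },
]

# With the entry above, app_config.get_giga_filter_fields would build and cache:
# {'schools': ['connectivity_speed__range', 'connectivity_speed__none_range']}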
diff --git a/proco/core/db_utils.py b/proco/core/db_utils.py index 70f79f1..44238be 100644 --- a/proco/core/db_utils.py +++ b/proco/core/db_utils.py @@ -1,5 +1,9 @@ +import logging + from django.db import connection +logger = logging.getLogger('gigamaps.' + __name__) + def dictfetchall(cursor): """ @@ -11,7 +15,7 @@ def dictfetchall(cursor): def sql_to_response(sql, label=''): - print('Query to execute for "{0}": {1}'.format(label, sql.replace('\n', ''))) + logger.debug('Query to execute for "{0}": {1}'.format(label, sql.replace('\n', ''))) try: with connection.cursor() as cur: cur.execute(sql) @@ -19,5 +23,5 @@ def sql_to_response(sql, label=''): return return dictfetchall(cur) except Exception as ex: - print('ERROR: Exception on query execution - {0}'.format(str(ex))) + logger.error('Exception on query execution - {0}'.format(str(ex))) return diff --git a/proco/core/filters.py b/proco/core/filters.py new file mode 100644 index 0000000..e4de7b1 --- /dev/null +++ b/proco/core/filters.py @@ -0,0 +1,11 @@ +import logging +import socket + + +class HostInfoFilter(logging.Filter): + + def filter(self, record): + record.hostname = socket.gethostname() + record.hostip = socket.gethostbyname(record.hostname) + + return True
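HostInfoFilter only attaches hostname and hostip attributes to each log record; it still has to be referenced from the project's LOGGING configuration to take effect. A minimal sketch of that wiring, assuming illustrative handler and formatter names (the actual LOGGING settings are not part of this diff):

# Illustrative dictConfig fragment; everything except HostInfoFilter and the
# 'gigamaps' logger namespace is an assumption, not taken from this patch.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'filters': {
        'host_info': {'()': 'proco.core.filters.HostInfoFilter'},
    },
    'formatters': {
        'host_aware': {
            # hostname/hostip are added to each record by HostInfoFilter
            'format': '%(asctime)s %(hostname)s %(hostip)s %(name)s %(levelname)s %(message)s',
        },
    },
    'handlers': {
        'console': {
            'class': 'logging.StreamHandler',
            'filters': ['host_info'],
            'formatter': 'host_aware',
        },
    },
    'loggers': {
        # modules in this change log under 'gigamaps.<module path>'
        'gigamaps': {'handlers': ['console'], 'level': 'DEBUG'},
    },
}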
diff --git a/proco/core/management/commands/create_admin_user.py b/proco/core/management/commands/create_admin_user.py index d745dde..48bff57 100644 --- a/proco/core/management/commands/create_admin_user.py +++ b/proco/core/management/commands/create_admin_user.py @@ -1,12 +1,16 @@ -# encoding: utf-8 from __future__ import absolute_import, division, print_function, unicode_literals +import logging +# encoding: utf-8 + from django.core.management.base import BaseCommand from django.core.validators import validate_email from django.utils import timezone from proco.custom_auth.models import Role, ApplicationUser, UserRoleRelationship +logger = logging.getLogger('gigamaps.' + __name__) + + def create_user_role_relationship(user, role_name): role = Role.objects.get(name=role_name) @@ -15,7 +19,7 @@ def valid_email(value): - print('Validating: {0}'.format(value)) + logger.debug('Validating email: {0}'.format(value)) validate_email(value) return value @@ -43,7 +47,7 @@ def add_arguments(self, parser): def handle(self, **options): user_email = options.get('user_email') - print('*** User create/update operation STARTED ({0}) ***'.format(user_email)) + logger.debug('User create/update operation started ({0})'.format(user_email)) user_instance, created = ApplicationUser.objects.update_or_create( username=user_email, @@ -58,6 +62,6 @@ def handle(self, **options): }, ) - print(user_instance.__dict__) + logger.debug(user_instance.__dict__) create_user_role_relationship(user_instance, Role.SYSTEM_ROLE_NAME_ADMIN) - print('*** User create/update operation ENDED ({0}) ***'.format(user_email)) + logger.debug('User create/update operation ended ({0})'.format(user_email)) diff --git a/proco/core/management/commands/create_api_key_with_write_access.py b/proco/core/management/commands/create_api_key_with_write_access.py index 35f9974..cb17e8b 100644 --- a/proco/core/management/commands/create_api_key_with_write_access.py +++ b/proco/core/management/commands/create_api_key_with_write_access.py @@ -1,3 +1,5 @@ +import logging + from django.core.management import call_command from django.core.management.base import BaseCommand from django.core.validators import validate_email @@ -9,16 +11,18 @@ from proco.custom_auth.models import ApplicationUser from proco.utils import dates as date_utilities +logger = logging.getLogger('gigamaps.' + __name__) + def get_user(email, force_user, first_name, last_name, inactive_email): - print('Validating: {0}'.format(email)) + logger.debug('Validating: {0}'.format(email)) validate_email(email) application_user = ApplicationUser.objects.all().annotate(email_lower=Lower('email')).filter( email_lower=str(email).lower()).first() if not application_user and force_user: - print('Creating the superuser as user with given email does not exist.') + logger.info('Creating the superuser as user with given email does not exist.') args = ['-email={0}'.format(email)] if first_name: args.append('-first_name={0}'.format(first_name)) @@ -32,10 +36,10 @@ def get_user(email, force_user, first_name, last_name, inactive_email): application_user = ApplicationUser.objects.all().annotate(email_lower=Lower('email')).filter( email_lower=str(email).lower()).first() elif application_user: - print('User with given email already exists.') + logger.info('User with given email already exists.') else: - print('ERROR: User with give email address is not present in the system. ' - 'To force this user, please pass --force_user argument.') + logger.error('User with given email address is not present in the system. 
' + 'To force this user, please pass --force_user argument.') exit(0) return application_user @@ -82,7 +86,7 @@ def add_arguments(self, parser): ) def handle(self, **options): - print('Creating API Key with write access....') + logger.info('Creating API Key with write access.') user_email = options.get('user_email') force_user = options.get('force_user') @@ -109,7 +113,7 @@ def handle(self, **options): api_key_instance.write_access_reason = reason api_key_instance.valid_to = valid_till_date api_key_instance.save(update_fields=('write_access_reason', 'valid_to',)) - print('API Key with write access updated successfully!\n') + logger.info('Api key with write access updated successfully!\n') else: api_key_instance = accounts_models.APIKey.objects.create( api=get_object_or_404(accounts_models.API.objects.all(), code=api_code), @@ -120,7 +124,6 @@ def handle(self, **options): has_write_access=True, write_access_reason=reason, ) - print('API Key with write access created successfully!\n') + logger.info('Api key with write access created successfully!\n') - print('API Key: {0}'.format(api_key_instance.api_key)) - print('\n') + logger.debug('Api key: {0}'.format(api_key_instance.api_key)) diff --git a/proco/core/management/commands/data_alteration_through_sql.py b/proco/core/management/commands/data_alteration_through_sql.py index e71be20..58f6f26 100644 --- a/proco/core/management/commands/data_alteration_through_sql.py +++ b/proco/core/management/commands/data_alteration_through_sql.py @@ -1,3 +1,5 @@ +import logging + from collections import OrderedDict from django.core.management.base import BaseCommand @@ -5,10 +7,12 @@ from django.db import transaction from django.utils import timezone +logger = logging.getLogger('gigamaps.' + __name__) + @transaction.atomic def create_and_execute_update_query(value, data_dict_list): - print('Executing update statement for: {0} records'.format(len(data_dict_list))) + logger.debug('Executing update statement for: {0} records'.format(len(data_dict_list))) # create update query stmt = ("UPDATE public.connection_statistics_schooldailystatus " @@ -16,13 +20,13 @@ def create_and_execute_update_query(value, data_dict_list): with connection.cursor() as cursor: for data_dict in data_dict_list: update_query = stmt.format(value=value, school_id=data_dict['school_id']) - print('Current Update Query: {}'.format(update_query)) + logger.debug('Current update query: {}'.format(update_query)) cursor.execute(update_query) @transaction.atomic def create_and_execute_update_query_v2(stmt): - print('Current Update Query: {}'.format(stmt)) + logger.debug('Current update query: {}'.format(stmt)) with connection.cursor() as cursor: cursor.execute(stmt) @@ -40,7 +44,7 @@ def populate_live_data_source_as_qos(start_school_id, end_school_id): create_and_execute_update_query_v2(query) te = timezone.now() - print('Executed the function in {} seconds'.format((te - ts).seconds)) + logger.debug('Executed the function in {} seconds'.format((te - ts).seconds)) query = """ SELECT DISTINCT s.id AS school_id @@ -63,8 +67,8 @@ def populate_live_data_source_as_qos(start_school_id, end_school_id): query = query.format(where_condition=where_condition) - print('Getting select statement query result from "schools_school" table for live_data_source records.') - print('Query: {}'.format(query)) + logger.info('Getting select statement query result from "schools_school" table for live_data_source records.') + logger.debug('Query: {}'.format(query)) data_list = [] with connection.cursor() as cursor: @@ 
-79,7 +83,7 @@ def populate_live_data_source_as_qos(start_school_id, end_school_id): create_and_execute_update_query('QOS', data_list) te2 = timezone.now() - print('Executed the function in {} seconds'.format((te2 - te).seconds)) + logger.debug('Executed the function in {} seconds'.format((te2 - te).seconds)) def populate_live_data_source_as_daily_check_app(start_school_id, end_school_id): @@ -95,7 +99,7 @@ def populate_live_data_source_as_daily_check_app(start_school_id, end_school_id) create_and_execute_update_query_v2(query) te = timezone.now() - print('Executed the function in {} seconds'.format((te - ts).seconds)) + logger.debug('Executed the function in {} seconds'.format((te - ts).seconds)) query = """ SELECT DISTINCT s.id AS school_id @@ -118,8 +122,8 @@ def populate_live_data_source_as_daily_check_app(start_school_id, end_school_id) query = query.format(where_condition=where_condition) - print('Getting select statement query result from "schools_school" table for live_data_source records.') - print('Query: {}'.format(query)) + logger.info('Getting select statement query result from "schools_school" table for live_data_source records.') + logger.debug('Query: {}'.format(query)) data_list = [] with connection.cursor() as cursor: @@ -134,7 +138,7 @@ def populate_live_data_source_as_daily_check_app(start_school_id, end_school_id) create_and_execute_update_query('DAILY_CHECK_APP_MLAB', data_list) te2 = timezone.now() - print('Executed the function in {} seconds'.format((te2 - te).seconds)) + logger.debug('Executed the function in {} seconds'.format((te2 - te).seconds)) class Command(BaseCommand): @@ -168,11 +172,11 @@ def handle(self, **options): end_school_id = options.get('end_school_id') if options.get('update_brasil_live_data_source', False): - print('update_brasil_live_data_source - START') + logger.info('Update brasil live data source - start') populate_live_data_source_as_qos(start_school_id, end_school_id) if options.get('update_non_brasil_live_data_source_name', False): - print('update_non_brasil_live_data_source_name - START') + logger.info('Update non brasil live data source name - start') populate_live_data_source_as_daily_check_app(start_school_id, end_school_id) - print('Data updated successfully!\n') + logger.info('Data updated successfully!\n') diff --git a/proco/core/management/commands/data_cleanup.py b/proco/core/management/commands/data_cleanup.py index 006fe0e..ff11998 100644 --- a/proco/core/management/commands/data_cleanup.py +++ b/proco/core/management/commands/data_cleanup.py @@ -1,3 +1,5 @@ +import logging + from datetime import timedelta from django.core.management.base import BaseCommand @@ -13,6 +15,8 @@ from proco.utils.dates import get_current_year from proco.utils.tasks import redo_aggregations_task, populate_school_new_fields_task +logger = logging.getLogger('gigamaps.' 
+ __name__) + def delete_duplicate_schools_based_on_giga_id(): # updated deleted time if multiple school has same deleted datetime @@ -20,9 +24,9 @@ def delete_duplicate_schools_based_on_giga_id(): 'country_id', 'giga_id_school', 'deleted').annotate( total_records=Count('id', distinct=False), ).order_by('-total_records', 'country_id', 'giga_id_school', 'deleted').filter(total_records__gt=1)[:10000] - print('Queryset to get max 10K records to update the deleted time ' - 'where more than 1 record has same Country, School Giga ID and deleted datetime ' - 'in School table: {0}'.format(rows_with_more_than_1_records.query)) + logger.debug('Queryset to get max 10K records to update the deleted time ' + 'where more than 1 record has same Country, School Giga ID and deleted datetime ' + 'in School table: {0}'.format(rows_with_more_than_1_records.query)) for row in rows_with_more_than_1_records: count = 1 @@ -30,7 +34,7 @@ def delete_duplicate_schools_based_on_giga_id(): country_id=row['country_id'], giga_id_school=row['giga_id_school'], ).order_by('-id')[1:]: - print('Deletion for: Id - {0}, Country ID - {1}, Giga ID - {2}'.format( + logger.debug('Deletion for: Id - {0}, Country ID - {1}, Giga ID - {2}'.format( deleted_row.id, deleted_row.country_id, deleted_row.giga_id_school)) deleted_row.deleted = get_current_datetime_object() + timedelta(minutes=count) deleted_row.save(update_fields=('deleted',)) @@ -41,16 +45,15 @@ def delete_duplicate_schools_based_on_giga_id(): 'country_id', 'giga_id_school').annotate( total_records=Count('id', distinct=False), ).order_by('-total_records', 'country_id', 'giga_id_school').filter(total_records__gt=1)[:10000] - print('Queryset to get max 10K records to delete where more than 1 record has same ' - 'Country and School Giga ID in School table: {0}'.format(rows_with_more_than_1_records.query)) + logger.debug('Queryset to get max 10K records to delete where more than 1 record has same ' + 'Country and School Giga ID in School table: {0}'.format(rows_with_more_than_1_records.query)) for row in rows_with_more_than_1_records: - count = 1 for row_to_delete in School.objects.all().filter( country_id=row['country_id'], giga_id_school=row['giga_id_school'], ).order_by('-id')[1:]: - print('Deletion for: Id - {0}, Country ID - {1}, Giga ID - {2}'.format( + logger.debug('Deletion for: Id - {0}, Country ID - {1}, Giga ID - {2}'.format( row_to_delete.id, row_to_delete.country_id, row_to_delete.giga_id_school)) row_to_delete.delete() @@ -60,8 +63,9 @@ def delete_duplicate_schools_based_on_external_id(): 'country_id', 'external_id').annotate( total_records=Count('id', distinct=False), ).order_by('-total_records', 'country_id', 'external_id').filter(total_records__gt=1)[:10000] - print('Queryset to get max 10K records to delete where more than 1 record has same Country and School External ID ' - 'in School table: {0}'.format(rows_with_more_than_1_records.query)) + logger.debug( + 'Queryset to get max 10K records to delete where more than 1 record has same Country and School External ID ' + 'in School table: {0}'.format(rows_with_more_than_1_records.query)) # for row in rows_with_more_than_1_records: # for row_to_delete in School.objects.filter( @@ -79,8 +83,8 @@ def delete_duplicate_school_weekly_records(): 'school_id', 'week', 'year').annotate( total_records=Count('school_id', distinct=False), ).order_by('-total_records', 'school_id', 'week', 'year').filter(total_records__gt=1)[:10000] - print('Queryset to get max 10K records to delete where more than 1 record has same Year, Week 
and ' - 'School ID in School Weekly table: {0}'.format(rows_with_more_than_1_records.query)) + logger.debug('Queryset to get max 10K records to delete where more than 1 record has same Year, Week and ' + 'School ID in School Weekly table: {0}'.format(rows_with_more_than_1_records.query)) for row in rows_with_more_than_1_records: school_id = row['school_id'] @@ -97,24 +101,24 @@ def delete_duplicate_school_weekly_records(): ).values_list('id', flat=True).order_by('id')) if last_weekly_id in school_weekly_ids_to_delete: - print('School Last Weekly Status id ({0}) is IN the deletion list. ' - 'Hence skipping the current record and deleting all remaining.'.format(last_weekly_id)) + logger.debug('School Last Weekly Status id ({0}) is IN the deletion list. ' + 'Hence skipping the current record and deleting all remaining.'.format(last_weekly_id)) for row_to_delete in statistics_models.SchoolWeeklyStatus.objects.filter( id__in=school_weekly_ids_to_delete, ).exclude(id=last_weekly_id).order_by('-id'): - print('Deletion for: Id - {0}, Year - {1}, Week - {2}, School Id - {3}'.format( + logger.debug('Deletion for: Id - {0}, Year - {1}, Week - {2}, School Id - {3}'.format( row_to_delete.id, row_to_delete.year, row_to_delete.week, row_to_delete.school_id)) # Hard deletion row_to_delete.delete(force=True) else: - print('School Last Weekly Status id ({0}) is NOT IN the deletion list. ' - 'Hence skipping first record and deleting all remaining based on ID DESC.'.format(last_weekly_id)) + logger.debug('School Last Weekly Status id ({0}) is NOT IN the deletion list. Hence skipping first ' + 'record and deleting all remaining based on ID DESC.'.format(last_weekly_id)) for row_to_delete in statistics_models.SchoolWeeklyStatus.objects.filter( school_id=row['school_id'], week=row['week'], year=row['year'], ).order_by('-id')[1:]: - print('Deletion for: Id - {0}, Year - {1}, Week - {2}, School Id - {3}'.format( + logger.debug('Deletion for: Id - {0}, Year - {1}, Week - {2}, School Id - {3}'.format( row_to_delete.id, row_to_delete.year, row_to_delete.week, row_to_delete.school_id)) # Hard deletion row_to_delete.delete(force=True) @@ -125,8 +129,9 @@ def delete_duplicate_school_daily_records(): 'school_id', 'date', 'live_data_source').annotate( total_records=Count('school_id', distinct=False), ).order_by('-total_records', 'school_id', 'date', 'live_data_source').filter(total_records__gt=1)[:10000] - print('Queryset to get max 10K records to delete where more than 1 record has same Date, Live Data Source and ' - 'School ID in School Daily table: {0}'.format(rows_with_more_than_1_records.query)) + logger.debug( + 'Queryset to get max 10K records to delete where more than 1 record has same Date, Live Data Source and ' + 'School ID in School Daily table: {0}'.format(rows_with_more_than_1_records.query)) for row in rows_with_more_than_1_records: for row_to_delete in statistics_models.SchoolDailyStatus.objects.filter( @@ -134,7 +139,7 @@ def delete_duplicate_school_daily_records(): date=row['date'], live_data_source=row['live_data_source'], ).order_by('-id')[1:]: - print('Deletion for: Id - {0}, Date - {1}, Data Source - {2}, School Id - {3}'.format( + logger.debug('Deletion for: Id - {0}, Date - {1}, Data Source - {2}, School Id - {3}'.format( row_to_delete.id, row_to_delete.date, row_to_delete.live_data_source, row_to_delete.school_id)) # Hard deletion row_to_delete.delete(force=True) @@ -145,8 +150,9 @@ def delete_duplicate_country_daily_records(): 'country_id', 'date', 'live_data_source').annotate( 
total_records=Count('country_id', distinct=False), ).order_by('-total_records', 'country_id', 'date', 'live_data_source').filter(total_records__gt=1)[:10000] - print('Queryset to get max 10K records to delete where more than 1 record has same Date, Live Data Source and ' - 'Country ID in Country Daily table: {0}'.format(rows_with_more_than_1_records.query)) + logger.debug( + 'Queryset to get max 10K records to delete where more than 1 record has same Date, Live Data Source and ' + 'Country ID in Country Daily table: {0}'.format(rows_with_more_than_1_records.query)) for row in rows_with_more_than_1_records: for row_to_delete in statistics_models.CountryDailyStatus.objects.filter( @@ -154,7 +160,7 @@ def delete_duplicate_country_daily_records(): date=row['date'], live_data_source=row['live_data_source'], ).order_by('-id')[1:]: - print('Deletion for: Id - {0}, Date - {1}, Data Source - {2}, Country Id - {3}'.format( + logger.debug('Deletion for: Id - {0}, Date - {1}, Data Source - {2}, Country Id - {3}'.format( row_to_delete.id, row_to_delete.date, row_to_delete.live_data_source, row_to_delete.country_id)) # Hard deletion row_to_delete.delete(force=True) @@ -165,15 +171,15 @@ def delete_duplicate_qos_model_records(): 'school_id', 'timestamp').annotate( total_records=Count('school_id'), ).order_by('-total_records', 'school_id', 'timestamp').filter(total_records__gt=1) - print('Queryset to get records to delete where more than 1 record has same School and Timestamp in ' - 'QoS Data table: {0}'.format(rows_with_more_than_1_records.query)) + logger.debug('Queryset to get records to delete where more than 1 record has same school and timestamp in ' + 'QoS Data table: {0}'.format(rows_with_more_than_1_records.query)) for row in rows_with_more_than_1_records: for row_to_delete in QoSData.objects.filter( school_id=row['school_id'], timestamp=row['timestamp'], ).order_by('-version')[1:]: - print('Deletion for: Id - {0}, Timestamp - {1}, School Id - {2}'.format( + logger.debug('Deletion for: Id - {0}, Timestamp - {1}, School Id - {2}'.format( row_to_delete.id, row_to_delete.timestamp, row_to_delete.school_id)) # Hard deletion row_to_delete.delete() @@ -184,8 +190,8 @@ def delete_duplicate_country_weekly_records(): 'country_id', 'week', 'year').annotate( total_records=Count('country_id', distinct=False), ).order_by('-total_records', 'country_id', 'week', 'year').filter(total_records__gt=1)[:10000] - print('Queryset to get max 10K records to delete where more than 1 record has same Year, Week and ' - 'Country ID in Country Weekly table: {0}'.format(rows_with_more_than_1_records.query)) + logger.debug('Queryset to get max 10K records to delete where more than 1 record has same Year, Week and ' + 'Country ID in Country Weekly table: {0}'.format(rows_with_more_than_1_records.query)) for row in rows_with_more_than_1_records: country_id = row['country_id'] @@ -202,24 +208,24 @@ def delete_duplicate_country_weekly_records(): ).values_list('id', flat=True).order_by('id')) if last_weekly_id in country_weekly_ids_to_delete: - print('Country Last Weekly Status id ({0}) is IN the deletion list. ' - 'Hence skipping the current record and deleting all remaining.'.format(last_weekly_id)) + logger.debug('Country Last Weekly Status id ({0}) is IN the deletion list. 
' + 'Hence skipping the current record and deleting all remaining.'.format(last_weekly_id)) for row_to_delete in statistics_models.CountryWeeklyStatus.objects.filter( id__in=country_weekly_ids_to_delete, ).exclude(id=last_weekly_id).order_by('-id'): - print('Deletion for: Id - {0}, Year - {1}, Week - {2}, Country Id - {3}'.format( + logger.debug('Deletion for: Id - {0}, Year - {1}, Week - {2}, Country Id - {3}'.format( row_to_delete.id, row_to_delete.year, row_to_delete.week, row_to_delete.country_id)) # Hard deletion row_to_delete.delete(force=True) else: - print('Country Last Weekly Status id ({0}) is NOT IN the deletion list. ' - 'Hence skipping first record and deleting all remaining based on ID DESC.'.format(last_weekly_id)) + logger.debug('Country Last Weekly Status id ({0}) is NOT IN the deletion list. Hence skipping first ' + 'record and deleting all remaining based on ID DESC.'.format(last_weekly_id)) for row_to_delete in statistics_models.CountryWeeklyStatus.objects.filter( country_id=row['country_id'], week=row['week'], year=row['year'], ).order_by('-id')[1:]: - print('Deletion for: Id - {0}, Year - {1}, Week - {2}, Country Id - {3}'.format( + logger.debug('Deletion for: Id - {0}, Year - {1}, Week - {2}, Country Id - {3}'.format( row_to_delete.id, row_to_delete.year, row_to_delete.week, row_to_delete.country_id)) # Hard deletion row_to_delete.delete(force=True) @@ -235,15 +241,15 @@ def delete_duplicate_school_records(): ).exclude( giga_id_school='', ) - print('Queryset to get records to delete where more than 1 record has same Giga ID and ' - 'Country ID in School table: {0}'.format(rows_with_more_than_1_records.query)) + logger.debug('Queryset to get records to delete where more than 1 record has same Giga ID and ' + 'Country ID in School table: {0}'.format(rows_with_more_than_1_records.query)) for row in rows_with_more_than_1_records: for row_to_delete in School.objects.filter( country_id=row['country_id'], giga_id_school=row['giga_id_school'], ).order_by('-id')[1:]: - print('Deletion for: Id - {0}, Country ID - {1}, Giga ID - {2}'.format( + logger.debug('Deletion for: Id - {0}, Country ID - {1}, Giga ID - {2}'.format( row_to_delete.id, row_to_delete.country_id, row_to_delete.giga_id_school)) # Hard deletion may fail row_to_delete.delete() @@ -340,7 +346,7 @@ def update_school_giga_ids(): for tok in row.split(',') ] file_data.append(dict(zip(headers, row_data))) - print(file_data) + logger.debug(file_data) for data in file_data: School.objects.filter( country_id=data['country_id'], @@ -470,8 +476,8 @@ def add_arguments(self, parser): ) def handle(self, **options): - print('Executing "data_cleanup" utility ....\n') - print('Options: {}\n\n'.format(options)) + logger.info('Executing data cleanup utility.\n') + logger.debug('Options: {}\n\n'.format(options)) country_id = options.get('country_id', None) start_school_id = options.get('start_school_id', None) @@ -479,57 +485,57 @@ def handle(self, **options): week_no = options.get('week_no', None) if options.get('clean_duplicate_school_gigs_ids'): - print('Performing School Duplicate record cleanup base on Giga ID and Country ID.') + logger.info('Performing school duplicate record cleanup based on giga ID and country ID.') delete_duplicate_schools_based_on_giga_id() - print('Completed School Duplicate record cleanup base on Giga ID and Country ID.\n\n') + logger.info('Completed school duplicate record cleanup based on giga ID and country ID.\n\n') if options.get('clean_duplicate_school_external_ids'): - print('Performing School 
Duplicate record cleanup base on External ID and Country ID.') + logger.info('Performing school duplicate record cleanup based on External ID and Country ID.') delete_duplicate_schools_based_on_external_id() - print('Completed School Duplicate record cleanup base on External ID and Country ID.\n\n') + logger.info('Completed school duplicate record cleanup based on External ID and Country ID.\n\n') if options.get('clean_duplicate_school_weekly'): - print('Performing School Weekly Duplicate record cleanup.') + logger.info('Performing school weekly duplicate record cleanup.') delete_duplicate_school_weekly_records() - print('Completed School Weekly Duplicate record cleanup.\n\n') + logger.info('Completed school weekly duplicate record cleanup.\n\n') if options.get('clean_duplicate_school_daily'): - print('Performing School Daily Duplicate record cleanup.') + logger.info('Performing school daily duplicate record cleanup.') delete_duplicate_school_daily_records() - print('Completed School Daily Duplicate record cleanup.\n\n') + logger.info('Completed school daily duplicate record cleanup.\n\n') if options.get('clean_duplicate_country_weekly'): - print('Performing Country Weekly Duplicate record cleanup.') + logger.info('Performing country weekly duplicate record cleanup.') delete_duplicate_country_weekly_records() - print('Completed Country Weekly Duplicate record cleanup.\n\n') + logger.info('Completed country weekly duplicate record cleanup.\n\n') if options.get('clean_duplicate_country_daily'): - print('Performing Country Daily Duplicate record cleanup.') + logger.info('Performing country daily duplicate record cleanup.') delete_duplicate_country_daily_records() - print('Completed Country Daily Duplicate record cleanup.\n\n') + logger.info('Completed country daily duplicate record cleanup.\n\n') if options.get('cleanup_qos_data_rows'): - print('Performing QoS Data Model Duplicate record cleanup.') + logger.info('Performing QoS data model duplicate record cleanup.') delete_duplicate_qos_model_records() - print('Completed QoS Data Model Duplicate record cleanup.\n\n') + logger.info('Completed QoS data model duplicate record cleanup.\n\n') if options.get('cleanup_school_master_rows'): - print('Performing School Master Data Source Duplicate record cleanup.') + logger.info('Performing school master data source duplicate record cleanup.') sources_tasks.cleanup_school_master_rows() - print('Completed School Master Data Source Duplicate record cleanup.\n\n') + logger.info('Completed school master data source duplicate record cleanup.\n\n') if options.get('clean_duplicate_schools'): - print('Performing School Duplicate record cleanup.') + logger.info('Performing school duplicate record cleanup.') delete_duplicate_school_records() - print('Completed School Duplicate record cleanup.\n\n') + logger.info('Completed school duplicate record cleanup.\n\n') if options.get('update_school_giga_ids'): - print('Performing School Giga ID update.') + logger.info('Performing school Giga ID update.') update_school_giga_ids() - print('Completed School Giga ID update.\n\n') + logger.info('Completed school Giga ID update.\n\n') if options.get('handle_published_school_master_data_row'): - print('Performing School Master Data Source Publish task handling.') + logger.info('Performing school master data source publish task handling.') if country_id: sources_tasks.handle_published_school_master_data_row(country_ids=[country_id, ]) @@ -540,7 +546,7 @@ def handle(self, **options): for row in new_published_records: 
sources_tasks.handle_published_school_master_data_row(published_row=row) - print('Completed School Master Data Source Publish task handling.\n\n') + logger.info('Completed school master data source publish task handling.\n\n') if options.get('handle_published_school_master_data_row_with_schedular'): sources_tasks.handle_published_school_master_data_row.delay(country_ids=[country_id, ]) @@ -572,9 +578,9 @@ def handle(self, **options): country_id_vs_year_qs = country_id_vs_year_qs.filter(year=options.get('year')) country_id_vs_year_qs = country_id_vs_year_qs.filter(year__lte=get_current_year(), ) - print('Query to select Country and Year for scheduling: {}\n\n'.format(country_id_vs_year_qs.query)) + logger.debug('Query to select country and year for scheduling: {}\n\n'.format(country_id_vs_year_qs.query)) for country_year in country_id_vs_year_qs: # redo_aggregations_task(country_year[0], country_year[1], None) redo_aggregations_task.delay(country_year[0], country_year[1], week_no) - print('Completed "data_cleanup" successfully ....\n') + logger.info('Completed data cleanup successfully.\n') diff --git a/proco/core/management/commands/index_rebuild_schools.py b/proco/core/management/commands/index_rebuild_schools.py index 1eeb5e3..1c55770 100644 --- a/proco/core/management/commands/index_rebuild_schools.py +++ b/proco/core/management/commands/index_rebuild_schools.py @@ -2,6 +2,7 @@ from __future__ import absolute_import, division, print_function, unicode_literals import time +import logging from azure.core.credentials import AzureKeyCredential from azure.search.documents import SearchClient @@ -16,6 +17,8 @@ from proco.locations.search_indexes import SchoolIndex from proco.schools.models import School +logger = logging.getLogger('gigamaps.' + __name__) + # Create a service client cognitive_search_settings = settings.AZURE_CONFIG.get('COGNITIVE_SEARCH') @@ -27,10 +30,10 @@ def delete_index(): try: result = admin_client.delete_index(SchoolIndex.Meta.index_name) - print('Index', SchoolIndex.Meta.index_name, 'Deleted') - print(result) + logger.debug('Index {0} deleted.'.format(SchoolIndex.Meta.index_name)) + logger.debug(result) except Exception as ex: - print(ex) + logger.error(ex) def create_index(): @@ -48,7 +51,7 @@ def create_index(): cors_options = CorsOptions(allowed_origins=['*'], max_age_in_seconds=24 * 60 * 60) scoring_profiles = [] - print('Index name: ', SchoolIndex.Meta.index_name) + logger.debug('Index name: {0}'.format(SchoolIndex.Meta.index_name)) index = SearchIndex( name=SchoolIndex.Meta.index_name, @@ -59,9 +62,9 @@ def create_index(): try: result = admin_client.create_index(index) - print('Index', result.name, 'created') + logger.debug('Index {0} created.'.format(result.name)) except Exception as ex: - print(ex) + logger.error(ex) def clear_index(): @@ -69,11 +72,12 @@ def clear_index(): AzureKeyCredential(cognitive_search_settings['SEARCH_API_KEY'])) doc_counts = search_client.get_document_count() - print("There are {0} documents in the {1} search index.".format(doc_counts, repr(SchoolIndex.Meta.index_name))) + logger.debug("There are {0} documents in the {1} search index.".format( + doc_counts, repr(SchoolIndex.Meta.index_name))) if doc_counts > 0: all_docs = search_client.search('*') - print('All documents: {0}'.format(all_docs)) + logger.debug('All documents: {0}'.format(all_docs)) search_client.delete_documents(all_docs) @@ -112,7 +116,7 @@ def collect_data(country_id): del qry_data['admin2_id'] docs.append(qry_data) - print('Total records to upload: {0}'.format(len(docs))) + 
logger.debug('Total records to upload: {0}'.format(len(docs))) # docs = docs[0:100000] # print('Total records to upload: {0}'.format(len(docs))) return docs @@ -130,13 +134,14 @@ def upload_docs(search_client, headers, data_chunk, failed_data_chunks, count, r while retry_no <= 3 and not uploaded: try: result = search_client.upload_documents(documents=data_chunk, headers=headers) - print("Upload of new document SUCCEEDED for count '{0}' in retry no: '{1}': {2}".format( + logger.debug("Upload of new document succeeded for count '{0}' in retry no: '{1}': {2}".format( count, retry_no, result[0].succeeded) ) uploaded = True break except Exception as ex: - print("Upload of new document FAILED for count '{0}' in retry no: '{1}': {2}".format(count, retry_no, ex)) + logger.error( + "Upload of new document failed for count '{0}' in retry no: '{1}': {2}".format(count, retry_no, ex)) time.sleep(1.0) retry_no += 1 uploaded = upload_docs(search_client, headers, data_chunk, failed_data_chunks, count, retry_no=retry_no) @@ -160,7 +165,7 @@ def load_index(docs, batch_size=1000): for data_chunk in divide_chunks(docs, batch_size=batch_size): uploaded = upload_docs(search_client, headers, data_chunk, failed_data_chunks, count, retry_no=1) if not uploaded: - print('ERROR: Failed to upload the docs even after 3 retries. Please check error file for more details.') + logger.error('Failed to upload the docs even after 3 retries. Please check error file for more details.') failed_data_chunks.append(data_chunk) time.sleep(1.0) @@ -197,29 +202,29 @@ def add_arguments(self, parser): ) def handle(self, **options): - print('*** Index operations STARTED ({0}) ***'.format(SchoolIndex.Meta.index_name)) + logger.debug('Index operations STARTED ({0})'.format(SchoolIndex.Meta.index_name)) if settings.ENABLE_AZURE_COGNITIVE_SEARCH: country_id = options.get('country_id', False) if options.get('delete_index', False): - print('DELETE_INDEX - START') + logger.info('Delete index - Start') delete_index() if options.get('create_index', False): - print('CREATE_INDEX - START') + logger.info('Create index - Start') create_index() if options.get('clean_index', False): - print('CLEAR_INDEX - START') + logger.info('Clear index - Start') clear_index() if options.get('update_index', False): - print('COLLECT_INDEX_DATA - START') + logger.info('Collect index data - Start') if country_id: data_to_load = collect_data(country_id) if len(data_to_load) > 0: - print('LOAD_INDEX - START - {0}'.format(country_id)) + logger.debug('Load index - Start - {0}'.format(country_id)) load_index(data_to_load, batch_size=10000) else: all_countries = list( @@ -229,7 +234,7 @@ def handle(self, **options): data_to_load = collect_data(country_id) if len(data_to_load) > 0: - print('LOAD_INDEX - START - {0}'.format(country_id)) + logger.debug('Load index - Start - {0}'.format(country_id)) load_index(data_to_load, batch_size=10000) - print('*** Index operations ENDED ({0}) ***'.format(SchoolIndex.Meta.index_name)) + logger.debug('Index operations ENDED ({0})'.format(SchoolIndex.Meta.index_name)) diff --git a/proco/core/management/commands/load_about_us_content.py b/proco/core/management/commands/load_about_us_content.py index fd75e10..65dc505 100644 --- a/proco/core/management/commands/load_about_us_content.py +++ b/proco/core/management/commands/load_about_us_content.py @@ -1,11 +1,14 @@ import sys import traceback +import logging from django.core.management.base import BaseCommand from django.db import transaction from proco.about_us.models import AboutUs +logger = 
logging.getLogger('gigamaps.' + __name__) + about_us_content_json = [ { "text": [ @@ -595,7 +598,7 @@ def load_data_sources_data(): AboutUs.objects.all().delete() sys.stdout.write('\nDelete all old record') except: - print(traceback.format_exc()) + logger.error(traceback.format_exc()) for row_data in about_us_content_json: try: @@ -605,7 +608,7 @@ def load_data_sources_data(): else: sys.stdout.write('\nExisting About Us content updated: {}'.format(instance.__dict__)) except: - print(traceback.format_exc()) + logger.error(traceback.format_exc()) class Command(BaseCommand): diff --git a/proco/core/management/commands/load_country_admin_data.py b/proco/core/management/commands/load_country_admin_data.py index 2bf9fdd..b1a7701 100644 --- a/proco/core/management/commands/load_country_admin_data.py +++ b/proco/core/management/commands/load_country_admin_data.py @@ -1,6 +1,7 @@ import json import os import sys +import logging import numpy as np import pandas as pd @@ -11,6 +12,8 @@ from proco.core import utils as core_utilities from proco.locations.models import Country, CountryAdminMetadata +logger = logging.getLogger('gigamaps.' + __name__) + def is_file(fp): if not os.path.isfile(fp): @@ -43,14 +46,15 @@ def load_admin0_file_data(file_path): csv_required_cols = ['iso31661', 'iso31661alpha3', 'name', 'nameen', 'description', 'centroid', 'bbox', 'mapboxid'] core_utilities.column_normalize(input_df, valid_columns=csv_required_cols) - print('CSV normalized columns: {0}'.format(input_df.columns.tolist())) + logger.debug('CSV normalized columns: {0}.'.format(input_df.columns.tolist())) input_df.drop_duplicates(subset=['iso31661', 'iso31661alpha3'], keep='last', inplace=True) country_codes = dict(Country.objects.all().annotate(code_lower=Lower('code')).values_list('code_lower', 'id')) - country_iso3_codes = dict(Country.objects.all().annotate(iso3_format_lower=Lower('iso3_format')).values_list('iso3_format_lower', 'id')) - print('Country Code mapping: {0}'.format(country_codes)) - print('Country ISO3 Code mapping: {0}'.format(country_iso3_codes)) + country_iso3_codes = dict( + Country.objects.all().annotate(iso3_format_lower=Lower('iso3_format')).values_list('iso3_format_lower', 'id')) + logger.debug('Country code mapping: {0}'.format(country_codes)) + logger.debug('Country ISO3 code mapping: {0}'.format(country_iso3_codes)) input_df['errors'] = None @@ -69,20 +73,20 @@ def load_admin0_file_data(file_path): errors.append('Name field is required') if len(errors) > 0: - print('Errors: ', errors, ', Code: ', row['iso31661'], ', Country Id: ', country_id) + logger.debug('Errors: {0}, Code: {1}, Country Id: {2}'.format(errors, row['iso31661'], country_id)) has_data_errors = True input_df.at[index, 'errors'] = ','.join(errors) - print('Has data errors: {0}'.format(has_data_errors)) + logger.debug('Has data errors: {0}'.format(has_data_errors)) if has_data_errors: error_file = '_errors.'.join(get_file_name_and_extension(file_path)) - print('ERROR: CSV has data errors. Please check the error file, correct it and then start again.' - ' Error file: {0}'.format(error_file)) + logger.error('CSV has data errors. Please check the error file, correct it and then start again.' 
+ ' Error file: {0}'.format(error_file)) input_df.to_csv(error_file, quotechar='"', index=False) return - print('SUCCESS: Validation has passed by the input file.') + logger.info('Success: Validation passed for the input file.') input_df = input_df.replace(np.nan, None) rows = input_df.to_dict(orient='records') @@ -116,16 +120,16 @@ def load_admin1_file_data(file_path): core_utilities.column_normalize(input_df, valid_columns=csv_required_cols) - print('CSV normalized columns: {0}'.format(input_df.columns.tolist())) + logger.debug('CSV normalized columns: {0}'.format(input_df.columns.tolist())) input_df.drop_duplicates(subset=['iso31661', 'iso31661alpha3', 'admin1idgiga'], keep='last', inplace=True) country_codes = dict(Country.objects.all().annotate(code_lower=Lower('code')).values_list('code_lower', 'id')) - print('Country Code mapping: {0}'.format(country_codes)) + logger.debug('Country code mapping: {0}'.format(country_codes)) country_iso3_codes = dict( Country.objects.all().annotate(iso3_format_lower=Lower('iso3_format')).values_list('iso3_format_lower', 'id')) - print('Country ISO3 Code mapping: {0}'.format(country_iso3_codes)) + logger.debug('Country ISO3 code mapping: {0}'.format(country_iso3_codes)) parent_code_vs_id = dict( CountryAdminMetadata.objects.all().filter( @@ -133,7 +137,7 @@ def load_admin1_file_data(file_path): giga_id_admin_lower=Lower('giga_id_admin')).values_list( 'giga_id_admin_lower', 'id'), ) - print('Country Giga ID Code - ID mapping: {0}'.format(parent_code_vs_id)) + logger.debug('Country Giga ID code - ID mapping: {0}'.format(parent_code_vs_id)) input_df['errors'] = None @@ -157,20 +161,20 @@ def load_admin1_file_data(file_path): errors.append('Name field is required') if len(errors) > 0: - print('Errors: ', errors, ', Code: ', row['admin1idgiga'], ', Country Id: ', country_id) + logger.debug('Errors: {0}, Code: {1}, Country Id: {2}'.format(errors, row['admin1idgiga'], country_id)) has_data_errors = True input_df.at[index, 'errors'] = ','.join(errors) - print('Has data errors: {0}'.format(has_data_errors)) + logger.debug('Has data errors: {0}'.format(has_data_errors)) if has_data_errors: error_file = '_errors.'.join(get_file_name_and_extension(file_path)) - print('ERROR: CSV has data errors. Please check the error file, correct it and then start again.' - ' Error file: {0}'.format(error_file)) + logger.error('CSV has data errors. Please check the error file, correct it and then start again.' 
+ ' Error file: {0}'.format(error_file)) input_df.to_csv(error_file, quotechar='"', index=False) return - print('SUCCESS: Validation has passed by the input file.') + logger.info('Success: Validation passed for the input file.') input_df = input_df.replace(np.nan, None) rows = input_df.to_dict(orient='records') @@ -205,14 +209,15 @@ def load_admin2_file_data(file_path): core_utilities.column_normalize(input_df, valid_columns=csv_required_cols) - print('CSV normalized columns: {0}'.format(input_df.columns.tolist())) + logger.debug('CSV normalized columns: {0}'.format(input_df.columns.tolist())) input_df.drop_duplicates(subset=['iso31661', 'iso31661alpha3', 'admin2idgiga'], keep='last', inplace=True) country_codes = dict(Country.objects.all().annotate(code_lower=Lower('code')).values_list('code_lower', 'id')) - country_iso3_codes = dict(Country.objects.all().annotate(iso3_format_lower=Lower('iso3_format')).values_list('iso3_format_lower', 'id')) - print('Country Code mapping: {0}'.format(country_codes)) - print('Country ISO3 Code mapping: {0}'.format(country_iso3_codes)) + country_iso3_codes = dict( + Country.objects.all().annotate(iso3_format_lower=Lower('iso3_format')).values_list('iso3_format_lower', 'id')) + logger.debug('Country code mapping: {0}'.format(country_codes)) + logger.debug('Country ISO3 code mapping: {0}'.format(country_iso3_codes)) parent_code_vs_id = dict( CountryAdminMetadata.objects.all().filter( @@ -220,7 +225,7 @@ def load_admin2_file_data(file_path): giga_id_admin_lower=Lower('giga_id_admin')).values_list( 'giga_id_admin_lower', 'id'), ) - print('Admin1 Giga ID Code - ID mapping: {0}'.format(parent_code_vs_id)) + logger.debug('Admin1 Giga ID code - ID mapping: {0}'.format(parent_code_vs_id)) input_df['errors'] = None @@ -242,20 +247,20 @@ def load_admin2_file_data(file_path): errors.append('Name field is required') if len(errors) > 0: - print('Errors: ', errors, ', Code: ', row['admin2idgiga'], ', Country Id: ', country_id) + logger.error('Errors: {0}, Code: {1}, Country Id: {2}'.format(errors, row['admin2idgiga'], country_id)) has_data_errors = True input_df.at[index, 'errors'] = ','.join(errors) - print('Has data errors: {0}'.format(has_data_errors)) + logger.debug('Has data errors: {0}'.format(has_data_errors)) if has_data_errors: error_file = '_errors.'.join(get_file_name_and_extension(file_path)) - print('ERROR: CSV has data errors. Please check the error file, correct it and then start again.' - ' Error file: {0}'.format(error_file)) + logger.error('CSV has data errors. Please check the error file, correct it and then start again.' 
+ ' Error file: {0}'.format(error_file)) input_df.to_csv(error_file, quotechar='"', index=False) return - print('SUCCESS: Validation has passed by the input file.') + logger.info('Success: Validation passed for the input file.') input_df = input_df.replace(np.nan, None) rows = input_df.to_dict(orient='records') @@ -305,11 +310,11 @@ def handle(self, **options): ['admin0', 'admin1', 'admin2'])) with transaction.atomic(): - print('Loading Admins data for {0} ....'.format(admin_type)) + logger.info('Loading admins data for {0} ....'.format(admin_type)) if admin_type == 'admin0': load_admin0_file_data(input_file) elif admin_type == 'admin1': load_admin1_file_data(input_file) elif admin_type == 'admin2': load_admin2_file_data(input_file) - print('Data loaded successfully!\n') + logger.info('Data loaded successfully!\n') diff --git a/proco/core/management/commands/load_iso3_format_code_for_countries.py b/proco/core/management/commands/load_iso3_format_code_for_countries.py index 141cbf2..0518b39 100644 --- a/proco/core/management/commands/load_iso3_format_code_for_countries.py +++ b/proco/core/management/commands/load_iso3_format_code_for_countries.py @@ -1,5 +1,6 @@ import os import sys +import logging import pandas as pd from django.core.management.base import BaseCommand, CommandError @@ -8,6 +9,8 @@ from proco.core.utils import is_blank_string from proco.locations.models import Country +logger = logging.getLogger('gigamaps.' + __name__) + def is_file(fp): if not os.path.isfile(fp): @@ -28,10 +31,10 @@ def load_data(input_file): Country.objects.filter(code=str(country_code).strip()).update( iso3_format=str(country_iso3_code).strip(), ) else: - print('ERROR: Invalid Country Code/ISO3 Format submitted.') - print(country_code, '\t', country_iso3_code) + logger.error('Invalid country code/ISO3 format submitted.') + logger.error('{0}\t{1}'.format(country_code, country_iso3_code)) except Exception as ex: - print('Error raised for creation: {0}'.format(ex)) + logger.error('Error raised for creation: {0}'.format(ex)) class Command(BaseCommand): diff --git a/proco/core/management/commands/load_system_data_layers.py b/proco/core/management/commands/load_system_data_layers.py index 728d25f..6aa35fa 100644 --- a/proco/core/management/commands/load_system_data_layers.py +++ b/proco/core/management/commands/load_system_data_layers.py @@ -4,7 +4,7 @@ from django.db import transaction from proco.accounts import models as accounts_models -from proco.core.utils import get_current_datetime_object +from proco.core.utils import get_current_datetime_object, normalize_str data_source_json = [ { @@ -228,6 +228,7 @@ download_and_coverage_data_layer_json = [ { + 'code': 'DEFAULT_DOWNLOAD', 'name': 'Download', 'icon': """""", 'description': 'System Download Layer', @@ -268,6 +269,7 @@ ] }, { + 'code': 'DEFAULT_COVERAGE', 'name': 'Coverage data', 'icon': """""", 'description': 'Mobile coverage in the area', @@ -388,6 +390,17 @@ def load_system_data_layers_data(): pass +def populate_data_layer_codes(): + for data_layer_instance in accounts_models.DataLayer.objects.all_records(): + if data_layer_instance.code == 'UNKNOWN': + possible_code = normalize_str(str(data_layer_instance.name)).upper() + count = 1 + while accounts_models.DataLayer.objects.all_records().filter(code=possible_code).exists(): + possible_code = possible_code + '_' + str(count) + data_layer_instance.code = possible_code + data_layer_instance.save(update_fields=('code',)) + + class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument( @@ 
-410,6 +423,11 @@ def add_arguments(self, parser): help='If provided, already created Download/Coverage data layers will be updated again.' ) + parser.add_argument( + '--update_data_layers_code', action='store_true', dest='update_data_layers_code', default=False, + help='If provided, already created data layers will be updated with code picked from name field.' + ) + def handle(self, **options): sys.stdout.write('\nLoading APIs data....') @@ -437,4 +455,7 @@ def handle(self, **options): if options.get('update_data_layers', False): load_system_data_layers_data() + if options.get('update_data_layers_code', False): + populate_data_layer_codes() + sys.stdout.write('\nData loaded successfully!\n') diff --git a/proco/core/management/commands/populate_active_data_layer_for_countries.py b/proco/core/management/commands/populate_active_data_layer_for_countries.py index 257df72..cc33c75 100644 --- a/proco/core/management/commands/populate_active_data_layer_for_countries.py +++ b/proco/core/management/commands/populate_active_data_layer_for_countries.py @@ -1,6 +1,7 @@ # encoding: utf-8 from __future__ import absolute_import, division, print_function, unicode_literals +import logging from django.core.management.base import BaseCommand from proco.accounts import models as accounts_models @@ -9,8 +10,10 @@ from proco.core.utils import get_current_datetime_object from proco.locations.models import Country +logger = logging.getLogger('gigamaps.' + __name__) -def delete_relationships(country_id, layer_id): + +def delete_relationships(country_id, layer_id, excluded_ids): relationships = accounts_models.DataLayerCountryRelationship.objects.all() if country_id: @@ -19,6 +22,9 @@ def delete_relationships(country_id, layer_id): if layer_id: relationships = relationships.filter(data_layer_id=layer_id) + if len(excluded_ids) > 0: + relationships = relationships.exclude(id__in=excluded_ids) + relationships.update(deleted=get_current_datetime_object()) @@ -42,15 +48,11 @@ def add_arguments(self, parser): ) def handle(self, **options): - print('*** Active Data Layer for Country Mapping operations STARTED ***') + logger.info('Active data layer for country mapping operations started.') country_id = options.get('country_id', None) layer_id = options.get('layer_id', None) - - if options.get('reset_mapping', False): - print('DELETE_OLD_RECORDS - START') - delete_relationships(country_id, layer_id) - print('DELETE_OLD_RECORDS - END') + ids_to_keep = [] all_published_layers = accounts_models.DataLayer.objects.all() if layer_id: @@ -62,7 +64,7 @@ def handle(self, **options): all_country_ids = list(Country.objects.all().values_list('id', flat=True).order_by('id')) if all_published_layers.count() > 0 and len(all_country_ids) > 0: - print('RELATIONSHIP_CREATION - START') + logger.info('Relationship creation - start') for data_layer_instance in all_published_layers: data_sources = data_layer_instance.data_sources.all() @@ -101,20 +103,24 @@ def handle(self, **options): label='DataLayerCountryRelationship') for country_id_has_layer_data in all_country_ids_has_layer_data: - relationship_instance, created = accounts_models.DataLayerCountryRelationship.objects.update_or_create( - data_layer=data_layer_instance, - country_id=country_id_has_layer_data['country_id'], - defaults={ - 'is_default': not data_layer_instance.created_by, - 'last_modified_at': get_current_datetime_object(), - }, + relationship_instance, created = ( + accounts_models.DataLayerCountryRelationship.objects.update_or_create( + data_layer=data_layer_instance, + 
country_id=country_id_has_layer_data['country_id'], + defaults={ + # 'is_default': not data_layer_instance.created_by, + 'last_modified_at': get_current_datetime_object(), + }, + ) ) + ids_to_keep.append(relationship_instance.id) if created: - print('New DataLayers + Country Relationship created for LIVE LAYER: {0}'.format( + logger.debug('New dataLayers + country relationship created for live layer: {0}'.format( relationship_instance.__dict__)) else: - print('Existing DataLayers + Country Relationship updated for LIVE LAYER: {0}'.format( - relationship_instance.__dict__)) + logger.debug( + 'Existing dataLayers + country relationship updated for live layer: {0}'.format( + relationship_instance.__dict__)) elif data_layer_instance.type == accounts_models.DataLayer.LAYER_TYPE_STATIC: unknown_condition = '' if parameter_column_type == 'str': @@ -139,19 +145,28 @@ def handle(self, **options): label='DataLayerCountryRelationship') for country_id_has_layer_data in all_country_ids_has_layer_data: - relationship_instance, created = accounts_models.DataLayerCountryRelationship.objects.update_or_create( - data_layer=data_layer_instance, - country_id=country_id_has_layer_data['country_id'], - defaults={ - 'is_default': False, - 'last_modified_at': get_current_datetime_object(), - }, + relationship_instance, created = ( + accounts_models.DataLayerCountryRelationship.objects.update_or_create( + data_layer=data_layer_instance, + country_id=country_id_has_layer_data['country_id'], + defaults={ + 'is_default': False, + 'last_modified_at': get_current_datetime_object(), + }, + ) ) + ids_to_keep.append(relationship_instance.id) if created: - print('New DataLayers + Country Relationship created for STATIC LAYER: {0}'.format( + logger.debug('New dataLayers + country relationship created for static layer: {0}'.format( relationship_instance.__dict__)) else: - print('Existing DataLayers + Country Relationship updated for STATIC LAYER: {0}'.format( - relationship_instance.__dict__)) + logger.debug( + 'Existing dataLayers + country relationship updated for static layer: {0}'.format( + relationship_instance.__dict__)) + + if options.get('reset_mapping', False): + logger.info('Delete records which are not active now - start') + delete_relationships(country_id, layer_id, ids_to_keep) + logger.info('Delete records which are not active now - end') - print('*** Active Data Layer for Country Mapping operations ***') + logger.info('Active data layer for country mapping operations.') diff --git a/proco/core/management/commands/populate_admin_id_fields_to_schools.py b/proco/core/management/commands/populate_admin_id_fields_to_schools.py index 91000c4..a7a5847 100644 --- a/proco/core/management/commands/populate_admin_id_fields_to_schools.py +++ b/proco/core/management/commands/populate_admin_id_fields_to_schools.py @@ -1,15 +1,18 @@ +import sys +import logging from collections import OrderedDict from django.core.management.base import BaseCommand from django.db import connection from django.db import transaction from django.utils import timezone -import sys + +logger = logging.getLogger('gigamaps.' 
+ __name__) @transaction.atomic def create_and_execute_update_query(column, data_dict_list): - print('Executing update statement for: {0} records'.format(len(data_dict_list))) + logger.debug('Executing update statement for: {0} records'.format(len(data_dict_list))) # create update query stmt = "UPDATE schools_school SET {column} = {value} WHERE id = {school_id}" @@ -17,7 +20,7 @@ def create_and_execute_update_query(column, data_dict_list): for data_dict in data_dict_list: update_query = stmt.format(column=column, value=data_dict[column], school_id=data_dict['school_id']) # print('Current record: {}'.format(data_dict)) - print('Current Update Query: {}'.format(update_query)) + logger.debug('Current update query: {}'.format(update_query)) cursor.execute(update_query) @@ -55,8 +58,8 @@ def populate_school_admin1_data(start_school_id, end_school_id): query = query.format(where_condition=where_condition) - print('Getting select statement query result from "schools_with_admin_data" table for Admin1 records.') - print('Query: {}'.format(query)) + logger.info('Getting select statement query result from "schools_with_admin_data" table for Admin1 records.') + logger.debug('Query: {}'.format(query)) data_list = [] with connection.cursor() as cursor: @@ -71,7 +74,7 @@ def populate_school_admin1_data(start_school_id, end_school_id): create_and_execute_update_query('admin1_id', data_list) te = timezone.now() - print('Executed the function in {} seconds'.format((te - ts).seconds)) + logger.debug('Executed the function in {} seconds'.format((te - ts).seconds)) def populate_school_admin2_data(start_school_id, end_school_id): @@ -107,8 +110,8 @@ def populate_school_admin2_data(start_school_id, end_school_id): query = query.format(where_condition=where_condition) - print('Getting select statement query result from "schools_with_admin_data" table for Admin2.') - print('Query: {}'.format(query)) + logger.info('Getting select statement query result from "schools_with_admin_data" table for Admin2.') + logger.debug('Query: {}'.format(query)) data_list = [] with connection.cursor() as cursor: @@ -123,7 +126,7 @@ def populate_school_admin2_data(start_school_id, end_school_id): create_and_execute_update_query('admin2_id', data_list) te = timezone.now() - print('Executed the function in {} seconds'.format((te - ts).seconds)) + logger.debug('Executed the function in {} seconds'.format((te - ts).seconds)) class Command(BaseCommand): @@ -156,7 +159,8 @@ def handle(self, **options): sys.exit("Mandatory argument '--admin-type/-at' is missing. 
Available options: {0}".format( ['admin1', 'admin2', 'both'])) - print('*** School update operation STARTED ({0} - {1}) ***'.format(start_school_id, end_school_id)) + logger.debug('School update operation started for: Start school ID - {0}, End school ID - {1}'.format( + start_school_id, end_school_id)) if admin_type == 'admin1': populate_school_admin1_data(start_school_id, end_school_id) @@ -166,4 +170,4 @@ def handle(self, **options): populate_school_admin1_data(start_school_id, end_school_id) populate_school_admin2_data(start_school_id, end_school_id) - print('Data loaded successfully!\n') + logger.info('Data loaded successfully!\n') diff --git a/proco/core/management/commands/populate_admin_ui_labels.py b/proco/core/management/commands/populate_admin_ui_labels.py index dfb7065..802522d 100644 --- a/proco/core/management/commands/populate_admin_ui_labels.py +++ b/proco/core/management/commands/populate_admin_ui_labels.py @@ -1,15 +1,17 @@ -from collections import OrderedDict +import logging +import sys from django.core.management.base import BaseCommand from django.db import connection from django.db import transaction from django.utils import timezone -import sys + +logger = logging.getLogger('gigamaps.' + __name__) @transaction.atomic def create_and_execute_update_query(stmt): - print('Current Update Query: {}'.format(stmt)) + logger.debug('Current update query: {}'.format(stmt)) with connection.cursor() as cursor: cursor.execute(stmt) @@ -52,7 +54,7 @@ def populate_ui_label_for_admin1_data(country_id, parent_id): create_and_execute_update_query(query) te = timezone.now() - print('Executed the function in {} seconds'.format((te - ts).seconds)) + logger.debug('Executed the function in {} seconds'.format((te - ts).seconds)) def populate_ui_label_for_admin2_data(country_id, parent_id): @@ -92,7 +94,7 @@ def populate_ui_label_for_admin2_data(country_id, parent_id): create_and_execute_update_query(query) te = timezone.now() - print('Executed the function in {} seconds'.format((te - ts).seconds)) + logger.debug('Executed the function in {} seconds'.format((te - ts).seconds)) class Command(BaseCommand): @@ -133,7 +135,7 @@ def handle(self, **options): country_id = options.get('country_id') parent_id = options.get('parent_id') - print('*** Admin update operation STARTED ({0} - {1}) ***'.format(country_id, parent_id)) + logger.debug('Admin update operation started ({0} - {1})'.format(country_id, parent_id)) if admin_type == 'admin1': populate_ui_label_for_admin1_data(country_id, parent_id) @@ -143,4 +145,4 @@ def handle(self, **options): populate_ui_label_for_admin1_data(country_id, parent_id) populate_ui_label_for_admin2_data(country_id, parent_id) - print('Data loaded successfully!\n') + logger.info('Data loaded successfully!\n') diff --git a/proco/core/management/commands/populate_school_new_fields.py b/proco/core/management/commands/populate_school_new_fields.py index dabf8e5..dc8b969 100644 --- a/proco/core/management/commands/populate_school_new_fields.py +++ b/proco/core/management/commands/populate_school_new_fields.py @@ -1,3 +1,5 @@ +import logging + from django.core.management.base import BaseCommand from django.db import transaction from django.db.models import Prefetch @@ -7,8 +9,10 @@ from proco.schools import utils as school_utilities from proco.schools.models import School +logger = logging.getLogger('gigamaps.' 
+ __name__) + -def populate_school_new_fields(school_id, start_school_id, end_school_id, country_id): +def populate_school_new_fields(school_id, start_school_id, end_school_id, country_id, school_ids): """ """ schools_qry = School.objects.all() @@ -16,18 +20,21 @@ def populate_school_new_fields(school_id, start_school_id, end_school_id, countr Prefetch('country', Country.objects.defer('geometry', 'geometry_simplified')), ) if school_id and isinstance(school_id, int): - schools_qry = schools_qry.filter(id=school_id,) + schools_qry = schools_qry.filter(id=school_id, ) if start_school_id: - schools_qry = schools_qry.filter(id__gte=start_school_id,) + schools_qry = schools_qry.filter(id__gte=start_school_id, ) if end_school_id: - schools_qry = schools_qry.filter(id__lte=end_school_id,) + schools_qry = schools_qry.filter(id__lte=end_school_id, ) if country_id: - schools_qry = schools_qry.filter(country_id=country_id,) + schools_qry = schools_qry.filter(country_id=country_id, ) + + if school_ids and len(school_ids) > 0: + schools_qry = schools_qry.filter(id__in=school_ids.split(',')) - print('Starting the process: ', schools_qry.query) + logger.debug('Starting the process: {}'.format(schools_qry.query)) count = 1 for data_chunk in core_utilities.queryset_iterator(schools_qry, chunk_size=20000): with transaction.atomic(): @@ -37,9 +44,9 @@ def populate_school_new_fields(school_id, start_school_id, end_school_id, countr school.connectivity_status = school_utilities.get_connectivity_status_by_master_api(school) school.save(update_fields=['coverage_type', 'coverage_status', 'connectivity_status']) - print("Update on school records SUCCEEDED for count '{0}'".format(count)) + logger.debug("Update on school records succeeded for count '{0}'".format(count)) count += 1 - print('Completed the process.') + logger.info('Completed the process.') class Command(BaseCommand): @@ -64,13 +71,18 @@ def add_arguments(self, parser): '-country_id', dest='country_id', required=False, type=int, help='Pass the Country ID in case want to control the update.' ) + parser.add_argument( + '-school_ids', dest='school_ids', required=False, type=str, + help='Pass the School IDs in case want to control the update.' + ) def handle(self, **options): school_id = options.get('school_id') + school_ids = options.get('school_ids') start_school_id = options.get('start_school_id') end_school_id = options.get('end_school_id') country_id = options.get('country_id') - print('*** School update operation STARTED ({0}) ***'.format(school_id)) + logger.debug('School update operation started ({0})'.format(options)) - populate_school_new_fields(school_id, start_school_id, end_school_id, country_id) + populate_school_new_fields(school_id, start_school_id, end_school_id, country_id, school_ids) diff --git a/proco/core/management/commands/populate_school_registration_data.py b/proco/core/management/commands/populate_school_registration_data.py index 29561ef..c51a6e7 100644 --- a/proco/core/management/commands/populate_school_registration_data.py +++ b/proco/core/management/commands/populate_school_registration_data.py @@ -1,3 +1,4 @@ +import logging from collections import OrderedDict from django.core.management.base import BaseCommand @@ -7,6 +8,8 @@ from proco.core.utils import get_current_datetime_object from proco.connection_statistics import models as statistics_models +logger = logging.getLogger('gigamaps.' 
+ __name__) + def delete_relationships(country_id, school_id): relationships = statistics_models.SchoolRealTimeRegistration.objects.all() @@ -38,11 +41,11 @@ def create_and_execute_insert_query(table_columns, insert_statement_list): ) insert_ts = timezone.now() - print('Executing bulk insert for: {0} records'.format(len(insert_statement_list))) - print(insert_statement) + logger.debug('Executing bulk insert for total: {} records'.format(len(insert_statement_list))) + logger.debug(insert_statement) cursor.executemany(insert_statement, insert_statement_list) insert_te = timezone.now() - print('bulk insert time is {} second'.format((insert_te - insert_ts).seconds)) + logger.debug('Bulk insert time is "{}" second'.format((insert_te - insert_ts).seconds)) def populate_school_registration_data(country_id, school_id): @@ -72,8 +75,8 @@ def populate_school_registration_data(country_id, school_id): query += f' AND school.country_id = {country_id}' with connection.cursor() as cursor: - print('getting select statement query result from School + SchoolDailyStatus tables') - print('Query: {}'.format(query)) + logger.debug('Getting select statement query result from School + SchoolDailyStatus tables') + logger.debug('Query: {}'.format(query)) cursor.execute(query) description = cursor.description @@ -94,7 +97,7 @@ def populate_school_registration_data(country_id, school_id): create_and_execute_insert_query(table_columns, insert_statement_list) te = timezone.now() - print('Executed the function in {} seconds'.format((te - ts).seconds)) + logger.debug('Executed the function in "{}" seconds'.format((te - ts).seconds)) return current_rows_num, last_processed_id @@ -122,12 +125,10 @@ def handle(self, **options): school_id = options.get('school_id') if options.get('reset_mapping', False): - print('DELETE_OLD_RECORDS - START') + logger.debug('Starting deleting old records.') delete_relationships(country_id, school_id) - print('DELETE_OLD_RECORDS - END') - - print('*** School Registration update operation STARTED ({0}) ***'.format(options)) + logger.debug('Deleted old records.') + logger.info('School Registration update operation STARTED ({0})'.format(options)) populate_school_registration_data(country_id, school_id) - - print('*** School Registration update operation ENDED ({0}) ***'.format(options)) + logger.info('School Registration update operation ENDED ({0})'.format(options)) diff --git a/proco/core/management/commands/redo_aggregations.py b/proco/core/management/commands/redo_aggregations.py index ce07a8f..c208c64 100644 --- a/proco/core/management/commands/redo_aggregations.py +++ b/proco/core/management/commands/redo_aggregations.py @@ -1,4 +1,5 @@ import datetime +import logging from django.core.management.base import BaseCommand @@ -12,6 +13,8 @@ from proco.utils import dates as date_utilities from proco.connection_statistics.models import SchoolWeeklyStatus +logger = logging.getLogger('gigamaps.' 
+ __name__) + def get_date_list(year, week_no): if week_no: @@ -72,8 +75,8 @@ def add_arguments(self, parser): ) def handle(self, **options): - print('Executing "redo_aggregations" utility ....\n') - print('Options: {}\n\n'.format(options)) + logger.info('Executing redo aggregations utility.\n') + logger.debug('Options: {}\n\n'.format(options)) country_id = options.get('country_id', None) country = Country.objects.get(id=country_id) @@ -85,21 +88,21 @@ def handle(self, **options): monday_date_list = list(get_all_monday_dates(dates_list)) if options.get('update_school_weekly'): - print('Performing School Weekly Aggregations for date range: {0} - {1}'.format( + logger.debug('Performing school weekly aggregations for date range: {0} - {1}'.format( monday_date_list[0], monday_date_list[-1])) for monday_date in monday_date_list: aggregate_school_daily_status_to_school_weekly_status(country, monday_date) - print('Completed School Weekly Aggregations.\n\n') + logger.info('Completed school weekly aggregations.\n\n') if options.get('update_country_daily'): - print('Performing Country Daily Aggregations for date range: {0} - {1}'.format( + logger.debug('Performing country daily aggregations for date range: {0} - {1}'.format( dates_list[0], dates_list[-1])) for date in dates_list: aggregate_school_daily_to_country_daily(country, date) - print('Completed Country Daily Aggregations.\n\n') + logger.info('Completed country daily aggregations.\n\n') if options.get('update_country_weekly'): - print('Performing Country Weekly Aggregations for date range: {0} - {1}'.format( + logger.debug('Performing country weekly aggregations for date range: {0} - {1}'.format( monday_date_list[0], monday_date_list[-1])) for monday_date in monday_date_list: monday_week_no = date_utilities.get_week_from_date(monday_date) @@ -108,10 +111,10 @@ def handle(self, **options): ).exists(): update_country_weekly_status(country, monday_date) else: - print('Country Weekly Aggregations skipped as School Weekly has no record for same data:' - ' Year - {0}, Week No - {1}'.format(year, monday_week_no)) - print('Completed Country Weekly Aggregations.\n\n') + logger.debug('Country weekly aggregations skipped as school weekly has no record for same data:' + ' Year - {0}, Week No - {1}'.format(year, monday_week_no)) + logger.info('Completed country weekly aggregations.\n\n') country.invalidate_country_related_cache() - print('Completed "redo_aggregations" successfully ....\n') + logger.info('Completed redo aggregations successfully.\n') diff --git a/proco/core/management/commands/update_system_role_permissions.py b/proco/core/management/commands/update_system_role_permissions.py index 35a88b4..2247a79 100644 --- a/proco/core/management/commands/update_system_role_permissions.py +++ b/proco/core/management/commands/update_system_role_permissions.py @@ -1,4 +1,5 @@ # encoding: utf-8 +import logging from collections import OrderedDict @@ -7,6 +8,8 @@ from proco.custom_auth.models import Role, RolePermission +logger = logging.getLogger('gigamaps.' + __name__) + role_permissions = OrderedDict({ Role.SYSTEM_ROLE_NAME_ADMIN: [perm[0] for perm in RolePermission.PERMISSION_CHOICES], Role.SYSTEM_ROLE_NAME_READ_ONLY: [RolePermission.CAN_DELETE_API_KEY, ], @@ -34,7 +37,7 @@ class Command(BaseCommand): help = "Update the System Role's permissions as it can not be updated from GUI." 
def handle(self, **options): - print('*** System role update operation STARTED ***') + logger.info('System role update operation started.') with transaction.atomic(): populate_role_permissions() - print('*** System role update operation ENDED ***') + logger.info('System role update operation ended.') diff --git a/proco/core/mixins.py b/proco/core/mixins.py index 021207a..c2e2694 100644 --- a/proco/core/mixins.py +++ b/proco/core/mixins.py @@ -61,12 +61,7 @@ def perform_pre_checks(self, request, *args, **kwargs): if core_utilities.is_blank_string(api_key): raise core_exceptions.RequiredAPIKeyFilterError() - # api = headers.get('Api') - # if core_utilities.is_blank_string(api): - # raise core_exceptions.RequiredAPIFilterError() - valid_api_key = accounts_models.APIKey.objects.filter( - # api=api, user=request.user, # Check if API key is created by the current user api_key=api_key, # Check the API key in database table status=accounts_models.APIKey.APPROVED, # API Key must be APPROVED to enable the download/documentation diff --git a/proco/core/resources/filters.json b/proco/core/resources/filters.json new file mode 100644 index 0000000..4886229 --- /dev/null +++ b/proco/core/resources/filters.json @@ -0,0 +1,83 @@ +[ + { + "name": "School area type", + "type": "drop-down", + "description": "Urban or rural region in which school is located", + "choices": [ + { + "label": "Urban", + "value": "urban" + }, + { + "label": "Rural", + "value": "rural" + }, + { + "label": "None", + "value": "none" + } + ], + "parameter": { + "label": "Region", + "table": "schools", + "field": "environment", + "filter": "iexact" + }, + "active_countries_filter": "LOWER(environment) IN ('urban', 'rural')", + "active_countries_list": null + }, + { + "name": "School funding source", + "type": "drop-down", + "description": "", + "choices": [ + { + "label": "Private", + "value": "private" + }, + { + "label": "Public", + "value": "public" + }, + { + "label": "None", + "value": "none" + } + ], + "parameter": { + "label": "School Type", + "table": "schools", + "field": "school_type", + "filter": "iexact" + }, + "active_countries_filter": "LOWER(school_type) IN ('private', 'public')", + "active_countries_list": null + }, + { + "name": "Education level", + "type": "drop-down", + "description": "Highest level of education taught at the school", + "choices": [ + { + "label": "Primary", + "value": "primary" + }, + { + "label": "Secondary", + "value": "secondary" + }, + { + "label": "None", + "value": "none" + } + ], + "parameter": { + "label": "Education Level", + "table": "schools", + "field": "education_level", + "filter": "iexact" + }, + "active_countries_filter": "LOWER(education_level) IN ('primary', 'secondary')", + "active_countries_list": null + } +] \ No newline at end of file diff --git a/proco/core/tests/test_utils.py b/proco/core/tests/test_utils.py index f6bdb67..8947b82 100644 --- a/proco/core/tests/test_utils.py +++ b/proco/core/tests/test_utils.py @@ -85,11 +85,6 @@ def test_get_random_choice_utility(self): self.assertIn(core_utilities.get_random_choice(['aa', 'bb', 'cc', 'dd']), ['aa', 'bb', 'cc', 'dd']) - def test_get_sender_email_utility(self): - self.assertEqual(type(core_utilities.get_sender_email()), str) - - self.assertIsNotNone(core_utilities.get_sender_email()) - def test_get_support_email_utility(self): self.assertEqual(type(core_utilities.get_support_email()), str) diff --git a/proco/core/utils.py b/proco/core/utils.py index 86de8a5..82d0d7c 100644 --- a/proco/core/utils.py +++ b/proco/core/utils.py @@ 
-1,8 +1,8 @@ import gc -import hashlib import locale -import random +import logging import re +import secrets from decimal import Decimal import pytz @@ -11,6 +11,8 @@ from proco.core.config import app_config as core_config +logger = logging.getLogger('gigamaps.' + __name__) + def get_timezone_converted_value(value, tz=settings.TIME_ZONE): """ @@ -18,7 +20,7 @@ def get_timezone_converted_value(value, tz=settings.TIME_ZONE): Method to convert the timezone of the datetime field value :param tz: timezone :param value: DateTime instance - :return: DateTime instance + :return: """ response_timezone = pytz.timezone(tz) return value.astimezone(response_timezone) @@ -88,11 +90,10 @@ def is_blank_string(val): """Check if the given string is empty.""" if val is None: return True - elif type(val) != str: - return False - else: + elif isinstance(val, str): attr = val.strip().lower() return len(attr) == 0 + return False def sanitize_str(val): @@ -114,16 +115,8 @@ def normalize_str(val): def get_random_string(length=264, allowed_chars='abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789._*#'): """ Return a securely generated random string. - - The default length of 12 with the a-z, A-Z, 0-9 character set returns - a 71-bit value. log_2((26+26+10)^12) =~ 71 bits """ - random.seed( - hashlib.sha256( - ('%s%s%s' % (random.getstate(), get_current_datetime_object, settings.SECRET_KEY)).encode() - ).digest() - ) - return ''.join(random.choice(allowed_chars) for i in range(length)) + return ''.join(secrets.choice(allowed_chars) for _ in range(length)) def format_decimal_data(value): @@ -183,17 +176,7 @@ def get_footer_copyright(): def get_random_choice(choices): """ Accepts a list of choices and return the randomly chosen choice. """ - return random.choice(choices) - - -def get_sender_email(): - """ - get email id for sending emails - :return: - """ - emails = settings.DEFAULT_FROM_EMAIL - email_options = emails.split(',') - return get_random_choice(email_options) + return secrets.choice(choices) def get_support_email(): @@ -202,8 +185,10 @@ def get_support_email(): :return: """ emails = settings.SUPPORT_EMAIL_ID - email_options = emails.split(',') - return get_random_choice(email_options) + if len(emails) > 0: + email_options = emails.split(',') + return get_random_choice(email_options) + return '' def get_project_title(): @@ -233,7 +218,7 @@ def queryset_iterator(queryset, chunk_size=1000, print_msg=True): Note that the implementation of the iterator does not support ordered query sets. 
""" if not queryset: - print('Queryset has not data to iterate over: {0}'.format(queryset.query)) + logger.debug('Queryset has not data to iterate over: {0}'.format(queryset.query)) return list(queryset) pk = 0 @@ -241,7 +226,7 @@ def queryset_iterator(queryset, chunk_size=1000, print_msg=True): queryset = queryset.order_by('pk') while pk < last_pk: if print_msg: - print('Current selection query: {0}'.format(queryset.filter(pk__gt=pk)[:chunk_size].query)) + logger.debug('Current selection query: {0}'.format(queryset.filter(pk__gt=pk)[:chunk_size].query)) row = list(queryset.filter(pk__gt=pk)[:chunk_size]) pk = row[-1].pk yield row @@ -256,7 +241,7 @@ def column_normalize(data_df, valid_columns=None): :param data_df: data frame :param valid_columns: list - list of supported normalized column names, if empty/None keep all columns otherwise remove missing columns from data frame - :return: data frame + :return: """ _columns = dict() _to_delete = [] @@ -291,7 +276,7 @@ def bulk_create_or_update(records, model, unique_fields, batch_size=1000): # This is where we delegate our records to our split lists: # - if the record already exists in the DB (the 'id' primary key), add it to the update list. - # - Otherwise, add it to the create list. + # - Otherwise, add it to the creation list. [ records_to_update.append(record) if record['id'] is not None @@ -300,7 +285,7 @@ def bulk_create_or_update(records, model, unique_fields, batch_size=1000): ] if len(records_to_create) > 0: - print('Total records to create: {}'.format(len(records_to_create))) + logger.debug('Total records to create: {}'.format(len(records_to_create))) # Remove the 'id' field, as these will all hold a value of None, # since these records do not already exist in the DB [record.pop('id') for record in records_to_create] @@ -310,7 +295,7 @@ def bulk_create_or_update(records, model, unique_fields, batch_size=1000): ) if len(records_to_update) > 0: - print('Total records to update: {}'.format(len(records_to_update))) + logger.debug('Total records to update: {}'.format(len(records_to_update))) for f in unique_fields: [record.pop(f) for record in records_to_update] @@ -323,3 +308,82 @@ def bulk_create_or_update(records, model, unique_fields, batch_size=1000): batch_size=batch_size, ) + +def get_filter_sql(request, filter_key, table_name): + filter_fields = core_config.get_giga_filter_fields.get(filter_key, []) + query_params = request.query_params.dict() + + advance_filters = set(filter_fields) & set(query_params.keys()) + + sql_list = [] + for field_filter in advance_filters: + filter_value = str(query_params[field_filter]).lower() + sql_str = None + field_name = None + + if field_filter.endswith('__iexact'): + field_name = field_filter.replace('__iexact', '') + + if filter_value == 'none': + sql_str = """coalesce(TRIM({table_name}."{field_name}"), '') = ''""" + else: + sql_str = """LOWER({table_name}."{field_name}") = '{value}'""" + elif field_filter.endswith('__on'): + field_name = field_filter.replace('__on', '') + + if filter_value == 'none': + sql_str = """{table_name}."{field_name}" IS NULL""" + else: + sql_str = """{table_name}."{field_name}" = {value}""" + elif field_filter.endswith('__range'): + field_name = field_filter.replace('__range', '') + + start, end = filter_value.split(',') + if start != 'null': + sql_list.append("""{table_name}."{field_name}" >= {value}""".format( + table_name=table_name, + field_name=field_name, + value=start, + )) + if end != 'null': + sql_list.append("""{table_name}."{field_name}" <= 
{value}""".format( + table_name=table_name, + field_name=field_name, + value=end, + )) + elif field_filter.endswith('__none_range'): + field_name = field_filter.replace('__none_range', '') + none_sql_str = """{table_name}."{field_name}" IS NULL""".format( + table_name=table_name, + field_name=field_name, + ) + + start, end = filter_value.split(',') + range_sql_list = [] + if start != 'null': + range_sql_list.append("""{table_name}."{field_name}" >= {value}""".format( + table_name=table_name, + field_name=field_name, + value=start, + )) + if end != 'null': + range_sql_list.append("""{table_name}."{field_name}" <= {value}""".format( + table_name=table_name, + field_name=field_name, + value=end, + )) + if len(range_sql_list) == 0: + sql_list.append(none_sql_str) + elif len(range_sql_list) == 1: + sql_list.append('(' + none_sql_str + ' OR ' + range_sql_list[0] + ')') + elif len(range_sql_list) == 2: + sql_list.append('(' + none_sql_str + ' OR (' + range_sql_list[0] + ' AND ' + range_sql_list[1] + '))') + elif field_filter.endswith('__in'): + field_name = field_filter.replace('__in', '') + filter_value = ','.join(["'" + str(f).lower() + "'" for f in filter_value.split(',')]) + sql_str = """LOWER({table_name}."{field_name}") IN ({value})""" + + if sql_str: + sql_list.append(sql_str.format(table_name=table_name, field_name=field_name, value=filter_value)) + + return ' AND '.join(sql_list) diff --git a/proco/custom_auth/authentication.py b/proco/custom_auth/authentication.py index 4790464..c105b00 100644 --- a/proco/custom_auth/authentication.py +++ b/proco/custom_auth/authentication.py @@ -1,18 +1,18 @@ +import logging + from django.contrib.auth import get_user_model from django.utils.translation import ugettext as _ from rest_framework import exceptions from rest_framework_jwt import authentication as jwt_authentication from rest_framework_jwt.settings import api_settings -from proco.custom_auth.models import Role from proco.core.utils import is_blank_string +from proco.custom_auth.models import Role +logger = logging.getLogger('gigamaps.' + __name__) jwt_decode_handler = api_settings.JWT_DECODE_HANDLER -# jwt_get_username_from_payload = api_settings.JWT_PAYLOAD_GET_USERNAME_HANDLER - - class JSONWebTokenAuthentication(jwt_authentication.JSONWebTokenAuthentication): """ Token based authentication using the JSON Web Token standard. 
@@ -20,13 +20,6 @@ class JSONWebTokenAuthentication(jwt_authentication.JSONWebTokenAuthentication): def authenticate(self, request): user_model = get_user_model() - # user = user_model.objects.filter(email='admin@test.com').first() - # payload = jwt_serializers.jwt_payload_handler(user) - # token = jwt_serializers.jwt_encode_handler(payload) - # payload2 = jwt_decode_handler(token) - # header_token_decoded = jwt_decode_handler(jwt_token) - # username_from_token = jwt_get_username_from_payload(header_token_decoded) - try: return super().authenticate(request) except exceptions.AuthenticationFailed as ex: @@ -59,5 +52,5 @@ def authenticate(self, request): except user_model.DoesNotExist: msg = _('Invalid signatures.') e = exceptions.AuthenticationFailed(msg) - print(str(e)) + logger.debug(str(e)) raise e diff --git a/proco/custom_auth/migrations/0014_deleted_unused_historical_models.py b/proco/custom_auth/migrations/0014_deleted_unused_historical_models.py new file mode 100644 index 0000000..0984c5d --- /dev/null +++ b/proco/custom_auth/migrations/0014_deleted_unused_historical_models.py @@ -0,0 +1,47 @@ +# Generated by Django 2.2.28 on 2024-07-09 08:43 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('custom_auth', '0013_updated_applicationuser_username_max_length_same_as_email'), + ] + + operations = [ + migrations.RemoveField( + model_name='historicaluserrolerelationship', + name='created_by', + ), + migrations.RemoveField( + model_name='historicaluserrolerelationship', + name='history_user', + ), + migrations.RemoveField( + model_name='historicaluserrolerelationship', + name='last_modified_by', + ), + migrations.RemoveField( + model_name='historicaluserrolerelationship', + name='role', + ), + migrations.RemoveField( + model_name='historicaluserrolerelationship', + name='user', + ), + migrations.AlterModelOptions( + name='rolepermission', + options={}, + ), + migrations.AlterModelOptions( + name='userrolerelationship', + options={}, + ), + migrations.DeleteModel( + name='HistoricalRolePermission', + ), + migrations.DeleteModel( + name='HistoricalUserRoleRelationship', + ), + ] diff --git a/proco/custom_auth/models.py b/proco/custom_auth/models.py index 6971615..9248769 100644 --- a/proco/custom_auth/models.py +++ b/proco/custom_auth/models.py @@ -28,7 +28,7 @@ class ApplicationUser(core_models.BaseModelMixin, AbstractBaseUser): validators=[ validators.RegexValidator( r'^[\w.@+-]+$', - _('Enter a valid username. This value may contain only letters, numbers ' 'and @/./+/-/_ characters.'), + _('Enter a valid username. This value may contain only letters, numbers and @/./+/-/_ characters.'), ), ], error_messages={ @@ -95,9 +95,6 @@ def get_user_permissions(self, user): :type user: custom_auth.models.User :returns perms: Dictionary of permissions """ - # attr = USER_PERMISSIONS_KEY.format(user.id) - # perms = cache_utils.get_or_set_cache(attr, lambda: self.calculate_user_permissions(user)) - # return perms return self.calculate_user_permissions(user) def calculate_user_permissions(self, user): @@ -175,7 +172,7 @@ def permission_slugs(self): return self.permissions.all().values_list('slug', flat=True) -class UserRoleRelationship(core_models.BaseModel): +class UserRoleRelationship(core_models.BaseModelMixin): """ UserRoleRelationship This model is used to store the user roles. 
@@ -192,7 +189,7 @@ def perm_dict(self): return role_perms -class RolePermission(core_models.BaseModel): +class RolePermission(core_models.BaseModelMixin): """ RolePermission diff --git a/proco/custom_auth/serializers.py b/proco/custom_auth/serializers.py index 6fd239d..013bfcc 100644 --- a/proco/custom_auth/serializers.py +++ b/proco/custom_auth/serializers.py @@ -1,11 +1,9 @@ import re +import logging -from django.contrib.auth import get_user_model from django.db import transaction -from django.utils.translation import ugettext as _ from rest_flex_fields.serializers import FlexFieldsModelSerializer from rest_framework import serializers -from rest_framework_jwt import serializers as jwt_serializers from proco.core import utils as core_utilities from proco.custom_auth import exceptions as auth_exceptions @@ -13,6 +11,8 @@ from proco.custom_auth import utils as auth_utilities from proco.custom_auth.config import app_config as auth_config +logger = logging.getLogger('gigamaps.' + __name__) + class RoleSerializer(serializers.ModelSerializer): """ @@ -70,7 +70,7 @@ def validate_name(self, name): return name raise auth_exceptions.InvalidRoleNameError() - def _validate_custom_role_count_error(self, data): + def _validate_custom_role_count_error(self): max_role_count = auth_config.custom_role_count_limit custom_role = auth_models.Role.objects.filter(category='custom') @@ -105,7 +105,7 @@ def validate(self, data): # If reference role is provided then copy all the permissions as well data['permissions'] = reference_role.permissions - self._validate_custom_role_count_error(data) + self._validate_custom_role_count_error() self._validate_unique_role_name(data) return data @@ -262,11 +262,11 @@ def validate_email(self, email): email_lower = email.lower() if auth_models.ApplicationUser.objects.filter(email=email_lower).exists(): e = auth_exceptions.EmailAlreadyExistsError() - print(e.message) - print('Details: {0}'.format(email_lower)) + logger.error(e.message) + logger.debug('Details: {0}'.format(email_lower)) raise e - print('Email validated') - print('Details: {0}'.format(email_lower)) + logger.info('Email validated.') + logger.debug('Details: {0}'.format(email_lower)) return email_lower def get_role_fields(self): @@ -288,7 +288,6 @@ def get_role_fields(self): def to_representation(self, user): user_role = user.get_roles() - # role_fields = self.get_role_fields() role_serializer = RoleSerializer(instance=user_role) setattr(user, 'role', role_serializer.data) return super().to_representation(user) @@ -307,29 +306,16 @@ class Meta: 'role', ) - # def _check_for_custom_role(self, role): - # - # if role.category == auth_models.Role.ROLE_CATEGORY_CUSTOM: - # - # is_super_user = self.instance and self.instance.user.is_superuser - # - # # Cannot assign custom role to a superuser. - # if is_super_user: - # raise auth_exceptions.CannotAssignCustomRoleToSuperuserUser() - def validate_role(self, role): """ Method to validate new role of the user. """ - if role: - # self._check_for_custom_role(role) - # In case of create user role relationship. - if self.instance: - # In case of updating the user role relationship, the new role should not be same - # as that of the existing role. - if self.instance.role.id != role.id: - return role - raise auth_exceptions.InvalidRoleError() + if role and self.instance: + # In case of updating the user role relationship, the new role should not be same + # as that of the existing role. 
+ if self.instance.role.id != role.id: + return role + raise auth_exceptions.InvalidRoleError() return role @@ -437,23 +423,11 @@ class Meta(BaseUserSerializer.Meta): 'role', ) - # def _check_for_custom_role(self, role): - # - # if role.category == auth_models.Role.ROLE_CATEGORY_CUSTOM: - # - # is_super_user = self.instance and self.instance.is_superuser - # - # # Cannot assign custom role to a superuser. - # if is_super_user: - # raise auth_exceptions.CannotAssignCustomRoleToSuperuserUser() - def validate_role(self, new_role): """ Method to validate new role of the user. """ if new_role and self.instance: - # self._check_for_custom_role(role) - # In case of updating the user role relationship, the new role should not be same # as that of the existing role. user_existing_role = self.instance.get_roles() diff --git a/proco/custom_auth/templates/base.html b/proco/custom_auth/templates/base.html deleted file mode 100644 index a76450a..0000000 --- a/proco/custom_auth/templates/base.html +++ /dev/null @@ -1,11 +0,0 @@ - - - - {% block title %}test_project{% endblock %} - - - - -{% block content %}{% endblock %} - - diff --git a/proco/custom_auth/templates/email/email_base.html b/proco/custom_auth/templates/email/email_base.html deleted file mode 100644 index 711c408..0000000 --- a/proco/custom_auth/templates/email/email_base.html +++ /dev/null @@ -1,77 +0,0 @@ -{% load static %} - -{% block html %} - - - - - - - - - - - - -
- - - - - - - - - - - - -
- - Logo -
- {% block content %}{% endblock %} -
-

- - - -

-
- Copyright © {% now 'Y' %},
- All rights reserved.
- Terms of Service | - Privacy Policy | -
-
-
- - -{% endblock %} diff --git a/proco/custom_auth/tests/test_api.py b/proco/custom_auth/tests/test_api.py index d859823..fff1b67 100755 --- a/proco/custom_auth/tests/test_api.py +++ b/proco/custom_auth/tests/test_api.py @@ -19,7 +19,7 @@ def custom_auth_url(url_params, query_param, view_name='create-and-list-users'): class UserApiTestCase(TestAPIViewSetMixin, TestCase): - databases = ['default'] + databases = ['default',] @classmethod def setUpTestData(cls): diff --git a/proco/custom_auth/tests/test_utils.py b/proco/custom_auth/tests/test_utils.py index 69c7a0c..313c95c 100644 --- a/proco/custom_auth/tests/test_utils.py +++ b/proco/custom_auth/tests/test_utils.py @@ -38,7 +38,7 @@ def setup_read_only_role(): role = auth_models.Role.objects.create(name=auth_models.Role.SYSTEM_ROLE_NAME_READ_ONLY, category=auth_models.Role.ROLE_CATEGORY_SYSTEM) - perms = [] + perms = [auth_models.RolePermission.CAN_DELETE_API_KEY, ] for perm in perms: auth_models.RolePermission.objects.get_or_create( @@ -58,7 +58,13 @@ def setup_admin_user_by_role(): password = 'SomeRandomPass96' user = auth_models.ApplicationUser.objects.filter(username=email).first() if not user: - user = auth_models.ApplicationUser.objects.create_user(username=email, password=password) + user = auth_models.ApplicationUser.objects.create_user( + username=email, + email=email, + password=password, + first_name='Admin', + last_name='User', + ) admin_role = setup_admin_role() auth_models.UserRoleRelationship.objects.create(user=user, role=admin_role) @@ -71,7 +77,13 @@ def setup_read_only_user_by_role(): password = 'SomeRandomPass96' user = auth_models.ApplicationUser.objects.filter(username=email).first() if not user: - user = auth_models.ApplicationUser.objects.create_user(username=email, password=password) + user = auth_models.ApplicationUser.objects.create_user( + username=email, + email=email, + password=password, + first_name='Read Only', + last_name='User', + ) read_only_role = setup_read_only_role() auth_models.UserRoleRelationship.objects.create(user=user, role=read_only_role) diff --git a/proco/custom_auth/utils.py b/proco/custom_auth/utils.py index b0add6b..cfd5e0c 100644 --- a/proco/custom_auth/utils.py +++ b/proco/custom_auth/utils.py @@ -1,3 +1,4 @@ +import logging from datetime import datetime, timedelta import jwt @@ -9,6 +10,8 @@ from proco.core import utils as core_utilities from proco.custom_auth import models as auth_models +logger = logging.getLogger('gigamaps.' 
+ __name__) + def jwt_get_username_from_payload_handler(payload): """ @@ -84,7 +87,7 @@ def jwt_decode_handler(token): """ try: token_header = jwt.get_unverified_header(token) - print('Token header: {0}'.format(token_header)) + logger.debug('Token header: {0}'.format(token_header)) payload = jwt.decode( token, @@ -93,7 +96,7 @@ def jwt_decode_handler(token): algorithms=[token_header.get('alg')], options={'verify_signature': False} ) - print('Token as decoded payload: {0}'.format(payload)) + logger.debug('Token as decoded payload: {0}'.format(payload)) validate_azure_ad_b2c_token(payload) except jwt.ExpiredSignature: msg = _('Signature has expired.') diff --git a/proco/dailycheckapp_contact/__init__.py b/proco/dailycheckapp_contact/__init__.py deleted file mode 100644 index 3ae615e..0000000 --- a/proco/dailycheckapp_contact/__init__.py +++ /dev/null @@ -1 +0,0 @@ -default_app_config = 'proco.dailycheckapp_contact.apps.DailyCheckAppContactConfig' diff --git a/proco/dailycheckapp_contact/admin.py b/proco/dailycheckapp_contact/admin.py deleted file mode 100644 index 0e76926..0000000 --- a/proco/dailycheckapp_contact/admin.py +++ /dev/null @@ -1,16 +0,0 @@ -from django.contrib import admin - -from proco.dailycheckapp_contact.models import ContactMessage - - -class ContactMessageAdmin(admin.ModelAdmin): - list_display = ('firstname', 'lastname', 'school_id', 'email', 'created', 'message') - - def has_add_permission(self, request): - return False - - def has_change_permission(self, request, obj=None): - return False - - -admin.site.register(ContactMessage, ContactMessageAdmin) diff --git a/proco/dailycheckapp_contact/api.py b/proco/dailycheckapp_contact/api.py deleted file mode 100644 index 62d320f..0000000 --- a/proco/dailycheckapp_contact/api.py +++ /dev/null @@ -1,9 +0,0 @@ -from rest_framework import permissions -from rest_framework.generics import CreateAPIView - -from proco.dailycheckapp_contact.serializers import DailyCheckAppContactSerializer - - -class DailyCheckAppContactAPIView(CreateAPIView): - permission_classes = (permissions.AllowAny,) - serializer_class = DailyCheckAppContactSerializer diff --git a/proco/dailycheckapp_contact/api_urls.py b/proco/dailycheckapp_contact/api_urls.py deleted file mode 100644 index 47ec2c9..0000000 --- a/proco/dailycheckapp_contact/api_urls.py +++ /dev/null @@ -1,9 +0,0 @@ -from django.urls import path - -from proco.dailycheckapp_contact import api - -app_name = 'dailycheckapp_contact' - -urlpatterns = [ - path('dailycheckapp_contact/', api.DailyCheckAppContactAPIView.as_view(), name='dailycheckapp_contact'), -] diff --git a/proco/dailycheckapp_contact/apps.py b/proco/dailycheckapp_contact/apps.py deleted file mode 100644 index bfb9840..0000000 --- a/proco/dailycheckapp_contact/apps.py +++ /dev/null @@ -1,9 +0,0 @@ -from django.apps import AppConfig - - -class DailyCheckAppContactConfig(AppConfig): - name = 'proco.dailycheckapp_contact' - verbose_name = 'Daily Check App Contact' - - def ready(self): - from proco.dailycheckapp_contact import receivers # NOQA diff --git a/proco/dailycheckapp_contact/migrations/0001_initial.py b/proco/dailycheckapp_contact/migrations/0001_initial.py deleted file mode 100644 index a85f8e8..0000000 --- a/proco/dailycheckapp_contact/migrations/0001_initial.py +++ /dev/null @@ -1,34 +0,0 @@ -# Generated by Django 2.2.17 on 2020-12-01 10:29 - -from django.db import migrations, models -import django.utils.timezone -import model_utils.fields - - -class Migration(migrations.Migration): - - initial = True - - dependencies = [ - ] - 
- operations = [ - migrations.CreateModel( - name='ContactMessage', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')), - ('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')), - ('firstname', models.CharField(max_length=256)), - ('lastname', models.CharField(max_length=256)), - ('school_id', models.CharField(max_length=256)), - ('email', models.CharField(max_length=256)), - ('message', models.TextField()), - ], - options={ - # 'abstract': False, - 'db_table': 'dailycheckapp_contact_contactmessages', - # 'managed': False, - }, - ), - ] diff --git a/proco/dailycheckapp_contact/migrations/__init__.py b/proco/dailycheckapp_contact/migrations/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/proco/dailycheckapp_contact/models.py b/proco/dailycheckapp_contact/models.py deleted file mode 100644 index 71c0ec6..0000000 --- a/proco/dailycheckapp_contact/models.py +++ /dev/null @@ -1,17 +0,0 @@ -from django.db import models - -from model_utils.models import TimeStampedModel - - -class ContactMessage(TimeStampedModel, models.Model): - firstname = models.CharField(max_length=256) - lastname = models.CharField(max_length=256) - school_id = models.CharField(max_length=256) - email = models.CharField(max_length=256) - message = models.TextField() - - class Meta: - db_table = 'dailycheckapp_contact_contactmessages' - - def __str__(self): - return 'Message from: {0} ({1})'.format(self.firstname, self.created) diff --git a/proco/dailycheckapp_contact/receivers.py b/proco/dailycheckapp_contact/receivers.py deleted file mode 100644 index 0f27b76..0000000 --- a/proco/dailycheckapp_contact/receivers.py +++ /dev/null @@ -1,18 +0,0 @@ -from django.conf import settings -from django.db.models.signals import post_save -from django.dispatch import receiver - -from constance import config -from templated_email import send_templated_mail - -from proco.dailycheckapp_contact.models import ContactMessage - - -@receiver(post_save, sender=ContactMessage) -def send_email_notification(instance, created=False, **kwargs): - if created and config.DAILYCHECKAPP_CONTACT_EMAIL: - send_templated_mail( - '/dailycheckapp_contact_email', settings.DEFAULT_FROM_EMAIL, [config.DAILYCHECKAPP_CONTACT_EMAIL], context={ - 'contact_message': instance, - }, - ) diff --git a/proco/dailycheckapp_contact/serializers.py b/proco/dailycheckapp_contact/serializers.py deleted file mode 100644 index ef353fb..0000000 --- a/proco/dailycheckapp_contact/serializers.py +++ /dev/null @@ -1,9 +0,0 @@ -from rest_framework import serializers - -from proco.dailycheckapp_contact.models import ContactMessage - - -class DailyCheckAppContactSerializer(serializers.ModelSerializer): - class Meta: - model = ContactMessage - fields = ('firstname', 'lastname', 'school_id', 'email', 'message') diff --git a/proco/data_sources/api.py b/proco/data_sources/api.py index bdbb590..f9a55bc 100644 --- a/proco/data_sources/api.py +++ b/proco/data_sources/api.py @@ -66,7 +66,6 @@ def get(self, request, *args, **kwargs): sources_tasks.load_data_from_qos_apis() date = QoSData.objects.all().values_list('date', flat=True).order_by('-date').first() - print('Latest date from QoSData: {}'.format(date)) countries_ids = QoSData.objects.all().values_list('country_id', flat=True).order_by('country_id').distinct( 
'country_id') diff --git a/proco/data_sources/exceptions.py b/proco/data_sources/exceptions.py index 9dad954..01b1607 100644 --- a/proco/data_sources/exceptions.py +++ b/proco/data_sources/exceptions.py @@ -33,7 +33,6 @@ def __init__(self, **extra): class InvalidSchoolMasterDataRowStatusAtUpdateError(BaseInvalidValidationError): message = _('Invalid School Master Data row status at update: DB status: "{old}", Requested status: "{new}"') - # description = _('"PUBLISHED" school master data row can not be updated.') code = 'invalid_school_master_data_row_status' @@ -47,4 +46,3 @@ class ZeroSchoolMasterDataRowError(BaseInvalidValidationError): message = _('Zero School Master Data row to update.') description = _('Zero rows to update.') code = 'invalid_school_master_data_row_count' - diff --git a/proco/data_sources/management/commands/data_loss_recovery_for_pcdc.py b/proco/data_sources/management/commands/data_loss_recovery_for_pcdc.py index ffe3b42..cae118b 100644 --- a/proco/data_sources/management/commands/data_loss_recovery_for_pcdc.py +++ b/proco/data_sources/management/commands/data_loss_recovery_for_pcdc.py @@ -1,3 +1,5 @@ +import logging + from datetime import timedelta from django.conf import settings @@ -16,6 +18,8 @@ from proco.schools.models import School from proco.utils import dates as date_utilities +logger = logging.getLogger('gigamaps.' + __name__) + ds_settings = settings.DATA_SOURCE_CONFIG today_date = get_current_datetime_object().date() @@ -27,22 +31,22 @@ def check_missing_dates_to_table(date_list): timestamp__date__lte=date_list[-1], ).values_list('timestamp__date', flat=True).distinct('timestamp__date').order_by('timestamp__date') - print('Missing dates are between {0} - {1}: '.format(date_list[0], date_list[-1])) + logger.debug('Missing dates are between {0} - {1}: '.format(date_list[0], date_list[-1])) missing_dates = list(set(date_list) - set(list(pcdc_timestamp_qry))) for missing_date in sorted(missing_dates): # print missing date in string format - print(date_utilities.format_date(missing_date)) + logger.debug(date_utilities.format_date(missing_date)) def delete_dailycheckapp_realtime_data(date): - print('Deleting all the PCDC rows only, from "RealTimeConnectivity" Data Table for date: {0}'.format(date)) + logger.debug('Deleting all the PCDC rows only, from "RealTimeConnectivity" data table for date: {0}'.format(date)) RealTimeConnectivity.objects.filter( created__date=date, live_data_source=statistics_configs.DAILY_CHECK_APP_MLAB_SOURCE, ).delete() - print('Deleting all the rows from "DailyCheckAppMeasurementData" Data Table for date: {0}'.format(date)) + logger.debug('Deleting all the rows from "DailyCheckAppMeasurementData" data table for date: {0}'.format(date)) DailyCheckAppMeasurementData.objects.filter(timestamp__date=date).delete() @@ -98,13 +102,13 @@ def add_arguments(self, parser): ) def handle(self, **options): - print('Executing "data_loss_recovery_for_pcdc" ....') + logger.info('Executing data loss recovery for pcdc.') check_missing_dates = options.get('check_missing_dates') start_date = date_utilities.to_date(options.get('start_date')) end_date = date_utilities.to_date(options.get('end_date')) if start_date > end_date: - print('ERROR: start_date value can not be greater than end_date.') + logger.error('Start date value can not be greater than end_date.') exit(0) date_list = sorted([(start_date + timedelta(days=x)).date() for x in range((end_date - start_date).days)] + [ @@ -119,16 +123,16 @@ def handle(self, **options): if pull_data and 
pull_data_date: pull_data_date = pull_data_date.date() - print('Deleting PCDC data for date: {}'.format(pull_data_date)) + logger.debug('Deleting PCDC data for date: {}'.format(pull_data_date)) delete_dailycheckapp_realtime_data(pull_data_date) - print('Data deleted successfully.\n\n') + logger.debug('Data deleted successfully.\n\n') - print('Syncing the PCDC API data to Proco PCDC table for date: {}'.format(pull_data_date)) + logger.debug('Syncing the PCDC api data to proco PCDC table for date: {}'.format(pull_data_date)) sync_dailycheckapp_realtime_data(pull_data_date) - print('Data synced successfully.\n\n') + logger.debug('Data synced successfully.\n\n') - print('Aggregating the pulled data by giga_id_school + country_code and ' - 'storing in RealTimeConnectivity table.') + logger.debug('Aggregating the pulled data by giga_id_school + country_code and ' + 'storing in RealTimeConnectivity table.') dailycheckapp_measurements = DailyCheckAppMeasurementData.objects.filter( timestamp__date=pull_data_date, ).filter( @@ -144,8 +148,8 @@ def handle(self, **options): ).order_by('country_code', 'giga_id_school', 'school_id', 'source') if not dailycheckapp_measurements.exists(): - print('ERROR: No records to aggregate on provided date: "{0}". ' - 'Hence stopping the execution here.'.format(pull_data_date)) + logger.error('No records to aggregate on provided date: "{0}". ' + 'Hence stopping the execution here.'.format(pull_data_date)) return realtime = [] @@ -154,7 +158,7 @@ def handle(self, **options): 'country_code', flat=True, ).order_by('country_code')) for country_code in countries: - print('Current Country Code: {}'.format(country_code)) + logger.debug('Current country code: {}'.format(country_code)) if country_code: country = Country.objects.filter(code=country_code).first() else: @@ -175,7 +179,7 @@ def handle(self, **options): school.giga_id_school: school for school in schools_qs.filter(giga_id_school__in=dcm_giga_ids) } - print('Total schools in DailyCheckApp: {0}, Successfully mapped schools: {1}'.format( + logger.debug('Total schools in dailycheckapp: {0}, Successfully mapped schools: {1}'.format( len(dcm_giga_ids), len(dcm_schools))) mlab_school_ids = set(dailycheckapp_measurements.filter( @@ -189,7 +193,7 @@ def handle(self, **options): school.external_id: school for school in schools_qs.filter(external_id__in=mlab_school_ids) } - print('Total schools in MLab: {0}, Successfully mapped schools: {1}'.format( + logger.debug('Total schools in MLab: {0}, Successfully mapped schools: {1}'.format( len(mlab_school_ids), len(mlab_schools))) unknown_schools = [] @@ -228,21 +232,22 @@ def handle(self, **options): )) if len(realtime) == 5000: - print('Loading the data to "RealTimeConnectivity" table as it has reached 5000 benchmark.') + logger.debug( + 'Loading the data to "RealTimeConnectivity" table as it has reached 5000 benchmark.') RealTimeConnectivity.objects.bulk_create(realtime) realtime = [] if len(unknown_schools) > 0: - print('Skipped dailycheckapp_measurement for country: "{0}" unknown school: {1}'.format( + logger.debug('Skipped dailycheckapp measurement for country: "{0}" unknown school: {1}'.format( country_code, unknown_schools)) - print('Loading the remaining ({0}) data to "RealTimeConnectivity" table.'.format(len(realtime))) + logger.debug('Loading the remaining ({0}) data to "RealTimeConnectivity" table.'.format(len(realtime))) if len(realtime) > 0: RealTimeConnectivity.objects.bulk_create(realtime) - print('Aggregated successfully to RealTimeConnectivity table.\n\n') + 
logger.debug('Aggregated successfully to RealTimeConnectivity table.\n\n') - print('Starting finalizing the records to actual proco tables.') + logger.debug('Starting finalizing the records to actual proco tables.') countries_ids = RealTimeConnectivity.objects.filter( created__date=pull_data_date, live_data_source=statistics_configs.DAILY_CHECK_APP_MLAB_SOURCE, @@ -256,12 +261,12 @@ def handle(self, **options): monday_week_no = date_utilities.get_week_from_date(monday_date) monday_year = date_utilities.get_year_from_date(monday_date) - print('Weekly record details. \tWeek No: {0}\tYear: {1}'.format(monday_week_no, monday_year)) + logger.debug('Weekly record details. \tWeek No: {0}\tYear: {1}'.format(monday_week_no, monday_year)) for country_id in countries_ids: - print('Finalizing the records for Country ID: {0}'.format(country_id)) + logger.debug('Finalizing the records for Country ID: {0}'.format(country_id)) finalize_previous_day_data(None, country_id, pull_data_date) - print('Finalized records successfully to actual proco tables.\n\n') + logger.info('Finalized records successfully to actual proco tables.\n\n') - print('Completed "data_loss_recovery_for_pcdc" successfully ....\n') + logger.info('Completed dataloss recovery for pcdc successfully.\n') diff --git a/proco/data_sources/management/commands/data_loss_recovery_for_qos.py b/proco/data_sources/management/commands/data_loss_recovery_for_qos.py index b858d04..5ac8c31 100644 --- a/proco/data_sources/management/commands/data_loss_recovery_for_qos.py +++ b/proco/data_sources/management/commands/data_loss_recovery_for_qos.py @@ -1,5 +1,6 @@ import json import os +import logging from datetime import timedelta import delta_sharing @@ -18,6 +19,8 @@ from proco.schools.models import School from proco.utils import dates as date_utilities +logger = logging.getLogger('gigamaps.' + __name__) + ds_settings = settings.DATA_SOURCE_CONFIG today_date = get_current_datetime_object().date() @@ -62,18 +65,19 @@ def load_qos_data_source_response_to_model(version_number, country): if country.iso3_format != table_name: continue - print('#' * 10) + logger.debug('#' * 10) try: if QoSData.objects.all().filter( country=country, version=version_number, ).exists(): - print('WARNING: QoSData table has given version data already in the table. ' - 'To re collect, please clean this version data first then retry again.' - 'Country Code: {0}, \t\tVersion: {1}'.format(table_name, version_number)) + logger.warning('QoSData table has given version data already in the table. ' + 'To re collect, please clean this version data first then retry again.' + 'Country code: {0}, \t\tVersion: {1}'.format(table_name, version_number)) continue - print('Current version data not available in the table. Hence fetching the data from QoS API.') + logger.info( + 'Current version data not available in the table. Hence fetching the data from QoS api.') # Create an url to access a shared table. # A table path is the profile file path following with `#` and the fully qualified name of a table @@ -85,11 +89,11 @@ def load_qos_data_source_response_to_model(version_number, country): ) api_current_version = delta_sharing.get_table_version(table_url) - print('Current version from API: {0}'.format(api_current_version)) + logger.debug('Current version from api: {0}'.format(api_current_version)) if version_number > api_current_version: - print('ERROR: Given version must not be higher then latest API version. 
' - 'Hence skipping current data pull.') + logger.error('Given version must not be higher then latest api version. ' + 'Hence skipping current data pull.') exit(0) loaded_data_df = delta_sharing.load_table_changes_as_pandas( @@ -99,13 +103,13 @@ def load_qos_data_source_response_to_model(version_number, country): None, None, ) - print('Total count of rows in the {0} version data: {1}'.format( + logger.debug('Total count of rows in the {0} version data: {1}'.format( version_number, len(loaded_data_df))) loaded_data_df = loaded_data_df[loaded_data_df[DeltaSharingReader._change_type_col_name()].isin( ['insert', 'update_postimage'])] - print('Total count of rows after filtering only ["insert", "update_postimage"] in the "{0}" ' - 'version data: {1}'.format(version_number, len(loaded_data_df))) + logger.info('Total count of rows after filtering only ["insert", "update_postimage"] in the "{0}" ' + 'version data: {1}'.format(version_number, len(loaded_data_df))) if len(loaded_data_df) > 0: insert_entries = [] @@ -117,8 +121,8 @@ def load_qos_data_source_response_to_model(version_number, country): 'modified', 'school_id', 'country_id', 'modified_by', ] - print('All QoS API response columns: {}'.format(df_columns)) - print('All QoS API response columns to delete: {}'.format( + logger.debug('All QoS api response columns: {}'.format(df_columns)) + logger.debug('All QoS api response columns to delete: {}'.format( list(set(df_columns) - set(qos_model_fields)))) loaded_data_df.drop(columns=cols_to_delete, inplace=True, errors='ignore', ) @@ -134,8 +138,8 @@ def load_qos_data_source_response_to_model(version_number, country): ).first() if not school: - print('ERROR: School with Giga ID ({0}) not found in PROCO DB. ' - 'Hence skipping the load for current school.'.format(row['school_id_giga'])) + logger.error('School with giga ID ({0}) not found in proco db. Hence skipping the ' + 'load for current school.'.format(row['school_id_giga'])) continue row['school'] = school @@ -147,35 +151,35 @@ def load_qos_data_source_response_to_model(version_number, country): version__gt=version_number, ) if duplicate_higher_version_records.exists(): - print('ERROR: Higher version for same School ID and Timestamp already exists. ' - 'Hence skipping the update for current row.') + logger.error('Higher version for same school ID and timestamp already exists. 
' + 'Hence skipping the update for current row.') qos_instance = duplicate_higher_version_records.first() - print('School ID: {0},\tTimestamp: {1},\tCurrent Version: {2},\t' - 'Higher Version: {3}'.format(qos_instance.school_id, qos_instance.timestamp, - version_number, qos_instance.version)) + logger.debug('School ID: {0},\tTimestamp: {1},\tCurrent Version: {2},\t' + 'Higher Version: {3}'.format(qos_instance.school_id, + qos_instance.timestamp, + version_number, qos_instance.version)) continue insert_entries.append(row_as_dict) if len(insert_entries) == 5000: - print('Loading the data to "QoSData" table as it has reached 5000 benchmark.') + logger.info('Loading the data to "QoSData" table as it has reached 5000 benchmark.') bulk_create_or_update(insert_entries, QoSData, ['school', 'timestamp']) insert_entries = [] - print('#' * 10) - print('\n\n') + logger.debug('#\n' * 10) - print('Loading the remaining ({0}) data to "QoSData" table.'.format(len(insert_entries))) + logger.debug('Loading the remaining ({0}) data to "QoSData" table.'.format(len(insert_entries))) if len(insert_entries) > 0: bulk_create_or_update(insert_entries, QoSData, ['school', 'timestamp']) else: - print('INFO: No data to update in current table: {0}.'.format(table_name)) + logger.debug('No data to update in current table: {0}.'.format(table_name)) except Exception as ex: - print('ERROR: Exception caught for "{0}": {1}'.format(schema_table.name, str(ex))) + logger.error('Exception caught for "{0}": {1}'.format(schema_table.name, str(ex))) else: - print('ERROR: QoS schema ({0}) does not exist to use for share ({1}).'.format(schema_name, share_name)) + logger.error('QoS schema ({0}) does not exist to use for share ({1}).'.format(schema_name, share_name)) exit(0) else: - print('ERROR: QoS share ({0}) does not exist to use.'.format(share_name)) + logger.error('QoS share ({0}) does not exist to use.'.format(share_name)) exit(0) @@ -199,10 +203,10 @@ def sync_qos_realtime_data(date, country): ).order_by('school') if not qos_measurements.exists(): - print('ERROR: No records to aggregate on provided date: "{0}". Hence skipping for the given date.'.format(date)) + logger.debug('No records to aggregate on provided date: "{0}". 
Hence skipping for the given date.'.format(date)) return - print('Migrating the records from "QoSData" to "RealTimeConnectivity" with date: {0} '.format(date)) + logger.debug('Migrating the records from "QoSData" to "RealTimeConnectivity" with date: {0} '.format(date)) realtime = [] @@ -244,11 +248,11 @@ def sync_qos_realtime_data(date, country): )) if len(realtime) == 5000: - print('Loading the data to "RealTimeConnectivity" table as it has reached 5000 benchmark.') + logger.debug('Loading the data to "RealTimeConnectivity" table as it has reached 5000 benchmark.') RealTimeConnectivity.objects.bulk_create(realtime) realtime = [] - print('Loading the remaining ({0}) data to "RealTimeConnectivity" table.'.format(len(realtime))) + logger.debug('Loading the remaining ({0}) data to "RealTimeConnectivity" table.'.format(len(realtime))) if len(realtime) > 0: RealTimeConnectivity.objects.bulk_create(realtime) @@ -265,7 +269,7 @@ def get_latest_api_version(country_code=None): if qos_schema: schema_tables = client.list_tables(qos_schema) - print('\nAll tables ready to access: {0}'.format(schema_tables)) + logger.debug('\nAll tables ready to access: {0}'.format(schema_tables)) for schema_table in schema_tables: table_name = schema_table.name @@ -284,11 +288,12 @@ def get_latest_api_version(country_code=None): ) table_current_version = delta_sharing.get_table_version(table_url) - print('Country "{0}" current version from API: {1}\n'.format(table_name, table_current_version)) + logger.debug( + 'Country "{0}" current version from API: {1}\n'.format(table_name, table_current_version)) version_for_countries[table_name] = table_current_version except Exception as ex: - print('ERROR: Exception caught for "{0}": {1}\n'.format(table_name, str(ex))) + logger.error('Exception caught for "{0}": {1}\n'.format(table_name, str(ex))) return version_for_countries @@ -320,9 +325,10 @@ def check_missing_versions_from_table(country_code=None): missing_version_list = list(set(must_version_list) - set(versions_list)) - print('Missing versions details for country "{0}" are: \n\tStart Version from DB: {1}' - '\n\tEnd Version from API: {2}' - '\n\tmissing versions: {3}\n'.format(country_iso_code, start_version, end_version, missing_version_list)) + logger.debug('Missing versions details for country "{0}" are: \n\tStart version from DB: {1}' + '\n\tEnd version from API: {2}' + '\n\tMissing versions: {3}\n'.format(country_iso_code, start_version, end_version, + missing_version_list)) class Command(BaseCommand): @@ -374,7 +380,7 @@ def add_arguments(self, parser): ) def handle(self, **options): - print('Executing "data_loss_recovery_for_QoS" ....\n') + logger.info('Executing data loss recovery for QoS.\n') check_missing_versions = options.get('check_missing_versions') country_iso3_format = options.get('country_iso3_format') @@ -382,36 +388,36 @@ def handle(self, **options): country = None if country_iso3_format: country = Country.objects.filter(iso3_format=country_iso3_format).first() - print('Country object: {0}'.format(country)) + logger.debug('Country object: {0}'.format(country)) if not country: - print('ERROR: Country with ISO3 Format ({0}) not found in PROCO DB. ' - 'Hence stopping the load.'.format(country_iso3_format)) + logger.error('Country with ISO3 format ({0}) not found in proco db. 
' + 'Hence stopping the load.'.format(country_iso3_format)) exit(0) if check_missing_versions: - print('\n*** Checking the missing versions ***') + logger.info('\nChecking the missing versions.') check_missing_versions_from_table(country_code=country_iso3_format) - print('*** Checking the missing versions action completed successfully ***\n') + logger.debug('Checking the missing versions action completed successfully.\n') pull_data = options.get('pull_data') if pull_data: if not country: - print('ERROR: Country Code is mandatory to pull the data.' - ' Please pass required parameters as: -country_code=\n') + logger.error('Country code is mandatory to pull the data.' + ' Please pass required parameters as: -country_code=\n') exit(0) pull_start_version = options.get('pull_start_version') pull_end_version = options.get('pull_end_version') if pull_start_version and pull_end_version and pull_start_version <= pull_end_version: - print('\n*** Loading the API data to "data_sources_qosdata" table ***\n') + logger.debug('\nLoading the api data to "data_sources_qosdata" table.\n') for version_number in range(pull_start_version, pull_end_version + 1): load_qos_data_source_response_to_model(version_number, country) - print('\n*** Data load completed successfully ***\n') + logger.info('\nData load completed successfully.\n') else: - print('ERROR: Please provide valid required parameters as:' - ' -pull_start_version= -pull_end_version=\n') + logger.error('Please provide valid required parameters as:' + ' -pull_start_version= -pull_end_version=\n') exit(0) try: @@ -422,8 +428,8 @@ def handle(self, **options): aggregate_data = options.get('aggregate_data') if aggregate_data: if not country: - print('ERROR: Country Code is mandatory to aggregate the data.' - ' Please pass required parameters as: -country_code=') + logger.error('Country code is mandatory to aggregate the data.' - ' Please pass required parameters as: -country_code=') exit(0) aggregate_start_version = options.get('aggregate_start_version') @@ -439,26 +445,27 @@ def handle(self, **options): date_list_from_versions = qos_queryset.order_by('timestamp__date').values_list( 'timestamp__date', flat=True).distinct('timestamp__date') - print('date_list_from_versions: {0}'.format(date_list_from_versions)) + logger.debug('Date list from versions: {0}'.format(date_list_from_versions)) for pull_data_date in date_list_from_versions: - print('\nSyncing the "data_sources_qosdata" data to "connection_statistics_realtimeconnectivity" ' - 'for date: {0}'.format(pull_data_date)) + logger.debug( + '\nSyncing the "data_sources_qosdata" data to "connection_statistics_realtimeconnectivity" ' + 'for date: {0}'.format(pull_data_date)) sync_qos_realtime_data(pull_data_date, country) - print('Data synced successfully.\n\n') + logger.debug('Data synced successfully.\n\n') - print('Starting finalizing the records to actual proco tables.') + logger.debug('Starting finalizing the records to actual proco tables.') monday_date = pull_data_date - timedelta(days=pull_data_date.weekday()) monday_week_no = date_utilities.get_week_from_date(monday_date) monday_year = date_utilities.get_year_from_date(monday_date) - print('Weekly record details. \tWeek No: {0}\tYear: {1}'.format(monday_week_no, monday_year)) + logger.debug('Weekly record details. 
\tWeek No: {0}\tYear: {1}'.format(monday_week_no, monday_year)) - print('\n\nFinalizing the records for Country ID: {0}'.format(country.id)) + logger.debug('\n\nFinalizing the records for country ID: {0}'.format(country.id)) finalize_previous_day_data(None, country.id, pull_data_date) - print('Finalized records successfully to actual proco tables.\n\n') + logger.debug('Finalized records successfully to actual proco tables.\n\n') else: - print('ERROR: Please pass required parameters as:' - ' -pull_start_version= -pull_end_version=') + logger.error('Please pass required parameters as:' + ' -pull_start_version= -pull_end_version=') - print('Completed "data_loss_recovery_for_qos" successfully ....\n') + logger.info('Completed data loss recovery for qos successfully.\n') exit(0) diff --git a/proco/data_sources/migrations/0012_added_deleted_published_status.py b/proco/data_sources/migrations/0012_added_deleted_published_status.py old mode 100644 new mode 100755 diff --git a/proco/data_sources/models.py b/proco/data_sources/models.py index 29b77dc..27cb6f5 100644 --- a/proco/data_sources/models.py +++ b/proco/data_sources/models.py @@ -98,8 +98,6 @@ class SchoolMasterData(TimeStampedModel, models.Model): # connectivity_govt_collection_year connectivity_govt_collection_year = models.PositiveSmallIntegerField(blank=True, default=None, null=True) disputed_region = models.CharField(blank=True, null=True, max_length=255) # disputed_region - # nearest_NR_id = models.CharField(blank=True, null=True, max_length=255) - # connectivity_static = models.CharField(blank=True, null=True, max_length=255) # SchoolRealTimeRegistration connectivity_RT = models.CharField(blank=True, null=True, max_length=255) # rt_registered diff --git a/proco/data_sources/serializers.py b/proco/data_sources/serializers.py index 2211897..21a184f 100644 --- a/proco/data_sources/serializers.py +++ b/proco/data_sources/serializers.py @@ -171,8 +171,8 @@ def get_modified_fields(self, row): } old_education_level = None \ - if core_utilities.is_blank_string(row.school.education_level) else str( - row.school.education_level).lower() + if core_utilities.is_blank_string(row.school.education_level) \ + else str(row.school.education_level).lower() new_education_level = None \ if core_utilities.is_blank_string(row.education_level) else str(row.education_level).lower() @@ -361,7 +361,7 @@ class Meta: list_serializer_class = UpdateListSerializer - def _validate_status(self, instance, validated_data): + def _validate_status(self, instance): if instance.status in [ sources_models.SchoolMasterData.ROW_STATUS_UPDATED_IN_DRAFT, sources_models.SchoolMasterData.ROW_STATUS_PUBLISHED, @@ -375,7 +375,6 @@ def _validate_status(self, instance, validated_data): raise source_exceptions.InvalidSchoolMasterDataRowStatusError(message_kwargs=message_kwargs) def delete_all_related_rows(self, instance): - print('Deleting all the row for same school giga id: {0}'.format(instance.school_id_giga)) sources_models.SchoolMasterData.objects.filter( school_id_giga=instance.school_id_giga, ).exclude( @@ -400,7 +399,7 @@ def update(self, instance, validated_data): validated_data['status'] = sources_models.SchoolMasterData.ROW_STATUS_DELETED_PUBLISHED else: validated_data['status'] = sources_models.SchoolMasterData.ROW_STATUS_PUBLISHED - self._validate_status(instance, validated_data) + self._validate_status(instance) validated_data['published_at'] = core_utilities.get_current_datetime_object() request_user = core_utilities.get_current_user(context=self.context) diff --git 
a/proco/data_sources/tasks.py b/proco/data_sources/tasks.py index fb9c34b..df89c97 100644 --- a/proco/data_sources/tasks.py +++ b/proco/data_sources/tasks.py @@ -1,16 +1,18 @@ import json +import logging import os +import uuid from datetime import timedelta -from celery import chain, chord, group +from celery import chain, chord, group, current_task from django.conf import settings from django.contrib.gis.geos import Point -from django.core.cache import cache from django.db.models import Count from django.db.utils import DataError from requests.exceptions import HTTPError from proco.accounts import utils as account_utilities +from proco.background import utils as background_task_utilities from proco.connection_statistics import models as statistics_models from proco.connection_statistics.utils import ( aggregate_real_time_data_to_school_daily_status, @@ -28,6 +30,9 @@ from proco.schools.models import School from proco.taskapp import app from proco.utils.dates import format_date +from proco.utils.tasks import populate_school_new_fields_task + +logger = logging.getLogger('gigamaps.' + __name__) @app.task @@ -35,13 +40,13 @@ def finalize_task(): return 'Done' -def load_data_from_school_master_apis(*args, country_iso3_format=None): +def load_data_from_school_master_apis(country_iso3_format=None): """ Background task which handles School Master Data source changes from APIs to PROCO DB Execution Frequency: Once in a week """ - print('***** Loading the School Master Data to PROCO DB *****') + logger.info('Starting loading the school master data from API to DB.') errors = [] ds_settings = settings.DATA_SOURCE_CONFIG.get('SCHOOL_MASTER') @@ -77,20 +82,20 @@ def load_data_from_school_master_apis(*args, country_iso3_format=None): if school_master_schema: schema_tables = client.list_tables(school_master_schema) - print('All tables ready to access: {0}'.format(schema_tables)) + logger.debug('All tables ready to access: {0}'.format(schema_tables)) school_master_fields = [f.name for f in sources_models.SchoolMasterData._meta.get_fields()] for schema_table in schema_tables: - print('#' * 10) - print('Table: %s', schema_table) + logger.debug('#' * 10) + logger.debug('Table: %s', schema_table) if country_iso3_format and country_iso3_format != schema_table.name: continue if len(country_codes_for_exclusion) > 0 and schema_table.name in country_codes_for_exclusion: - print('WARNING: Country with ISO3 Format ({0}) configured to exclude from School Master data pull. ' - 'Hence skipping the load for this country code.'.format(schema_table.name)) + logger.warning('Country with ISO3 Format ({0}) configured to exclude from School Master data pull. 
' + 'Hence skipping the load for this country code.'.format(schema_table.name)) continue try: @@ -98,17 +103,17 @@ def load_data_from_school_master_apis(*args, country_iso3_format=None): profile_file, share_name, schema_name, schema_table.name, changes_for_countries, deleted_schools, school_master_fields) except (HTTPError, DataError, ValueError) as ex: - print('ERROR: Exception caught for "{0}": {1}'.format(schema_table.name, str(ex))) + logger.error('Exception caught for "{0}": {1}'.format(schema_table.name, str(ex))) errors.append('{0} : {1} - {2}'.format(schema_table.name, type(ex).__name__, str(ex))) except Exception as ex: - print('ERROR: Exception caught for "{0}": {1}'.format(schema_table.name, str(ex))) + logger.error('Exception caught for "{0}": {1}'.format(schema_table.name, str(ex))) errors.append('{0} : {1} - {2}'.format(schema_table.name, type(ex).__name__, str(ex))) else: - print('ERROR: School Master schema ({0}) does not exist to use for share ({1}).'.format(schema_name, + logger.error('School Master schema ({0}) does not exist to use for share ({1}).'.format(schema_name, share_name)) else: - print('ERROR: School Master share ({0}) does not exist to use.'.format(share_name)) + logger.error('School Master share ({0}) does not exist to use.'.format(share_name)) try: os.remove(profile_file) @@ -175,7 +180,7 @@ def handle_published_school_master_data_row(published_row=None, country_ids=None Execution Frequency: Every 12 hours """ - print('***** Handling the School Master Data row publish *****') + logger.info('Handling the published school master data rows.') environment_map = { 'urban': 'urban', @@ -187,27 +192,29 @@ def handle_published_school_master_data_row(published_row=None, country_ids=None true_choices = ['true', 'yes', '1'] if country_ids and len(country_ids) > 0: - task_cache_key = 'handle_published_school_master_data_row_status_{current_time}_country_ids_{ids}'.format( + task_key = 'handle_published_school_master_data_row_status_{current_time}_country_ids_{ids}'.format( current_time=format_date(core_utilities.get_current_datetime_object(), frmt='%d%m%Y_%H'), ids='_'.join([str(c_id) for c_id in country_ids]), ) + task_description = 'Handle published school master data rows for countries' elif published_row: - task_cache_key = 'handle_published_school_master_data_row_status_{current_time}_row_id_{ids}'.format( + task_key = 'handle_published_school_master_data_row_status_{current_time}_row_id_{ids}'.format( current_time=format_date(core_utilities.get_current_datetime_object(), frmt='%d%m%Y_%H'), ids=published_row.id, ) + task_description = 'Handle published school master data row for single record' else: - task_cache_key = 'handle_published_school_master_data_row_status_{current_time}'.format( + task_key = 'handle_published_school_master_data_row_status_{current_time}'.format( current_time=format_date(core_utilities.get_current_datetime_object(), frmt='%d%m%Y_%H')) - running_task = cache.get(task_cache_key, None) - - print('***** Before checking the task status in Redis *****') + task_description = 'Handle published school master data rows' - if running_task in [None, b'completed', 'completed'] or published_row: - print('***** Not found running Job *****') - cache.set(task_cache_key, 'running', None) - print('***** After checking the task status in Redis *****') + task_id = current_task.request.id or str(uuid.uuid4()) + task_instance = background_task_utilities.task_on_start( + task_id, task_key, task_description) + if task_instance: + logger.debug('Not found running 
job for published rows handler task: {}'.format(task_key)) + updated_school_ids = [] new_published_records = sources_models.SchoolMasterData.objects.filter( status=sources_models.SchoolMasterData.ROW_STATUS_PUBLISHED, is_read=False, ) @@ -218,11 +225,9 @@ def handle_published_school_master_data_row(published_row=None, country_ids=None if country_ids and len(country_ids) > 0: new_published_records = new_published_records.filter(country_id__in=country_ids) - count = 0 - print('***** Before starting the iteration *****') - for data_chunk in core_utilities.queryset_iterator(new_published_records, chunk_size=10, print_msg=False): - count += 1 - print('***** Iteration No: {} *****'.format(count)) + task_instance.info('Total published records to update: {}'.format(new_published_records.count())) + + for data_chunk in core_utilities.queryset_iterator(new_published_records, chunk_size=100, print_msg=False): for row in data_chunk: try: environment = row.school_area_type.lower() if not core_utilities.is_blank_string( @@ -245,7 +250,7 @@ def handle_published_school_master_data_row(published_row=None, country_ids=None layer_name=CountryAdminMetadata.LAYER_NAME_ADMIN2, ).first() - school, created = School.objects.update_or_create( + school, _ = School.objects.update_or_create( giga_id_school=row.school_id_giga, country=row.country, defaults={ @@ -388,14 +393,20 @@ def handle_published_school_master_data_row(published_row=None, country_ids=None row.is_read = True row.school = school row.save() + + updated_school_ids.append(school.id) except Exception as ex: - print('Error reported on publishing: {0}'.format(ex)) - print('Record: {0}'.format(row.__dict__)) + logger.error('Error reported on publishing: {0}'.format(ex)) + logger.error('Record: {0}'.format(row.__dict__)) + task_instance.info('Error reported for ID ({0}) on publishing: {1}'.format(row.id, ex)) + + if len(updated_school_ids) > 0: + for i in range(0, len(updated_school_ids), 20): + populate_school_new_fields_task.delay(None, None, None, school_ids=updated_school_ids[i:i + 20]) - cache.set(task_cache_key, 'completed', None) + background_task_utilities.task_on_complete(task_instance) else: - print('***** Found running Job with "{0}" name so skipping current iteration *****'.format(task_cache_key)) - # TODO: Handle cache clean + logger.error('Found running Job with "{0}" name so skipping current iteration'.format(task_key)) @app.task(soft_time_limit=2 * 55 * 60, time_limit=2 * 55 * 60) @@ -405,27 +416,29 @@ def handle_deleted_school_master_data_row(deleted_row=None, country_ids=None): Execution Frequency: Every day """ - print('***** Handling the School Master Data row deletion *****') + logger.info('Handling the deleted school master data rows.') if country_ids and len(country_ids) > 0: - task_cache_key = 'handle_deleted_school_master_data_row_status_{current_time}_country_ids_{ids}'.format( + task_key = 'handle_deleted_school_master_data_row_status_{current_time}_country_ids_{ids}'.format( current_time=format_date(core_utilities.get_current_datetime_object(), frmt='%d%m%Y_%H'), ids='_'.join([str(c_id) for c_id in country_ids]), ) + task_description = 'Handle deleted school master data rows for countries' elif deleted_row: - task_cache_key = 'handle_deleted_school_master_data_row_status_{current_time}_row_id_{ids}'.format( + task_key = 'handle_deleted_school_master_data_row_status_{current_time}_row_id_{ids}'.format( current_time=format_date(core_utilities.get_current_datetime_object(), frmt='%d%m%Y_%H'), ids=deleted_row.id, ) + task_description 
= 'Handle deleted school master data row for single record' else: - task_cache_key = 'handle_deleted_school_master_data_row_status_{current_time}'.format( + task_key = 'handle_deleted_school_master_data_row_status_{current_time}'.format( current_time=format_date(core_utilities.get_current_datetime_object(), frmt='%d%m%Y_%H')) + task_description = 'Handle deleted school master data rows' - running_task = cache.get(task_cache_key, None) - - if running_task in [None, b'completed', 'completed'] or deleted_row: - print('***** Not found running Job *****') - cache.set(task_cache_key, 'running', None) + task_id = current_task.request.id or str(uuid.uuid4()) + task_instance = background_task_utilities.task_on_start(task_id, task_key, task_description) + if task_instance: + logger.debug('Not found running job for deleted rows handler: {}'.format(task_key)) new_deleted_records = sources_models.SchoolMasterData.objects.filter( status=sources_models.SchoolMasterData.ROW_STATUS_DELETED_PUBLISHED, is_read=False, @@ -439,6 +452,7 @@ def handle_deleted_school_master_data_row(deleted_row=None, country_ids=None): new_deleted_records = new_deleted_records.filter(country_id__in=country_ids) current_date = core_utilities.get_current_datetime_object() + task_instance.info('Total records to update: {}'.format(new_deleted_records.count())) for data_chunk in core_utilities.queryset_iterator(new_deleted_records, chunk_size=1000): for row in data_chunk: @@ -456,13 +470,14 @@ def handle_deleted_school_master_data_row(deleted_row=None, country_ids=None): row.save() except Exception as ex: - print('Error reported on deletion: {0}'.format(ex)) - print('Record: {0}'.format(row.__dict__)) + logger.error('Error reported on deletion: {0}'.format(ex)) + logger.error('Record: {0}'.format(row.__dict__)) + task_instance.info('Error reported for ID ({0}) on deletion: {1}'.format(row.id, ex)) - cache.set(task_cache_key, 'completed', None) + task_instance.info('Remaining records: {}'.format(new_deleted_records.count())) + background_task_utilities.task_on_complete(task_instance) else: - print('***** Found running Job with "{0}" name so skipping current iteration *****'.format(task_cache_key)) - # TODO: Handle cache clean + logger.error('Found running Job with "{0}" name so skipping current iteration'.format(task_key)) @app.task @@ -474,138 +489,158 @@ def email_reminder_to_editor_and_publisher_for_review_waiting_records(): Execution Frequency: Every day only once """ - task_cache_key = 'email_reminder_to_editor_and_publisher_for_review_waiting_records_status_{current_time}'.format( + task_key = 'email_reminder_to_editor_and_publisher_for_review_waiting_records_status_{current_time}'.format( current_time=format_date(core_utilities.get_current_datetime_object(), frmt='%d%m%Y')) - running_task = cache.get(task_cache_key, None) + task_id = current_task.request.id or str(uuid.uuid4()) + task_instance = background_task_utilities.task_on_start( + task_id, task_key, 'Send reminder email to Editor and Publisher to review the school master rows') + + if task_instance: + logger.debug('Not found running job for reminder email task: {}'.format(task_key)) - if running_task in [None, b'completed', 'completed']: - print('***** Not found running Job *****') - cache.set(task_cache_key, 'running', None) ds_settings = settings.DATA_SOURCE_CONFIG.get('SCHOOL_MASTER') review_grace_period = core_utilities.convert_to_int(ds_settings['REVIEW_GRACE_PERIOD_IN_HRS'], default='48') - print('***** Sending email reminder to Editor/Publisher if records are 
waiting for more ' - 'than {0} hrs *****'.format(review_grace_period)) - - current_time = core_utilities.get_current_datetime_object() - check_time = current_time - timedelta(hours=review_grace_period) - email_user_list = [] - - # If there are records for all editor to review which collected date is more than 48 hrs - has_records_to_review_for_all_editors = sources_models.SchoolMasterData.objects.filter( - status=sources_models.SchoolMasterData.ROW_STATUS_DRAFT, - modified__lt=check_time, - ).exists() - - # If there are records for all publishers to review which are sent to publishers - # to publish more than 48 hrs back - has_records_to_review_for_all_publishers = sources_models.SchoolMasterData.objects.filter( - status__in=[ - sources_models.SchoolMasterData.ROW_STATUS_DRAFT_LOCKED, - sources_models.SchoolMasterData.ROW_STATUS_DELETED, - ], - is_read=False, - modified__lt=check_time, - ).exists() - - # If it has records for all editors and publishers to review than send the reminder email to all - if has_records_to_review_for_all_editors and has_records_to_review_for_all_publishers: - print('All Editors and Publishers has records to review') - email_user_list.extend(get_user_emails_for_permissions([ - auth_models.RolePermission.CAN_UPDATE_SCHOOL_MASTER_DATA, - auth_models.RolePermission.CAN_PUBLISH_SCHOOL_MASTER_DATA, - ])) + logger.info('Sending email reminder to Editor/Publisher if records are waiting for more ' + 'than {0} hrs'.format(review_grace_period)) + task_instance.info('Sending email reminder to Editor/Publisher if records are waiting for ' + 'more than {0} hrs'.format(review_grace_period)) + + if ( + core_utilities.is_blank_string(settings.ANYMAIL.get('MAILJET_API_KEY')) or + core_utilities.is_blank_string(settings.ANYMAIL.get('MAILJET_SECRET_KEY')) + ): + logger.error('MailJet creds are not configured to send the email. Hence email notification is disabled.') + task_instance.info('ERROR: MailJet creds are not configured to send the email. 
Hence email notification is ' + 'disabled.') else: - # If all editors have records to review, then send reminder email - if has_records_to_review_for_all_editors: - print('All Editors has records to review') - email_user_list.extend( - get_user_emails_for_permissions([auth_models.RolePermission.CAN_UPDATE_SCHOOL_MASTER_DATA])) + current_time = core_utilities.get_current_datetime_object() + check_time = current_time - timedelta(hours=review_grace_period) + email_user_list = [] + + # If there are records for all editor to review which collected date is more than 48 hrs + has_records_to_review_for_all_editors = sources_models.SchoolMasterData.objects.filter( + status=sources_models.SchoolMasterData.ROW_STATUS_DRAFT, + modified__lt=check_time, + ).exists() + + # If there are records for all publishers to review which are sent to publishers + # to publish more than 48 hrs back + has_records_to_review_for_all_publishers = sources_models.SchoolMasterData.objects.filter( + status__in=[ + sources_models.SchoolMasterData.ROW_STATUS_DRAFT_LOCKED, + sources_models.SchoolMasterData.ROW_STATUS_DELETED, + ], + is_read=False, + modified__lt=check_time, + ).exists() + + # If it has records for all editors and publishers to review than send the reminder email to all + if has_records_to_review_for_all_editors and has_records_to_review_for_all_publishers: + logger.info('All Editors and Publishers has records to review') + task_instance.info('All Editors and Publishers has records to review') + email_user_list.extend(get_user_emails_for_permissions([ + auth_models.RolePermission.CAN_UPDATE_SCHOOL_MASTER_DATA, + auth_models.RolePermission.CAN_PUBLISH_SCHOOL_MASTER_DATA, + ])) else: - # Else send the email to those editors who have updated the DRAFT records but not touched - # it in last 48 hrs - editor_ids_who_has_old_updated_records = list(sources_models.SchoolMasterData.objects.filter( - status=sources_models.SchoolMasterData.ROW_STATUS_UPDATED_IN_DRAFT, - modified__lt=check_time, - ).values_list('modified_by_id', flat=True).order_by('modified_by_id').distinct('modified_by_id')) - - if len(editor_ids_who_has_old_updated_records) > 0: - print('Only few Editors has records to review') + # If all editors have records to review, then send reminder email + if has_records_to_review_for_all_editors: + logger.info('All Editors has records to review') + task_instance.info('All Editors has records to review') email_user_list.extend( - get_user_emails_for_permissions( - [auth_models.RolePermission.CAN_UPDATE_SCHOOL_MASTER_DATA], - ids_to_filter=editor_ids_who_has_old_updated_records) - ) - - # If all publishers have records to review, then send reminder email to all - if has_records_to_review_for_all_publishers: - print('All Publishers has records to review') - email_user_list.extend( - get_user_emails_for_permissions([auth_models.RolePermission.CAN_PUBLISH_SCHOOL_MASTER_DATA])) - else: - # Else send the email to those publishers who have updated the records but not touched it in last 48 hrs - publisher_ids_who_has_old_updated_records = list(sources_models.SchoolMasterData.objects.filter( - status=sources_models.SchoolMasterData.ROW_STATUS_UPDATED_IN_DRAFT_LOCKED, - modified__lt=check_time, - ).values_list('modified_by_id', flat=True).order_by('modified_by_id').distinct('modified_by_id')) - - if len(publisher_ids_who_has_old_updated_records) > 0: - print('Only few Publishers has records to review') + get_user_emails_for_permissions([auth_models.RolePermission.CAN_UPDATE_SCHOOL_MASTER_DATA])) + else: + # Else send the 
email to those editors who have updated the DRAFT records but not touched + # it in last 48 hrs + editor_ids_who_has_old_updated_records = list(sources_models.SchoolMasterData.objects.filter( + status=sources_models.SchoolMasterData.ROW_STATUS_UPDATED_IN_DRAFT, + modified__lt=check_time, + ).values_list('modified_by_id', flat=True).order_by('modified_by_id').distinct('modified_by_id')) + + if len(editor_ids_who_has_old_updated_records) > 0: + logger.info('Only few Editors has records to review') + task_instance.info('Only few Editors has records to review') + email_user_list.extend( + get_user_emails_for_permissions( + [auth_models.RolePermission.CAN_UPDATE_SCHOOL_MASTER_DATA], + ids_to_filter=editor_ids_who_has_old_updated_records) + ) + + # If all publishers have records to review, then send reminder email to all + if has_records_to_review_for_all_publishers: + logger.info('All Publishers has records to review') + task_instance.info('All Publishers has records to review') email_user_list.extend( - get_user_emails_for_permissions( - [auth_models.RolePermission.CAN_PUBLISH_SCHOOL_MASTER_DATA], - ids_to_filter=publisher_ids_who_has_old_updated_records) - ) - - if len(email_user_list) > 0: - # Get the unique email IDs so it sends only 1 email - unique_email_ids = set(email_user_list) - - email_subject = sources_config.school_master_records_to_review_email_subject_format % ( - core_utilities.get_project_title() - ) - - dashboard_url = ds_settings['DASHBOARD_URL'] - email_message = sources_config.school_master_records_to_review_email_message_format.format( - dashboard_url='Dashboard url: {}'.format(dashboard_url) if dashboard_url else '', - ) - - email_content = {'subject': email_subject, 'message': email_message} - print('Sending the below emails:\n' - 'To: {0}\n' - 'Subject: {1}\n' - 'Body: {2}'.format(unique_email_ids, email_subject, email_message)) - account_utilities.send_email_over_mailjet_service(unique_email_ids, **email_content) - cache.set(task_cache_key, 'completed', None) + get_user_emails_for_permissions([auth_models.RolePermission.CAN_PUBLISH_SCHOOL_MASTER_DATA])) + else: + # Else send the email to those publishers who have updated the records + # but not touched it in last 48 hrs + publisher_ids_who_has_old_updated_records = list(sources_models.SchoolMasterData.objects.filter( + status=sources_models.SchoolMasterData.ROW_STATUS_UPDATED_IN_DRAFT_LOCKED, + modified__lt=check_time, + ).values_list('modified_by_id', flat=True).order_by('modified_by_id').distinct('modified_by_id')) + + if len(publisher_ids_who_has_old_updated_records) > 0: + logger.info('Only few Publishers has records to review') + task_instance.info('Only few Publishers has records to review') + email_user_list.extend( + get_user_emails_for_permissions( + [auth_models.RolePermission.CAN_PUBLISH_SCHOOL_MASTER_DATA], + ids_to_filter=publisher_ids_who_has_old_updated_records) + ) + + if len(email_user_list) > 0: + # Get the unique email IDs so it sends only 1 email + unique_email_ids = set(email_user_list) + + email_subject = sources_config.school_master_records_to_review_email_subject_format % ( + core_utilities.get_project_title() + ) + + dashboard_url = ds_settings['DASHBOARD_URL'] + email_message = sources_config.school_master_records_to_review_email_message_format.format( + dashboard_url='Dashboard url: {}'.format(dashboard_url) if dashboard_url else '', + ) + + email_content = {'subject': email_subject, 'message': email_message} + logger.info('Sending the below emails:\n' + 'To: {0}\n' + 'Subject: {1}\n' + 
'Body: {2}'.format(unique_email_ids, email_subject, email_message)) + task_instance.info('Sending the below emails:\tTo: {0}\tSubject: {1}\tBody: {2}'.format( + unique_email_ids, email_subject, email_message)) + account_utilities.send_email_over_mailjet_service(unique_email_ids, **email_content) + + background_task_utilities.task_on_complete(task_instance) else: - print('***** Found running Job with "{0}" name so skipping current iteration *****'.format(task_cache_key)) + logger.error('Found running Job with "{0}" name so skipping current iteration'.format(task_key)) @app.task(soft_time_limit=60 * 60, time_limit=60 * 60) def load_data_from_daily_check_app_api(*args): - print('***** Loading the DailyCheckApp Data to PROCO DB *****') + logger.info('Loading the DailyCheckApp data to DB.') source_utilities.sync_dailycheckapp_realtime_data() - print('***** Loaded the DailyCheckApp Data to PROCO DB - SUCCESS *****') + logger.info('Loaded the DailyCheckApp data to DB successfully.') @app.task(soft_time_limit=4 * 60 * 60, time_limit=4 * 60 * 60) def load_data_from_qos_apis(*args): - print('***** Loading the QoS Data to PROCO DB *****') + logger.info('Loading the QoS data to DB.') source_utilities.load_qos_data_source_response_to_model() source_utilities.sync_qos_realtime_data() - print('***** Loaded the QoS Data to PROCO DB - SUCCESS *****') + logger.info('Loaded the QoS data to DB successfully.') @app.task(soft_time_limit=2 * 60 * 60, time_limit=2 * 60 * 60) def cleanup_school_master_rows(): - task_cache_key = 'cleanup_school_master_rows_status_{current_time}'.format( + task_key = 'cleanup_school_master_rows_status_{current_time}'.format( current_time=format_date(core_utilities.get_current_datetime_object(), frmt='%d%m%Y_%H')) - running_task = cache.get(task_cache_key, None) - - if running_task in [None, b'completed', 'completed']: - print('***** Not found running Job *****') - cache.set(task_cache_key, 'running', None) + task_id = current_task.request.id or str(uuid.uuid4()) + task_instance = background_task_utilities.task_on_start(task_id, task_key, 'Cleanup school master rows') + if task_instance: + logger.debug('Not found running job for school master cleanup task: {}'.format(task_key)) # Delete all the old records where more than 1 record are in DRAFT/UPDATED_IN_DRAFT or # ROW_STATUS_DRAFT_LOCKED/ROW_STATUS_UPDATED_IN_DRAFT_LOCKED for same School GIGA ID rows_with_more_than_1_record_in_draft = sources_models.SchoolMasterData.objects.filter( @@ -618,9 +653,10 @@ def cleanup_school_master_rows(): ).values('school_id_giga', 'country_id').annotate( total_records=Count('school_id_giga', distinct=False), ).order_by('-total_records', 'school_id_giga', 'country_id').filter(total_records__gt=1) - print('Queryset to get all the old records to delete where more than 1 record are in DRAFT/' - 'UPDATED_IN_DRAFT/ROW_STATUS_DRAFT_LOCKED/ROW_STATUS_UPDATED_IN_DRAFT_LOCKED ' - 'for same School GIGA ID: {0}'.format(rows_with_more_than_1_record_in_draft.query)) + + logger.debug('Queryset to get all the old records to delete where more than 1 record are in DRAFT/' + 'UPDATED_IN_DRAFT/ROW_STATUS_DRAFT_LOCKED/ROW_STATUS_UPDATED_IN_DRAFT_LOCKED ' + 'for same School GIGA ID: {0}'.format(rows_with_more_than_1_record_in_draft.query)) for row in rows_with_more_than_1_record_in_draft: for row_to_delete in sources_models.SchoolMasterData.objects.filter( @@ -628,14 +664,19 @@ def cleanup_school_master_rows(): country_id=row['country_id'], ).order_by('-created')[1:]: row_to_delete.delete() + task_instance.info('Deleted 
rows where more than 1 record are in DRAFT/' + 'UPDATED_IN_DRAFT/ROW_STATUS_DRAFT_LOCKED/ROW_STATUS_UPDATED_IN_DRAFT_LOCKED ' + 'for same School GIGA ID') + # Delete all the old records where more than 1 record are in is_read=True for same School GIGA ID rows_with_more_than_1_record_in_read = sources_models.SchoolMasterData.objects.filter( is_read=True, ).values('school_id_giga', 'country_id').annotate( total_records=Count('school_id_giga', distinct=False), ).order_by('-total_records').filter(total_records__gt=1) - print('Queryset to get all the old records to delete where more than 1 record are in is_read=True ' - 'for same School GIGA ID: {0}'.format(rows_with_more_than_1_record_in_read.query)) + + logger.debug('Queryset to get all the old records to delete where more than 1 record are in is_read=True ' + 'for same School GIGA ID: {0}'.format(rows_with_more_than_1_record_in_read.query)) for row in rows_with_more_than_1_record_in_read: for row_to_delete in sources_models.SchoolMasterData.objects.filter( @@ -643,10 +684,11 @@ def cleanup_school_master_rows(): country_id=row['country_id'], ).order_by('-published_at')[1:]: row_to_delete.delete() + task_instance.info('Deleted rows where more than 1 record are in is_read=True for same School GIGA ID') - cache.set(task_cache_key, 'completed', None) + background_task_utilities.task_on_complete(task_instance) else: - print('***** Found running Job with "{0}" name so skipping current iteration *****'.format(task_cache_key)) + logger.error('Found running Job with "{0}" name so skipping current iteration'.format(task_key)) @app.task(soft_time_limit=6 * 60 * 60, time_limit=6 * 60 * 60) @@ -658,25 +700,25 @@ def update_static_data(*args, country_iso3_format=None): Execution Frequency: Once in a week/once in 2 weeks """ - task_cache_key = 'update_static_data_status_{current_time}'.format( - current_time=format_date(core_utilities.get_current_datetime_object(), frmt='%d%m%Y')) - running_task = cache.get(task_cache_key, None) - - if running_task in [None, b'completed', 'completed']: - print('***** Not found running Job *****') - cache.set(task_cache_key, 'running', None) + task_key = 'update_static_data_status_{current_time}'.format( + current_time=format_date(core_utilities.get_current_datetime_object(), frmt='%d%m%Y_%H')) + task_id = current_task.request.id or str(uuid.uuid4()) + task_instance = background_task_utilities.task_on_start( + task_id, task_key, 'Sync Static Data from School Master sources', check_previous=True) + if task_instance: + logger.debug('Not found running job for static data pull handler: {}'.format(task_key)) load_data_from_school_master_apis(country_iso3_format=country_iso3_format) + task_instance.info('Completed the load data from School Master API call') cleanup_school_master_rows.s() - - cache.set(task_cache_key, 'completed', None) + task_instance.info('Scheduled cleanup school master rows') + background_task_utilities.task_on_complete(task_instance) else: - print('***** Found running Job with "{0}" name so skipping current iteration *****'.format(task_cache_key)) + logger.error('Found Job with "{0}" name so skipping current iteration'.format(task_key)) @app.task(soft_time_limit=60 * 60, time_limit=60 * 60) def finalize_previous_day_data(_prev_result, country_id, date, *args): - print('Inside finalize_previous_day_data() *** ', country_id) country = Country.objects.get(id=country_id) aggregate_real_time_data_to_school_daily_status(country, date) @@ -699,17 +741,15 @@ def update_live_data(*args, today=True): Execution 
Frequency: 4-5 times a day """ - - task_cache_key = 'update_live_data_status_{current_time}_{today}'.format( + task_key = 'update_live_data_status_{current_time}_{today}'.format( current_time=format_date(core_utilities.get_current_datetime_object(), frmt='%d%m%Y_%H'), today=today, ) - running_task = cache.get(task_cache_key, None) - - if running_task in [None, b'completed', 'completed']: - print('***** Not found running Job *****') - cache.set(task_cache_key, 'running', None) + task_id = current_task.request.id or str(uuid.uuid4()) + task_instance = background_task_utilities.task_on_start(task_id, task_key, 'Sync Realtime Data from Live sources') + if task_instance: + logger.debug('Not found running job: {}'.format(task_key)) countries_ids = Country.objects.values_list('id', flat=True) if today: @@ -731,8 +771,6 @@ def update_live_data(*args, today=True): chain( load_data_from_daily_check_app_api.s(), load_data_from_qos_apis.s(), - # load_data_from_unicef_db.s(), - Need to check with Brian for deletion - # load_brasil_daily_statistics.s(), - QoS chord( group([ finalize_previous_day_data.s(country_id, yesterday_date) @@ -743,38 +781,41 @@ def update_live_data(*args, today=True): ).delay() - cache.set(task_cache_key, 'completed', None) + background_task_utilities.task_on_complete(task_instance) else: - print('***** Found running Job with "{0}" name so skipping current iteration *****'.format(task_cache_key)) + logger.error('Found running Job with "{0}" name so skipping current iteration'.format(task_key)) @app.task(soft_time_limit=1 * 60 * 60, time_limit=1 * 60 * 60) def clean_old_live_data(): current_datetime = core_utilities.get_current_datetime_object() - task_cache_key = 'clean_old_live_data_status_{current_time}'.format( - current_time=format_date(current_datetime, frmt='%d%m%Y'), + task_key = 'clean_old_live_data_status_{current_time}'.format( + current_time=format_date(current_datetime, frmt='%d%m%Y_%H'), ) - running_task = cache.get(task_cache_key, None) - - if running_task in [None, b'completed', 'completed']: - print('***** Not found running Job *****') - cache.set(task_cache_key, 'running', None) + task_id = current_task.request.id or str(uuid.uuid4()) + task_instance = background_task_utilities.task_on_start(task_id, task_key, 'Clean live data older than 30 days') + if task_instance: + logger.debug('Not found running job for live data cleanup handler: {}'.format(task_key)) older_then_date = current_datetime - timedelta(days=30) - print('Deleting all the rows from "RealTimeConnectivity" Data Table which is older than: {0}'.format( + logger.debug('Deleting all the rows from "RealTimeConnectivity" Data Table which is older than: {0}'.format( older_then_date)) statistics_models.RealTimeConnectivity.objects.filter(created__lt=older_then_date).delete() + task_instance.info('"RealTimeConnectivity" data table completed') - print('Deleting all the rows from "DailyCheckAppMeasurementData" Data Table which is older than: {0}'.format( - older_then_date)) + logger.debug( + 'Deleting all the rows from "DailyCheckAppMeasurementData" Data Table which is older than: {0}'.format( + older_then_date)) # Delete all entries from DailyCheckApp Data Table which is older than 7 days sources_models.DailyCheckAppMeasurementData.objects.filter(created_at__lt=older_then_date).delete() + task_instance.info('"DailyCheckAppMeasurementData" data table completed') - print('Deleting all the rows from "QoSData" Data Table which is older than: {0}'.format(older_then_date)) + logger.debug('Deleting all the rows from 
"QoSData" Data Table which is older than: {0}'.format(older_then_date)) # Delete all entries from QoS Data Table which is older than 7 days sources_models.QoSData.objects.filter(timestamp__lt=older_then_date).delete() + task_instance.info('"QoSData" data table completed') - cache.set(task_cache_key, 'completed', None) + background_task_utilities.task_on_complete(task_instance) else: - print('***** Found running Job with "{0}" name so skipping current iteration *****'.format(task_cache_key)) + logger.error('Found running Job with "{0}" name so skipping current iteration'.format(task_key)) diff --git a/proco/data_sources/tests/test_api.py b/proco/data_sources/tests/test_api.py index 4bf0998..c78dd93 100755 --- a/proco/data_sources/tests/test_api.py +++ b/proco/data_sources/tests/test_api.py @@ -21,7 +21,7 @@ def sources_url(url_params, query_param, view_name='list-school-master-rows'): class SchoolMasterApiTestCase(TestAPIViewSetMixin, TestCase): - databases = ['default'] + databases = ['default',] @classmethod def setUpTestData(cls): diff --git a/proco/data_sources/tests/test_utils.py b/proco/data_sources/tests/test_utils.py index dcac08f..3781718 100644 --- a/proco/data_sources/tests/test_utils.py +++ b/proco/data_sources/tests/test_utils.py @@ -98,7 +98,7 @@ def test_has_changes_for_review(self): self.assertFalse(sources_utilities.has_changes_for_review({ 'school_name': school.name, 'school_id_govt': school.external_id, - 'admin1_id_giga': None, + 'admin1_id_giga': school.admin1.giga_id_admin, 'admin2_id_giga': None, 'latitude': school.geopoint.y, 'longitude': school.geopoint.x, diff --git a/proco/data_sources/utils.py b/proco/data_sources/utils.py index 015bc68..99b6065 100644 --- a/proco/data_sources/utils.py +++ b/proco/data_sources/utils.py @@ -13,7 +13,6 @@ from django.conf import settings from django.db.models import Q from django.db.models.functions import Lower -from django.utils import timezone from rest_framework import status from proco.accounts.models import APIKey @@ -27,7 +26,7 @@ from proco.utils.dates import format_date from proco.utils.urls import add_url_params -logger = logging.getLogger('django.' + __name__) +logger = logging.getLogger('gigamaps.' + __name__) response_timezone = pytz.timezone(settings.TIME_ZONE) @@ -180,15 +179,15 @@ def parse_row(row): def sync_school_master_data(profile_file, share_name, schema_name, table_name, changes_for_countries, deleted_schools, school_master_fields): country = Country.objects.filter(iso3_format=table_name, ).first() - print('Country object: {0}'.format(country)) + logger.debug('Country object: {0}'.format(country)) if not country: - print('ERROR: Country with ISO3 Format ({0}) not found in PROCO DB. ' - 'Hence skipping the load for current table.'.format(table_name)) + logger.error('Country with ISO3 Format ({0}) not found in DB. ' + 'Hence skipping the load for current table.'.format(table_name)) raise ValueError(f"Invalid 'iso3_format': {table_name}") table_last_data_version = sources_models.SchoolMasterData.get_last_version(table_name) - print('Table last data version present in PROCO DB: {0}'.format(table_last_data_version)) + logger.debug('Table last data version present in DB: {0}'.format(table_last_data_version)) # Create an url to access a shared table. 
# A table path is the profile file path following with `#` and the fully qualified name of a table @@ -198,23 +197,22 @@ def sync_school_master_data(profile_file, share_name, schema_name, table_name, c schema_name=schema_name, table_name=table_name, ) - print('Table URL: %s', table_url) + logger.debug('Table URL: %s', table_url) table_current_version = delta_sharing.get_table_version(table_url) - print('Table current version from API: {0}'.format(table_current_version)) + logger.debug('Table current version from API: {0}'.format(table_current_version)) if table_last_data_version == table_current_version: - print('Both School Master data version in DB and Table version from API, are same. ' - 'Hence skipping the data update for current country ({0}).'.format(country)) + logger.info('Both School Master data version in DB and Table version from API, are same. ' + 'Hence skipping the data update for current country ({0}).'.format(country)) return table_protocol = delta_sharing.get_table_protocol(table_url) - print('Table Protocol: {0}'.format(table_protocol)) + logger.debug('Table Protocol: {0}'.format(table_protocol)) table_meta_data = delta_sharing.get_table_metadata(table_url) - print('Table Metadata: {0}'.format(table_meta_data.__dict__)) + logger.debug('Table Metadata: {0}'.format(table_meta_data.__dict__)) - # loaded_data_df = delta_sharing.load_as_pandas(table_url, None, table_current_version) loaded_data_df = delta_sharing.load_table_changes_as_pandas( table_url, table_last_data_version, @@ -222,7 +220,7 @@ def sync_school_master_data(profile_file, share_name, schema_name, table_name, c None, None, ) - print('Total count of rows in the data: {0}'.format(len(loaded_data_df))) + logger.debug('Total count of rows in the data: {0}'.format(len(loaded_data_df))) if len(loaded_data_df) > 0: # Sort the values based on _commit_timestamp ASC @@ -237,15 +235,15 @@ def sync_school_master_data(profile_file, share_name, schema_name, table_name, c loaded_data_df = loaded_data_df[loaded_data_df[DeltaSharingReader._change_type_col_name()].isin( ['insert', 'update_postimage', 'remove', 'delete'])] - print('Total count of rows in the data after duplicate cleanup: {0}'.format(len(loaded_data_df))) + logger.debug('Total count of rows in the data after duplicate cleanup: {0}'.format(len(loaded_data_df))) df_columns = list(loaded_data_df.columns.tolist()) cols_to_delete = list(set(df_columns) - set(school_master_fields)) + ['id', 'created', 'modified', 'school_id', 'country_id', 'status', 'modified_by', 'published_by', 'published_at', 'is_read', ] - print('All School Master API response columns: {}'.format(df_columns)) - print('All School Master API response columns to delete: {}'.format( + logger.debug('All School Master API response columns: {}'.format(df_columns)) + logger.debug('All School Master API response columns to delete: {}'.format( list(set(df_columns) - set(school_master_fields)))) insert_entries = [] @@ -258,10 +256,6 @@ def sync_school_master_data(profile_file, share_name, schema_name, table_name, c loaded_data_df['version'] = table_current_version loaded_data_df['country'] = country - # print('Table data: ') - # print(loaded_data_df) - # print(loaded_data_df.to_dict(orient='records')) - for _, row in loaded_data_df.iterrows(): change_type = row[DeltaSharingReader._change_type_col_name()] @@ -291,11 +285,11 @@ def sync_school_master_data(profile_file, share_name, schema_name, table_name, c insert_entries.append(sources_models.SchoolMasterData(**row_as_dict)) if len(insert_entries) == 5000: - 
print('Loading the data to "SchoolMasterData" table as it has reached 5000 benchmark.') + logger.debug('Loading the data to "SchoolMasterData" table as it has reached 5000 benchmark.') sources_models.SchoolMasterData.objects.bulk_create(insert_entries) insert_entries = [] - print('#' * 10) - print('\n\n') + logger.debug('#' * 10) + logger.debug('\n\n') elif change_type in ['remove', 'delete']: school = School.objects.filter( @@ -313,24 +307,24 @@ def sync_school_master_data(profile_file, share_name, schema_name, table_name, c remove_entries.append(sources_models.SchoolMasterData(**row_as_dict)) if len(remove_entries) == 5000: - print('Loading the data to "SchoolMasterData" table as it has reached 5000 benchmark.') + logger.info('Loading the data to "SchoolMasterData" table as it has reached 5000 benchmark.') sources_models.SchoolMasterData.objects.bulk_create(remove_entries) remove_entries = [] - print('#' * 10) - print('\n\n') + logger.debug('#' * 10) + logger.debug('\n\n') - print('Loading the remaining ({0}) data to "SchoolMasterData" table.'.format(len(insert_entries))) + logger.info('Loading the remaining ({0}) data to "SchoolMasterData" table.'.format(len(insert_entries))) if len(insert_entries) > 0: sources_models.SchoolMasterData.objects.bulk_create(insert_entries) - print('Removing ({0}) records from "SchoolMasterData" table.'.format(len(remove_entries))) + logger.info('Removing ({0}) records from "SchoolMasterData" table.'.format(len(remove_entries))) if len(remove_entries) > 0: sources_models.SchoolMasterData.objects.bulk_create(remove_entries) deleted_schools.extend( [country.name + ' : ' + school_master_row.school_name for school_master_row in remove_entries]) else: - print('INFO: No data to update in current table: {0}.'.format(table_name)) + logger.info('No data to update in current table: {0}.'.format(table_name)) def get_request_headers(request_configs): @@ -384,7 +378,7 @@ def load_daily_check_app_data_source_response_to_model(model, request_configs): new_params = {} while has_more_data: - print('#' * 10) + logger.debug('#' * 10) source_url = request_configs.get('url') if request_configs.get('query_params'): @@ -394,35 +388,32 @@ def load_daily_check_app_data_source_response_to_model(model, request_configs): page_no += 1 source_url = add_url_params(request_configs.get('url'), new_params) - print('Executing the request URL: {0}'.format(source_url)) - print('Request header: {0}'.format(source_request_headers)) + logger.debug('Executing the request URL: {0}'.format(source_url)) + logger.debug('Request header: {0}'.format(source_request_headers)) response = requests.get(source_url, headers=source_request_headers) if response.status_code != status.HTTP_200_OK: - print('ERROR: Invalid response received {0}'.format(response)) + logger.error('Invalid response received {0}'.format(response)) return response_data = response.json() if len(response_data) == 0: - print('No records to read further.') + logger.debug('No records to read further.') has_more_data = False else: for data in response_data: - # created_at = data.pop('created_at', None) - # if created_at: - # data['timestamp'] = created_at insert_entries.append(model(**data)) if len(insert_entries) >= 5000: - print('Loading the data to "{0}" table as it has reached 5000 benchmark.'.format(model.__name__)) + logger.info('Loading the data to "{0}" table as it has reached 5000 benchmark.'.format(model.__name__)) model.objects.bulk_create(insert_entries) insert_entries = [] - print('#' * 10) - print('\n\n') + logger.debug('#' * 10) 
+ logger.debug('\n\n') - print('Loading the remaining ({0}) data to "{1}" table.'.format(len(insert_entries), model.__name__)) + logger.info('Loading the remaining ({0}) data to "{1}" table.'.format(len(insert_entries), model.__name__)) if len(insert_entries) > 0: model.objects.bulk_create(insert_entries) @@ -431,7 +422,7 @@ def sync_dailycheckapp_realtime_data(): current_datetime = core_utilities.get_current_datetime_object() last_measurement_date = sources_models.DailyCheckAppMeasurementData.get_last_dailycheckapp_measurement_date() - print('Daily Check APP Last Measurement Date: {0}'.format(last_measurement_date)) + logger.info('Daily Check APP last measurement date: {0}'.format(last_measurement_date)) request_configs = { 'url': '{0}/measurements/v2'.format(ds_settings.get('DAILY_CHECK_APP').get('BASE_URL')), @@ -460,8 +451,8 @@ def sync_dailycheckapp_realtime_data(): (Q(upload__isnull=True) | Q(upload__gte=0)) & (Q(latency__isnull=True) | Q(latency__gte=0)), ) - print('Migrating the records from "DailyCheckAppMeasurementData" to "RealTimeConnectivity" ' - 'with date range: {0} - {1}'.format(last_measurement_date, current_datetime)) + logger.debug('Migrating the records from "DailyCheckAppMeasurementData" to "RealTimeConnectivity" ' + 'with date range: {0} - {1}'.format(last_measurement_date, current_datetime)) realtime = [] @@ -470,7 +461,7 @@ def sync_dailycheckapp_realtime_data(): ).order_by('country_code')) for country_code in countries: - print('Current Country Code: {}'.format(country_code)) + logger.debug('Current Country Code: {}'.format(country_code)) if country_code: country = Country.objects.filter(code=country_code).first() else: @@ -491,7 +482,7 @@ def sync_dailycheckapp_realtime_data(): school.giga_id_school: school for school in schools_qs.filter(giga_id_school__in=dcm_giga_ids) } - print('Total schools in DailyCheckApp: {0}, Successfully mapped schools: {1}'.format( + logger.debug('Total schools in DailyCheckApp: {0}, Successfully mapped schools: {1}'.format( len(dcm_giga_ids), len(dcm_schools))) mlab_school_ids = set(dailycheckapp_measurements.filter( @@ -505,7 +496,7 @@ def sync_dailycheckapp_realtime_data(): school.external_id: school for school in schools_qs.filter(external_id__in=mlab_school_ids) } - print('Total schools in MLab: {0}, Successfully mapped schools: {1}'.format( + logger.debug('Total schools in MLab: {0}, Successfully mapped schools: {1}'.format( len(mlab_school_ids), len(mlab_schools))) for dailycheckapp_measurement in dailycheckapp_measurements.filter(country_code=country_code): @@ -544,11 +535,11 @@ def sync_dailycheckapp_realtime_data(): )) if len(realtime) == 5000: - print('Loading the data to "RealTimeConnectivity" table as it has reached 5000 benchmark.') + logger.info('Loading the data to "RealTimeConnectivity" table as it has reached 5000 benchmark.') RealTimeConnectivity.objects.bulk_create(realtime) realtime = [] - print('Loading the remaining ({0}) data to "RealTimeConnectivity" table.'.format(len(realtime))) + logger.info('Loading the remaining ({0}) data to "RealTimeConnectivity" table.'.format(len(realtime))) if len(realtime) > 0: RealTimeConnectivity.objects.bulk_create(realtime) @@ -592,32 +583,32 @@ def load_qos_data_source_response_to_model(): if qos_schema: schema_tables = client.list_tables(qos_schema) - print('All tables ready to access: {0}'.format(schema_tables)) + logger.debug('All tables ready to access: {0}'.format(schema_tables)) qos_model_fields = [f.name for f in sources_models.QoSData._meta.get_fields()] for schema_table 
in schema_tables: - print('#' * 10) - print('Table: %s', schema_table) + logger.debug('#' * 10) + logger.debug('Table: %s', schema_table) table_name = schema_table.name try: country = Country.objects.filter(iso3_format=table_name).first() - print('Country object: {0}'.format(country)) + logger.debug('Country object: {0}'.format(country)) if not country: - print('ERROR: Country with ISO3 Format ({0}) not found in PROCO DB. ' - 'Hence skipping the load for current table.'.format(table_name)) + logger.error('Country with ISO3 Format ({0}) not found in DB. ' + 'Hence skipping the load for current table.'.format(table_name)) continue if len(country_codes_for_exclusion) > 0 and table_name in country_codes_for_exclusion: - print('WARNING: Country with ISO3 Format ({0}) asked to exclude in PROCO DB. ' - 'Hence skipping the load for current table.'.format(table_name)) + logger.warning('Country with ISO3 Format ({0}) asked to exclude in PROCO DB. ' + 'Hence skipping the load for current table.'.format(table_name)) continue table_last_data_version = sources_models.QoSData.get_last_version(table_name) - print('Table last data version present in PROCO DB: {0}'.format(table_last_data_version)) + logger.debug('Table last data version present in DB: {0}'.format(table_last_data_version)) # Create an url to access a shared table. # A table path is the profile file path following with `#` and the fully qualified name of a table @@ -627,21 +618,21 @@ def load_qos_data_source_response_to_model(): schema_name=schema_name, table_name=table_name, ) - print('Table URL: %s', table_url) + logger.debug('Table URL: %s', table_url) table_current_version = delta_sharing.get_table_version(table_url) - print('Table current version from API: {0}'.format(table_current_version)) + logger.debug('Table current version from API: {0}'.format(table_current_version)) if table_last_data_version == table_current_version: - print('Both QoS data version in DB and Table version from API, are same. ' - 'Hence skipping the data update for current country ({0}).'.format(country)) + logger.info('Both QoS data version in DB and Table version from API, are same. 
' + 'Hence skipping the data update for current country ({0}).'.format(country)) continue table_protocol = delta_sharing.get_table_protocol(table_url) - print('Table Protocol: {0}'.format(table_protocol)) + logger.debug('Table Protocol: {0}'.format(table_protocol)) table_meta_data = delta_sharing.get_table_metadata(table_url) - print('Table Metadata: {0}'.format(table_meta_data.__dict__)) + logger.debug('Table Metadata: {0}'.format(table_meta_data.__dict__)) if not table_last_data_version: # In case if its 1st pull, then pull only last 10 version's data at max @@ -650,7 +641,6 @@ def load_qos_data_source_response_to_model(): version_list = list(range(table_last_data_version + 1, table_current_version + 1)) for version in version_list: - # loaded_data_df = delta_sharing.load_as_pandas(table_url, None, version, None) loaded_data_df = delta_sharing.load_table_changes_as_pandas( table_url, version, @@ -658,12 +648,14 @@ def load_qos_data_source_response_to_model(): None, None, ) - print('Total count of rows in the {0} version data: {1}'.format(version, len(loaded_data_df))) + logger.debug( + 'Total count of rows in the {0} version data: {1}'.format(version, len(loaded_data_df))) loaded_data_df = loaded_data_df[loaded_data_df[DeltaSharingReader._change_type_col_name()].isin( ['insert', 'update_postimage'])] - print('Total count of rows after filtering only ["insert", "update_postimage"] in the "{0}" ' - 'version data: {1}'.format(version, len(loaded_data_df))) + logger.debug( + 'Total count of rows after filtering only ["insert", "update_postimage"] in the "{0}" ' + 'version data: {1}'.format(version, len(loaded_data_df))) if len(loaded_data_df) > 0: insert_entries = [] @@ -675,8 +667,8 @@ def load_qos_data_source_response_to_model(): 'modified', 'school_id', 'country_id', 'modified_by', ] - print('All QoS API response columns: {}'.format(df_columns)) - print('All QoS API response columns to delete: {}'.format( + logger.debug('All QoS API response columns: {}'.format(df_columns)) + logger.debug('All QoS API response columns to delete: {}'.format( list(set(df_columns) - set(qos_model_fields)))) loaded_data_df.drop(columns=cols_to_delete, inplace=True, errors='ignore', ) @@ -692,8 +684,9 @@ def load_qos_data_source_response_to_model(): ).first() if not school: - print('ERROR: School with Giga ID ({0}) not found in PROCO DB. ' - 'Hence skipping the load for current school.'.format(row['school_id_giga'])) + logger.warning( + 'School with Giga ID ({0}) not found in PROCO DB. 
' + 'Hence skipping the load for current school.'.format(row['school_id_giga'])) continue row['school'] = school @@ -702,25 +695,26 @@ def load_qos_data_source_response_to_model(): insert_entries.append(row_as_dict) if len(insert_entries) == 5000: - print('Loading the data to "QoSData" table as it has reached 5000 benchmark.') + logger.info('Loading the data to "QoSData" table as it has reached 5000 benchmark.') core_utilities.bulk_create_or_update(insert_entries, sources_models.QoSData, ['school', 'timestamp']) insert_entries = [] - print('#' * 10) - print('\n\n') + logger.debug('#' * 10) + logger.debug('\n\n') - print('Loading the remaining ({0}) data to "QoSData" table.'.format(len(insert_entries))) + logger.info( + 'Loading the remaining ({0}) data to "QoSData" table.'.format(len(insert_entries))) if len(insert_entries) > 0: core_utilities.bulk_create_or_update(insert_entries, sources_models.QoSData, ['school', 'timestamp']) else: - print('INFO: No data to update in current table: {0}.'.format(table_name)) + logger.info('No data to update in current table: {0}.'.format(table_name)) except Exception as ex: - print('ERROR: Exception caught for "{0}": {1}'.format(schema_table.name, str(ex))) + logger.error('Exception caught for "{0}": {1}'.format(schema_table.name, str(ex))) else: - print('ERROR: QoS schema ({0}) does not exist to use for share ({1}).'.format(schema_name, share_name)) + logger.error('QoS schema ({0}) does not exist to use for share ({1}).'.format(schema_name, share_name)) else: - print('ERROR: QoS share ({0}) does not exist to use.'.format(share_name)) + logger.error('QoS share ({0}) does not exist to use.'.format(share_name)) try: os.remove(profile_file) @@ -747,7 +741,7 @@ def sync_qos_realtime_data(): 'speed_download_probe', 'speed_upload_probe', 'latency_probe', ).order_by('timestamp').distinct(*['timestamp', 'school']) - print('Migrating the records from "QoSData" to "RealTimeConnectivity" with date range: {0} - {1}'.format( + logger.debug('Migrating the records from "QoSData" to "RealTimeConnectivity" with date range: {0} - {1}'.format( last_entry_date, current_datetime)) realtime = [] @@ -790,10 +784,10 @@ def sync_qos_realtime_data(): )) if len(realtime) == 5000: - print('Loading the data to "RealTimeConnectivity" table as it has reached 5000 benchmark.') + logger.info('Loading the data to "RealTimeConnectivity" table as it has reached 5000 benchmark.') RealTimeConnectivity.objects.bulk_create(realtime) realtime = [] - print('Loading the remaining ({0}) data to "RealTimeConnectivity" table.'.format(len(realtime))) + logger.info('Loading the remaining ({0}) data to "RealTimeConnectivity" table.'.format(len(realtime))) if len(realtime) > 0: RealTimeConnectivity.objects.bulk_create(realtime) diff --git a/proco/locations/admin.py b/proco/locations/admin.py deleted file mode 100644 index 44341d2..0000000 --- a/proco/locations/admin.py +++ /dev/null @@ -1,129 +0,0 @@ -from django.contrib import admin, messages -from django.contrib.gis.admin import GeoModelAdmin -from django.db import transaction -from django.db.models import Prefetch -from django.http import HttpResponseRedirect -from django.template.response import TemplateResponse -from django.urls import path, reverse -from django.utils.safestring import mark_safe - -from proco.background.models import BackgroundTask -from proco.background.tasks import reset_countries_data, validate_countries -from proco.locations.filters import CountryFilterList -from proco.locations.models import Country, Location -from 
proco.utils.admin import CountryNameDisplayAdminMixin - - -@admin.register(Country) -class CountryAdmin(GeoModelAdmin): - modifiable = False - - list_display = ('name', 'code', 'flag_preview') - search_fields = ('name',) - exclude = ('geometry', 'geometry_simplified') - raw_id_fields = ('last_weekly_status',) - actions = ('update_country_status_to_joined', 'clearing_all_data') - - def flag_preview(self, obj): - if not obj.flag: - return '' - return mark_safe(f'') # noqa: S703,S308 - - flag_preview.short_description = 'Flag' - - def get_queryset(self, request): - return super().get_queryset(request).defer('geometry', 'geometry_simplified') - - def get_actions(self, request): - actions = super().get_actions(request) - del actions['delete_selected'] - return actions - - def get_urls(self): - urls = super().get_urls() - custom_urls = [ - path('mark-as-joined/', self.update_country_status_to_joined, name='update_country_status_to_joined'), - path('delete-schools-and-statistics/', self.clearing_all_data, name='delete-schools-and-statistics'), - ] - return custom_urls + urls - - @staticmethod - def check_access(request, queryset): - access = True if request.user.is_superuser else False - if not access: - countries_available = request.user.countries_available.values('id') - qs_not_available = queryset.exclude(id__in=countries_available) - result = (False, qs_not_available) if not qs_not_available.exists() else (True, None) - else: - result = True, None - return result - - def update_country_status_to_joined(self, request, queryset=None): - access, qs_not_available = self.check_access(request, queryset) - - if not access: - message = f'You do not have access to change countries: ' \ - f'{", ".join(qs_not_available.values_list("name", flat=True))}' - level = messages.ERROR - self.message_user(request, message, level=level) - return HttpResponseRedirect(reverse('admin:locations_country_changelist')) - - else: - if request.method == 'POST' and 'post' in request.POST: - objects = request.POST.get('post') - task = validate_countries.apply_async((objects.split(','),), countdown=2) - BackgroundTask.objects.get_or_create(task_id=task.id) - message = 'Countries validation started. Please wait.' - level = messages.INFO - self.message_user(request, message, level=level) - return HttpResponseRedirect(reverse('admin:background_backgroundtask_change', args=[task.id])) - else: - objects = ','.join(str(i) for i in queryset.values_list('id', flat=True)) - context = {'opts': self.model._meta, 'objects': objects, 'action': 'mark_as_joined'} - return TemplateResponse(request, 'admin/locations/action_confirm.html', context) - - update_country_status_to_joined.short_description = 'Mark country data source as verified (non-OSM)' - - @transaction.atomic - def clearing_all_data(self, request, queryset=None): - access, qs_not_available = self.check_access(request, queryset) - if not access: - message = f'You do not have access to change countries: ' \ - f'{", ".join(qs_not_available.values_list("name", flat=True))}' - level = messages.ERROR - self.message_user(request, message, level=level) - return HttpResponseRedirect(reverse('admin:locations_country_changelist')) - - else: - if request.method == 'POST' and 'post' in request.POST: - objects = request.POST.get('post') - task = reset_countries_data.apply_async((objects.split(','),), countdown=2) - BackgroundTask.objects.get_or_create(task_id=task.id) - message = 'Country data clearing started. Please wait.' 
- level = messages.INFO - self.message_user(request, message, level=level) - return HttpResponseRedirect(reverse('admin:background_backgroundtask_change', args=[task.id])) - else: - objects = ','.join(str(i) for i in queryset.values_list('id', flat=True)) - context = {'opts': self.model._meta, 'objects': objects, 'action': 'delete_schools_and_statistics'} - return TemplateResponse(request, 'admin/locations/action_confirm.html', context) - - clearing_all_data.short_description = 'Delete school points & saved statistics' - - -@admin.register(Location) -class LocationAdmin(CountryNameDisplayAdminMixin, GeoModelAdmin): - modifiable = False - show_full_result_count = False - - list_display = ('name', 'get_country_name') - list_filter = (CountryFilterList,) - search_fields = ('name', 'country__name') - exclude = ('geometry_simplified',) - raw_id_fields = ('parent', 'country') - ordering = ('id',) - - def get_queryset(self, request): - return super().get_queryset(request).prefetch_related( - Prefetch('country', Country.objects.defer('geometry', 'geometry_simplified')), - ) diff --git a/proco/locations/api.py b/proco/locations/api.py index 2294412..f0d37ce 100644 --- a/proco/locations/api.py +++ b/proco/locations/api.py @@ -1,4 +1,5 @@ import copy +import logging import traceback from collections import OrderedDict @@ -8,7 +9,7 @@ from azure.search.documents.indexes.models import SearchFieldDataType from django.conf import settings from django.core.exceptions import ValidationError -from django.db.models import Case, Count, F, IntegerField, Value, When +from django.db.models import Case, Count, IntegerField, Value, When from django.db.models.functions.text import Lower from django.shortcuts import get_object_or_404 from django.utils.decorators import method_decorator @@ -32,7 +33,7 @@ from proco.data_sources.models import SchoolMasterData from proco.locations.models import Country, CountryAdminMetadata from proco.locations.search_indexes import SchoolIndex -from proco.locations.serializers import ( # BoundaryListCountrySerializer, +from proco.locations.serializers import ( CountryCSVSerializer, CountrySerializer, CountryStatusSerializer, @@ -40,7 +41,6 @@ DetailCountrySerializer, ExpandCountryAdminMetadataSerializer, ListCountrySerializer, - SearchListSerializer, ) from proco.schools.models import School from proco.utils.cache import cache_manager @@ -50,6 +50,8 @@ from proco.utils.mixins import CachedListMixin, CachedRetrieveMixin from proco.utils.tasks import update_country_related_cache +logger = logging.getLogger('gigamaps.' 
+ __name__) + @method_decorator([cache_control(public=True, max_age=settings.CACHE_CONTROL_MAX_AGE_FOR_FE)], name='dispatch') class CountryViewSet( @@ -126,7 +128,6 @@ def filter_queryset(self, queryset): return queryset -# @method_decorator([cache_control(public=True, max_age=settings.CACHE_CONTROL_MAX_AGE_FOR_FE)], name='dispatch') class CountryDataViewSet(BaseModelViewSet): model = Country serializer_class = CountrySerializer @@ -177,7 +178,7 @@ def get_queryset(self, ids=None): def create(self, request, *args, **kwargs): try: data = CountryUpdateRetriveSerializer(data=request.data) - if data.is_valid(): + if data.is_valid(raise_exception=True): data.save() action_log(request, [data.data], 1, '', self.model, field_name='name') update_country_related_cache.delay(data.data.get('code')) @@ -193,11 +194,9 @@ def update(self, request, *args, **kwargs): copy_request_data = copy.deepcopy(request.data) if copy_request_data.get('flag') is None: copy_request_data['flag'] = country.flag - # if copy_request_data.get('map_preview') is None: - # copy_request_data['map_preview'] = country.map_preview data = CountryUpdateRetriveSerializer(instance=country, data=copy_request_data) - if data.is_valid(): + if data.is_valid(raise_exception=True): change_message = changed_fields(country, copy_request_data) action_log(request, [country], 2, change_message, self.model, field_name='name') data.save() @@ -211,9 +210,8 @@ def update(self, request, *args, **kwargs): def destroy(self, request, *args, **kwargs): request_user = core_utilities.get_current_user(request=request) if 'pk' in kwargs: - response = super().destroy(request, *args, **kwargs) - instance = self.get_object() + response = super().destroy(request, *args, **kwargs) accounts_models.DataLayerCountryRelationship.objects.filter(country=instance).update( deleted=core_utilities.get_current_datetime_object(), @@ -278,55 +276,6 @@ def validate_ids(data, field='id', unique=True): return [int(data[field])] -# @method_decorator([cache_control(public=True, max_age=settings.CACHE_CONTROL_MAX_AGE_FOR_FE)], name='dispatch') -# class CountryBoundaryListAPIView(CachedListMixin, ListAPIView): -# LIST_CACHE_KEY_PREFIX = 'COUNTRY_BOUNDARY' -# -# queryset = Country.objects.all().annotate( -# geometry_empty=Func(F('geometry'), function='ST_IsEmpty', output_field=BooleanField()), -# ).filter(geometry_empty=False).only('id', 'code', 'geometry_simplified') -# serializer_class = BoundaryListCountrySerializer -# pagination_class = None - - -# class CountryTileGenerator(BaseTileGenerator): -# def __init__(self, table_config): -# super().__init__() -# self.table_config = table_config -# -# def envelope_to_sql(self, env, request): -# tbl = self.table_config.copy() -# tbl['env'] = self.envelope_to_bounds_sql(env) -# tbl['limit'] = int(request.query_params.get('limit', 100000)) -# # tbl['random_order'] = "ORDER BY random()" if int(request.query_params.get('z', 0)) == 2 else "" -# -# """sql with join and connectivity_speed at country level """ -# sql_tmpl = """ -# -# """ -# -# return sql_tmpl.format(**tbl) - -# -# class CountryTileRequestHandler(APIView): -# def __init__(self, *args, **kwargs): -# super().__init__(*args, **kwargs) -# -# table_config = { -# 'table': 'schools_school', -# 'srid': '4326', -# 'geomColumn': 'geopoint', -# 'attrColumns': 'id', -# } -# self.tile_generator = CountryTileGenerator(table_config) -# -# def get(self, request): -# try: -# return self.tile_generator.generate_tile(request) -# except Exception as e: -# return Response({"error": "An error occurred 
while processing the request"}, status=500) - - class DownloadCountriesViewSet(BaseModelViewSet, core_mixins.DownloadAPIDataToCSVMixin): model = Country queryset = Country.objects.all().select_related('last_weekly_status') @@ -348,8 +297,6 @@ class DownloadCountriesViewSet(BaseModelViewSet, core_mixins.DownloadAPIDataToCS 'id': ['exact', 'in'], } - # permit_list_expands = ['last_weekly_status'] - def list(self, request, *args, **kwargs): if core_utilities.is_export(request, self.action): return self.list_export(request, *args, **kwargs) @@ -359,167 +306,6 @@ def list(self, request, *args, **kwargs): return super().list(request, *args, **kwargs) -class SearchListAPIView(BaseModelViewSet): - """ - SearchListAPIView - This class is used to list all Download APIs. - Inherits: ListAPIView - """ - model = School - serializer_class = SearchListSerializer - - base_auth_permissions = ( - permissions.AllowAny, - ) - - filter_backends = ( - DjangoFilterBackend, - # NullsAlwaysLastOrderingFilter, - ) - - fields = ( - 'id', 'name', - 'admin1_id', 'admin1_name', 'admin1_description', 'admin2_id', 'admin2_name', 'admin2_description', - 'country_id', 'country_name', 'country_code', - ) - - ordering_fields = ('country_name', 'admin1_name', 'admin2_name', 'name') - - filterset_fields = { - 'id': ['exact', 'in'], - 'name': ['iexact', 'contains'], - 'country_id': ['exact', 'in'], - 'country__name': ['iexact', 'contains'], - 'admin1_id': ['exact', 'in'], - 'admin2_id': ['exact', 'in'], - } - - def get_queryset(self): - queryset = self.model.objects.all() - - qry_fields = self.fields - query_param_fields = self.request.query_params.get('fields') - # Select only requested fields - if query_param_fields: - qry_fields = query_param_fields.split(',') - - qry_ordering = self.ordering_fields - query_param_ordering = self.request.query_params.get('ordering') - # Apply the ordering as asked - if query_param_ordering: - qry_ordering = query_param_ordering.split(',') - - qs = queryset.annotate( - country_name=F('country__name'), - country_code=F('country__code'), - admin1_name=F('admin1__name'), - admin1_description=F('admin1__description'), - admin2_name=F('admin2__name'), - admin2_description=F('admin2__description'), - ).filter(country_id=222).values(*qry_fields).order_by(*qry_ordering).distinct(*qry_fields) - - return self.apply_queryset_filters(qs) - - def list(self, request, *args, **kwargs): - queryset = self.filter_queryset(self.get_queryset()) - - page_size = request.query_params.get('page_size') - if page_size: - page = self.paginate_queryset(queryset) - if page is not None: - # serializer = self.get_serializer(page, many=True) - return self.get_paginated_response(page) - - # serializer = self.get_serializer(queryset, many=True) - return Response(list(queryset)) - - def finalize_response(self, request, response, *args, **kwargs): - data = OrderedDict() - response = super().finalize_response(request, response, *args, **kwargs) - if response.status_code == rest_status.HTTP_200_OK: - response_data = [] - if isinstance(response.data, dict): - # If its paginated request then dict - response_data = response.data.get('results', []) - elif isinstance(response.data, list): - # If its normal request then lis - response_data = response.data - - for resp_data in response_data: - country_id = resp_data.get('country_id') - if country_id: - # Country ID exists - country_data = data.get(country_id, { - 'country_id': country_id, - 'country_name': resp_data.get('country_name'), - 'country_code': resp_data.get('country_code'), - 
'admin1_data': OrderedDict(), - }) - - if 'admin1_name' in resp_data: - admin1_name = 'Unknown' if core_utilities.is_blank_string(resp_data['admin1_name']) \ - else resp_data['admin1_name'] - # If admin 1 name exist in response - admin1_data = country_data.get('admin1_data') - admin1_name_data = admin1_data.get(admin1_name, { - 'admin1_name': admin1_name, - 'admin1_id': resp_data.get('admin1_id'), - 'admin1_description': resp_data.get('admin1_description'), - 'admin2_data': OrderedDict(), - }) - - if 'admin2_name' in resp_data: - admin2_name = 'Unknown' if core_utilities.is_blank_string(resp_data['admin2_name']) \ - else resp_data['admin2_name'] - # If admin 2 name exist in response - admin2_data = admin1_name_data.get('admin2_data') - admin2_name_data = admin2_data.get(admin2_name, { - 'admin2_name': admin2_name, - 'admin2_id': resp_data.get('admin2_id'), - 'admin2_description': resp_data.get('admin2_description'), - 'school_data': OrderedDict(), - }) - - if 'id' in resp_data: - school_id = resp_data['id'] - # If admin 2 name exist in response - school_data = admin2_name_data.get('school_data') - - school_id_data = school_data.get(school_id, { - 'id': school_id, - 'name': resp_data.get('name', 'Unknown'), - }) - - school_data[school_id] = school_id_data - admin2_name_data['school_data'] = school_data - else: - pass - - admin2_data[admin2_name] = admin2_name_data - else: - # No admin2 name in response - pass - - admin1_data[admin1_name] = admin1_name_data - country_data['admin1_data'] = admin1_data - else: - # No admin1 name in response - pass - - data[country_id] = country_data - else: - # No country ID in response - pass - - if isinstance(response.data, list): - response.data = OrderedDict() - - response.data['results'] = data - # response.data['results'] = response_data - - return response - - class CountrySearchStatListAPIView(CachedListMixin, ListAPIView): """ CountrySearchStatListAPIView @@ -535,18 +321,7 @@ class CountrySearchStatListAPIView(CachedListMixin, ListAPIView): LIST_CACHE_KEY_PREFIX = 'GLOBAL_COUNTRY_SEARCH_MAPPING' def get_queryset(self): - # fields = ('country_id', 'country_name', 'country_code', 'admin1_name', 'admin2_name', 'id', 'name') - # ordering_fields = ('country_name', 'admin1_name', 'admin2_name', 'name') - - queryset = self.model.objects.all() # .filter(country_id=144) - - # qs = queryset.prefetch_related( - # Prefetch('country', - # Country.objects.defer('geometry', 'geometry_simplified')), - # ).annotate( - # country_name=F('country__name'), - # country_code=F('country__code'), - # ).values(*fields).order_by(*ordering_fields).distinct(*fields) + queryset = self.model.objects.all() qs = queryset.values( 'country__id', 'country__name', 'country__code', 'country__last_weekly_status__integration_status', @@ -618,7 +393,6 @@ def _format_result(self, qry_data): admin1_name_data['data'] = admin2_data country_data['data'][admin1_name] = admin1_name_data - # country_data['data'] = admin1_name_data data[country_id] = country_data for country_id, country_data in data.items(): @@ -634,7 +408,6 @@ def _get_raw_list_response(self, request, *args, **kwargs): queryset = self.get_queryset() queryset_data = list(queryset) - # data = self._format_result(queryset_data) data = self._format_result(queryset_data) request_path = remove_query_param(request.get_full_path(), self.CACHE_KEY) @@ -805,7 +578,7 @@ def index_search(self, request, *args, **kwargs): self.params = dict(request.query_params) search_client = self.create_search_client() - print( + logger.debug( 'Search params: 
\nsearch_text - {search_text}\ninclude_total_count - {include_total_count}' '\norder_by - {order_by}\nsearch_fields - {search_fields}\nselect - {select}' '\nskip - {skip}\ntop - {top}\nfilter - {filter}\nquery_type - {query_type}'.format( @@ -831,10 +604,7 @@ def index_search(self, request, *args, **kwargs): query_type=self.get_query_type, ) - print('Total Documents Matching Query:', results.get_count()) - # for result in results: - # print("{0}".format(result)) - + logger.debug('Total Documents Matching Query: {}'.format(results.get_count())) return results @@ -869,24 +639,9 @@ class AggregateSearchViewSet(BaseSearchMixin, ListAPIView): def list(self, request, *args, **kwargs): resp_data = OrderedDict() data = self.index_search(request, *args, **kwargs) - counts = data.get_count() - # next_url = None - # previous_url = None - - # page = int(str(self.params.get('page', ['0'])[-1])) - # page_size = int(str(self.params.get('page_size', ['20'])[-1])) - # limit = (page * page_size) + page_size - # if counts > limit: - # next_url = replace_query_param(request.get_full_path(), 'page', page + 1) - # if page > 0: - # previous_url = replace_query_param(request.get_full_path(), 'page', page - 1) - resp_data['count'] = counts - # resp_data['next'] = next_url - # resp_data['previous'] = previous_url resp_data['results'] = list(data) - return Response(resp_data) @@ -960,5 +715,5 @@ def create(self, request, *args, **kwargs): message = 'Countries validation started. Please wait.' return Response({'desc': message, 'task_id': [task.id]}, status=rest_status.HTTP_200_OK) except: - print(traceback.format_exc()) + logger.error(traceback.format_exc()) return Response(data=error_mess, status=rest_status.HTTP_502_BAD_GATEWAY) diff --git a/proco/locations/api_urls.py b/proco/locations/api_urls.py index 9d52b91..5c26ae8 100644 --- a/proco/locations/api_urls.py +++ b/proco/locations/api_urls.py @@ -14,10 +14,6 @@ 'get': 'list', }), name='download-countries'), - # DB Table based searching - path('search/', api.SearchListAPIView.as_view({ - 'get': 'list', - }), name='search-countries-schools'), # DB table based listing only for Country, Admin1 and Admin2 path('search-countries/', api.CountrySearchStatListAPIView.as_view(), name='search-countries-admin-schools'), # Cognitive Search Index based searching for Schools @@ -27,11 +23,12 @@ 'get': 'list', 'post': 'create', 'delete': 'destroy', - }), name='list_or_create_or_destroy_country'), + }), name='list-create-destroy-country'), path('country//', api.CountryDataViewSet.as_view({ 'get': 'retrieve', 'put': 'update', - }), name='update_or_retrieve_country'), + 'delete': 'destroy', + }), name='update-retrieve-country'), path('country-admin-metadata/', api.CountryAdminMetadataViewSet.as_view({ 'get': 'list', diff --git a/proco/locations/filters.py b/proco/locations/filters.py deleted file mode 100644 index 3ea8a5e..0000000 --- a/proco/locations/filters.py +++ /dev/null @@ -1,19 +0,0 @@ -from django.contrib.admin import SimpleListFilter -from django.utils.translation import ugettext_lazy as _ - -from proco.locations.models import Country - - -class CountryFilterList(SimpleListFilter): - title = _('Country') - parameter_name = 'country_id' - - def lookups(self, request, model_admin): - return Country.objects.defer('geometry', 'geometry_simplified').values_list('id', 'name') - - def queryset(self, request, queryset): - return queryset.filter(**{self.parameter_name: self.value()}) if self.value() else queryset - - -class SchoolCountryFilterList(CountryFilterList): - 
parameter_name = 'school__country_id' diff --git a/proco/locations/serializers.py b/proco/locations/serializers.py index 4839d30..17db5a2 100644 --- a/proco/locations/serializers.py +++ b/proco/locations/serializers.py @@ -1,3 +1,4 @@ +import logging import re from collections import OrderedDict @@ -23,6 +24,8 @@ from proco.schools.models import School from proco.schools.serializers import ExpandCountrySerializer +logger = logging.getLogger('gigamaps.' + __name__) + class ExpandCountryAdminMetadataSerializer(FlexFieldsModelSerializer): """ @@ -168,26 +171,26 @@ def create(self, validated_data): if deleted_country_with_same_code_iso3_format: validated_data['deleted'] = None country_instance = super().update(deleted_country_with_same_code_iso3_format, validated_data) - print('Country restored') + logger.info('Country restored.') CountryDailyStatus.objects.all_deleted().filter(country=country_instance).update(deleted=None) - print('Country Daily restored') + logger.info('Country daily restored.') CountryWeeklyStatus.objects.all_deleted().filter(country=country_instance).update(deleted=None) - print('Country Weekly restored') + logger.info('Country weekly restored.') School.objects.all_deleted().filter(country=country_instance).update(deleted=None) - print('Schools restored') + logger.info('Schools restored.') SchoolDailyStatus.objects.all_deleted().filter(school__country=country_instance).update(deleted=None) - print('School Daily restored') + logger.info('School daily restored.') SchoolWeeklyStatus.objects.all_deleted().filter(school__country=country_instance).update(deleted=None) - print('School Weekly restored') + logger.info('School weekly restored.') SchoolRealTimeRegistration.objects.all_deleted().filter(school__country=country_instance).update( deleted=None) - print('School Real Time Registration restored') + logger.info('School real time registration restored.') request_user = core_utilities.get_current_user(context=self.context) DataLayerCountryRelationship.objects.filter(country=country_instance).update( @@ -206,10 +209,11 @@ def create(self, validated_data): api_key__valid_to__gte=core_utilities.get_current_datetime_object().date(), deleted__isnull=True, ).exists(): - print('WARNING: API Key for country ({0}) already exists.'.format(country_instance.iso3_format)) + logger.debug( + 'Warning: api key for country ({0}) already exists.'.format(country_instance.iso3_format)) else: country_api_key_relationship_obj.update(deleted=None, last_modified_by=request_user) - print('API Key restored.') + logger.info('Api key restored.') else: country_instance = super().create(validated_data) @@ -274,15 +278,6 @@ def to_representation(self, instance): return super().to_representation(instance) -# class BoundaryListCountrySerializer(serializers.ModelSerializer): -# class Meta: -# model = Country -# fields = ( -# 'id', 'code', 'geometry_simplified', -# ) -# read_only_fields = fields - - class ListCountrySerializer(BaseCountrySerializer): integration_status = serializers.SerializerMethodField() schools_with_data_percentage = serializers.SerializerMethodField() @@ -408,8 +403,6 @@ class Meta: 'schools_connectivity_good', 'integration_status', 'avg_distance_school', - # 'created', - # 'modified', 'schools_with_data_percentage', 'connectivity_speed', 'connectivity_latency', @@ -427,80 +420,27 @@ class Meta: class CountryStatusSerializer(FlexFieldsModelSerializer): - # map_preview = serializers.SerializerMethodField() - # - # benchmark_metadata = serializers.JSONField() - class Meta: model = Country 
read_only_fields = fields = ( 'id', - # 'created', - # 'modified', 'name', - # 'code', 'iso3_format', - # 'flag', - # 'map_preview', - # 'description', - # 'data_source', - # 'date_of_join', - # 'date_schools_mapped', - # 'last_weekly_status_id', - # 'benchmark_metadata', ) expandable_fields = { 'last_weekly_status': (ExpandCountryWeeklyStatusSerializer, {'source': 'last_weekly_status'}), } - # def get_map_preview(self, instance): - # if not instance.map_preview: - # return '' - # - # request = self.context.get('request') - # photo_url = instance.map_preview.url - # return request.build_absolute_uri(photo_url) - class CountryCSVSerializer(CountryStatusSerializer, DownloadSerializerMixin): class Meta(CountryStatusSerializer.Meta): - report_fields = OrderedDict([ ('id', 'ID'), ('name', 'Name'), - # ('code', 'Code'), ('iso3_format', 'Country ISO3 Code'), - # ('map_preview', 'Map Preview'), - # ('description', 'Description'), - # ('data_source', 'Data Source'), - # ('date_of_join', 'Date of Joining'), - # ('date_schools_mapped', 'Date School Mapped'), - # ('live_layer_benchmark', {'name': 'Live Layer Benchmarks', 'is_computed': True}), - # ('last_weekly_status', {'name': 'Last Weekly Status', 'is_computed': True}), ]) - # def get_last_weekly_status(self, data): - # last_week_data = data.get('last_weekly_status', None) - # values = [] - # if last_week_data: - # for key, value in last_week_data.items(): - # if isinstance(value, bool): - # value = self.boolean_flags.get(value) - # values.append('{0}:{1}'.format(key, value)) - # return '\t'.join(values) - - # def get_live_layer_benchmark(self, data): - # values = [] - # layers_data = data.get('benchmark_metadata', {}).get('live_layer', {}) - # if len(layers_data) > 0: - # id_names = dict(DataLayer.objects.filter(id__in=list(layers_data.keys())).values_list('id', 'name')) - # for layer_id, benchmark_value in layers_data.items(): - # values.append( - # '{0}:{1}'.format(id_names.get(core_utilities.convert_to_int(layer_id, orig=True), layer_id), - # benchmark_value)) - # return '\t'.join(values) - def to_representation(self, data): data = super().to_representation(data) return self.to_record_representation(data) diff --git a/proco/locations/tests/factories.py b/proco/locations/tests/factories.py index d0496a9..588c5da 100644 --- a/proco/locations/tests/factories.py +++ b/proco/locations/tests/factories.py @@ -4,12 +4,13 @@ from factory import django as django_factory from factory import fuzzy -from proco.locations.models import Country, Location +from proco.locations.models import Country, CountryAdminMetadata, Location class CountryFactory(django_factory.DjangoModelFactory): name = fuzzy.FuzzyText(length=20) code = fuzzy.FuzzyText(length=20) + iso3_format = fuzzy.FuzzyText(length=20) description = fuzzy.FuzzyText(length=40) data_source = fuzzy.FuzzyText(length=40) @@ -27,3 +28,15 @@ class LocationFactory(django_factory.DjangoModelFactory): class Meta: model = Location + + +class Admin1Factory(django_factory.DjangoModelFactory): + name = fuzzy.FuzzyText(length=20) + giga_id_admin = fuzzy.FuzzyText(length=20) + description = fuzzy.FuzzyText(length=40) + layer_name = fuzzy.FuzzyChoice(dict(CountryAdminMetadata.LAYER_NAME_CHOICES).keys()) + + country = SubFactory(CountryFactory) + + class Meta: + model = CountryAdminMetadata diff --git a/proco/locations/tests/test_api.py b/proco/locations/tests/test_api.py index 9986d9c..10f917e 100644 --- a/proco/locations/tests/test_api.py +++ b/proco/locations/tests/test_api.py @@ -1,16 +1,25 @@ from 
django.contrib.gis.geos import GEOSGeometry from django.core.cache import cache from django.test import TestCase -from django.urls import reverse - +from django.urls import resolve, reverse from rest_framework import status -import string, random + from proco.connection_statistics.tests.factories import CountryWeeklyStatusFactory -from proco.locations.tests.factories import CountryFactory +from proco.custom_auth.tests import test_utils as test_utilities +from proco.locations.tests.factories import Admin1Factory, CountryFactory from proco.schools.tests.factories import SchoolFactory from proco.utils.tests import TestAPIViewSetMixin -from proco.custom_auth import models as auth_models -from proco.locations.models import Country + + +def locations_url(url_params, query_param, view_name='countries-list'): + url = reverse('locations:' + view_name, args=url_params) + view = resolve(url) + view_info = view.func + + if len(query_param) > 0: + query_params = '?' + '&'.join([key + '=' + str(val) for key, val in query_param.items()]) + url += query_params + return url, view, view_info class CountryApiTestCase(TestAPIViewSetMixin, TestCase): @@ -23,8 +32,13 @@ def get_detail_args(self, instance): def setUpTestData(cls): cls.country_one = CountryFactory() cls.country_two = CountryFactory() - SchoolFactory(country=cls.country_one, location__country=cls.country_one) - SchoolFactory(country=cls.country_one, location__country=cls.country_one) + cls.country_three = CountryFactory() + + cls.admin1_one = Admin1Factory(country=cls.country_one) + + SchoolFactory(country=cls.country_one, location__country=cls.country_one, admin1=cls.admin1_one) + SchoolFactory(country=cls.country_one, location__country=cls.country_one, admin1=cls.admin1_one) + CountryWeeklyStatusFactory(country=cls.country_one) def setUp(self): @@ -32,12 +46,47 @@ def setUp(self): super().setUp() def test_countries_list(self): - with self.assertNumQueries(3): + with self.assertNumQueries(4): response = self._test_list( - user=None, expected_objects=[self.country_one, self.country_two], + user=None, expected_objects=[self.country_one, self.country_two, self.country_three], ) self.assertIn('integration_status', response.data[0]) + def test_list_countries_with_schools(self): + url, _, view = locations_url((), {'has_schools': 'true'}) + + response = self.forced_auth_req('get', url, view=view) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(len(response.data), 1) + self.assertIn('integration_status', response.data[0]) + + def test_list_countries_without_schools(self): + url, _, view = locations_url((), {'has_schools': 'false'}) + + response = self.forced_auth_req('get', url, view=view) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(len(response.data), 2) + self.assertIn('integration_status', response.data[0]) + + def test_list_countries_with_school_master_records(self): + url, _, view = locations_url((), {'has_school_master_records': 'true'}) + + response = self.forced_auth_req('get', url, view=view) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(len(response.data), 3) + + def test_list_countries_without_school_master_records(self): + url, _, view = locations_url((), {'has_school_master_records': 'false'}) + + response = self.forced_auth_req('get', url, view=view) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(len(response.data), 3) + self.assertIn('integration_status', response.data[0]) + def 
test_country_detail(self): with self.assertNumQueries(4): response = self._test_retrieve( @@ -46,22 +95,16 @@ def test_country_detail(self): self.assertIn('statistics', response.data) def test_country_list_cached(self): - with self.assertNumQueries(3): + with self.assertNumQueries(4): self._test_list( - user=None, expected_objects=[self.country_one, self.country_two], + user=None, expected_objects=[self.country_one, self.country_two, self.country_three], ) with self.assertNumQueries(0): self._test_list( - user=None, expected_objects=[self.country_one, self.country_two], + user=None, expected_objects=[self.country_one, self.country_two, self.country_three], ) - # def test_empty_countries_hidden(self): - # CountryFactory(geometry=GEOSGeometry('{"type": "MultiPolygon", "coordinates": []}')) - # self._test_list( - # user=None, expected_objects=[self.country_one, self.country_two], - # ) - class CountryBoundaryApiTestCase(TestAPIViewSetMixin, TestCase): base_view = 'locations:countries-list' @@ -70,8 +113,11 @@ class CountryBoundaryApiTestCase(TestAPIViewSetMixin, TestCase): def setUpTestData(cls): cls.country_one = CountryFactory() cls.country_two = CountryFactory() - SchoolFactory(country=cls.country_one, location__country=cls.country_one) - SchoolFactory(country=cls.country_one, location__country=cls.country_one) + + cls.admin1_one = Admin1Factory(country=cls.country_one) + + SchoolFactory(country=cls.country_one, location__country=cls.country_one, admin1=cls.admin1_one) + SchoolFactory(country=cls.country_one, location__country=cls.country_one, admin1=cls.admin1_one) def setUp(self): cache.clear() @@ -99,71 +145,199 @@ def test_empty_countries_hidden(self): # self.assertCountEqual([r['id'] for r in response.data], [self.country_one.id, self.country_two.id]) -class CountryDataTestCase(TestAPIViewSetMixin, TestCase): - base_view = 'locations:' - databases = {'default', 'read_only_database'} +class CountryDataViewSetTestCase(TestAPIViewSetMixin, TestCase): + databases = ['default', ] + + @classmethod + def setUpTestData(cls): + cls.country_one = CountryFactory() + cls.country_two = CountryFactory() + cls.country_three = CountryFactory() + + cls.admin1_one = Admin1Factory(country=cls.country_one) + + cls.school_one = SchoolFactory(country=cls.country_one, location__country=cls.country_one, + admin1=cls.admin1_one) + cls.school_two = SchoolFactory(country=cls.country_one, location__country=cls.country_one, + admin1=cls.admin1_one) + + cls.school_three = SchoolFactory(country=cls.country_two, location__country=cls.country_two, admin1=None) + + cls.user = test_utilities.setup_admin_user_by_role() def setUp(self): - self.email = 'test@test.com' - self.password = 'SomeRandomPass96' - self.user = auth_models.ApplicationUser.objects.create_user(username=self.email, password=self.password) - - self.role = auth_models.Role.objects.create(name='Admin', category='system') - self.role_permission = auth_models.UserRoleRelationship.objects.create(user=self.user, role=self.role) - - self.data = {"name": ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(6)), - # ===str(uuid.uuid4())[0:10], - "code": ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(2)), - "last_weekly_status_id": 2091, - "flag": "images/7962e7d2-ea1f-4571-a031-bb830fd575c6.png"} - - self.country_id = Country.objects.create(**self.data).id - self.delete_data = {"id": [self.country_id]} - - self.country_one = CountryFactory() - return super().setUp() - - # def test_create(self): - # self.data 
= {"name": ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(6)),#===str(uuid.uuid4())[0:10], - # "code": ''.join(random.choice(string.ascii_uppercase) for _ in range(2)), - # "last_weekly_status_id": 2091,"benchmark_metadata":{}} - # headers = {'Content-Type': 'multipart/form-data'} - # - # response = self.forced_auth_req( - # 'post', - # reverse(self.base_view + "list_or_create_or_destroy_country"), - # data=self.data, - # headers=headers, - # user=self.user) - - # self.assertEqual(response.status_code, status.HTTP_200_OK) - # self.assertNotEqual(response.status_code, status.HTTP_502_BAD_GATEWAY) - - # def test_update(self): - # self.data = {"name": ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(6)),#===str(uuid.uuid4())[0:10], - # "code": ''.join(random.choice(string.ascii_uppercase) for _ in range(2)), - # "last_weekly_status_id": 2091,"benchmark_metadata":{}} - # self.country_one = CountryFactory() - # from django.core import serializers - # tmpJson = serializers.serialize("json", self.country_one[0]) - # tmpObj = json.loads(tmpJson) - # # import json - # # print(json.dumps(self.country_one.__dict__)) - # response = self.forced_auth_req( - # 'put', - # reverse(self.base_view + "update_or_retrieve_country", args=(self.country_id,)), - # data=tmpObj, - # user=self.user, - # ) - # self.assertEqual(response.status_code, status.HTTP_200_OK) - # self.assertNotEqual(response.status_code, status.HTTP_502_BAD_GATEWAY) - - def test_destroy(self): + cache.clear() + super().setUp() + + def test_list(self): + url, _, view = locations_url((), {}, view_name='list-create-destroy-country') + + response = self.forced_auth_req('get', url, user=self.user, view=view) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + response_data = response.data + # 3 records as we created manually in setup + self.assertEqual(response_data['count'], 3) + self.assertEqual(len(response_data['results']), 3) + + def test_country_id_filter(self): + url, _, view = locations_url((), {'id': self.country_one.id}, + view_name='list-create-destroy-country') + + response = self.forced_auth_req('get', url, user=self.user, view=view) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + response_data = response.data + self.assertEqual(response_data['count'], 1) + self.assertEqual(len(response_data['results']), 1) + + def test_search(self): + url, _, view = locations_url((), {'search': self.country_one.name}, + view_name='list-create-destroy-country') + + response = self.forced_auth_req('get', url, user=self.user, view=view) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + response_data = response.data + self.assertEqual(response_data['count'], 1) + self.assertEqual(len(response_data['results']), 1) + + def test_retrieve(self): + url, view, view_info = locations_url((self.country_one.id,), {}, + view_name='update-retrieve-country') + + response = self.forced_auth_req('get', url, user=self.user, view=view, view_info=view_info, ) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + response_data = response.data + self.assertEqual(response_data['id'], self.country_one.id) + self.assertEqual(response_data['name'], self.country_one.name) + + def test_retrieve_wrong_id(self): + url, view, view_info = locations_url((1234546,), {}, + view_name='update-retrieve-country') + + response = self.forced_auth_req('get', url, user=self.user, view=view, view_info=view_info, ) + + self.assertEqual(response.status_code, 
status.HTTP_404_NOT_FOUND) + + def test_update_wrong_id(self): + url, view, view_info = locations_url((self.country_one.id,), {}, + view_name='update-retrieve-country') + response = self.forced_auth_req('get', url, user=self.user, view=view, view_info=view_info, ) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + response_data = response.data + + url, _, view = locations_url((123434567,), {}, view_name='update-retrieve-country') + put_response = self.forced_auth_req( + 'put', + url, + user=self.user, + data=response_data + ) + + self.assertEqual(put_response.status_code, status.HTTP_502_BAD_GATEWAY) + + def test_update_invalid_data(self): + url, view, view_info = locations_url((self.country_one.id,), {}, + view_name='update-retrieve-country') + response = self.forced_auth_req('get', url, user=self.user, view=view, view_info=view_info, ) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + response_data = response.data + response_data['date_of_join'] = '2024-13-01' + response_data['flag'] = b'abd' + put_response = self.forced_auth_req( + 'put', + url, + user=self.user, + data=response_data + ) + + self.assertEqual(put_response.status_code, status.HTTP_400_BAD_REQUEST) + + def test_delete(self): + url, _, view = locations_url((), {}, view_name='list-create-destroy-country') + response = self.forced_auth_req( 'delete', - reverse(self.base_view + "list_or_create_or_destroy_country"), - data=self.delete_data, + url, + data={'id': [self.country_three.id]}, user=self.user, ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertNotEqual(response.status_code, status.HTTP_502_BAD_GATEWAY) + + def test_delete_single_country(self): + url, view, view_info = locations_url((self.country_three.id, ), {}, view_name='update-retrieve-country') + + response = self.forced_auth_req( + 'delete', + url, + user=self.user, + view=view, view_info=view_info, + ) + self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) + + def test_delete_without_ids(self): + url, _, view = locations_url((), {}, view_name='list-create-destroy-country') + + response = self.forced_auth_req( + 'delete', + url, + user=self.user, + ) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + + def test_delete_wrong_ids(self): + url, _, view = locations_url((), {}, view_name='list-create-destroy-country') + + response = self.forced_auth_req( + 'delete', + url, + data={'id': [12345432]}, + user=self.user, + ) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + + def test_download_country_data_without_api_key(self): + url, view, view_info = locations_url((), { + 'page': '1', + 'page_size': '10', + 'ordering': 'name', + }, view_name='download-countries') + + response = self.forced_auth_req( + 'get', + url, + user=self.user, + view_info=view_info, + view=view, + request_format='text/csv' + ) + + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + + def test_list_searchable_details_from_db(self): + url, _, view = locations_url((), {}, view_name='search-countries-admin-schools') + + with self.assertNumQueries(1): + response = self.forced_auth_req('get', url, user=self.user, view=view) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + response_data = response.data + # 2 records as we created manually in setup and only 2 countries has schools + self.assertEqual(len(response_data), 2) + + with self.assertNumQueries(0): + response = self.forced_auth_req('get', url, user=self.user, view=view) + + 
self.assertEqual(response.status_code, status.HTTP_200_OK) + diff --git a/proco/proco_data_migrations/management_utils/user_role_permissions.py b/proco/proco_data_migrations/management_utils/user_role_permissions.py index ac41636..ae447be 100644 --- a/proco/proco_data_migrations/management_utils/user_role_permissions.py +++ b/proco/proco_data_migrations/management_utils/user_role_permissions.py @@ -2,6 +2,7 @@ from django.db import transaction +from proco.core import utils as core_utilities from proco.custom_auth.models import Role, RolePermission, ApplicationUser, UserRoleRelationship @@ -22,10 +23,7 @@ def populate_roles_data(): role_permissions = OrderedDict({ Role.SYSTEM_ROLE_NAME_ADMIN: [perm[0] for perm in RolePermission.PERMISSION_CHOICES], - Role.SYSTEM_ROLE_NAME_READ_ONLY: [ - RolePermission.CAN_VIEW_COUNTRY, - RolePermission.CAN_VIEW_ALL_ROLES, - ], + Role.SYSTEM_ROLE_NAME_READ_ONLY: [RolePermission.CAN_DELETE_API_KEY, ], }) @@ -44,10 +42,10 @@ def populate_role_permissions(): def clean_data(): - UserRoleRelationship.objects.all().delete() - ApplicationUser.objects.all().delete() - RolePermission.objects.all().delete() - Role.objects.all().delete() + UserRoleRelationship.objects.all().update(deleted=core_utilities.get_current_datetime_object()) + # ApplicationUser.objects.all().update(deleted=core_utilities.get_current_datetime_object()) + RolePermission.objects.all().update(deleted=core_utilities.get_current_datetime_object()) + Role.objects.all().update(deleted=core_utilities.get_current_datetime_object()) def create_user_role_relationship(user, role_name): diff --git a/proco/proco_data_migrations/migrations/0004_drop_tables_for_realtime_dailycheckapp_and_realtime_unicef.py b/proco/proco_data_migrations/migrations/0004_drop_tables_for_realtime_dailycheckapp_and_realtime_unicef.py new file mode 100644 index 0000000..2f94133 --- /dev/null +++ b/proco/proco_data_migrations/migrations/0004_drop_tables_for_realtime_dailycheckapp_and_realtime_unicef.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- + +from __future__ import unicode_literals + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('proco_data_migrations', '0003_setup_user_role_permissions'), + ] + + operations = [ + migrations.RunSQL( + sql='DROP TABLE IF EXISTS "dailycheckapp_measurements"', + reverse_sql=migrations.RunSQL.noop, + ), + migrations.RunSQL( + sql='DROP TABLE IF EXISTS "dailycheckapp_measurements_backup"', + reverse_sql=migrations.RunSQL.noop, + ), + migrations.RunSQL( + sql='DROP TABLE IF EXISTS "measurements"', + reverse_sql=migrations.RunSQL.noop, + ), + migrations.RunSQL( + sql='DROP TABLE IF EXISTS "dailycheckapp_school"', + reverse_sql=migrations.RunSQL.noop, + ), + migrations.RunSQL( + sql='DROP TABLE IF EXISTS "dailycheckapp_school_backup"', + reverse_sql=migrations.RunSQL.noop, + ), + migrations.RunSQL( + sql='DROP TABLE IF EXISTS "dailycheckapp_flagged_school"', + reverse_sql=migrations.RunSQL.noop, + ), + migrations.RunSQL( + sql='DROP TABLE IF EXISTS "dailycheckapp_contact_contactmessage"', + reverse_sql=migrations.RunSQL.noop, + ), + migrations.RunSQL( + sql='DROP TABLE IF EXISTS "dailycheckapp_country"', + reverse_sql=migrations.RunSQL.noop, + ), + migrations.RunSQL( + sql='DROP TABLE IF EXISTS "dailycheckapp_wrong_country_fix"', + reverse_sql=migrations.RunSQL.noop, + ), + ] diff --git a/proco/realtime_dailycheckapp/__init__.py b/proco/realtime_dailycheckapp/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git 
a/proco/realtime_dailycheckapp/migrations/0001_initial.py b/proco/realtime_dailycheckapp/migrations/0001_initial.py deleted file mode 100644 index 6ba52b7..0000000 --- a/proco/realtime_dailycheckapp/migrations/0001_initial.py +++ /dev/null @@ -1,38 +0,0 @@ -# Generated by Django 2.2.18 on 2021-03-24 13:25 - -import django.contrib.postgres.fields.jsonb -from django.db import migrations, models - - -class Migration(migrations.Migration): - - initial = True - - dependencies = [ - ] - - operations = [ - migrations.CreateModel( - name='DailyCheckApp_Measurement', - fields=[ - ('id', models.IntegerField(primary_key=True, serialize=False)), - ('Timestamp', models.DateTimeField()), - ('UUID', models.TextField(blank=True)), - ('BrowserID', models.TextField(blank=True)), - ('school_id', models.TextField()), - ('DeviceType', models.TextField(blank=True)), - ('Notes', models.TextField()), - ('ClientInfo', django.contrib.postgres.fields.jsonb.JSONField(default=dict)), - ('ServerInfo', django.contrib.postgres.fields.jsonb.JSONField(default=dict)), - ('annotation', models.TextField(blank=True)), - ('Download', models.FloatField(blank=True)), - ('Upload', models.FloatField(blank=True)), - ('Latency', models.IntegerField(blank=True)), - ('Results', django.contrib.postgres.fields.jsonb.JSONField(default=dict)), - ], - options={ - 'db_table': 'dailycheckapp_measurements', - 'managed': False, - }, - ), - ] diff --git a/proco/realtime_dailycheckapp/migrations/__init__.py b/proco/realtime_dailycheckapp/migrations/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/proco/realtime_dailycheckapp/models.py b/proco/realtime_dailycheckapp/models.py deleted file mode 100644 index f973f9b..0000000 --- a/proco/realtime_dailycheckapp/models.py +++ /dev/null @@ -1,46 +0,0 @@ -from datetime import datetime, timedelta - -from django.contrib.postgres.fields import JSONField -from django.core.cache import cache -from django.db import models -from django.utils import timezone - - -class DailyCheckApp_MeasurementManager(models.Manager): - def get_queryset(self): - return super().get_queryset().using('dailycheckapp_realtime') - - -class DailyCheckApp_Measurement(models.Model): - DAILYCHECKAPP_MEASUREMENT_DATE_CACHE_KEY = 'dailycheckapp_realtime_last_dailycheckapp_measurement_at' - - Timestamp = models.DateTimeField() - UUID = models.TextField(blank=True) - BrowserID = models.TextField(blank=True) - school_id = models.TextField() - DeviceType = models.TextField(blank=True) - Notes = models.TextField(blank=True) - ClientInfo = JSONField(default=dict) - ServerInfo = JSONField(default=dict) - annotation = models.TextField(blank=True) - Download = models.FloatField(blank=True) - Upload = models.FloatField(blank=True) - Latency = models.IntegerField(blank=True) - Results = JSONField(default=dict) - - objects = DailyCheckApp_MeasurementManager() - - class Meta: - managed = False - db_table = 'dailycheckapp_measurements' - - @classmethod - def get_last_dailycheckapp_measurement_date(cls) -> datetime: - last_dailycheckapp_measurement_at = cache.get(cls.DAILYCHECKAPP_MEASUREMENT_DATE_CACHE_KEY) - if not last_dailycheckapp_measurement_at: - return timezone.now() - timedelta(days=1) - return last_dailycheckapp_measurement_at - - @classmethod - def set_last_dailycheckapp_measurement_date(cls, value: datetime): - cache.set(cls.DAILYCHECKAPP_MEASUREMENT_DATE_CACHE_KEY, value) diff --git a/proco/realtime_dailycheckapp/tests/__init__.py b/proco/realtime_dailycheckapp/tests/__init__.py deleted file mode 100644 index 
e69de29..0000000 diff --git a/proco/realtime_dailycheckapp/tests/db/__init__.py b/proco/realtime_dailycheckapp/tests/db/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/proco/realtime_dailycheckapp/tests/db/initdb.sql b/proco/realtime_dailycheckapp/tests/db/initdb.sql deleted file mode 100644 index 5652fc8..0000000 --- a/proco/realtime_dailycheckapp/tests/db/initdb.sql +++ /dev/null @@ -1,20 +0,0 @@ --- clean managed tables -drop table if exists public.dailycheckapp_measurements; - --- create tables & relations -CREATE TABLE public.dailycheckapp_measurements ( - id serial, - "Timestamp" timestamp with time zone, - "UUID" text, - "BrowserID" text, - school_id text NOT NULL, - "DeviceType" text, - "Notes" text, - "ClientInfo" jsonb, - "ServerInfo" jsonb, - annotation text, - "Download" double precision, - "Upload" double precision, - "Latency" bigint, - "Results" jsonb -); diff --git a/proco/realtime_dailycheckapp/tests/db/test.py b/proco/realtime_dailycheckapp/tests/db/test.py deleted file mode 100644 index d9f3dc3..0000000 --- a/proco/realtime_dailycheckapp/tests/db/test.py +++ /dev/null @@ -1,9 +0,0 @@ -import os - -from django.db import connections - - -def init_test_db(): - with connections['dailycheckapp_realtime'].cursor() as cursor: - with open(os.path.join('proco', 'realtime_dailycheckapp', 'tests', 'db', 'initdb.sql'), 'r') as initdb_file: - cursor.execute(initdb_file.read()) diff --git a/proco/realtime_dailycheckapp/tests/factories.py b/proco/realtime_dailycheckapp/tests/factories.py deleted file mode 100644 index f66123e..0000000 --- a/proco/realtime_dailycheckapp/tests/factories.py +++ /dev/null @@ -1,42 +0,0 @@ -from django.utils import timezone - -import factory.fuzzy - -from proco.realtime_dailycheckapp.models import DailyCheckApp_Measurement - - -class DailyCheckApp_MeasurementFactory(factory.django.DjangoModelFactory): - Timestamp = factory.LazyFunction(lambda: timezone.now()) - UUID = factory.fuzzy.FuzzyText() - BrowserID = factory.fuzzy.FuzzyText() - school_id = factory.fuzzy.FuzzyText() - DeviceType = factory.fuzzy.FuzzyText() - Notes = factory.fuzzy.FuzzyText() - ClientInfo = { - 'IP': '127.0.0.1', - 'City': 'Neverwinter', - 'Postal': '9999', - 'Region': 'Sword Coast North', - 'Country': 'Faerûn', - 'Latitude': 0.01, - 'Timezone': 'America/New_York', - 'Longitude': 0.01, - } - ServerInfo = { - 'URL': 'http://localhost:7123', - 'City': 'Icewind Dale', - 'IPv4': '127.0.0.1', - 'Label': 'New York', - 'Country': 'Faerûn', - } - annotation = factory.fuzzy.FuzzyText() - Download = factory.fuzzy.FuzzyFloat(0, 10**6) - Upload = factory.fuzzy.FuzzyFloat(0, 10**6) - Latency = factory.fuzzy.FuzzyInteger(1, 1000) - Results = { - 'CurMSS': '1428', - 'Timeouts': '0', - } - - class Meta: - model = DailyCheckApp_Measurement diff --git a/proco/realtime_dailycheckapp/tests/test_sync.py b/proco/realtime_dailycheckapp/tests/test_sync.py deleted file mode 100644 index 43510da..0000000 --- a/proco/realtime_dailycheckapp/tests/test_sync.py +++ /dev/null @@ -1,130 +0,0 @@ -from datetime import timedelta - -from django.core.cache import cache -from django.db.models import Sum -from django.test import TestCase -from django.utils import timezone - -from proco.connection_statistics.models import RealTimeConnectivity -from proco.realtime_dailycheckapp.models import DailyCheckApp_Measurement -from proco.realtime_dailycheckapp.tests.db.test import init_test_db -from proco.realtime_dailycheckapp.tests.factories import DailyCheckApp_MeasurementFactory -from 
proco.realtime_dailycheckapp.utils import sync_dailycheckapp_realtime_data -from proco.schools.tests.factories import SchoolFactory - - -class SyncTestCase(TestCase): - databases = ['default', 'realtime', 'dailycheckapp_realtime'] - - @classmethod - def setUpClass(cls): - super().setUpClass() - init_test_db() - - def setUp(self) -> None: - super().setUp() - cache.delete(DailyCheckApp_Measurement.DAILYCHECKAPP_MEASUREMENT_DATE_CACHE_KEY) - - def test_empty_cache(self): - school = SchoolFactory(external_id='test_1') - DailyCheckApp_MeasurementFactory(Timestamp=timezone.now() - timedelta(days=1, hours=1), school_id='test_1', Download=1) - DailyCheckApp_MeasurementFactory(Timestamp=timezone.now() - timedelta(hours=23), school_id='test_1', Download=2) - - sync_dailycheckapp_realtime_data() - - self.assertEqual(RealTimeConnectivity.objects.count(approx=False), 1) - self.assertEqual(RealTimeConnectivity.objects.first().school, school) - self.assertEqual(RealTimeConnectivity.objects.aggregate(speed=Sum('connectivity_speed'))['speed'], 2048) - - self.assertGreater(DailyCheckApp_Measurement.get_last_dailycheckapp_measurement_date(), timezone.now() - timedelta(hours=23, seconds=5)) - - def test_cached_dailycheckapp_measurement_date(self): - SchoolFactory(external_id='test_1') - DailyCheckApp_MeasurementFactory(Timestamp=timezone.now() - timedelta(days=1, hours=1), school_id='test_1', Download=1) - DailyCheckApp_MeasurementFactory(Timestamp=timezone.now() - timedelta(hours=23), school_id='test_1', Download=2) - - DailyCheckApp_Measurement.set_last_dailycheckapp_measurement_date(timezone.now() - timedelta(days=1, hours=2)) - - sync_dailycheckapp_realtime_data() - - self.assertEqual(RealTimeConnectivity.objects.count(approx=False), 2) - self.assertEqual(RealTimeConnectivity.objects.aggregate(speed=Sum('connectivity_speed'))['speed'], 3072) - - def test_idempotency(self): - SchoolFactory(external_id='test_1') - DailyCheckApp_MeasurementFactory(school_id='test_1', Download=1) - DailyCheckApp_MeasurementFactory(school_id='test_1', Download=2) - - # two objects synchronized because they added after default last dailycheckapp_measurement date (day ago) - sync_dailycheckapp_realtime_data() - self.assertEqual(RealTimeConnectivity.objects.count(approx=False), 2) - - # no new entries added, because they are already synchronized - RealTimeConnectivity.objects.all().delete() - sync_dailycheckapp_realtime_data() - self.assertEqual(RealTimeConnectivity.objects.count(approx=False), 0) - - # two previous entries synchronized again as we moved date back - DailyCheckApp_Measurement.set_last_dailycheckapp_measurement_date(timezone.now() - timedelta(hours=1)) - sync_dailycheckapp_realtime_data() - self.assertEqual(RealTimeConnectivity.objects.count(approx=False), 2) - - def test_school_matching(self): - school = SchoolFactory(external_id='test_1') - DailyCheckApp_Measurement.set_last_dailycheckapp_measurement_date(timezone.now() - timedelta(seconds=1)) - DailyCheckApp_MeasurementFactory(school_id='test_1') - sync_dailycheckapp_realtime_data() - self.assertEqual(RealTimeConnectivity.objects.count(approx=False), 1) - self.assertEqual(RealTimeConnectivity.objects.first().school, school) - - def test_school_matching_external_id_collision(self): - SchoolFactory(external_id='test_1') - school = SchoolFactory(external_id='test_1', country__code='US', country__name='United States') - SchoolFactory(external_id='test_1') - - DailyCheckApp_Measurement.set_last_dailycheckapp_measurement_date(timezone.now() - timedelta(seconds=1)) - 
dailycheckapp_measurement = DailyCheckApp_MeasurementFactory(school_id='test_1') - dailycheckapp_measurement.ClientInfo['Country'] = 'US' - dailycheckapp_measurement.save() - - sync_dailycheckapp_realtime_data() - - self.assertEqual(RealTimeConnectivity.objects.count(approx=False), 1) - self.assertEqual(RealTimeConnectivity.objects.first().school, school) - - def test_school_matching_without_client_info(self): - # last school will be used cause to mapping logic - SchoolFactory(external_id='test_1') - school = SchoolFactory(external_id='test_1') - - DailyCheckApp_Measurement.set_last_dailycheckapp_measurement_date(timezone.now() - timedelta(seconds=1)) - dailycheckapp_measurement = DailyCheckApp_MeasurementFactory(school_id='test_1') - dailycheckapp_measurement.ClientInfo = {} - dailycheckapp_measurement.save() - - sync_dailycheckapp_realtime_data() - - self.assertEqual(RealTimeConnectivity.objects.count(approx=False), 1) - self.assertEqual(RealTimeConnectivity.objects.first().school, school) - - def test_school_matching_unknown(self): - SchoolFactory(external_id='test_1') - DailyCheckApp_Measurement.set_last_dailycheckapp_measurement_date(timezone.now() - timedelta(seconds=1)) - DailyCheckApp_MeasurementFactory() - sync_dailycheckapp_realtime_data() - self.assertEqual(RealTimeConnectivity.objects.count(approx=False), 0) - - def test_fields(self): - school = SchoolFactory(external_id='test_1') - DailyCheckApp_Measurement.set_last_dailycheckapp_measurement_date(timezone.now() - timedelta(seconds=1)) - - dailycheckapp_measurement = DailyCheckApp_MeasurementFactory(school_id='test_1') - sync_dailycheckapp_realtime_data() - - connectivity_info = RealTimeConnectivity.objects.first() - self.assertIsNotNone(connectivity_info) - - self.assertEqual(RealTimeConnectivity.objects.first().created, dailycheckapp_measurement.Timestamp) - self.assertEqual(RealTimeConnectivity.objects.first().connectivity_speed, int(dailycheckapp_measurement.Download * 1024)) - self.assertEqual(RealTimeConnectivity.objects.first().connectivity_latency, dailycheckapp_measurement.Latency) - self.assertEqual(RealTimeConnectivity.objects.first().school, school) diff --git a/proco/realtime_dailycheckapp/utils.py b/proco/realtime_dailycheckapp/utils.py deleted file mode 100644 index ece660f..0000000 --- a/proco/realtime_dailycheckapp/utils.py +++ /dev/null @@ -1,54 +0,0 @@ -import logging - -from django.utils import timezone - -from proco.connection_statistics.models import RealTimeConnectivity -from proco.locations.models import Country -from proco.realtime_dailycheckapp.models import DailyCheckApp_Measurement -from proco.schools.models import School - -logger = logging.getLogger('django.' 
+ __name__) - - -def sync_dailycheckapp_realtime_data(): - dailycheckapp_measurements = DailyCheckApp_Measurement.objects.filter(Timestamp__gt=DailyCheckApp_Measurement.get_last_dailycheckapp_measurement_date()) - - realtime = [] - - countries = {m.ClientInfo.get('Country') for m in dailycheckapp_measurements} - for country_code in countries: - if country_code: - country = Country.objects.filter(code=country_code).first() - else: - country = None - - schools_qs = School.objects - if country: - schools_qs = schools_qs.filter(country=country) - - schools_ids = {m.school_id for m in dailycheckapp_measurements if m.ClientInfo.get('Country') == country_code} - schools = { - school.external_id: school - for school in schools_qs.filter(external_id__in=schools_ids) - } - - for dailycheckapp_measurement in dailycheckapp_measurements: - if dailycheckapp_measurement.school_id not in schools: - logger.debug(f'skipping dailycheckapp_measurement {dailycheckapp_measurement.UUID}: unknown school {dailycheckapp_measurement.school_id}') - continue - - realtime.append(RealTimeConnectivity( - created=dailycheckapp_measurement.Timestamp, - connectivity_speed=dailycheckapp_measurement.Download * 1024, # kb/s -> b/s - connectivity_latency=dailycheckapp_measurement.Latency, - school=schools[dailycheckapp_measurement.school_id], - )) - - RealTimeConnectivity.objects.bulk_create(realtime) - - # not using aggregate because there can be new entries between two operations - if dailycheckapp_measurements: - last_update = max((m.Timestamp for m in dailycheckapp_measurements)) - else: - last_update = timezone.now() - DailyCheckApp_Measurement.set_last_dailycheckapp_measurement_date(last_update) diff --git a/proco/realtime_unicef/__init__.py b/proco/realtime_unicef/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/proco/realtime_unicef/migrations/0001_initial.py b/proco/realtime_unicef/migrations/0001_initial.py deleted file mode 100644 index 6a08549..0000000 --- a/proco/realtime_unicef/migrations/0001_initial.py +++ /dev/null @@ -1,38 +0,0 @@ -# Generated by Django 2.2.18 on 2021-03-24 13:25 - -import django.contrib.postgres.fields.jsonb -from django.db import migrations, models - - -class Migration(migrations.Migration): - - initial = True - - dependencies = [ - ] - - operations = [ - migrations.CreateModel( - name='Measurement', - fields=[ - ('id', models.IntegerField(primary_key=True, serialize=False)), - ('timestamp', models.DateTimeField()), - ('uuid', models.TextField(blank=True)), - ('browser_id', models.TextField(blank=True)), - ('school_id', models.TextField()), - ('device_type', models.TextField(blank=True)), - ('notes', models.TextField()), - ('client_info', django.contrib.postgres.fields.jsonb.JSONField(default=dict)), - ('server_info', django.contrib.postgres.fields.jsonb.JSONField(default=dict)), - ('annotation', models.TextField(blank=True)), - ('download', models.FloatField(blank=True)), - ('upload', models.FloatField(blank=True)), - ('latency', models.IntegerField(blank=True)), - ('results', django.contrib.postgres.fields.jsonb.JSONField(default=dict)), - ], - options={ - 'db_table': 'measurements', - 'managed': False, - }, - ), - ] diff --git a/proco/realtime_unicef/migrations/__init__.py b/proco/realtime_unicef/migrations/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/proco/realtime_unicef/models.py b/proco/realtime_unicef/models.py deleted file mode 100644 index c9ab957..0000000 --- a/proco/realtime_unicef/models.py +++ /dev/null @@ -1,46 +0,0 @@ -from 
datetime import datetime, timedelta - -from django.contrib.postgres.fields import JSONField -from django.core.cache import cache -from django.db import models -from django.utils import timezone - - -class MeasurementManager(models.Manager): - def get_queryset(self): - return super().get_queryset().using('realtime') - - -class Measurement(models.Model): - MEASUREMENT_DATE_CACHE_KEY = 'realtime_last_measurement_at' - - timestamp = models.DateTimeField() - uuid = models.TextField(blank=True) - browser_id = models.TextField(blank=True) - school_id = models.TextField() - device_type = models.TextField(blank=True) - notes = models.TextField(blank=True) - client_info = JSONField(default=dict) - server_info = JSONField(default=dict) - annotation = models.TextField(blank=True) - download = models.FloatField(blank=True) - upload = models.FloatField(blank=True) - latency = models.IntegerField(blank=True) - results = JSONField(default=dict) - - objects = MeasurementManager() - - class Meta: - managed = False - db_table = 'measurements' - - @classmethod - def get_last_measurement_date(cls) -> datetime: - last_measurement_at = cache.get(cls.MEASUREMENT_DATE_CACHE_KEY) - if not last_measurement_at: - return timezone.now() - timedelta(days=1) - return last_measurement_at - - @classmethod - def set_last_measurement_date(cls, value: datetime): - cache.set(cls.MEASUREMENT_DATE_CACHE_KEY, value) diff --git a/proco/realtime_unicef/tests/__init__.py b/proco/realtime_unicef/tests/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/proco/realtime_unicef/tests/db/__init__.py b/proco/realtime_unicef/tests/db/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/proco/realtime_unicef/tests/db/initdb.sql b/proco/realtime_unicef/tests/db/initdb.sql deleted file mode 100644 index c5793c3..0000000 --- a/proco/realtime_unicef/tests/db/initdb.sql +++ /dev/null @@ -1,20 +0,0 @@ --- clean managed tables -drop table if exists public.measurements; - --- create tables & relations -CREATE TABLE public.measurements ( - id serial, - "timestamp" timestamp with time zone, - uuid text, - browser_id text, - school_id text NOT NULL, - device_type text, - notes text, - client_info jsonb, - server_info jsonb, - annotation text, - download double precision, - upload double precision, - latency bigint, - results jsonb -); diff --git a/proco/realtime_unicef/tests/db/test.py b/proco/realtime_unicef/tests/db/test.py deleted file mode 100644 index 7c73379..0000000 --- a/proco/realtime_unicef/tests/db/test.py +++ /dev/null @@ -1,9 +0,0 @@ -import os - -from django.db import connections - - -def init_test_db(): - with connections['realtime'].cursor() as cursor: - with open(os.path.join('proco', 'realtime_unicef', 'tests', 'db', 'initdb.sql'), 'r') as initdb_file: - cursor.execute(initdb_file.read()) diff --git a/proco/realtime_unicef/tests/factories.py b/proco/realtime_unicef/tests/factories.py deleted file mode 100644 index 05a8ced..0000000 --- a/proco/realtime_unicef/tests/factories.py +++ /dev/null @@ -1,42 +0,0 @@ -from django.utils import timezone - -import factory.fuzzy - -from proco.realtime_unicef.models import Measurement - - -class MeasurementFactory(factory.django.DjangoModelFactory): - timestamp = factory.LazyFunction(lambda: timezone.now()) - uuid = factory.fuzzy.FuzzyText() - browser_id = factory.fuzzy.FuzzyText() - school_id = factory.fuzzy.FuzzyText() - device_type = factory.fuzzy.FuzzyText() - notes = factory.fuzzy.FuzzyText() - client_info = { - 'IP': '127.0.0.1', - 'City': 
'Neverwinter', - 'Postal': '9999', - 'Region': 'Sword Coast North', - 'Country': 'Faerûn', - 'Latitude': 0.01, - 'Timezone': 'America/New_York', - 'Longitude': 0.01, - } - server_info = { - 'URL': 'http://localhost:7123', - 'City': 'Icewind Dale', - 'IPv4': '127.0.0.1', - 'Label': 'New York', - 'Country': 'Faerûn', - } - annotation = factory.fuzzy.FuzzyText() - download = factory.fuzzy.FuzzyFloat(0, 10**6) - upload = factory.fuzzy.FuzzyFloat(0, 10**6) - latency = factory.fuzzy.FuzzyInteger(1, 1000) - results = { - 'CurMSS': '1428', - 'Timeouts': '0', - } - - class Meta: - model = Measurement diff --git a/proco/realtime_unicef/tests/test_sync.py b/proco/realtime_unicef/tests/test_sync.py deleted file mode 100644 index 14ab965..0000000 --- a/proco/realtime_unicef/tests/test_sync.py +++ /dev/null @@ -1,130 +0,0 @@ -from datetime import timedelta - -from django.core.cache import cache -from django.db.models import Sum -from django.test import TestCase -from django.utils import timezone - -from proco.connection_statistics.models import RealTimeConnectivity -from proco.realtime_unicef.models import Measurement -from proco.realtime_unicef.tests.db.test import init_test_db -from proco.realtime_unicef.tests.factories import MeasurementFactory -from proco.realtime_unicef.utils import sync_realtime_data -from proco.schools.tests.factories import SchoolFactory - - -class SyncTestCase(TestCase): - databases = ['default', 'realtime'] - - @classmethod - def setUpClass(cls): - super().setUpClass() - init_test_db() - - def setUp(self) -> None: - super().setUp() - cache.delete(Measurement.MEASUREMENT_DATE_CACHE_KEY) - - def test_empty_cache(self): - school = SchoolFactory(external_id='test_1') - MeasurementFactory(timestamp=timezone.now() - timedelta(days=1, hours=1), school_id='test_1', download=1) - MeasurementFactory(timestamp=timezone.now() - timedelta(hours=23), school_id='test_1', download=2) - - sync_realtime_data() - - self.assertEqual(RealTimeConnectivity.objects.count(approx=False), 1) - self.assertEqual(RealTimeConnectivity.objects.first().school, school) - self.assertEqual(RealTimeConnectivity.objects.aggregate(speed=Sum('connectivity_speed'))['speed'], 2048) - - self.assertGreater(Measurement.get_last_measurement_date(), timezone.now() - timedelta(hours=23, seconds=5)) - - def test_cached_measurement_date(self): - SchoolFactory(external_id='test_1') - MeasurementFactory(timestamp=timezone.now() - timedelta(days=1, hours=1), school_id='test_1', download=1) - MeasurementFactory(timestamp=timezone.now() - timedelta(hours=23), school_id='test_1', download=2) - - Measurement.set_last_measurement_date(timezone.now() - timedelta(days=1, hours=2)) - - sync_realtime_data() - - self.assertEqual(RealTimeConnectivity.objects.count(approx=False), 2) - self.assertEqual(RealTimeConnectivity.objects.aggregate(speed=Sum('connectivity_speed'))['speed'], 3072) - - def test_idempotency(self): - SchoolFactory(external_id='test_1') - MeasurementFactory(school_id='test_1', download=1) - MeasurementFactory(school_id='test_1', download=2) - - # two objects synchronized because they added after default last measurement date (day ago) - sync_realtime_data() - self.assertEqual(RealTimeConnectivity.objects.count(approx=False), 2) - - # no new entries added, because they are already synchronized - RealTimeConnectivity.objects.all().delete() - sync_realtime_data() - self.assertEqual(RealTimeConnectivity.objects.count(approx=False), 0) - - # two previous entries synchronized again as we moved date back - 
Measurement.set_last_measurement_date(timezone.now() - timedelta(hours=1)) - sync_realtime_data() - self.assertEqual(RealTimeConnectivity.objects.count(approx=False), 2) - - def test_school_matching(self): - school = SchoolFactory(external_id='test_1') - Measurement.set_last_measurement_date(timezone.now() - timedelta(seconds=1)) - MeasurementFactory(school_id='test_1') - sync_realtime_data() - self.assertEqual(RealTimeConnectivity.objects.count(approx=False), 1) - self.assertEqual(RealTimeConnectivity.objects.first().school, school) - - def test_school_matching_external_id_collision(self): - SchoolFactory(external_id='test_1') - school = SchoolFactory(external_id='test_1', country__code='US', country__name='United States') - SchoolFactory(external_id='test_1') - - Measurement.set_last_measurement_date(timezone.now() - timedelta(seconds=1)) - measurement = MeasurementFactory(school_id='test_1') - measurement.client_info['Country'] = 'US' - measurement.save() - - sync_realtime_data() - - self.assertEqual(RealTimeConnectivity.objects.count(approx=False), 1) - self.assertEqual(RealTimeConnectivity.objects.first().school, school) - - def test_school_matching_without_client_info(self): - # last school will be used cause to mapping logic - SchoolFactory(external_id='test_1') - school = SchoolFactory(external_id='test_1') - - Measurement.set_last_measurement_date(timezone.now() - timedelta(seconds=1)) - measurement = MeasurementFactory(school_id='test_1') - measurement.client_info = {} - measurement.save() - - sync_realtime_data() - - self.assertEqual(RealTimeConnectivity.objects.count(approx=False), 1) - self.assertEqual(RealTimeConnectivity.objects.first().school, school) - - def test_school_matching_unknown(self): - SchoolFactory(external_id='test_1') - Measurement.set_last_measurement_date(timezone.now() - timedelta(seconds=1)) - MeasurementFactory() - sync_realtime_data() - self.assertEqual(RealTimeConnectivity.objects.count(approx=False), 0) - - def test_fields(self): - school = SchoolFactory(external_id='test_1') - Measurement.set_last_measurement_date(timezone.now() - timedelta(seconds=1)) - - measurement = MeasurementFactory(school_id='test_1') - sync_realtime_data() - - connectivity_info = RealTimeConnectivity.objects.first() - self.assertIsNotNone(connectivity_info) - - self.assertEqual(RealTimeConnectivity.objects.first().created, measurement.timestamp) - self.assertEqual(RealTimeConnectivity.objects.first().connectivity_speed, int(measurement.download * 1024)) - self.assertEqual(RealTimeConnectivity.objects.first().connectivity_latency, measurement.latency) - self.assertEqual(RealTimeConnectivity.objects.first().school, school) diff --git a/proco/realtime_unicef/utils.py b/proco/realtime_unicef/utils.py deleted file mode 100644 index ed2d0ec..0000000 --- a/proco/realtime_unicef/utils.py +++ /dev/null @@ -1,55 +0,0 @@ -import logging - -from django.utils import timezone - -from proco.connection_statistics.models import RealTimeConnectivity -from proco.locations.models import Country -from proco.realtime_unicef.models import Measurement -from proco.schools.models import School - -logger = logging.getLogger('django.' 
+ __name__) - - -def sync_realtime_data(): - measurements = Measurement.objects.filter(timestamp__gt=Measurement.get_last_measurement_date()) - - realtime = [] - - countries = {m.client_info.get('Country') for m in measurements} - for country_code in countries: - if country_code: - country = Country.objects.filter(code=country_code).first() - else: - country = None - - schools_qs = School.objects - if country: - schools_qs = schools_qs.filter(country=country) - - schools_ids = {m.school_id for m in measurements if m.client_info.get('Country') == country_code} - schools = { - school.external_id: school - for school in schools_qs.filter(external_id__in=schools_ids) - } - - for measurement in measurements: - if measurement.school_id not in schools: - logger.debug(f'skipping measurement {measurement.uuid}: unknown school {measurement.school_id}') - continue - - if(measurement.download > 0 and measurement.latency > 0): - realtime.append(RealTimeConnectivity( - created=measurement.timestamp, - connectivity_speed=measurement.download * 1024, # kb/s -> b/s - connectivity_latency=measurement.latency, - school=schools[measurement.school_id], - )) - - RealTimeConnectivity.objects.bulk_create(realtime) - - # not using aggregate because there can be new entries between two operations - if measurements: - last_update = max((m.timestamp for m in measurements)) - else: - last_update = timezone.now() - Measurement.set_last_measurement_date(last_update) diff --git a/proco/schools/admin.py b/proco/schools/admin.py deleted file mode 100644 index 919c7b1..0000000 --- a/proco/schools/admin.py +++ /dev/null @@ -1,96 +0,0 @@ -from django.contrib import admin, messages -from django.contrib.admin.options import csrf_protect_m -from django.core.exceptions import PermissionDenied -from django.shortcuts import redirect -from django.urls import path, reverse -from django.utils.safestring import mark_safe - -from mapbox_location_field.admin import MapAdmin - -from proco.locations.filters import CountryFilterList -from proco.schools.forms import ImportSchoolsCSVForm, SchoolAdminForm -from proco.schools.models import FileImport, School -from proco.schools.tasks import process_loaded_file -from proco.utils.admin import CountryNameDisplayAdminMixin - - -class ImportFormMixin(object): - @csrf_protect_m - def changelist_view(self, request, extra_context=None): - if extra_context is None: - extra_context = {} - - extra_context['import_form'] = ImportSchoolsCSVForm() - - return super(ImportFormMixin, self).changelist_view(request, extra_context) - - -@admin.register(School) -class SchoolAdmin(ImportFormMixin, CountryNameDisplayAdminMixin, MapAdmin): - form = SchoolAdminForm - list_display = ('name', 'get_country_name', 'address', 'education_level', 'school_type','giga_id_school','education_level_regional') - list_filter = (CountryFilterList, 'education_level', 'environment', 'school_type') - search_fields = ('name', 'country__name', 'location__name') - change_list_template = 'admin/schools/change_list.html' - ordering = ('country', 'name') - readonly_fields = ('get_weekly_stats_url',) - raw_id_fields = ('country', 'location', 'last_weekly_status') - - def get_urls(self): - urls = super().get_urls() - custom_urls = [ - path('import/csv/', self.import_csv, name='schools_school_import_csv'), - ] - return custom_urls + urls - - def get_queryset(self, request): - qs = super().get_queryset(request) - if not request.user.is_superuser: - qs = qs.filter(country__in=request.user.countries_available.all()) - return 
qs.prefetch_related('country').defer('location') - - def import_csv(self, request): - user = request.user - if user.is_authenticated and user.has_perm('schools.add_fileimport') and request.method == 'POST': - form = ImportSchoolsCSVForm(data=request.POST, files=request.FILES) - if form.is_valid(): - cleaned_data = form.clean() - imported_file = FileImport.objects.create( - uploaded_file=cleaned_data['csv_file'], uploaded_by=request.user, - ) - process_loaded_file.delay(imported_file.id, force=cleaned_data['force']) - - messages.success(request, 'Your file was uploaded and will be processed soon.') - return redirect('admin:schools_fileimport_change', imported_file.id) - - raise PermissionDenied() - - def get_weekly_stats_url(self, obj): - stats_url = reverse('admin:connection_statistics_schoolweeklystatus_changelist') - return mark_safe(f'Here') # noqa: S703,S308 - - get_weekly_stats_url.short_description = 'Weekly Stats' - - -@admin.register(FileImport) -class FileImportAdmin(ImportFormMixin, admin.ModelAdmin): - change_form_template = 'admin/schools/file_imports_change_form.html' - - list_display = ('id', 'country', 'uploaded_file', 'status', 'uploaded_by', 'modified') - list_select_related = ('uploaded_by', 'country') - list_filter = ('status',) - readonly_fields = ('country', 'uploaded_file', 'status', 'statistic', 'errors', 'uploaded_by', 'modified') - ordering = ('-id',) - raw_id_fields = ('country',) - - def has_add_permission(self, request): - return False - - def has_change_permission(self, request, obj=None): - return False - - def get_queryset(self, request): - qs = super().get_queryset(request) - if not request.user.is_superuser: - qs = qs.filter(uploaded_by=request.user) - return qs.defer('country__geometry') diff --git a/proco/schools/api.py b/proco/schools/api.py index 752143b..02298cf 100644 --- a/proco/schools/api.py +++ b/proco/schools/api.py @@ -1,4 +1,5 @@ import re +import logging from datetime import datetime, time from django.conf import settings @@ -42,10 +43,11 @@ from proco.utils import dates as date_utilities from proco.utils.error_message import id_missing_error_mess, delete_succ_mess, \ error_mess -from proco.utils.filters import NullsAlwaysLastOrderingFilter from proco.utils.log import action_log, changed_fields from proco.utils.mixins import CachedListMixin +logger = logging.getLogger('gigamaps.' 
+ __name__) + @method_decorator([cache_control(public=True, max_age=settings.CACHE_CONTROL_MAX_AGE_FOR_FE)], name='dispatch') class SchoolsViewSet( @@ -100,10 +102,6 @@ def get_serializer_class(self): @action(methods=['get'], detail=False, url_path='export-csv-schools', url_name='export_csv_schools') def export_csv_schools(self, request, *args, **kwargs): - # country = get_object_or_404( - # self.get_queryset().annotate(code_lower=Lower('country__code')), - # code_lower=self.kwargs.get('country_code').lower(), - # ) country = self.get_country() serializer = self.get_serializer(self.get_queryset(), many=True) csvwriter = SchoolsCSVWriterBackend(serializer, country) @@ -130,9 +128,6 @@ def get_serializer(self, *args, **kwargs): class BaseTileGenerator: - # def __init__(self, table_config): - # self.table_config = table_config - def path_to_tile(self, request): path = "/" + request.query_params.get('z') + "/" + request.query_params.get( 'x') + "/" + request.query_params.get('y') @@ -187,11 +182,10 @@ def sql_to_pbf(self, sql): if not cur: return Response({"error": f"sql query failed: {sql}"}, status=404) return cur.fetchone()[0] - except Exception as error: + except Exception: return Response({"error": "An error occurred while executing SQL query"}, status=500) def generate_tile(self, request): - # start_time = time.time() tile = self.path_to_tile(request) if not (tile and self.tile_is_valid(tile)): return Response({"error": "Invalid tile path"}, status=400) @@ -200,7 +194,7 @@ def generate_tile(self, request): sql = self.envelope_to_sql(env, request) - print(sql.replace('\n', '')) + logger.debug(sql.replace('\n', '')) pbf = self.sql_to_pbf(sql) if isinstance(pbf, memoryview): @@ -218,7 +212,6 @@ def __init__(self, table_config): def envelope_to_sql(self, env, request): country_id = request.query_params.get('country_id', None) admin1_id = request.query_params.get('admin1_id', None) - # school_ids = request.query_params.get('school_ids', "") tbl = self.table_config.copy() tbl['env'] = self.envelope_to_bounds_sql(env) @@ -230,20 +223,13 @@ def envelope_to_sql(self, env, request): if country_id or admin1_id: if admin1_id: - tbl['admin1_condition'] = f"AND t.admin1_id = {admin1_id}" + tbl['admin1_condition'] = f"AND schools_school.admin1_id = {admin1_id}" if country_id: - tbl['country_condition'] = f"AND t.country_id = {country_id}" + tbl['country_condition'] = f"AND schools_school.country_id = {country_id}" else: tbl['random_order'] = 'ORDER BY random()' if int(request.query_params.get('z', 0)) == 2 else '' - # # Splitting the school_ids string into a list of individual IDs - # school_id_list = school_ids.split(',') - - # school_condition = "" - # if school_id_list: - # school_condition = "AND t._id IN ({0})".format(','.join(school_id_list)) - """In order to cater school requirements, {school_condition} can be added to id before/after country_condition in the query""" @@ -253,27 +239,48 @@ def envelope_to_sql(self, env, request): {env}::box2d AS b2d ), mvtgeom AS ( - SELECT ST_AsMVTGeom(ST_Transform(t.{geomColumn}, 3857), bounds.b2d) AS geom, - {attrColumns}, t.coverage_type, - CASE WHEN LOWER(t."coverage_type") IN ('5g', '4g') THEN 'good' - WHEN LOWER(t."coverage_type") IN ('3g', '2g') THEN 'moderate' - WHEN LOWER(t."coverage_type") = 'no' THEN 'bad' + SELECT ST_AsMVTGeom(ST_Transform(schools_school."geopoint", 3857), bounds.b2d) AS geom, + schools_school."id", + schools_school."coverage_type", + CASE WHEN LOWER(schools_school."coverage_type") IN ('5g', '4g') THEN 'good' + WHEN 
LOWER(schools_school."coverage_type") IN ('3g', '2g') THEN 'moderate' + WHEN LOWER(schools_school."coverage_type") = 'no' THEN 'bad' ELSE 'unknown' END AS coverage_status, - CASE WHEN t.connectivity_status IN ('good', 'moderate') THEN 'connected' - WHEN t.connectivity_status = 'no' THEN 'not_connected' ELSE 'unknown' + CASE WHEN schools_school."connectivity_status" IN ('good', 'moderate') THEN 'connected' + WHEN schools_school."connectivity_status" = 'no' THEN 'not_connected' ELSE 'unknown' END AS connectivity_status - FROM {table} t, bounds - WHERE ST_Intersects(t.{geomColumn}, ST_Transform(bounds.geom, {srid})) - AND t."deleted" IS NULL + FROM schools_school + INNER JOIN bounds ON ST_Intersects(schools_school."geopoint", ST_Transform(bounds.geom, 4326)) + {school_weekly_join} + WHERE schools_school."deleted" IS NULL {country_condition} {admin1_condition} + {school_condition} + {school_weekly_condition} {random_order} {limit_condition} ) SELECT ST_AsMVT(DISTINCT mvtgeom.*) FROM mvtgeom """ + tbl['school_condition'] = '' + tbl['school_weekly_join'] = '' + tbl['school_weekly_condition'] = '' + + school_filters = core_utilities.get_filter_sql(request, 'schools', 'schools_school') + if len(school_filters) > 0: + tbl['school_condition'] = 'AND ' + school_filters + + school_static_filters = core_utilities.get_filter_sql(request, 'school_static', + 'connection_statistics_schoolweeklystatus') + if len(school_static_filters) > 0: + tbl['school_weekly_join'] = """ + LEFT OUTER JOIN connection_statistics_schoolweeklystatus + ON schools_school."last_weekly_status_id" = connection_statistics_schoolweeklystatus."id" + """ + tbl['school_weekly_condition'] = 'AND ' + school_static_filters + return sql_tmpl.format(**tbl) @@ -294,7 +301,7 @@ def get(self, request): try: return self.tile_generator.generate_tile(request) except Exception as ex: - print('Exception occurred for school tiles endpoint: {}'.format(ex)) + logger.error('Exception occurred for school tiles endpoint: {}'.format(ex)) return Response({"error": "An error occurred while processing the request"}, status=500) @@ -315,22 +322,24 @@ def query_filters(self, request, table_configs): 'school_id__in' in request.query_params ): if 'country_id' in request.query_params: - table_configs['country_condition'] = f" AND t.country_id = {request.query_params['country_id']}" + table_configs[ + 'country_condition'] = f" AND schools_school.country_id = {request.query_params['country_id']}" elif 'country_id__in' in request.query_params: country_ids = ','.join([c.strip() for c in request.query_params['country_id__in'].split(',')]) - table_configs['country_condition'] = f" AND t.country_id IN ({country_ids})" + table_configs['country_condition'] = f" AND schools_school.country_id IN ({country_ids})" if 'admin1_id' in request.query_params: - table_configs['admin1_condition'] = f" AND t.admin1_id = {request.query_params['admin1_id']}" + table_configs[ + 'admin1_condition'] = f" AND schools_school.admin1_id = {request.query_params['admin1_id']}" elif 'admin1_id__in' in request.query_params: admin1_ids = ','.join([c.strip() for c in request.query_params['admin1_id__in'].split(',')]) - table_configs['admin1_condition'] = f" AND t.admin1_id IN ({admin1_ids})" + table_configs['admin1_condition'] = f" AND schools_school.admin1_id IN ({admin1_ids})" if 'school_id' in request.query_params: - table_configs['school_condition'] = f" AND t.id = {request.query_params['school_id']}" + table_configs['school_condition'] = f" AND schools_school.id = 
{request.query_params['school_id']}" elif 'school_id__in' in request.query_params: school_ids = ','.join([c.strip() for c in request.query_params['school_id__in'].split(',')]) - table_configs['school_condition'] = f" AND t.id IN ({school_ids})" + table_configs['school_condition'] = f" AND schools_school.id IN ({school_ids})" else: zoom_level = int(request.query_params.get('z', '0')) if zoom_level == 0: @@ -371,7 +380,7 @@ def query_filters(self, request, table_configs): # If for any week of the month data is not available then pick last week number week_number = week_numbers_for_month[-1] - table_configs['weekly_lookup_condition'] = (f'ON t.id = c.school_id AND c.week={week_number} ' + table_configs['weekly_lookup_condition'] = (f'ON schools_school.id = c.school_id AND c.week={week_number} ' f'AND c.year={year_number}') table_configs['benchmark'], table_configs['benchmark_unit'] = get_benchmark_value_for_default_download_layer( @@ -387,97 +396,21 @@ def envelope_to_sql(self, env, request): tbl['country_condition'] = '' tbl['admin1_condition'] = '' tbl['school_condition'] = '' - tbl['weekly_lookup_condition'] = 'ON t.last_weekly_status_id = c.id' + tbl['weekly_lookup_condition'] = 'ON schools_school.last_weekly_status_id = c.id' tbl['random_order'] = '' tbl['rt_date_condition'] = '' self.query_filters(request, tbl) """sql with join and connectivity_speed""" - # sql_tmpl = """ - # WITH bounds AS ( - # SELECT {env} AS geom, - # {env}::box2d AS b2d - # ), - # rt_status AS ( - # SELECT DISTINCT t.id as school_id, - # true rt_registered, - # FIRST_VALUE(dailystat.created) OVER (PARTITION BY dailystat.school_id ORDER BY dailystat.created) - # rt_registration_date - # FROM connection_statistics_schooldailystatus dailystat - # INNER JOIN schools_school t ON t.id = dailystat.school_id - # {country_condition}{school_condition} - # ), - # mvtgeom AS ( - # SELECT ST_AsMVTGeom(ST_Transform(t.{geomColumn}, 3857), bounds.b2d) AS geom, - # t.{attrColumns}, - # c.connectivity_speed, - # t.connectivity_status as connectivity, - # CASE WHEN c.connectivity_speed > {benchmark} THEN 'good' - # WHEN c.connectivity_speed < {benchmark} and c.connectivity_speed >= 1000000 THEN 'moderate' - # WHEN c.connectivity_speed < 1000000 THEN 'bad' - # ELSE 'unknown' - # END as hist_connectivity, - # CASE WHEN t.connectivity_status IN ('good', 'moderate') THEN 'connected' - # WHEN t.connectivity_status = 'no' THEN 'not_connected' - # ELSE 'unknown' - # END as connectivity_status, - # CASE WHEN rt_status.rt_registered = True {rt_date_condition} THEN True - # ELSE False - # END as is_rt_connected - # FROM {table} t - # INNER JOIN bounds ON ST_Intersects(t.{geomColumn}, ST_Transform(bounds.geom, {srid})) - # {country_condition}{school_condition} - # LEFT JOIN connection_statistics_schoolweeklystatus c {weekly_lookup_condition} - # LEFT JOIN rt_status ON rt_status.school_id = t.id - # {random_order} - # {limit_condition} - # ) - # SELECT ST_AsMVT(mvtgeom.*) FROM mvtgeom; - # """ - - # sql_tmpl = """ - # WITH bounds AS ( - # SELECT {env} AS geom, - # {env}::box2d AS b2d - # ), - # mvtgeom AS ( - # SELECT ST_AsMVTGeom(ST_Transform(t.{geomColumn}, 3857), bounds.b2d) AS geom, - # t.{attrColumns}, - # c.connectivity_speed, - # t.connectivity_status as connectivity, - # CASE WHEN c.connectivity_speed > {benchmark} THEN 'good' - # WHEN c.connectivity_speed < {benchmark} and c.connectivity_speed >= 1000000 - # THEN 'moderate' - # WHEN c.connectivity_speed < 1000000 THEN 'bad' - # ELSE 'unknown' - # END as hist_connectivity, - # CASE WHEN 
t.connectivity_status IN ('good', 'moderate') THEN 'connected' - # WHEN t.connectivity_status = 'no' THEN 'not_connected' - # ELSE 'unknown' - # END as connectivity_status, - # CASE WHEN rt_status.rt_registered = True {rt_date_condition} THEN True - # ELSE False - # END as is_rt_connected - # FROM {table} t - # INNER JOIN bounds ON ST_Intersects(t.{geomColumn}, ST_Transform(bounds.geom, {srid})) - # {country_condition}{school_condition} - # LEFT JOIN connection_statistics_schoolweeklystatus c {weekly_lookup_condition} - # LEFT JOIN school_rt_connectivity_stat rt_status ON rt_status.giga_id_school = t.giga_id_school - # {random_order} - # {limit_condition} - # ) - # SELECT ST_AsMVT(mvtgeom.*) FROM mvtgeom; - # """ - sql_tmpl = """ WITH bounds AS ( SELECT {env} AS geom, {env}::box2d AS b2d ), mvtgeom AS ( - SELECT ST_AsMVTGeom(ST_Transform(t.{geomColumn}, 3857), bounds.b2d) AS geom, - t.{attrColumns}, + SELECT ST_AsMVTGeom(ST_Transform(schools_school.geopoint, 3857), bounds.b2d) AS geom, + schools_school.id, CASE WHEN c.id is NULL AND rt_status.rt_registered = True {rt_date_condition} THEN 'unknown' WHEN c.id is NULL THEN NULL WHEN c.connectivity_speed > {benchmark} THEN 'good' @@ -485,26 +418,44 @@ def envelope_to_sql(self, env, request): WHEN c.connectivity_speed < 1000000 THEN 'bad' ELSE 'unknown' END as connectivity, - CASE WHEN t.connectivity_status IN ('good', 'moderate') THEN 'connected' - WHEN t.connectivity_status = 'no' THEN 'not_connected' + CASE WHEN schools_school.connectivity_status IN ('good', 'moderate') THEN 'connected' + WHEN schools_school.connectivity_status = 'no' THEN 'not_connected' ELSE 'unknown' END as connectivity_status, CASE WHEN rt_status.rt_registered = True {rt_date_condition} THEN True ELSE False END as is_rt_connected - FROM {table} t - INNER JOIN bounds ON ST_Intersects(t.{geomColumn}, ST_Transform(bounds.geom, {srid})) - AND t."deleted" IS NULL {country_condition}{admin1_condition}{school_condition} + FROM schools_school + INNER JOIN bounds ON ST_Intersects(schools_school.geopoint, ST_Transform(bounds.geom, {srid})) + AND schools_school."deleted" IS NULL {country_condition}{admin1_condition}{school_condition} + {school_weekly_join} LEFT JOIN connection_statistics_schoolweeklystatus c {weekly_lookup_condition} AND c."deleted" IS NULL - LEFT JOIN connection_statistics_schoolrealtimeregistration rt_status ON rt_status.school_id = t.id + LEFT JOIN connection_statistics_schoolrealtimeregistration rt_status ON rt_status.school_id = schools_school.id AND rt_status."deleted" IS NULL + {school_weekly_condition} {random_order} {limit_condition} ) SELECT ST_AsMVT(DISTINCT mvtgeom.*) FROM mvtgeom; """ + tbl['school_weekly_join'] = '' + tbl['school_weekly_condition'] = '' + + school_filters = core_utilities.get_filter_sql(request, 'schools', 'schools_school') + if len(school_filters) > 0: + tbl['school_condition'] += ' AND ' + school_filters + + school_static_filters = core_utilities.get_filter_sql(request, 'school_static', + 'connection_statistics_schoolweeklystatus') + if len(school_static_filters) > 0: + tbl['school_weekly_join'] = """ + LEFT OUTER JOIN connection_statistics_schoolweeklystatus + ON schools_school."last_weekly_status_id" = connection_statistics_schoolweeklystatus."id" + """ + tbl['school_weekly_condition'] = 'WHERE ' + school_static_filters + return sql_tmpl.format(**tbl) @@ -525,7 +476,7 @@ def get(self, request): try: return self.tile_generator.generate_tile(request) except Exception as ex: - print('Exception occurred for school connectivity tiles 
endpoint: {}'.format(ex)) + logger.error('Exception occurred for school connectivity tiles endpoint: {}'.format(ex)) return Response({'error': 'An error occurred while processing the request'}, status=500) diff --git a/proco/schools/api_urls.py b/proco/schools/api_urls.py index 7e5b169..764fd5c 100644 --- a/proco/schools/api_urls.py +++ b/proco/schools/api_urls.py @@ -20,11 +20,11 @@ 'get': 'list', 'post': 'create', 'delete': 'destroy', - }), name='list_or_create_or_destroy_school'), + }), name='list-create-destroy-school'), path('schools/school//', api.AdminViewSchoolAPIViewSet.as_view({ 'put': 'update', 'get': 'retrieve', - }), name='update_or_retrieve_school'), + }), name='update-or-retrieve-school'), path('schools/fileimport/', api.ImportCSVViewSet.as_view({ 'post': 'fileimport', 'get': 'list', diff --git a/proco/schools/constants.py b/proco/schools/constants.py index 1177c25..2448ea6 100644 --- a/proco/schools/constants.py +++ b/proco/schools/constants.py @@ -19,8 +19,9 @@ class ColorMapSchema: 'unknown': 'unknown', 'no': 'no', '2g': 'moderate', - '3g': 'good', + '3g': 'moderate', '4g': 'good', + '5g': 'good', } CONNECTIVITY_SPEED_FOR_GOOD_CONNECTIVITY_STATUS = 5 * (10 ** 6) diff --git a/proco/schools/forms.py b/proco/schools/forms.py deleted file mode 100644 index e2a5d4a..0000000 --- a/proco/schools/forms.py +++ /dev/null @@ -1,48 +0,0 @@ -from django import forms -from django.contrib.gis.forms import PointField -from django.contrib.gis.geos import Point -from django.urls import reverse -from django.utils.translation import ugettext as _ - -from crispy_forms.helper import FormHelper -from crispy_forms.layout import ButtonHolder, Field, Layout, Submit -from mapbox_location_field.forms import parse_location -from mapbox_location_field.widgets import MapAdminInput - -from proco.schools.models import School - - -class ImportSchoolsCSVForm(forms.Form): - csv_file = forms.FileField() - force = forms.BooleanField(label=_('Skip rows with bad data'), required=False) - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.helper = FormHelper() - self.helper.form_class = 'blueForms import-csv-form' - self.helper.form_method = 'post' - self.helper.form_action = reverse('admin:schools_school_import_csv') - self.helper.layout = Layout( - Field('csv_file'), - Field('force'), - ButtonHolder( - Submit('submit', 'Submit', css_class='button'), - ), - ) - - -class MapboxPointField(PointField): - def to_python(self, value): - if isinstance(value, str): - lng, lat = parse_location(value, first_in_order='lat') - point = Point(x=lng, y=lat, srid=4326) - return point - return value - - -class SchoolAdminForm(forms.ModelForm): - geopoint = MapboxPointField(widget=MapAdminInput, required=False) - - class Meta: - model = School - fields = forms.ALL_FIELDS diff --git a/proco/schools/migrations/0030_added_unique_constraint_on_country_and_giga_id.py b/proco/schools/migrations/0030_added_unique_constraint_on_country_and_giga_id.py old mode 100644 new mode 100755 diff --git a/proco/schools/models.py b/proco/schools/models.py index 1aad77f..0d6564f 100644 --- a/proco/schools/models.py +++ b/proco/schools/models.py @@ -122,6 +122,8 @@ class FileImport(TimeStampedModel): errors = models.TextField(blank=True) statistic = models.TextField(blank=True) + objects = models.Manager() + def __str__(self): return self.uploaded_file.name diff --git a/proco/schools/serializers.py b/proco/schools/serializers.py index 7507f1b..c050627 100644 --- a/proco/schools/serializers.py +++ 
b/proco/schools/serializers.py @@ -1,3 +1,4 @@ +import logging import re from collections import OrderedDict from datetime import timedelta @@ -21,6 +22,8 @@ from proco.schools.models import School, FileImport from proco.utils import dates as date_utilities +logger = logging.getLogger('gigamaps.' + __name__) + class BaseSchoolSerializer(serializers.ModelSerializer): class Meta: @@ -80,73 +83,6 @@ class Meta(BaseSchoolSerializer.Meta): ) -class SchoolSerializer(CountryToSerializerMixin, BaseSchoolSerializer): - admin1_name = serializers.ReadOnlyField(source='admin1.name', default='') - admin2_name = serializers.ReadOnlyField(source='admin2.name', default='') - - statistics = serializers.SerializerMethodField() - data_status = serializers.SerializerMethodField() - - is_verified = serializers.SerializerMethodField() - - class Meta(BaseSchoolSerializer.Meta): - fields = BaseSchoolSerializer.Meta.fields + ( - 'statistics', - 'data_status', - 'connectivity_status', - 'coverage_status', - 'coverage_type', - 'gps_confidence', - 'address', - 'postal_code', - 'admin1_name', - 'admin2_name', - 'timezone', - 'altitude', - 'email', - 'education_level', - 'environment', - 'school_type', - 'is_verified', - # 'connectivity_dist_status', - ) - - def get_statistics(self, instance): - return SchoolWeeklyStatusSerializer(instance.last_weekly_status).data - - def get_data_status(self, instance): - latest_school_weekly_instance = instance.last_weekly_status - static_data = {} - - if latest_school_weekly_instance: - year = latest_school_weekly_instance.year - week_number = latest_school_weekly_instance.week - - monday_of_week = date_utilities.get_first_date_of_week(year, week_number) - sunday_of_week = monday_of_week + timedelta(days=6) - - static_data = { - 'week': { - 'start_date': date_utilities.format_date(monday_of_week), - 'end_date': date_utilities.format_date(sunday_of_week), - }, - 'month': { - 'start_date': date_utilities.format_date(date_utilities.get_first_date_of_month( - year, monday_of_week.month)), - 'end_date': date_utilities.format_date(date_utilities.get_last_date_of_month( - year, monday_of_week.month)) - } - } - return static_data - - def get_is_verified(self, obj): - if not self.country.last_weekly_status: - return None - return self.country.last_weekly_status.integration_status not in [ - CountryWeeklyStatus.COUNTRY_CREATED, CountryWeeklyStatus.SCHOOL_OSM_MAPPED, - ] - - class ExtendedSchoolSerializer(BaseSchoolSerializer): class Meta(BaseSchoolSerializer.Meta): fields = BaseSchoolSerializer.Meta.fields + ( @@ -216,24 +152,7 @@ class ExpandSchoolWeeklyStatusSerializer(FlexFieldsModelSerializer): class Meta: model = SchoolWeeklyStatus fields = ( - # 'id', - # 'num_students', - # 'num_teachers', - # 'num_classroom', - # 'num_latrines', - # 'running_water', - # 'electricity_availability', - # 'computer_lab', - # 'num_computers', - # 'connectivity', - # 'connectivity_type', - # 'connectivity_speed', - # 'connectivity_latency', - # 'coverage_availability', - # 'coverage_type', 'school_data_source', - # 'created', - # 'modified', ) @@ -241,49 +160,28 @@ class SchoolStatusSerializer(FlexFieldsModelSerializer): class Meta: model = School read_only_fields = fields = ( - # 'id', - # 'created', - # 'modified', 'name', - # 'timezone', 'geopoint', - # 'gps_confidence', - # 'altitude', - # 'address', - # 'postal_code', - # 'email', 'education_level', - # 'environment', - # 'school_type', 'country_id', - # 'location_id', - # 'admin1_id', - # 'admin2_id', 'external_id', 'last_weekly_status_id', - # 
'name_lower', 'giga_id_school', 'education_level_regional', - # 'connectivity_status', - # 'coverage_type', ) expandable_fields = { 'country': (ExpandCountrySerializer, {'source': 'country'}), - # 'admin1': (ExpandCountryAdminSerializer, {'source': 'admin1'}), - # 'admin2': (ExpandCountryAdminSerializer, {'source': 'admin2'}), 'last_weekly_status': (ExpandSchoolWeeklyStatusSerializer, {'source': 'last_weekly_status'}), } class SchoolCSVSerializer(SchoolStatusSerializer, DownloadSerializerMixin): - # geopoint = GeoPointCSVField() class Meta(SchoolStatusSerializer.Meta): report_fields = OrderedDict([ ('giga_id_school', 'School Giga ID'), - # ('external_id', 'School Source ID'), ('name', 'School Name'), ('longitude', {'name': 'Longitude', 'is_computed': True}), ('latitude', {'name': 'Latitude', 'is_computed': True}), @@ -291,62 +189,23 @@ class Meta(SchoolStatusSerializer.Meta): ('country_iso3_format', {'name': 'Country ISO3 Code', 'is_computed': True}), ('country_name', {'name': 'Country Name', 'is_computed': True}), ('school_data_source', {'name': 'School Data Source', 'is_computed': True}), - # ('timezone', 'TimeZone'), - # ('geopoint', 'Location GeoPoints'), - # ('gps_confidence', 'GPS Confidence'), - # ('altitude', 'Altitude'), - # ('address', 'Address'), - # ('postal_code', 'Postal Code'), - # ('email', 'Email'), - # ('environment', 'Environment'), - # ('school_type', 'School Type'), - # ('country_code', {'name': 'Country Code', 'is_computed': True}), - # ('admin1_name', {'name': 'Admin 1 Name', 'is_computed': True}), - # ('admin2_name', {'name': 'Admin 2 Name', 'is_computed': True}), - # ('name_lower', 'Name Lower'), - # ('education_level_regional', 'Education Level Regional'), - # ('last_weekly_status', {'name': 'Last Week Status', 'is_computed': True}), ]) def get_country_name(self, data): return data.get('country', {}).get('name') - # def get_admin1_name(self, data): - # admin_data = data.get('admin1', None) - # if admin_data: - # return admin_data.get('name') - # - # def get_admin2_name(self, data): - # admin_data = data.get('admin2', None) - # if admin_data: - # return admin_data.get('name') - def get_country_iso3_format(self, data): return data.get('country', {}).get('iso3_format') - # def get_country_code(self, data): - # return data.get('country', {}).get('code') - def get_longitude(self, data): point_coordinates = data.get('geopoint', {}).get('coordinates', []) if len(point_coordinates) > 0: return point_coordinates[0] - return def get_latitude(self, data): point_coordinates = data.get('geopoint', {}).get('coordinates', []) if len(point_coordinates) > 1: return point_coordinates[1] - return - - # def get_last_weekly_status(self, data): - # last_week_data = data.get('last_weekly_status', {}) - # values = [] - # for key, value in last_week_data.items(): - # if isinstance(value, bool): - # value = self.boolean_flags.get(value) - # values.append('{0}:{1}'.format(key, value)) - # return '\t'.join(values) def get_school_data_source(self, data): return data.get('last_weekly_status', {}).get('school_data_source') @@ -393,16 +252,16 @@ def create(self, validated_data): if deleted_school_with_same_giga_id: validated_data['deleted'] = None school_instance = super().update(deleted_school_with_same_giga_id, validated_data) - print('School restored') + logger.debug('School restored') SchoolDailyStatus.objects.all_deleted().filter(school=school_instance).update(deleted=None) - print('School Daily restored') + logger.debug('School Daily restored') 
SchoolWeeklyStatus.objects.all_deleted().filter(school=school_instance).update(deleted=None) - print('School Weekly restored') + logger.debug('School Weekly restored') SchoolRealTimeRegistration.objects.all_deleted().filter(school=school_instance).update(deleted=None) - print('School Real Time Registration restored') + logger.debug('School Real Time Registration restored') return school_instance else: diff --git a/proco/schools/tasks.py b/proco/schools/tasks.py index 5179c9d..38c19a7 100644 --- a/proco/schools/tasks.py +++ b/proco/schools/tasks.py @@ -1,14 +1,17 @@ +import logging import random import traceback +import uuid from collections import Counter from copy import copy from random import randint # noqa from typing import List +from celery import current_task from django.contrib.gis.geos import MultiPoint, Point -from django.core.cache import cache from django.db import transaction +from proco.background import utils as background_task_utilities from proco.connection_statistics.utils import update_country_data_source_by_csv_filename, update_country_weekly_status from proco.core import utils as core_utilities from proco.locations.models import Country @@ -20,6 +23,8 @@ from proco.utils.dates import format_date from proco.utils.tasks import update_country_related_cache +logger = logging.getLogger('gigamaps.' + __name__) + class FailedImportError(Exception): pass @@ -55,11 +60,11 @@ def _find_country(loaded: List[dict]) -> [Country]: else: countries_counter = Counter() for country in countries: - instersections = country.geometry.intersection(points) - if isinstance(instersections, Point): + intersections = country.geometry.intersection(points) + if isinstance(intersections, Point): countries_counter[country] = 1 else: - countries_counter[country] = len(instersections) + countries_counter[country] = len(intersections) return countries_counter.most_common()[0][0] @@ -140,17 +145,16 @@ def update_school_records(): Periodic task executed every day at 01:00 AM and 01:00 PM to update the school fields based on changes in SchoolWeekly or CountryWeekly tables. 
""" - task_cache_key = 'update_school_records_status_{current_time}'.format( - current_time=format_date(core_utilities.get_current_datetime_object(), frmt='%d%m%Y_%H')) - running_task = cache.get(task_cache_key, None) + task_key = 'update_school_records_status_{current_time}'.format( + current_time=format_date(core_utilities.get_current_datetime_object(), frmt='%d%m%Y_%H')) - if running_task in [None, b'completed', 'completed']: - print('***** Not found running Job *****') - cache.set(task_cache_key, 'running', None) + task_id = current_task.request.id or str(uuid.uuid4()) + task_instance = background_task_utilities.task_on_start( + task_id, task_key, 'Update the school fields based on changes in SchoolWeekly or CountryWeekly tables') + if task_instance: + logger.debug('Not found running job for school connectivity status update task.') school_utilities.update_school_from_country_or_school_weekly_update() - - cache.set(task_cache_key, 'completed', None) + background_task_utilities.task_on_complete(task_instance) else: - print('***** Found running Job with "{0}" name so skipping current iteration *****'.format(task_cache_key)) - + logger.debug('Found running job with "{0}" name so skipping current iteration.'.format(task_key)) diff --git a/proco/schools/tests/factories.py b/proco/schools/tests/factories.py index 09b9980..3d53b87 100644 --- a/proco/schools/tests/factories.py +++ b/proco/schools/tests/factories.py @@ -1,21 +1,30 @@ from django.contrib.gis.geos import GEOSGeometry - from factory import SubFactory from factory import django as django_factory from factory import fuzzy -from proco.locations.tests.factories import CountryFactory, LocationFactory -from proco.schools.models import School +from proco.locations.tests.factories import Admin1Factory, CountryFactory, LocationFactory +from proco.schools.models import FileImport, School class SchoolFactory(django_factory.DjangoModelFactory): name = fuzzy.FuzzyText(length=20) + giga_id_school = fuzzy.FuzzyText(length=20) external_id = fuzzy.FuzzyText(length=20) country = SubFactory(CountryFactory) location = SubFactory(LocationFactory) + admin1 = SubFactory(Admin1Factory) geopoint = GEOSGeometry('Point(1 1)') gps_confidence = fuzzy.FuzzyDecimal(low=0.0) altitude = fuzzy.FuzzyInteger(0, 10000) class Meta: model = School + + +class FileImportFactory(django_factory.DjangoModelFactory): + country = SubFactory(CountryFactory) + uploaded_file = fuzzy.FuzzyText(length=20) + + class Meta: + model = FileImport diff --git a/proco/schools/tests/test_api.py b/proco/schools/tests/test_api.py index dc76194..67965e3 100644 --- a/proco/schools/tests/test_api.py +++ b/proco/schools/tests/test_api.py @@ -1,25 +1,40 @@ from django.core.cache import cache from django.test import TestCase -from django.urls import reverse - +from django.urls import resolve, reverse from rest_framework import status from proco.connection_statistics.models import CountryWeeklyStatus from proco.connection_statistics.tests.factories import SchoolWeeklyStatusFactory -from proco.locations.tests.factories import CountryFactory -from proco.schools.tests.factories import SchoolFactory +from proco.custom_auth.tests import test_utils as test_utilities +from proco.locations.tests.factories import Admin1Factory, CountryFactory +from proco.schools.tests.factories import FileImportFactory, SchoolFactory from proco.utils.tests import TestAPIViewSetMixin +def schools_url(url_params, query_param, view_name='schools-list'): + url = reverse('schools:' + view_name, args=url_params) + view = 
resolve(url) + view_info = view.func + + if len(query_param) > 0: + query_params = '?' + '&'.join([key + '=' + str(val) for key, val in query_param.items()]) + url += query_params + return url, view, view_info + + class SchoolsApiTestCase(TestAPIViewSetMixin, TestCase): base_view = 'schools:schools' @classmethod def setUpTestData(cls): cls.country = CountryFactory() - cls.school_one = SchoolFactory(country=cls.country, location__country=cls.country) - cls.school_two = SchoolFactory(country=cls.country, location__country=cls.country) - cls.school_three = SchoolFactory(country=cls.country, location__country=cls.country) + + cls.admin1_one = Admin1Factory(country=cls.country) + + cls.school_one = SchoolFactory(country=cls.country, location__country=cls.country, admin1=cls.admin1_one) + cls.school_two = SchoolFactory(country=cls.country, location__country=cls.country, admin1=cls.admin1_one) + cls.school_three = SchoolFactory(country=cls.country, location__country=cls.country, admin1=cls.admin1_one) + cls.school_weekly_one = SchoolWeeklyStatusFactory( school=cls.school_one, connectivity=True, connectivity_speed=3 * (10 ** 6), @@ -42,6 +57,13 @@ def setUpTestData(cls): cls.school_three.last_weekly_status = cls.school_weekly_three cls.school_three.save() + cls.admin_user = test_utilities.setup_admin_user_by_role() + cls.read_only_user = test_utilities.setup_read_only_user_by_role() + + cls.imported_file_one = FileImportFactory(country=cls.country) + cls.imported_file_one.uploaded_by = cls.admin_user + cls.imported_file_one.save() + def setUp(self): cache.clear() super().setUp() @@ -118,7 +140,6 @@ def test_schools_detail(self): ) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data['id'], self.school_one.id) - # self.assertIn('statistics', response.data) def test_update_keys(self): # todo: move me to proper place @@ -135,3 +156,360 @@ def test_update_keys(self): f'SOFT_CACHE_SCHOOLS_{self.country.code.lower()}_', ])), ) + + def test_random_schools_list(self): + with self.assertNumQueries(2): + response = self.forced_auth_req( + 'get', + reverse('schools:random-schools'), + user=None, + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn('geopoint', response.data[0]) + self.assertIn('country_integration_status', response.data[0]) + self.assertIn('country_id', response.data[0]) + + def test_default_coverage_layer_school_tiles_country_view(self): + url, _, view = schools_url((), { + 'country_id': self.country.id, + 'z': '2', + 'x': '1', + 'y': '2.mvt', + }, view_name='tiles-view') + + response = self.forced_auth_req('get', url, view=view) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + def test_default_coverage_layer_school_tiles_admin_view(self): + url, _, view = schools_url((), { + 'country_id': self.country.id, + 'admin1_id': '12345678', + 'z': '2', + 'x': '1', + 'y': '2.mvt', + }, view_name='tiles-view') + + response = self.forced_auth_req('get', url, view=view) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + def test_default_coverage_layer_school_tiles_global_view(self): + url, _, view = schools_url((), { + 'z': '2', + 'x': '1', + 'y': '2.mvt', + }, view_name='tiles-view') + + response = self.forced_auth_req('get', url, view=view) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + def test_default_download_layer_school_tiles_country_view(self): + url, _, view = schools_url((), { + 'country_id': self.country.id, + 'indicator': 'download', + 'benchmark': 'global', + 'start_date': 
'24-06-2024', + 'end_date': '30-06-2024', + 'is_weekly': 'true', + 'z': '2', + 'x': '1', + 'y': '2.mvt', + }, view_name='tiles-connectivity-view') + + response = self.forced_auth_req('get', url, view=view) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + def test_default_download_layer_school_tiles_admin_view(self): + url, _, view = schools_url((), { + 'country_id': self.country.id, + 'admin1_id': '1234543', + 'indicator': 'download', + 'benchmark': 'global', + 'start_date': '24-06-2024', + 'end_date': '30-06-2024', + 'is_weekly': 'true', + 'z': '2', + 'x': '1', + 'y': '2.mvt', + }, view_name='tiles-connectivity-view') + + response = self.forced_auth_req('get', url, view=view) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + def test_default_download_layer_school_tiles_global_view(self): + url, _, view = schools_url((), { + 'indicator': 'download', + 'benchmark': 'global', + 'start_date': '24-06-2024', + 'end_date': '30-06-2024', + 'is_weekly': 'true', + 'z': '2', + 'x': '1', + 'y': '2.mvt', + }, view_name='tiles-connectivity-view') + + response = self.forced_auth_req('get', url, view=view) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + def test_default_download_layer_school_tiles_country_view_month_filter(self): + url, _, view = schools_url((), { + 'country_id': self.country.id, + 'indicator': 'download', + 'benchmark': 'global', + 'start_date': '01-06-2024', + 'end_date': '30-06-2024', + 'is_weekly': 'false', + 'z': '2', + 'x': '1', + 'y': '2.mvt', + }, view_name='tiles-connectivity-view') + + response = self.forced_auth_req('get', url, view=view) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + def test_list_file_imports_on_admin_view(self): + url, _, view = schools_url((), {}, view_name='file-import') + + response = self.forced_auth_req('get', url, view=view, user=self.admin_user) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + def test_update_school(self): + url, _, view = schools_url((self.school_one.id,), {}, + view_name='update-or-retrieve-school') + + put_response = self.forced_auth_req( + 'put', + url, + user=self.admin_user, + data={ + 'name': self.school_one.name + ' Test', + 'timezone': 'UTC', + 'country': self.country.id, + 'giga_id_school': self.school_one.giga_id_school + '-test', + } + ) + + self.assertEqual(put_response.status_code, status.HTTP_200_OK) + + def test_update_school_giga_id_to_duplicate_value(self): + url, _, view = schools_url((self.school_one.id,), {}, + view_name='update-or-retrieve-school') + + put_response = self.forced_auth_req( + 'put', + url, + user=self.admin_user, + data={ + 'name': self.school_one.name + ' Test', + 'timezone': 'UTC', + 'country': self.country.id, + 'giga_id_school': self.school_two.giga_id_school, + } + ) + + self.assertEqual(put_response.status_code, status.HTTP_502_BAD_GATEWAY) + + def test_update_school_giga_id_to_invalid_regex(self): + url, _, view = schools_url((self.school_one.id,), {}, + view_name='update-or-retrieve-school') + + put_response = self.forced_auth_req( + 'put', + url, + user=self.admin_user, + data={ + 'name': self.school_one.name + ' Test', + 'timezone': 'UTC', + 'country': self.country.id, + 'giga_id_school': self.school_one.giga_id_school + '$!@#', + } + ) + + self.assertEqual(put_response.status_code, status.HTTP_502_BAD_GATEWAY) + + def test_update_school_to_invalid_id(self): + url, _, view = schools_url((12345678,), {}, + view_name='update-or-retrieve-school') + + put_response = self.forced_auth_req( + 'put', + 
url, + user=self.admin_user, + data={ + 'name': self.school_one.name + ' Test', + 'timezone': 'UTC', + 'country': self.country.id, + } + ) + + self.assertEqual(put_response.status_code, status.HTTP_502_BAD_GATEWAY) + + def test_list_schools(self): + url, _, view = schools_url((), {}, view_name='list-create-destroy-school') + + response = self.forced_auth_req('get', url, view=view, user=self.admin_user) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + def test_create_school_by_admin(self): + url, _, view = schools_url((), {}, view_name='list-create-destroy-school') + + response = self.forced_auth_req( + 'post', + url, + user=self.admin_user, + view=view, + data={ + 'name': 'New School', + 'giga_id_school': 'ac65543e-cdba-4f5c-891a-448bzdcfge099', + 'external_id': '25805591031323454', + 'country': self.country.id, + 'geopoint': { + 'type': 'Point', + 'coordinates': [ + 76.92044830322266, + 9.022849082946777 + ] + }, + 'gps_confidence': 1.0, + 'altitude': 0, + 'timezone': 'Africa/Conakry' + } + ) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + def test_restore_school_by_admin(self): + url, _, view = schools_url((), {}, view_name='list-create-destroy-school') + + response = self.forced_auth_req( + 'post', + url, + user=self.admin_user, + view=view, + data={ + 'name': 'New School 2', + 'giga_id_school': 'ac65543e-cdba-4f5c-891a-448bzdc12e099', + 'external_id': '258055910313231', + 'country': self.country.id, + 'geopoint': { + 'type': 'Point', + 'coordinates': [ + 76.92044830322266, + 9.022849082946777 + ] + }, + 'gps_confidence': 1.0, + 'altitude': 0, + 'timezone': 'Africa/Conakry' + } + ) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + school_id = response.data['id'] + + put_response = self.forced_auth_req( + 'delete', + url, + user=self.admin_user, + data={ + 'id': [school_id, ] + } + ) + + self.assertEqual(put_response.status_code, status.HTTP_200_OK) + + response = self.forced_auth_req( + 'post', + url, + user=self.admin_user, + view=view, + data={ + 'name': 'New School 3', + 'giga_id_school': 'ac65543e-cdba-4f5c-891a-448bzdc12e099', + 'external_id': '258055910313231', + 'country': self.country.id, + 'geopoint': { + 'type': 'Point', + 'coordinates': [ + 76.92044830322266, + 9.022849082946777 + ] + }, + 'gps_confidence': 1.0, + 'altitude': 0, + 'timezone': 'Africa/Conakry' + } + ) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + self.assertEqual(school_id, response.data['id']) + + def test_retrieve_school(self): + url, view, view_info = schools_url((self.school_one.id,), {}, view_name='update-or-retrieve-school') + + response = self.forced_auth_req('get', url, view=view, user=self.admin_user, view_info=view_info) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + def test_retrieve_school_to_invalid_id(self): + url, view, view_info = schools_url((1234,), {}, view_name='update-or-retrieve-school') + + response = self.forced_auth_req('get', url, view=view, user=self.admin_user, view_info=view_info) + + self.assertEqual(response.status_code, status.HTTP_502_BAD_GATEWAY) + + def test_delete_school(self): + url, _, view = schools_url((), {}, view_name='list-create-destroy-school') + + put_response = self.forced_auth_req( + 'delete', + url, + user=self.admin_user, + data={ + 'id': [self.school_one.id, ] + } + ) + + self.assertEqual(put_response.status_code, status.HTTP_200_OK) + + def test_delete_school_to_invalid_id(self): + url, _, view = schools_url((), {}, view_name='list-create-destroy-school') + + 
put_response = self.forced_auth_req( + 'delete', + url, + user=self.admin_user, + data={ + 'id': [54321, ] + } + ) + + self.assertEqual(put_response.status_code, status.HTTP_502_BAD_GATEWAY) + + def test_download_school_data_without_api_key(self): + url, view, view_info = schools_url((), { + 'page': '1', + 'page_size': '10', + 'ordering': 'name', + 'expand': 'country,last_weekly_status,admin1,admin2', + }, view_name='download-schools') + + response = self.forced_auth_req( + 'get', + url, + user=self.admin_user, + view_info=view_info, + view=view, + request_format='text/csv' + ) + + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) diff --git a/proco/schools/tests/test_utils.py b/proco/schools/tests/test_utils.py new file mode 100644 index 0000000..069544f --- /dev/null +++ b/proco/schools/tests/test_utils.py @@ -0,0 +1,35 @@ +from django.test import TestCase + +from proco.locations.tests.factories import CountryFactory +from proco.schools import utils as schools_utilities +from proco.schools.tests.factories import SchoolFactory +from proco.utils.tests import TestAPIViewSetMixin + + +class UtilsUtilitiesTestCase(TestAPIViewSetMixin, TestCase): + + def test_get_imported_file_path_utility(self): + self.assertEqual(type(schools_utilities.get_imported_file_path(None, 'TestImportFile.csv')), str) + + def test_get_coverage_type_utility(self): + country = CountryFactory() + school_one = SchoolFactory(country=country, location__country=country) + self.assertEqual(type(schools_utilities.get_coverage_type(school_one)), str) + + def test_get_connectivity_status_utility(self): + country = CountryFactory() + school_one = SchoolFactory(country=country, location__country=country) + self.assertEqual(type(schools_utilities.get_connectivity_status(school_one)), str) + + def test_get_connectivity_status_by_master_api_utility(self): + country = CountryFactory() + school_one = SchoolFactory(country=country, location__country=country) + self.assertEqual(type(schools_utilities.get_connectivity_status_by_master_api(school_one)), str) + + def test_get_get_coverage_status_utility(self): + country = CountryFactory() + school_one = SchoolFactory(country=country, location__country=country) + self.assertEqual(type(schools_utilities.get_coverage_status(school_one)), str) + + def test_update_school_from_country_or_school_weekly_update_utility(self): + self.assertEqual(schools_utilities.update_school_from_country_or_school_weekly_update(), None) diff --git a/proco/schools/utils.py b/proco/schools/utils.py index c0c143e..278ddc5 100644 --- a/proco/schools/utils.py +++ b/proco/schools/utils.py @@ -1,5 +1,6 @@ import os import uuid +import logging from datetime import timedelta from django.db import transaction @@ -8,6 +9,8 @@ from proco.core import utils as core_utilities from proco.schools.constants import statuses_schema +logger = logging.getLogger('gigamaps.' 
+ __name__) + def get_imported_file_path(instance, filename): filename_stripped = os.path.splitext(filename)[0].split('/')[-1] @@ -126,7 +129,7 @@ def update_school_from_country_or_school_weekly_update(start_time=None, end_time modified__lt=end_time, ).values_list('id', flat=True).order_by('id').distinct('id') - print('Query to select countries updated between ({0} - {1}): {2}'.format( + logger.debug('Query to select countries updated between ({0} - {1}): {2}'.format( start_time, end_time, country_ids_updated_in_last_12_hours.query)) # CountryWeeklyStatus modified in last 24 hours @@ -136,7 +139,7 @@ def update_school_from_country_or_school_weekly_update(start_time=None, end_time ).exclude(id__in=list(country_ids_updated_in_last_12_hours)).values_list( 'id', flat=True).order_by('id').distinct('id') - print('Query to select countries where CountryWeeklyStatus updated between ({0} - {1}): {2}'.format( + logger.debug('Query to select countries where CountryWeeklyStatus updated between ({0} - {1}): {2}'.format( start_time, end_time, country_status_updated_in_last_12_hours.query)) # SchoolWeeklyStatus updated in last 24 hours @@ -145,10 +148,10 @@ def update_school_from_country_or_school_weekly_update(start_time=None, end_time Q(country_id__in=list(country_ids_updated_in_last_12_hours) + list(country_status_updated_in_last_12_hours)) ) - print('Query to select schools where SchoolWeeklyStatus updated between ({0} - {1}): {2}'.format( + logger.debug('Query to select schools where SchoolWeeklyStatus updated between ({0} - {1}): {2}'.format( start_time, end_time, school_updated_in_last_12_hours.query)) - for data_chunk in core_utilities.queryset_iterator(school_updated_in_last_12_hours, chunk_size=20000): + for data_chunk in core_utilities.queryset_iterator(school_updated_in_last_12_hours, chunk_size=100): with transaction.atomic(): for school in data_chunk: school.coverage_type = get_coverage_type(school) diff --git a/proco/taskapp/__init__.py b/proco/taskapp/__init__.py index 997cf0b..854b44c 100755 --- a/proco/taskapp/__init__.py +++ b/proco/taskapp/__init__.py @@ -15,11 +15,12 @@ app.conf.timezone = 'UTC' app.conf.broker_transport_options = {"visibility_timeout": 36000} # 10h app.conf.worker_deduplicate_successful_tasks = True +app.conf.redbeat_key_prefix = 'gigamaps:' +app.conf.redbeat_lock_timeout = 36000 @app.on_after_finalize.connect def finalize_setup(sender, **kwargs): - # from drf_secure_token.tasks import DELETE_OLD_TOKENS app.conf.beat_schedule.update({ 'proco.utils.tasks.update_all_cached_values': { @@ -46,7 +47,7 @@ def finalize_setup(sender, **kwargs): 'proco.data_sources.tasks.update_static_data': { 'task': 'proco.data_sources.tasks.update_static_data', # Executes at 4:00 AM every day - 'schedule': crontab(hour=3, minute=0), # crontab(hour=1, minute=0, day_of_week='mon'), + 'schedule': crontab(hour='*/4', minute=47), 'args': (), }, 'proco.data_sources.tasks.update_live_data': { @@ -89,5 +90,4 @@ def finalize_setup(sender, **kwargs): 'schedule': crontab(hour=5, minute=10), 'args': (), }, - # 'drf_secure_token.tasks.delete_old_tokens': DELETE_OLD_TOKENS, }) diff --git a/proco/templates/admin/background/backgroud_task_change.html b/proco/templates/admin/background/backgroud_task_change.html deleted file mode 100644 index fa37f0a..0000000 --- a/proco/templates/admin/background/backgroud_task_change.html +++ /dev/null @@ -1,11 +0,0 @@ -{% extends "admin/change_form.html" %} - -{% block extrahead %} - {{ block.super }} - - {% if original.status in original.PROCESS_STATUSES %} - - {% 
endif %} -{% endblock %} diff --git a/proco/templates/admin/index.html b/proco/templates/admin/index.html deleted file mode 100644 index c4ead79..0000000 --- a/proco/templates/admin/index.html +++ /dev/null @@ -1,27 +0,0 @@ -{% extends "admin/index.html" %} -{% load staticfiles %} - -{% block sidebar %} - {% if user.is_superuser %} -
-     Global actions
- {% endif %} - {{ block.super }} -{% endblock %} - -{% block extrastyle %} - {{ block.super }} - {% if user.is_superuser %} - - - {% endif %} -{% endblock %} diff --git a/proco/templates/admin/locations/action_confirm.html b/proco/templates/admin/locations/action_confirm.html deleted file mode 100644 index aa3f157..0000000 --- a/proco/templates/admin/locations/action_confirm.html +++ /dev/null @@ -1,59 +0,0 @@ -{% extends 'admin/delete_confirmation.html' %} -{% load i18n admin_urls static %} - -{% block breadcrumbs %} - -{% endblock %} - -{% block content %} -{% if perms_lacking %} -

- Deleting the {{ object_name }} '{{ escaped_object }}' would result in deleting related objects,
- but your account doesn't have permission to delete the following types of objects:
-   {% for obj in perms_lacking %}
-     {{ obj }}
-   {% endfor %}
-{% elif protected %}
- Deleting the {{ object_name }} '{{ escaped_object }}' would require deleting
- the following protected related objects:
-   {% for obj in protected %}
-     {{ obj }}
-   {% endfor %}
-{% else %}
- Are you sure?
- {% if action == 'mark_as_joined' %}
-   Are you sure you want to mark countries data source as verified?
-   At the next upload of the csv file with the list of schools, all existing schools will be deleted.
- {% else %}
-   Are you sure you want to delete all school points and saved statistics?
-   The status of the country will return to its original position and it will be necessary to re-mark as joined.
- {% endif %}
- {% if action == 'mark_as_joined' %}
-   {% csrf_token %}
- {% else %}
-   {% csrf_token %}
- {% endif %}
-{% endif %} -{% endblock %} diff --git a/proco/templates/admin/schools/change_list.html b/proco/templates/admin/schools/change_list.html deleted file mode 100644 index 61fdb53..0000000 --- a/proco/templates/admin/schools/change_list.html +++ /dev/null @@ -1,123 +0,0 @@ -{% extends 'admin/change_list.html' %} -{% load i18n crispy_forms_tags %} - - -{% block extrahead %} - {{ block.super }} - - - - -{% endblock %} - - -{% block object-tools-items %} -
-     Import CSV
-     {{ block.super }}
-{% endblock %}
-
-{% block content %}
-    {{ block.super }}
    - -{% endblock %} diff --git a/proco/templates/admin/schools/file_imports_change_form.html b/proco/templates/admin/schools/file_imports_change_form.html deleted file mode 100644 index d58febf..0000000 --- a/proco/templates/admin/schools/file_imports_change_form.html +++ /dev/null @@ -1,21 +0,0 @@ -{% extends "admin/change_form.html" %} - -{% block extrastyle %} - {{ block.super }} - -{% endblock %} - -{% block extrahead %} - {{ block.super }} - - {% if original.status in original.PROCESS_STATUSES %} - - {% endif %} -{% endblock %} diff --git a/proco/templates/email/dailycheckapp_contact_email.html b/proco/templates/email/dailycheckapp_contact_email.html deleted file mode 100644 index 93f9c85..0000000 --- a/proco/templates/email/dailycheckapp_contact_email.html +++ /dev/null @@ -1,12 +0,0 @@ -{% block subject %}{{dailycheckapp_contact_message.full_name}}: {{dailycheckapp_contact_message.purpose}}{% endblock %} - -{% block html %} -

-    Message from: {{contact_message.full_name}}.
-    School ID: {{contact_message.school_id}}.
-    Email: {{contact_message.email}}.
-
-    Message text: {{contact_message.message}}

    -{% endblock %} \ No newline at end of file diff --git a/proco/utils/admin.py b/proco/utils/admin.py deleted file mode 100644 index 210f910..0000000 --- a/proco/utils/admin.py +++ /dev/null @@ -1,22 +0,0 @@ -class SchoolNameDisplayAdminMixin(object): - def get_school_name(self, obj): - return obj.school.name - - get_school_name.short_description = 'School Name' - get_school_name.admin_order_field = 'school__name' - - -class CountryNameDisplayAdminMixin(object): - def get_country_name(self, obj): - return obj.country.name - - get_country_name.short_description = 'Country Name' - get_country_name.admin_order_field = 'country__name' - - -class LocationNameDisplayAdminMixin(object): - def get_location_name(self, obj): - return obj.location.name if obj.location else '' - - get_location_name.short_description = 'Location Name' - get_location_name.admin_order_field = 'location__name' diff --git a/proco/utils/log.py b/proco/utils/log.py index eec51b9..c51528e 100644 --- a/proco/utils/log.py +++ b/proco/utils/log.py @@ -1,8 +1,10 @@ import traceback +import logging from django.contrib.admin.models import LogEntry, ADDITION, CHANGE, DELETION from django.contrib.contenttypes.models import ContentType +logger = logging.getLogger('gigamaps.' + __name__) def action_log(request, queryset, checked, change_message, model, field_name): action_flag = ADDITION @@ -36,7 +38,10 @@ def action_log(request, queryset, checked, change_message, model, field_name): change_message=change_message) -def changed_fields(instance, validated_data, changed_data=[]): +def changed_fields(instance, validated_data, changed_data=None): + if not changed_data: + changed_data = [] + model_instance = ['country', 'school', 'last_weekly_status', 'location'] try: for field, value in validated_data.items(): @@ -44,8 +49,12 @@ def changed_fields(instance, validated_data, changed_data=[]): if field in model_instance and int(value) != int(getattr(instance, field, None).id): changed_data.append(field) elif 'date' in field: - if (getattr(instance, field, None) != None and str(getattr(instance, field, None).strftime( - "%d-%m-%Y")) != value) or getattr(instance, field, None) == None and value != '': + if ( + ( + getattr(instance, field, None) is not None and + str(getattr(instance, field, None).strftime("%d-%m-%Y")) != value + ) or getattr(instance, field, None) is None and value != '' + ): changed_data.append(field) else: try: @@ -53,18 +62,22 @@ def changed_fields(instance, validated_data, changed_data=[]): except: pass elif 'date' not in field and field not in model_instance and value != getattr(instance, field, None): - if value == "" and ( - getattr(instance, field, None) == None or getattr(instance, field, None) == '') or ( - field in ['schools_with_data_percentage'] and getattr(instance, field, None) == float(value)): + if ( + value == "" and + (getattr(instance, field, None) is None or getattr(instance, field, None) == '') or + (field in ['schools_with_data_percentage'] and getattr(instance, field, None) == float(value)) + ): pass - elif (getattr(instance, field, None) != '' and (value != "" or value == "")) or ( - getattr(instance, field, None) == None and value != "") or ( - getattr(instance, field, None) == '' and value != ""): + elif ( + (getattr(instance, field, None) != '' and (value != "" or value == "")) or + (getattr(instance, field, None) is None and value != "") or + (getattr(instance, field, None) == '' and value != "") + ): changed_data.append(field) else: changed_fields(getattr(instance, field), validated_data[field], 
changed_data) except: - print(traceback.format_exc()) + logger.error(traceback.format_exc()) changed_data = list(set(changed_data)) remove_item = ["created", "modified"] @@ -74,17 +87,22 @@ def changed_fields(instance, validated_data, changed_data=[]): return changed_data -def changed_about_us_content_fields(instance, validated_data, changed_data=[], d=True): +def changed_about_us_content_fields(instance, validated_data, changed_data=None): + if not changed_data: + changed_data = [] + try: for field, value in validated_data.items(): if not isinstance(value, dict) and not isinstance(value, list): if value != getattr(instance, field, None): - if (getattr(instance, field, None) != '' and (value != "" or value == "")) or ( - getattr(instance, field, None) == None and value != "") or ( - getattr(instance, field, None) == '' and value != ""): + if ( + (getattr(instance, field, None) != '' and (value != "" or value == "")) or + (getattr(instance, field, None) is None and value != "") or + (getattr(instance, field, None) == '' and value != "") + ): changed_data.append(field) elif isinstance(value, dict): - changed_about_us_content_fields(getattr(instance, field), validated_data[field], changed_data, 'dict') + changed_about_us_content_fields(getattr(instance, field), validated_data[field], changed_data) elif isinstance(value, list) and len(value) > 0 and isinstance(value[0], str): try: if set(instance[field]) != set(value): @@ -99,8 +117,7 @@ def changed_about_us_content_fields(instance, validated_data, changed_data=[], d for dict_field, dict_value in item.items(): if isinstance(dict_value, dict): changed_about_us_content_fields(getattr(instance, field)[i][dict_field], item[dict_field], - changed_data, - 'dict') + changed_data) elif isinstance(dict_value, list): if set(dict_value) != set(getattr(instance, field)[i][dict_field]): changed_data.append(field + '_' + dict_field) @@ -109,5 +126,5 @@ def changed_about_us_content_fields(instance, validated_data, changed_data=[], d changed_data.append(field + '_' + dict_field) i += 1 except: - print(traceback.format_exc()) + logger.error(traceback.format_exc()) return changed_data diff --git a/proco/utils/tasks.py b/proco/utils/tasks.py index 8233e11..b1b81f6 100644 --- a/proco/utils/tasks.py +++ b/proco/utils/tasks.py @@ -1,15 +1,23 @@ +import logging +import time +import uuid + from celery import chain -from django.core.cache import cache +from celery import current_task from django.core.management import call_command +from django.db.models import Q from django.db.models.functions.text import Lower from django.urls import reverse from rest_framework.test import APIClient +from proco.background import utils as background_task_utilities from proco.core import db_utils as db_utilities from proco.core import utils as core_utilities from proco.taskapp import app from proco.utils.dates import format_date +logger = logging.getLogger('gigamaps.' 
+ __name__) + @app.task(soft_time_limit=10 * 60, time_limit=11 * 60) def update_cached_value(*args, url='', query_params=None, **kwargs): @@ -25,62 +33,81 @@ def update_cached_value(*args, url='', query_params=None, **kwargs): def update_all_cached_values(): from proco.locations.models import Country from proco.schools.models import School + from proco.accounts.models import DataLayerCountryRelationship, DataLayer - task_cache_key = 'update_all_cached_values_status_{current_time}'.format( - current_time=format_date(core_utilities.get_current_datetime_object(), frmt='%d%m%Y'), - ) - running_task = cache.get(task_cache_key, None) + task_key = 'update_all_cached_values_status_{current_time}'.format( + current_time=format_date(core_utilities.get_current_datetime_object(), frmt='%d%m%Y_%H%M')) - if running_task in [None, b'completed', 'completed']: - print('***** Not found running Job *****') - cache.set(task_cache_key, 'running', None) + task_id = current_task.request.id or str(uuid.uuid4()) + task_instance = background_task_utilities.task_on_start( + task_id, task_key, 'Update the Redis cache, allowed once in a hour') - update_cached_value.delay(url=reverse('connection_statistics:global-stat')) - update_cached_value.delay(url=reverse('locations:countries-list')) + if task_instance: + logger.debug('Not found running job: {}'.format(task_key)) update_cached_value.delay(url=reverse('locations:search-countries-admin-schools')) - # update_cached_value.delay(url=reverse('locations:countries-boundary')) - # update_cached_value.delay(url=reverse('schools:random-schools')) + update_cached_value.delay(url=reverse('locations:countries-list')) + update_cached_value.delay(url=reverse('accounts:list-advanced-filters')) + update_cached_value.delay(url=reverse('connection_statistics:global-stat')) # Get countries which has at least has 1 school countries = Country.objects.filter(id__in=list( School.objects.all().values_list('country_id', flat=True).order_by('country_id').distinct('country_id') )) - for country in countries: - chain([ - update_cached_value.s( - url=reverse('connection_statistics:global-stat'), - query_params={'country_id': country.id}, + + country_wise_default_layers = { + row['country_id']: row['data_layer_id'] + for row in DataLayerCountryRelationship.objects.filter( + Q(is_default=True) | Q( + is_default=False, + data_layer__category=DataLayer.LAYER_CATEGORY_CONNECTIVITY, + data_layer__created_by__isnull=True, ), + data_layer__type=DataLayer.LAYER_TYPE_LIVE, + data_layer__status=DataLayer.LAYER_STATUS_PUBLISHED, + data_layer__deleted__isnull=True, + country_id__in=list(countries)).values('country_id', 'data_layer_id').order_by('country_id').distinct() + } + + for country in countries: + country_wise_task_list = [ update_cached_value.s( url=reverse('locations:countries-detail', kwargs={'pk': country.code.lower()}) ), - # update_cached_value.s( - # url=reverse('schools:schools-list', kwargs={'country_code': country.code.lower()}) - # ), update_cached_value.s( - url=reverse('connection_statistics:get-latest-week-and-month'), + url=reverse('connection_statistics:global-stat'), query_params={'country_id': country.id}, ), - ]).delay() + ] - cache.set(task_cache_key, 'completed', None) + if country_wise_default_layers.get(country.id, None): + country_wise_task_list.append(update_cached_value.s( + url=reverse('connection_statistics:get-latest-week-and-month'), + query_params={ + 'country_id': country.id, + 'layer_id': country_wise_default_layers[country.id], + }, + )) + + 
chain(country_wise_task_list).delay() + + background_task_utilities.task_on_complete(task_instance) else: - print('***** Found running Job with "{0}" name so skipping current iteration *****'.format(task_cache_key)) + logger.error('Found running Job with "{0}" name so skipping current iteration'.format(task_key)) @app.task def update_country_related_cache(country_code): from proco.locations.models import Country - update_cached_value.delay(url=reverse('connection_statistics:global-stat')) - update_cached_value.delay(url=reverse('locations:countries-list')) update_cached_value.delay(url=reverse('locations:search-countries-admin-schools')) + update_cached_value.delay(url=reverse('locations:countries-list')) + update_cached_value.delay(url=reverse('accounts:list-advanced-filters')) + update_cached_value.delay(url=reverse('connection_statistics:global-stat')) update_cached_value.delay(url=reverse('locations:countries-detail', kwargs={'pk': country_code.lower()})) - # update_cached_value.delay(url=reverse('schools:random-schools')) - # update_cached_value.delay(url=reverse('schools:schools-list', kwargs={'country_code': country_code.lower()})) + country = Country.objects.annotate( code_lower=Lower('code'), - ).filter(code_lower=country_code.lower()) + ).filter(code_lower=country_code.lower()).first() if country: update_cached_value.delay( url=reverse('connection_statistics:global-stat'), @@ -95,21 +122,23 @@ def rebuild_school_index(): Task which runs to rebuild the Cognitive Search Index for Schools from scratch. Frequency: Once in a day - Limit: 15 mins + Limit: 15 minutes """ - print('Rebuilding the School Index') - task_cache_key = 'rebuild_school_index_status' - running_task = cache.get(task_cache_key, None) - - if running_task in [None, b'completed', 'completed']: - print('***** Not found running Job *****') - cache.set(task_cache_key, 'running', None) - args = ['--delete_index', '--create_index', '--clean_index', '--update_index'] - call_command('index_rebuild_schools', *args) - - cache.set(task_cache_key, 'completed', None) + logger.info('Rebuilding the school indexes.') + task_key = 'rebuild_school_index_status_{current_time}'.format( + current_time=format_date(core_utilities.get_current_datetime_object(), frmt='%d%m%Y_%H')) + + task_id = current_task.request.id or str(uuid.uuid4()) + task_instance = background_task_utilities.task_on_start( + task_id, task_key, 'Update the Cognitive Search Index for Schools') + + if task_instance: + logger.debug('Not found running job: {}'.format(task_key)) + cmd_args = ['--delete_index', '--create_index', '--clean_index', '--update_index'] + call_command('index_rebuild_schools', *cmd_args) + background_task_utilities.task_on_complete(task_instance) else: - print('***** Found running Job with "{0}" name so skipping current iteration *****'.format(task_cache_key)) + logger.error('Found running Job with "{0}" name so skipping current iteration'.format(task_key)) @app.task(soft_time_limit=1 * 60 * 60, time_limit=1 * 60 * 60) @@ -121,13 +150,17 @@ def populate_school_registration_data(): Frequency: Once in a day Limit: 1 hour """ - print('Setting RT status, RT Date for School which start live data from sources.') - task_cache_key = 'populate_school_registration_data_status' - running_task = cache.get(task_cache_key, None) + logger.info('Setting RT status, RT Date for schools which start live data from sources.') + + task_key = 'populate_school_registration_data_status_{current_time}'.format( + 
current_time=format_date(core_utilities.get_current_datetime_object(), frmt='%d%m%Y_%H')) - if running_task in [None, b'completed', 'completed']: - print('***** Not found running Job *****') - cache.set(task_cache_key, 'running', None) + task_id = current_task.request.id or str(uuid.uuid4()) + task_instance = background_task_utilities.task_on_start( + task_id, task_key, 'Populate the RT table data for new schools') + + if task_instance: + logger.debug('Not found running job: {}'.format(task_key)) sql = """ SELECT DISTINCT sds.school_id FROM public.connection_statistics_schooldailystatus AS sds @@ -141,12 +174,12 @@ def populate_school_registration_data(): school_ids_missing_in_rt_table = db_utilities.sql_to_response(sql, label='SchoolRealtimeRegistration') for missing_school_id in school_ids_missing_in_rt_table: - args = ['--reset', '-school_id={0}'.format(missing_school_id['school_id'])] - call_command('populate_school_registration_data', *args) + cmd_args = ['--reset', '-school_id={0}'.format(missing_school_id['school_id'])] + call_command('populate_school_registration_data', *cmd_args) - cache.set(task_cache_key, 'completed', None) + background_task_utilities.task_on_complete(task_instance) else: - print('***** Found running Job with "{0}" name so skipping current iteration *****'.format(task_cache_key)) + logger.error('Found running Job with "{0}" name so skipping current iteration'.format(task_key)) @app.task(soft_time_limit=10 * 60 * 60, time_limit=10 * 60 * 60) @@ -156,20 +189,22 @@ def redo_aggregations_task(country_id, year, week_no, *args): Task to schedule manually from Console. """ if not country_id or not year: - print('ERROR: Required args not provided: [country_id, year]') + logger.error('Required args not provided: [country_id, year]') return - print('Starting redo_aggregations_task: Country ID "{0}" - Year "{1}" - Week "{2}"'.format( + logger.info('Starting redo aggregations task: Country ID "{0}" - Year "{1}" - Week "{2}"'.format( country_id, year, week_no)) - task_cache_key = 'redo_aggregations_task_country_id_{0}_year_{1}_week_{2}_on_{3}'.format( - country_id, year, week_no, format_date(core_utilities.get_current_datetime_object(), frmt='%d%m%Y')) - running_task = cache.get(task_cache_key, None) + task_key = 'redo_aggregations_task_country_id_{0}_year_{1}_week_{2}_on_{3}'.format( + country_id, year, week_no, format_date(core_utilities.get_current_datetime_object(), frmt='%d%m%Y_%H')) + + task_id = current_task.request.id or str(uuid.uuid4()) + task_instance = background_task_utilities.task_on_start( + task_id, task_key, 'Update the SchoolWeekly, CountryDaily and CountryWeekly from SchoolDaily') - if running_task in [None, b'completed', 'completed']: - print('***** Not found running Job "{}" *****'.format(task_cache_key)) - cache.set(task_cache_key, 'running', None) - args = [ + if task_instance: + logger.debug('Not found running job: {}'.format(task_key)) + cmd_args = [ '-country_id={}'.format(country_id), '-year={}'.format(year), '--update_school_weekly', @@ -178,46 +213,52 @@ def redo_aggregations_task(country_id, year, week_no, *args): ] if week_no: - args.append('-week_no={}'.format(week_no)) + cmd_args.append('-week_no={}'.format(week_no)) - call_command('redo_aggregations', *args) + call_command('redo_aggregations', *cmd_args) - cache.set(task_cache_key, 'completed', None) + background_task_utilities.task_on_complete(task_instance) else: - print('***** Found running Job with "{0}" name so skipping current iteration *****'.format(task_cache_key)) + 
logger.error('Found running Job with "{0}" name so skipping current iteration'.format(task_key)) @app.task(soft_time_limit=10 * 60 * 60, time_limit=10 * 60 * 60) -def populate_school_new_fields_task(start_school_id, end_school_id, country_id, *args): +def populate_school_new_fields_task(start_school_id, end_school_id, country_id, *args, school_ids=None): """ populate_school_new_fields_task Task to schedule manually from Console. """ - print( - 'Starting populate_school_new_fields_task: Country ID "{0}" - start_school_id "{1}" - end_school_id "{2}"'.format( - country_id, start_school_id, end_school_id)) + logger.info('Starting populate school new fields task: Country ID "{0}" - start_school_id "{1}" - ' + 'end_school_id "{2}"'.format(country_id, start_school_id, end_school_id)) + + cmd_args = [] + + if country_id: + cmd_args.append('-country_id={}'.format(country_id)) + + if start_school_id: + cmd_args.append('-start_school_id={}'.format(start_school_id)) - task_cache_key = 'populate_school_new_fields_task_country_id_{0}_start_school_id_{1}_end_school_id_{2}_on_{3}'.format( - country_id, start_school_id, end_school_id, - format_date(core_utilities.get_current_datetime_object(), frmt='%d%m%Y')) - running_task = cache.get(task_cache_key, None) + if end_school_id: + cmd_args.append('-end_school_id={}'.format(end_school_id)) - if running_task in [None, b'completed', 'completed']: - print('***** Not found running Job "{}" *****'.format(task_cache_key)) - cache.set(task_cache_key, 'running', None) - args = [] + if school_ids and len(school_ids) > 0: + cmd_args.append('-school_ids={}'.format(','.join([str(school_id) for school_id in school_ids]))) - if country_id: - args.append('-country_id={}'.format(country_id)) + task_key = 'populate_school_new_fields_task{0}_at_{1}'.format( + ''.join(cmd_args), format_date(core_utilities.get_current_datetime_object(), frmt='%d%m%Y_%H')) - if start_school_id: - args.append('-start_school_id={}'.format(start_school_id)) + task_id = current_task.request.id or str(uuid.uuid4()) + task_instance = background_task_utilities.task_on_start( + task_id, task_key, 'Update the school new fields for provided records') - if end_school_id: - args.append('-end_school_id={}'.format(end_school_id)) + if task_instance: + logger.debug('Not found running job: {}'.format(task_key)) - call_command('populate_school_new_fields', *args) + task_instance.info('Starting the command with args: {}'.format(cmd_args)) + call_command('populate_school_new_fields', *cmd_args) + task_instance.info('Completed the command.') - cache.set(task_cache_key, 'completed', None) + background_task_utilities.task_on_complete(task_instance) else: - print('***** Found running Job with "{0}" name so skipping current iteration *****'.format(task_cache_key)) + logger.error('Found running Job with "{0}" name so skipping current iteration'.format(task_key)) diff --git a/proco/utils/tests.py b/proco/utils/tests.py index 4cb7b03..d0c1c76 100644 --- a/proco/utils/tests.py +++ b/proco/utils/tests.py @@ -26,7 +26,11 @@ def forced_auth_req(self, method, url, user=None, data=None, request_format='jso if 'view' in kwargs: view = kwargs.pop('view') - response = view(request) + if 'view_info' in kwargs: + view_info = kwargs.pop('view_info') + response = view_info(request, *view.args, **view.kwargs) + else: + response = view(request) else: view_info = resolve(url) view = view_info.func diff --git a/proco/utils/urls.py b/proco/utils/urls.py index ab99f3c..a29b468 100644 --- a/proco/utils/urls.py +++ b/proco/utils/urls.py @@ 
-14,7 +14,7 @@ def add_url_params(url, params): >> add_url_params(url, new_params) 'http://stackoverflow.com/test?data=some&data=values&answers=false' """ - # Unquoting URL first so we don't loose existing args + # Unquoting URL first so we don't lose existing args url = unquote(url) # Extracting url info parsed_url = urlparse(url) @@ -37,7 +37,7 @@ def add_url_params(url, params): # Converting URL argument to proper query string encoded_get_args = urlencode(parsed_get_args, doseq=True) # Creating new parsed result object based on provided with new - # URL arguments. Same thing happens inside of urlparse. + # URL arguments. Same thing happens inside urlparse. new_url = ParseResult( parsed_url.scheme, parsed_url.netloc, parsed_url.path, parsed_url.params, encoded_get_args, parsed_url.fragment, diff --git a/web-worker.sh b/web-worker.sh index 62a5534..04eb750 100644 --- a/web-worker.sh +++ b/web-worker.sh @@ -71,3 +71,11 @@ pipenv run gunicorn config.wsgi:application -b 0.0.0.0:8000 -w 8 --timeout=300 # pipenv run python manage.py data_loss_recovery_for_qos --pull_data -country_code='MNG' -pull_start_version=11 -pull_end_version=20 # Step 3: Update the proco tables with new aggregation # pipenv run python manage.py data_loss_recovery_for_qos --aggregate -country_code='MNG' -aggregate_start_version=11 -aggregate_end_version=20 + + +# pipenv run python manage.py create_api_key_with_write_access -user='pcdc_user_with_write_api_key66@nagarro.com' -api_code='DAILY_CHECK_APP' -reason='API Key to GET the PCDC measurement data, Post/Delete API Control over DailyCheckApp documentation' -valid_till='31-12-2099' --force_user -first_name='PCDC' -last_name='User' --inactive_email +# pipenv run python manage.py load_system_data_layers --update_data_layers_code +# pipenv run python manage.py update_system_role_permissions +# pipenv run python manage.py data_cleanup --clean_duplicate_school_gigs_ids + +# pipenv run python manage.py create_admin_user -email='pcdc_user_with_write_api_key5@nagarro.com' -first_name='PCDC' -last_name='User' --inactive_email
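
Note on the recurring change in proco/utils/tasks.py: every periodic task above swaps the old cache-flag guard for the background-task bookkeeping helpers. The sketch below is illustrative only (not part of the patch) and assumes the task_on_start/task_on_complete helpers in proco.background.utils behave as their call sites in this diff suggest, i.e. task_on_start returns a tracking instance when no run with the same key exists and a falsy value otherwise.

    # Minimal sketch of the run-once guard pattern used by the tasks in this diff.
    # Helper behaviour is inferred from the call sites above; treat signatures as
    # an approximation of the project API, not a definitive reference.
    import logging
    import uuid

    from celery import current_task

    from proco.background import utils as background_task_utilities
    from proco.core import utils as core_utilities
    from proco.taskapp import app
    from proco.utils.dates import format_date

    logger = logging.getLogger('gigamaps.' + __name__)


    @app.task(soft_time_limit=10 * 60, time_limit=11 * 60)
    def example_guarded_task():
        # Bucket the key by hour so the task runs at most once per hour.
        task_key = 'example_guarded_task_status_{current_time}'.format(
            current_time=format_date(core_utilities.get_current_datetime_object(), frmt='%d%m%Y_%H'))

        # Reuse the Celery task id when available so the tracking record links back to it.
        task_id = current_task.request.id or str(uuid.uuid4())

        # Returns a tracking instance when no job with the same key is already running.
        task_instance = background_task_utilities.task_on_start(
            task_id, task_key, 'Example task guarded against duplicate runs')

        if task_instance:
            logger.debug('Not found running job: {}'.format(task_key))
            # ... perform the actual work here ...
            background_task_utilities.task_on_complete(task_instance)
        else:
            logger.error('Found running Job with "{0}" name so skipping current iteration'.format(task_key))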