Skip to content
This repository has been archived by the owner on Dec 13, 2022. It is now read-only.

[WIP] Add results page #124

Draft
wants to merge 1 commit into
base: main
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 14 additions & 2 deletions default/data_file.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import json
import os
import uuid
from datetime import date
from datetime import date, datetime
from zipfile import ZIP_DEFLATED, ZipFile

from django.conf import settings
Expand All @@ -18,13 +18,16 @@ class DataFile:
"""
sep = '-'

def __init__(self, prefix, ext, id=None, folder=None):
def __init__(self, prefix, ext, id=None, folder=None, timestamp=None, origin=None, url_suffix=None):
if len(prefix) > 20:
prefix = prefix[:20 + 1]

self.prefix = prefix
self.ext = ext
self.id = id or str(uuid.uuid4())
self.timestamp = timestamp or str(datetime.now())
self.origin = origin
self.url_suffix = url_suffix

if folder is not None:
self.folder = folder
Expand Down Expand Up @@ -57,8 +60,17 @@ def url(self):
"""
Returns the URL path to the file.
"""
if self.url_suffix:
return '/result/{}/{}/{}/'.format(self.folder, self.id, self.url_suffix)
return '/result/{}/{}/'.format(self.folder, self.id)

@property
def drive_url(self):
    """Return the URL path that starts the Google Drive save flow for this file."""
    return f'/google-drive-save-start/{self.folder}/{self.id}/'

@property
def size(self):
"""
Expand Down
41 changes: 21 additions & 20 deletions default/decorators.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,26 +34,27 @@ def extract_last_result(function):
def wrap(request, *args, **kwargs):
    """Before calling the view, optionally seed the session's input files from the last result.

    When the request carries a truthy ``sendResult`` parameter and the session has a
    results history, the most recent result (a zip of JSON files) is unpacked and each
    member is validated and appended to ``request.session['files']``.

    Returns an HTTP 401 response if an extracted file fails type validation.
    """
    # Query params for GET requests, form data for POST requests.
    request_object = getattr(request, request.method)
    send_result = request_object.get('sendResult')
    if send_result and request.session.get('results'):
        # Re-use the most recently generated result as this request's input.
        data_file = DataFile(**request.session['results'][-1])
        # All JSON results are compressed in a zip file.
        if data_file.ext == '.zip':
            with ZipFile(data_file.path) as zipfile:
                for f in zipfile.infolist():
                    prefix, ext = os.path.splitext(f.filename)
                    new_file = DataFile(prefix, ext)
                    # Rewrite the member name so it extracts directly to new_file.path.
                    path, f.filename = os.path.split(new_file.path)
                    zipfile.extract(f, path)
                    # Open the extracted file to check that it is the expected type.
                    with open(new_file.path, 'rb') as h:
                        file_type = request_object.get('type', None)
                        message = invalid_request_file_message(h, file_type)
                        if message:
                            return HttpResponse(message, status=401)  # error 401 for invalid type
                        else:
                            # Initialize the files list on first use, then record the file.
                            request.session.setdefault('files', []).append(new_file.as_dict())
        # The list was mutated in place; mark the session dirty so Django saves it.
        request.session.modified = True
    return function(request, *args, **kwargs)

Expand Down
2 changes: 1 addition & 1 deletion default/static/js/to-spreadsheet.js
Original file line number Diff line number Diff line change
Expand Up @@ -178,7 +178,7 @@
}

function send() {
if (checkSendResult) {
if (checkSendResult()) {
transformInServer('sendResult');
}
else {
Expand Down
7 changes: 7 additions & 0 deletions default/templates/default/base.html
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,13 @@ <h1><a href="/" style="color: white;">{% trans "OCDS Toucan" %}</a> <small> {% b
{% endfor %}
</ul>
</li>
<li>
<a href="/my-files" style="margin-top:15px;">
<i class="glyphicon glyphicon-folder-open"></i>&nbsp;
{% trans "My files" %}
<span class="badge">{% if request.session.results %}{{ request.session.results|length }}{% endif %}</span>
</a>
</li>
</ul>
</div>
</nav>
Expand Down
29 changes: 29 additions & 0 deletions default/templates/default/my-files.html
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
{% extends "default/base.html" %}
{# Lists the user's session-scoped generated results with timestamp, origin operation, and a download link. #}
{% load i18n %}

{% block title %}{{ block.super }} | {% trans "My files" %}{% endblock %}
{% block subtitle %} >> {% trans "My files" %}{% endblock %}
{% block body %}
<table class="table">
  <thead>
    <tr>
      <th>#</th>
      <th>{% trans "File" %}</th>
      <th>{% trans "Created" %}</th>
      <th>{% trans "Output from" %}</th>
      <th>{% trans "Actions" %}</th>
    </tr>
  </thead>
  <tbody>
  {% for result in results %}
    <tr>
      <td>{{ forloop.counter }}</td>
      <td>{{ result.prefix }}{{ result.ext }}</td>
      <td>{{ result.timestamp }}</td>
      <td>{% if result.origin %}{% trans result.origin %}{% endif %}</td>
      <td><a href="{{ result.url }}">{% trans "Download" %}</a></td>
    </tr>
  {% endfor %}
  </tbody>
</table>
{% endblock body %}
3 changes: 2 additions & 1 deletion default/urls.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,5 +33,6 @@
path('google-drive-save-start/<str:folder>/<uuid:id>/', views.google_drive_save_start,
name='googleapi_auth_start'),
path('google-drive-save-start/<str:folder>/<uuid:id>/<str:format>/', views.google_drive_save_start,
name='googleapi_auth_start')
name='googleapi_auth_start'),
path('my-files', views.list_my_files, name='my_files')
]
24 changes: 17 additions & 7 deletions default/util.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import base64
import collections
import copy
import hashlib
import io
Expand All @@ -20,7 +21,7 @@

from default.data_file import DataFile
from default.mapping_sheet import mapping_sheet_method
from ocdstoucan.settings import OCDS_TOUCAN_MAXFILESIZE, OCDS_TOUCAN_MAXNUMFILES
from ocdstoucan.settings import OCDS_TOUCAN_MAXFILESIZE, OCDS_TOUCAN_MAXNUMFILES, OCDS_TOUCAN_MAX_RESULTS


def ocds_tags():
Expand All @@ -41,26 +42,27 @@ def get_files_from_session(request):
yield DataFile(**fileinfo)


def json_response(request, files, warnings=None, pretty_json=False, codec='utf-8'):
file = DataFile('result', '.zip')
def json_response(request, files, warnings=None, pretty_json=False, codec='utf-8', origin=None):
    """Zip *files* into a result archive, record it in the session history, and describe it.

    :param files: mapping of filename to JSON-serializable content to write into the zip
    :param warnings: optional list of warning strings to include in the response
    :param pretty_json: whether to indent the JSON written into the zip
    :param codec: text encoding used when writing the JSON
    :param origin: human-readable name of the operation that produced the result
    :returns: JsonResponse with the result's url, size, Google Drive url, and any warnings
    """
    file = DataFile('result', '.zip', origin=origin)
    file.write_json_to_zip(files, pretty_json=pretty_json, codec=codec)

    response = {
        'url': file.url,
        'size': file.size,
        'driveUrl': file.drive_url,
    }

    if warnings:
        response['warnings'] = warnings

    # Record this result in the session's capped history of generated results.
    save_results(request, file)

    return JsonResponse(response)


def make_package(request, published_date, method, pretty_json, codec, warnings):
def make_package(request, published_date, method, pretty_json, codec, warnings, origin=None):
items = []
for file in get_files_from_session(request):
item = file.json(codec=codec)
Expand All @@ -71,7 +73,7 @@ def make_package(request, published_date, method, pretty_json, codec, warnings):

return json_response(request, {
'result.json': method(items, published_date=published_date),
}, warnings=warnings, pretty_json=pretty_json, codec=codec)
}, warnings=warnings, pretty_json=pretty_json, codec=codec, origin=origin)


def invalid_request_file_message(f, file_type):
Expand Down Expand Up @@ -191,3 +193,11 @@ def flatten(input_file, output_dir, options):

def get_cache_name(key, param):
return key + '_' + str(base64.b64encode(hashlib.md5(param.encode('utf-8')).digest()))


def save_results(request, file):
    """Append *file*'s metadata to the session's result history.

    The history is capped at OCDS_TOUCAN_MAX_RESULTS entries; when full, the
    oldest entry is dropped automatically.

    :param request: the current Django request (its session is updated)
    :param file: a DataFile whose as_dict() representation is stored
    """
    # A bounded deque evicts the oldest entry once maxlen is reached.
    recent = collections.deque(request.session.get('results', []), maxlen=OCDS_TOUCAN_MAX_RESULTS)
    recent.append(file.as_dict())

    request.session['results'] = list(recent)
    # Mark the session dirty so Django persists the replaced list.
    request.session.modified = True
28 changes: 20 additions & 8 deletions default/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@
from default.mapping_sheet import (get_extended_mapping_sheet, get_mapping_sheet_from_uploaded_file,
get_mapping_sheet_from_url)
from default.util import (flatten, get_cache_name, get_files_from_session, invalid_request_file_message, json_response,
make_package, ocds_command, ocds_tags, resolve_schema_refs)
make_package, ocds_command, ocds_tags, resolve_schema_refs, save_results)


def get_datafile_filename(folder, id, format):
Expand Down Expand Up @@ -62,6 +62,11 @@ def index(request):
return render(request, 'default/index.html')


def list_my_files(request):
    """Render the "My files" page listing the results stored in the session."""
    stored = request.session.get('results', [])
    files = [DataFile(**info) for info in stored]
    return render(request, 'default/my-files.html', {'results': files})


@clear_files
def to_spreadsheet(request):
return render(request, 'default/to-spreadsheet.html', {'form': UnflattenOptionsForm()})
Expand Down Expand Up @@ -105,7 +110,7 @@ def perform_upgrade(request, pretty_json=False, encoding='utf-8', warnings=None)
for file in get_files_from_session(request):
data.update({file.name_with_suffix('upgraded'): upgrade_10_11(
file.json(codec=encoding, object_pairs_hook=OrderedDict))})
return json_response(request, data, warnings, pretty_json, encoding)
return json_response(request, data, warnings, pretty_json, encoding, origin='Upgrade from 1.0 to 1.1')


@require_GET
Expand All @@ -132,7 +137,8 @@ def get_schema_as_options(request):
@validate_optional_args
def perform_package_releases(request, pretty_json=False, published_date='', encoding='utf-8', warnings=None):
method = package_releases_method
return make_package(request, published_date, method, pretty_json, encoding, warnings)
return make_package(request, published_date, method, pretty_json, encoding, warnings,
origin='Create Release Packages')


@require_files
Expand All @@ -144,7 +150,7 @@ def perform_combine_packages(request, pretty_json=False, published_date='', enco
method = combine_release_packages
else:
method = combine_record_packages
return make_package(request, published_date, method, pretty_json, encoding, warnings)
return make_package(request, published_date, method, pretty_json, encoding, warnings, origin='Combine Packages')


@require_files
Expand Down Expand Up @@ -191,7 +197,7 @@ def perform_split_packages(request, pretty_json=False, published_date='', size=1
content[package_data] = context[i:i + size]
result.update({name: content})

return json_response(request, result, warnings, pretty_json, encoding)
return json_response(request, result, warnings, pretty_json, encoding, origin='Split Packages')


@require_files
Expand All @@ -205,7 +211,7 @@ def perform_compile(request, pretty_json=False, published_date='', encoding='utf
return json_response(request, {
'result.json': next(merge(packages, return_package=True, published_date=published_date,
return_versioned_release=return_versioned_release)),
}, warnings, pretty_json, encoding)
}, warnings, pretty_json, encoding, origin='Compile Releases')


def mapping_sheet(request):
Expand Down Expand Up @@ -279,7 +285,8 @@ def perform_to_spreadsheet(request):
response = {}
if form.cleaned_data['output_format'] == 'all' or form.cleaned_data['output_format'] == 'csv':
# Create a ZIP file of the CSV files, and delete the output CSV files.
csv_zip = DataFile('flatten-csv', '.zip', id=input_file.id, folder=input_file.folder)
csv_zip = DataFile('flatten-csv', '.zip', id=input_file.id, folder=input_file.folder,
origin='Convert to CSV/Excel', url_suffix='csv.zip')
with ZipFile(csv_zip.path, 'w', compression=ZIP_DEFLATED) as zipfile:
for filename in os.listdir(output_dir.path):
zipfile.write(os.path.join(output_dir.path, filename), filename)
Expand All @@ -290,13 +297,17 @@ def perform_to_spreadsheet(request):
'size': csv_zip.size,
'driveUrl': input_file.url.replace('result', 'google-drive-save-start') + 'csv.zip/'
}
save_results(request, csv_zip)

if form.cleaned_data['output_format'] == 'all' or form.cleaned_data['output_format'] == 'xlsx':
response['xlsx'] = {
'url': input_file.url + 'xlsx/',
'size': os.path.getsize(output_dir.path + '.xlsx'),
'driveUrl': input_file.url.replace('result', 'google-drive-save-start') + 'xlsx/'
}
output_xlsx = DataFile('flatten', '.xlsx', input_file.id, input_file.folder, origin='Convert to CSV/Excel',
url_suffix='xlsx')
save_results(request, output_xlsx)

return JsonResponse(response)

Expand Down Expand Up @@ -339,7 +350,8 @@ def perform_to_json(request, pretty_json=False, encoding='utf-8', warnings=None)
shutil.rmtree(input_file_path)

with open(output_name) as json_file:
return json_response(request, {'result.json': json.load(json_file)}, warnings, pretty_json, encoding)
return json_response(request, {'result.json': json.load(json_file)}, warnings, pretty_json, encoding,
origin='Convert to JSON')


@require_POST
Expand Down
2 changes: 2 additions & 0 deletions ocdstoucan/settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@
# Application definition

INSTALLED_APPS = [
'sslserver',
'default.apps.DefaultConfig',
'django.contrib.contenttypes',
'django.contrib.sessions',
Expand Down Expand Up @@ -174,6 +175,7 @@

# os.getenv returns a *string* when the variable is set; coerce the numeric
# settings to int so they have a consistent type either way (e.g. deque's
# maxlen and size comparisons require an int).
# Maximum number of files a user may upload per operation.
OCDS_TOUCAN_MAXNUMFILES = int(os.getenv('OCDS_TOUCAN_MAXNUMFILES', 20))
# Maximum size of a single uploaded file, in bytes.
OCDS_TOUCAN_MAXFILESIZE = int(os.getenv('OCDS_TOUCAN_MAXFILESIZE', 10000000))  # in bytes
# Maximum number of generated results kept in the session history.
OCDS_TOUCAN_MAX_RESULTS = int(os.getenv('OCDS_TOUCAN_MAX_RESULTS', 50))
# Credentials file used by the Google Drive save feature.
OCDS_TOUCAN_GOOGLE_API_CREDENTIALS_FILE = os.getenv('OCDS_TOUCAN_CREDENTIALS_DRIVE', 'googleapi_credentials.json')

# https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
Expand Down