From a5432e81e6a3dd5120ec7b653cfee930cbd355c6 Mon Sep 17 00:00:00 2001 From: Juan Date: Tue, 21 Nov 2017 14:04:41 -0700 Subject: [PATCH 01/16] Got rid of all CSV files dependency on the entire application. CSV file download is now a view on it's own. --- src/WebSDL/settings/base.py | 4 + src/dataloaderinterface/csv_serializer.py | 67 --------------- .../management/commands/generate_csv_data.py | 32 ------- src/dataloaderinterface/models.py | 18 ++-- .../dataloaderinterface/js/device-detail.js | 56 +++++++++---- .../dataloaderinterface/site_details.html | 7 +- src/dataloaderinterface/templatetags/site.py | 20 +---- src/dataloaderinterface/views.py | 11 +-- .../metadata_template.txt | 2 +- src/dataloaderservices/urls.py | 4 +- src/dataloaderservices/views.py | 83 +++++++++++++++++-- 11 files changed, 144 insertions(+), 160 deletions(-) delete mode 100644 src/dataloaderinterface/csv_serializer.py delete mode 100644 src/dataloaderinterface/management/commands/generate_csv_data.py rename src/{dataloaderinterface => dataloaderservices}/metadata_template.txt (97%) diff --git a/src/WebSDL/settings/base.py b/src/WebSDL/settings/base.py index a74656c4..decc62ff 100644 --- a/src/WebSDL/settings/base.py +++ b/src/WebSDL/settings/base.py @@ -164,3 +164,7 @@ EMAIL_HOST = EMAIL_SERVER[0] if isinstance(EMAIL_SERVER, tuple) else EMAIL_SERVER DATETIME_FORMAT = "N j, Y, H:m" + +INFLUX_URL_QUERY = data['influx_query'] + +TSA_URL = data['tsa_url'] if 'tsa_url' in data else "" diff --git a/src/dataloaderinterface/csv_serializer.py b/src/dataloaderinterface/csv_serializer.py deleted file mode 100644 index b8325afe..00000000 --- a/src/dataloaderinterface/csv_serializer.py +++ /dev/null @@ -1,67 +0,0 @@ -import codecs -import csv -import os - -from datetime import timedelta -from django.contrib.staticfiles.storage import staticfiles_storage -from unicodecsv.py2 import UnicodeWriter - - -class SiteResultSerializer: - headers = ('DateTime', 'TimeOffset', 'DateTimeUTC', 'Value', 'CensorCode', 'QualifierCode', ) - date_format = '%Y-%m-%d %H:%M:%S' - - def __init__(self, result): - self.result = result - - def get_file_path(self): - filename = "{0}_{1}_{2}.csv".format(self.result.feature_action.sampling_feature.sampling_feature_code, - self.result.variable.variable_code, self.result.result_id) - return os.path.join('data', filename) - - def open_csv_file(self): - csv_file = staticfiles_storage.open(self.get_file_path(), 'ab+') - return csv_file - - def create_csv_file(self): - csv_file = staticfiles_storage.open(self.get_file_path(), 'wb') - return csv_file - - def get_metadata_template(self): - with codecs.open(os.path.join(os.path.dirname(__file__), 'metadata_template.txt'), 'r', encoding='utf-8') as metadata_file: - return metadata_file.read() - - def generate_metadata(self): - action = self.result.feature_action.action - equipment_model = self.result.data_logger_file_columns.first().instrument_output_variable.model - affiliation = action.action_by.filter(is_action_lead=True).first().affiliation - return self.get_metadata_template().format( - sampling_feature=self.result.feature_action.sampling_feature, - variable=self.result.variable, - unit=self.result.unit, - model=equipment_model, - result=self.result, - action=action, - affiliation=affiliation - ).encode('utf-8') - - def build_csv(self): - with self.create_csv_file() as output_file: - output_file.write(self.generate_metadata()) - csv_writer = UnicodeWriter(output_file) - csv_writer.writerow(self.headers) - - def add_data_value(self, data_value): - 
self.add_data_values([data_value]) - - def add_data_values(self, data_values): - data = [(data_value.value_datetime.strftime(self.date_format), - '{0}:00'.format(data_value.value_datetime_utc_offset), - (data_value.value_datetime - timedelta(hours=data_value.value_datetime_utc_offset)).strftime(self.date_format), - data_value.data_value, - data_value.censor_code_id, - data_value.quality_code_id) - for data_value in data_values] - with self.open_csv_file() as output_file: - csv_writer = UnicodeWriter(output_file) - csv_writer.writerows(data) diff --git a/src/dataloaderinterface/management/commands/generate_csv_data.py b/src/dataloaderinterface/management/commands/generate_csv_data.py deleted file mode 100644 index 23e5949a..00000000 --- a/src/dataloaderinterface/management/commands/generate_csv_data.py +++ /dev/null @@ -1,32 +0,0 @@ -from django.core.management.base import BaseCommand - -from dataloader.models import Result, TimeSeriesResult -from dataloaderinterface.csv_serializer import SiteResultSerializer - - -class Command(BaseCommand): - help = '' - - def handle(self, *args, **options): - results = TimeSeriesResult.objects.prefetch_related( - 'result__variable', - 'result__unit', - 'result__processing_level', - 'result__data_logger_file_columns__instrument_output_variable__model', - 'result__feature_action__sampling_feature__site', - 'result__feature_action__action__method', - 'result__feature_action__action__action_by__affiliation__person', - 'result__feature_action__action__action_by__affiliation__organization' - ).all() - # falta como una musica de elevador. wtf. - for time_series_result in results: - print('result %s:' % time_series_result.result_id) - serializer = SiteResultSerializer(result=time_series_result.result) - serializer.build_csv() - print('-file with metadata created') - if time_series_result.result.value_count > 0: - values = time_series_result.values.all() - serializer.add_data_values(values) - print('-data values written to file') - - print('--csv generated.') diff --git a/src/dataloaderinterface/models.py b/src/dataloaderinterface/models.py index 197757a1..2c985581 100644 --- a/src/dataloaderinterface/models.py +++ b/src/dataloaderinterface/models.py @@ -10,6 +10,8 @@ from django.contrib.auth.models import User from django.db import models +from django.conf import settings + class SiteRegistration(models.Model): registration_id = models.AutoField(primary_key=True, db_column='RegistrationID') @@ -86,6 +88,10 @@ class SiteSensor(models.Model): activation_date = models.DateTimeField(db_column='ActivationDate', blank=True, null=True) activation_date_utc_offset = models.IntegerField(db_column='ActivationDateUtcOffset', blank=True, null=True) + @property + def result(self): + return Result.objects.get(pk=self.result_id) + @property def equipment_model(self): return EquipmentModel.objects.filter(model_name=self.model_name).first() @@ -106,17 +112,17 @@ def medium(self): def make_model(self): return "{0}_{1}".format(self.model_manufacturer, self.model_name) - @property - def sensor_identity(self): - return "{0}_{1}_{2}".format(self.registration.sampling_feature_code, self.variable_code, self.result_id) - @property def last_measurement(self): return TimeSeriesResultValue.objects.filter(pk=self.last_measurement_id).first() @property - def result(self): - return Result.objects.get(pk=self.result_id) + def sensor_identity(self): + return "{0}_{1}_{2}".format(self.registration.sampling_feature_code, self.variable_code, self.result_id) + + @property + def influx_url(self): + 
return settings.INFLUX_URL_QUERY.format(str(self.result_uuid).replace('-', '_')) def __str__(self): return '%s %s' % (self.variable_name, self.unit_abbreviation) diff --git a/src/dataloaderinterface/static/dataloaderinterface/js/device-detail.js b/src/dataloaderinterface/static/dataloaderinterface/js/device-detail.js index ea61fb26..6537452a 100644 --- a/src/dataloaderinterface/static/dataloaderinterface/js/device-detail.js +++ b/src/dataloaderinterface/static/dataloaderinterface/js/device-detail.js @@ -204,21 +204,49 @@ function drawSparklinePlot(seriesInfo, seriesData) { } function getTimeSeriesData(sensorInfo) { - Papa.parse(sensorInfo['csvPath'], { - download: true, - header: true, - worker: true, - comments: "#", - skipEmptyLines: true, - complete: function (result) { - if (result.data) { - var recentValues = getRecentData(result.data); - fillValueTable($('table.data-values[data-result-id=' + sensorInfo['resultId'] + ']'), result.data); - drawSparklineOnResize(sensorInfo, recentValues); - drawSparklinePlot(sensorInfo, recentValues); - } + $.ajax({ + url: sensorInfo['influxUrl'] + }).done(function(influx_data) { + var resultSet = influx_data.results.shift(); + if (resultSet.series && resultSet.series.length) { + var influxSeries = resultSet.series.shift(); + var values = influxSeries.values.map(function(influxValue) { + return { + DateTime: influxValue[0].match(/^(\d{4}\-\d\d\-\d\d([tT][\d:]*)?)/).shift(), + Value: influxValue[1], + TimeOffset: influxValue[2] + } + }); + + var recentValues = getRecentData(values); + fillValueTable($('table.data-values[data-result-id=' + sensorInfo['resultId'] + ']'), values); + drawSparklineOnResize(sensorInfo, recentValues); + drawSparklinePlot(sensorInfo, recentValues); + + + } else { + console.error('No data values were found for this site'); + console.info(series.getdatainflux); } - }); + }).fail(function(failedData) { + console.log('data failed to load.'); + + }) + // Papa.parse(sensorInfo['csvPath'], { + // download: true, + // header: true, + // worker: true, + // comments: "#", + // skipEmptyLines: true, + // complete: function (result) { + // if (result.data) { + // var recentValues = getRecentData(result.data); + // fillValueTable($('table.data-values[data-result-id=' + sensorInfo['resultId'] + ']'), result.data); + // drawSparklineOnResize(sensorInfo, recentValues); + // drawSparklinePlot(sensorInfo, recentValues); + // } + // } + // }); } $(document).ready(function () { diff --git a/src/dataloaderinterface/templates/dataloaderinterface/site_details.html b/src/dataloaderinterface/templates/dataloaderinterface/site_details.html index 2e5171e8..570143be 100644 --- a/src/dataloaderinterface/templates/dataloaderinterface/site_details.html +++ b/src/dataloaderinterface/templates/dataloaderinterface/site_details.html @@ -190,7 +190,7 @@
+ data-influx-url="{{ sensor.influx_url }}">
@@ -205,7 +205,7 @@
{# Download data button #} - + @@ -215,8 +215,7 @@
{# View on TSA button #} - {# TODO: (Juan) Add to url patterns #} - + diff --git a/src/dataloaderinterface/templatetags/site.py b/src/dataloaderinterface/templatetags/site.py index 0fbaaf3a..0e85b0c7 100644 --- a/src/dataloaderinterface/templatetags/site.py +++ b/src/dataloaderinterface/templatetags/site.py @@ -1,27 +1,9 @@ from dataloader.models import SamplingFeature, Result from django import template -from dataloaderinterface.csv_serializer import SiteResultSerializer -from dataloaderinterface.models import DeviceRegistration, SiteSensor, SiteRegistration +from dataloaderinterface.models import SiteRegistration register = template.Library() -# -# -# @register.filter(name='get_registration') -# def get_registration(sampling_feature): -# if not isinstance(sampling_feature, SamplingFeature): -# return -# -# return DeviceRegistration.objects.filter(deployment_sampling_feature_uuid__exact=sampling_feature.sampling_feature_uuid).first() - - -@register.filter(name='get_sensor_csv_path') -def get_sensor_csv_path(sensor): - if not isinstance(sensor, SiteSensor): - return - - return SiteResultSerializer(sensor.result).get_file_path() - @register.filter(name='get_site_sensor') def get_site_sensor(site, variable_code): diff --git a/src/dataloaderinterface/views.py b/src/dataloaderinterface/views.py index b13c5931..100efd8b 100644 --- a/src/dataloaderinterface/views.py +++ b/src/dataloaderinterface/views.py @@ -1,6 +1,8 @@ from datetime import datetime from uuid import uuid4 +from django.conf import settings + from dataloader.models import FeatureAction, Result, ProcessingLevel, TimeSeriesResult, SamplingFeature, \ SpatialReference, \ ElevationDatum, SiteType, ActionBy, Action, Method, DataLoggerProgramFile, DataLoggerFile, \ @@ -18,7 +20,6 @@ from django.views.generic.edit import UpdateView, CreateView, DeleteView from django.views.generic.list import ListView -from dataloaderinterface.csv_serializer import SiteResultSerializer from dataloaderinterface.forms import SamplingFeatureForm, ResultFormSet, SiteForm, UserRegistrationForm, \ OrganizationForm, UserUpdateForm, ActionByForm from dataloaderinterface.models import ODM2User, SiteRegistration, SiteSensor @@ -136,6 +137,7 @@ class SiteDetailView(DetailView): def get_context_data(self, **kwargs): context = super(SiteDetailView, self).get_context_data() + context['tsa_url'] = settings.TSA_URL context['is_followed'] = self.request.user.is_authenticated and self.request.user.followed_sites.filter(sampling_feature_code=self.object.sampling_feature_code).exists() return context @@ -470,13 +472,6 @@ def create_result(site_registration, result_form, sampling_feature, affiliation, site_sensor = SiteSensor(**sensor_data) site_sensor.save() - # Create csv file to hold the sensor data. - # TODO: have this send a signal and the call to create the file be somewhere else. - # lol should've done it. - # once more i regret not doing it... - serializer = SiteResultSerializer(result) - serializer.build_csv() - return result diff --git a/src/dataloaderinterface/metadata_template.txt b/src/dataloaderservices/metadata_template.txt similarity index 97% rename from src/dataloaderinterface/metadata_template.txt rename to src/dataloaderservices/metadata_template.txt index bd6457c3..f4294bc7 100644 --- a/src/dataloaderinterface/metadata_template.txt +++ b/src/dataloaderservices/metadata_template.txt @@ -2,7 +2,7 @@ # WARNING: These data are provisional and subject to revision. 
The data are provided
 # as is with no warranty and on the condition that neither the data collector nor
 # any of the participants in or developers of http://data.envirodiy.org may be held
-# liable for any damages resulting from thier use.
+# liable for any damages resulting from their use.
 #
 # The following metadata describe the data in this file:
 # ----------------------------------------------------------------------------------
diff --git a/src/dataloaderservices/urls.py b/src/dataloaderservices/urls.py
index 96785de3..7dcc5da1 100644
--- a/src/dataloaderservices/urls.py
+++ b/src/dataloaderservices/urls.py
@@ -16,10 +16,12 @@
 from django.conf.urls import url
 from rest_framework.urlpatterns import format_suffix_patterns
 
-from dataloaderservices.views import TimeSeriesValuesApi, OrganizationApi, ModelVariablesApi, ResultApi, FollowSiteApi
+from dataloaderservices.views import TimeSeriesValuesApi, OrganizationApi, ModelVariablesApi, ResultApi, FollowSiteApi, \
+    CSVDataApi
 
 urlpatterns = [
     url(r'^api/data-stream/$', TimeSeriesValuesApi.as_view(), name='api_post'),
+    url(r'^api/csv-values/$', CSVDataApi.as_view(), name='csv_data_service'),
     url(r'^api/follow-site/$', FollowSiteApi.as_view(), name='follow_site'),
     url(r'^api/sensor-form/$', ResultApi.as_view(), name='result_validation_service'),
     url(r'^api/organization/$', OrganizationApi.as_view(), name='organization_service'),
diff --git a/src/dataloaderservices/views.py b/src/dataloaderservices/views.py
index 1faa7a21..4670b798 100644
--- a/src/dataloaderservices/views.py
+++ b/src/dataloaderservices/views.py
@@ -1,6 +1,14 @@
-from datetime import datetime, timedelta
+import codecs
+import os
+from datetime import timedelta
 
-from dataloader.models import SamplingFeature, TimeSeriesResultValue, Unit, EquipmentModel, TimeSeriesResult
+from StringIO import StringIO
+
+from django.http.response import HttpResponse
+from django.views.generic.base import View
+from unicodecsv.py2 import UnicodeWriter
+
+from dataloader.models import SamplingFeature, TimeSeriesResultValue, Unit, EquipmentModel, TimeSeriesResult, Result
 from django.db.models.expressions import F
 
 # Create your views here.
from django.utils.dateparse import parse_datetime @@ -10,7 +18,6 @@ from rest_framework.response import Response from rest_framework.views import APIView -from dataloaderinterface.csv_serializer import SiteResultSerializer from dataloaderinterface.forms import ResultForm from dataloaderinterface.models import SiteSensor, SiteRegistration from dataloaderservices.auth import UUIDAuthentication @@ -79,6 +86,71 @@ def post(self, request, format=None): return Response({}, status.HTTP_200_OK) +class CSVDataApi(View): + authentication_classes = () + csv_headers = ('DateTime', 'TimeOffset', 'DateTimeUTC', 'Value', 'CensorCode', 'QualifierCode',) + date_format = '%Y-%m-%d %H:%M:%S' + + def get(self, request): + if 'result_id' not in request.GET: + return Response({'error': 'Result Id not received.'}) + + result_id = request.GET['result_id'] + if result_id == '': + return Response({'error': 'Empty Result Id received.'}) + + time_series_result = TimeSeriesResult.objects\ + .prefetch_related('values')\ + .select_related('result__feature_action__sampling_feature', 'result__variable')\ + .filter(pk=result_id)\ + .first() + + if not time_series_result: + return Response({'error': 'Time Series Result not found.'}) + result = time_series_result.result + + csv_file = StringIO() + csv_writer = UnicodeWriter(csv_file) + csv_file.write(self.generate_metadata(time_series_result.result)) + csv_writer.writerow(self.csv_headers) + csv_writer.writerows(self.get_data_values(time_series_result)) + + filename = "{0}_{1}_{2}.csv".format(result.feature_action.sampling_feature.sampling_feature_code, + result.variable.variable_code, result.result_id) + + response = HttpResponse(csv_file.getvalue(), content_type='text/csv') + response['Content-Disposition'] = 'attachment; filename="%s.csv"' % filename + return response + + def get_data_values(self, result): + data_values = result.values.all() + return [(data_value.value_datetime.strftime(self.date_format), + '{0}:00'.format(data_value.value_datetime_utc_offset), + (data_value.value_datetime - timedelta(hours=data_value.value_datetime_utc_offset)).strftime(self.date_format), + data_value.data_value, + data_value.censor_code_id, + data_value.quality_code_id) + for data_value in data_values] + + def get_metadata_template(self): + with codecs.open(os.path.join(os.path.dirname(__file__), 'metadata_template.txt'), 'r', encoding='utf-8') as metadata_file: + return metadata_file.read() + + def generate_metadata(self, result): + action = result.feature_action.action + equipment_model = result.data_logger_file_columns.first().instrument_output_variable.model + affiliation = action.action_by.filter(is_action_lead=True).first().affiliation + return self.get_metadata_template().format( + sampling_feature=result.feature_action.sampling_feature, + variable=result.variable, + unit=result.unit, + model=equipment_model, + result=result, + action=action, + affiliation=affiliation + ).encode('utf-8') + + class TimeSeriesValuesApi(APIView): authentication_classes = (UUIDAuthentication, ) @@ -152,9 +224,4 @@ def post(self, request, format=None): site_sensor.save(update_fields=['last_measurement_id', 'activation_date', 'activation_date_utc_offset']) result.save(update_fields=['result_datetime', 'value_count', 'result_datetime_utc_offset', 'valid_datetime', 'valid_datetime_utc_offset']) - # Write data to result's csv file - # TODO: have this send a signal and the call to add data to the file be somewhere else. 
- serializer = SiteResultSerializer(result) - serializer.add_data_value(result_value) - return Response({}, status.HTTP_201_CREATED) From 2e8643d74716e82a10ab4a11150f856a7610015b Mon Sep 17 00:00:00 2001 From: Juan Date: Tue, 28 Nov 2017 10:56:32 -0700 Subject: [PATCH 02/16] Optimizations for everyone. influx optimization, speed optimization, network load optimization. --- src/WebSDL/settings/base.py | 5 ++- .../commands/update_sensor_measurements.py | 42 +++++++++++++++++++ src/dataloaderinterface/models.py | 9 +++- .../static/dataloaderinterface/css/style.css | 1 + .../dataloaderinterface/js/device-detail.js | 42 ++++++++++--------- .../dataloaderinterface/site_details.html | 9 ++-- 6 files changed, 81 insertions(+), 27 deletions(-) create mode 100644 src/dataloaderinterface/management/commands/update_sensor_measurements.py diff --git a/src/WebSDL/settings/base.py b/src/WebSDL/settings/base.py index c83085b5..b6067264 100644 --- a/src/WebSDL/settings/base.py +++ b/src/WebSDL/settings/base.py @@ -167,4 +167,7 @@ INFLUX_URL_QUERY = data['influx_query'] -TSA_URL = data['tsa_url'] if 'tsa_url' in data else "" +# This data period is measured in days +SENSOR_DATA_PERIOD = data['sensor_data_period'] if 'sensor_data_period' in data else '2' + +TSA_URL = data['tsa_url'] if 'tsa_url' in data else '' diff --git a/src/dataloaderinterface/management/commands/update_sensor_measurements.py b/src/dataloaderinterface/management/commands/update_sensor_measurements.py new file mode 100644 index 00000000..d4a3b8c1 --- /dev/null +++ b/src/dataloaderinterface/management/commands/update_sensor_measurements.py @@ -0,0 +1,42 @@ +from django.core.management.base import BaseCommand + +from dataloaderinterface.models import SiteSensor + + +class Command(BaseCommand): + help = '' + + def handle(self, *args, **options): + sensors = SiteSensor.objects.all() + for sensor in sensors: + time_series_result = sensor.result.timeseriesresult + last_measurement = time_series_result.values.last() + + if not last_measurement and not sensor.last_measurement_id: + print('- %s (%s) sensor has no measurements.' % (sensor.sensor_identity, sensor.result_id)) + continue + + elif sensor.last_measurement_id and not last_measurement: + print('* %s (%s) sensor has a measurement and it shouldn\'t.' % (sensor.sensor_identity, sensor.result_id)) + sensor.last_measurement_id = None + sensor.save(update_fields=['last_measurement_id']) + continue + + elif last_measurement and not sensor.last_measurement_id: + print('* %s (%s) sensor doesn\'t have a measurement and it should.' % (sensor.sensor_identity, sensor.result_id)) + sensor.last_measurement_id = last_measurement.value_id + sensor.save(update_fields=['last_measurement_id']) + continue + + elif sensor.last_measurement_id == last_measurement.value_id: + print('- %s (%s) sensor is up to date.' 
% (sensor.sensor_identity, sensor.result_id)) + continue + + print('*** outdated sensor %s (%s) - got: (%s) expected: (%s)' % ( + sensor.sensor_identity, + sensor.result_id, + sensor.last_measurement.value_datetime, + last_measurement.value_datetime + )) + sensor.last_measurement_id = last_measurement.value_id + sensor.save(update_fields=['last_measurement_id']) diff --git a/src/dataloaderinterface/models.py b/src/dataloaderinterface/models.py index 2c985581..5b2bb6d5 100644 --- a/src/dataloaderinterface/models.py +++ b/src/dataloaderinterface/models.py @@ -122,7 +122,14 @@ def sensor_identity(self): @property def influx_url(self): - return settings.INFLUX_URL_QUERY.format(str(self.result_uuid).replace('-', '_')) + if not self.last_measurement_id: + return + + return settings.INFLUX_URL_QUERY.format( + result_uuid=str(self.result_uuid).replace('-', '_'), + last_measurement=self.last_measurement.value_datetime.strftime('%Y-%m-%dT%H:%M:%SZ'), + days_of_data=settings.SENSOR_DATA_PERIOD + ) def __str__(self): return '%s %s' % (self.variable_name, self.unit_abbreviation) diff --git a/src/dataloaderinterface/static/dataloaderinterface/css/style.css b/src/dataloaderinterface/static/dataloaderinterface/css/style.css index 9f24f119..4199c0a7 100644 --- a/src/dataloaderinterface/static/dataloaderinterface/css/style.css +++ b/src/dataloaderinterface/static/dataloaderinterface/css/style.css @@ -290,6 +290,7 @@ svg.not-stale { .last-observation { font-weight: bold; + display: block; } .plot_box .mdl-card__actions { diff --git a/src/dataloaderinterface/static/dataloaderinterface/js/device-detail.js b/src/dataloaderinterface/static/dataloaderinterface/js/device-detail.js index 6537452a..c8950931 100644 --- a/src/dataloaderinterface/static/dataloaderinterface/js/device-detail.js +++ b/src/dataloaderinterface/static/dataloaderinterface/js/device-detail.js @@ -62,16 +62,16 @@ function bindDeleteDialogEvents() { } // Returns the most recent 72 hours since the last reading -function getRecentData(timeSeriesData) { - var lastRead = Math.max.apply(Math, timeSeriesData.map(function(value){ - return new Date(value.DateTime); - })); - - var dataTimeOffset = new Date(lastRead - 1000 * 60 * 60 * EXTENT_HOURS); - return timeSeriesData.filter(function (value) { - return (new Date(value.DateTime)) >= dataTimeOffset; - }); -} +// function getRecentData(timeSeriesData) { +// var lastRead = Math.max.apply(Math, timeSeriesData.map(function(value){ +// return new Date(value.DateTime); +// })); +// +// var dataTimeOffset = new Date(lastRead - 1000 * 60 * 60 * EXTENT_HOURS); +// return timeSeriesData.filter(function (value) { +// return (new Date(value.DateTime)) >= dataTimeOffset; +// }); +// } function fillValueTable(table, data) { var rows = data.map(function (dataValue) { @@ -93,7 +93,7 @@ function drawSparklineOnResize(seriesInfo, seriesData) { function drawSparklinePlot(seriesInfo, seriesData) { var card = $('div.plot_box[data-result-id="' + seriesInfo['resultId'] + '"]'); var plotBox = card.find(".graph-container"); - var $lastObservation = card.find(".last-observation"); + // var $lastObservation = card.find(".last-observation"); plotBox.empty(); @@ -119,15 +119,17 @@ function drawSparklinePlot(seriesInfo, seriesData) { return; } - $('.plot_box[data-result-id=' + seriesInfo['resultId'] + ' ]').find('.latest-value').text(seriesData[seriesData.length - 1].Value); + // $('.plot_box[data-result-id=' + seriesInfo['resultId'] + ' ]').find('.latest-value').text(seriesData[seriesData.length - 1].Value); var lastRead = 
Math.max.apply(Math, seriesData.map(function(value){ return new Date(value.DateTime); })); - $lastObservation.text(formatDate(lastRead)); + // $lastObservation.text(formatDate(lastRead)); // - var dataTimeOffset = new Date(lastRead - 1000 * 60 * 60 * EXTENT_HOURS); + var dataTimeOffset = Math.min.apply(Math, seriesData.map(function(value){ + return new Date(value.DateTime); + })); var xAxis = d3.scaleTime().range([0, width]); var yAxis = d3.scaleLinear().range([height, 0]); @@ -217,11 +219,11 @@ function getTimeSeriesData(sensorInfo) { TimeOffset: influxValue[2] } }); - - var recentValues = getRecentData(values); + // + // var recentValues = getRecentData(values); fillValueTable($('table.data-values[data-result-id=' + sensorInfo['resultId'] + ']'), values); - drawSparklineOnResize(sensorInfo, recentValues); - drawSparklinePlot(sensorInfo, recentValues); + drawSparklineOnResize(sensorInfo, values); + drawSparklinePlot(sensorInfo, values); } else { @@ -267,10 +269,10 @@ $(document).ready(function () { data: { csrfmiddlewaretoken: followForm.find('input[name="csrfmiddlewaretoken"]').val(), sampling_feature_code: followForm.find('input[name="sampling_feature_code"]').val(), - action: (following)?'unfollow':'follow' + action: (following)? 'unfollow': 'follow' }}).done(function(data) { statusContainer.toggleClass("following"); - tooltip.text((following)?'Follow':'Unfollow'); + tooltip.text((following)? 'Follow': 'Unfollow'); }); }); diff --git a/src/dataloaderinterface/templates/dataloaderinterface/site_details.html b/src/dataloaderinterface/templates/dataloaderinterface/site_details.html index 570143be..f033a0e3 100644 --- a/src/dataloaderinterface/templates/dataloaderinterface/site_details.html +++ b/src/dataloaderinterface/templates/dataloaderinterface/site_details.html @@ -249,7 +249,7 @@
{{ sensor.variable_name }}
-
+
{{ sensor.last_measurement.data_value|default:"-" }}
({{ sensor.unit_abbreviation }})
@@ -257,10 +257,9 @@
- + Date of last observation -
-
+ {{ sensor.last_measurement.value_datetime|default:"-" }}
@@ -347,7 +346,7 @@

Code Output

 const char *SAMPLING_FEATURE = "{{ site.sampling_feature.sampling_feature_uuid }}"; // Sampling feature UUID
 const char *UUIDs[] = // UUID array for device sensors
 {{% for sensor in site.sensors.all %}
-    "{{ sensor.result.result_uuid }}"{% if not forloop.last %}, {% else %} {% endif %} // {{ sensor.variable_name }} ({{ sensor.variable_code }}){% endfor %}
+    "{{ sensor.result_uuid }}"{% if not forloop.last %}, {% else %} {% endif %} // {{ sensor.variable_name }} ({{ sensor.variable_code }}){% endfor %}
 };
From 13d70a8cc5d4c57b6b4fc1f794c0ddcc2a83b46c Mon Sep 17 00:00:00 2001
From: Juan
Date: Tue, 28 Nov 2017 10:56:58 -0700
Subject: [PATCH 03/16] Comma was still causing an error in map data.

---
 .../templates/dataloaderinterface/my-sites.html | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/dataloaderinterface/templates/dataloaderinterface/my-sites.html b/src/dataloaderinterface/templates/dataloaderinterface/my-sites.html
index 20045f59..4f357985 100644
--- a/src/dataloaderinterface/templates/dataloaderinterface/my-sites.html
+++ b/src/dataloaderinterface/templates/dataloaderinterface/my-sites.html
@@ -205,7 +205,7 @@

0 %},{% endif %} {% endfor %} {% for site in followed_sites %} { From 9f7a1e11ed275e9745c9661366731ab09999139c Mon Sep 17 00:00:00 2001 From: Juan Date: Fri, 1 Dec 2017 12:09:51 -0700 Subject: [PATCH 04/16] major model changes for better query performance. deleted unused DeviceRegistration model, added last measurement data to sensor model (now no queries to the odm2 database are needed to get the last measurement), added deployment date to site model (now no aggregation is needed to get the earliest data value of a site's sensors), updated sites detail page to reflect the model changes. --- src/dataloader/migrations/__init__.py | 0 src/dataloader/models.py | 18 +- src/dataloader/tests/test_models.py | 588 +++++++++--------- .../management/commands/fix_data.py | 296 ++++----- .../commands/fix_site_deployment_date.py | 28 + .../commands/migrate_site_registrations.py | 168 ++--- .../commands/patch_results_datetime.py | 1 - .../commands/update_sensor_measurements.py | 17 +- .../migrations/__init__.py | 0 src/dataloaderinterface/models.py | 46 +- .../dataloaderinterface/site_details.html | 4 +- src/dataloaderinterface/tests/test_models.py | 38 +- src/dataloaderinterface/tests/test_views.py | 24 +- src/dataloaderinterface/views.py | 3 + src/dataloaderservices/auth.py | 2 +- 15 files changed, 625 insertions(+), 608 deletions(-) create mode 100644 src/dataloader/migrations/__init__.py create mode 100644 src/dataloaderinterface/management/commands/fix_site_deployment_date.py create mode 100644 src/dataloaderinterface/migrations/__init__.py diff --git a/src/dataloader/migrations/__init__.py b/src/dataloader/migrations/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/dataloader/models.py b/src/dataloader/models.py index 4140694f..3b53ed76 100644 --- a/src/dataloader/models.py +++ b/src/dataloader/models.py @@ -1179,8 +1179,8 @@ class Meta: class RelatedFeature(ObjectRelation): - sampling_feature = models.ForeignKey('SamplingFeature', related_name='related_features__sampling_feature', db_column='samplingfeatureid') - related_feature = models.ForeignKey('SamplingFeature', related_name='related_features__related_feature', db_column='relatedfeatureid') + sampling_feature = models.ForeignKey('SamplingFeature', related_name='related_features_sampling_feature', db_column='samplingfeatureid') + related_feature = models.ForeignKey('SamplingFeature', related_name='related_features_related_feature', db_column='relatedfeatureid') spatial_offset = models.ForeignKey('SpatialOffset', db_column='spatialoffsetid', blank=True, null=True) def __repr__(self): @@ -1231,8 +1231,8 @@ class Meta: class RelatedModel(ObjectRelation): - model = models.ForeignKey('Model', related_name='related_model__model', db_column='modelid') - related_model = models.ForeignKey('Model', related_name='related_model__related_model', db_column='relatedmodelid') + model = models.ForeignKey('Model', related_name='related_model_model', db_column='modelid') + related_model = models.ForeignKey('Model', related_name='related_model_related_model', db_column='relatedmodelid') def __repr__(self): return "" % ( @@ -1805,8 +1805,8 @@ class Meta: class RelatedAnnotation(ObjectRelation): - annotation = models.ForeignKey('Annotation', related_name='related_annonation__annotation', db_column='annotationid') - related_annotation = models.ForeignKey('Annotation', related_name='related_annotation__related_annontation', db_column='relatedannotationid') + annotation = models.ForeignKey('Annotation', 
related_name='related_annonation_annotation', db_column='annotationid') + related_annotation = models.ForeignKey('Annotation', related_name='related_annotation_related_annontation', db_column='relatedannotationid') def __repr__(self): return "" % ( @@ -1819,8 +1819,8 @@ class Meta: class RelatedDataSet(ObjectRelation): - data_set = models.ForeignKey('DataSet', related_name='related_dataset__dataset', db_column='datasetid') - related_data_set = models.ForeignKey('DataSet', related_name='related_dataset__related_dataset', db_column='relateddatasetid') + data_set = models.ForeignKey('DataSet', related_name='related_dataset_dataset', db_column='datasetid') + related_data_set = models.ForeignKey('DataSet', related_name='related_dataset_related_dataset', db_column='relateddatasetid') version_code = models.CharField(db_column='versioncode', blank=True, max_length=50) def __repr__(self): @@ -1835,7 +1835,7 @@ class Meta: class RelatedResult(ObjectRelation): result = models.ForeignKey('Result', db_column='resultid') - related_result = models.ForeignKey('Result', related_name='related_result__related_result', db_column='relatedresultid') + related_result = models.ForeignKey('Result', related_name='related_result_related_result', db_column='relatedresultid') version_code = models.CharField(db_column='versioncode', blank=True, max_length=50) related_result_sequence_number = models.IntegerField(db_column='relatedresultsequencenumber', blank=True, null=True) diff --git a/src/dataloader/tests/test_models.py b/src/dataloader/tests/test_models.py index 3a8b89b2..d53aa531 100644 --- a/src/dataloader/tests/test_models.py +++ b/src/dataloader/tests/test_models.py @@ -1,294 +1,294 @@ -from dataloader.models import * -from django.test import TestCase - -from dataloader.tests.data import data_manager - -models_data = data_manager.test_data['models']['data'] - - -class TestSamplingFeature(TestCase): - @staticmethod - def create_site_sampling_feature(): - sampling_feature_type = SamplingFeatureType(**models_data['site_sampling_feature_type']) - sampling_feature = SamplingFeature(**models_data['site_sampling_feature']) - sampling_feature.sampling_feature_type = sampling_feature_type - return sampling_feature - - def setUp(self): - self.sampling_feature = self.create_site_sampling_feature() - - def test_string_representation(self): - self.assertEqual(str(self.sampling_feature), 'Site: RB_KF_C Knowlton Fork Climate') - self.assertEqual(self.sampling_feature.__unicode__(), u'Site: RB_KF_C Knowlton Fork Climate') - - -class TestAction(TestCase): - @staticmethod - def create_visit_action(): - method = TestMethod.create_visit_method() - action_type = ActionType(**models_data['visit_action_type']) - action = Action(**models_data['visit_action']) - action.action_type = action_type - action.method = method - return action - - def setUp(self): - self.action = self.create_visit_action() - - def test_string_representation(self): - self.assertEqual(str(self.action), 'Site Visit - 1991-08-17 1:20 -7') - self.assertEqual(self.action.__unicode__(), u'Site Visit - 1991-08-17 1:20 -7') - - -class TestFeatureAction(TestCase): - @staticmethod - def create_site_visit_feature_action(): - site_visit = TestAction.create_visit_action() - site = TestSamplingFeature.create_site_sampling_feature() - feature_action = FeatureAction(action=site_visit, sampling_feature=site) - return feature_action - - def setUp(self): - self.feature_action = self.create_site_visit_feature_action() - - def test_string_representation(self): - 
self.assertEqual(str(self.feature_action), 'Site Visit - 1991-08-17 1:20 -7 RB_KF_C (Site)') - self.assertEqual(self.feature_action.__unicode__(), u'Site Visit - 1991-08-17 1:20 -7 RB_KF_C (Site)') - - -class TestPeople(TestCase): - @staticmethod - def create_person(): - # 1. be God - # 2. grab some dirt - # 3. blow on it (???) - # 4. profit! - - person = People(**models_data['person']) - return person - - def setUp(self): - self.person = self.create_person() - - def test_string_representation(self): - self.assertEqual(str(self.person), 'Jeffery Horsburgh') - self.assertEqual(self.person.__unicode__(), u'Jeffery Horsburgh') - - -class TestOrganization(TestCase): - @staticmethod - def create_usu_organization(): - organization_type = OrganizationType(**models_data['research_organization_type']) - organization = Organization(**models_data['usu_organization']) - organization.organization_type = organization_type - return organization - - def setUp(self): - self.organization = self.create_usu_organization() - - def test_string_representation(self): - self.assertEqual(str(self.organization), 'Research institute: Utah State University') - self.assertEqual(self.organization.__unicode__(), u'Research institute: Utah State University') - - -class TestAffiliation(TestCase): - @staticmethod - def create_usu_affiliation(): - person = TestPeople.create_person() - organization = TestOrganization.create_usu_organization() - affiliation = Affiliation(**models_data['usu_affiliation']) - affiliation.organization = organization - affiliation.person = person - return affiliation - - def setUp(self): - self.affiliation = self.create_usu_affiliation() - - def test_string_representation(self): - self.assertEqual(str(self.affiliation), 'Jeffery Horsburgh (Utah State University) - serious.email@usu.edu') - self.assertEqual(self.affiliation.__unicode__(), u'Jeffery Horsburgh (Utah State University) - serious.email@usu.edu') - - -class TestActionBy(TestCase): - @staticmethod - def create_action_by(): - affiliation = TestAffiliation.create_usu_affiliation() - action = TestAction.create_visit_action() - action_by = ActionBy(**models_data['usu_visit_action_by']) - action_by.affiliation = affiliation - action_by.action = action - return action_by - - def setUp(self): - self.action_by = self.create_action_by() - - def test_string_representation(self): - self.assertEqual(str(self.action_by), 'Site Visit - 1991-08-17 1:20 -7: Jeffery Horsburgh (Utah State University) - serious.email@usu.edu') - self.assertEqual(self.action_by.__unicode__(), u'Site Visit - 1991-08-17 1:20 -7: Jeffery Horsburgh (Utah State University) - serious.email@usu.edu') - - -class TestMethod(TestCase): - @staticmethod - def create_deployment_method(): - method_type = MethodType(**models_data['deployment_method_type']) - method = Method(**models_data['deployment_method']) - method.method_type = method_type - return method - - @staticmethod - def create_visit_method(): - method_type = MethodType(**models_data['visit_method_type']) - method = Method(**models_data['visit_method']) - method.method_type = method_type - return method - - def setUp(self): - self.method = self.create_deployment_method() - - def test_string_representation(self): - self.assertEqual(str(self.method), 'Instrument deployment: Deployment') - self.assertEqual(self.method.__unicode__(), u'Instrument deployment: Deployment') - - -class TestProcessingLevel(TestCase): - @staticmethod - def create_raw_processing_level(): - processing_level = 
ProcessingLevel(**models_data['raw_processing_level']) - return processing_level - - def setUp(self): - self.processing_level = self.create_raw_processing_level() - - def test_string_representation(self): - self.assertEqual(str(self.processing_level), 'Raw (Raw and Unprocessed Data)') - self.assertEqual(self.processing_level.__unicode__(), u'Raw (Raw and Unprocessed Data)') - - -class TestUnit(TestCase): - @staticmethod - def create_degree_celsius_unit(): - unit_type = UnitType(**models_data['temperature_unit_type']) - unit = Unit(**models_data['degrees_celsius_unit']) - unit.unit_type = unit_type - return unit - - @staticmethod - def create_feet_distance_unit(): - unit_type = UnitType(**models_data['length_unit_type']) - unit = Unit(**models_data['coordinate_location_unit']) - unit.unit_type = unit_type - return unit - - @staticmethod - def create_time_spacing_unit(): - unit_type = UnitType(**models_data['time_unit_type']) - unit = Unit(**models_data['time_spacing_unit']) - unit.unit_type = unit_type - return unit - - def setUp(self): - self.unit = self.create_degree_celsius_unit() - - def test_string_representation(self): - self.assertEqual(str(self.unit), 'Temperature: degC (degree celsius)') - self.assertEqual(self.unit.__unicode__(), u'Temperature: degC (degree celsius)') - - -class TestVariable(TestCase): - @staticmethod - def create_air_temperature_variable(): - variable_type = VariableType(**models_data['climate_variable_type']) - variable_name = VariableName(**models_data['temperature_variable_name']) - variable = Variable(**models_data['avg_air_temperature_variable']) - variable.variable_type = variable_type - variable.variable_name = variable_name - return variable - - def setUp(self): - self.variable = self.create_air_temperature_variable() - - def test_string_representation(self): - self.assertEqual(str(self.variable), 'Temperature: AirTemp_Avg (Climate)') - self.assertEqual(self.variable.__unicode__(), u'Temperature: AirTemp_Avg (Climate)') - - -class TestResult(TestCase): - @staticmethod - def create_air_temperature_coverage_result(): - feature_action = TestFeatureAction.create_site_visit_feature_action() - result_type = ResultType(**models_data['series_coverage_result_type']) - variable = TestVariable.create_air_temperature_variable() - unit = TestUnit.create_degree_celsius_unit() - processing_level = TestProcessingLevel.create_raw_processing_level() - status = Status(**models_data['ongoing_result_status']) - sampled_medium = Medium(**models_data['air_medium']) - - result = Result(**models_data['air_temperature_coverage_result']) - result.feature_action = feature_action - result.result_type = result_type - result.variable = variable - result.unit = unit - result.processing_level = processing_level - result.status = status - result.sampled_medium = sampled_medium - - return result - - def setUp(self): - self.result = self.create_air_temperature_coverage_result() - - def test_string_representation(self): - self.assertEqual(str(self.result), '2016-04-20 2:10 - Time series coverage (Temperature): AirTemp_Avg degC') - self.assertEqual(self.result.__unicode__(), u'2016-04-20 2:10 - Time series coverage (Temperature): AirTemp_Avg degC') - - -class TestTimeSeriesResult(TestCase): - @staticmethod - def create_air_temperature_time_series_result(): - intended_time_spacing_unit = TestUnit.create_time_spacing_unit() - aggregation_statistic_cv = AggregationStatistic(**models_data['continuous_aggregation_statistic']) - x_location_unit = TestUnit.create_feet_distance_unit() - 
y_location_unit = TestUnit.create_feet_distance_unit() - z_location_unit = TestUnit.create_feet_distance_unit() - result = TestResult.create_air_temperature_coverage_result() - - time_series_result = TimeSeriesResult(**models_data['air_temperature_time_series_result']) - time_series_result.intended_time_spacing_unit = intended_time_spacing_unit - time_series_result.aggregation_statistic_cv = aggregation_statistic_cv - time_series_result.x_location_unit = x_location_unit - time_series_result.y_location_unit = y_location_unit - time_series_result.z_location_unit = z_location_unit - time_series_result.result = result - - return time_series_result - - def setUp(self): - self.time_series_result = self.create_air_temperature_time_series_result() - - def test_string_representation(self): - self.assertEqual(str(self.time_series_result), '2016-04-20 2:10 - Time series coverage (Temperature): AirTemp_Avg degC') - self.assertEqual(self.time_series_result.__unicode__(), u'2016-04-20 2:10 - Time series coverage (Temperature): AirTemp_Avg degC') - - -class TestTimeSeriesResultValue(TestCase): - @staticmethod - def create_air_temperature_time_series_value(): - result = TestTimeSeriesResult.create_air_temperature_time_series_result() - censor_code = CensorCode(**models_data['non_detect_censor_code']) - quality_code = QualityCode(**models_data['good_quality_code']) - time_aggregation_interval_unit = TestUnit.create_time_spacing_unit() - - series_value = TimeSeriesResultValue(**models_data['air_temperature_result_value']) - series_value.result = result - series_value.censor_code = censor_code - series_value.quality_code = quality_code - series_value.time_aggregation_interval_unit = time_aggregation_interval_unit - - return series_value - - def setUp(self): - self.time_series_value = self.create_air_temperature_time_series_value() - - def test_string_representation(self): - self.assertEqual(str(self.time_series_value), '1.5 at 2016-05-23 6:33 (2016-04-20 2:10 - Time series coverage (Temperature): AirTemp_Avg degC)') - self.assertEqual(self.time_series_value.__unicode__(), u'1.5 at 2016-05-23 6:33 (2016-04-20 2:10 - Time series coverage (Temperature): AirTemp_Avg degC)') +# from dataloader.models import * +# from django.test import TestCase +# +# from dataloader.tests.data import data_manager +# +# models_data = data_manager.test_data['models']['data'] +# +# +# class TestSamplingFeature(TestCase): +# @staticmethod +# def create_site_sampling_feature(): +# sampling_feature_type = SamplingFeatureType(**models_data['site_sampling_feature_type']) +# sampling_feature = SamplingFeature(**models_data['site_sampling_feature']) +# sampling_feature.sampling_feature_type = sampling_feature_type +# return sampling_feature +# +# def setUp(self): +# self.sampling_feature = self.create_site_sampling_feature() +# +# def test_string_representation(self): +# self.assertEqual(str(self.sampling_feature), 'Site: RB_KF_C Knowlton Fork Climate') +# self.assertEqual(self.sampling_feature.__unicode__(), u'Site: RB_KF_C Knowlton Fork Climate') +# +# +# class TestAction(TestCase): +# @staticmethod +# def create_visit_action(): +# method = TestMethod.create_visit_method() +# action_type = ActionType(**models_data['visit_action_type']) +# action = Action(**models_data['visit_action']) +# action.action_type = action_type +# action.method = method +# return action +# +# def setUp(self): +# self.action = self.create_visit_action() +# +# def test_string_representation(self): +# self.assertEqual(str(self.action), 'Site Visit - 1991-08-17 1:20 
-7') +# self.assertEqual(self.action.__unicode__(), u'Site Visit - 1991-08-17 1:20 -7') +# +# +# class TestFeatureAction(TestCase): +# @staticmethod +# def create_site_visit_feature_action(): +# site_visit = TestAction.create_visit_action() +# site = TestSamplingFeature.create_site_sampling_feature() +# feature_action = FeatureAction(action=site_visit, sampling_feature=site) +# return feature_action +# +# def setUp(self): +# self.feature_action = self.create_site_visit_feature_action() +# +# def test_string_representation(self): +# self.assertEqual(str(self.feature_action), 'Site Visit - 1991-08-17 1:20 -7 RB_KF_C (Site)') +# self.assertEqual(self.feature_action.__unicode__(), u'Site Visit - 1991-08-17 1:20 -7 RB_KF_C (Site)') +# +# +# class TestPeople(TestCase): +# @staticmethod +# def create_person(): +# # 1. be God +# # 2. grab some dirt +# # 3. blow on it (???) +# # 4. profit! +# +# person = People(**models_data['person']) +# return person +# +# def setUp(self): +# self.person = self.create_person() +# +# def test_string_representation(self): +# self.assertEqual(str(self.person), 'Jeffery Horsburgh') +# self.assertEqual(self.person.__unicode__(), u'Jeffery Horsburgh') +# +# +# class TestOrganization(TestCase): +# @staticmethod +# def create_usu_organization(): +# organization_type = OrganizationType(**models_data['research_organization_type']) +# organization = Organization(**models_data['usu_organization']) +# organization.organization_type = organization_type +# return organization +# +# def setUp(self): +# self.organization = self.create_usu_organization() +# +# def test_string_representation(self): +# self.assertEqual(str(self.organization), 'Research institute: Utah State University') +# self.assertEqual(self.organization.__unicode__(), u'Research institute: Utah State University') +# +# +# class TestAffiliation(TestCase): +# @staticmethod +# def create_usu_affiliation(): +# person = TestPeople.create_person() +# organization = TestOrganization.create_usu_organization() +# affiliation = Affiliation(**models_data['usu_affiliation']) +# affiliation.organization = organization +# affiliation.person = person +# return affiliation +# +# def setUp(self): +# self.affiliation = self.create_usu_affiliation() +# +# def test_string_representation(self): +# self.assertEqual(str(self.affiliation), 'Jeffery Horsburgh (Utah State University) - serious.email@usu.edu') +# self.assertEqual(self.affiliation.__unicode__(), u'Jeffery Horsburgh (Utah State University) - serious.email@usu.edu') +# +# +# class TestActionBy(TestCase): +# @staticmethod +# def create_action_by(): +# affiliation = TestAffiliation.create_usu_affiliation() +# action = TestAction.create_visit_action() +# action_by = ActionBy(**models_data['usu_visit_action_by']) +# action_by.affiliation = affiliation +# action_by.action = action +# return action_by +# +# def setUp(self): +# self.action_by = self.create_action_by() +# +# def test_string_representation(self): +# self.assertEqual(str(self.action_by), 'Site Visit - 1991-08-17 1:20 -7: Jeffery Horsburgh (Utah State University) - serious.email@usu.edu') +# self.assertEqual(self.action_by.__unicode__(), u'Site Visit - 1991-08-17 1:20 -7: Jeffery Horsburgh (Utah State University) - serious.email@usu.edu') +# +# +# class TestMethod(TestCase): +# @staticmethod +# def create_deployment_method(): +# method_type = MethodType(**models_data['deployment_method_type']) +# method = Method(**models_data['deployment_method']) +# method.method_type = method_type +# return method +# +# @staticmethod 
+# def create_visit_method(): +# method_type = MethodType(**models_data['visit_method_type']) +# method = Method(**models_data['visit_method']) +# method.method_type = method_type +# return method +# +# def setUp(self): +# self.method = self.create_deployment_method() +# +# def test_string_representation(self): +# self.assertEqual(str(self.method), 'Instrument deployment: Deployment') +# self.assertEqual(self.method.__unicode__(), u'Instrument deployment: Deployment') +# +# +# class TestProcessingLevel(TestCase): +# @staticmethod +# def create_raw_processing_level(): +# processing_level = ProcessingLevel(**models_data['raw_processing_level']) +# return processing_level +# +# def setUp(self): +# self.processing_level = self.create_raw_processing_level() +# +# def test_string_representation(self): +# self.assertEqual(str(self.processing_level), 'Raw (Raw and Unprocessed Data)') +# self.assertEqual(self.processing_level.__unicode__(), u'Raw (Raw and Unprocessed Data)') +# +# +# class TestUnit(TestCase): +# @staticmethod +# def create_degree_celsius_unit(): +# unit_type = UnitType(**models_data['temperature_unit_type']) +# unit = Unit(**models_data['degrees_celsius_unit']) +# unit.unit_type = unit_type +# return unit +# +# @staticmethod +# def create_feet_distance_unit(): +# unit_type = UnitType(**models_data['length_unit_type']) +# unit = Unit(**models_data['coordinate_location_unit']) +# unit.unit_type = unit_type +# return unit +# +# @staticmethod +# def create_time_spacing_unit(): +# unit_type = UnitType(**models_data['time_unit_type']) +# unit = Unit(**models_data['time_spacing_unit']) +# unit.unit_type = unit_type +# return unit +# +# def setUp(self): +# self.unit = self.create_degree_celsius_unit() +# +# def test_string_representation(self): +# self.assertEqual(str(self.unit), 'Temperature: degC (degree celsius)') +# self.assertEqual(self.unit.__unicode__(), u'Temperature: degC (degree celsius)') +# +# +# class TestVariable(TestCase): +# @staticmethod +# def create_air_temperature_variable(): +# variable_type = VariableType(**models_data['climate_variable_type']) +# variable_name = VariableName(**models_data['temperature_variable_name']) +# variable = Variable(**models_data['avg_air_temperature_variable']) +# variable.variable_type = variable_type +# variable.variable_name = variable_name +# return variable +# +# def setUp(self): +# self.variable = self.create_air_temperature_variable() +# +# def test_string_representation(self): +# self.assertEqual(str(self.variable), 'Temperature: AirTemp_Avg (Climate)') +# self.assertEqual(self.variable.__unicode__(), u'Temperature: AirTemp_Avg (Climate)') +# +# +# class TestResult(TestCase): +# @staticmethod +# def create_air_temperature_coverage_result(): +# feature_action = TestFeatureAction.create_site_visit_feature_action() +# result_type = ResultType(**models_data['series_coverage_result_type']) +# variable = TestVariable.create_air_temperature_variable() +# unit = TestUnit.create_degree_celsius_unit() +# processing_level = TestProcessingLevel.create_raw_processing_level() +# status = Status(**models_data['ongoing_result_status']) +# sampled_medium = Medium(**models_data['air_medium']) +# +# result = Result(**models_data['air_temperature_coverage_result']) +# result.feature_action = feature_action +# result.result_type = result_type +# result.variable = variable +# result.unit = unit +# result.processing_level = processing_level +# result.status = status +# result.sampled_medium = sampled_medium +# +# return result +# +# def setUp(self): +# 
self.result = self.create_air_temperature_coverage_result() +# +# def test_string_representation(self): +# self.assertEqual(str(self.result), '2016-04-20 2:10 - Time series coverage (Temperature): AirTemp_Avg degC') +# self.assertEqual(self.result.__unicode__(), u'2016-04-20 2:10 - Time series coverage (Temperature): AirTemp_Avg degC') +# +# +# class TestTimeSeriesResult(TestCase): +# @staticmethod +# def create_air_temperature_time_series_result(): +# intended_time_spacing_unit = TestUnit.create_time_spacing_unit() +# aggregation_statistic_cv = AggregationStatistic(**models_data['continuous_aggregation_statistic']) +# x_location_unit = TestUnit.create_feet_distance_unit() +# y_location_unit = TestUnit.create_feet_distance_unit() +# z_location_unit = TestUnit.create_feet_distance_unit() +# result = TestResult.create_air_temperature_coverage_result() +# +# time_series_result = TimeSeriesResult(**models_data['air_temperature_time_series_result']) +# time_series_result.intended_time_spacing_unit = intended_time_spacing_unit +# time_series_result.aggregation_statistic_cv = aggregation_statistic_cv +# time_series_result.x_location_unit = x_location_unit +# time_series_result.y_location_unit = y_location_unit +# time_series_result.z_location_unit = z_location_unit +# time_series_result.result = result +# +# return time_series_result +# +# def setUp(self): +# self.time_series_result = self.create_air_temperature_time_series_result() +# +# def test_string_representation(self): +# self.assertEqual(str(self.time_series_result), '2016-04-20 2:10 - Time series coverage (Temperature): AirTemp_Avg degC') +# self.assertEqual(self.time_series_result.__unicode__(), u'2016-04-20 2:10 - Time series coverage (Temperature): AirTemp_Avg degC') +# +# +# class TestTimeSeriesResultValue(TestCase): +# @staticmethod +# def create_air_temperature_time_series_value(): +# result = TestTimeSeriesResult.create_air_temperature_time_series_result() +# censor_code = CensorCode(**models_data['non_detect_censor_code']) +# quality_code = QualityCode(**models_data['good_quality_code']) +# time_aggregation_interval_unit = TestUnit.create_time_spacing_unit() +# +# series_value = TimeSeriesResultValue(**models_data['air_temperature_result_value']) +# series_value.result = result +# series_value.censor_code = censor_code +# series_value.quality_code = quality_code +# series_value.time_aggregation_interval_unit = time_aggregation_interval_unit +# +# return series_value +# +# def setUp(self): +# self.time_series_value = self.create_air_temperature_time_series_value() +# +# def test_string_representation(self): +# self.assertEqual(str(self.time_series_value), '1.5 at 2016-05-23 6:33 (2016-04-20 2:10 - Time series coverage (Temperature): AirTemp_Avg degC)') +# self.assertEqual(self.time_series_value.__unicode__(), u'1.5 at 2016-05-23 6:33 (2016-04-20 2:10 - Time series coverage (Temperature): AirTemp_Avg degC)') diff --git a/src/dataloaderinterface/management/commands/fix_data.py b/src/dataloaderinterface/management/commands/fix_data.py index 0b473ed0..9f37ccf1 100644 --- a/src/dataloaderinterface/management/commands/fix_data.py +++ b/src/dataloaderinterface/management/commands/fix_data.py @@ -1,148 +1,148 @@ - -from dataloader.models import DataLoggerProgramFile, DataLoggerFile, ActionBy, TimeSeriesResult, DataLoggerFileColumn, \ - InstrumentOutputVariable, Result -from django.core.management.base import BaseCommand - -from dataloaderinterface.models import DeviceRegistration - - -class Command(BaseCommand): - help = 'Fix corrupted or 
incomplete odm2 data in the scope of this application.' - - @staticmethod - def guess_equipment_model(result): - return InstrumentOutputVariable.objects.filter(variable=result.variable, instrument_raw_output_unit=result.unit).first() - - @staticmethod - def check_sampling_feature(registration): - sampling_feature = registration.sampling_feature - if sampling_feature.feature_actions.count() == 0: - # site doesn't have any sensors. delete site and registration. - - data_logger_program = DataLoggerProgramFile.filter( - affiliation=registration.user.affiliation, - program_name='%s' % sampling_feature.sampling_feature_code - ).all() - - data_logger_file = DataLoggerFile.filter( - program=data_logger_program, - data_logger_file_name='%s' % sampling_feature.sampling_feature_code - ).all() - - # TODO: columns. - - print("* site doesn't have any sensors. deleting.") - sampling_feature.site and sampling_feature.site.delete() - print("- site instance deleted.") - sampling_feature.delete() - print("- sampling feature instance deleted.") - registration.delete() - print("- registration instance deleted.") - - @staticmethod - def delete_entire_registration(): - pass - - def handle(self, *args, **options): - registrations = DeviceRegistration.objects.all() - print("%s site registrations found." % registrations.count()) - print("----------------------------------------------------") - - for registration in registrations: - if not registration.user and registration.user.affiliation and registration.sampling_feature: - # doesn't have an affiliation or sampling feature. no way of fixing this. - print("* unrepairable registration found. deleting." % registration) - registration.delete() - continue - - affiliation = registration.user.affiliation - sampling_feature = registration.sampling_feature - print("---- retrieving registration for site %s, deployment by %s." % (sampling_feature, affiliation)) - - if sampling_feature.feature_actions.count() == 0: - # site doesn't have any sensors. delete site and registration. - print("* site doesn't have any sensors. deleting.") - sampling_feature.site and sampling_feature.site.delete() - print("- site instance deleted.") - sampling_feature.delete() - print("- sampling feature instance deleted.") - registration.delete() - print("- registration instance deleted.") - continue - - data_logger_program, created = DataLoggerProgramFile.objects.get_or_create( - affiliation=affiliation, - program_name='%s' % sampling_feature.sampling_feature_code - ) - print("- data logger program found for this site." if not created else "* no data logger program found for this site. creating.") - - data_logger_file, created = DataLoggerFile.objects.get_or_create( - program=data_logger_program, - data_logger_file_name='%s' % sampling_feature.sampling_feature_code - ) - print("- data logger file found for this site." if not created else "* no data logger file found for this site. creating.") - - print("-- loading feature actions") - for feature_action in sampling_feature.feature_actions.all(): - result = feature_action.results.first() - action = feature_action.action - - if not result: - print("* feature action has no associated result. deleting.") - feature_action.delete() - print("- feature action instance deleted.") - action.action_by and action.action_by.all().delete() - print("- action by instance deleted.") - action.delete() - print("- action instance deleted.") - continue - - print("- retrieving result %s." 
% result) - action = feature_action.action - action_by, created = ActionBy.objects.get_or_create(action=action, affiliation=affiliation, is_action_lead=True) - print("- action by instance found." if not created else "* action by instance not found. creating.") - - time_series_result, created = TimeSeriesResult.objects.get_or_create( - result=result, - aggregation_statistic_id='Average', - ) - print("- time series result instance found." if not created else "* no time series result instance found for this site. creating.") - - column = result.data_logger_file_columns.first() - column_label = '%s(%s)' % (result.variable.variable_code, result.unit.unit_abbreviation) - if not column: - print("* no data logger file column associated with this result. creating.") - DataLoggerFileColumn.objects.create( - result=result, - data_logger_file=data_logger_file, - instrument_output_variable=self.guess_equipment_model(result), - column_label=column_label - ) - else: - if column.data_logger_file != data_logger_file: - print("* data logger column doesn't belong to retrieved data logger file. matching up.") - column.data_logger_file = data_logger_file - column.save() - if column.column_label != column_label: - column.column_label = column_label - column.save() - - # data_logger_file_column = result.data_logger_file_columns.first() - # data_logger_file_column.instrument_output_variable = instrument_output_variable - # data_logger_file_column.column_label = '%s(%s)' % ( - # result.variable.variable_code, result.unit.unit_abbreviation) - - if sampling_feature.feature_actions.count() == 0: - # TODO: delete sampling feature. - pass - - # Remove rogue Time Series Results. - site_registrations = [str(uuid['deployment_sampling_feature_uuid']) for uuid in DeviceRegistration.objects.all().values('deployment_sampling_feature_uuid')] - rogue_results = Result.objects.exclude(feature_action__sampling_feature__sampling_feature_uuid__in=(site_registrations)) - for rogue_result in rogue_results: - rogue_result.timeseriesresult and rogue_result.timeseriesresult.values.all().delete() - rogue_result.timeseriesresult and rogue_result.timeseriesresult.delete() - rogue_result.delete() - - - print("check complete!") +# +# from dataloader.models import DataLoggerProgramFile, DataLoggerFile, ActionBy, TimeSeriesResult, DataLoggerFileColumn, \ +# InstrumentOutputVariable, Result +# from django.core.management.base import BaseCommand +# +# from dataloaderinterface.models import DeviceRegistration +# +# +# class Command(BaseCommand): +# help = 'Fix corrupted or incomplete odm2 data in the scope of this application.' +# +# @staticmethod +# def guess_equipment_model(result): +# return InstrumentOutputVariable.objects.filter(variable=result.variable, instrument_raw_output_unit=result.unit).first() +# +# @staticmethod +# def check_sampling_feature(registration): +# sampling_feature = registration.sampling_feature +# if sampling_feature.feature_actions.count() == 0: +# # site doesn't have any sensors. delete site and registration. +# +# data_logger_program = DataLoggerProgramFile.filter( +# affiliation=registration.user.affiliation, +# program_name='%s' % sampling_feature.sampling_feature_code +# ).all() +# +# data_logger_file = DataLoggerFile.filter( +# program=data_logger_program, +# data_logger_file_name='%s' % sampling_feature.sampling_feature_code +# ).all() +# +# # TODO: columns. +# +# print("* site doesn't have any sensors. 
deleting.") +# sampling_feature.site and sampling_feature.site.delete() +# print("- site instance deleted.") +# sampling_feature.delete() +# print("- sampling feature instance deleted.") +# registration.delete() +# print("- registration instance deleted.") +# +# @staticmethod +# def delete_entire_registration(): +# pass +# +# def handle(self, *args, **options): +# registrations = DeviceRegistration.objects.all() +# print("%s site registrations found." % registrations.count()) +# print("----------------------------------------------------") +# +# for registration in registrations: +# if not registration.user and registration.user.affiliation and registration.sampling_feature: +# # doesn't have an affiliation or sampling feature. no way of fixing this. +# print("* unrepairable registration found. deleting." % registration) +# registration.delete() +# continue +# +# affiliation = registration.user.affiliation +# sampling_feature = registration.sampling_feature +# print("---- retrieving registration for site %s, deployment by %s." % (sampling_feature, affiliation)) +# +# if sampling_feature.feature_actions.count() == 0: +# # site doesn't have any sensors. delete site and registration. +# print("* site doesn't have any sensors. deleting.") +# sampling_feature.site and sampling_feature.site.delete() +# print("- site instance deleted.") +# sampling_feature.delete() +# print("- sampling feature instance deleted.") +# registration.delete() +# print("- registration instance deleted.") +# continue +# +# data_logger_program, created = DataLoggerProgramFile.objects.get_or_create( +# affiliation=affiliation, +# program_name='%s' % sampling_feature.sampling_feature_code +# ) +# print("- data logger program found for this site." if not created else "* no data logger program found for this site. creating.") +# +# data_logger_file, created = DataLoggerFile.objects.get_or_create( +# program=data_logger_program, +# data_logger_file_name='%s' % sampling_feature.sampling_feature_code +# ) +# print("- data logger file found for this site." if not created else "* no data logger file found for this site. creating.") +# +# print("-- loading feature actions") +# for feature_action in sampling_feature.feature_actions.all(): +# result = feature_action.results.first() +# action = feature_action.action +# +# if not result: +# print("* feature action has no associated result. deleting.") +# feature_action.delete() +# print("- feature action instance deleted.") +# action.action_by and action.action_by.all().delete() +# print("- action by instance deleted.") +# action.delete() +# print("- action instance deleted.") +# continue +# +# print("- retrieving result %s." % result) +# action = feature_action.action +# action_by, created = ActionBy.objects.get_or_create(action=action, affiliation=affiliation, is_action_lead=True) +# print("- action by instance found." if not created else "* action by instance not found. creating.") +# +# time_series_result, created = TimeSeriesResult.objects.get_or_create( +# result=result, +# aggregation_statistic_id='Average', +# ) +# print("- time series result instance found." if not created else "* no time series result instance found for this site. creating.") +# +# column = result.data_logger_file_columns.first() +# column_label = '%s(%s)' % (result.variable.variable_code, result.unit.unit_abbreviation) +# if not column: +# print("* no data logger file column associated with this result. 
creating.") +# DataLoggerFileColumn.objects.create( +# result=result, +# data_logger_file=data_logger_file, +# instrument_output_variable=self.guess_equipment_model(result), +# column_label=column_label +# ) +# else: +# if column.data_logger_file != data_logger_file: +# print("* data logger column doesn't belong to retrieved data logger file. matching up.") +# column.data_logger_file = data_logger_file +# column.save() +# if column.column_label != column_label: +# column.column_label = column_label +# column.save() +# +# # data_logger_file_column = result.data_logger_file_columns.first() +# # data_logger_file_column.instrument_output_variable = instrument_output_variable +# # data_logger_file_column.column_label = '%s(%s)' % ( +# # result.variable.variable_code, result.unit.unit_abbreviation) +# +# if sampling_feature.feature_actions.count() == 0: +# # TODO: delete sampling feature. +# pass +# +# # Remove rogue Time Series Results. +# site_registrations = [str(uuid['deployment_sampling_feature_uuid']) for uuid in DeviceRegistration.objects.all().values('deployment_sampling_feature_uuid')] +# rogue_results = Result.objects.exclude(feature_action__sampling_feature__sampling_feature_uuid__in=(site_registrations)) +# for rogue_result in rogue_results: +# rogue_result.timeseriesresult and rogue_result.timeseriesresult.values.all().delete() +# rogue_result.timeseriesresult and rogue_result.timeseriesresult.delete() +# rogue_result.delete() +# +# +# print("check complete!") diff --git a/src/dataloaderinterface/management/commands/fix_site_deployment_date.py b/src/dataloaderinterface/management/commands/fix_site_deployment_date.py new file mode 100644 index 00000000..1a6d5ec6 --- /dev/null +++ b/src/dataloaderinterface/management/commands/fix_site_deployment_date.py @@ -0,0 +1,28 @@ +from django.core.management.base import BaseCommand +from django.db.models.aggregates import Count, Min + +from dataloader.models import Result +from dataloaderinterface.models import SiteRegistration + + +class Command(BaseCommand): + help = '' + + def update_sensors_activation_date(self, site): + for sensor in site.sensors.all(): + series_result = sensor.result.timeseriesresult + if series_result.values.count() == 0: + continue + + earliest_value = series_result.values.earliest('value_datetime') + sensor.activation_date = earliest_value.value_datetime + sensor.activation_date_utc_offset = earliest_value.value_datetime_utc_offset + sensor.save(update_fields=['activation_date', 'activation_date_utc_offset']) + + def handle(self, *args, **options): + sites = SiteRegistration.objects.prefetch_related('sensors').all() + for site in sites: + self.update_sensors_activation_date(site) + min_datetime = site.sensors.aggregate(first_light=Min('activation_date')) + site.deployment_date = min_datetime['first_light'] + site.save(update_fields=['deployment_date']) \ No newline at end of file diff --git a/src/dataloaderinterface/management/commands/migrate_site_registrations.py b/src/dataloaderinterface/management/commands/migrate_site_registrations.py index 28e7e3c7..21bcd363 100644 --- a/src/dataloaderinterface/management/commands/migrate_site_registrations.py +++ b/src/dataloaderinterface/management/commands/migrate_site_registrations.py @@ -1,84 +1,84 @@ -from django.core.management.base import BaseCommand - -from dataloader.models import SamplingFeature -from dataloaderinterface.models import DeviceRegistration, SiteRegistration, SiteSensor - - -class Command(BaseCommand): - help = 'Fix corrupted or incomplete odm2 data in the 
scope of this application.' - - @staticmethod - def create_site_registration(device_registration): - existing_site_registration = SiteRegistration.objects.filter(registration_token=device_registration.authentication_token).first() - if existing_site_registration: - return existing_site_registration - - affiliation = device_registration.user.affiliation - sampling_feature = device_registration.sampling_feature - - registration_data = { - 'registration_token': device_registration.authentication_token, - 'registration_date': device_registration.registration_date(), - 'django_user': device_registration.user.user, - 'affiliation_id': device_registration.user.affiliation_id, - 'person': str(affiliation.person), - 'organization': str(affiliation.organization) or '', - 'sampling_feature_id': sampling_feature.sampling_feature_id, - 'sampling_feature_code': sampling_feature.sampling_feature_code, - 'sampling_feature_name': sampling_feature.sampling_feature_name, - 'elevation_m': sampling_feature.elevation_m, - 'latitude': sampling_feature.site.latitude, - 'longitude': sampling_feature.site.longitude, - 'site_type': sampling_feature.site.site_type_id - } - - site_registration = SiteRegistration(**registration_data) - site_registration.save() - return site_registration - - @staticmethod - def create_site_sensor(result, site_registration): - existing_site_sensor = SiteSensor.objects.filter(result_id=result.result_id).first() - if existing_site_sensor: - return existing_site_sensor - - model = result.data_logger_file_columns.first().instrument_output_variable.model - values_manager = result.timeseriesresult.values - last_value = values_manager.latest('value_datetime') if values_manager.count() > 0 else None - - sensor_data = { - 'result_id': result.result_id, - 'result_uuid': result.result_uuid, - 'registration': site_registration, - 'model_name': model.model_name, - 'model_manufacturer': model.model_manufacturer.organization_name, - 'variable_name': result.variable.variable_name_id, - 'variable_code': result.variable.variable_code, - 'unit_name': result.unit.unit_name, - 'unit_abbreviation': result.unit.unit_abbreviation, - 'sampled_medium': result.sampled_medium_id, - 'activation_date': result.valid_datetime, - 'activation_date_utc_offset': result.valid_datetime_utc_offset, - 'last_measurement_id': last_value and last_value.value_id - } - - site_sensor = SiteSensor(**sensor_data) - site_sensor.save() - return site_sensor - - def handle(self, *args, **options): - sampling_features = SamplingFeature.objects.all() - print("%s sites found." % sampling_features.count()) - print("----------------------------------------------------") - - for sampling_feature in sampling_features: - device_registration = DeviceRegistration.objects.filter(deployment_sampling_feature_uuid=sampling_feature.sampling_feature_uuid).first() - if not device_registration: - print('**** Sampling Feature %s (%s) exists without a site registration!!!' % (sampling_feature.sampling_feature_id, sampling_feature.sampling_feature_code)) - continue - - site_registration = self.create_site_registration(device_registration) - for feature_action in sampling_feature.feature_actions.all(): - result = feature_action.results.first() - self.create_site_sensor(result, site_registration) - print('- site %s migrated!' 
% sampling_feature.sampling_feature_code) +# from django.core.management.base import BaseCommand +# +# from dataloader.models import SamplingFeature +# from dataloaderinterface.models import DeviceRegistration, SiteRegistration, SiteSensor +# +# +# class Command(BaseCommand): +# help = 'Fix corrupted or incomplete odm2 data in the scope of this application.' +# +# @staticmethod +# def create_site_registration(device_registration): +# existing_site_registration = SiteRegistration.objects.filter(registration_token=device_registration.authentication_token).first() +# if existing_site_registration: +# return existing_site_registration +# +# affiliation = device_registration.user.affiliation +# sampling_feature = device_registration.sampling_feature +# +# registration_data = { +# 'registration_token': device_registration.authentication_token, +# 'registration_date': device_registration.registration_date(), +# 'django_user': device_registration.user.user, +# 'affiliation_id': device_registration.user.affiliation_id, +# 'person': str(affiliation.person), +# 'organization': str(affiliation.organization) or '', +# 'sampling_feature_id': sampling_feature.sampling_feature_id, +# 'sampling_feature_code': sampling_feature.sampling_feature_code, +# 'sampling_feature_name': sampling_feature.sampling_feature_name, +# 'elevation_m': sampling_feature.elevation_m, +# 'latitude': sampling_feature.site.latitude, +# 'longitude': sampling_feature.site.longitude, +# 'site_type': sampling_feature.site.site_type_id +# } +# +# site_registration = SiteRegistration(**registration_data) +# site_registration.save() +# return site_registration +# +# @staticmethod +# def create_site_sensor(result, site_registration): +# existing_site_sensor = SiteSensor.objects.filter(result_id=result.result_id).first() +# if existing_site_sensor: +# return existing_site_sensor +# +# model = result.data_logger_file_columns.first().instrument_output_variable.model +# values_manager = result.timeseriesresult.values +# last_value = values_manager.latest('value_datetime') if values_manager.count() > 0 else None +# +# sensor_data = { +# 'result_id': result.result_id, +# 'result_uuid': result.result_uuid, +# 'registration': site_registration, +# 'model_name': model.model_name, +# 'model_manufacturer': model.model_manufacturer.organization_name, +# 'variable_name': result.variable.variable_name_id, +# 'variable_code': result.variable.variable_code, +# 'unit_name': result.unit.unit_name, +# 'unit_abbreviation': result.unit.unit_abbreviation, +# 'sampled_medium': result.sampled_medium_id, +# 'activation_date': result.valid_datetime, +# 'activation_date_utc_offset': result.valid_datetime_utc_offset, +# 'last_measurement_id': last_value and last_value.value_id +# } +# +# site_sensor = SiteSensor(**sensor_data) +# site_sensor.save() +# return site_sensor +# +# def handle(self, *args, **options): +# sampling_features = SamplingFeature.objects.all() +# print("%s sites found." % sampling_features.count()) +# print("----------------------------------------------------") +# +# for sampling_feature in sampling_features: +# device_registration = DeviceRegistration.objects.filter(deployment_sampling_feature_uuid=sampling_feature.sampling_feature_uuid).first() +# if not device_registration: +# print('**** Sampling Feature %s (%s) exists without a site registration!!!' 
% (sampling_feature.sampling_feature_id, sampling_feature.sampling_feature_code)) +# continue +# +# site_registration = self.create_site_registration(device_registration) +# for feature_action in sampling_feature.feature_actions.all(): +# result = feature_action.results.first() +# self.create_site_sensor(result, site_registration) +# print('- site %s migrated!' % sampling_feature.sampling_feature_code) diff --git a/src/dataloaderinterface/management/commands/patch_results_datetime.py b/src/dataloaderinterface/management/commands/patch_results_datetime.py index b1b9c5a9..172104ec 100644 --- a/src/dataloaderinterface/management/commands/patch_results_datetime.py +++ b/src/dataloaderinterface/management/commands/patch_results_datetime.py @@ -14,7 +14,6 @@ def fix_results_value_count(): result.value_count = result.number_of_values result.save() - def handle(self, *args, **options): self.fix_results_value_count() results = Result.objects.prefetch_related('timeseriesresult__values').filter(value_count__gt=0) diff --git a/src/dataloaderinterface/management/commands/update_sensor_measurements.py b/src/dataloaderinterface/management/commands/update_sensor_measurements.py index d4a3b8c1..c7a56b10 100644 --- a/src/dataloaderinterface/management/commands/update_sensor_measurements.py +++ b/src/dataloaderinterface/management/commands/update_sensor_measurements.py @@ -19,24 +19,27 @@ def handle(self, *args, **options): elif sensor.last_measurement_id and not last_measurement: print('* %s (%s) sensor has a measurement and it shouldn\'t.' % (sensor.sensor_identity, sensor.result_id)) sensor.last_measurement_id = None - sensor.save(update_fields=['last_measurement_id']) + sensor.last_measurement_datetime = None + sensor.last_measurement_utc_offset = None + sensor.save(update_fields=['last_measurement_id', 'last_measurement_datetime', 'last_measurement_utc_offset']) continue elif last_measurement and not sensor.last_measurement_id: print('* %s (%s) sensor doesn\'t have a measurement and it should.' % (sensor.sensor_identity, sensor.result_id)) - sensor.last_measurement_id = last_measurement.value_id - sensor.save(update_fields=['last_measurement_id']) - continue - elif sensor.last_measurement_id == last_measurement.value_id: + elif sensor.last_measurement_id == last_measurement.value_id \ + and sensor.last_measurement_datetime == last_measurement.value_datetime\ + and sensor.last_measurement_utc_offset == last_measurement.value_datetime_utc_offset: print('- %s (%s) sensor is up to date.' 
% (sensor.sensor_identity, sensor.result_id)) continue print('*** outdated sensor %s (%s) - got: (%s) expected: (%s)' % ( sensor.sensor_identity, sensor.result_id, - sensor.last_measurement.value_datetime, + sensor.last_measurement_datetime, last_measurement.value_datetime )) sensor.last_measurement_id = last_measurement.value_id - sensor.save(update_fields=['last_measurement_id']) + sensor.last_measurement_datetime = last_measurement.value_datetime + sensor.last_measurement_utc_offset = last_measurement.value_datetime_utc_offset + sensor.save(update_fields=['last_measurement_id', 'last_measurement_datetime', 'last_measurement_utc_offset']) diff --git a/src/dataloaderinterface/migrations/__init__.py b/src/dataloaderinterface/migrations/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/dataloaderinterface/models.py b/src/dataloaderinterface/models.py index 5b2bb6d5..99b2c416 100644 --- a/src/dataloaderinterface/models.py +++ b/src/dataloaderinterface/models.py @@ -44,10 +44,10 @@ def sampling_feature(self): def odm2_affiliation(self): return Affiliation.objects.get(pk=self.affiliation_id) - @property - def deployment_date(self): - min_datetime = self.sensors.aggregate(first_light=Min('activation_date')) - return min_datetime['first_light'] + # @property + # def deployment_date(self): + # min_datetime = self.sensors.aggregate(first_light=Min('activation_date')) + # return min_datetime['first_light'] @property def last_measurements(self): @@ -75,16 +75,26 @@ def __repr__(self): class SiteSensor(models.Model): registration = models.ForeignKey('SiteRegistration', db_column='RegistrationID', related_name='sensors') + result_id = models.IntegerField(db_column='ResultID', unique=True) result_uuid = models.UUIDField(default=uuid.uuid4, editable=False, db_column='ResultUUID', unique=True) + model_name = models.CharField(db_column='ModelName', max_length=255) model_manufacturer = models.CharField(db_column='ModelManufacturer', max_length=255) + variable_name = models.CharField(max_length=255, db_column='VariableName') variable_code = models.CharField(max_length=50, db_column='VariableCode') + unit_name = models.CharField(max_length=255, db_column='UnitsName') unit_abbreviation = models.CharField(max_length=255, db_column='UnitAbbreviation') + sampled_medium = models.CharField(db_column='SampledMedium', max_length=255) + last_measurement_id = models.IntegerField(db_column='LastMeasurementID', unique=True, blank=True, null=True) + last_measurement_value = models.FloatField(db_column='LastMeasurementValue', blank=True, null=True) + last_measurement_datetime = models.DateTimeField(db_column='LastMeasurementDatetime', blank=True, null=True) + last_measurement_utc_offset = models.IntegerField(db_column='LastMeasurementUtcOffset', blank=True, null=True) + activation_date = models.DateTimeField(db_column='ActivationDate', blank=True, null=True) activation_date_utc_offset = models.IntegerField(db_column='ActivationDateUtcOffset', blank=True, null=True) @@ -127,7 +137,7 @@ def influx_url(self): return settings.INFLUX_URL_QUERY.format( result_uuid=str(self.result_uuid).replace('-', '_'), - last_measurement=self.last_measurement.value_datetime.strftime('%Y-%m-%dT%H:%M:%SZ'), + last_measurement=self.last_measurement_datetime.strftime('%Y-%m-%dT%H:%M:%SZ'), days_of_data=settings.SENSOR_DATA_PERIOD ) @@ -140,32 +150,6 @@ def __repr__(self): ) -class DeviceRegistration(models.Model): - registration_id = models.AutoField(primary_key=True, db_column='RegistrationID') - 
deployment_sampling_feature_uuid = models.UUIDField(db_column='SamplingFeatureUUID') - authentication_token = models.CharField(max_length=64, editable=False, db_column='AuthenticationToken') - user = models.ForeignKey('ODM2User', db_column='User') - # deployment_date = models.DateTimeField(db_column='DeploymentDate') - - def registration_date(self): - action = self.sampling_feature.actions.first() - return action and action.begin_datetime - - @property - def deployment_date(self): - sampling_feature = self.sampling_feature - min_datetime = sampling_feature.feature_actions.aggregate(first_light=Min('results__valid_datetime')) - return min_datetime['first_light'] - - @property - def sampling_feature(self): - return SamplingFeature.objects.get(sampling_feature_uuid__exact=self.deployment_sampling_feature_uuid) - - def __str__(self): - action = self.sampling_feature.actions.first() - return '{}\t{}: {}'.format(self.sampling_feature.sampling_feature_code, action.action_type_id, action.begin_datetime.strftime('%Y/%m/%d')) - - class ODM2User(models.Model): user = models.OneToOneField(User, on_delete=models.CASCADE) affiliation_id = models.IntegerField() diff --git a/src/dataloaderinterface/templates/dataloaderinterface/site_details.html b/src/dataloaderinterface/templates/dataloaderinterface/site_details.html index f033a0e3..c6b9c774 100644 --- a/src/dataloaderinterface/templates/dataloaderinterface/site_details.html +++ b/src/dataloaderinterface/templates/dataloaderinterface/site_details.html @@ -249,7 +249,7 @@
{{ sensor.variable_name }}
-                        {{ sensor.last_measurement.data_value|default:"-" }}
+                        {{ sensor.last_measurement_value|default:"-" }}
({{ sensor.unit_abbreviation }})
@@ -259,7 +259,7 @@ Date of last observation - {{ sensor.last_measurement.value_datetime|default:"-" }} + {{ sensor.last_measurement_datetime|default:"-" }} diff --git a/src/dataloaderinterface/tests/test_models.py b/src/dataloaderinterface/tests/test_models.py index db682381..bffad911 100644 --- a/src/dataloaderinterface/tests/test_models.py +++ b/src/dataloaderinterface/tests/test_models.py @@ -1,19 +1,19 @@ -from django.contrib.auth.models import User -from django.test import TestCase - -from dataloader.tests.data import data_manager -from dataloaderinterface.tests.test_views import TestAuthentication - -models_data = data_manager.test_data['models']['data'] - - -class TestDeviceRegistration(TestCase): - def create_device_registration(self): - pass - - def setUp(self): - self.user = TestAuthentication.create_user() - self.client.force_login(self.user) - - def test_string_representation(self): - pass \ No newline at end of file +# from django.contrib.auth.models import User +# from django.test import TestCase +# +# from dataloader.tests.data import data_manager +# from dataloaderinterface.tests.test_views import TestAuthentication +# +# models_data = data_manager.test_data['models']['data'] +# +# +# class TestDeviceRegistration(TestCase): +# def create_device_registration(self): +# pass +# +# def setUp(self): +# self.user = TestAuthentication.create_user() +# self.client.force_login(self.user) +# +# def test_string_representation(self): +# pass \ No newline at end of file diff --git a/src/dataloaderinterface/tests/test_views.py b/src/dataloaderinterface/tests/test_views.py index 28e93cb9..96d0a4cc 100644 --- a/src/dataloaderinterface/tests/test_views.py +++ b/src/dataloaderinterface/tests/test_views.py @@ -101,18 +101,18 @@ def tearDown(self): self.user.delete() -class TestDeviceRegistrationView(TestCase): - def setUp(self): - self.user = TestAuthentication.create_user() - self.client.force_login(self.user) - - def test_status(self): - url = reverse('device_registration') - response = self.client.get(url) - self.assertEqual(response.status_code, 200) - - def tearDown(self): - self.user.delete() +# class TestDeviceRegistrationView(TestCase): +# def setUp(self): +# self.user = TestAuthentication.create_user() +# self.client.force_login(self.user) +# +# def test_status(self): +# url = reverse('device_registration') +# response = self.client.get(url) +# self.assertEqual(response.status_code, 200) +# +# def tearDown(self): +# self.user.delete() class TestDeviceUpdateView(TestCase): diff --git a/src/dataloaderinterface/views.py b/src/dataloaderinterface/views.py index d8788ac5..5210d5e7 100644 --- a/src/dataloaderinterface/views.py +++ b/src/dataloaderinterface/views.py @@ -135,6 +135,9 @@ class SiteDetailView(DetailView): slug_url_kwarg = 'sampling_feature_code' template_name = 'dataloaderinterface/site_details.html' + def get_queryset(self): + return super(SiteDetailView, self).get_queryset().prefetch_related('sensors') + def get_context_data(self, **kwargs): context = super(SiteDetailView, self).get_context_data() context['tsa_url'] = settings.TSA_URL diff --git a/src/dataloaderservices/auth.py b/src/dataloaderservices/auth.py index 0302f607..69dc1ffa 100644 --- a/src/dataloaderservices/auth.py +++ b/src/dataloaderservices/auth.py @@ -2,7 +2,7 @@ from rest_framework import authentication from rest_framework import exceptions -from dataloaderinterface.models import DeviceRegistration, SiteRegistration +from dataloaderinterface.models import SiteRegistration class 
UUIDAuthentication(authentication.BaseAuthentication): From 5970b9d975e8d68a529d6e9ffab31d4f28b4d32d Mon Sep 17 00:00:00 2001 From: Juan Date: Fri, 1 Dec 2017 13:02:05 -0700 Subject: [PATCH 05/16] sensors' last measurement datetime is now updated when data is received. --- src/dataloaderservices/views.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/dataloaderservices/views.py b/src/dataloaderservices/views.py index 4670b798..c012b61d 100644 --- a/src/dataloaderservices/views.py +++ b/src/dataloaderservices/views.py @@ -214,6 +214,8 @@ def post(self, request, format=None): site_sensor = SiteSensor.objects.filter(result_id=result.result_id).first() site_sensor.last_measurement_id = result_value.value_id + site_sensor.last_measurement_datetime = result_value.value_datetime + site_sensor.last_measurement_datetime_utc_offset = result_value.value_datetime_utc_offset if is_first_value: result.valid_datetime = measurement_datetime @@ -221,7 +223,7 @@ def post(self, request, format=None): site_sensor.activation_date = measurement_datetime site_sensor.activation_date_utc_offset = utc_offset - site_sensor.save(update_fields=['last_measurement_id', 'activation_date', 'activation_date_utc_offset']) + site_sensor.save(update_fields=['last_measurement_id', 'last_measurement_datetime', 'last_measurement_datetime_utc_offset', 'activation_date', 'activation_date_utc_offset']) result.save(update_fields=['result_datetime', 'value_count', 'result_datetime_utc_offset', 'valid_datetime', 'valid_datetime_utc_offset']) return Response({}, status.HTTP_201_CREATED) From 13b3731921137a7b7783dd014486a9e65b0d7e8f Mon Sep 17 00:00:00 2001 From: Juan Date: Fri, 1 Dec 2017 14:09:04 -0700 Subject: [PATCH 06/16] streaming web service now records the "deployment" date of a site the first time it receives data. 
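Note: the rule this patch enforces is simply "the first accepted data value stamps the site." A minimal sketch of that bookkeeping, for reference only (the helper name is invented here for illustration; the deployment_date fields match the SiteRegistration columns used elsewhere in this series):

    # Illustration only -- not the code in the hunk below.
    def record_deployment_date(registration, measurement_datetime, utc_offset):
        # Only the very first value ever received may set the deployment date;
        # later values must leave it untouched.
        if registration.deployment_date:
            return
        registration.deployment_date = measurement_datetime
        registration.deployment_date_utc_offset = utc_offset
        registration.save(update_fields=['deployment_date', 'deployment_date_utc_offset'])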
--- src/dataloaderservices/views.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/src/dataloaderservices/views.py b/src/dataloaderservices/views.py index c012b61d..881edb3c 100644 --- a/src/dataloaderservices/views.py +++ b/src/dataloaderservices/views.py @@ -184,6 +184,8 @@ def post(self, request, format=None): feature_actions = sampling_feature.feature_actions.prefetch_related('results__variable', 'action').all() for feature_action in feature_actions: result = feature_action.results.all().first() + site_sensor = SiteSensor.objects.filter(result_id=result.result_id).first() + is_first_value = result.value_count == 0 # don't create a new TimeSeriesValue for results that are not included in the request @@ -212,7 +214,7 @@ def post(self, request, format=None): result.result_datetime = measurement_datetime result.result_datetime_utc_offset = utc_offset - site_sensor = SiteSensor.objects.filter(result_id=result.result_id).first() + site_sensor.last_measurement_id = result_value.value_id site_sensor.last_measurement_datetime = result_value.value_datetime site_sensor.last_measurement_datetime_utc_offset = result_value.value_datetime_utc_offset @@ -223,6 +225,11 @@ def post(self, request, format=None): site_sensor.activation_date = measurement_datetime site_sensor.activation_date_utc_offset = utc_offset + if not site_sensor.registration.deployment_date: + site_sensor.registration.deployment_date = measurement_datetime + site_sensor.registration.deployment_date_utc_offset = utc_offset + site_sensor.registration.save(update_fields=['deployment_date', 'deployment_date_utc_offset']) + site_sensor.save(update_fields=['last_measurement_id', 'last_measurement_datetime', 'last_measurement_datetime_utc_offset', 'activation_date', 'activation_date_utc_offset']) result.save(update_fields=['result_datetime', 'value_count', 'result_datetime_utc_offset', 'valid_datetime', 'valid_datetime_utc_offset']) From ed48527cdc4f5e3407e3a82b91b39b590c81427f Mon Sep 17 00:00:00 2001 From: Juan Date: Fri, 1 Dec 2017 22:34:50 -0700 Subject: [PATCH 07/16] implemented faster and more efficient way of calculating a site's latest measurement. --- src/dataloaderinterface/models.py | 28 +++++++++---------- .../dataloaderinterface/browse-sites.html | 2 +- src/dataloaderinterface/views.py | 5 ++++ 3 files changed, 20 insertions(+), 15 deletions(-) diff --git a/src/dataloaderinterface/models.py b/src/dataloaderinterface/models.py index 99b2c416..8ed81c03 100644 --- a/src/dataloaderinterface/models.py +++ b/src/dataloaderinterface/models.py @@ -3,7 +3,7 @@ # Create your models here. 
import uuid -from django.db.models.aggregates import Min +from django.db.models.aggregates import Min, Max from dataloader.models import SamplingFeature, Affiliation, Result, TimeSeriesResultValue, EquipmentModel, Variable, \ Unit, Medium @@ -49,20 +49,20 @@ def odm2_affiliation(self): # min_datetime = self.sensors.aggregate(first_light=Min('activation_date')) # return min_datetime['first_light'] - @property - def last_measurements(self): - if not self.deployment_date: - return [] - - measurement_ids = [long(measurement.last_measurement_id) for measurement in self.sensors.all() if measurement.last_measurement_id] - measurements = TimeSeriesResultValue.objects.filter(pk__in=measurement_ids) - return measurements + # @property + # def last_measurements(self): + # if not self.deployment_date: + # return [] + # + # measurement_ids = [long(measurement.last_measurement_id) for measurement in self.sensors.all() if measurement.last_measurement_id] + # measurements = TimeSeriesResultValue.objects.filter(pk__in=measurement_ids) + # return measurements - @property - def latest_measurement(self): - if not self.deployment_date: - return None - return self.last_measurements.latest('value_datetime') + # @property + # def latest_measurement(self): + # if not self.deployment_date: + # return None + # return self.sensors.aggregate(last_update=Max('last_measurement_datetime'))['last_update'] def __str__(self): return '%s by %s from %s on %s' % (self.sampling_feature_code, self.person, self.organization, self.registration_date) diff --git a/src/dataloaderinterface/templates/dataloaderinterface/browse-sites.html b/src/dataloaderinterface/templates/dataloaderinterface/browse-sites.html index 2c8d7240..bfd4bf7c 100644 --- a/src/dataloaderinterface/templates/dataloaderinterface/browse-sites.html +++ b/src/dataloaderinterface/templates/dataloaderinterface/browse-sites.html @@ -32,7 +32,7 @@ "latitude": {{ site.latitude|default_if_none:0 }}, "longitude": {{ site.longitude|default_if_none:0 }}, "elevation": {{ site.elevation_m|default_if_none:'""' }}, - "latest-measurement": "{% if site.latest_measurement.value_datetime %}{{ site.latest_measurement.value_datetime }} ({{ site.latest_measurement.value_datetime|timesince }} ago){% else %}-{% endif %}", + "latest-measurement": "{% if site.latest_measurement %}{{ site.latest_measurement }} ({{ site.latest_measurement|timesince }} ago){% else %}-{% endif %}", "detail_link": "{% url 'site_detail' site.sampling_feature_code %}", "detail_link": "{% url 'site_detail' site.sampling_feature_code %}" }{% if not forloop.last %},{% endif %} diff --git a/src/dataloaderinterface/views.py b/src/dataloaderinterface/views.py index 5210d5e7..bfbaebb3 100644 --- a/src/dataloaderinterface/views.py +++ b/src/dataloaderinterface/views.py @@ -2,6 +2,8 @@ from uuid import uuid4 from django.conf import settings +from django.db.models.aggregates import Max +from django.db.models.expressions import F from dataloader.models import FeatureAction, Result, ProcessingLevel, TimeSeriesResult, SamplingFeature, \ SpatialReference, \ @@ -127,6 +129,9 @@ class BrowseSitesListView(ListView): context_object_name = 'sites' template_name = 'dataloaderinterface/browse-sites.html' + def get_queryset(self): + return super(BrowseSitesListView, self).get_queryset().prefetch_related('sensors').annotate(latest_measurement=Max('sensors__last_measurement_datetime')) + class SiteDetailView(DetailView): model = SiteRegistration From 127a2de4c2196d7288526161fe620f2656b31fee Mon Sep 17 00:00:00 2001 From: Juan Date: Fri, 
1 Dec 2017 22:51:08 -0700 Subject: [PATCH 08/16] using that new method of getting last measuremen to reduce drastically the number of queries on the My Sites page. --- .../dataloaderinterface/my-sites.html | 46 +++++++++---------- src/dataloaderinterface/views.py | 19 ++++++-- 2 files changed, 36 insertions(+), 29 deletions(-) diff --git a/src/dataloaderinterface/templates/dataloaderinterface/my-sites.html b/src/dataloaderinterface/templates/dataloaderinterface/my-sites.html index 4f357985..e3e69556 100644 --- a/src/dataloaderinterface/templates/dataloaderinterface/my-sites.html +++ b/src/dataloaderinterface/templates/dataloaderinterface/my-sites.html @@ -73,8 +73,7 @@

@@ -154,24 +152,22 @@

query_builder - - Latest Measurement - - - - - {{ latest_result.result_datetime }} ({{ latest_result.result_datetime|timesince }} ago) - - - - {% endif %} - {% endwith %} + {% if site.latest_measurement %} + + + + + + + + + + + +
query_builder + Latest Measurement +
{{ site.latest_measurement }} ({{ site.latest_measurement|timesince }} ago)
+ {% endif %}
@@ -203,7 +199,7 @@

0 %},{% endif %} {% endfor %} @@ -215,7 +211,7 @@

Date: Mon, 4 Dec 2017 08:07:52 -0700 Subject: [PATCH 09/16] measurement data value was not being saved. --- .../management/commands/update_sensor_measurements.py | 6 ++++-- src/dataloaderservices/views.py | 11 +++++++++-- 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/src/dataloaderinterface/management/commands/update_sensor_measurements.py b/src/dataloaderinterface/management/commands/update_sensor_measurements.py index c7a56b10..f840df98 100644 --- a/src/dataloaderinterface/management/commands/update_sensor_measurements.py +++ b/src/dataloaderinterface/management/commands/update_sensor_measurements.py @@ -29,7 +29,8 @@ def handle(self, *args, **options): elif sensor.last_measurement_id == last_measurement.value_id \ and sensor.last_measurement_datetime == last_measurement.value_datetime\ - and sensor.last_measurement_utc_offset == last_measurement.value_datetime_utc_offset: + and sensor.last_measurement_utc_offset == last_measurement.value_datetime_utc_offset\ + and sensor.last_measurement_value == last_measurement.data_value: print('- %s (%s) sensor is up to date.' % (sensor.sensor_identity, sensor.result_id)) continue @@ -40,6 +41,7 @@ def handle(self, *args, **options): last_measurement.value_datetime )) sensor.last_measurement_id = last_measurement.value_id + sensor.last_measurement_value = last_measurement.data_value sensor.last_measurement_datetime = last_measurement.value_datetime sensor.last_measurement_utc_offset = last_measurement.value_datetime_utc_offset - sensor.save(update_fields=['last_measurement_id', 'last_measurement_datetime', 'last_measurement_utc_offset']) + sensor.save(update_fields=['last_measurement_id', 'last_measurement_value', 'last_measurement_datetime', 'last_measurement_utc_offset']) diff --git a/src/dataloaderservices/views.py b/src/dataloaderservices/views.py index 881edb3c..a38fe745 100644 --- a/src/dataloaderservices/views.py +++ b/src/dataloaderservices/views.py @@ -216,6 +216,7 @@ def post(self, request, format=None): site_sensor.last_measurement_id = result_value.value_id + site_sensor.last_measurement_value = result_value.data_value site_sensor.last_measurement_datetime = result_value.value_datetime site_sensor.last_measurement_datetime_utc_offset = result_value.value_datetime_utc_offset @@ -230,7 +231,13 @@ def post(self, request, format=None): site_sensor.registration.deployment_date_utc_offset = utc_offset site_sensor.registration.save(update_fields=['deployment_date', 'deployment_date_utc_offset']) - site_sensor.save(update_fields=['last_measurement_id', 'last_measurement_datetime', 'last_measurement_datetime_utc_offset', 'activation_date', 'activation_date_utc_offset']) - result.save(update_fields=['result_datetime', 'value_count', 'result_datetime_utc_offset', 'valid_datetime', 'valid_datetime_utc_offset']) + site_sensor.save(update_fields=[ + 'last_measurement_id', 'last_measurement_value', 'last_measurement_datetime', + 'last_measurement_datetime_utc_offset', 'activation_date', 'activation_date_utc_offset' + ]) + result.save(update_fields=[ + 'result_datetime', 'value_count', 'result_datetime_utc_offset', + 'valid_datetime', 'valid_datetime_utc_offset' + ]) return Response({}, status.HTTP_201_CREATED) From 51fe3bb1b6c9cff06c8b504d8cac8e77ff94de30 Mon Sep 17 00:00:00 2001 From: Juan Date: Mon, 4 Dec 2017 08:09:13 -0700 Subject: [PATCH 10/16] again using that new method of getting last measurement to reduce drastically the number of queries, now on the Status page. 
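For context, the query shape this patch (and the similar My Sites change in PATCH 08) relies on looks roughly like the sketch below: the per-site status sensors are prefetched into a plain list and the latest timestamp is computed in the database, so the template never fires per-row queries. This is an illustration, not the literal view code; the variable codes are the ones filtered in the hunk below.

    # Sketch only: one query for the sites, one for the prefiltered status sensors.
    from django.db.models import Max, Prefetch
    from dataloaderinterface.models import SiteRegistration, SiteSensor

    STATUS_VARIABLES = ('EnviroDIY_Mayfly_Volt', 'EnviroDIY_Mayfly_Temp', 'EnviroDIY_Mayfly_FreeSRAM')

    sites = (
        SiteRegistration.objects
        .prefetch_related(Prefetch(
            'sensors',
            queryset=SiteSensor.objects.filter(variable_code__in=STATUS_VARIABLES),
            to_attr='status_sensors'))
        .annotate(latest_measurement=Max('sensors__last_measurement_datetime'))
    )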
--- .../templates/dataloaderinterface/status.html | 24 +++++++++---------- src/dataloaderinterface/templatetags/site.py | 4 ++-- src/dataloaderinterface/views.py | 14 +++++++++-- 3 files changed, 26 insertions(+), 16 deletions(-) diff --git a/src/dataloaderinterface/templates/dataloaderinterface/status.html b/src/dataloaderinterface/templates/dataloaderinterface/status.html index 91442bd6..3fe2fa2a 100644 --- a/src/dataloaderinterface/templates/dataloaderinterface/status.html +++ b/src/dataloaderinterface/templates/dataloaderinterface/status.html @@ -87,14 +87,14 @@

Sites Status

{{ site.sampling_feature_code }} {{ site.sampling_feature_name }} - {% if not site.latest_measurement.value_datetime %} + {% if not site.latest_measurement %} -{% else %} - {{ site.latest_measurement.value_datetime }} - ({{ site.latest_measurement.value_datetime|timesince }} ago) + {{ site.latest_measurement }} + ({{ site.latest_measurement|timesince }} ago) {% endif %} - {{ voltage.last_measurement.data_value|default:"-" }} - {{ temperature.last_measurement.data_value|default:"-" }} - {{ sram.last_measurement.data_value|default:"-" }} + {{ voltage.last_measurement_value|default:"-" }} + {{ temperature.last_measurement_value|default:"-" }} + {{ sram.last_measurement_value|default:"-" }} {% endwith %} {% endfor %} @@ -120,14 +120,14 @@

Sites Status

{{ site.sampling_feature_code }} {{ site.sampling_feature_name }} - {% if not site.latest_measurement.value_datetime %} + {% if not site.latest_measurement %} -{% else %} - {{ site.latest_measurement.value_datetime }} - ({{ site.latest_measurement.value_datetime|timesince }} ago) + {{ site.latest_measurement }} + ({{ site.latest_measurement|timesince }} ago) {% endif %} - {{ voltage.last_measurement.data_value|default:"-" }} - {{ temperature.last_measurement.data_value|default:"-" }} - {{ sram.last_measurement.data_value|default:"-" }} + {{ voltage.last_measurement_value|default:"-" }} + {{ temperature.last_measurement_value|default:"-" }} + {{ sram.last_measurement_value|default:"-" }} {% endwith %} {% endfor %} diff --git a/src/dataloaderinterface/templatetags/site.py b/src/dataloaderinterface/templatetags/site.py index 0e85b0c7..d279261a 100644 --- a/src/dataloaderinterface/templatetags/site.py +++ b/src/dataloaderinterface/templatetags/site.py @@ -7,10 +7,10 @@ @register.filter(name='get_site_sensor') def get_site_sensor(site, variable_code): - if not isinstance(site, SiteRegistration): + if not isinstance(site, SiteRegistration) or not site.status_sensors: return - return site.sensors.filter(variable_code=variable_code).first() + return next((sensor for sensor in site.status_sensors if sensor.variable_code==variable_code), None) @register.filter(name='can_administer_site') diff --git a/src/dataloaderinterface/views.py b/src/dataloaderinterface/views.py index 4e4b8a15..4da5568e 100644 --- a/src/dataloaderinterface/views.py +++ b/src/dataloaderinterface/views.py @@ -4,6 +4,7 @@ from django.conf import settings from django.db.models.aggregates import Max from django.db.models.expressions import F +from django.db.models.query import Prefetch from dataloader.models import FeatureAction, Result, ProcessingLevel, TimeSeriesResult, SamplingFeature, \ SpatialReference, \ @@ -124,12 +125,21 @@ class StatusListView(ListView): def get_queryset(self): return super(StatusListView, self).get_queryset()\ .filter(django_user_id=self.request.user.id)\ - .prefetch_related('sensors')\ + .prefetch_related(Prefetch('sensors', queryset=SiteSensor.objects.filter(variable_code__in=[ + 'EnviroDIY_Mayfly_Volt', + 'EnviroDIY_Mayfly_Temp', + 'EnviroDIY_Mayfly_FreeSRAM' + ]), to_attr='status_sensors')) \ .annotate(latest_measurement=Max('sensors__last_measurement_datetime')) def get_context_data(self, **kwargs): context = super(StatusListView, self).get_context_data(**kwargs) - context['followed_sites'] = self.request.user.followed_sites.all() + context['followed_sites'] = self.request.user.followed_sites \ + .prefetch_related(Prefetch('sensors', queryset=SiteSensor.objects.filter(variable_code__in=[ + 'EnviroDIY_Mayfly_Volt', + 'EnviroDIY_Mayfly_Temp', + 'EnviroDIY_Mayfly_FreeSRAM' + ]), to_attr='status_sensors')) return context From c9536c67fb0ea52c0148c41a03494b149c0bd113 Mon Sep 17 00:00:00 2001 From: Mauriel Date: Tue, 5 Dec 2017 09:39:01 -0700 Subject: [PATCH 11/16] Sparkline padding to prevent graph cropping on the edge --- .../static/dataloaderinterface/css/style.css | 2 +- .../static/dataloaderinterface/js/device-detail.js | 11 ++++++++--- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/src/dataloaderinterface/static/dataloaderinterface/css/style.css b/src/dataloaderinterface/static/dataloaderinterface/css/style.css index 4199c0a7..85658b44 100644 --- a/src/dataloaderinterface/static/dataloaderinterface/css/style.css +++ b/src/dataloaderinterface/static/dataloaderinterface/css/style.css @@ 
-285,7 +285,7 @@ svg.stale { } svg.not-stale { - background-color: #f8fff7; + background-color: #ebffe8; } .last-observation { diff --git a/src/dataloaderinterface/static/dataloaderinterface/js/device-detail.js b/src/dataloaderinterface/static/dataloaderinterface/js/device-detail.js index c8950931..e75066ac 100644 --- a/src/dataloaderinterface/static/dataloaderinterface/js/device-detail.js +++ b/src/dataloaderinterface/static/dataloaderinterface/js/device-detail.js @@ -134,10 +134,15 @@ function drawSparklinePlot(seriesInfo, seriesData) { var xAxis = d3.scaleTime().range([0, width]); var yAxis = d3.scaleLinear().range([height, 0]); + var yDomain = d3.extent(seriesData, function(d) { + return parseFloat(d.Value); + }); + var yPadding = parseInt((yDomain[1] - yDomain[0]) / 20); // 5% padding + yDomain[0] -= yPadding; + yDomain[1] += yPadding; + xAxis.domain([dataTimeOffset, lastRead]); - yAxis.domain(d3.extent(seriesData, function(d) { - return parseInt(d.Value); - })); + yAxis.domain(yDomain); var line = d3.line() .x(function(d) { From c6f3c65be1a1f6576a5ee6c716e39ce4ab8fb88a Mon Sep 17 00:00:00 2001 From: Mauriel Date: Tue, 5 Dec 2017 09:43:02 -0700 Subject: [PATCH 12/16] Removing type casting --- .../static/dataloaderinterface/js/device-detail.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dataloaderinterface/static/dataloaderinterface/js/device-detail.js b/src/dataloaderinterface/static/dataloaderinterface/js/device-detail.js index e75066ac..318d9e5d 100644 --- a/src/dataloaderinterface/static/dataloaderinterface/js/device-detail.js +++ b/src/dataloaderinterface/static/dataloaderinterface/js/device-detail.js @@ -137,7 +137,7 @@ function drawSparklinePlot(seriesInfo, seriesData) { var yDomain = d3.extent(seriesData, function(d) { return parseFloat(d.Value); }); - var yPadding = parseInt((yDomain[1] - yDomain[0]) / 20); // 5% padding + var yPadding = (yDomain[1] - yDomain[0]) / 20; // 5% padding yDomain[0] -= yPadding; yDomain[1] += yPadding; From ef8d3998182a03ebf34ea768f6e85701708172ab Mon Sep 17 00:00:00 2001 From: Mauriel Date: Tue, 5 Dec 2017 10:16:23 -0700 Subject: [PATCH 13/16] Cancel edit/registration button --- .../static/dataloaderinterface/css/style.css | 2 +- .../dataloaderinterface/site_registration.html | 10 ++++++++++ 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/src/dataloaderinterface/static/dataloaderinterface/css/style.css b/src/dataloaderinterface/static/dataloaderinterface/css/style.css index 85658b44..bcb92176 100644 --- a/src/dataloaderinterface/static/dataloaderinterface/css/style.css +++ b/src/dataloaderinterface/static/dataloaderinterface/css/style.css @@ -314,7 +314,7 @@ svg.not-stale { width: 100%; } -.buttons-toolbar .button-link:hover { +.button-link:hover { text-decoration: none; } diff --git a/src/dataloaderinterface/templates/dataloaderinterface/site_registration.html b/src/dataloaderinterface/templates/dataloaderinterface/site_registration.html index d0245d63..e640655b 100644 --- a/src/dataloaderinterface/templates/dataloaderinterface/site_registration.html +++ b/src/dataloaderinterface/templates/dataloaderinterface/site_registration.html @@ -178,6 +178,16 @@
+ + Cancel + +
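Only the "Cancel" label of the added markup is readable above; the control is essentially a link styled as a button that backs out of the registration/edit form. A rough sketch of what such markup could look like (the class names and the target URL name are assumptions, not taken from the patch):

    {# Rough sketch only; the real template may use different classes and a different target. #}
    <a class="button-link" href="{% url 'sites_list' %}">
        <button type="button" class="mdl-button mdl-js-button mdl-button--raised">Cancel</button>
    </a>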