Feat: Assessment page completed with save method combining Assessment, Distraction, Assessment Command Modality Link, and Assessment Response saves for creating a new complete Assessment.

This commit is contained in:
2025-07-28 22:41:38 +01:00
parent 1bbe6f0040
commit 574d60442a
122 changed files with 3261 additions and 1057 deletions

View File

@@ -15,6 +15,9 @@ Dog Assessments Page Controller.
from business_objects.api import API
from business_objects.dog.command import Command
from business_objects.dog.assessment import Assessment
from business_objects.dog.assessment_command_modality_link import Assessment_Command_Modality_Link
from business_objects.dog.assessment_response import Assessment_Response
from business_objects.dog.distraction import Distraction
from datastores.datastore_dog import DataStore_Dog
from forms.dog.assessment import Filters_Assessment
from helpers.helper_app import Helper_App
@@ -56,18 +59,31 @@ def assessments():
Helper_App.console_log(f'form_filters={form_filters}')
return render_template('pages/dog/_assessments.html', model = model)
@routes_dog_assessment.route(Model_View_Dog_Assessment.HASH_SAVE_DOG_ASSESSMENT, methods=['POST'])
@routes_dog_assessment.route(Model_View_Dog_Assessment.HASH_SAVE_DOG_ASSESSMENT_DISTRACTION_AND_RESPONSE, methods=['POST'])
def save_assessment():
Helper_App.console_log('save_assessment')
data = Helper_App.get_request_data(request)
try:
token_received = data.get(Model_View_Dog_Assessment.FLAG_CSRF_TOKEN)
token_expected = session.get(Model_View_Dog_Assessment.FLAG_CSRF_TOKEN) # 'csrf_token'
if (not token_received) or not hmac.compare_digest(token_received, token_expected):
Helper_App.console_log(f'token_received: {token_received}\ntoken_expected: {token_expected}')
Helper_App.console_log(f'session: {session}\nkey: {Model_View_Dog_Assessment.FLAG_CSRF_TOKEN}')
return jsonify({
Model_View_Dog_Assessment.FLAG_STATUS: Model_View_Dog_Assessment.FLAG_FAILURE,
Model_View_Dog_Assessment.FLAG_MESSAGE: f'Invalid token received.'
})
form_filters = Filters_Assessment.from_json(data[Model_View_Dog_Assessment.FLAG_FORM_FILTERS])
"""
if not form_filters.validate_on_submit():
return jsonify({
Model_View_Dog_Assessment.FLAG_STATUS: Model_View_Dog_Assessment.FLAG_FAILURE,
Model_View_Dog_Assessment.FLAG_MESSAGE: f'Filters form invalid.\n{form_filters.errors}'
})
model_return = Model_View_Dog_Assessment(form_filters_old=form_filters)
if not model_return.is_user_logged_in:
"""
model_permissions = Model_View_Dog_Assessment(form_filters_old=form_filters)
Helper_App.console_log('made model')
if not model_permissions.is_user_logged_in:
raise Exception('User not logged in')
assessments = data[Model_View_Dog_Assessment.FLAG_ASSESSMENT]
@@ -77,19 +93,47 @@ def save_assessment():
Model_View_Dog_Assessment.FLAG_MESSAGE: f'No assessments.'
})
objs_assessment = []
objs_distraction = []
objs_assessment_command_modality_link = []
objs_assessment_responses = []
for assessment in assessments:
objs_assessment.append(Assessment.from_json(assessment))
objs_distraction.extend([Distraction.from_json(link_json) for link_json in assessment.get(Distraction.FLAG_DISTRACTION, [])])
for link_json in assessment.get(Assessment_Command_Modality_Link.FLAG_ASSESSMENT_COMMAND_MODALITY_LINK, []):
objs_assessment_command_modality_link.append(Assessment_Command_Modality_Link.from_json(link_json))
objs_assessment_responses.extend([Assessment_Response.from_json(response_json) for response_json in link_json.get(Assessment_Response.FLAG_ASSESSMENT_RESPONSE, [])])
"""
errors = []
Helper_App.console_log(f'objs_assessment={objs_assessment}')
errors = DataStore_Dog.save_assessments(data.get('comment', 'No comment'), objs_assessment)
if len(objs_assessment) > 0:
errors.extend(DataStore_Dog.save_assessments(data.get('comment', 'No comment'), objs_assessment))
Helper_App.console_log(f'objs_distraction={objs_distraction}')
if len(objs_distraction) > 0:
errors.extend(DataStore_Dog.save_distractions(data.get('comment', 'No comment'), objs_distraction))
Helper_App.console_log(f'objs_assessment_command_modality_link={objs_assessment_command_modality_link}')
if len(objs_assessment_command_modality_link) > 0:
errors.extend(DataStore_Dog.save_assessment_command_modality_links(data.get('comment', 'No comment'), objs_assessment_command_modality_link))
Helper_App.console_log(f'objs_assessment_responses={objs_assessment_responses}')
if len(objs_assessment_responses) > 0:
errors.extend(DataStore_Dog.save_assessment_responses(data.get('comment', 'No comment'), objs_assessment_responses))
"""
errors = DataStore_Dog.save_assessments_distactions_and_responses(
comment = data.get('comment', 'No comment')
, assessments = objs_assessment
, distractions = objs_distraction
, assessment_command_modality_links = objs_assessment_command_modality_link
, assessment_responses = objs_assessment_responses
)
# model_return = Model_View_Dog_Assessment(form_filters_old=form_filters)
if (len(errors) > 0):
return jsonify({
Model_View_Dog_Assessment.FLAG_STATUS: Model_View_Dog_Assessment.FLAG_FAILURE,
Model_View_Dog_Assessment.FLAG_MESSAGE: f'Error saving assessments.\n{model_return.convert_list_objects_to_json(errors)}'
Model_View_Dog_Assessment.FLAG_MESSAGE: f'Error saving assessments.\n{model_permissions.convert_list_objects_to_json(errors)}'
})
return jsonify({
Model_View_Dog_Assessment.FLAG_STATUS: Model_View_Dog_Assessment.FLAG_SUCCESS,
Model_View_Dog_Assessment.FLAG_DATA: Model_View_Dog_Assessment.convert_list_objects_to_json(model_return.assessments)
Model_View_Dog_Assessment.FLAG_DATA: None # Model_View_Dog_Assessment.convert_list_objects_to_json(model_permissions.assessments)
})
except Exception as e:
return jsonify({
@@ -107,12 +151,17 @@ def assessment():
Helper_App.console_log(f'Error: {e}')
form_filters = Filters_Assessment()
Helper_App.console_log(f'form_filters={form_filters}')
id_assessment = request.args.get(Model_View_Dog_Assessment.ATTR_ID_ASSESSMENT, None)
temp_id_assessment = request.args.get(Model_View_Dog_Assessment.ATTR_ID_ASSESSMENT, -1)
id_assessment = None if (temp_id_assessment is None or temp_id_assessment == '') else int(temp_id_assessment)
model = Model_View_Dog_Assessment(form_filters_old = form_filters, id_assessment = id_assessment, hash_page_current = Model_View_Dog_Assessment.HASH_PAGE_DOG_ASSESSMENT)
model._title = 'Assessment'
if model.assessments is None or len(model.assessments) == 0:
return assessments()
if id_assessment is not None and id_assessment > 0:
return assessments()
else:
model.assessments = [Assessment()]
if not model.is_user_logged_in:
return redirect(url_for('routes_core_home.home'))
session[Model_View_Dog_Assessment.FLAG_CSRF_TOKEN] = model.form_filters.csrf_token.current_token #.hidden_tag()
Helper_App.console_log(f'form_filters={form_filters}')
return render_template('pages/dog/_assessment.html', model = model)

View File

@@ -20,7 +20,7 @@ from models.model_view_accessibility_statement import Model_View_Accessibility_S
from models.model_view_retention_schedule import Model_View_Retention_Schedule
import lib.argument_validation as av
# external
from flask import render_template, Blueprint
from flask import render_template, Blueprint, send_from_directory
routes_legal = Blueprint('routes_legal', __name__)
@@ -67,4 +67,6 @@ def privacy_policy():
except Exception as e:
return str(e)
return html_body
@routes_legal.route('/robots.txt', methods=['GET'])
def robots_txt():
    """Serve the site's robots.txt from the static docs folder.

    Exposes the file at the root URL path ``/robots.txt`` (where crawlers
    look for it) while the file itself lives under ``static/docs/``.
    """
    # send_from_directory resolves the filename safely inside the given
    # directory, guarding against path traversal.
    static_directory = 'static'
    robots_file = 'docs/robots.txt'
    return send_from_directory(static_directory, robots_file)