feat: Implement Smart Router workflow with User Input and Duplicate Logic (v0.18.0)

Major update to the scanning engine to support "Pause & Resume" workflows.
The system can now halt execution to ask for user input (e.g. Weight) and resume
processing seamlessly.

Key Changes:
- Backend (Global Actions): Added `OPEN_FORM` action type to pause pipeline and request manual input.
- Backend (Routes): Updated `scan_lot` to handle `extra_data` payloads, allowing the pipeline to resume after user input.
- Backend (Logic): Implemented `confirm_duplicate` gatekeeper to handle "Warn vs Block" logic dynamically.
- Frontend (JS): Added `processSmartScan` to handle router signals (Open Modal, Warn Duplicate).
- Frontend (JS): Added `saveSmartScanData` to send original barcode + new form data back to the engine.
- UI: Fixed modal ID/Name conflicts (forcing use of `name` attribute for DB compatibility).
- UI: Restored missing "Cancel" button to Details Modal.
- Config: Added "User Input" rule type to the Rule Editor.

Ver: 0.18.0
Commit author: Javier
Date: 2026-02-09 00:34:41 -06:00
parent ea8551043f
commit 363295762a
12 changed files with 1184 additions and 220 deletions

View File

@@ -6,11 +6,13 @@ from flask import render_template, request, redirect, url_for, flash, jsonify, s
from db import query_db, execute_db
from utils import login_required, role_required
from datetime import datetime
from global_actions import execute_pipeline
import sqlite3
import io
import os
def register_routes(bp):
"""Register all conssheets routes on the blueprint"""
@@ -219,7 +221,103 @@ def register_routes(bp):
header_fields=header_fields,
detail_fields=detail_fields)
@bp.route('/admin/consumption-sheets/<int:process_id>/router')
@role_required('owner', 'admin')
def process_router(process_id):
    """Render the IFTTT-style routing-rule list for a single process."""
    # Guard clause: bail out to the process list if the id is unknown.
    proc = query_db('SELECT * FROM cons_processes WHERE id = ?', [process_id], one=True)
    if not proc:
        flash('Process not found', 'danger')
        return redirect(url_for('conssheets.admin_processes'))

    # Rules are evaluated in ascending line-number order (10, 20, ...),
    # so fetch them pre-sorted for display.
    router_rules = query_db('''
        SELECT * FROM cons_process_router
        WHERE process_id = ?
        ORDER BY line_number ASC
    ''', [process_id])

    return render_template('conssheets/process_router.html',
                           process=proc,
                           rules=router_rules)
@bp.route('/admin/consumption-sheets/<int:process_id>/router/add', methods=['POST'])
@role_required('owner', 'admin')
def add_router_rule(process_id):
    """Create a new routing rule for a process from posted form data."""
    # The parent process must exist before we attach a rule to it.
    owner_process = query_db('SELECT * FROM cons_processes WHERE id = ?', [process_id], one=True)
    if not owner_process:
        flash('Process not found', 'danger')
        return redirect(url_for('conssheets.admin_processes'))

    form = request.form
    line_number = form.get('line_number')
    rule_name = form.get('rule_name')
    match_pattern = form.get('match_pattern')

    # Reject the submission unless every field was supplied.
    if not all((line_number, rule_name, match_pattern)):
        flash('All fields are required', 'danger')
    else:
        try:
            # New rules start active with an empty action chain ('[]').
            execute_db('''
                INSERT INTO cons_process_router
                (process_id, line_number, rule_name, match_pattern, actions_json, is_active)
                VALUES (?, ?, ?, ?, '[]', 1)
            ''', [process_id, line_number, rule_name, match_pattern])
            flash(f'Rule {line_number} created successfully!', 'success')
        except Exception as e:
            flash(f'Error creating rule: {str(e)}', 'danger')

    # Success or failure, the user lands back on the router page.
    return redirect(url_for('conssheets.process_router', process_id=process_id))
@bp.route('/admin/consumption-sheets/<int:process_id>/router/<int:rule_id>/edit', methods=['GET', 'POST'])
@role_required('owner', 'admin')
def edit_router_rule(process_id, rule_id):
    """Edit one routing rule: its basic info plus its JSON logic-action chain."""
    proc = query_db('SELECT * FROM cons_processes WHERE id = ?', [process_id], one=True)
    rule_row = query_db('SELECT * FROM cons_process_router WHERE id = ?', [rule_id], one=True)
    if not (proc and rule_row):
        flash('Rule not found', 'danger')
        return redirect(url_for('conssheets.process_router', process_id=process_id))

    # Active fields populate the Logic Editor dropdowns on the edit page.
    active_fields = query_db('''
        SELECT * FROM cons_process_fields
        WHERE process_id = ? AND is_active = 1
        ORDER BY table_type, sort_order
    ''', [process_id])

    if request.method == 'POST':
        # Basic info plus the serialized action chain, which arrives as a
        # raw JSON string in a hidden form input (default: empty list).
        update_params = [
            request.form.get('line_number'),
            request.form.get('rule_name'),
            request.form.get('match_pattern'),
            request.form.get('actions_json', '[]'),
            rule_id,
        ]
        try:
            execute_db('''
                UPDATE cons_process_router
                SET line_number = ?, rule_name = ?, match_pattern = ?, actions_json = ?
                WHERE id = ?
            ''', update_params)
            flash('Rule configuration saved!', 'success')
        except Exception as e:
            flash(f'Error saving rule: {str(e)}', 'danger')
        return redirect(url_for('conssheets.edit_router_rule', process_id=process_id, rule_id=rule_id))

    return render_template('conssheets/edit_rule.html', process=proc, rule=rule_row, fields=active_fields)
@bp.route('/admin/consumption-sheets/<int:process_id>/fields')
@role_required('owner', 'admin')
def process_fields(process_id):
@@ -693,136 +791,62 @@ def register_routes(bp):
# NOTE(review): this hunk interleaves the OLD duplicate-detection implementation
# and the NEW router/pipeline implementation with the diff +/- markers stripped.
# As shown it is not runnable (duplicated SQL inside one string literal, two
# `if not sess` guards, unreachable old logic after the pipeline setup).
# Reconcile against the real post-commit file before editing further.
@bp.route('/session/<int:session_id>/scan', methods=['POST'])
@login_required
def scan_lot(session_id):
"""Process a scan with duplicate detection using dynamic tables"""
from global_actions import execute_pipeline
import re
import json
# 1. Setup Context & Get Session
# We need the process_key to know which table to save to
# NOTE(review): two SELECT variants appear inside one triple-quoted string —
# old version filters on cs.status = 'active', new version does not. Diff residue.
sess = query_db('''
SELECT cs.*, cp.process_key, cp.id as process_id
FROM cons_sessions cs
JOIN cons_processes cp ON cs.process_id = cp.id
WHERE cs.id = ? AND cs.status = 'active'
SELECT cs.*, cp.process_key, cp.id as process_id
FROM cons_sessions cs
JOIN cons_processes cp ON cs.process_id = cp.id
WHERE cs.id = ?
''', [session_id], one=True)
# NOTE(review): duplicated guard below — old ('Session not found or archived')
# vs new ('Session invalid') message; only one should survive.
if not sess:
return jsonify({'success': False, 'message': 'Session not found or archived'})
if not sess:
return jsonify({'success': False, 'message': 'Session invalid'})
# 2. Get Data from Frontend
data = request.get_json()
field_values = data.get('field_values', {}) # Dict of field_name: value
confirm_duplicate = data.get('confirm_duplicate', False)
check_only = data.get('check_only', False)
barcode = data.get('barcode', '').strip()
# Get the duplicate key field
# NOTE(review): dup_key_field belongs to the OLD duplicate-detection path;
# the NEW path delegates duplicate handling to execute_pipeline.
dup_key_field = get_duplicate_key_field(sess['process_id'])
# 3. Find Matching Rule (The Routing)
# First active rule (ascending line_number) whose regex matches the barcode wins.
matched_rule = None
if barcode:
rules = query_db('SELECT * FROM cons_process_router WHERE process_id = ? AND is_active = 1 ORDER BY line_number ASC', [sess['process_id']])
for rule in rules:
try:
if re.search(rule['match_pattern'], barcode):
matched_rule = rule
break
# Invalid regex patterns are skipped rather than aborting the scan.
except: continue
if not dup_key_field:
return jsonify({'success': False, 'message': 'No duplicate key field configured for this process'})
if not matched_rule:
return jsonify({'success': False, 'message': f"❌ No rule matched: {barcode}"})
# 4. Execute Pipeline (The Processing)
# Context handed to the global engine: target detail table, who scanned,
# and the resume payload (duplicate confirmation + previously collected inputs).
context = {
'table_name': f"cons_proc_{sess['process_key']}_details",
'session_id': session_id,
'user_id': session.get('user_id'),
# CRITICAL FIXES:
# Pass the "Yes" flag so it doesn't ask about duplicates again
'confirm_duplicate': data.get('confirm_duplicate', False),
# Pass the "Weight" (or other inputs) so it doesn't open the form again
'extra_data': data.get('field_values') or data.get('extra_data')
}
# NOTE(review): everything from here down to the big jsonify return appears to
# be the REMOVED (pre-0.18.0) inline duplicate-detection/insert logic; the new
# implementation resumes at the try/except that calls execute_pipeline.
dup_key_value = field_values.get(dup_key_field['field_name'], '').strip()
if not dup_key_value:
return jsonify({'success': False, 'message': f'{dup_key_field["field_label"]} is required'})
table_name = get_detail_table_name(sess['process_key'])
# Check for duplicates in SAME session
same_session_dup = query_db(f'''
SELECT * FROM {table_name}
WHERE session_id = ? AND {dup_key_field['field_name']} = ? AND is_deleted = 0
''', [session_id, dup_key_value], one=True)
# Check for duplicates in OTHER sessions (need to check all sessions of same process type)
# Correlated subquery pulls the Work Order header value (field_name LIKE '%wo%')
# from the other session so the warning message can cite it.
other_session_dup = query_db(f'''
SELECT t.*, cs.id as other_session_id, cs.created_at as other_session_date,
u.full_name as other_user,
(SELECT field_value FROM cons_session_header_values
WHERE session_id = cs.id AND field_id = (
SELECT id FROM cons_process_fields
WHERE process_id = cs.process_id AND field_name LIKE '%wo%' AND is_active = 1 LIMIT 1
)) as other_wo
FROM {table_name} t
JOIN cons_sessions cs ON t.session_id = cs.id
JOIN Users u ON t.scanned_by = u.user_id
WHERE t.{dup_key_field['field_name']} = ? AND t.session_id != ? AND t.is_deleted = 0
ORDER BY t.scanned_at DESC
LIMIT 1
''', [dup_key_value, session_id], one=True)
# Classify the scan: normal, duplicate within this session, or duplicate
# in a different session (same-session match takes precedence).
duplicate_status = 'normal'
duplicate_info = None
needs_confirmation = False
if same_session_dup:
duplicate_status = 'dup_same_session'
duplicate_info = 'Already scanned in this session'
needs_confirmation = True
elif other_session_dup:
duplicate_status = 'dup_other_session'
dup_date = other_session_dup['other_session_date'][:10] if other_session_dup['other_session_date'] else 'Unknown'
dup_user = other_session_dup['other_user'] or 'Unknown'
dup_wo = other_session_dup['other_wo'] or 'N/A'
duplicate_info = f"Previously scanned on {dup_date} by {dup_user} on WO {dup_wo}"
needs_confirmation = True
# If just checking, return early
if check_only:
if needs_confirmation:
return jsonify({
'success': False,
'needs_confirmation': True,
'duplicate_status': duplicate_status,
'duplicate_info': duplicate_info,
'message': duplicate_info
})
return jsonify({'success': True, 'needs_confirmation': False})
# If needs confirmation and not confirmed, ask user
if needs_confirmation and not confirm_duplicate:
return jsonify({
'success': False,
'needs_confirmation': True,
'duplicate_status': duplicate_status,
'duplicate_info': duplicate_info,
'message': duplicate_info
})
# Get all active detail fields for this process
detail_fields = query_db('''
SELECT * FROM cons_process_fields
WHERE process_id = ? AND table_type = 'detail' AND is_active = 1
ORDER BY sort_order, id
''', [sess['process_id']])
# Build dynamic INSERT statement
# Fixed audit columns first, then one column per configured detail field.
field_names = ['session_id', 'scanned_by', 'duplicate_status', 'duplicate_info']
field_placeholders = ['?', '?', '?', '?']
values = [session_id, session['user_id'], duplicate_status, duplicate_info]
for field in detail_fields:
field_names.append(field['field_name'])
field_placeholders.append('?')
values.append(field_values.get(field['field_name'], ''))
insert_sql = f'''
INSERT INTO {table_name} ({', '.join(field_names)})
VALUES ({', '.join(field_placeholders)})
'''
detail_id = execute_db(insert_sql, values)
# If this is a same-session duplicate, update the original scan too
updated_entry_ids = []
if duplicate_status == 'dup_same_session' and same_session_dup:
execute_db(f'''
UPDATE {table_name}
SET duplicate_status = 'dup_same_session', duplicate_info = 'Duplicate'
WHERE id = ?
''', [same_session_dup['id']])
updated_entry_ids.append(same_session_dup['id'])
return jsonify({
'success': True,
'detail_id': detail_id,
'duplicate_status': duplicate_status,
'updated_entry_ids': updated_entry_ids
})
# NOTE(review): NEW implementation resumes here — the matched rule's JSON
# action chain is decoded and handed to the global pipeline engine.
try:
# The global engine handles Map, Clean, Duplicate, Input, and Save!
actions = json.loads(matched_rule['actions_json'])
result = execute_pipeline(actions, barcode, context)
return jsonify(result)
except Exception as e:
return jsonify({'success': False, 'message': f"System Error: {str(e)}"})
@bp.route('/session/<int:session_id>/detail/<int:detail_id>')