v0.13.0 - Add Consumption Sheets module

New module for tracking production consumption with lot scanning:
- Admin configuration for process types (AD WIP, etc.)
- Dynamic table creation per process
- Flexible header/detail field definitions with Excel cell mapping
- Duplicate detection with configurable key field
- Staff scanning interface with duplicate warnings (same-session and cross-session)
- Excel export using uploaded templates with multi-page support
- Template settings for rows per page and detail start row
This commit is contained in:
Javier
2026-01-29 12:33:34 -06:00
parent b11421a8f5
commit d955a13f3d
11 changed files with 697 additions and 402 deletions

View File

@@ -198,6 +198,7 @@ def init_database():
field_type TEXT NOT NULL CHECK(field_type IN ('TEXT', 'INTEGER', 'REAL', 'DATE', 'DATETIME')),
max_length INTEGER,
is_required INTEGER DEFAULT 0,
is_duplicate_key INTEGER DEFAULT 0,
is_active INTEGER DEFAULT 1,
sort_order INTEGER DEFAULT 0,
excel_cell TEXT,
@@ -219,6 +220,7 @@ def init_database():
)
''')
# Note: Header values still use flexible key-value storage
# cons_session_header_values - Flexible storage for header field values
cursor.execute('''
CREATE TABLE IF NOT EXISTS cons_session_header_values (
@@ -231,24 +233,9 @@ def init_database():
)
''')
# cons_session_details - Scanned lot details
cursor.execute('''
CREATE TABLE IF NOT EXISTS cons_session_details (
id INTEGER PRIMARY KEY AUTOINCREMENT,
session_id INTEGER NOT NULL,
item_number TEXT,
lot_number TEXT NOT NULL,
weight REAL,
scanned_by INTEGER NOT NULL,
scanned_at DATETIME DEFAULT CURRENT_TIMESTAMP,
duplicate_status TEXT DEFAULT 'normal' CHECK(duplicate_status IN ('normal', 'dup_same_session', 'dup_other_session')),
duplicate_info TEXT,
comment TEXT,
is_deleted INTEGER DEFAULT 0,
FOREIGN KEY (session_id) REFERENCES cons_sessions(id),
FOREIGN KEY (scanned_by) REFERENCES Users(user_id)
)
''')
# Note: Detail tables are created dynamically per process as cons_proc_{process_key}_details
# They include system columns (id, session_id, scanned_by, scanned_at, duplicate_status,
# duplicate_info, comment, is_deleted) plus custom fields defined in cons_process_fields
# Create Indexes
# MASTER baseline indexes
@@ -271,8 +258,7 @@ def init_database():
cursor.execute('CREATE INDEX IF NOT EXISTS idx_cons_process_fields_active ON cons_process_fields(process_id, is_active)')
cursor.execute('CREATE INDEX IF NOT EXISTS idx_cons_sessions_process ON cons_sessions(process_id, status)')
cursor.execute('CREATE INDEX IF NOT EXISTS idx_cons_sessions_user ON cons_sessions(created_by, status)')
cursor.execute('CREATE INDEX IF NOT EXISTS idx_cons_session_details_session ON cons_session_details(session_id, is_deleted)')
cursor.execute('CREATE INDEX IF NOT EXISTS idx_cons_session_details_lot ON cons_session_details(lot_number)')
# Note: Detail table indexes are created dynamically when process tables are created
conn.commit()
conn.close()