V1.0.0.2 - Refactor: Split DB and Import logic, fixed CSV upload columns
This commit is contained in:
155
blueprints/data_imports.py
Normal file
155
blueprints/data_imports.py
Normal file
@@ -0,0 +1,155 @@
|
||||
import csv
|
||||
import io
|
||||
from flask import Blueprint, request, flash, redirect, url_for, session
|
||||
from db import execute_db, get_db
|
||||
|
||||
# Blueprint holding the CSV-import routes; registered by the app factory
# elsewhere in the project.
data_imports_bp = Blueprint('data_imports', __name__)
||||
def login_required_check():
    """Return ``True`` when a user is logged in (``user_id`` in the session)."""
    return 'user_id' in session
# --- ROUTE 1: Upload CURRENT Inventory (Global) ---
@data_imports_bp.route('/upload_current/<int:session_id>', methods=['POST'])
def upload_current(session_id):
    """Replace the global BaselineInventory_Current table from an uploaded CSV.

    Expects a multipart form field ``csv_file`` whose CSV has the headers:
    Item, Lot Number, Bin Number, On Hand, UOM.  The table is dropped and
    recreated, so a successful upload fully replaces the previous snapshot.

    Args:
        session_id: id of the count session the user came from; used only
            for the redirect target (the inventory table itself is global).

    Returns:
        A redirect back to the session detail page in every case; outcome is
        reported via ``flash``.
    """
    if not login_required_check():
        return redirect(url_for('login'))

    if 'csv_file' not in request.files:
        flash('No file part', 'danger')
        return redirect(url_for('session_detail', session_id=session_id))

    file = request.files['csv_file']
    if file.filename == '':
        flash('No selected file', 'danger')
        return redirect(url_for('session_detail', session_id=session_id))

    conn = get_db()
    cursor = conn.cursor()
    try:
        stream = io.StringIO(file.stream.read().decode("UTF8"), newline=None)
        csv_input = csv.DictReader(stream)

        # 1. Reset table: full replace, no incremental merge.
        cursor.execute('DROP TABLE IF EXISTS BaselineInventory_Current')
        cursor.execute('''
            CREATE TABLE BaselineInventory_Current (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                item TEXT,
                lot_number TEXT,
                system_bin TEXT,
                system_quantity REAL,
                uom TEXT
            )
        ''')

        # 2. Bulk insert with the correct CSV headers.
        rows_to_insert = []
        for row in csv_input:
            # Clean up keys (remove hidden characters/spaces) and guard
            # against the None values DictReader yields for short rows.
            row = {k.strip(): (v if v is not None else '')
                   for k, v in row.items()}

            # FIX: parse the quantity defensively.  Blank cells and
            # comma-grouped numbers (e.g. "1,234.5") previously went into
            # the REAL column verbatim.
            raw_qty = str(row.get('On Hand', 0)).replace(',', '').strip()
            try:
                qty = float(raw_qty) if raw_qty else 0.0
            except ValueError:
                qty = 0.0

            # FIX: strip values for consistency with upload_master.
            rows_to_insert.append((
                row.get('Item', '').strip(),
                row.get('Lot Number', '').strip(),   # header is 'Lot Number', not 'Lot'
                row.get('Bin Number', '').strip(),   # header is 'Bin Number', not 'Bin'
                qty,                                  # header is 'On Hand', not 'Qty'
                row.get('UOM', 'LBS').strip(),
            ))

        cursor.executemany('''
            INSERT INTO BaselineInventory_Current
            (item, lot_number, system_bin, system_quantity, uom)
            VALUES (?, ?, ?, ?, ?)
        ''', rows_to_insert)

        # 3. Update timestamp.
        # NOTE(review): no WHERE clause, so this stamps EVERY row of
        # CountSessions — unlike upload_master, which filters by session_id.
        # Possibly intentional since this baseline is global; confirm.
        cursor.execute('UPDATE CountSessions SET current_baseline_timestamp = CURRENT_TIMESTAMP')

        conn.commit()
        flash(f'Successfully uploaded {len(rows_to_insert)} records.', 'success')

    except Exception as e:
        conn.rollback()
        flash(f'Error uploading CSV: {str(e)}', 'danger')
    finally:
        conn.close()

    return redirect(url_for('session_detail', session_id=session_id))
||||
# --- ROUTE 2: Upload MASTER Baseline (Session Specific) ---
@data_imports_bp.route('/session/<int:session_id>/upload_master', methods=['POST'])
def upload_master(session_id):
    """Load the session-specific MASTER baseline from an uploaded CSV.

    Expects a multipart form field ``csv_file`` with headers: Item,
    Description, Location, Lot Number, Bin Number, On Hand.  Duplicate
    (lot, bin) rows are consolidated in memory by summing their quantities
    before a single bulk insert into BaselineInventory_Master.

    Args:
        session_id: count session the baseline belongs to; stamped on every
            inserted row and used for the timestamp update and redirect.

    Returns:
        A redirect back to the session detail page in every case; outcome is
        reported via ``flash``.
    """
    if not login_required_check():
        return redirect(url_for('login'))

    if 'csv_file' not in request.files:
        flash('No file uploaded', 'danger')
        return redirect(url_for('session_detail', session_id=session_id))

    file = request.files['csv_file']
    if file.filename == '':
        flash('No file selected', 'danger')
        return redirect(url_for('session_detail', session_id=session_id))

    conn = get_db()
    cursor = conn.cursor()
    try:
        stream = io.StringIO(file.stream.read().decode("UTF8"), newline=None)
        csv_reader = csv.DictReader(stream)

        lot_location_data = {}

        # Consolidate duplicate (lot, bin) pairs in memory.
        for row in csv_reader:
            # Clean keys, and guard against the None values DictReader
            # yields for short rows (None.strip() would raise TypeError).
            row = {k.strip(): (v if v is not None else '')
                   for k, v in row.items()}

            lot_num = row.get('Lot Number', '').strip()
            bin_num = row.get('Bin Number', '').strip()
            key = (lot_num, bin_num)

            # FIX: float('') raises ValueError, which previously aborted the
            # entire upload on the first blank quantity cell.  Parse
            # defensively and treat unparseable cells as 0.
            raw_qty = str(row.get('On Hand', 0)).replace(',', '').strip()
            try:
                qty = float(raw_qty) if raw_qty else 0.0
            except ValueError:
                qty = 0.0

            if key in lot_location_data:
                lot_location_data[key]['quantity'] += qty
            else:
                lot_location_data[key] = {
                    'item': row.get('Item', '').strip(),
                    'description': row.get('Description', '').strip(),
                    'location': row.get('Location', '').strip(),
                    'bin': bin_num,
                    'quantity': qty
                }

        # Bulk insert of the consolidated rows.
        # NOTE(review): prior Master rows for this session are NOT deleted
        # first, so re-uploading appends duplicates — confirm whether a
        # DELETE ... WHERE session_id = ? should precede this.
        rows_to_insert = [
            (
                session_id,
                lot_num,
                data['item'],
                data['description'],
                data['location'],
                data['bin'],
                data['quantity'],
            )
            for (lot_num, bin_num), data in lot_location_data.items()
        ]

        cursor.executemany('''
            INSERT INTO BaselineInventory_Master
            (session_id, lot_number, item, description, system_location, system_bin, system_quantity)
            VALUES (?, ?, ?, ?, ?, ?, ?)
        ''', rows_to_insert)

        cursor.execute('UPDATE CountSessions SET master_baseline_timestamp = CURRENT_TIMESTAMP WHERE session_id = ?', [session_id])
        conn.commit()
        flash(f'✅ MASTER baseline uploaded: {len(rows_to_insert)} records', 'success')

    except Exception as e:
        conn.rollback()
        flash(f'Error uploading Master CSV: {str(e)}', 'danger')
    finally:
        conn.close()

    return redirect(url_for('session_detail', session_id=session_id))
||||
Reference in New Issue
Block a user