V1.0.0.2 - Refactor: Split DB and Import logic, fixed CSV upload columns
app.py: 149 lines changed (5 additions, 144 deletions)
@@ -11,8 +11,13 @@ import csv
 import os
 from datetime import datetime, timedelta
 from io import StringIO
+from db import query_db, execute_db, get_db
+from blueprints.data_imports import data_imports_bp
 
 app = Flask(__name__)
+
+app.register_blueprint(data_imports_bp)
+
 # V1.0: Use environment variable for production, fallback to demo key for development
 app.secret_key = os.environ.get('SCANLOOK_SECRET_KEY', 'scanlook-demo-key-replace-for-production')
 app.config['DATABASE'] = os.path.join(os.path.dirname(__file__), 'database', 'scanlook.db')
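The two added imports assume modules created elsewhere in this commit but not shown on this page: db, which takes over the helpers deleted in the next hunk (a sketch follows it), and blueprints.data_imports, which presumably received the upload route deleted further down (a sketch follows that hunk).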
@@ -21,33 +26,6 @@ app.config['DATABASE'] = os.path.join(os.path.dirname(__file__), 'database', 'scanlook.db')
 app.config['PERMANENT_SESSION_LIFETIME'] = timedelta(hours=1)
 
 
-# ==================== DATABASE HELPERS ====================
-
-def get_db():
-    """Get database connection"""
-    conn = sqlite3.connect(app.config['DATABASE'])
-    conn.row_factory = sqlite3.Row
-    return conn
-
-
-def query_db(query, args=(), one=False):
-    """Query database helper"""
-    conn = get_db()
-    cursor = conn.execute(query, args)
-    rv = cursor.fetchall()
-    conn.close()
-    return (rv[0] if rv else None) if one else rv
-
-
-def execute_db(query, args=()):
-    """Execute database insert/update/delete"""
-    conn = get_db()
-    cursor = conn.execute(query, args)
-    conn.commit()
-    last_id = cursor.lastrowid
-    conn.close()
-    return last_id
-
 
 # ==================== AUTHENTICATION DECORATORS ====================
 
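The deleted helpers are now imported from db. The diff confirms only the module name and the three function names; since db.py can no longer reference app directly, here is a minimal sketch, assuming the path is read via flask.current_app:

# db.py - minimal sketch: function bodies copied from the deleted code above;
# reading the database path via current_app (instead of app) is an assumption
import sqlite3

from flask import current_app


def get_db():
    """Get database connection"""
    conn = sqlite3.connect(current_app.config['DATABASE'])
    conn.row_factory = sqlite3.Row
    return conn


def query_db(query, args=(), one=False):
    """Query database helper"""
    conn = get_db()
    cursor = conn.execute(query, args)
    rv = cursor.fetchall()
    conn.close()
    return (rv[0] if rv else None) if one else rv


def execute_db(query, args=()):
    """Execute database insert/update/delete"""
    conn = get_db()
    cursor = conn.execute(query, args)
    conn.commit()
    last_id = cursor.lastrowid
    conn.close()
    return last_id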
@@ -383,123 +361,6 @@ def get_status_details(session_id, status):
         return jsonify({'success': False, 'message': f'Error: {str(e)}'})
 
 
-@app.route('/session/<int:session_id>/upload_baseline', methods=['POST'])
-@role_required('owner', 'admin')
-def upload_baseline(session_id):
-    """Upload MASTER or CURRENT baseline CSV"""
-    baseline_type = request.form.get('baseline_type', 'master')
-
-    if 'csv_file' not in request.files:
-        flash('No file uploaded', 'danger')
-        return redirect(url_for('session_detail', session_id=session_id))
-
-    file = request.files['csv_file']
-
-    if file.filename == '':
-        flash('No file selected', 'danger')
-        return redirect(url_for('session_detail', session_id=session_id))
-
-    try:
-        # Read CSV
-        stream = StringIO(file.stream.read().decode("UTF8"), newline=None)
-        csv_reader = csv.DictReader(stream)
-
-        # Validate columns
-        required_columns = ['Item', 'Description', 'Lot Number', 'Location', 'Bin Number', 'On Hand']
-        if not all(col in csv_reader.fieldnames for col in required_columns):
-            flash(f'CSV missing required columns. Need: {", ".join(required_columns)}', 'danger')
-            return redirect(url_for('session_detail', session_id=session_id))
-
-        conn = get_db()
-        cursor = conn.cursor()
-        count = 0
-
-        if baseline_type == 'master':
-            # Upload MASTER baseline - consolidate duplicates by location
-            lot_location_data = {}
-
-            for row in csv_reader:
-                lot_num = row['Lot Number'].strip()
-                bin_num = row['Bin Number'].strip()
-                key = (lot_num, bin_num)
-
-                if key in lot_location_data:
-                    # Duplicate in same location - add to existing
-                    lot_location_data[key]['quantity'] += float(row['On Hand'])
-                else:
-                    # New lot/location combination
-                    lot_location_data[key] = {
-                        'item': row['Item'].strip(),
-                        'description': row['Description'].strip(),
-                        'location': row['Location'].strip(),
-                        'bin': bin_num,
-                        'quantity': float(row['On Hand'])
-                    }
-
-            # Insert consolidated data
-            for (lot_num, bin_num), data in lot_location_data.items():
-                cursor.execute('''
-                    INSERT INTO BaselineInventory_Master
-                    (session_id, lot_number, item, description, system_location, system_bin, system_quantity)
-                    VALUES (?, ?, ?, ?, ?, ?, ?)
-                ''', [
-                    session_id,
-                    lot_num,
-                    data['item'],
-                    data['description'],
-                    data['location'],
-                    data['bin'],
-                    data['quantity']
-                ])
-                count += 1
-
-            # Update session
-            cursor.execute('''
-                UPDATE CountSessions
-                SET master_baseline_timestamp = CURRENT_TIMESTAMP
-                WHERE session_id = ?
-            ''', [session_id])
-
-            flash(f'✅ MASTER baseline uploaded: {count} records', 'success')
-
-        else:
-            # Upload CURRENT baseline (GLOBAL - not session-specific)
-            # Simple: Delete all old data, insert new data
-
-            # Delete all existing CURRENT data
-            cursor.execute('DELETE FROM BaselineInventory_Current')
-
-            # Insert new CURRENT baseline
-            for row in csv_reader:
-                cursor.execute('''
-                    INSERT INTO BaselineInventory_Current
-                    (lot_number, item, description, system_location, system_bin, system_quantity)
-                    VALUES (?, ?, ?, ?, ?, ?)
-                ''', [
-                    row['Lot Number'].strip(),
-                    row['Item'].strip(),
-                    row['Description'].strip(),
-                    row['Location'].strip(),
-                    row['Bin Number'].strip(),
-                    float(row['On Hand'])
-                ])
-                count += 1
-
-            # Update ALL sessions with current timestamp
-            cursor.execute('''
-                UPDATE CountSessions
-                SET current_baseline_timestamp = CURRENT_TIMESTAMP
-            ''')
-
-            flash(f'✅ CURRENT baseline uploaded: {count} records (global)', 'success')
-
-        conn.commit()
-        conn.close()
-
-    except Exception as e:
-        flash(f'Error uploading CSV: {str(e)}', 'danger')
-
-    return redirect(url_for('session_detail', session_id=session_id))
-
-
 # ==================== ROUTES: COUNTING (STAFF) ====================
 
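The deleted route presumably moved into the new blueprint. The commit title says the CSV upload columns were fixed, but the new column list is not visible on this page, so the sketch below reuses the deleted one; only the data_imports_bp name is confirmed by the import above, and everything else (route path kept as-is, role check omitted, abridged body) is an assumption:

# blueprints/data_imports.py - abridged sketch of the moved route; only
# data_imports_bp is confirmed by the diff, the rest is an assumption
import csv
from io import StringIO

from flask import Blueprint, flash, redirect, request, url_for

data_imports_bp = Blueprint('data_imports', __name__)


@data_imports_bp.route('/session/<int:session_id>/upload_baseline', methods=['POST'])
def upload_baseline(session_id):
    """Upload MASTER or CURRENT baseline CSV (role_required omitted in sketch)"""
    if 'csv_file' not in request.files or request.files['csv_file'].filename == '':
        flash('No file uploaded', 'danger')
        return redirect(url_for('session_detail', session_id=session_id))

    stream = StringIO(request.files['csv_file'].stream.read().decode('UTF8'), newline=None)
    csv_reader = csv.DictReader(stream)

    # Same validation as the deleted code; the "fixed" column list from the
    # commit title is not shown in this diff
    required_columns = ['Item', 'Description', 'Lot Number', 'Location', 'Bin Number', 'On Hand']
    if not all(col in (csv_reader.fieldnames or []) for col in required_columns):
        flash(f'CSV missing required columns. Need: {", ".join(required_columns)}', 'danger')
        return redirect(url_for('session_detail', session_id=session_id))

    # ... MASTER/CURRENT insert logic as in the deleted route, now via db.get_db() ...
    return redirect(url_for('session_detail', session_id=session_id))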