Updated API to accept public IP, changed the agent download to give full config options, removed Uvicorn — use Waitress on Windows, Gunicorn on Linux

This commit is contained in:
ghostersk
2025-05-28 09:12:44 +01:00
parent 605067180d
commit b5fc6728a8
14 changed files with 1099 additions and 426 deletions

3
.gitignore vendored
View File

@@ -2,4 +2,5 @@
database.db
__pycache__/
*.db
*.db.old
*.db.old
config.ini

View File

@@ -21,7 +21,8 @@ class Log(db.Model):
event_type = db.Column(db.String(20), nullable=False)
user_name = db.Column(db.String(50), nullable=False)
computer_name = db.Column(db.String(50), nullable=False)
ip_address = db.Column(db.String(15), nullable=False)
local_ip = db.Column(db.String(45), nullable=True) # Increased size to support IPv6, made nullable
public_ip = db.Column(db.String(45), nullable=True) # New field for public IP, nullable
timestamp = db.Column(db.DateTime, nullable=False, default=get_current_time_with_timezone)
retry = db.Column(db.Integer, default=0, nullable=False)
company_id = db.Column(db.Integer, db.ForeignKey('app_auth_companies.id'), nullable=True)

View File

@@ -144,7 +144,7 @@ def log_event():
Log.event_type == data['EventType'],
Log.user_name == data['UserName'],
Log.computer_name == data['ComputerName'],
Log.ip_address == data['IPAddress'],
Log.local_ip == data.get('LocalIP'),
Log.timestamp == timestamp
)
).first()
@@ -158,7 +158,8 @@ def log_event():
event_type=data['EventType'],
user_name=data['UserName'],
computer_name=data['ComputerName'],
ip_address=data['IPAddress'],
local_ip=data.get('LocalIP'),
public_ip=data.get('PublicIP'),
timestamp=timestamp,
retry=is_retry,
company_id=g.company_id, # Add the company ID from the API key
@@ -180,6 +181,8 @@ def log_event():
'event_type': data.get('EventType') if 'data' in locals() else None,
'user_name': data.get('UserName') if 'data' in locals() else None,
'computer_name': data.get('ComputerName') if 'data' in locals() else None,
'local_ip': data.get('LocalIP') if 'data' in locals() else None,
'public_ip': data.get('PublicIP') if 'data' in locals() else None,
'retry_attempt': data.get('retry', 0) if 'data' in locals() else None,
'error': str(e)
})

361
app.py
View File

@@ -1,10 +1,5 @@
from flask import Flask, session, request, send_from_directory, render_template
from asgiref.wsgi import WsgiToAsgi
from extensions import db, bcrypt, login_manager, get_env_var
from flask_migrate import Migrate
from auth.models import User, Settings, ApiKey
import ssl
import logging
from flask_wtf import CSRFProtect
from auth import auth_bp
from api import api_bp
@@ -13,7 +8,10 @@ from datetime import datetime, timezone, timedelta
import pytz
import configparser
import os
import uvicorn
import sys
import platform
import ssl
import logging
import argparse
from werkzeug.middleware.proxy_fix import ProxyFix
from werkzeug.exceptions import HTTPException
@@ -22,24 +20,43 @@ from flask_compress import Compress
from utils.security_headers import setup_security_headers
from utils.rate_limiter import apply_rate_limits
from utils.db_logging import setup_database_logging
from auth.models import User, Settings, ApiKey
# Removed SQLite encryption import
# Load configuration from ini file
config = configparser.ConfigParser()
config_file = os.path.join(os.path.dirname(__file__), 'config.ini')
config.read(config_file)
# Set up logging
# Set up logging first
logging.basicConfig(
level=logging.DEBUG if config.getboolean('app', 'APP_DEBUG', fallback=True) else logging.INFO,
level=logging.INFO, # Default level, will be updated after config is loaded
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)
app = Flask(__name__)
# Configuration setup with automatic config.ini management
from utils.config_manager import initialize_config
# Add Migrate after app initialization
migrate = Migrate(app, db)
config_file = os.path.join(os.path.dirname(__file__), 'config.ini')
# Initialize configuration with automatic creation/updating
try:
config = initialize_config(config_file, preserve_existing=True)
logger.info("Configuration initialized successfully")
# Update logging level based on config
debug_mode = config.getboolean('app', 'APP_DEBUG', fallback=False)
if debug_mode:
logging.getLogger().setLevel(logging.DEBUG)
logger.info("Debug mode enabled")
except Exception as e:
logger.error(f"Failed to initialize configuration: {e}")
# Fall back to basic ConfigParser if config manager fails
config = configparser.ConfigParser()
if os.path.exists(config_file):
config.read(config_file)
else:
logger.error(f"Configuration file {config_file} not found and could not be created")
exit(1)
app = Flask(__name__)
# Configure WSGI middleware for reverse proxy support (Traefik)
proxy_count = config.getint('proxy', 'PROXY_COUNT', fallback=1)
@@ -99,11 +116,11 @@ if config.getboolean('cache', 'ENABLE_COMPRESSION', fallback=True):
# Enable CSRF protection
csrf = CSRFProtect(app)
# Configure static files with caching
# Configure static files with caching and proper MIME types
@app.route('/favicon.ico')
def favicon():
response = send_from_directory(os.path.join(app.root_path, 'static', 'img'),
'favicon.ico', mimetype='image/ico')
'favicon.ico', mimetype='image/x-icon')
# Add cache headers manually instead of using cache_timeout
max_age = config.getint('cache', 'IMAGE_MAX_AGE', fallback=604800)
@@ -112,6 +129,62 @@ def favicon():
return response
# Add explicit static file serving with proper MIME types
@app.route('/static/<path:filename>')
def static_files(filename):
    """Serve static files with proper MIME types and cache headers.

    Known web-asset extensions are mapped explicitly (suffix match, same as
    the original endswith() chain); anything else falls back to the stdlib
    mimetypes guess, then to application/octet-stream.  Missing files yield
    a plain 404 instead of an HTML error page.
    """
    from flask import send_from_directory, abort
    from werkzeug.exceptions import NotFound
    import mimetypes

    # Explicit suffix -> MIME type table (replaces a long if/elif chain)
    suffix_mimetypes = {
        '.css': 'text/css',
        '.js': 'application/javascript',
        '.png': 'image/png',
        '.jpg': 'image/jpeg',
        '.jpeg': 'image/jpeg',
        '.gif': 'image/gif',
        '.ico': 'image/x-icon',
        '.woff': 'font/woff',
        '.woff2': 'font/woff2',
        '.ttf': 'font/ttf',
    }
    mimetype = next(
        (mt for ext, mt in suffix_mimetypes.items() if filename.endswith(ext)),
        None,
    )
    if mimetype is None:
        # Use mimetypes module for other files
        mimetype, _ = mimetypes.guess_type(filename)
        if not mimetype:
            mimetype = 'application/octet-stream'

    try:
        response = send_from_directory(app.static_folder, filename, mimetype=mimetype)
    except (FileNotFoundError, NotFound):
        # BUG FIX: Flask's send_from_directory raises werkzeug NotFound for a
        # missing file, not FileNotFoundError - the old handler never fired.
        # Return 404 for missing static files instead of redirecting to HTML pages.
        abort(404)

    # Add cache headers based on file type
    if filename.endswith(('.png', '.jpg', '.jpeg', '.gif', '.ico', '.woff', '.woff2', '.ttf')):
        max_age = config.getint('cache', 'IMAGE_MAX_AGE', fallback=604800)
    elif filename.endswith(('.js', '.css')):
        max_age = config.getint('cache', 'JS_CSS_MAX_AGE', fallback=43200)
    else:
        max_age = config.getint('cache', 'STATIC_MAX_AGE', fallback=86400)

    response.headers['Cache-Control'] = f'public, max-age={max_age}'
    response.headers['Expires'] = (datetime.now(timezone.utc) + timedelta(seconds=max_age)).strftime('%a, %d %b %Y %H:%M:%S GMT')
    # Add ETag for efficient caching
    response.add_etag()
    return response
# Setup security headers
setup_security_headers(app, config)
@@ -177,8 +250,6 @@ db.init_app(app)
bcrypt.init_app(app)
login_manager.init_app(app)
wsg = WsgiToAsgi(app)
# Register blueprints
app.register_blueprint(auth_bp, url_prefix='/auth')
app.register_blueprint(api_bp, url_prefix='/api')
@@ -190,6 +261,12 @@ apply_rate_limits(app, config)
def handle_http_exception(exc:HTTPException):
    """Render an HTTPException for the user: plain-text 404 for static
    assets, the shared error template for everything else."""
    logger.debug('HTTP error %s - %s', exc.code, exc.description)
    # Static-asset misses get a plain 404 body rather than an HTML error page
    if exc.code == 404 and request.path.startswith('/static/'):
        from flask import Response
        return Response(f"Static file not found: {request.path}", status=404, mimetype='text/plain')
    return render_template("error.html", status_code=exc.code, description=exc.description)
def handle_uncaught_exception(exc:Exception):
@@ -204,7 +281,7 @@ app.register_error_handler(Exception, handle_uncaught_exception)
@login_manager.user_loader
def load_user(user_id):
    """Flask-Login user loader: resolve a session user id to a User row.

    Returns the User instance or None if the id no longer exists.
    """
    # SQLAlchemy 2.x-style lookup; the legacy Query.get() call that preceded
    # it was deprecated and unreachable dead code, so it is removed.
    return db.session.get(User, int(user_id))
def init_app():
with app.app_context():
@@ -241,7 +318,8 @@ def init_app():
# Create initial API key for admin
api_key = ApiKey(
key=ApiKey.generate_key(),
description="Initial Admin API Key"
description="Initial Admin API Key",
user_id=admin.id
)
db.session.add(api_key)
@@ -273,96 +351,183 @@ def format_datetime(value):
with app.app_context():
csrf.exempt(api_bp)
def get_best_server():
    """Pick the production WSGI server best suited to the host OS.

    Returns 'gunicorn' on Linux/macOS, 'waitress' on Windows, and falls
    back to 'waitress' (with a warning) on anything unrecognised.
    """
    system = platform.system().lower()
    if system in ('linux', 'darwin'):
        # POSIX hosts get Gunicorn's pre-fork worker model
        return 'gunicorn'
    if system == 'windows':
        # Gunicorn does not run on Windows; Waitress is the standard choice
        return 'waitress'
    logger.warning(f"Unknown OS: {system}, defaulting to waitress")
    return 'waitress'
def run_with_waitress():
    """Serve the Flask app with Waitress (Windows-compatible production server).

    Settings come from the [server] section of config.ini.  Waitress is
    HTTP-only; SSL must be terminated by a reverse proxy.  Returns True on
    normal shutdown, False when Waitress is missing or fails to start.
    """
    try:
        from waitress import serve

        # Pull server settings from config.ini
        listen_host = config.get('server', 'HOST', fallback='0.0.0.0')
        listen_port = config.getint('server', 'PORT', fallback=8000)
        thread_count = config.get('server', 'WORKERS', fallback='4')  # Waitress uses threads instead of processes
        cert_path = config.get('server', 'SSL_CERTFILE', fallback=None)
        key_path = config.get('server', 'SSL_KEYFILE', fallback=None)

        logger.info(f"Starting Waitress server on {listen_host}:{listen_port} with {thread_count} threads")

        # Waitress tuning knobs - chosen for throughput on a typical host
        serve_options = dict(
            host=listen_host,
            port=listen_port,
            threads=int(thread_count),
            connection_limit=1000,
            cleanup_interval=30,
            channel_timeout=120,
            log_socket_errors=True,
            recv_bytes=65536,                    # larger receive buffer
            send_bytes=65536,                    # larger send buffer
            max_request_header_size=262144,      # 256KB header limit
            max_request_body_size=1073741824,    # 1GB body limit
            expose_tracebacks=False,             # never leak tracebacks in production
        )

        # Waitress cannot terminate SSL itself - only report what was found
        certs_present = bool(cert_path and key_path
                             and os.path.exists(cert_path) and os.path.exists(key_path))
        if certs_present:
            logger.info("SSL certificates found - but Waitress doesn't handle SSL directly")
            logger.info("For SSL support, use a reverse proxy (nginx, traefik, etc.)")
            logger.info("Starting Waitress without SSL on HTTP")
        else:
            logger.info("No SSL certificates configured - starting with HTTP")

        # Blocks here until the server shuts down (HTTP only - SSL handled by reverse proxy)
        serve(app, **serve_options)
    except ImportError:
        logger.error("Waitress not installed. Install with: pip install waitress")
        return False
    except Exception as e:
        logger.error(f"Failed to start Waitress: {e}")
        return False
    return True
def run_with_gunicorn():
    """Run the application with Gunicorn (Linux/macOS production server).

    Reads host/port/workers and optional SSL certificate paths from the
    [server] section of config.ini, builds a gunicorn command line and runs
    it in a subprocess.  Returns True on a clean exit, False when gunicorn
    is not installed or exits with a non-zero status.
    """
    # subprocess is stdlib - importing it cannot fail, so it lives outside
    # the try block (the old `except ImportError` branch was unreachable;
    # a missing gunicorn binary surfaces as FileNotFoundError below).
    import subprocess
    try:
        # Read configuration
        host = config.get('server', 'HOST', fallback='0.0.0.0')
        port = config.getint('server', 'PORT', fallback=8000)
        workers = config.get('server', 'WORKERS', fallback='4')
        ssl_certfile = config.get('server', 'SSL_CERTFILE', fallback=None)
        ssl_keyfile = config.get('server', 'SSL_KEYFILE', fallback=None)

        logger.info(f"Starting Gunicorn server on {host}:{port} with {workers} workers")
        logger.info(f"SSL: {'Enabled' if ssl_certfile and ssl_keyfile else 'Disabled'}")

        # Build Gunicorn command
        cmd = [
            'gunicorn',
            '--bind', f'{host}:{port}',
            '--workers', str(workers),
            '--worker-class', 'sync',
            '--timeout', '120',
            '--keepalive', '5',
            '--max-requests', '1000',
            '--max-requests-jitter', '50',
            '--preload',
            'app:app'
        ]

        # Add SSL configuration if available
        if ssl_certfile and ssl_keyfile and os.path.exists(ssl_certfile) and os.path.exists(ssl_keyfile):
            cmd.extend(['--certfile', ssl_certfile, '--keyfile', ssl_keyfile])
            logger.info("SSL enabled with certificates")
        elif ssl_certfile and ssl_keyfile:
            logger.warning(f"SSL certificates not found: {ssl_certfile}, {ssl_keyfile}")

        # Run Gunicorn and propagate its exit status to the caller.
        # BUG FIX: the original ignored the exit code and reported success
        # even when gunicorn crashed on startup; the caller then never fell
        # back to the development server.
        result = subprocess.run(cmd)
        if result.returncode != 0:
            logger.error(f"Gunicorn exited with status {result.returncode}")
            return False
    except FileNotFoundError:
        logger.error("Gunicorn command not found. Install with: pip install gunicorn")
        return False
    except Exception as e:
        logger.error(f"Failed to start Gunicorn: {e}")
        return False
    return True
def run_app():
"""Start the application with Uvicorn using config settings"""
host = config.get('server', 'HOST', fallback='0.0.0.0')
port = config.getint('server', 'PORT', fallback=8000)
ssl_certfile = config.get('server', 'SSL_CERTFILE', fallback='certs/cert.pem')
ssl_keyfile = config.get('server', 'SSL_KEYFILE', fallback='certs/key.pem')
"""Start the application with the best server for this OS"""
best_server = get_best_server()
# Get new configuration settings
development_mode = config.getboolean('server', 'DEVELOPMENT_MODE', fallback=False)
watch_files = config.getboolean('server', 'WATCH_FILES', fallback=False)
workers_setting = config.get('server', 'WORKERS', fallback='1')
worker_lifetime = config.getint('server', 'WORKER_LIFETIME', fallback=86400)
graceful_shutdown = config.getboolean('server', 'GRACEFUL_SHUTDOWN', fallback=True)
shutdown_timeout = config.getint('server', 'SHUTDOWN_TIMEOUT', fallback=30)
logger.info(f"Detected OS: {platform.system()}")
logger.info(f"Using server: {best_server}")
# Parse workers setting - could be "auto" or a number
workers = None
if workers_setting.lower() == 'auto':
import multiprocessing
workers = multiprocessing.cpu_count()
if best_server == 'waitress':
success = run_with_waitress()
elif best_server == 'gunicorn':
success = run_with_gunicorn()
else:
try:
workers = int(workers_setting)
except ValueError:
logger.warning(f"Invalid WORKERS setting '{workers_setting}', defaulting to 1")
workers = 1
logger.error("No suitable server found")
success = False
# Only enable file watching in development mode
reload_enabled = development_mode and watch_files
# Use debug log level in development mode
log_level = "debug" if development_mode else "info"
logger.info(f"Starting application on {host}:{port} with SSL")
logger.info(f"SSL certificate: {ssl_certfile}")
logger.info(f"SSL key: {ssl_keyfile}")
logger.info(f"Development mode: {development_mode}")
logger.info(f"File watching: {reload_enabled}")
logger.info(f"Workers: {workers}")
# Get max requests per worker before graceful restart
# Setting to None disables the worker auto-restart feature
limit_max_requests = None
if worker_lifetime > 0:
# If worker_lifetime is set (> 0), we'll use a reasonable request limit
# Default to around 10,000 requests per worker before restart
limit_max_requests = 10000
# Get trusted proxy configuration for Uvicorn
trusted_proxies_config = config.get('proxy', 'TRUSTED_PROXIES', fallback='').strip()
if trusted_proxies_config:
forwarded_allow_ips = trusted_proxies_config.replace(',', ' ')
logger.info(f"Uvicorn forwarded_allow_ips: {forwarded_allow_ips}")
else:
forwarded_allow_ips = '*'
logger.info("Uvicorn allowing all IPs for forwarded headers")
uvicorn.run(
"app:wsg",
host=host,
port=port,
reload=reload_enabled,
workers=workers,
log_level=log_level,
ssl_certfile=ssl_certfile,
ssl_keyfile=ssl_keyfile,
proxy_headers=True,
forwarded_allow_ips=forwarded_allow_ips,
timeout_keep_alive=65, # Keep-alive timeout to detect hanging connections
limit_max_requests=limit_max_requests, # Fixed: Only restart workers after this many requests
timeout_graceful_shutdown=shutdown_timeout
)
if not success:
logger.error("Failed to start preferred server, falling back to Flask dev server")
# Fallback to Flask development server
host = config.get('server', 'HOST', fallback='0.0.0.0')
port = config.getint('server', 'PORT', fallback=8000)
ssl_certfile = config.get('server', 'SSL_CERTFILE', fallback='certs/cert.pem')
ssl_keyfile = config.get('server', 'SSL_KEYFILE', fallback='certs/key.pem')
ssl_context = None
if ssl_certfile and ssl_keyfile and os.path.exists(ssl_certfile) and os.path.exists(ssl_keyfile):
ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
ssl_context.load_cert_chain(ssl_certfile, ssl_keyfile)
ssl_context.verify_mode = ssl.CERT_NONE
app.run(debug=app.config['APP_DEBUG'], ssl_context=ssl_context, host=host, port=port)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Domain Logons Monitoring Application')
parser.add_argument('--legacy', action='store_true', help='Use legacy Flask server instead of Uvicorn')
parser.add_argument('--legacy', action='store_true', help='Use legacy Flask development server')
parser.add_argument('--waitress', action='store_true', help='Force use of Waitress server (Windows)')
parser.add_argument('--gunicorn', action='store_true', help='Force use of Gunicorn server (Linux/macOS)')
args = parser.parse_args()
if args.legacy:
# Legacy Flask server mode
ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
ssl_certfile = config.get('server', 'SSL_CERTFILE', fallback='certs/cert.pem')
ssl_keyfile = config.get('server', 'SSL_KEYFILE', fallback='certs/key.pem')
ssl_context.load_cert_chain(ssl_certfile, ssl_keyfile)
ssl_context.verify_mode = ssl.CERT_NONE # Accept self-signed certificates
# Legacy Flask development server mode
host = config.get('server', 'HOST', fallback='0.0.0.0')
port = config.getint('server', 'PORT', fallback=8000)
ssl_certfile = config.get('server', 'SSL_CERTFILE', fallback='certs/cert.pem')
ssl_keyfile = config.get('server', 'SSL_KEYFILE', fallback='certs/key.pem')
ssl_context = None
if ssl_certfile and ssl_keyfile and os.path.exists(ssl_certfile) and os.path.exists(ssl_keyfile):
ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
ssl_context.load_cert_chain(ssl_certfile, ssl_keyfile)
ssl_context.verify_mode = ssl.CERT_NONE
logger.info("Starting Flask development server")
app.run(debug=app.config['APP_DEBUG'], ssl_context=ssl_context, host=host, port=port)
elif args.waitress:
# Force Waitress server
logger.info("Force using Waitress server")
if not run_with_waitress():
logger.error("Failed to start Waitress, no fallback available")
elif args.gunicorn:
# Force Gunicorn server
logger.info("Force using Gunicorn server")
if not run_with_gunicorn():
logger.error("Failed to start Gunicorn, no fallback available")
else:
# Default to Uvicorn server
# Auto-detect best server for OS
run_app()

View File

@@ -6,12 +6,11 @@ from .models import User, Settings, AllowedDomain
from extensions import db
import re
def validate_password_strength(form, field):
"""Validate password based on current settings"""
password = field.data
def validate_password_requirements(password):
"""Standalone password validation function that returns a list of error messages"""
settings = Settings.query.first()
if not settings:
return # No settings found, allow any password
return [] # No settings found, allow any password
errors = []
@@ -34,6 +33,13 @@ def validate_password_strength(form, field):
if not any(char in safe_chars for char in password):
errors.append(f'Password must contain at least one special character from: {safe_chars}')
return errors
def validate_password_strength(form, field):
    """WTForms validator that delegates to validate_password_requirements().

    Raises ValidationError with all collected messages joined by spaces.
    """
    problems = validate_password_requirements(field.data)
    if problems:
        raise ValidationError(' '.join(problems))

View File

@@ -17,6 +17,15 @@ import zipfile
import tempfile
import configparser
import logging
import re
import pyotp
import qrcode
import base64
import tempfile
import configparser
import zipfile
import os
import shutil
from utils.toolbox import get_current_timestamp
print(get_current_timestamp())
@@ -512,18 +521,17 @@ def manage_users():
flash('A user with that username or email already exists.', 'danger')
else:
# Validate password strength
try:
from .forms import validate_password_strength
validate_password_strength(password)
except ValidationError as e:
logger.warning(f'Password validation failed for user creation: {str(e)}', extra={
from .forms import validate_password_requirements
password_errors = validate_password_requirements(password)
if password_errors:
logger.warning(f'Password validation failed for user creation: {"; ".join(password_errors)}', extra={
'username': username,
'email': email,
'role': role,
'remote_addr': request.remote_addr,
'current_user_id': current_user.id
})
flash(f'Password validation failed: {str(e)}', 'danger')
flash(f'Password validation failed: {"; ".join(password_errors)}', 'danger')
return redirect(url_for('auth.manage_users'))
hashed_password = bcrypt.generate_password_hash(password).decode('utf-8')
@@ -947,17 +955,16 @@ def reset_user_password(user_id):
new_password = request.form.get('new_password')
if new_password:
# Validate password strength
try:
from .forms import validate_password_strength
validate_password_strength(new_password)
except ValidationError as e:
logger.warning(f'Password validation failed for password reset: {str(e)}', extra={
from .forms import validate_password_requirements
password_errors = validate_password_requirements(new_password)
if password_errors:
logger.warning(f'Password validation failed for password reset: {"; ".join(password_errors)}', extra={
'target_user_id': user_id,
'target_username': user.username,
'current_user_id': current_user.id,
'remote_addr': request.remote_addr
})
flash(f'Password validation failed: {str(e)}', 'danger')
flash(f'Password validation failed: {"; ".join(password_errors)}', 'danger')
return redirect(url_for('auth.manage_users'))
user.password = bcrypt.generate_password_hash(new_password).decode('utf-8')
@@ -1285,11 +1292,10 @@ def create_company_user(company_id):
return redirect(url_for('auth.create_company_user', company_id=company_id))
# Validate password strength
try:
from .forms import validate_password_strength
validate_password_strength(password)
except ValidationError as e:
logger.warning(f'Password validation failed for company user creation: {str(e)}', extra={
from .forms import validate_password_requirements
password_errors = validate_password_requirements(password)
if password_errors:
logger.warning(f'Password validation failed for company user creation: {"; ".join(password_errors)}', extra={
'username': username,
'email': email,
'company_id': company_id,
@@ -1297,7 +1303,7 @@ def create_company_user(company_id):
'current_user_id': current_user.id,
'remote_addr': request.remote_addr
})
flash(f'Password validation failed: {str(e)}', 'danger')
flash(f'Password validation failed: {"; ".join(password_errors)}', 'danger')
return redirect(url_for('auth.create_company_user', company_id=company_id))
# Create new user
@@ -1380,62 +1386,82 @@ def delete_company_api_key(company_id, key_id):
@auth.route('/company/<int:company_id>/download_agent', methods=['GET', 'POST'])
@login_required
def download_agent(company_id):
# Check if user has access to this company
user_company = UserCompany.query.filter_by(user_id=current_user.id, company_id=company_id).first()
if not user_company and current_user.role != 'Admin' and current_user.role != 'GlobalAdmin':
abort(403)
company = Company.query.get_or_404(company_id)
api_keys = ApiKey.query.filter_by(company_id=company_id).all()
if request.method == 'POST':
api_key_id = request.form.get('api_key')
server_url = request.form.get('server_url')
install_dir = request.form.get('install_dir')
try:
# Check if user has access to this company
user_company = UserCompany.query.filter_by(user_id=current_user.id, company_id=company_id).first()
if not user_company and current_user.role != 'Admin' and current_user.role != 'GlobalAdmin':
abort(403)
# Get the selected API key
selected_api_key = ApiKey.query.get(api_key_id)
if not selected_api_key or selected_api_key.company_id != company_id:
flash('Invalid API key selected', 'danger')
return redirect(url_for('auth.download_agent', company_id=company_id))
company = Company.query.get_or_404(company_id)
api_keys = ApiKey.query.filter_by(company_id=company_id).all()
# Create a ZIP file with pre-configured agent
with tempfile.TemporaryDirectory() as tmp_dir:
# Create config.ini file
from flask import current_app
# Get timezone from the application configuration
from flask import current_app
app_timezone = current_app.config.get('TIMEZONE', 'UTC')
if request.method == 'POST':
api_key_id = request.form.get('api_key')
server_url = request.form.get('server_url')
# Get timezone from the application configuration
app_timezone = current_app.config.get('TIMEZONE', 'UTC')
# Get all form values for configuration
debug_logs = 'debug_logs' in request.form
install_dir = request.form.get('install_dir', '').strip()
if not install_dir: # If empty or whitespace only, use default
install_dir = r"C:\ProgramData\UserSessionMon"
health_check_interval = request.form.get('health_check_interval', '30')
obtain_public_ip = 'obtain_public_ip' in request.form
public_ip_http_urls = request.form.get('public_ip_http_urls', 'https://ifconfig.me/ip,https://ipv4.icanhazip.com')
config = configparser.ConfigParser()
config['API'] = {
'api_key': selected_api_key.key,
'server_url': server_url,
'debug_logs': 'false',
'timezone': app_timezone,
'install_dir': install_dir if install_dir else r"C:\ProgramData\UserSessionMon"
}
# Settings for Log retention for agent - it is in MB ( max 20 MB, 0 is No log)
config['Logging'] = {
'session_log_rotation_size_mb': 5,
'error_log_rotation_size_mb': 5,
'event_log_rotation_size_mb': 5
}
# Logging settings
session_log_rotation_size_mb = request.form.get('session_log_rotation_size_mb', '5')
error_log_rotation_size_mb = request.form.get('error_log_rotation_size_mb', '5')
event_log_rotation_size_mb = request.form.get('event_log_rotation_size_mb', '5')
config_path = os.path.join(tmp_dir, 'config.ini')
with open(config_path, 'w') as f:
config.write(f)
# Get the selected API key
selected_api_key = ApiKey.query.get(api_key_id)
if not selected_api_key or selected_api_key.company_id != company_id:
flash('Invalid API key selected', 'danger')
return redirect(url_for('auth.download_agent', company_id=company_id))
# Create a ZIP file with pre-configured agent
# Create temporary directory and files
tmp_dir = tempfile.mkdtemp()
try:
# Create config.ini file
config = configparser.ConfigParser()
config['API'] = {
'api_key': selected_api_key.key,
'server_url': server_url,
'debug_logs': str(debug_logs).lower(),
'timezone': app_timezone,
'install_dir': install_dir,
'health_check_interval': health_check_interval,
'health_check_path': '/api/health',
'obtain_public_ip': str(obtain_public_ip).lower(),
'public_ip_http_urls': public_ip_http_urls
}
# Path to the pre-compiled agent executable
agent_path = os.path.join(os.path.dirname(os.path.dirname(__file__)),
'windows_agent', 'winagentUSM.exe')
install_dir_display = install_dir if install_dir else r"C:\ProgramData\UserSessionMon"
# Create installation batch script
install_script_path = os.path.join(tmp_dir, 'install_service.bat')
with open(install_script_path, 'w') as f:
# Get the current directory for the config path
f.write(f"""@echo off
# Settings for Log retention for agent - it is in MB ( max 20 MB, 0 is No log)
config['Logging'] = {
'session_log_rotation_size_mb': session_log_rotation_size_mb,
'error_log_rotation_size_mb': error_log_rotation_size_mb,
'event_log_rotation_size_mb': event_log_rotation_size_mb
}
config_path = os.path.join(tmp_dir, 'config.ini')
with open(config_path, 'w') as f:
config.write(f)
# Path to the pre-compiled agent executable
agent_path = os.path.join(os.path.dirname(os.path.dirname(__file__)),
'windows_agent', 'winagentUSM.exe')
install_dir_display = install_dir if install_dir else r"C:\ProgramData\UserSessionMon"
# Create installation batch script
install_script_path = os.path.join(tmp_dir, 'install_service.bat')
with open(install_script_path, 'w') as f:
# Get the current directory for the config path
f.write(f"""@echo off
REM User Session Monitor Agent Installation Script
REM This script must be run as Administrator
@@ -1485,11 +1511,11 @@ echo To uninstall the app run uninstaller script:
echo {install_dir_display}\\uninstall.bat
pause
""")
# Create README file with instructions
readme_path = os.path.join(tmp_dir, 'README.txt')
with open(readme_path, 'w') as f:
f.write(f"""USER SESSION MONITOR AGENT INSTALLATION INSTRUCTIONS
# Create README file with instructions
readme_path = os.path.join(tmp_dir, 'README.txt')
with open(readme_path, 'w') as f:
f.write(f"""USER SESSION MONITOR AGENT INSTALLATION INSTRUCTIONS
AUTOMATIC INSTALLATION (RECOMMENDED):
1. Extract the contents of this ZIP file to a folder on your Windows computer.
@@ -1512,49 +1538,86 @@ Configuration:
- Config file will be created at: {install_dir_display}\\config.ini
Service Management:
- Start service: sc start "User Session Monitor"
- Stop service: sc stop "User Session Monitor"
- Check status: sc query "User Session Monitor"
- Start service: sc start "UserSessionMonService"
- Stop service: sc stop "UserSessionMonService"
- Check status: sc query "UserSessionMonService"
- Uninstall service: winagentUSM.exe -service uninstall
If you need to change settings later, edit the config file or use the command line:
- winagentUSM.exe --api-key <key>
- winagentUSM.exe --url <url>
- winagentUSM.exe --debug true|false
- winagentUSM.exe --timezone <timezone>
Configuration Settings:
- Debug Logs: {str(debug_logs).lower()}
- Health Check Interval: {health_check_interval} seconds
- Health Check Path: /api/health
- Public IP Detection: {str(obtain_public_ip).lower()}
- Session Log Size: {session_log_rotation_size_mb} MB
- Error Log Size: {error_log_rotation_size_mb} MB
- Event Log Size: {event_log_rotation_size_mb} MB
If you need to change settings later, edit the config file at:
{install_dir_display}\\config.ini
""")
# Create the ZIP file
zip_path = os.path.join(tmp_dir, f'{company.name.replace(" ", "_")}_agent.zip')
with zipfile.ZipFile(zip_path, 'w') as zip_file:
# If agent executable exists, add it
if os.path.exists(agent_path):
zip_file.write(agent_path, arcname='winagentUSM.exe')
else:
flash('Pre-compiled agent not found. Please contact administrator.', 'danger')
return redirect(url_for('auth.download_agent', company_id=company_id))
zip_file.write(config_path, arcname='config.ini')
zip_file.write(readme_path, arcname='README.txt')
zip_file.write(install_script_path, arcname='install_service.bat')
# Send the ZIP file to the user
return send_file(
zip_path,
as_attachment=True,
download_name=f'{company.name.replace(" ", "_")}_agent.zip',
mimetype='application/zip'
)
# Default server URL is the current request URL's base
default_url = request.url_root.rstrip('/')
return render_template(
'auth/download_agent.html',
company=company,
api_keys=api_keys,
default_url=default_url
)
# Create the ZIP file
zip_path = os.path.join(tmp_dir, f'{company.name.replace(" ", "_")}_agent.zip')
with zipfile.ZipFile(zip_path, 'w') as zip_file:
# If agent executable exists, add it
if os.path.exists(agent_path):
zip_file.write(agent_path, arcname='winagentUSM.exe')
else:
flash('Pre-compiled agent not found. Please contact administrator.', 'danger')
return redirect(url_for('auth.download_agent', company_id=company_id))
zip_file.write(config_path, arcname='config.ini')
zip_file.write(readme_path, arcname='README.txt')
zip_file.write(install_script_path, arcname='install_service.bat')
# Read the ZIP file contents into memory
with open(zip_path, 'rb') as f:
zip_data = f.read()
# Create a BytesIO object to serve the file
from io import BytesIO
zip_buffer = BytesIO(zip_data)
zip_buffer.seek(0)
# Clean up temporary directory now that we have the data in memory
import shutil
try:
shutil.rmtree(tmp_dir)
except Exception as cleanup_error:
# Log cleanup error but don't fail the download
logger.warning(f"Failed to cleanup temporary directory {tmp_dir}: {str(cleanup_error)}")
# Send the ZIP file to the user from memory
return send_file(
zip_buffer,
as_attachment=True,
download_name=f'{company.name.replace(" ", "_")}_agent.zip',
mimetype='application/zip'
)
except Exception as e:
# Clean up temporary directory if an error occurs
import shutil
try:
shutil.rmtree(tmp_dir)
except Exception:
pass # Ignore cleanup errors during exception handling
raise e
# Default server URL is the current request URL's base
default_url = f"https://{request.host}"
return render_template(
'auth/download_agent.html',
company=company,
api_keys=api_keys,
default_url=default_url,
app_timezone=app_timezone
)
except Exception as e:
logging.error(f"Error in download_agent for company {company_id}: {str(e)}", exc_info=True)
flash('An error occurred while preparing the agent download. Please try again.', 'danger')
return redirect(url_for('auth.company_api_keys', company_id=company_id))
# User-Company Management Routes for manage_users page
@auth.route('/admin/user/<int:user_id>/companies/add', methods=['POST'])

View File

@@ -1,164 +1,113 @@
; Configuration file for User Monitor Application
; This file is auto-managed - existing values are preserved
; Generated/Updated by ConfigManager
[app]
SECRET_KEY = your_secret_key
APP_DEBUG = true
TIMEZONE = Europe/London
; Application configuration
; SECRET_KEY: Change this to a random secret key in production
; APP_DEBUG: Set to false in production
; TIMEZONE: Your local timezone for log display
secret_key = your_secret_key_change_this_in_production
app_debug = false
timezone = Europe/London
[server]
HOST = 0.0.0.0
PORT = 8000
SSL_CERTFILE = instance/certs/cert.pem
SSL_KEYFILE = instance/certs/key.pem
; Server configuration
; DEVELOPMENT_MODE: When true, enables development features (default: false)
DEVELOPMENT_MODE = true
; Watch for file changes and reload automatically (development only, default: false)
WATCH_FILES = true
; Number of worker processes for Uvicorn (default: 1)
; For production, set to 2-4 workers for most servers
; "auto" uses CPU count but may be excessive for some systems
WORKERS = 2
; Maximum number of seconds a worker can live (helps with memory leaks)
WORKER_LIFETIME = 86400
; Determines if server should stop gracefully or immediately on receiving SIGINT/SIGTERM
GRACEFUL_SHUTDOWN = true
; Timeout in seconds for graceful shutdown (default: 30)
SHUTDOWN_TIMEOUT = 30
; HOST: IP address to bind to (0.0.0.0 for all interfaces)
; PORT: Port number to listen on
; SSL_CERTFILE/SSL_KEYFILE: SSL certificate paths (for reverse proxy setups)
; WORKERS: Number of threads (Waitress) or processes (Gunicorn)
; DEVELOPMENT_MODE: Enable development features (false in production)
host = 0.0.0.0
port = 8000
ssl_certfile = instance/certs/cert.pem
ssl_keyfile = instance/certs/key.pem
development_mode = false
watch_files = false
workers = 4
worker_lifetime = 86400
graceful_shutdown = true
shutdown_timeout = 30
[database]
; Current SQLite configuration
SQLALCHEMY_DATABASE_URI = sqlite:///database.db
SQLALCHEMY_TRACK_MODIFICATIONS = false
; ====== DATABASE CONNECTION EXAMPLES ======
; Uncomment one of these examples and comment out the SQLite connection above to switch databases
; === PostgreSQL Example ===
; Setup:
; 1. Install PostgreSQL server
; 2. Create database and user with proper permissions
; 3. Install Python driver: pip install psycopg2-binary
;
; SQLALCHEMY_DATABASE_URI = postgresql://username:password@localhost:5432/database_name
; For SSL connection:
; SQLALCHEMY_DATABASE_URI = postgresql://username:password@localhost:5432/database_name?sslmode=require
; === MySQL/MariaDB Example ===
; Setup:
; 1. Install MySQL/MariaDB server
; 2. Create database and user with proper permissions
; 3. Install Python driver: pip install pymysql
;
; SQLALCHEMY_DATABASE_URI = mysql+pymysql://username:password@localhost:3306/database_name
; For SSL connection:
; SQLALCHEMY_DATABASE_URI = mysql+pymysql://username:password@localhost:3306/database_name?ssl_ca=/path/to/ca.pem
; === MSSQL Server Example ===
; Setup:
; 1. Install MSSQL Server
; 2. Create database and user
; 3. Install Python driver: pip install pyodbc
; 4. Install ODBC Driver for SQL Server:
; - On Ubuntu/Debian:
; sudo apt-get install -y unixodbc-dev
; sudo curl https://packages.microsoft.com/keys/microsoft.asc | apt-key add -
; sudo curl https://packages.microsoft.com/config/ubuntu/$(lsb_release -rs)/prod.list > /etc/apt/sources.list.d/mssql-release.list
; sudo apt-get update
; sudo ACCEPT_EULA=Y apt-get install -y msodbcsql18 # Driver 18 (latest)
; # Or for older driver: sudo ACCEPT_EULA=Y apt-get install -y msodbcsql17
; - On RHEL/CentOS:
; sudo curl https://packages.microsoft.com/config/rhel/8/prod.repo > /etc/yum.repos.d/mssql-release.repo
; sudo ACCEPT_EULA=Y dnf install -y msodbcsql18 # Driver 18 (latest)
; # Or for older driver: sudo ACCEPT_EULA=Y dnf install -y msodbcsql17
; - On Windows:
; Download and install from https://go.microsoft.com/fwlink/?linkid=2249006 # Driver 18
; # Or for older driver: https://go.microsoft.com/fwlink/?linkid=2187217 # Driver 17
;
; # Using ODBC Driver 18 (recommended)
; SQLALCHEMY_DATABASE_URI = mssql+pyodbc://username:password@server_name/database_name?driver=ODBC+Driver+18+for+SQL+Server
; # Using ODBC Driver 17
; SQLALCHEMY_DATABASE_URI = mssql+pyodbc://username:password@server_name/database_name?driver=ODBC+Driver+17+for+SQL+Server
; # For named instance:
; SQLALCHEMY_DATABASE_URI = mssql+pyodbc://username:password@server_name\\instance_name/database_name?driver=ODBC+Driver+18+for+SQL+Server
; Database configuration
; SQLALCHEMY_DATABASE_URI: Database connection string
; For SQLite (default): sqlite:///database.db
; For PostgreSQL: postgresql://user:pass@localhost:5432/dbname
; For MySQL: mysql+pymysql://user:pass@localhost:3306/dbname
; For MSSQL: mssql+pyodbc://user:pass@server/db?driver=ODBC+Driver+18+for+SQL+Server
sqlalchemy_database_uri = sqlite:///database.db
sqlalchemy_track_modifications = false
[session]
SESSION_COOKIE_SECURE = true
SESSION_COOKIE_HTTPONLY = true
SESSION_COOKIE_SAMESITE = Lax
REMEMBER_COOKIE_SECURE = true
REMEMBER_COOKIE_HTTPONLY = true
REMEMBER_COOKIE_DURATION = 7200
PERMANENT_SESSION_LIFETIME = 7200
; Session and cookie configuration
; SESSION_COOKIE_SECURE: Only send cookies over HTTPS
; REMEMBER_COOKIE_DURATION: Remember me duration in seconds
session_cookie_secure = true
session_cookie_httponly = true
session_cookie_samesite = Lax
remember_cookie_secure = true
remember_cookie_httponly = true
remember_cookie_duration = 7200
permanent_session_lifetime = 7200
[cache]
STATIC_MAX_AGE = 86400
IMAGE_MAX_AGE = 604800
JS_CSS_MAX_AGE = 43200
ENABLE_COMPRESSION = true
COMPRESSION_LEVEL = 6
COMPRESSION_MIN_SIZE = 500
; Cache and compression settings
; MAX_AGE values are in seconds
; COMPRESSION_LEVEL: 1-9 (higher = better compression, more CPU)
static_max_age = 86400
image_max_age = 604800
js_css_max_age = 43200
enable_compression = true
compression_level = 6
compression_min_size = 500
[security]
; Security headers configuration
CONTENT_SECURITY_POLICY = default-src 'self'; script-src 'self' 'unsafe-inline'; style-src 'self' 'unsafe-inline'; img-src 'self' data:; font-src 'self'; connect-src 'self'; frame-ancestors 'self'; form-action 'self'; base-uri 'self'
ENABLE_HSTS = true
HSTS_MAX_AGE = 31536000
ENABLE_SECURITY_HEADERS = true
; CONTENT_SECURITY_POLICY: Controls allowed content sources
; ENABLE_HSTS: HTTP Strict Transport Security (HTTPS only)
; HSTS_MAX_AGE: HSTS duration in seconds
content_security_policy = default-src 'self'; script-src 'self' 'unsafe-inline' https://cdn.datatables.net https://code.jquery.com; style-src 'self' 'unsafe-inline' https://cdn.datatables.net; img-src 'self' data:; font-src 'self' https://cdn.datatables.net; connect-src 'self'; frame-ancestors 'self'; form-action 'self'; base-uri 'self'
enable_hsts = true
hsts_max_age = 31536000
enable_security_headers = true
[rate_limiting]
; Rate limiting configuration
ENABLE_RATE_LIMITING = true
; Redis connection for rate limiting (leave empty to use in-memory storage)
; REDIS_URL = redis://localhost:6379/0
REDIS_URL =
; Login endpoint limits
LOGIN_LIMIT = 10
LOGIN_PERIOD = 60
; Registration endpoint limits
REGISTER_LIMIT = 5
REGISTER_PERIOD = 300
; API endpoint limits
API_LIMIT = 60
API_PERIOD = 60
; REDIS_URL: Redis connection for distributed rate limiting (leave empty for in-memory)
; LOGIN_LIMIT: Max login attempts per LOGIN_PERIOD seconds
; API_LIMIT: Max API requests per API_PERIOD seconds
; Leave REDIS_URL empty to use in-memory rate limiting
enable_rate_limiting = true
redis_url =
login_limit = 10
login_period = 60
register_limit = 5
register_period = 300
api_limit = 60
api_period = 60
[proxy]
; Reverse proxy configuration for Traefik
; Number of proxies between the client and your app (default: 1 for single proxy like Traefik)
PROXY_COUNT = 1
; Whether to trust X-Forwarded-For header (required for Traefik)
TRUST_X_FORWARDED_FOR = true
; Whether to trust X-Forwarded-Proto header (for HTTPS detection)
TRUST_X_FORWARDED_PROTO = true
; Whether to trust X-Forwarded-Host header
TRUST_X_FORWARDED_HOST = true
; Whether to trust X-Forwarded-Port header
TRUST_X_FORWARDED_PORT = true
; Whether to trust X-Forwarded-Prefix header
TRUST_X_FORWARDED_PREFIX = false
; Trusted proxy IPs (leave empty to trust all, comma-separated for multiple)
; For production with Traefik, specify your Traefik container IP or Docker network CIDR
; Examples:
; TRUSTED_PROXIES = 172.16.0.0/12,10.0.0.0/8,192.168.0.0/16 # Docker default networks
; TRUSTED_PROXIES = 172.20.0.2,172.20.0.3 # Specific Traefik IPs
; TRUSTED_PROXIES = 172.18.0.0/16 # Custom Docker network
; For development/testing, leave empty to trust all proxies:
TRUSTED_PROXIES =
; Reverse proxy configuration
; PROXY_COUNT: Number of proxies between client and app
; TRUSTED_PROXIES: Comma-separated proxy IPs (empty = trust all)
; For Docker: 172.16.0.0/12,10.0.0.0/8,192.168.0.0/16
proxy_count = 1
trust_x_forwarded_for = true
trust_x_forwarded_proto = true
trust_x_forwarded_host = true
trust_x_forwarded_port = true
trust_x_forwarded_prefix = false
trusted_proxies =
[logging]
; Database logging configuration
; Enable/disable database logging entirely
DB_LOGGING_ENABLED = true
; Loggers to exclude from database logging (comma-separated)
; These loggers often create feedback loops or excessive noise
DB_LOGGING_FILTERED_LOGGERS = watchfiles.main,watchfiles.watcher,watchdog,uvicorn.access,__mp_main__,__main__,app
; Message patterns to exclude from database logging (comma-separated)
; Messages containing these patterns will not be logged to database
DB_LOGGING_FILTERED_PATTERNS = database.db,instance/,file changed,reloading
; Enable filtering of file watcher logs (prevents feedback loops in debug mode)
FILTER_FILE_WATCHER_LOGS = true
; Minimum time between identical log entries (seconds) to prevent spam
DB_LOGGING_DEDUPE_INTERVAL = 1
; DB_LOGGING_ENABLED: Enable/disable database logging
; DB_LOGGING_FILTERED_LOGGERS: Comma-separated logger names to exclude
; DB_LOGGING_FILTERED_PATTERNS: Comma-separated patterns to exclude
db_logging_enabled = true
db_logging_filtered_loggers = watchfiles.main,watchfiles.watcher,watchdog,__mp_main__,__main__,app,waitress.queue
db_logging_filtered_patterns = database.db,instance/,file changed,reloading
filter_file_watcher_logs = true
db_logging_dedupe_interval = 1

View File

@@ -0,0 +1,36 @@
"""Replace ip_address with local_ip and add public_ip
Revision ID: 351386323a79
Revises: 4b74b8a01154
Create Date: 2025-05-28 03:54:33.409642
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '351386323a79'
down_revision = '4b74b8a01154'
branch_labels = None
depends_on = None
def upgrade():
    """Replace the IPv4-only ``ip_address`` column on ``api_logs``.

    Adds two nullable String(45) columns (sized to hold IPv6 addresses):
    ``local_ip`` and ``public_ip``, then drops the old column.  Nullable
    because agents may report neither address.  Uses batch mode so the
    change also works on SQLite (which cannot ALTER columns in place).
    """
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('api_logs', schema=None) as batch_op:
        batch_op.add_column(sa.Column('local_ip', sa.String(length=45), nullable=True))
        batch_op.add_column(sa.Column('public_ip', sa.String(length=45), nullable=True))
        # NOTE: data in ip_address is discarded, not migrated into local_ip.
        batch_op.drop_column('ip_address')
    # ### end Alembic commands ###
def downgrade():
    """Revert ``api_logs`` to the single IPv4-only ``ip_address`` column.

    The split local_ip/public_ip values are discarded; ``ip_address`` is
    restored empty for existing rows.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('api_logs', schema=None) as batch_op:
        # server_default='' is required: the restored column is NOT NULL,
        # and batch mode recreates the table, backfilling existing rows
        # from the default.  Without it the downgrade fails on any table
        # that already contains rows.
        batch_op.add_column(
            sa.Column('ip_address', sa.VARCHAR(length=15),
                      nullable=False, server_default='')
        )
        batch_op.drop_column('public_ip')
        batch_op.drop_column('local_ip')
    # ### end Alembic commands ###

View File

@@ -1,5 +1,4 @@
flask
asgiref
flask_sqlalchemy
flask_bcrypt
flask_jwt_extended
@@ -13,16 +12,7 @@ flask-compress
pillow
# serving the application
uvicorn[standard]
# not needed?
#flask_login
#flask_bootstrap
#httpx[http2]
# Production enhancements
redis # Optional: for distributed rate limiting
waitress # Production WSGI server for Windows (better than Gunicorn for Windows)
gunicorn # Production WSGI server (Linux/Unix only - won't work on Windows)
#psutil # System monitoring for health checks
#gunicorn # Production WSGI server
#click # CLI tools (for database backups)

View File

@@ -23,42 +23,143 @@
<div class="card-body">
<form method="POST" action="{{ url_for('auth.download_agent', company_id=company.id) }}">
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}">
<div class="mb-3">
<label for="api_key" class="form-label">Select API Key</label>
<select class="form-select" id="api_key" name="api_key" required>
<option value="">Select API Key</option>
{% for api_key in api_keys %}
<option value="{{ api_key.id }}">
{{ api_key.description }} - {{ api_key.key[:8] }}...{{ api_key.key[-8:] }}
</option>
{% endfor %}
</select>
{% if not api_keys %}
<div class="form-text text-warning">
No API keys found. <a href="{{ url_for('auth.company_api_keys', company_id=company.id) }}">Create an API key</a> first.
</div>
{% endif %}
<div class="table-responsive">
<table class="table table-bordered">
<thead class="table-light">
<tr>
<th style="width: 25%;">Setting</th>
<th style="width: 65%;">Value</th>
<th style="width: 10%;">Info</th>
</tr>
</thead>
<tbody>
<!-- API Settings -->
<tr class="table-secondary">
<td colspan="3"><strong>API Settings</strong></td>
</tr>
<tr>
<td><label for="api_key" class="form-label mb-0">API Key (Site)</label></td>
<td>
<select class="form-select" id="api_key" name="api_key" required>
<option value="">Select API Key</option>
{% for api_key in api_keys %}
<option value="{{ api_key.id }}" {% if loop.first %}selected{% endif %}>
{{ api_key.description }} - {{ api_key.key[:8] }}...{{ api_key.key[-8:] }}
</option>
{% endfor %}
</select>
{% if not api_keys %}
<div class="form-text text-warning">
No API keys found. <a href="{{ url_for('auth.company_api_keys', company_id=company.id) }}">Create an API key</a> first.
</div>
{% endif %}
</td>
<td class="text-center">
<i class="fas fa-info-circle text-info" data-bs-toggle="tooltip" data-bs-placement="left" title="The API key used to authenticate with the server. Each site should have its own unique API key."></i>
</td>
</tr>
<tr>
<td><label for="server_url" class="form-label mb-0">Server URL</label></td>
<td>
<input type="url" class="form-control" id="server_url" name="server_url" value="{{ default_url }}" required>
</td>
<td class="text-center">
<i class="fas fa-info-circle text-info" data-bs-toggle="tooltip" data-bs-placement="left" title="The base URL where the agent will send login events and health checks. This should be the URL of this server."></i>
</td>
</tr>
<tr>
<td><label for="debug_logs" class="form-label mb-0">Debug Logs</label></td>
<td>
<div class="form-check">
<input class="form-check-input" type="checkbox" id="debug_logs" name="debug_logs" value="true">
<label class="form-check-label" for="debug_logs">Enable debug logging</label>
</div>
</td>
<td class="text-center">
<i class="fas fa-info-circle text-info" data-bs-toggle="tooltip" data-bs-placement="left" title="Enable detailed debug logging for troubleshooting. Should be disabled in production for better performance."></i>
</td>
</tr>
<tr>
<td><label for="install_dir" class="form-label mb-0">Installation Directory</label></td>
<td>
<input type="text" class="form-control" id="install_dir" name="install_dir" placeholder="C:\ProgramData\UserSessionMon">
</td>
<td class="text-center">
<i class="fas fa-info-circle text-info" data-bs-toggle="tooltip" data-bs-placement="left" title="Directory where the agent will be installed and store its configuration and logs. Leave empty to use the default path."></i>
</td>
</tr>
<tr>
<td><label for="health_check_interval" class="form-label mb-0">Health Check Interval (seconds)</label></td>
<td>
<input type="number" class="form-control" id="health_check_interval" name="health_check_interval" value="30" min="10" max="3600">
</td>
<td class="text-center">
<i class="fas fa-info-circle text-info" data-bs-toggle="tooltip" data-bs-placement="left" title="Interval in seconds between health check requests to the server. Recommended: 30-300 seconds."></i>
</td>
</tr>
<tr>
<td><label for="obtain_public_ip" class="form-label mb-0">Obtain Public IP</label></td>
<td>
<div class="form-check">
<input class="form-check-input" type="checkbox" id="obtain_public_ip" name="obtain_public_ip" value="true" checked>
<label class="form-check-label" for="obtain_public_ip">Enable public IP detection</label>
</div>
</td>
<td class="text-center">
<i class="fas fa-info-circle text-info" data-bs-toggle="tooltip" data-bs-placement="left" title="Enable automatic detection of the public IP address for better location tracking and security monitoring."></i>
</td>
</tr>
<tr>
<td><label for="public_ip_http_urls" class="form-label mb-0">Public IP HTTP URLs</label></td>
<td>
<input type="text" class="form-control" id="public_ip_http_urls" name="public_ip_http_urls" value="https://ifconfig.me/ip,https://ipv4.icanhazip.com" placeholder="https://ifconfig.me/ip,https://ipv4.icanhazip.com">
</td>
<td class="text-center">
<i class="fas fa-info-circle text-info" data-bs-toggle="tooltip" data-bs-placement="left" title="Comma-separated list of HTTP URLs used to detect the public IP address. The agent will try these in order until one responds."></i>
</td>
</tr>
<!-- Logging Settings -->
<tr class="table-secondary">
<td colspan="3"><strong>Logging Settings (Max size, after which it will be archived)</strong></td>
</tr>
<tr>
<td><label for="session_log_rotation_size_mb" class="form-label mb-0">Session Log Size (MB)</label></td>
<td>
<input type="number" class="form-control" id="session_log_rotation_size_mb" name="session_log_rotation_size_mb" value="5" min="0" max="100">
</td>
<td class="text-center">
<i class="fas fa-info-circle text-info" data-bs-toggle="tooltip" data-bs-placement="left" title="Maximum size in MB for session log files before rotation. Set to 0 to disable session logging."></i>
</td>
</tr>
<tr>
<td><label for="error_log_rotation_size_mb" class="form-label mb-0">Error Log Size (MB)</label></td>
<td>
<input type="number" class="form-control" id="error_log_rotation_size_mb" name="error_log_rotation_size_mb" value="5" min="0" max="100">
</td>
<td class="text-center">
<i class="fas fa-info-circle text-info" data-bs-toggle="tooltip" data-bs-placement="left" title="Maximum size in MB for error log files before rotation. Set to 0 to disable error logging."></i>
</td>
</tr>
<tr>
<td><label for="event_log_rotation_size_mb" class="form-label mb-0">Event Log Size (MB)</label></td>
<td>
<input type="number" class="form-control" id="event_log_rotation_size_mb" name="event_log_rotation_size_mb" value="5" min="0" max="100">
</td>
<td class="text-center">
<i class="fas fa-info-circle text-info" data-bs-toggle="tooltip" data-bs-placement="left" title="Maximum size in MB for event log files before rotation. Set to 0 to disable event logging."></i>
</td>
</tr>
</tbody>
</table>
</div>
<div class="mb-3">
<label for="server_url" class="form-label">Server URL</label>
<input type="url" class="form-control" id="server_url" name="server_url" value="{{ default_url }}" required>
<div class="form-text">
The URL where the agent will send login events. Usually the same URL as this website.
</div>
<div class="d-grid gap-2">
<button type="submit" class="btn btn-primary btn-lg" {% if not api_keys %}disabled{% endif %}>
<i class="fas fa-download"></i> Download Agent Package
</button>
</div>
<div class="mb-3">
<label for="install_dir" class="form-label">Installation Directory (Optional)</label>
<input type="text" class="form-control" id="install_dir" name="install_dir" placeholder="C:\ProgramData\UserSessionMon">
<div class="form-text">
Custom installation directory for the agent. Leave empty to use the default path.
</div>
</div>
<button type="submit" class="btn btn-primary" {% if not api_keys %}disabled{% endif %}>
<i class="fas fa-download"></i> Download Agent
</button>
</form>
</div>
</div>
@@ -71,9 +172,7 @@
<ol>
<li>Download the agent package using the form above.</li>
<li>Extract the ZIP file to a folder on your Windows computer.</li>
<li>Run the agent as administrator to install it:
<code>winagentUSM.exe --service install</code>
</li>
<li>Right-click on "install_service.bat" and select "Run as administrator".</li>
<li>The service will start automatically and begin monitoring login events.</li>
<li>Events will be sent to this server using the specified API key.</li>
</ol>
@@ -85,4 +184,16 @@
</div>
</div>
</div>
{% endblock %}
{% block scripts %}
<script>
// Initialize Bootstrap tooltips
document.addEventListener('DOMContentLoaded', function() {
var tooltipTriggerList = [].slice.call(document.querySelectorAll('[data-bs-toggle="tooltip"]'));
var tooltipList = tooltipTriggerList.map(function (tooltipTriggerEl) {
return new bootstrap.Tooltip(tooltipTriggerEl);
});
});
</script>
{% endblock %}

View File

@@ -4,7 +4,6 @@
<link href="{{ url_for('static', filename='css/dataTables.bootstrap5.min.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='css/buttons.bootstrap5.min.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='css/buttons.dataTables.min.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='css/colVis.dataTables.min.css') }}" rel="stylesheet">
<!-- DateRangePicker CSS -->
<link href="{{ url_for('static', filename='css/daterangepicker.css') }}" rel="stylesheet">
<!-- Custom DateRangePicker dark theme styles -->
@@ -216,7 +215,8 @@
{% endif %}
<th>Site</th>
<th>Computer Name</th>
<th>IP Address</th>
<th>Local IP</th>
<th>Public IP</th>
</tr>
</thead>
<tbody>
@@ -228,11 +228,12 @@
<td>{{ log.event_type }}</td>
<td>{{ log.user_name }}</td>
{% if current_user.is_global_admin() and not selected_company_id %}
<td>{{ log.company.name if log.company else 'N/A' }}</td>
<td>{{ log.company.name if log.company else '' }}</td>
{% endif %}
<td>{{ log.api_key.description if log.api_key else 'N/A' }}</td>
<td>{{ log.api_key.description if log.api_key else '' }}</td>
<td>{{ log.computer_name }}</td>
<td>{{ log.ip_address }}</td>
<td>{{ log.local_ip or '' }}</td>
<td>{{ log.public_ip or '' }}</td>
</tr>
{% endfor %}
</tbody>
@@ -316,7 +317,7 @@
'<"row"<"col-sm-12 col-md-5"i><"col-sm-12 col-md-7"p>>',
columnDefs: [
{
targets: [5, 6], // Computer Name and IP Address columns are now at indices 5 and 6
targets: [5, 6, 7], // Computer Name, Local IP, and Public IP columns are now at indices 5, 6, and 7
visible: false, // Hide by default
searchable: true // Still allow searching in these columns
}
@@ -363,15 +364,11 @@
});
// Column names for the visibility controls
var columnNames = [
'Timestamp',
'Event Type',
'User Name',
{% if current_user.is_global_admin() and not selected_company_id %}'Company',{% endif %}
'Site',
'Computer Name',
'IP Address'
];
var columnNames = ['Timestamp', 'Event Type', 'User Name'];
{% if current_user.is_global_admin() and not selected_company_id %}
columnNames.push('Company');
{% endif %}
columnNames.push('Site', 'Computer Name', 'Local IP', 'Public IP');
// Load saved column visibility from localStorage
function loadColumnVisibility() {
@@ -383,10 +380,10 @@
console.log('Error parsing saved column visibility:', e);
}
}
// Default visibility - hide Computer Name (5) and IP Address (6)
// Default visibility - hide Computer Name (5), Local IP (6), and Public IP (7)
var defaultVisibility = {};
columnNames.forEach(function(name, index) {
defaultVisibility[index] = index !== 5 && index !== 6;
defaultVisibility[index] = index !== 5 && index !== 6 && index !== 7;
});
return defaultVisibility;
}

351
utils/config_manager.py Normal file
View File

@@ -0,0 +1,351 @@
#!/usr/bin/env python3
"""
Configuration Manager - Handles config.ini creation and updates
Preserves existing values while adding missing sections/options
"""
import os
import configparser
import logging
from typing import Dict, Any, Optional
logger = logging.getLogger(__name__)
class ConfigManager:
"""Manages configuration file creation and updates"""
def __init__(self, config_path: str = 'config.ini'):
self.config_path = config_path
self.config = configparser.ConfigParser(allow_no_value=True)
def get_default_config(self) -> Dict[str, Dict[str, Any]]:
"""Define the default configuration structure and values"""
return {
'app': {
'SECRET_KEY': 'your_secret_key_change_this_in_production',
'APP_DEBUG': 'false',
'TIMEZONE': 'Europe/London',
'; Application configuration': None,
'; SECRET_KEY: Change this to a random secret key in production': None,
'; APP_DEBUG: Set to false in production': None,
'; TIMEZONE: Your local timezone for log display': None,
},
'server': {
'HOST': '0.0.0.0',
'PORT': '8000',
'SSL_CERTFILE': 'instance/certs/cert.pem',
'SSL_KEYFILE': 'instance/certs/key.pem',
'DEVELOPMENT_MODE': 'false',
'WATCH_FILES': 'false',
'WORKERS': '4',
'WORKER_LIFETIME': '86400',
'GRACEFUL_SHUTDOWN': 'true',
'SHUTDOWN_TIMEOUT': '30',
'; Server configuration': None,
'; HOST: IP address to bind to (0.0.0.0 for all interfaces)': None,
'; PORT: Port number to listen on': None,
'; SSL_CERTFILE/SSL_KEYFILE: SSL certificate paths (for reverse proxy setups)': None,
'; WORKERS: Number of threads (Waitress) or processes (Gunicorn)': None,
'; DEVELOPMENT_MODE: Enable development features (false in production)': None,
},
'database': {
'SQLALCHEMY_DATABASE_URI': 'sqlite:///database.db',
'SQLALCHEMY_TRACK_MODIFICATIONS': 'false',
'; Database configuration': None,
'; SQLALCHEMY_DATABASE_URI: Database connection string': None,
'; For SQLite (default): sqlite:///database.db': None,
'; For PostgreSQL: postgresql://user:pass@localhost:5432/dbname': None,
'; For MySQL: mysql+pymysql://user:pass@localhost:3306/dbname': None,
'; For MSSQL: mssql+pyodbc://user:pass@server/db?driver=ODBC+Driver+18+for+SQL+Server': None,
},
'session': {
'SESSION_COOKIE_SECURE': 'true',
'SESSION_COOKIE_HTTPONLY': 'true',
'SESSION_COOKIE_SAMESITE': 'Lax',
'REMEMBER_COOKIE_SECURE': 'true',
'REMEMBER_COOKIE_HTTPONLY': 'true',
'REMEMBER_COOKIE_DURATION': '7200',
'PERMANENT_SESSION_LIFETIME': '7200',
'; Session and cookie configuration': None,
'; SESSION_COOKIE_SECURE: Only send cookies over HTTPS': None,
'; REMEMBER_COOKIE_DURATION: Remember me duration in seconds': None,
},
'cache': {
'STATIC_MAX_AGE': '86400',
'IMAGE_MAX_AGE': '604800',
'JS_CSS_MAX_AGE': '43200',
'ENABLE_COMPRESSION': 'true',
'COMPRESSION_LEVEL': '6',
'COMPRESSION_MIN_SIZE': '500',
'; Cache and compression settings': None,
'; MAX_AGE values are in seconds': None,
'; COMPRESSION_LEVEL: 1-9 (higher = better compression, more CPU)': None,
},
'security': {
'CONTENT_SECURITY_POLICY': "default-src 'self'; script-src 'self' 'unsafe-inline' https://cdn.datatables.net https://code.jquery.com; style-src 'self' 'unsafe-inline' https://cdn.datatables.net; img-src 'self' data:; font-src 'self' https://cdn.datatables.net; connect-src 'self'; frame-ancestors 'self'; form-action 'self'; base-uri 'self'",
'ENABLE_HSTS': 'true',
'HSTS_MAX_AGE': '31536000',
'ENABLE_SECURITY_HEADERS': 'true',
'; Security headers configuration': None,
'; CONTENT_SECURITY_POLICY: Controls allowed content sources': None,
'; ENABLE_HSTS: HTTP Strict Transport Security (HTTPS only)': None,
'; HSTS_MAX_AGE: HSTS duration in seconds': None,
},
'rate_limiting': {
'ENABLE_RATE_LIMITING': 'true',
'REDIS_URL': '',
'LOGIN_LIMIT': '10',
'LOGIN_PERIOD': '60',
'REGISTER_LIMIT': '5',
'REGISTER_PERIOD': '300',
'API_LIMIT': '60',
'API_PERIOD': '60',
'; Rate limiting configuration': None,
'; REDIS_URL: Redis connection for distributed rate limiting (leave empty for in-memory)': None,
'; LOGIN_LIMIT: Max login attempts per LOGIN_PERIOD seconds': None,
'; API_LIMIT: Max API requests per API_PERIOD seconds': None,
'; Leave REDIS_URL empty to use in-memory rate limiting': None,
},
'proxy': {
'PROXY_COUNT': '1',
'TRUST_X_FORWARDED_FOR': 'true',
'TRUST_X_FORWARDED_PROTO': 'true',
'TRUST_X_FORWARDED_HOST': 'true',
'TRUST_X_FORWARDED_PORT': 'true',
'TRUST_X_FORWARDED_PREFIX': 'false',
'TRUSTED_PROXIES': '',
'; Reverse proxy configuration': None,
'; PROXY_COUNT: Number of proxies between client and app': None,
'; TRUSTED_PROXIES: Comma-separated proxy IPs (empty = trust all)': None,
'; For Docker: 172.16.0.0/12,10.0.0.0/8,192.168.0.0/16': None,
},
'logging': {
'DB_LOGGING_ENABLED': 'true',
'DB_LOGGING_FILTERED_LOGGERS': 'watchfiles.main,watchfiles.watcher,watchdog,uvicorn.access,__mp_main__,__main__,app',
'DB_LOGGING_FILTERED_PATTERNS': 'database.db,instance/,file changed,reloading',
'FILTER_FILE_WATCHER_LOGS': 'true',
'DB_LOGGING_DEDUPE_INTERVAL': '1',
'; Database logging configuration': None,
'; DB_LOGGING_ENABLED: Enable/disable database logging': None,
'; DB_LOGGING_FILTERED_LOGGERS: Comma-separated logger names to exclude': None,
'; DB_LOGGING_FILTERED_PATTERNS: Comma-separated patterns to exclude': None,
}
}
def load_existing_config(self) -> bool:
"""Load existing configuration file if it exists"""
if os.path.exists(self.config_path):
try:
self.config.read(self.config_path)
logger.info(f"Loaded existing configuration from {self.config_path}")
return True
except Exception as e:
logger.error(f"Error reading existing config file: {e}")
return False
return False
def merge_config(self, preserve_existing: bool = True) -> bool:
"""
Merge default configuration with existing configuration
Args:
preserve_existing: If True, preserve existing values; if False, update to defaults
"""
default_config = self.get_default_config()
changes_made = False
for section_name, section_data in default_config.items():
# Add section if it doesn't exist
if not self.config.has_section(section_name):
self.config.add_section(section_name)
changes_made = True
logger.info(f"Added new section: [{section_name}]")
# Add missing options to existing sections
for option_key, option_value in section_data.items():
if option_key.startswith(';'):
# This is a comment - always add/update
continue
if not self.config.has_option(section_name, option_key):
# Missing option - add it
if option_value is not None:
self.config.set(section_name, option_key, str(option_value))
changes_made = True
logger.info(f"Added missing option: [{section_name}] {option_key}")
elif not preserve_existing and option_value is not None:
# Update existing option to default (only if preserve_existing=False)
current_value = self.config.get(section_name, option_key)
if current_value != str(option_value):
logger.info(f"Would update [{section_name}] {option_key}: {current_value} -> {option_value}")
# Uncomment next line to actually update existing values
# self.config.set(section_name, option_key, str(option_value))
# changes_made = True
return changes_made
def remove_obsolete_options(self) -> bool:
"""Remove configuration options that are no longer needed"""
# Define obsolete options that should be removed
obsolete_options = {
'server': ['UVICORN_WORKERS', 'ASYNC_MODE'], # Old Uvicorn settings
'security': ['FEATURE_POLICY'], # Replaced by Permissions-Policy
'rate_limiting': ['OLD_RATE_LIMIT_SETTING'], # Example obsolete setting
}
changes_made = False
for section_name, option_list in obsolete_options.items():
if self.config.has_section(section_name):
for option_key in option_list:
if self.config.has_option(section_name, option_key):
self.config.remove_option(section_name, option_key)
changes_made = True
logger.info(f"Removed obsolete option: [{section_name}] {option_key}")
return changes_made
def save_config(self) -> bool:
"""Save the configuration to file with proper formatting"""
try:
# Create backup of existing config
if os.path.exists(self.config_path):
backup_path = f"{self.config_path}.backup"
import shutil
shutil.copy2(self.config_path, backup_path)
logger.info(f"Created backup: {backup_path}")
# Write the configuration with proper formatting
with open(self.config_path, 'w', encoding='utf-8') as f:
# Write header comment
f.write("; Configuration file for User Monitor Application\n")
f.write("; This file is auto-managed - existing values are preserved\n")
f.write("; Generated/Updated by ConfigManager\n\n")
# Write sections with comments
default_config = self.get_default_config()
for section_name in default_config.keys():
if self.config.has_section(section_name):
f.write(f"[{section_name}]\n")
# Write comments first
for key, value in default_config[section_name].items():
if key.startswith(';') and value is None:
f.write(f"{key}\n")
# Write actual options
for option_key in self.config.options(section_name):
option_value = self.config.get(section_name, option_key)
f.write(f"{option_key} = {option_value}\n")
f.write("\n") # Empty line between sections
logger.info(f"Configuration saved to {self.config_path}")
return True
except Exception as e:
logger.error(f"Error saving configuration: {e}")
return False
def ensure_config_exists(self, preserve_existing: bool = True) -> bool:
"""
Main method to ensure configuration file exists and is up-to-date
Args:
preserve_existing: If True, preserve existing values; if False, reset to defaults
Returns:
bool: True if config was created/updated successfully
"""
config_existed = self.load_existing_config()
if not config_existed:
logger.info("No configuration file found, creating new one with defaults")
else:
logger.info("Existing configuration found, checking for updates needed")
# Merge configurations
merge_changes = self.merge_config(preserve_existing)
# Remove obsolete options
removal_changes = self.remove_obsolete_options()
# Save if changes were made or if config didn't exist
if not config_existed or merge_changes or removal_changes:
success = self.save_config()
if success:
if not config_existed:
logger.info("✅ New configuration file created successfully")
else:
logger.info("✅ Configuration file updated successfully")
return True
else:
logger.error("❌ Failed to save configuration file")
return False
else:
logger.info("✅ Configuration file is up-to-date, no changes needed")
return True
def initialize_config(config_path: str = 'config.ini', preserve_existing: bool = True) -> configparser.ConfigParser:
    """
    Initialize configuration file and return configured ConfigParser instance
    Args:
        config_path: Path to the configuration file
        preserve_existing: Whether to preserve existing configuration values
    Returns:
        ConfigParser instance with loaded configuration
    """
    manager = ConfigManager(config_path)
    if not manager.ensure_config_exists(preserve_existing):
        raise RuntimeError("Failed to initialize configuration file")
    # Re-read from disk so the caller sees exactly what was persisted
    parser = configparser.ConfigParser()
    parser.read(config_path)
    return parser
# Example usage and testing
if __name__ == "__main__":
    # Smoke-test the configuration manager end-to-end against a throwaway
    # file in the system temp directory (never touches the real config.ini).
    # Test the configuration manager
    import tempfile
    import os
    # Use a temporary file for testing
    test_config_path = os.path.join(tempfile.gettempdir(), 'test_config.ini')
    print("=== Testing Configuration Manager ===")
    try:
        # Test 1: Create new config
        # First call: no file exists yet, so defaults are written out.
        print("\n1. Testing new configuration creation...")
        config = initialize_config(test_config_path, preserve_existing=True)
        print(f"✅ New config created with {len(config.sections())} sections")
        # Test 2: Load existing config and add missing options
        # Second call on the same path exercises the merge/update path.
        print("\n2. Testing existing configuration update...")
        config = initialize_config(test_config_path, preserve_existing=True)
        print("✅ Existing config loaded and updated")
        # Test 3: Show some values
        # NOTE(review): these keys are assumed to exist in the defaults;
        # a missing key raises and is reported by the except below.
        print("\n3. Sample configuration values:")
        print(f"   Database URI: {config.get('database', 'SQLALCHEMY_DATABASE_URI')}")
        print(f"   Server Port: {config.get('server', 'PORT')}")
        print(f"   Debug Mode: {config.get('app', 'APP_DEBUG')}")
        print(f"\n✅ Configuration file created at: {test_config_path}")
        print("You can examine this file to see the output format")
    except Exception as e:
        # Catch-all is acceptable here: this is a demo harness, not library code.
        print(f"❌ Test failed: {e}")

View File

@@ -52,7 +52,7 @@ def add_security_headers(response):
# Permissions Policy (formerly Feature-Policy)
response.headers['Permissions-Policy'] = (
'camera=(), microphone=(), geolocation=(), interest-cohort=()'
'camera=(), microphone=(), geolocation=(), payment=(), fullscreen=(self)'
)
return response

Binary file not shown.