#!/usr/bin/env python3
"""
Flask web server for The Villages Import Tools
"""
from flask import Flask, render_template, jsonify, request, send_from_directory
import subprocess
import os
import threading
import json
from datetime import datetime

app = Flask(__name__, static_folder='static', template_folder='templates')

# Store running processes and their captured output
running_processes = {}
process_logs = {}


@app.route('/')
def index():
    """Main index page with script execution buttons"""
    # Get available scripts and organize them
    script_map = get_script_map()

    # Organize scripts by category
    scripts_by_category = {
        'Download County Data': ['download-county-addresses', 'download-county-roads', 'download-county-paths'],
        'Download OSM Data': ['download-osm-roads', 'download-osm-paths'],
        'Convert Data': ['convert-roads', 'convert-paths'],
        'Diff Data': ['diff-roads', 'diff-paths', 'diff-addresses'],
        'Utilities': ['ls', 'make-new-latest']
    }

    return render_template('index.html', script_map=script_map, scripts_by_category=scripts_by_category)


@app.route('/map')
def map_viewer():
    """Map viewer page"""
    return render_template('map.html')


def get_script_map():
    """Get the map of available scripts and their commands"""
    return {
        'ls': 'ls -alr /data',
        'make-new-latest': 'cd /data && NEWDIR=$(date +%y%m%d) && mkdir -p $NEWDIR/lake $NEWDIR/sumter && ln -sfn $NEWDIR latest',
        # todo: make a clean-old-data script
        'download-county-addresses': {
            # deliver files with standardized names
            'sumter': 'mkdir -p /data/latest/sumter && wget https://www.arcgis.com/sharing/rest/content/items/c75c5aac13a648968c5596b0665be28b/data -O /data/latest/sumter/addresses.shp.zip',
            'lake': 'mkdir -p /data/latest/lake && wget [LAKE_URL_HERE] -O /data/latest/lake/addresses.shp.zip'
        },
        'download-county-roads': {
            # deliver files with standardized names
            'sumter': 'mkdir -p /data/latest/sumter && wget https://www.arcgis.com/sharing/rest/content/items/9177e17c72d3433aa79630c7eda84add/data -O /data/latest/sumter/roads.shp.zip',
            'lake': 'mkdir -p /data/latest/lake && wget [LAKE_URL_HERE] -O /data/latest/lake/roads.shp.zip'
        },
        'download-county-paths': {
            # deliver files with standardized names
            #'sumter': ['/data/latest/sumter/paths.shp.zip'],
            #'lake': ['/data/latest/lake/paths.shp.zip']
        },
        # todo: integrate osm downloading and shapefile converting into diff-roads like addresses
        'download-osm-roads': {
            'lake': ['python', 'download-overpass.py', '--type', 'highways', 'Lake County', 'Florida', '/data/latest/lake/osm-roads.geojson'],
            'sumter': ['python', 'download-overpass.py', '--type', 'highways', 'Sumter County', 'Florida', '/data/latest/sumter/osm-roads.geojson']
        },
        'download-osm-paths': {
            # todo: no lake county paths
            #'lake': ['python', 'download-overpass.py', '--type', 'highways', 'Lake County', 'Florida', '/data/latest/lake/osm-roads.geojson'],
            'sumter': ['python', 'download-overpass.py', '--type', 'paths', 'Sumter County', 'Florida', '/data/latest/sumter/osm-paths.geojson']
        },
        # todo
        'convert-roads': {
            'sumter': ['python', 'shp-to-geojson.py', '/data/latest/sumter/roads.shp.zip', '/data/latest/sumter/county-roads.geojson'],
            'lake': ['python', 'shp-to-geojson.py', '/data/latest/lake/roads.shp.zip', '/data/latest/lake/county-roads.geojson']
        },
        'convert-paths': {
            # todo: delete sumter-multi-modal-convert.py ?
            'sumter': ['python', 'shp-to-geojson.py', '/data/latest/sumter/paths.shp.zip', '/data/latest/sumter/county-paths.geojson'],
        },
        'diff-roads': {
            'lake': ['python', 'diff-highways.py', '/data/latest/lake/osm-roads.geojson', '/data/latest/lake/county-roads.geojson', '--output', '/data/latest/lake/diff-roads.geojson'],
            'sumter': ['python', 'diff-highways.py', '/data/latest/sumter/osm-roads.geojson', '/data/latest/sumter/county-roads.geojson', '--output', '/data/latest/sumter/diff-roads.geojson']
        },
        'diff-paths': {
            # todo: no lake county data for paths
            #'lake': ['python', 'diff-highways.py', '/data/latest/lake/osm-paths.geojson', '/data/latest/lake/county-paths.geojson', '--output', '/data/latest/lake/diff-paths.geojson'],
            'sumter': ['python', 'diff-highways.py', '/data/latest/sumter/osm-paths.geojson', '/data/latest/sumter/county-paths.geojson', '--output', '/data/latest/sumter/diff-paths.geojson'],
        },
        # addresses need no osm download or shapefile convert, just county download
        'diff-addresses': {
            # todo: delete sumter-address-convert.py ?
            'lake': ['python', 'compare-addresses.py', 'Lake', 'Florida', '--local-zip', '/data/latest/lake/addresses.shp.zip', '--output-dir', '/data/latest/lake', '--cache-dir', '/data/osm_cache'],
            'sumter': ['python', 'compare-addresses.py', 'Sumter', 'Florida', '--local-zip', '/data/latest/sumter/addresses.shp.zip', '--output-dir', '/data/latest/sumter', '--cache-dir', '/data/osm_cache']
        },
    }


@app.route('/api/run-script', methods=['POST'])
def run_script():
    """Execute a script in the background"""
    data = request.json
    script_name = data.get('script')
    county = data.get('county', '')

    if not script_name:
        return jsonify({'error': 'No script specified'}), 400

    script_map = get_script_map()
    if script_name not in script_map:
        return jsonify({'error': 'Unknown script'}), 400

    script_config = script_map[script_name]

    # Handle both string commands and dicts of county-specific commands
    if isinstance(script_config, str):
        # Simple string command (like 'ls')
        cmd = ['bash', '-c', script_config]
    elif isinstance(script_config, dict):
        # County-specific commands
        if not county:
            return jsonify({'error': 'County required for this script'}), 400
        if county not in script_config:
            return jsonify({'error': f'County {county} not supported for {script_name}'}), 400
        cmd_config = script_config[county]
        if isinstance(cmd_config, str):
            cmd = ['bash', '-c', cmd_config]
        else:
            cmd = cmd_config
    else:
        return jsonify({'error': 'Invalid script configuration'}), 400

    # Generate a unique job ID
    job_id = f"{script_name}_{county}_{datetime.now().strftime('%Y%m%d_%H%M%S')}"

    # Start process in background
    def run_command():
        # Initialize the log buffer first so the except handler can append to it
        # even if Popen itself fails
        process_logs[job_id] = []
        try:
            process = subprocess.Popen(
                cmd,
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT,
                text=True,
                cwd=os.path.dirname(os.path.dirname(__file__))
            )
            running_processes[job_id] = process

            # Stream output line by line into the job's log buffer
            for line in process.stdout:
                process_logs[job_id].append(line)

            process.wait()
            process_logs[job_id].append(f"\n[Process completed with exit code {process.returncode}]")
        except Exception as e:
            process_logs[job_id].append(f"\n[ERROR: {str(e)}]")
        finally:
            if job_id in running_processes:
                del running_processes[job_id]

    thread = threading.Thread(target=run_command)
    thread.daemon = True
    thread.start()

    return jsonify({'job_id': job_id, 'status': 'started'})


@app.route('/api/job-status/<job_id>')
def job_status(job_id):
    """Get status and logs for a job"""
    is_running = job_id in running_processes
    logs = process_logs.get(job_id, [])

    return jsonify({
        'job_id': job_id,
        'running': is_running,
        'logs': logs
    })
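
# Illustrative client flow for the two endpoints above (a sketch, not part of the
# server itself; assumes the app is reachable on localhost:5000 as configured in
# app.run() below, and the job_id shown is a hypothetical example value):
#
#   curl -X POST http://localhost:5000/api/run-script \
#        -H 'Content-Type: application/json' \
#        -d '{"script": "diff-roads", "county": "sumter"}'
#   # -> {"job_id": "diff-roads_sumter_20250101_120000", "status": "started"}
#
#   curl http://localhost:5000/api/job-status/diff-roads_sumter_20250101_120000
#   # -> {"job_id": "...", "running": true, "logs": ["..."]}
#
# Clients poll /api/job-status/<job_id> until "running" is false; the accumulated
# logs include the final "[Process completed with exit code N]" line.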
@app.route('/api/list-files')
def list_files():
    """List available GeoJSON files"""
    data_dir = '/data'
    files = {
        'diff': [],
        'osm': [],
        'county': []
    }

    # Scan directories for geojson files
    if os.path.exists(data_dir):
        for root, dirs, filenames in os.walk(data_dir):
            for filename in filenames:
                if filename.endswith('.geojson'):
                    rel_path = os.path.relpath(os.path.join(root, filename), data_dir)
                    if 'diff' in filename.lower():
                        files['diff'].append(rel_path)
                    elif 'osm' in filename.lower():
                        files['osm'].append(rel_path)
                    elif any(county in rel_path.lower() for county in ['lake', 'sumter']):
                        # Match on the relative path, since the county name appears in
                        # the directory (e.g. latest/sumter/county-roads.geojson), not
                        # in the filename itself
                        files['county'].append(rel_path)

    return jsonify(files)


@app.route('/data/<path:filename>')
def serve_data(filename):
    """Serve GeoJSON files"""
    return send_from_directory('/data', filename)


if __name__ == '__main__':
    app.run(host='0.0.0.0', port=5000, debug=True)
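
# Illustrative data-access flow (a sketch): which paths exist depends on the jobs
# that have been run; the diff-roads path shown matches the output configured in
# get_script_map() and is served relative to /data.
#
#   curl http://localhost:5000/api/list-files
#   # -> {"diff": ["latest/sumter/diff-roads.geojson", ...], "osm": [...], "county": [...]}
#
#   curl http://localhost:5000/data/latest/sumter/diff-roads.geojson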